N <- c(1,2,4,8,16)
year <- 2020:2024
plot(N ~ year,
type = "l",
col = "red")
The population growth rate, lambda, is the ratio of two successive population sizes (lambda = N[t+1]/N[t]).
#time 2 vs 1
N[2]/N[1]
## [1] 2
2/1
## [1] 2
#time 3 vs 2
N[3]/N[2]
## [1] 2
4/2
## [1] 2
#time 4 vs 3
N[4]/N[3]
## [1] 2
8/4
## [1] 2
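All of the successive ratios can be computed at once with vectorized indexing; a minimal sketch (the same trick is used later for the bear data):
#drop the first element, divide by the vector with the last element dropped
N[-1]/N[-length(N)]
## [1] 2 2 2 2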
N.time = (lambda^time) * N.initial, where:

* N.time = population size at some time in the future (any given time)
* lambda = population growth rate
* time = number of years passing
* N.initial = initial population size
#initial data
#time = 0 1 2 3 4
N <- c(1,2,4,8,16)
year <- 2020:2024
#pop growth equation
N.initial <- 1
lambda <- 2
time <- 4
N.time.4 <- (lambda^time)*N.initial
N.time.4
## [1] 16
is(N.initial)
## [1] "numeric" "vector"
class(N.initial)
## [1] "numeric"
length(N.initial)
## [1] 1
N.initial <- 1 #initial population size; same as before
lambda <- 2 #pop growth; same as before
time <- 5 #change from 4 years to 5 years
N.time.5 <- (lambda^time)*N.initial #new object for time = 5
N.time.5
## [1] 32
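As a quick check, applying the equation to the whole vector of times 0 through 4 reproduces the observed series; a minimal sketch using the objects defined above:
#all times at once
(lambda^(0:4))*N.initial
## [1]  1  2  4  8 16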
Recreate the exponential growth time series for several values of lambda:
#vector of time
time <- c(1:10)
#scalars
lambda.1.5 <- 1.5
lambda.2 <- 2.0
lambda.2.5 <- 2.5
N.initial <- 1
N.timeseries.L1.5 <- (lambda.1.5^time)*N.initial
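Only the lambda = 1.5 series is computed above. A sketch for the other two growth rates and an overlay plot (the object names N.timeseries.L2 and N.timeseries.L2.5 are my own):
N.timeseries.L2 <- (lambda.2^time)*N.initial
N.timeseries.L2.5 <- (lambda.2.5^time)*N.initial
#overlay the three trajectories on one set of axes
matplot(time, cbind(N.timeseries.L1.5, N.timeseries.L2, N.timeseries.L2.5),
        type = "l", xlab = "time", ylab = "N")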
##9-17 LECTURE
census <- 1:39
year.t <- 1959:1997
females.N <- c(44,47,46,44,46,
45,46,40,39,39,
42,39,41,40,33,
36,34,39,35,34,
38,36,37,41,39,
51,47,57,48,60,
65,74,69,65,57,
70,81,99,99)
lambda.i <- females.N[-1]/females.N[-length(females.N)]
lambda.i <- c(lambda.i,NA)
lambda_log <- log(lambda.i)
bear_N <- data.frame(census,
year.t,
females.N,
lambda.i,
lambda_log)
plot(females.N ~ year.t, data = bear_N,
type = "b",
ylab = "Population index (females + cubs)",
xlab = "Year")
abline(v = 1970)
To project the population forward, we treat the observed annual growth rates as a "hat of lambdas": each simulated year we randomly pull one of the historical lambda values out of the hat and multiply it by the current population size. The histogram below shows the distribution of growth rates in that hat.
hist(bear_N$lambda.i)
hat_of_lambdas <- bear_N$lambda.i
#is.na() returns TRUE/FALSE for whether each element is NA
is.na(hat_of_lambdas)
## [1] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [13] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [25] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [37] FALSE FALSE TRUE
any(is.na(hat_of_lambdas) == TRUE)
## [1] TRUE
Drop the NA
length(hat_of_lambdas)
## [1] 39
hat_of_lambdas[39]
## [1] NA
hat_of_lambdas[-39]
## [1] 1.0681818 0.9787234 0.9565217 1.0454545 0.9782609 1.0222222 0.8695652
## [8] 0.9750000 1.0000000 1.0769231 0.9285714 1.0512821 0.9756098 0.8250000
## [15] 1.0909091 0.9444444 1.1470588 0.8974359 0.9714286 1.1176471 0.9473684
## [22] 1.0277778 1.1081081 0.9512195 1.3076923 0.9215686 1.2127660 0.8421053
## [29] 1.2500000 1.0833333 1.1384615 0.9324324 0.9420290 0.8769231 1.2280702
## [36] 1.1571429 1.2222222 1.0000000
hat_of_lambdas[-length(hat_of_lambdas)]#more general form
## [1] 1.0681818 0.9787234 0.9565217 1.0454545 0.9782609 1.0222222 0.8695652
## [8] 0.9750000 1.0000000 1.0769231 0.9285714 1.0512821 0.9756098 0.8250000
## [15] 1.0909091 0.9444444 1.1470588 0.8974359 0.9714286 1.1176471 0.9473684
## [22] 1.0277778 1.1081081 0.9512195 1.3076923 0.9215686 1.2127660 0.8421053
## [29] 1.2500000 1.0833333 1.1384615 0.9324324 0.9420290 0.8769231 1.2280702
## [36] 1.1571429 1.2222222 1.0000000
#na.omit() drops NA values
na.omit(hat_of_lambdas)
## [1] 1.0681818 0.9787234 0.9565217 1.0454545 0.9782609 1.0222222 0.8695652
## [8] 0.9750000 1.0000000 1.0769231 0.9285714 1.0512821 0.9756098 0.8250000
## [15] 1.0909091 0.9444444 1.1470588 0.8974359 0.9714286 1.1176471 0.9473684
## [22] 1.0277778 1.1081081 0.9512195 1.3076923 0.9215686 1.2127660 0.8421053
## [29] 1.2500000 1.0833333 1.1384615 0.9324324 0.9420290 0.8769231 1.2280702
## [36] 1.1571429 1.2222222 1.0000000
## attr(,"na.action")
## [1] 39
## attr(,"class")
## [1] "omit"
hat_of_lambdas <- hat_of_lambdas[-length(hat_of_lambdas)]
hist(hat_of_lambdas)
Pull a random lambda from the hat
# stuck hand into hat and pulled a lambda out of the hat
sample(x = hat_of_lambdas,
size = 1,
replace = TRUE)
## [1] 0.9787234
#pull lambda from hat and save to object
lambda_rand.t <- sample(x = hat_of_lambdas, size = 1,replace = TRUE)
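Because sample() draws at random, the value above will differ each time the code is run. A minimal sketch using set.seed() (the seed value 42 is arbitrary) if a reproducible draw is wanted:
set.seed(42) #fix the random number generator so the draw repeats
lambda_rand.t <- sample(x = hat_of_lambdas, size = 1, replace = TRUE)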
Get the top and bottom values of the data so we can set sensible plot limits:
head(bear_N)
## census year.t females.N lambda.i lambda_log
## 1 1 1959 44 1.0681818 0.06595797
## 2 2 1960 47 0.9787234 -0.02150621
## 3 3 1961 46 0.9565217 -0.04445176
## 4 4 1962 44 1.0454545 0.04445176
## 5 5 1963 46 0.9782609 -0.02197891
## 6 6 1964 45 1.0222222 0.02197891
tail(bear_N)
## census year.t females.N lambda.i lambda_log
## 34 34 1992 65 0.8769231 -0.1313360
## 35 35 1993 57 1.2280702 0.2054440
## 36 36 1994 70 1.1571429 0.1459539
## 37 37 1995 81 1.2222222 0.2006707
## 38 38 1996 99 1.0000000 0.0000000
## 39 39 1997 99 NA NA
summary(bear_N)
## census year.t females.N lambda.i
## Min. : 1.0 Min. :1959 Min. :33.00 Min. :0.8250
## 1st Qu.:10.5 1st Qu.:1968 1st Qu.:39.00 1st Qu.:0.9452
## Median :20.0 Median :1978 Median :44.00 Median :1.0000
## Mean :20.0 Mean :1978 Mean :49.79 Mean :1.0281
## 3rd Qu.:29.5 3rd Qu.:1988 3rd Qu.:57.00 3rd Qu.:1.1038
## Max. :39.0 Max. :1997 Max. :99.00 Max. :1.3077
## NA's :1
## lambda_log
## Min. :-0.19237
## 1st Qu.:-0.05639
## Median : 0.00000
## Mean : 0.02134
## 3rd Qu.: 0.09874
## Max. : 0.26826
## NA's :1
dim(bear_N)
## [1] 39 5
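For choosing ylim, the key number is the largest population index; a quick sketch:
#largest observed value, ignoring any NA
max(bear_N$females.N, na.rm = TRUE)
## [1] 99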
N.1997 <- 99
**Here is the simulation**
1.22807*99
## [1] 121.5789
lambda_rand.t*N.1997
## [1] 83.36842
N.1998 <- lambda_rand.t*N.1997
Pull a random lambda, multiply it by the current population size, and get the population size for the next year:
# 1997 to 1998
lambda_rand.t <- sample(x = hat_of_lambdas, size = 1,replace = TRUE)
N.1998 <- lambda_rand.t*N.1997
# 1998 to 1999
lambda_rand.t <- sample(x = hat_of_lambdas, size = 1,replace = TRUE)
N.1999 <- lambda_rand.t*N.1998
# 1999 to 2000
lambda_rand.t <- sample(x = hat_of_lambdas, size = 1,replace = TRUE)
N.2000 <- lambda_rand.t*N.1999
# 2000 to 2001
lambda_rand.t <- sample(x = hat_of_lambdas, size = 1,replace = TRUE)
N.2001 <- lambda_rand.t*N.2000
# 2001 to 2002
lambda_rand.t <- sample(x = hat_of_lambdas, size = 1,replace = TRUE)
N.2002 <- lambda_rand.t*N.2001
# 2002 to 2003
lambda_rand.t <- sample(x = hat_of_lambdas, size = 1,replace = TRUE)
N.2003 <- lambda_rand.t*N.2002
# 2003 to 2004
lambda_rand.t <- sample(x = hat_of_lambdas, size = 1,replace = TRUE)
N.2004 <- lambda_rand.t*N.2003
# 2004 to 2005
lambda_rand.t <- sample(x = hat_of_lambdas, size = 1,replace = TRUE)
N.2005 <- lambda_rand.t*N.2004
#ignoring the possibility of carrying capacity
Plot the simulated data:
year <- seq(1998, 2005) #years corresponding to N.1998 through N.2005
N.rand <- c(N.1998,N.1999,N.2000,N.2001,N.2002,N.2003,N.2004,N.2005)
df.rand <- data.frame(N.rand, year)
plot(N.rand ~ year, data = df.rand, type = "b")
Step-by-step instructions for building the simulation up to a for() loop:
# initial conditions
N.1997 <- 99
N.initial <- 99
# explore xlim = argument
plot(N.1997 ~ c(1997)) #plot a single point in space
plot(N.1997 ~ c(1997), xlim = c(1997, 1997+50))
plot(N.1997 ~ c(1997), xlim = c(1997, 1997+50), ylim = c(0, 550))#xlim and ylim
# not yet a for() loop
# the for loop done the hard way: one iteration by hand
N.current <- N.initial
# this is where the for loop would be
t <- 1
#grab a lambda
lambda_rand.t <- sample(x = hat_of_lambdas,size = 1,replace = TRUE)
#determine pop size
N.t <- N.current*lambda_rand.t
#set the year
year.t <- 1997+t
#plot the population
#points() adds to the existing graph
points(N.t ~ year.t)
#update N.current
N.current <- N.t
#make a new plot
plot(N.1997 ~ c(1997), xlim = c(1997, 1997+50), ylim = c(0, 550))
#start at 1997's pop size
N.current <- N.1997
for(t in 1:50){ #start with t = 1 then go to t = 50
#grab a lambda
lambda_rand.t <- sample(x = hat_of_lambdas,
size = 1,
replace = TRUE)
#determine pop size
N.t <- N.current*lambda_rand.t
#set the year
year.t <- 1997+t
#plot the population
#points() adds to the existing graph
points(N.t ~ year.t)
#update N.current
N.current <- N.t
}
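The loop above only plots each point; each year's N.t is overwritten on the next pass. A sketch (the vector name N.projected is my own) that also stores the whole trajectory for later use:
N.projected <- numeric(50) #empty vector to hold the projected sizes
N.current <- N.1997
for(t in 1:50){
  lambda_rand.t <- sample(x = hat_of_lambdas, size = 1, replace = TRUE)
  N.current <- N.current*lambda_rand.t
  N.projected[t] <- N.current #save each year's population size
}
#plot the stored trajectory
plot(N.projected ~ c(1998:2047), type = "l", xlab = "Year", ylab = "N")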
par(mfrow = c(3,3), mar = c(1,1,1,1))
#make a new plot
plot(N.1997 ~ c(1997), xlim = c(1997, 1997+50), ylim = c(0, 550))
#start at 1997's pop size
N.current <- N.1997
for(t in 1:50){ #start with t = 1 then go to t = 50
#grab a lambda
lambda_rand.t <- sample(x = hat_of_lambdas,
size = 1,
replace = TRUE)
#determine pop size
N.t <- N.current*lambda_rand.t
#set the year
year.t <- 1997+t
#plot the population
#points() adds to the existing graph
points(N.t ~ year.t)
#update N.current
N.current <- N.t
}
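par(mfrow = c(3,3)) sets up a 3 x 3 grid of panels, but the code above fills only the first panel. A sketch wrapping the whole simulation in an outer loop so that nine independent projections are drawn, one per panel:
par(mfrow = c(3,3), mar = c(1,1,1,1))
for(i in 1:9){ #one panel per simulation
  plot(N.1997 ~ c(1997), xlim = c(1997, 1997+50), ylim = c(0, 550))
  N.current <- N.1997
  for(t in 1:50){
    lambda_rand.t <- sample(x = hat_of_lambdas, size = 1, replace = TRUE)
    N.t <- N.current*lambda_rand.t
    points(N.t ~ c(1997+t))
    N.current <- N.t
  }
}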