Exercise 1
# number of observations
n=1000
# generate U(0,1) random numbers (seed set first so the draw is reproducible)
set.seed(10)
u = runif(n)
# inverse CDF transform: X = -log(1 - U)/3 follows Exp(rate = 3)
x = -log(1 - u)/3
head(x)
## [1] 1.2348012 0.3519658 1.1545725 0.1328090 0.5004931 0.1101978
length(x)
## [1] 1000
set.seed(10)
y=rexp(n,rate=3)
head(y)
## [1] 0.004985469 0.306740402 0.250719646 0.525013950 0.077219539 0.362224334
length(y)
## [1] 1000
par(mfrow=c(1,2))
hist(x,main="Exp dari Invers Transform",col="sky blue")
hist(y,main="Exp dari fungsi rexp",col="pink")

Exercise 2
n=1000
set.seed(10)
y=runif(n,0,1)
set.seed(30)
u = runif(n,0,1)
# with a U(0,1) proposal (g = 1), f(y)/g(y) is just the Beta(2,2) density 6*y*(1-y)
h = function(y) 6*y*(1-y)
# find c = max of f(y)/g(y) on (0,1)
derivH = optimize(f = h, interval = c(0,1), maximum = TRUE)
derivH$objective
## [1] 1.5
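The numerical maximum agrees with the analytic one: the derivative 6 - 12y vanishes at y = 1/2, where h(1/2) = 6(0.5)(0.5) = 1.5, so c = 1.5. A one-line check (not in the original):
h(0.5) # analytic maximum of 6*y*(1-y)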
f = function(y) 6*y*(1-y)   # target density: Beta(2,2)
g = function(y) 1           # proposal density: U(0,1)
nilaic = derivH$objective   # c = 1.5
# accept Y when U < f(Y) / (c * g(Y))
criteria = u < f(y)/(nilaic*g(y))
head(criteria)
## [1] TRUE TRUE TRUE TRUE TRUE TRUE
x=y[criteria]
head(x)
## [1] 0.50747820 0.30676851 0.42690767 0.69310208 0.08513597 0.22543662
length(x)
## [1] 668
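The number of accepted values is consistent with theory: accept-reject keeps each proposal with probability 1/c = 1/1.5, so about 667 of the 1000 proposals should survive. A short check (not in the original, using criteria and nilaic from above):
# observed vs. expected acceptance rate
c(observed = mean(criteria), expected = 1/nilaic)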
z <- rbeta(n,shape1 = 2,shape2 = 2)
par(mfrow=c(1,2))
hist(x,main="beta(2,2) dari AR", col="sky blue")
hist(z,main="beta(2,2) dari rbeta", col="pink")

Exercise 3
n=1500
set.seed(10)
y=rexp(n,rate=2/3)
set.seed(30)
u = runif(n,0,1)
# h(y) = -f(y)/g(y); optim() minimizes, so the minimum of h gives -c
h <- function(y) (-1) * (3/(2*gamma(3/2))) * y^(1/2) * exp(-y/3)
derivH <- optim(par = 0, fn = h, lower = 0, method = "L-BFGS-B")
derivH$value
## [1] -1.257317
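The constant can also be obtained analytically: the ratio f(y)/g(y) is proportional to y^(1/2) * exp(-y/3), whose derivative vanishes at y = 3/2, giving c = -h(3/2), roughly 1.2573. A one-line check (not in the original):
-h(3/2) # analytic maximum of f(y)/g(y), matches the optim() result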
# target density: Gamma(shape = 3/2, scale = 1)
f <- function(y) (1/gamma(3/2)) * y^(1/2) * exp(-y)
# proposal density: Exp(rate = 2/3)
g <- function(y) (2/3) * exp(-(2/3) * y)
nilaic <- -derivH$value # flip the sign: optim() returned the minimum of -f/g
# acceptance criterion: accept Y when U < f(Y)/(c*g(Y))
criteria <- u < f(y)/(nilaic*g(y))
head(criteria)
## [1] TRUE TRUE TRUE TRUE TRUE TRUE
# keep the accepted Y values as the sample X
x <- y[criteria]
head(x)
## [1] 0.02243461 1.38033181 1.12823841 2.36256278 0.34748792 1.63000950
# number of accepted observations
length(x)
## [1] 1393
# more than 1000 values were accepted, so keep only the first 1000
x=x[1:1000]
z <- rgamma(n,shape = 3/2,scale = 1)
par(mfrow=c(1,2))
hist(x,main="gamma(3/2,1) dari AR", col="blue")
hist(z,main="beta(3/2,1) dari rgamma", col="red")

Exercise 4
# number of observations
n=1000
set.seed(15)
z = rnorm(n, mean = 0, sd = 1) # Z ~ N(0,1)
y = rchisq(n, df = 2)          # Y ~ chi-square(2), independent of Z
x = z/sqrt(y/2)                # Z/sqrt(Y/df) ~ t(df = 2)
head(x)
## [1] 0.4249926 2.2981978 -0.3956900 0.8938340 0.9930945 -1.4325087
set.seed(5)
v=rt(n,df=2)
par(mfrow=c(1,2))
hist(x,main="t(2) dari Direct", col="pink")
hist(v,main="t(2) dari rt", col="yellow")
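The direct construction uses the fact that Z/sqrt(Y/df) follows a t distribution with df degrees of freedom when Z ~ N(0,1) and Y ~ chi-square(df) are independent. An optional QQ-plot check (not in the original, assuming x and n from above):
# QQ-plot of the simulated sample against theoretical t(2) quantiles
qqplot(qt(ppoints(n), df = 2), x,
       xlab = "Theoretical t(2) quantiles", ylab = "Simulated sample")
abline(0, 1, col = "red")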

Exercise 5
n=1000
set.seed(12)
epsilon = rnorm(n, mean = 0, sd = 1) # error term
set.seed(10)
X = replicate(3, rnorm(n, mean = 0, sd = 1.5)) # three predictors, each N(0, sd = 1.5)
head(X)
##             [,1]       [,2]        [,3]
## [1,]  0.02811926  1.5750205 -0.46179747
## [2,] -0.27637881  0.4291389  1.13712837
## [3,] -2.05699582  0.3608472 -0.86079512
## [4,] -0.89875157  1.2490578 -1.40811669
## [5,]  0.44181769 -0.3344748 -0.04154899
## [6,]  0.58469145  0.4325163 -1.59937298
# true coefficients: intercept 10 and slopes 3, 5, 7
betas = matrix(c(10,3,5,7), nrow = 4, ncol = 1)
# design matrix with a column of ones for the intercept
Xgab = cbind(1, X)
# response: Y = 10 + 3*X1 + 5*X2 + 7*X3 + epsilon
Y = Xgab %*% betas + epsilon
dataRegresi <- data.frame(Y,X)
colnames(dataRegresi) <- c("Y","X1","X2","X3")
head(dataRegresi)
##           Y          X1         X2          X3
## 1 13.246310  0.02811926  1.5750205 -0.46179747
## 2 20.853626 -0.27637881  0.4291389  1.13712837
## 3 -1.349062 -2.05699582  0.3608472 -0.86079512
## 4  2.772212 -0.89875157  1.2490578 -1.40811669
## 5  7.364594  0.44181769 -0.3344748 -0.04154899
## 6  2.448749  0.58469145  0.4325163 -1.59937298
modelRegresi=lm(Y~.,data = dataRegresi)
summary(modelRegresi)
##
## Call:
## lm(formula = Y ~ ., data = dataRegresi)
##
## Residuals:
##     Min      1Q  Median      3Q     Max
## -3.0364 -0.6052 -0.0207  0.6087  3.1774
##
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)
## (Intercept)  9.97484    0.03033   328.8   <2e-16 ***
## X1           2.97225    0.02042   145.6   <2e-16 ***
## X2           4.97282    0.01939   256.5   <2e-16 ***
## X3           7.00493    0.01989   352.2   <2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 0.9589 on 996 degrees of freedom
## Multiple R-squared:  0.9955, Adjusted R-squared:  0.9954
## F-statistic: 7.275e+04 on 3 and 996 DF,  p-value: < 2.2e-16
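An optional check (not in the original): place the fitted coefficients next to the true values used in the simulation, together with their 95% confidence intervals; each interval should cover the corresponding true coefficient.
# estimated coefficients vs. the true values (10, 3, 5, 7)
cbind(true = c(10, 3, 5, 7), estimate = coef(modelRegresi), confint(modelRegresi))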