Distribution of the Response Variables
# Response 1
respon1 <- databaru$Y1
shapiro.test(respon1)
##
## Shapiro-Wilk normality test
##
## data: respon1
## W = 0.91499, p-value = 0.0009703
#descdist(respon1, discrete = TRUE)
#fit_norm <- fitdist(respon1, "norm")
#summary(fit_norm)
# Response 2
respon2 <- databaru$Y2
shapiro.test(respon2)
##
## Shapiro-Wilk normality test
##
## data: respon2
## W = 0.9625, p-value = 0.08917
#descdist(respon2, discrete = TRUE)
#fit_norm <- fitdist(respon2, "norm")
#summary(fit_norm)
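The commented-out lines above sketch a distribution check with the fitdistrplus package. A minimal version of that check, assuming fitdistrplus is available (and noting that Y1 is continuous while Y2 takes small integer counts, so the discrete argument and the candidate distribution differ), might look like:

library(fitdistrplus)
# Continuous response Y1: Cullen-Frey plot and a normal fit
descdist(respon1, discrete = FALSE)
fit_norm1 <- fitdist(respon1, "norm")
summary(fit_norm1)
# Count-valued response Y2: discrete view and a Poisson fit as one candidate
descdist(respon2, discrete = TRUE)
fit_pois2 <- fitdist(respon2, "pois")
summary(fit_pois2)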
Descriptive Statistics of the Responses
respon <- databaru[,1:2]
deskriptif <- rbind(apply(respon, MARGIN = 2, FUN = summary),
                    SD = apply(respon, MARGIN = 2, FUN = sd),
                    Skewness = skewness(respon),
                    Kurtosis = kurtosis(respon))
deskriptif
## Y1 Y2
## Min. 0.1959505 0.0000000
## 1st Qu. 0.8062879 3.0000000
## Median 1.3912993 5.0000000
## Mean 1.5241917 4.7222222
## 3rd Qu. 1.7959342 7.0000000
## Max. 4.1153102 9.0000000
## SD 0.9823460 2.2353647
## Skewness 0.9410942 -0.2304348
## Kurtosis 3.4297495 2.2604276
shapiro.test(respon$Y2)
##
## Shapiro-Wilk normality test
##
## data: respon$Y2
## W = 0.9625, p-value = 0.08917
# Correlation test between the response variables
cor.test(respon1,respon2)
##
## Pearson's product-moment correlation
##
## data: respon1 and respon2
## t = 0.23368, df = 52, p-value = 0.8161
## alternative hypothesis: true correlation is not equal to 0
## 95 percent confidence interval:
## -0.2374307 0.2975689
## sample estimates:
## cor
## 0.03238897
Multicollinearity Test
model_1 <- lm(Y1~(X1+X2+X3)^2 + I(X1^2) + I(X2^2) + I(X3^2), data = databaru)
model1 <- rsm(Y1~SO(X1,X2,X3), data = databaru)
vif(model1)
## GVIF Df GVIF^(1/(2*Df))
## FO(X1, X2, X3) 1 3 1
## TWI(X1, X2, X3) 1 3 1
## PQ(X1, X2, X3) 1 3 1
vif(model_1)
## there are higher-order terms (interactions) in this model
## consider setting type = 'predictor'; see ?vif
## X1 X2 X3 I(X1^2) I(X2^2) I(X3^2) X1:X2 X1:X3 X2:X3
## 1 1 1 1 1 1 1 1 1
Building the Models
model1 <- rsm(Y1~SO(X1,X2,X3), data = databaru)
model2 <- rsm(Y2~SO(X1,X2,X3), data = databaru)
cat("\n====================Koefisien Model Respon Y1====================\n")
##
## ====================Koefisien Model Respon Y1====================
model1$coefficients
## (Intercept) FO(X1, X2, X3)X1 FO(X1, X2, X3)X2
## 2.02658716 0.13671834 -0.02469169
## FO(X1, X2, X3)X3 TWI(X1, X2, X3)X1:X2 TWI(X1, X2, X3)X1:X3
## 0.16907266 0.12258848 0.19763932
## TWI(X1, X2, X3)X2:X3 PQ(X1, X2, X3)X1^2 PQ(X1, X2, X3)X2^2
## 0.32895808 -0.47416328 -0.01330727
## PQ(X1, X2, X3)X3^2
## -0.26612257
cat("\n====================Koefisien Model Respon Y2====================\n")
##
## ====================Koefisien Model Respon Y2====================
model2$coefficients
## (Intercept) FO(X1, X2, X3)X1 FO(X1, X2, X3)X2
## 5.33333333 0.88888889 0.05555556
## FO(X1, X2, X3)X3 TWI(X1, X2, X3)X1:X2 TWI(X1, X2, X3)X1:X3
## 0.41666667 -0.70833333 -0.25000000
## TWI(X1, X2, X3)X2:X3 PQ(X1, X2, X3)X1^2 PQ(X1, X2, X3)X2^2
## -0.08333333 0.16666667 -1.16666667
## PQ(X1, X2, X3)X3^2
## 0.08333333
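Written out from the printed coefficients, the fitted second-order models in coded units are

\[
\hat{Y}_1 = 2.027 + 0.137X_1 - 0.025X_2 + 0.169X_3 + 0.123X_1X_2 + 0.198X_1X_3 + 0.329X_2X_3 - 0.474X_1^2 - 0.013X_2^2 - 0.266X_3^2
\]

\[
\hat{Y}_2 = 5.333 + 0.889X_1 + 0.056X_2 + 0.417X_3 - 0.708X_1X_2 - 0.250X_1X_3 - 0.083X_2X_3 + 0.167X_1^2 - 1.167X_2^2 + 0.083X_3^2
\]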
Simultaneous and Partial Tests
cat("\n====================Model Respon Y1====================\n")
##
## ====================Model Respon Y1====================
summary(model1)
##
## Call:
## rsm(formula = Y1 ~ SO(X1, X2, X3), data = databaru)
##
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 2.026587 0.351663 5.7629 7.521e-07 ***
## X1 0.136718 0.162788 0.8399 0.40553
## X2 -0.024692 0.162788 -0.1517 0.88013
## X3 0.169073 0.162788 1.0386 0.30466
## X1:X2 0.122588 0.199374 0.6149 0.54181
## X1:X3 0.197639 0.199374 0.9913 0.32696
## X2:X3 0.328958 0.199374 1.6500 0.10607
## X1^2 -0.474163 0.281958 -1.6817 0.09972 .
## X2^2 -0.013307 0.281958 -0.0472 0.96257
## X3^2 -0.266123 0.281958 -0.9438 0.35041
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Multiple R-squared: 0.1793, Adjusted R-squared: 0.0114
## F-statistic: 1.068 on 9 and 44 DF, p-value: 0.4047
##
## Analysis of Variance Table
##
## Response: Y1
## Df Sum Sq Mean Sq F value Pr(>F)
## FO(X1, X2, X3) 3 1.724 0.57465 0.6024 0.6169
## TWI(X1, X2, X3) 3 3.895 1.29842 1.3610 0.2671
## PQ(X1, X2, X3) 3 3.550 1.18332 1.2404 0.3065
## Residuals 44 41.976 0.95400
## Lack of fit 17 16.737 0.98454 1.0532 0.4401
## Pure error 27 25.239 0.93477
##
## Stationary point of response surface:
## X1 X2 X3
## 0.073288001 -0.553147978 0.002996276
##
## Eigenanalysis:
## eigen() decomposition
## $values
## [1] 0.08539808 -0.32532952 -0.51366167
##
## $vectors
## [,1] [,2] [,3]
## X1 -0.1762533 0.3330359 0.92629470
## X2 -0.8716254 -0.4900635 0.01034424
## X3 -0.4573882 0.8055588 -0.37665783
cat("\n====================Model Respon Y2====================\n")
##
## ====================Model Respon Y2====================
summary(model2)
##
## Call:
## rsm(formula = Y2 ~ SO(X1, X2, X3), data = databaru)
##
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 5.333333 0.766784 6.9555 1.329e-08 ***
## X1 0.888889 0.354952 2.5043 0.01605 *
## X2 0.055556 0.354952 0.1565 0.87634
## X3 0.416667 0.354952 1.1739 0.24676
## X1:X2 -0.708333 0.434725 -1.6294 0.11037
## X1:X3 -0.250000 0.434725 -0.5751 0.56817
## X2:X3 -0.083333 0.434725 -0.1917 0.84887
## X1^2 0.166667 0.614795 0.2711 0.78759
## X2^2 -1.166667 0.614795 -1.8977 0.06432 .
## X3^2 0.083333 0.614795 0.1355 0.89280
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Multiple R-squared: 0.2464, Adjusted R-squared: 0.0923
## F-statistic: 1.599 on 9 and 44 DF, p-value: 0.1454
##
## Analysis of Variance Table
##
## Response: Y2
## Df Sum Sq Mean Sq F value Pr(>F)
## FO(X1, X2, X3) 3 34.806 11.6019 2.5579 0.06721
## TWI(X1, X2, X3) 3 13.708 4.5694 1.0074 0.39849
## PQ(X1, X2, X3) 3 16.750 5.5833 1.2310 0.30981
## Residuals 44 199.569 4.5357
## Lack of fit 17 84.069 4.9453 1.1560 0.35854
## Pure error 27 115.500 4.2778
##
## Stationary point of response surface:
## X1 X2 X3
## -5.668610 2.100097 -9.952867
##
## Eigenanalysis:
## eigen() decomposition
## $values
## [1] 0.31020387 0.03177459 -1.25864512
##
## $vectors
## [,1] [,2] [,3]
## X1 0.8735427 0.4204880 -0.24517934
## X2 -0.1969248 -0.1553424 -0.96803376
## X3 -0.4451333 0.8939008 -0.05289366
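Both eigenanalyses mix positive and negative eigenvalues, so the stationary points of the two fitted surfaces are saddle points rather than pure maxima or minima. A quick programmatic check of this, reusing the canonical analysis already computed by rsm:

sign(canonical(model1)$eigen$values)  # one positive, two negative: saddle point for Y1
sign(canonical(model2)$eigen$values)  # two positive, one negative: saddle point for Y2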
Factor Effect Plots
# Function for main effect plots
plot_main_effects <- function(data, faktor_names, response) {
  # Mean of the response at each level of every factor
  effect_means <- lapply(faktor_names, function(faktor) {
    aggregate(data[[response]], by = list(Factor = data[[faktor]]), FUN = mean)
  })
  # Build one plot per factor with ggplot2
  plots <- lapply(seq_along(faktor_names), function(i) {
    ggplot(effect_means[[i]], aes(x = Factor, y = x)) +
      geom_line(group = 1, color = "blue") +
      geom_point(size = 3) +
      labs(
        title = paste("Main Effect Plot -", faktor_names[i]),
        x = faktor_names[i],
        y = paste("Mean", response)
      ) +
      theme_minimal()
  })
  # Display all plots side by side
  library(gridExtra)
  do.call(grid.arrange, c(plots, ncol = length(plots)))
}
# For Y1
plot_main_effects(data, c("Persen_Karbonasi","Tekanan_Operasi","Kecepatan_Pengisian"), "Y1")

# For Y2
plot_main_effects(data, c("Persen_Karbonasi","Tekanan_Operasi","Kecepatan_Pengisian"), "Y2")

Contour Plot
model_1 <- rsm(Y1~SO(Persen_Karbonasi,Tekanan_Operasi,Kecepatan_Pengisian), data = data)
model_2 <- rsm(Y2~SO(Persen_Karbonasi,Tekanan_Operasi,Kecepatan_Pengisian), data = data)
# Model 1
par(mfrow = c(1,3))
contour(
  model_1, ~ Persen_Karbonasi + Tekanan_Operasi + Kecepatan_Pengisian,
  image = TRUE,
  xlabs = c("Persen_Karbonasi", "Tekanan_Operasi", "Kecepatan_Pengisian"),
  main = "Contour Plot Respon Y1"
)

# Add points to this later
# Model 2
par(mfrow = c(1,3))
contour(
  model_2, ~ Persen_Karbonasi + Tekanan_Operasi + Kecepatan_Pengisian,
  image = TRUE,
  xlabs = c("Persen_Karbonasi", "Tekanan_Operasi", "Kecepatan_Pengisian"),
  main = "Contour Plot Respon Y2"
)

# Add points to this later (see the sketch below)
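One way to address the "add points later" notes is to draw a single contour panel at a time and overlay the stationary point with points(). A possible sketch for the Persen_Karbonasi vs Tekanan_Operasi panel of the Y1 surface, using model_1, which was fitted on the natural-unit variables above:

xs1 <- canonical(model_1)$xs  # stationary point of the Y1 surface
contour(model_1, ~ Persen_Karbonasi + Tekanan_Operasi, image = TRUE,
        main = "Contour Plot Respon Y1")
points(xs1["Persen_Karbonasi"], xs1["Tekanan_Operasi"], pch = 19, col = "red")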
Response Surface
# Model 1
par(mfrow = c(1,3))
persp(model_1, ~ Persen_Karbonasi + Tekanan_Operasi + Kecepatan_Pengisian,
      col = terrain.colors(50), contours = "colors",
      cex.lab = 0.6,
      cex.axis = 0.6,
      theta = -60)

# Model 2
par(mfrow = c(1,3))
persp(model_2, ~ Persen_Karbonasi + Tekanan_Operasi + Kecepatan_Pengisian,
      col = terrain.colors(50), contours = "colors",
      cex.lab = 0.6,
      cex.axis = 0.6,
      theta = -60)

Finding the Optimum Value
x_opt1 <- t(canonical(model1)$xs)
x_opt2 <- t(canonical(model2)$xs)
y_opt1 <- predict(model1, newdata = data.frame(x_opt1))
y_opt2 <- predict(model2, newdata = data.frame(x_opt2))
cat("\n==================== Menampilkan Nilai x_1 Optimum ====================\n")
##
## ==================== Menampilkan Nilai x_1 Optimum ====================
print(x_opt1)
## X1 X2 X3
## [1,] 0.073288 -0.553148 0.002996276
cat("\n==================== Menampilkan Nilai y_1 Optimum ====================\n")
##
## ==================== Menampilkan Nilai y_1 Optimum ====================
cat("y_1 =",y_opt1)
## y_1 = 2.038679
cat("\n\n==================== Menampilkan Nilai x_2 Optimum ====================\n")
##
##
## ==================== Menampilkan Nilai x_2 Optimum ====================
print(x_opt2)
## X1 X2 X3
## [1,] -0.7881353 0.2970861 0.422364
cat("\n==================== Menampilkan Nilai y_2 Optimum ====================\n")
##
## ==================== Menampilkan Nilai y_2 Optimum ====================
cat("y_2 =",y_opt2)
## y_2 = 5.079296
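The stationary points above are expressed in the coded variables X1, X2, X3. If the centres and half-ranges used to code the natural factors are known, rsm's code2val() can translate them back to natural units; the codings below are placeholders for illustration only, not the actual values used for this data:

# Placeholder codings -- substitute the real centres and half-ranges used in the design
codings <- list(X1 ~ (Persen_Karbonasi - 12) / 2,
                X2 ~ (Tekanan_Operasi - 25) / 5,
                X3 ~ (Kecepatan_Pengisian - 200) / 50)
code2val(data.frame(x_opt1), codings)  # Y1 stationary point in natural units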
Finding the Optimum Value with Constraints
# Objective function: maximise predicted Y1 by minimising its negative
objective <- function(x){
  -predict(model1,
           newdata = data.frame(X1 = x[1], X2 = x[2], X3 = x[3]))
}
# Constraint functions
constraint1 <- function(x){
  predict(model2,
          newdata = data.frame(X1 = x[1], X2 = x[2], X3 = x[3])) - y_opt2
}
constraint2 <- function(x){
  x[1]^2 + x[2]^2 + x[3]^2 - 1
}
constraint3 <- function(x){
  c(predict(model2,
            newdata = data.frame(X1 = x[1], X2 = x[2], X3 = x[3])) - y_opt2,
    x[1]^2 + x[2]^2 + x[3]^2 - 1)
}
start_point <- c(0,0,0)
lower_bounds <- c(-1,-1,-1)
upper_bounds <- c(1,1,1)
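The auglag() calls below are assumed to come from the nloptr package (whose augmented-Lagrangian solver takes lower and upper bounds directly, matching the calls here); the package load is not shown in this section. As a quick sanity check, at the centre of the design the objective should equal minus the Y1 intercept, about -2.0266.

library(nloptr)         # assumed source of auglag()
objective(start_point)  # -predict(model1) at X1 = X2 = X3 = 0, i.e. minus the intercept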
- Optimum of \(\hat{Y}_1\) with the constraint \(\hat{Y}_2 = \hat{Y}_{2,opt}\)
solution <- auglag(start_point,
                   fn = objective,
                   heq = constraint1,
                   lower = lower_bounds,
                   upper = upper_bounds)
cat("\n==================== Menampilkan Nilai x_1 Optimum dengan Kendala ====================\n")
##
## ==================== Menampilkan Nilai x_1 Optimum dengan Kendala ====================
print(solution$par)
## [1] 0.4935473 0.8269793 1.0000000
cat("\n==================== Menampilkan Nilai y_1 Optimum dengan Kendala ====================\n")
##
## ==================== Menampilkan Nilai y_1 Optimum dengan Kendala ====================
cat("y_1 =",solution$value)
## y_1 = -2.271614
- Optimum of \(\hat{Y}_1\) with the constraint \(\vec{X}^T\vec{X} = 1\)
solution <- auglag(start_point,
                   fn = objective,
                   heq = constraint2,
                   lower = lower_bounds,
                   upper = upper_bounds)
cat("\n==================== Menampilkan Nilai x_1 Optimum dengan Kendala ====================\n")
##
## ==================== Menampilkan Nilai x_1 Optimum dengan Kendala ====================
print(solution$par)
## [1] 0.2887666 0.7456114 0.6005644
cat("\n==================== Menampilkan Nilai y_1 Optimum dengan Kendala ====================\n")
##
## ==================== Menampilkan Nilai y_1 Optimum dengan Kendala ====================
cat("y_1 =",solution$value)
## y_1 = -2.214247
- Optimum of \(\hat{Y}_1\) with the constraints \(\hat{Y}_2 = \hat{Y}_{2,opt}\) and \(\vec{X}^T\vec{X} = 1\)
solution <- auglag(start_point,
                   fn = objective,
                   heq = constraint3,
                   lower = lower_bounds,
                   upper = upper_bounds)
cat("\n ==================== Menampilkan Nilai x_1 Optimum dengan Kendala ====================\n")
##
## ==================== Menampilkan Nilai x_1 Optimum dengan Kendala ====================
print(solution$par)
## [1] -0.4671486 1.0000000 -0.7555576
cat("\n ==================== Menampilkan Nilai y_1 Optimum dengan Kendala ====================\n")
##
## ==================== Menampilkan Nilai y_1 Optimum dengan Kendala ====================
cat("y_1 =",solution$value)
## y_1 = -1.305525
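Because objective() returns the negative of the predicted Y1, the values reported above are the negated optima; flipping the sign recovers the constrained maxima of roughly 2.27, 2.21, and 1.31 for the three runs.

# Recover the constrained maximum of Y1 by flipping the sign of the minimised objective
cat("Y1 optimum =", -solution$value)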