Import Data

library(readr)
# Load the dataset
dfMANOVA <- read_csv("MANOVA.csv")
dfMANOVA
## # A tibble: 113 × 5
##        N MP                      MB        HB    BK
##    <dbl> <chr>                   <chr>  <dbl> <dbl>
##  1     1 Direct Flipped-learning Tinggi    86    88
##  2     2 Direct Flipped-learning Tinggi    86    88
##  3     3 Direct Flipped-learning Tinggi    85    87
##  4     4 Direct Flipped-learning Tinggi    85    87
##  5     5 Direct Flipped-learning Tinggi    85    87
##  6     6 Direct Flipped-learning Tinggi    84    86
##  7     7 Direct Flipped-learning Rendah    84    86
##  8     8 Direct Flipped-learning Tinggi    84    86
##  9     9 Direct Flipped-learning Tinggi    84    86
## 10    10 Direct Flipped-learning Rendah    84    86
## # ℹ 103 more rows

Descriptive Statistics

Independent Variable Learning Model (MP) and Dependent Variable Learning Outcomes (HB)

library(dplyr)

dfMANOVA %>% group_by(MP) %>% summarise(n = n(), mean = mean(HB), sd = sd(HB))
## # A tibble: 2 × 4
##   MP                           n  mean    sd
##   <chr>                    <int> <dbl> <dbl>
## 1 Direct Flipped-learning     53  80.0  3.40
## 2 Inquiry Flipped-learning    60  85.9  4.57

Independent Variable Learning Model (MP) and Dependent Variable Critical Thinking (BK)

dfMANOVA %>% group_by(MP) %>% summarise(n = n(), mean = mean(BK), sd = sd(BK))
## # A tibble: 2 × 4
##   MP                           n  mean    sd
##   <chr>                    <int> <dbl> <dbl>
## 1 Direct Flipped-learning     53  83.1  2.80
## 2 Inquiry Flipped-learning    60  88.7  4.73

Independent Variable Learning Motivation (MB) and Dependent Variable Learning Outcomes (HB)

dfMANOVA %>% group_by(MB) %>% summarise(n = n(), mean = mean(HB), sd = sd(HB))
## # A tibble: 2 × 4
##   MB         n  mean    sd
##   <chr>  <int> <dbl> <dbl>
## 1 Rendah    52  80.6  3.57
## 2 Tinggi    61  85.4  5.00

Independent Variable Learning Motivation (MB) and Dependent Variable Critical Thinking (BK)

dfMANOVA %>% group_by(MB) %>% summarise(n = n(), mean = mean(BK), sd = sd(BK))
## # A tibble: 2 × 4
##   MB         n  mean    sd
##   <chr>  <int> <dbl> <dbl>
## 1 Rendah    52  83.9  3.53
## 2 Tinggi    61  87.9  5.07
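
Since the MANOVA below also tests the MP x MB interaction, cell means for every combination of the two factors can be useful. A minimal sketch (not part of the original output):

# Cell means of both dependent variables for each MP x MB combination
dfMANOVA %>%
  group_by(MP, MB) %>%
  summarise(n = n(), mean_HB = mean(HB), mean_BK = mean(BK), .groups = "drop")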

Data Visualization for the Independent Variable Learning Model

library(gridExtra)
library(tidyverse)

p1 <- ggplot(dfMANOVA, aes(x = MP, y = HB, fill = MP)) + geom_boxplot(outlier.shape = NA) + geom_jitter(width = 0.2) + theme(legend.position="top")
p2 <- ggplot(dfMANOVA, aes(x = MP, y = BK, fill = MP)) + geom_boxplot(outlier.shape = NA) + geom_jitter(width = 0.2) + theme(legend.position="top")
grid.arrange(p1, p2, ncol=2)

Data Visualization for the Independent Variable Learning Motivation

p1 <- ggplot(dfMANOVA, aes(x = MB, y = HB, fill = MB)) + geom_boxplot(outlier.shape = NA) + geom_jitter(width = 0.2) + theme(legend.position="top")
p2 <- ggplot(dfMANOVA, aes(x = MB, y = BK, fill = MB)) + geom_boxplot(outlier.shape = NA) + geom_jitter(width = 0.2) + theme(legend.position="top")
grid.arrange(p1, p2, ncol=2)
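
Because the interaction between MP and MB is of interest later, a simple cell-means plot can complement the boxplots. A sketch (assuming ggplot2 is already loaded via tidyverse), showing mean HB per MP and MB combination:

# Mean HB for every combination of learning model and motivation level
dfMANOVA %>%
  group_by(MP, MB) %>%
  summarise(mean_HB = mean(HB), .groups = "drop") %>%
  ggplot(aes(x = MP, y = mean_HB, colour = MB, group = MB)) +
  geom_point(size = 3) +
  geom_line()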

Normality Assumption Test

Normality test of each dependent variable within each group of the independent variable, one at a time (univariate normality)

*If the univariate normality assumption is not met, the multivariate normality assumption is also violated. In that case, run an outlier check to make sure there are no outliers in the data.

library(rstatix)

dfMANOVA %>% group_by(MP) %>% shapiro_test(HB, BK)
## # A tibble: 4 × 4
##   MP                       variable statistic      p
##   <chr>                    <chr>        <dbl>  <dbl>
## 1 Direct Flipped-learning  BK           0.970 0.193 
## 2 Direct Flipped-learning  HB           0.958 0.0572
## 3 Inquiry Flipped-learning BK           0.968 0.110 
## 4 Inquiry Flipped-learning HB           0.964 0.0760

The univariate normality assumption is met (p > 0.05), so we can proceed to the multivariate normality test.

Normality test of the dependent variables taken jointly (multivariate normality using Mardia's skewness and kurtosis)

library(mvnormalTest)

# Columns 4 and 5 are the dependent variables HB and BK
mardia(dfMANOVA[, c(4,5)])$mv.test
##           Test Statistic p-value Result
## 1     Skewness    3.8168  0.4314    YES
## 2     Kurtosis    0.2978  0.7658    YES
## 3 MV Normality      <NA>    <NA>    YES

The multivariate normality assumption is met (p > 0.05), so we can proceed to the homogeneity test.

Homogeneity of Variance-Covariance Matrices Assumption Test

Homogeneity for the Independent Variable Learning Model

library(heplots)

boxM(Y = dfMANOVA[, c(4,5)], group = dfMANOVA$MP)
## 
##  Box's M-test for Homogeneity of Covariance Matrices
## 
## data:  dfMANOVA[, c(4, 5)]
## Chi-Sq (approx.) = 39.717, df = 3, p-value = 1.223e-08

Homogeneity for the Independent Variable Learning Motivation

boxM(Y = dfMANOVA[, c(4,5)], group = dfMANOVA$MB)
## 
##  Box's M-test for Homogeneity of Covariance Matrices
## 
## data:  dfMANOVA[, c(4, 5)]
## Chi-Sq (approx.) = 40.785, df = 3, p-value = 7.264e-09
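
Both Box's M tests are significant (p < 0.05), so homogeneity of the variance-covariance matrices is not supported. When this assumption is in doubt, Pillai's trace is often regarded as more robust than Wilks' lambda; a minimal sketch of how to request it from the MANOVA model fitted later in this document (not part of the original output):

# Pillai's trace as a robustness check when Box's M is significant
summary(manova(cbind(HB, BK) ~ MP * MB, data = dfMANOVA), test = "Pillai")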

Outlier Assumption Test

library(rstatix)

# Flag multivariate outliers based on the Mahalanobis distance of HB and BK
mahalanobis_distance(data = dfMANOVA[, c("HB", "BK")])$is.outlier
##   [1] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
##  [13] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
##  [25] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
##  [37] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
##  [49] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
##  [61] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
##  [73] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
##  [85] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
##  [97] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [109] FALSE FALSE FALSE FALSE FALSE

There are no multivariate outliers, as indicated by every observation being flagged FALSE.
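
A more compact check (a sketch reusing the same rstatix helper) counts the flagged rows directly; zero means no multivariate outliers:

# Number of observations flagged as multivariate outliers
sum(mahalanobis_distance(data = dfMANOVA[, c("HB", "BK")])$is.outlier)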

Linearity Assumption Test

Linearity for the Independent Variable Learning Model

library(gridExtra)

p1 <- dfMANOVA %>% filter(MP == "Direct Flipped-learning") %>% ggplot(aes(x = HB, y = BK)) + geom_point() + ggtitle("MP: DFL")
p2 <- dfMANOVA %>% filter(MP == "Inquiry Flipped-learning") %>% ggplot(aes(x = HB, y = BK)) + geom_point() + ggtitle("MP: IFL")
grid.arrange(p1, p2, ncol=2)

library(ggpubr)

p1 <- ggscatter(dfMANOVA, x = "HB", y = "BK", add = "reg.line", color = "MP")+
  stat_regline_equation(
    aes(label =  paste(..eq.label.., ..rr.label.., sep = "~~~~"), color = MP)
  )
p2 <- ggscatter(dfMANOVA, x = "HB", y = "BK", add = "reg.line")+
  stat_regline_equation(
    aes(label =  paste(..eq.label.., ..rr.label.., sep = "~~~~"))
  )
grid.arrange(p1, p2, ncol=2)

The relationship between HB and BK appears linear within each group of the Learning Model.

Linearity for the Independent Variable Learning Motivation

p1 <- dfMANOVA %>% filter(MB == "Tinggi") %>% ggplot(aes(x = HB, y = BK)) + geom_point() + ggtitle("MB: Tinggi")
p2 <- dfMANOVA %>% filter(MB == "Rendah") %>% ggplot(aes(x = HB, y = BK)) + geom_point() + ggtitle("MB: Rendah")
grid.arrange(p1, p2, ncol=2)

p1 <- ggscatter(dfMANOVA, x = "HB", y = "BK", add = "reg.line", color = "MB")+ 
  stat_regline_equation(
    aes(label =  paste(..eq.label.., ..rr.label.., sep = "~~~~"), color = MB)
  )
p2 <- ggscatter(dfMANOVA, x = "HB", y = "BK", add = "reg.line")+
  stat_regline_equation(
    aes(label =  paste(..eq.label.., ..rr.label.., sep = "~~~~"))
  )
grid.arrange(p1, p2, ncol=2)

The relationship between HB and BK appears linear within each group of Learning Motivation.

Multicollinearity Assumption Test

cor.test(x = dfMANOVA$HB, y = dfMANOVA$BK, method = "pearson")$estimate
##      cor 
## 0.737064

There is no multicollinearity (correlation < 0.9).

MANOVA Test

With Interaction

result <- manova(cbind(HB,BK) ~ MP * MB, data=dfMANOVA)
summary(result, test="Wilks")
##            Df   Wilks approx F num Df den Df    Pr(>F)    
## MP          1 0.43759   69.403      2    108 < 2.2e-16 ***
## MB          1 0.58099   38.945      2    108 1.841e-13 ***
## MP:MB       1 0.79558   13.875      2    108 4.335e-06 ***
## Residuals 109                                             
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Without Interaction

result <- manova(cbind(HB,BK) ~ MP + MB, data=dfMANOVA)
summary(result, test="Wilks")
##            Df   Wilks approx F num Df den Df    Pr(>F)    
## MP          1 0.49238   56.187      2    109 < 2.2e-16 ***
## MB          1 0.63056   31.930      2    109 1.217e-11 ***
## Residuals 110                                             
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Hypothesis test results (a univariate follow-up is sketched after this list):

  1. MP has a significant effect on HB and BK

  2. MB has a significant effect on HB and BK

  3. The interaction between MP and MB has a significant effect on HB and BK
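
As a univariate follow-up to the significant multivariate effects, per-response ANOVA tables can be obtained from the fitted MANOVA object. A minimal sketch (the object name result_int is ours; it refits the interaction model so the existing result object is not overwritten):

# Univariate ANOVAs for HB and BK from the interaction model
result_int <- manova(cbind(HB, BK) ~ MP * MB, data = dfMANOVA)
summary.aov(result_int)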

Estimated Marginal Means (EMM)

Learning Model x Learning Outcomes

adj_means <- emmeans_test(data = dfMANOVA, formula = HB ~ MP)
get_emmeans(adj_means)
## # A tibble: 2 × 7
##   MP                       emmean    se    df conf.low conf.high method      
##   <fct>                     <dbl> <dbl> <dbl>    <dbl>     <dbl> <chr>       
## 1 Direct Flipped-learning    80.0 0.558   111     78.9      81.1 Emmeans test
## 2 Inquiry Flipped-learning   85.9 0.524   111     84.9      87.0 Emmeans test
#emmeans_test(data = dfMANOVA, formula = HB ~ MP, p.adjust.method = "bonferroni", ref.group = "Direct Flipped-learning")

Learning Model x Critical Thinking

adj_means <- emmeans_test(data = dfMANOVA, formula = BK ~ MP)
get_emmeans(adj_means)
## # A tibble: 2 × 7
##   MP                       emmean    se    df conf.low conf.high method      
##   <fct>                     <dbl> <dbl> <dbl>    <dbl>     <dbl> <chr>       
## 1 Direct Flipped-learning    83.1 0.542   111     82.0      84.2 Emmeans test
## 2 Inquiry Flipped-learning   88.7 0.510   111     87.7      89.7 Emmeans test
#emmeans_test(data = dfMANOVA, formula = BK ~ MP, p.adjust.method = "bonferroni", ref.group = "Direct Flipped-learning")

Learning Motivation x Learning Outcomes

adj_means <- emmeans_test(data = dfMANOVA, formula = HB ~ MB)
get_emmeans(adj_means)
## # A tibble: 2 × 7
##   MB     emmean    se    df conf.low conf.high method      
##   <fct>   <dbl> <dbl> <dbl>    <dbl>     <dbl> <chr>       
## 1 Rendah   80.6 0.610   111     79.3      81.8 Emmeans test
## 2 Tinggi   85.4 0.564   111     84.3      86.5 Emmeans test
#emmeans_test(data = dfMANOVA, formula = HB ~ MB, p.adjust.method = "bonferroni", ref.group = "Rendah")

Learning Motivation x Critical Thinking

adj_means <- emmeans_test(data = dfMANOVA, formula = BK ~ MB)
get_emmeans(adj_means)
## # A tibble: 2 × 7
##   MB     emmean    se    df conf.low conf.high method      
##   <fct>   <dbl> <dbl> <dbl>    <dbl>     <dbl> <chr>       
## 1 Rendah   83.9 0.615   111     82.7      85.2 Emmeans test
## 2 Tinggi   87.9 0.568   111     86.8      89.0 Emmeans test
#emmeans_test(data = dfMANOVA, formula = BK ~ MB, p.adjust.method = "bonferroni", ref.group = "Rendah")

Post-Hoc Test for the Independent Variable Learning Model

*A post-hoc test is performed when the independent variable has three or more groups.

library(MASS)

dv <- cbind(dfMANOVA$HB, dfMANOVA$BK)
post_hoc <- lda(dfMANOVA$MP ~ dv, CV=F)
post_hoc
## Call:
## lda(dfMANOVA$MP ~ dv, CV = F)
## 
## Prior probabilities of groups:
##  Direct Flipped-learning Inquiry Flipped-learning 
##                0.4690265                0.5309735 
## 
## Group means:
##                               dv1      dv2
## Direct Flipped-learning  80.03774 83.09434
## Inquiry Flipped-learning 85.91667 88.73333
## 
## Coefficients of linear discriminants:
##           LD1
## dv1 0.1413884
## dv2 0.1377925
# create this plot when the independent variable has more than 2 groups
#plot_lda <- data.frame(dfMANOVA[, "MP"], lda = predict(post_hoc)$x)
#ggplot(plot_lda) + geom_point(aes(x = lda.LD1, y = lda.LD2, colour = MP), size = 4)
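
To gauge how well the two learning-model groups are separated, the fitted discriminant can classify the training observations. A sketch (not part of the original analysis):

# Proportion of observations the discriminant assigns to the correct MP group
mean(predict(post_hoc)$class == dfMANOVA$MP)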

Post-Hoc Test for the Independent Variable Learning Motivation

*A post-hoc test is performed when the independent variable has three or more groups.

dv <- cbind(dfMANOVA$HB, dfMANOVA$BK)
post_hoc <- lda(dfMANOVA$MB ~ dv, CV=F)
post_hoc
## Call:
## lda(dfMANOVA$MB ~ dv, CV = F)
## 
## Prior probabilities of groups:
##   Rendah   Tinggi 
## 0.460177 0.539823 
## 
## Group means:
##             dv1      dv2
## Rendah 80.55769 83.94231
## Tinggi 85.37705 87.91803
## 
## Coefficients of linear discriminants:
##            LD1
## dv1 0.18302780
## dv2 0.05872075