1. Build the PCA-PI-PLS Functions

### Import Data
semad_import_from_excel = function(file_path="data.xlsx") {
  library(readxl)
  Variabel=read_excel(file_path, sheet = "Variabel")
  Variabel=data.frame(Variabel=Variabel$Variabel)
  Indikator=read_excel(file_path, sheet = "Indikator")
  Indikator=data.frame(Indikator=Indikator$Indikator)
  Data=read_excel(file_path, sheet = "Data")
  # MIM and MOM hold the inner- and outer-model specification matrices
  MIM=read_excel(file_path, sheet = "MIM")
  MOM=read_excel(file_path, sheet = "MOM")
  return(list(Variabel = Variabel, Indikator = Indikator, Data = Data, MIM = MIM, MOM = MOM))
}
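A minimal usage sketch, not run in this document (the data below are simulated instead); it assumes a workbook data.xlsx with the five sheets read above:
# semad_data <- semad_import_from_excel("data.xlsx")
# str(semad_data, max.level = 1)  # list with Variabel, Indikator, Data, MIM, MOM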
### OLS
ols = function(X, y) {
  # Ordinary least squares via the normal equations: beta = (X'X)^(-1) X'y
  beta = solve(t(X) %*% X) %*% t(X) %*% y
  return(beta)
}
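A quick sanity check of ols(), a sketch using only base R (X_toy and y_toy are illustrative objects): on a no-intercept regression its coefficients should match lm().
set.seed(1)
X_toy <- cbind(rnorm(50), rnorm(50))
y_toy <- drop(X_toy %*% c(2, -1)) + rnorm(50, sd = 0.1)
cbind(ols(X_toy, y_toy), coef(lm(y_toy ~ X_toy - 1)))  # the two columns should agree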
### Power Iteration to Find the Dominant Eigenvector and Eigenvalue
eigen_poweriteration <- function(A, maxiter = 1000, tol = 1e-10) {
  n <- nrow(A)
  cat("Covariance:\n ")
  print(A)
  # Randomly initialise the starting vector (normalised to unit length below)
  b <- runif(n)
  b <- b / sqrt(sum(b^2))

  for (i in 1:maxiter) {
    cat("iteration:",i,"\n")
    cat("eigen vector:\n ")
    print(b)
    # Compute the matrix-vector product
    b_new <- A %*% b
    # Normalise the resulting vector
    b_new <- b_new / sqrt(sum(b_new^2))

    cat("eigen vector baru:\n ")
    print(b_new)

    # Check convergence
    if (sqrt(sum((b_new - b)^2)) < tol) {
      break
    }

    b <- b_new
  }

  # Compute the eigenvalue as the Rayleigh quotient
  eigenvalue <- t(b) %*% A %*% b / (t(b) %*% b)

  return(list(eigenvalues = eigenvalue, eigenvectors = b))
}
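A cross-check sketch for the power iteration, again base R only (A_toy is an illustrative symmetric matrix; the iteration trace it prints is omitted here): the dominant eigenvalue should match eigen(), and the vector should match up to sign.
A_toy <- matrix(c(4, 1, 1, 3), nrow = 2)
pi_toy <- eigen_poweriteration(A_toy)
c(power_iteration = as.numeric(pi_toy$eigenvalues),
  base_eigen = eigen(A_toy)$values[1])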
### PCA
pca_semad <- function(data) {
  # Step 1: standardise the data
  data_normalized <- scale(data)

  # Step 2: compute the covariance matrix
  covariance_matrix <- cov(data_normalized)

  # Step 3: compute the dominant eigenvalue and eigenvector
  # (eigen_poweriteration prints the covariance matrix itself, so it is not printed again here)
  eigen_values_vectors <- eigen_poweriteration(covariance_matrix)
  eigen_values <- eigen_values_vectors$eigenvalues
  eigen_vectors <- eigen_values_vectors$eigenvectors

  # Step 4: select principal components
  # Power iteration yields only the dominant first principal component, so there is no selection step

  # Step 5: project the data onto the principal component
  cat("transformation check:\n")
  print(head(data_normalized))
  print(eigen_vectors)
  transformed_data <- data_normalized %*% eigen_vectors
  print(head(transformed_data))

  return(list(scores = transformed_data,
              rotation = eigen_vectors,
              sdev = sqrt(eigen_values)))
}
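A similar cross-check for pca_semad(), a sketch on simulated input (trace output omitted): the loading vector should equal the first column of prcomp() up to a sign flip.
d_toy <- as.data.frame(matrix(rnorm(300), ncol = 3))
p_pi <- pca_semad(d_toy)
cbind(p_pi$rotation, prcomp(d_toy, scale. = TRUE)$rotation[, 1])  # equal up to sign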
### PLS Algorithm Modified with PCA-PI (the PCA-PI-PLS Algorithm)
plsalgorithm <- function(data, matrix_outer_model, matrix_inner_model, indicator, laten_variables){
  # Initialise an empty output list
  outputplsalgoritm=list()
  indexoutput=0

  # Number of indicators
  n_indicator=nrow(matrix_outer_model)

  # Number of latent variables
  n_laten_variables=ncol(matrix_outer_model)

  # Number of observations
  n=nrow(data)

  # Standardise the data as a matrix
  standardize_data=scale(data)

  # Map each latent variable to the indicators attached to it
  indicator_to_laten_variable_map=list()
  for (i in 1:n_laten_variables) {
    indicator_to_laten_variable_connection=c()
    for (j in 1:n_indicator) {
      if (matrix_outer_model[j,i]==1 | matrix_outer_model[j,i]==2 | matrix_outer_model[j,i]==3) {
        indicator_to_laten_variable_connection=append(indicator_to_laten_variable_connection,j)
      }
    }
    indicator_to_laten_variable_map=append(indicator_to_laten_variable_map,list(indicator_to_laten_variable_connection))
  }

  # Record the outer-model type of each attached indicator
  type_outer_model_map=list()
  for (i in 1:n_laten_variables) {
    type_outer_model=c()
    for (j in 1:n_indicator) {
      if (matrix_outer_model[j,i]==1 | matrix_outer_model[j,i]==2 | matrix_outer_model[j,i]==3) {
        type_outer_model=append(type_outer_model,matrix_outer_model[j,i])
      }
    }
    type_outer_model_map=append(type_outer_model_map,list(type_outer_model))
  }

  # Split the standardised data into per-construct indicator blocks
  list_indicator=list()
  for (i in 1:n_laten_variables) {
    part_standardize_data=standardize_data[,indicator_to_laten_variable_map[[i]],drop=FALSE]
    list_indicator=append(list_indicator,list(part_standardize_data))
  }

  # Initialise the weights
  Weight=list()
  for (i in 1:n_laten_variables) {
    # 1=single construct, 2=reflective, 3=formative
    if (type_outer_model_map[[i]][1]==1) {
      vector_one=rep(1,length(indicator_to_laten_variable_map[[i]]))
      matrix_one=matrix(vector_one,nrow = length(indicator_to_laten_variable_map[[i]]),ncol = 1)
      Weight=append(Weight,list(matrix_one))
    } else if (type_outer_model_map[[i]][1]==3 || type_outer_model_map[[i]][1]==2) {
      pca_result=pca_semad(list_indicator[[i]])
      vector_one=pca_result$rotation[,1]
      matrix_one=matrix(vector_one,nrow = length(indicator_to_laten_variable_map[[i]]),ncol = 1)
      Weight=append(Weight,list(matrix_one))
    }
  }

  # Compute the scaling factor f so each latent variable score has unit variance
  f=list()
  for (i in 1:n_laten_variables) {
    f_i=sqrt(1/(t(Weight[[i]])%*%var(list_indicator[[i]])%*%Weight[[i]]))
    f=append(f,list(f_i))
  }

  # Compute the outer weights
  outer_weight=list()
  for (i in 1:n_laten_variables) {
    outer_weight_i=Weight[[i]]%*%f[[i]]
    outer_weight=append(outer_weight,list(outer_weight_i))
  }

  # Initial outside approximation (latent variable scores)
  Y=list()
  for (i in 1:n_laten_variables) {
    Y_i=list_indicator[[i]]%*%outer_weight[[i]]
    Y=append(Y,list(Y_i))
  }

  # Store and print the final outer weights and latent scores
  indexoutput=indexoutput+1
  outputplsalgoritm[[indexoutput]]=outer_weight
  names(outputplsalgoritm)[indexoutput]="Outer Weight Final"
  cat("Outer Weight","\n")
  for(i in seq_along(outer_weight)) {
    cat("Latent Variable ",laten_variables[i,1],"\n")
    print(outer_weight[[i]]) # print element i
    cat("\n") # blank line between elements
  }
  indexoutput=indexoutput+1
  outputplsalgoritm[[indexoutput]]=Y
  names(outputplsalgoritm)[indexoutput]="Y Final"
  cat("Latent Variable Scores","\n")
  for(i in seq_along(Y)) {
    cat("Latent Variable ",laten_variables[i,1],"\n")
    print(head(Y[[i]])) # print element i
    cat("\n") # blank line between elements
  }

  # Stage 2
  ## List the exogenous predictors of each latent variable
  list_exogenous_columns=list()
  for (i in 1:n_laten_variables) {
    exogenous_columns=c()
    for (j in 1:n_laten_variables) {
      if (matrix_inner_model[j,i]==1) {
        exogenous_columns=append(exogenous_columns,j)
      }
    }
    list_exogenous_columns=append(list_exogenous_columns,list(exogenous_columns))
  }

  ## Assemble the exogenous score matrices
  exogenous=list()
  for (i in 1:n_laten_variables) {
    k=length(list_exogenous_columns[[i]])
    if (k==0) {
      exogenous_i = NULL
    } else if (k==1) {
      fill=list_exogenous_columns[[i]][1]
      exogenous_i = Y[[fill]]
    } else if (k>1) {
      for (j in 1:k) {
        fill=list_exogenous_columns[[i]][j]
        if (j==1) {
          exogenous_i = Y[[fill]]
        } else {
          exogenous_i = cbind(exogenous_i,Y[[fill]])
        }
      }
    }
    exogenous=append(exogenous,list(exogenous_i))
  }

  ## Path coefficients: OLS of each endogenous score on its exogenous scores
  path_coefficient=list()
  for (i in 1:n_laten_variables) {
    if (is.null(list_exogenous_columns[[i]])) {
      path_coefficient_i=NULL
    } else {
      path_coefficient_i=ols(exogenous[[i]],Y[[i]])
    }
    path_coefficient=append(path_coefficient,list(path_coefficient_i))
  }
  indexoutput=indexoutput+1
  outputplsalgoritm[[indexoutput]]=path_coefficient
  names(outputplsalgoritm)[indexoutput]="Path Coefficient"
  cat("Path Coefficient:\n ")
  for(i in seq_along(path_coefficient)) {
    cat("Variabel Laten ",laten_variables[i,1],"\n")
    print(path_coefficient[[i]]) # Menampilkan elemen ke-i
    cat("\n") # Baris kosong untuk pemisah antar elemen
  }

  ##Outer Loading
  outer_loading=list()
  for (i in 1:n_laten_variables) {
    if (type_outer_model_map[[i]][1]==2 | type_outer_model_map[[i]][1]==3) {
      outer_loading_i=matrix(0,nrow = length(type_outer_model_map[[i]]), ncol = 1)
      for (j in 1:length(type_outer_model_map[[i]])) {
        outer_loading_i[j,1]=ols(Y[[i]],list_indicator[[i]][,j])
      }
      outer_loading=append(outer_loading,list(outer_loading_i))

    } else if (type_outer_model_map[[i]][1]==1) {
      outer_loading_i=1
      outer_loading=append(outer_loading,list(outer_loading_i))
    }
  }
  indexoutput=indexoutput+1
  outputplsalgoritm[[indexoutput]]=outer_loading
  names(outputplsalgoritm)[indexoutput]="Outer Loading"
  cat("Outer Loading:\n ")
  for(i in seq_along(outer_loading)) {
    cat("Variabel Laten ",laten_variables[i,1],"\n")
    print(outer_loading[[i]]) # Menampilkan elemen ke-i
    cat("\n") # Baris kosong untuk pemisah antar elemen
  }
  vector_outer_loading=unlist(outer_loading)

  ##R Square
  R_Square=list()
  for (i in 1:n_laten_variables) {
    if (!is.null(exogenous[[i]]) && !is.null(path_coefficient[[i]])) {
      y_actual = Y[[i]]
      y_predicted = exogenous[[i]] %*% path_coefficient[[i]]
      ss_total = sum((y_actual - mean(y_actual))^2)
      ss_residual = sum((y_actual - y_predicted)^2)
      r_square = 1 - (ss_residual / ss_total)
      R_Square[[i]] = r_square
    } else {
      R_Square[[i]] = NA
    }
  }
  indexoutput=indexoutput+1
  outputplsalgoritm[[indexoutput]]=R_Square
  names(outputplsalgoritm)[indexoutput]="R Square"
  cat("R Square:\n ")
  for(i in seq_along(R_Square)) {
    cat("Variabel Laten ",laten_variables[i,1],"\n")
    print(R_Square[[i]]) # Menampilkan elemen ke-i
    cat("\n") # Baris kosong untuk pemisah antar elemen
  }

  # Stage 3
  ## Compute m (sums over the indicator columns)
  m=list()
  for (i in 1:n_laten_variables) {
    pertama=TRUE
    for (j in 1:length(indicator_to_laten_variable_map[[i]])) {
      if (pertama==TRUE) {
        if (type_outer_model_map[[i]][j]==2 | type_outer_model_map[[i]][j]==3) {
          m_i=matrix(sum(list_indicator[[i]][,j]))
          pertama=FALSE
        } else {
          m_i=matrix(sum(list_indicator[[i]]))
          pertama=FALSE
        }
      } else if (pertama==FALSE) {
        if (type_outer_model_map[[i]][j]==2 | type_outer_model_map[[i]][j]==3) {
          m_i=rbind(m_i,matrix(sum(list_indicator[[i]][,j])))
        } else {
          m_i=rbind(m_i,matrix(sum(list_indicator[[i]])))
        }
      }
    }
    m=append(m,list(m_i))
  }

  ## Compute nx
  nx=list()
  for (i in 1:n_laten_variables) {
    nx_i=t(outer_weight[[i]])%*%m[[i]]
    nx=append(nx,list(nx_i))
  }

  ## Compute p0
  p0=list()
  for (i in 1:n_laten_variables) {
    p0_i=m[[i]]-outer_loading[[i]]%*%nx[[i]]
    p0=append(p0,list(p0_i))
  }

  ## Compute b0
  b0=list()
  for (i in 1:n_laten_variables) {
    if (is.null(list_exogenous_columns[[i]])) {
      b0_i=NULL
    } else {
      n_exogenous=length(list_exogenous_columns[[i]])
      nxexo=c()
      for (j in 1:n_exogenous) {
        fill=list_exogenous_columns[[i]][j]
        nxexo=append(nxexo,nx[[fill]]) # use nx of the j-th exogenous construct, not nx[[j]]
      }
      b0_i=nx[[i]]-t(nxexo)%*%path_coefficient[[i]]
    }
    b0=append(b0,list(b0_i))
  }

  # Name the stored results by latent variable
  names(outputplsalgoritm$"Outer Weight Final")=laten_variables[,1]
  names(outputplsalgoritm$"Y Final")=laten_variables[,1]
  names(outputplsalgoritm$"Path Coefficient")=laten_variables[,1]
  names(outputplsalgoritm$"Outer Loading")=laten_variables[,1]
  names(outputplsalgoritm$"R Square")=laten_variables[,1]

  # Return the collected results
  return(outputplsalgoritm)

}

2. Generate Data for Simulation 1

laten_variables <- data.frame(Variabel = c("X_Eksogen", "Y_Endogen"))
indicator <- data.frame(Indikator = c("x1", "x2", "x3", "y1", "y2", "y3"))
matrix_inner_model <- matrix(0, nrow = 2, ncol = 2)
matrix_inner_model[1, 2] <- 1  # X_Eksogen predicts Y_Endogen
matrix_outer_model <- matrix(0, nrow = 6, ncol = 2)
matrix_outer_model[1:3, 1] <- 2  # x1-x3: reflective indicators of X_Eksogen
matrix_outer_model[4:6, 2] <- 2  # y1-y3: reflective indicators of Y_Endogen
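To read the specification back, a small display-only sketch (outer_spec and inner_spec are labelled copies for inspection; the algorithm itself does not use them):
outer_spec <- matrix_outer_model
dimnames(outer_spec) <- list(indicator$Indikator, laten_variables$Variabel)
outer_spec  # 2 marks a reflective indicator of that construct
inner_spec <- matrix_inner_model
dimnames(inner_spec) <- list(laten_variables$Variabel, laten_variables$Variabel)
inner_spec  # a 1 in row j, column i means construct j predicts construct i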
### Data-generating function
generate_group_data <- function(n, path_coeff) {
  Lx <- rnorm(n, mean = 0, sd = 1.5)                      # latent exogenous scores
  Ly <- (path_coeff * Lx) + rnorm(n, mean = 0, sd = 0.1)  # latent endogenous scores
  # Reflective indicators: loading 0.90 plus small measurement error
  x1 <- 0.90 * Lx + rnorm(n, 0, 0.1); x2 <- 0.90 * Lx + rnorm(n, 0, 0.1); x3 <- 0.90 * Lx + rnorm(n, 0, 0.1)
  y1 <- 0.90 * Ly + rnorm(n, 0, 0.1); y2 <- 0.90 * Ly + rnorm(n, 0, 0.1); y3 <- 0.90 * Ly + rnorm(n, 0, 0.1)
  return(data.frame(x1, x2, x3, y1, y2, y3))
}
### Generate data
set.seed(771)
data_g1 <- generate_group_data(n = 100, path_coeff = 1)   # group 1: positive path (+1)
data_g1$True_Label <- 1
data_g2 <- generate_group_data(n = 100, path_coeff = -1)  # group 2: negative path (-1)
data_g2$True_Label <- 2
data_simulasi_full <- rbind(data_g1, data_g2)
# Peek at the simulated data: the first rows belong to group 1, the last rows to group 2
head(data_simulasi_full)
##               x1          x2           x3           y1          y2           y3
## 1    1.444724011  1.46917273  1.470834715  1.371269242  1.42979955  1.369551241
## 2    0.970484802  0.89226584  0.806292280  0.837141758  0.95938807  0.848705960
## 3    1.483350309  1.44907105  1.315204320  1.208448196  1.38819655  1.333301667
## 4   -0.899766011 -0.72605046 -0.783792872 -0.760552831 -0.57555780 -0.699692703
## 5   -2.061009340 -1.88065448 -2.154854164 -2.319287913 -2.13922717 -2.302704225
## 6   -1.257306440 -1.30508333 -1.327900691 -1.241560718 -1.29127596 -1.271167658
##     True_Label
## 1            1
## 2            1
## 3            1
## 4            1
## 5            1
## 6            1
tail(data_simulasi_full)
##               x1          x2           x3           y1          y2           y3
## 195  1.110783067  1.06828746  1.050060400 -1.180046642 -1.30983749 -1.529072186
## 196  2.481704914  2.29690196  2.302749816 -2.091116224 -1.98887265 -2.195327279
## 197 -0.131296283  0.10045614 -0.025930839  0.079763442 -0.14024559  0.074332402
## 198 -0.807413949 -0.99949148 -0.896827847  0.812644854  0.79103571  0.560901151
## 199 -0.878669122 -0.84791038 -0.793217081  0.784972382  0.90066991  0.722386786
## 200  0.757807959  0.58870180  0.672241880 -0.971360002 -0.62598174 -0.787691299
##     True_Label
## 195          2
## 196          2
## 197          2
## 198          2
## 199          2
## 200          2
data_input <- data_simulasi_full[, 1:6]  # indicators only; drop the True_Label column

3. Running PCA-PI-PLS on All Data (Global)

pls_global=plsalgorithm(data = data_input,
                           matrix_outer_model = matrix_outer_model,
                           matrix_inner_model = matrix_inner_model,
                           indicator = indicator,
                           laten_variables = laten_variables)
## Covariance:
##            x1        x2        x3
## x1 1.0000000 0.9948351 0.9950455
## x2 0.9948351 1.0000000 0.9949455
## x3 0.9950455 0.9949455 1.0000000
## iteration: 1
## eigen vector:
##  [1] 0.4376506 0.4103228 0.8000607
## new eigen vector:
##          [,1]
## x1 0.5771604
## x2 0.5770827
## x3 0.5778075
## iteration: 2
## eigen vector:
##          [,1]
## x1 0.5771604
## x2 0.5770827
## x3 0.5778075
## new eigen vector:
##          [,1]
## x1 0.5773493
## x2 0.5773298
## x3 0.5773717
## iteration: 3
## eigen vector:
##          [,1]
## x1 0.5773493
## x2 0.5773298
## x3 0.5773717
## new eigen vector:
##          [,1]
## x1 0.5773496
## x2 0.5773303
## x3 0.5773710
## iteration: 4
## eigen vector:
##          [,1]
## x1 0.5773496
## x2 0.5773303
## x3 0.5773710
## new eigen vector:
##          [,1]
## x1 0.5773496
## x2 0.5773303
## x3 0.5773710
## iteration: 5
## eigen vector:
##          [,1]
## x1 0.5773496
## x2 0.5773303
## x3 0.5773710
## new eigen vector:
##          [,1]
## x1 0.5773496
## x2 0.5773303
## x3 0.5773710
## transformation check:
##              x1         x2         x3
## [1,]  1.0011254  1.0263349  1.0332523
## [2,]  0.6560364  0.6021001  0.5427552
## [3,]  1.0292325  1.0115529  0.9183819
## [4,] -0.7048864 -0.5879463 -0.6308827
## [5,] -1.5498867 -1.4369969 -1.6428596
## [6,] -0.9650573 -1.0137444 -1.0324873
##         [,1]
## x1 0.5773496
## x2 0.5773303
## x3 0.5773710
##           [,1]
## [1,]  1.767103
## [2,]  1.039744
## [3,]  1.708474
## [4,] -1.110658
## [5,] -2.672988
## [6,] -1.738569
## Covariance:
##            y1        y2        y3
## y1 1.0000000 0.9942435 0.9943640
## y2 0.9942435 1.0000000 0.9948101
## y3 0.9943640 0.9948101 1.0000000
## iteration: 1
## eigen vector:
##  [1] 0.9377378 0.3323570 0.1009286
## new eigen vector:
##          [,1]
## y1 0.5784723
## y2 0.5770183
## y3 0.5765584
## iteration: 2
## eigen vector:
##          [,1]
## y1 0.5784723
## y2 0.5770183
## y3 0.5765584
## new eigen vector:
##          [,1]
## y1 0.5772873
## y2 0.5773705
## y3 0.5773930
## iteration: 3
## eigen vector:
##          [,1]
## y1 0.5772873
## y2 0.5773705
## y3 0.5773930
## new eigen vector:
##          [,1]
## y1 0.5772849
## y2 0.5773713
## y3 0.5773946
## iteration: 4
## eigen vector:
##          [,1]
## y1 0.5772849
## y2 0.5773713
## y3 0.5773946
## new eigen vector:
##          [,1]
## y1 0.5772849
## y2 0.5773713
## y3 0.5773946
## iteration: 5
## eigen vector:
##          [,1]
## y1 0.5772849
## y2 0.5773713
## y3 0.5773946
## new eigen vector:
##          [,1]
## y1 0.5772849
## y2 0.5773713
## y3 0.5773946
## transformation check:
##              y1         y2         y3
## [1,]  0.9915833  1.0305803  0.9950764
## [2,]  0.6052728  0.6904871  0.6169446
## [3,]  0.8738221  1.0005026  0.9687593
## [4,] -0.5502681 -0.4192321 -0.5071872
## [5,] -1.6776313 -1.5497175 -1.6709678
## [6,] -0.8981595 -0.9366746 -0.9220760
##         [,1]
## y1 0.5772849
## y2 0.5773713
## y3 0.5773946
##            [,1]
## [1,]  1.7420053
## [2,]  1.1043027
## [3,]  1.6414622
## [4,] -0.8525612
## [5,] -2.8280414
## [6,] -1.5917047
## Outer Weight 
## Latent Variable  X_Eksogen
##           [,1]
## [1,] 0.3338964
## [2,] 0.3338852
## [3,] 0.3339087
## 
## Latent Variable  Y_Endogen
##           [,1]
## [1,] 0.3339114
## [2,] 0.3339614
## [3,] 0.3339748
## 
## Latent Variable Scores 
## Latent Variable  X_Eksogen
##            [,1]
## [1,]  1.0219621
## [2,]  0.6013112
## [3,]  0.9880552
## [4,] -0.6423228
## [5,] -1.5458587
## [6,] -1.0054599
## 
## Latent Variable  Y_Endogen
##            [,1]
## [1,]  1.0076054
## [2,]  0.6387475
## [3,]  0.9494496
## [4,] -0.4931359
## [5,] -1.6357872
## [6,] -0.9206690
## 
## Path Coefficient:
##  Latent Variable  X_Eksogen
## NULL
## 
## Latent Variable  Y_Endogen
##            [,1]
## [1,] 0.01329007
## 
## Outer Loading:
##  Latent Variable  X_Eksogen
##           [,1]
## [1,] 0.9983114
## [2,] 0.9982780
## [3,] 0.9983483
## 
## Latent Variable  Y_Endogen
##           [,1]
## [1,] 0.9980429
## [2,] 0.9981921
## [3,] 0.9982324
## 
## R Square:
##  Latent Variable  X_Eksogen
## [1] NA
## 
## Latent Variable  Y_Endogen
## [1] 0.000176626

4. Plot PCA-PI-PLS Global Result

### View Latent Scores for the Exogenous and Endogenous Variables
scores_list <- pls_global[["Y Final"]]
score_x <- as.numeric(scores_list[[1]])
score_y <- as.numeric(scores_list[[2]])

### Global Plot
library(ggplot2)
data_plot <- data.frame(Score_X = score_x, Score_Y = score_y)
ggplot(data_plot, aes(x = Score_X, y = Score_Y)) +
  geom_point(color = "#3498db", alpha = 0.6, size = 2) + # scatter points
  geom_abline(intercept = 0, slope = pls_global$`Path Coefficient`$Y_Endogen[1,1], color = "#e74c3c", linewidth = 1) + # regression line
  labs(
    title = "Scatter Plot Global Model: Exogenous vs Endogenous",
    x = "Latent Score Xi (Exogenous)",
    y = "Latent Score Eta (Endogenous)"
  ) +
  theme_minimal()

5. Angle-Based Segmentation (ABS)

theta=atan(score_y / (score_x + 0.000001))  # angle of each observation; the small constant guards against division by zero
theta_standardized=scale(theta)
dist_matrix <- dist(theta_standardized, method = "euclidean")
hac_model <- hclust(dist_matrix, method = "ward.D2")  # Ward's hierarchical clustering on the angles
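A note on the angle: atan(y/x) folds opposite quadrants onto the same angle, which is exactly what separates the two slopes in this design. As a sketch of a more general alternative (not used below), atan2() keeps the full direction:
theta_full <- atan2(score_y, score_x)  # distinguishes all four quadrants, not just the slope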

plot(hac_model,
     main = "Dendrogram HAC (Ward's Method)",
     xlab = "Observation",
     ylab = "Height (Euclidean Distance)",
     sub = "",
     labels = FALSE,
     hang = -1)
rect.hclust(hac_model, k = 2, border = c("red", "blue"))
abline(h = mean(rev(hac_model$height)[1:2]), col = "green", lty = 2)  # cut height between the top two merges

predicted_labels <- cutree(hac_model, k = 2)
print("Real Group")
## [1] "Real Group"
print(data_simulasi_full$True_Label)
##   [1] 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
##  [38] 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
##  [75] 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 2 2 2 2 2 2 2 2 2 2 2
## [112] 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2
## [149] 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2
## [186] 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2
print("Estimation Group With ABS Segmentation")
## [1] "Estimation Group With ABS Segmentation"
print(predicted_labels)
##   [1] 1 1 1 1 1 1 2 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
##  [38] 1 1 1 1 1 1 1 1 1 1 1 1 1 2 1 1 1 1 1 1 1 1 1 1 1 1 2 1 1 1 1 1 1 1 1 1 1
##  [75] 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 2 2 2 2 2 2 2 2 2 2 2
## [112] 2 2 1 2 2 2 1 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2
## [149] 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 1 2 2 2 2 1 2 2 2 2 2 2 2 2 2 2
## [186] 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2
true_labels <- data_simulasi_full$True_Label
conf_matrix <- table(True = true_labels, Predicted = predicted_labels)
print(conf_matrix)
##     Predicted
## True  1  2
##    1 97  3
##    2  5 95
acc_1 <- sum(diag(conf_matrix)) / sum(conf_matrix)                 # accuracy with labels as assigned
acc_2 <- (conf_matrix[1,2] + conf_matrix[2,1]) / sum(conf_matrix)  # accuracy with labels swapped
final_accuracy <- max(acc_1, acc_2) * 100  # cluster labels are arbitrary, so take the better match
cat("SUCCESS RATE / RECOVERY RATE: ", round(final_accuracy, 2), "%\n")
## SUCCESS RATE / RECOVERY RATE:  96 %
### Partition per Cluster
data_cluster=split(data_input,predicted_labels)  # one data frame per estimated segment

6. Results of PCA-PI-PLS-ABS

library(ggplot2)
library(patchwork)
print("Result of PCA-PI-PLS-ABS")
## [1] "Result of PCA-PI-PLS-ABS"
### PLS Local 1 with PCA-PI-PLS
print("PCA-PI-PLS Local Group A")
## [1] "PCA-PI-PLS Local Group A"
pls_local_1 <- plsalgorithm(data = data_cluster$`1`,
                           matrix_outer_model = matrix_outer_model,
                           matrix_inner_model = matrix_inner_model,
                           indicator = indicator,
                           laten_variables = laten_variables)
## Covariance:
##            x1        x2        x3
## x1 1.0000000 0.9946603 0.9940536
## x2 0.9946603 1.0000000 0.9946014
## x3 0.9940536 0.9946014 1.0000000
## iteration: 1
## eigen vector:
##  [1] 0.8862032 0.1762702 0.4284538
## new eigen vector:
##          [,1]
## x1 0.5781647
## x2 0.5767825
## x3 0.5771027
## iteration: 2
## eigen vector:
##          [,1]
## x1 0.5781647
## x2 0.5767825
## x3 0.5771027
## new eigen vector:
##          [,1]
## x1 0.5773203
## x2 0.5774236
## x3 0.5773068
## iteration: 3
## eigen vector:
##          [,1]
## x1 0.5773203
## x2 0.5774236
## x3 0.5773068
## new eigen vector:
##          [,1]
## x1 0.5773187
## x2 0.5774247
## x3 0.5773073
## iteration: 4
## eigen vector:
##          [,1]
## x1 0.5773187
## x2 0.5774247
## x3 0.5773073
## new eigen vector:
##          [,1]
## x1 0.5773187
## x2 0.5774247
## x3 0.5773073
## iteration: 5
## eigen vector:
##          [,1]
## x1 0.5773187
## x2 0.5774247
## x3 0.5773073
## new eigen vector:
##          [,1]
## x1 0.5773187
## x2 0.5774247
## x3 0.5773073
## transformation check:
##           x1         x2         x3
## 1  0.9960741  1.0170118  1.0207515
## 2  0.6499535  0.5922835  0.5327028
## 3  1.0242653  1.0022126  0.9064545
## 4 -0.7150375 -0.5991475 -0.6350767
## 5 -1.5625637 -1.4491859 -1.6420022
## 6 -0.9759861 -1.0254410 -1.0346767
##         [,1]
## x1 0.5773187
## x2 0.5774247
## x3 0.5773073
##        [,1]
## 1  1.751587
## 2  1.024763
## 3  1.693333
## 4 -1.125402
## 5 -2.686833
## 6 -1.752897
## Covariance:
##            y1        y2        y3
## y1 1.0000000 0.9939511 0.9948310
## y2 0.9939511 1.0000000 0.9943435
## y3 0.9948310 0.9943435 1.0000000
## iteration: 1
## eigen vector:
##  [1] 0.5979573 0.7878073 0.1476713
## new eigen vector:
##          [,1]
## y1 0.5774687
## y2 0.5778756
## y3 0.5767059
## iteration: 2
## eigen vector:
##          [,1]
## y1 0.5774687
## y2 0.5778756
## y3 0.5767059
## new eigen vector:
##          [,1]
## y1 0.5773565
## y2 0.5772633
## y3 0.5774310
## iteration: 3
## eigen vector:
##          [,1]
## y1 0.5773565
## y2 0.5772633
## y3 0.5774310
## new eigen vector:
##          [,1]
## y1 0.5773564
## y2 0.5772620
## y3 0.5774323
## iteration: 4
## eigen vector:
##          [,1]
## y1 0.5773564
## y2 0.5772620
## y3 0.5774323
## new eigen vector:
##          [,1]
## y1 0.5773564
## y2 0.5772620
## y3 0.5774323
## iteration: 5
## eigen vector:
##          [,1]
## y1 0.5773564
## y2 0.5772620
## y3 0.5774323
## new eigen vector:
##          [,1]
## y1 0.5773564
## y2 0.5772620
## y3 0.5774323
## transformation check:
##           y1         y2         y3
## 1  0.9561596  1.0011989  0.9575002
## 2  0.5670999  0.6591033  0.5803995
## 3  0.8375604  0.9709441  0.9312549
## 4 -0.5966643 -0.4571496 -0.5406671
## 5 -1.7320503 -1.5942910 -1.7012743
## 6 -0.9470315 -0.9776386 -0.9544246
##         [,1]
## y1 0.5773564
## y2 0.5772620
## y3 0.5774323
##         [,1]
## 1  1.6828905
## 2  1.0430355
## 3  1.5817967
## 4 -0.9205817
## 5 -2.9027048
## 6 -1.6622440
## Outer Weight 
## Latent Variable  X_Eksogen
##           [,1]
## [1,] 0.3339348
## [2,] 0.3339961
## [3,] 0.3339282
## 
## Latent Variable  Y_Endogen
##           [,1]
## [1,] 0.3339636
## [2,] 0.3339090
## [3,] 0.3340075
## 
## Latent Variable Scores 
## Latent Variable  X_Eksogen
##         [,1]
## 1  1.0131594
## 2  0.5927469
## 3  0.9794636
## 4 -0.6509588
## 5 -1.5541276
## 6 -1.0139167
## 
## Latent Variable  Y_Endogen
##         [,1]
## 1  0.9734442
## 2  0.6033291
## 3  0.9149679
## 4 -0.5324974
## 5 -1.6790284
## 6 -0.9615014
## 
## Path Coefficient:
##  Latent Variable  X_Eksogen
## NULL
## 
## Latent Variable  Y_Endogen
##           [,1]
## [1,] 0.9954932
## 
## Outer Loading:
##  Latent Variable  X_Eksogen
##           [,1]
## [1,] 0.9980899
## [2,] 0.9982732
## [3,] 0.9980702
## 
## Latent Variable  Y_Endogen
##           [,1]
## [1,] 0.9981339
## [2,] 0.9979708
## [3,] 0.9982652
## 
## R Square:
##  Latent Variable  X_Eksogen
## [1] NA
## 
## Latent Variable  Y_Endogen
## [1] 0.9910067
scores_list1 <- pls_local_1[["Y Final"]]
score_x1 <- as.numeric(scores_list1[[1]])
score_y1 <- as.numeric(scores_list1[[2]])
slope_1 <- pls_local_1$`Path Coefficient`$Y_Endogen[1,1]
data_plot1 <- data.frame(Score_X = score_x1, Score_Y = score_y1)


p1 <- ggplot(data_plot1, aes(x = Score_X, y = Score_Y)) +
  geom_point(color = "#3498db", alpha = 0.6, size = 2) +
  geom_abline(intercept = 0, slope = slope_1, color = "#e74c3c", linewidth = 1) +
  labs(
    title = "Local Model A (Cluster 1)",
    subtitle = paste("Path Coefficient =", round(slope_1, 3)),
    x = "Latent Score Ksi",
    y = "Latent Score Eta"
  ) +
  theme_minimal() +
  theme(plot.title = element_text(face = "bold"))


### PLS Local 2 with PCA-PI-PLS
print("PCA-PI-PLS Local Group B")
## [1] "PCA-PI-PLS Local Group B"
pls_local_2 <- plsalgorithm(data = data_cluster$`2`,
                           matrix_outer_model = matrix_outer_model,
                           matrix_inner_model = matrix_inner_model,
                           indicator = indicator,
                           laten_variables = laten_variables)
## Covariance:
##            x1        x2        x3
## x1 1.0000000 0.9950228 0.9961387
## x2 0.9950228 1.0000000 0.9953523
## x3 0.9961387 0.9953523 1.0000000
## iteration: 1
## eigen vector:
##  [1] 0.1030067 0.7340191 0.6712716
## new eigen vector:
##          [,1]
## x1 0.5767039
## x2 0.5777071
## x3 0.5776392
## iteration: 2
## eigen vector:
##          [,1]
## x1 0.5767039
## x2 0.5777071
## x3 0.5776392
## new eigen vector:
##          [,1]
## x1 0.5773787
## x2 0.5772285
## x3 0.5774436
## iteration: 3
## eigen vector:
##          [,1]
## x1 0.5773787
## x2 0.5772285
## x3 0.5774436
## new eigen vector:
##          [,1]
## x1 0.5773797
## x2 0.5772277
## x3 0.5774434
## iteration: 4
## eigen vector:
##          [,1]
## x1 0.5773797
## x2 0.5772277
## x3 0.5774434
## new eigen vector:
##          [,1]
## x1 0.5773797
## x2 0.5772277
## x3 0.5774434
## iteration: 5
## eigen vector:
##          [,1]
## x1 0.5773797
## x2 0.5772277
## x3 0.5774434
## new eigen vector:
##          [,1]
## x1 0.5773797
## x2 0.5772277
## x3 0.5774434
## transformation check:
##              x1          x2           x3
## 7   -0.07595861  0.03071019 -0.078211550
## 51   0.09001629  0.04738018  0.003310397
## 64  -0.04515669 -0.04986728  0.014560060
## 101 -1.47692871 -1.41770642 -1.350293554
## 102  0.80888556  0.67678574  0.856260025
## 103 -0.69990273 -0.85678025 -0.847346034
##         [,1]
## x1 0.5773797
## x2 0.5772277
## x3 0.5774434
##            [,1]
## 7   -0.07129293
## 51   0.08123430
## 64  -0.04644972
## 101 -2.45080616
## 102  1.35213527
## 103 -1.38796128
## Covariance:
##            y1        y2        y3
## y1 1.0000000 0.9945714 0.9940165
## y2 0.9945714 1.0000000 0.9953521
## y3 0.9940165 0.9953521 1.0000000
## iteration: 1
## eigen vector:
##  [1] 0.1565895 0.3435132 0.9260013
## new eigen vector:
##          [,1]
## y1 0.5763850
## y2 0.5772997
## y3 0.5783644
## iteration: 2
## eigen vector:
##          [,1]
## y1 0.5763850
## y2 0.5772997
## y3 0.5783644
## new eigen vector:
##          [,1]
## y1 0.5772120
## y2 0.5774721
## y3 0.5773667
## iteration: 3
## eigen vector:
##          [,1]
## y1 0.5772120
## y2 0.5774721
## y3 0.5773667
## new eigen vector:
##          [,1]
## y1 0.5772137
## y2 0.5774722
## y3 0.5773649
## iteration: 4
## eigen vector:
##          [,1]
## y1 0.5772137
## y2 0.5774722
## y3 0.5773649
## new eigen vector:
##          [,1]
## y1 0.5772137
## y2 0.5774722
## y3 0.5773649
## iteration: 5
## eigen vector:
##          [,1]
## y1 0.5772137
## y2 0.5774722
## y3 0.5773649
## new eigen vector:
##          [,1]
## y1 0.5772137
## y2 0.5774722
## y3 0.5773649
## transformation check:
##              y1          y2          y3
## 7    0.15643774  0.14084529  0.12335052
## 51   0.01587383 -0.02765715  0.02691530
## 64   0.16012423  0.07279721 -0.02029278
## 101  1.31891610  1.24664021  1.31201170
## 102 -0.55574571 -0.65870954 -0.71466245
## 103  0.71424372  0.59377896  0.70049636
##         [,1]
## y1 0.5772137
## y2 0.5774722
## y3 0.5773649
##            [,1]
## 7    0.24285051
## 51   0.00873131
## 64   0.12274793
## 101  2.23870599
## 102 -1.11379148
## 103  1.15960411
## Outer Weight 
## Latent Variable  X_Eksogen
##           [,1]
## [1,] 0.3338510
## [2,] 0.3337630
## [3,] 0.3338878
## 
## Latent Variable  Y_Endogen
##           [,1]
## [1,] 0.3338508
## [2,] 0.3340002
## [3,] 0.3339382
## 
## Latent Variable Scores 
## Latent Variable  X_Eksogen
##            [,1]
## 7   -0.04122281
## 51   0.04697108
## 64  -0.02685803
## 101 -1.41709865
## 102  0.78182808
## 103 -0.80254329
## 
## Latent Variable  Y_Endogen
##             [,1]
## 7    0.140460672
## 51   0.005050044
## 64   0.070995351
## 101  1.294830100
## 102 -0.644198361
## 103  0.670695620
## 
## Path Coefficient:
##  Latent Variable  X_Eksogen
## NULL
## 
## Latent Variable  Y_Endogen
##            [,1]
## [1,] -0.9944641
## 
## Outer Loading:
##  Latent Variable  X_Eksogen
##           [,1]
## [1,] 0.9985514
## [2,] 0.9982884
## [3,] 0.9986615
## 
## Latent Variable  Y_Endogen
##           [,1]
## [1,] 0.9979779
## [2,] 0.9984247
## [3,] 0.9982392
## 
## R Square:
##  Latent Variable  X_Eksogen
## [1] NA
## 
## Latent Variable  Y_Endogen
## [1] 0.9889589
scores_list2 <- pls_local_2[["Y Final"]]
score_x2 <- as.numeric(scores_list2[[1]])
score_y2 <- as.numeric(scores_list2[[2]])
slope_2 <- pls_local_2$`Path Coefficient`$Y_Endogen[1,1]
data_plot2 <- data.frame(Score_X = score_x2, Score_Y = score_y2)

p2 <- ggplot(data_plot2, aes(x = Score_X, y = Score_Y)) +
  geom_point(color = "#3498db", alpha = 0.6, size = 2) +
  geom_abline(intercept = 0, slope = slope_2, color = "#e74c3c", linewidth = 1) +
  labs(
    title = "Local Model B (Cluster 2)",
    subtitle = paste("Path Coefficient =", round(slope_2, 3)),
    x = "Latent Score Ksi",
    y = "Latent Score Eta"
  ) +
  theme_minimal() +
  theme(plot.title = element_text(face = "bold"))

final_plot <- p1 + p2 +
  plot_annotation(
    title = 'Comparison of Local Models: Group A vs Group B',
    caption = 'Method: PCA-PI-PLS-ABS'
  )
print(final_plot)