library(readxl)
datos<-read_excel("C:/Users/corte/Desktop/datos_parcial_2_repetido.xlsx")
library(psych)
modelo_3<-principal(r = datos, nfactors = 3, covar = FALSE, rotate = "varimax")
modelo_3$loadings
##
## Loadings:
## RC2 RC1 RC3
## X1 0.597 0.278
## X2 0.827
## X3 0.896
## X4 0.925
## X5 0.875
## X6 0.124 0.284 0.496
## X7 -0.472 0.649 0.322
## X8 -0.156 0.819
## X9 0.907
## X10 0.541 0.395
## X11 0.702 0.386
##
## RC2 RC1 RC3
## SS loadings 2.977 2.785 1.520
## Proportion Var 0.271 0.253 0.138
## Cumulative Var 0.271 0.524 0.662
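The proportions of variance in the table above are each component's SS loadings divided by the number of variables (11 in this data set); a quick arithmetic check:
# Proportion Var = SS loadings / number of variables, reproducing the table above
round(c(RC2 = 2.977, RC1 = 2.785, RC3 = 1.520) / 11, 3)
##   RC2   RC1   RC3
## 0.271 0.253 0.138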
In Factor 1: X8 and X9
In Factor 2: X2, X3 and X4
In Factor 3: X5
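As a quick programmatic check of this assignment (not part of the original output), the dominant rotated component of each variable can be read off the full loadings matrix:
# For each variable, report the rotated component with the largest absolute loading
cargas <- unclass(modelo_3$loadings)   # plain numeric matrix of loadings
apply(abs(cargas), 1, function(fila) colnames(cargas)[which.max(fila)])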
# Functions to normalize the data (min-max scaling)
norm_directa <- function(x){
  # direct normalization: larger raw values map to larger scores in [0, 1]
  return((x - min(x)) / (max(x) - min(x)))
}
norm_inversa <- function(x){
  # inverse normalization: larger raw values map to smaller scores in [0, 1]
  return((max(x) - x) / (max(x) - min(x)))
}
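A quick check on a toy vector (values chosen only for illustration): both functions map onto [0, 1], with norm_directa preserving the original ordering and norm_inversa reversing it.
x_prueba <- c(2, 5, 8)   # illustrative values, not from the data set
norm_directa(x_prueba)   # 0.0 0.5 1.0
norm_inversa(x_prueba)   # 1.0 0.5 0.0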
# Normalization of the data for Factor 1 (X8 and X9)
library(dplyr)
datos %>%
  dplyr::select(X8, X9) %>%
  dplyr::transmute(X8 = norm_directa(X8), X9 = norm_directa(X9)) -> data_factor_1
print(data_factor_1)
## # A tibble: 111 x 2
## X8 X9
## <dbl> <dbl>
## 1 0.975 0.881
## 2 0.774 0.814
## 3 1 0.976
## 4 0.830 0.746
## 5 0.833 0.599
## 6 0.838 0.898
## 7 0.814 0.932
## 8 0.578 0.623
## 9 0.846 0.678
## 10 0.800 0.785
## # ... with 101 more rows
# Standard deviation of each variable
data_factor_1 %>% dplyr::summarise(S8 = sd(X8), S9 = sd(X9)) -> sd_vector
print(sd_vector)
## # A tibble: 1 x 2
## S8 S9
## <dbl> <dbl>
## 1 0.195 0.242
# Correlation matrix
cor(data_factor_1)->mat_R_F1
print(mat_R_F1)
## X8 X9
## X8 1.0000000 0.7257355
## X9 0.7257355 1.0000000
# Raw weights: standard deviation of each variable times the column sums of (1 - R)
1-mat_R_F1->sum_data
colSums(sum_data)->sum_vector
sd_vector*sum_vector->vj
print(vj)
## S8 S9
## 1 0.05339757 0.0663698
# Net weights (raw weights scaled to sum to 1)
vj/sum(vj)->wj
print(wj)
## S8 S9
## 1 0.445844 0.554156
# Weights expressed as percentages
print(round(wj*100,2))
## S8 S9
## 1 44.58 55.42
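The same steps can be packaged into one function for reuse; this is only a sketch (the name pesos_sd_cor is introduced here, not in the original) and should reproduce the weights wj computed above.
# Weights proportional to sd_j * sum_k (1 - r_jk), normalized to sum to 1
pesos_sd_cor <- function(df){
  vj <- apply(df, 2, sd) * colSums(1 - cor(df))
  return(vj / sum(vj))
}
pesos_sd_cor(data_factor_1)   # expected: approximately 0.446 (X8) and 0.554 (X9)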
# Normalization of the data for Factor 2 (X2, X3 and X4)
datos %>% dplyr::select(X2, X3, X4) -> data_norm
apply(data_norm, 1, prop.table) -> data_norm   # row-wise proportions; note apply() returns the transpose (variables in rows)
# Entropy term: p * log(p) for each normalized value
entropy <- function(x){
  return(x * log(x))
}
apply(data_norm, 1, entropy) -> data_norm_2   # transposed back: observations in rows, variables in columns
data_norm_2 <- na.omit(data_norm_2)           # drop rows where a zero produced NaN (0 * log(0))
head(data_norm_2,15)
## X2 X3 X4
## [1,] -0.2845680 -0.3288104 -0.3611918
## [2,] -0.3218876 -0.3556654 -0.3662041
## [3,] -0.3300954 -0.3197805 -0.3300954
## [4,] -0.3579323 -0.3631277 -0.3579323
## [5,] -0.2179905 -0.3667584 -0.3427084
## [6,] -0.2932130 -0.3400418 -0.3646190
## [7,] -0.1732868 -0.2823672 -0.3593752
## [8,] -0.3006371 -0.3678770 -0.3568569
## [9,] -0.3567688 -0.3659445 -0.3634846
## [10,] -0.3579323 -0.3579323 -0.3631277
## [11,] -0.2302585 -0.3064954 -0.3611918
## [12,] -0.2070756 -0.3575727 -0.3575727
## [13,] -0.2346914 -0.3677037 -0.3409891
## [14,] -0.1666596 -0.3675720 -0.3121343
## [15,] -0.2393135 -0.3654772 -0.3517304
# Number of columns of data_norm; after the apply() above this is the number of observations, not variables
ncol(data_norm) -> m
# Entropy constant
-1/log(m)->K
print(K)
## [1] -0.2123354
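The printed value confirms that m here is the number of observations: after apply(..., 1, prop.table) the matrix is 3 x 111, so ncol() returns 111 and -1/log(111) is approximately -0.2123354. A quick check:
dim(data_norm)   # 3 111: variables in rows, observations in columns
-1 / log(111)    # -0.2123354, matching K above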
# Computation of the entropies
K*colSums(data_norm_2)->Ej
print(Ej)
## X2 X3 X4
## 5.238436 6.792271 6.862508
# Computation of the specificities (1 - entropy)
1-Ej->vj
print(vj)
## X2 X3 X4
## -4.238436 -5.792271 -5.862508
# Computation of the weights (specificities normalized to sum to 1)
prop.table(vj)->wj
print(wj)
## X2 X3 X4
## 0.2666821 0.3644493 0.3688686
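These entropy steps can also be collected into a single function; this is only a sketch that follows the exact order used above (the name pesos_entropia is introduced here, not in the original) and should reproduce the weights wj just printed.
pesos_entropia <- function(df){
  p  <- apply(df, 1, prop.table)                 # row-wise proportions (result is transposed)
  e  <- na.omit(apply(p, 1, function(x) x * log(x)))
  Ej <- (-1 / log(ncol(p))) * colSums(e)         # entropy of each variable
  return(prop.table(1 - Ej))                     # normalized weights
}
pesos_entropia(datos %>% dplyr::select(X2, X3, X4))   # expected: approximately 0.267, 0.364, 0.369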
# Factor 3 contains a single variable, so X5 receives the full weight
pj <- 1
ponderadores <- pj / sum(pj)
names(ponderadores) <- "X5"
print(ponderadores)
## X5
##  1