Using principal component analysis, identify the following for a 3-factor model:
#DATA LOADING
load("C:/Users/8abla/Documents/MAE118/PRACTICA PRE PARCIAL 2/data_parcial_2_A_rev.RData")
#PREPARING THE INFORMATION MATRIX
library(kableExtra)
mat_XA<-datos_parcial_2
mat_A<-mat_XA[,c(-1,-2)]
mat_A %>% head() %>%
kable(caption ="Information matrix:" ,align = "c",digits = 6) %>%
kable_material(html_font = "sans-serif")
| X1 | X2 | X3 | X4 | X5 | X6 | X7 | X8 |
|---|---|---|---|---|---|---|---|
| 9 | 2 | 20.00000 | 20.00000 | 0.00000 | 0.000000 | 2 | 56.4000 |
| 10 | 6 | 62.50000 | 50.00000 | 37.50000 | 3.947368 | 11 | 147.3750 |
| 10 | 20 | 50.00000 | 50.00000 | 50.00000 | 2.564103 | 16 | 135.0000 |
| 8 | 3 | 42.85714 | 42.85714 | 14.28571 | 1.351351 | 35 | 121.1429 |
| 7 | 7 | 75.00000 | 75.00000 | 75.00000 | 9.090909 | 8 | 202.5000 |
| 6 | 13 | 30.00000 | 30.00000 | 30.00000 | 8.108108 | 25 | 81.0000 |
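The working matrix mat_A keeps the eight indicators X1 to X8 for all 108 observations after dropping the two identifier columns; a quick structural check, shown here only as an illustration, would be:
#Illustrative structural check: 108 observations and 8 indicator columns (X1 to X8)
dim(mat_A)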
#DATA NORMALIZATION
library(dplyr)
norm_directa<-function(x){(x-min(x))/(max(x)-min(x))}
norm_inversa<-function(x){(max(x)-x)/(max(x)-min(x))}
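Before applying them to the data, the two normalization functions can be sanity-checked on a small toy vector (illustrative only): the direct version maps the minimum to 0 and the maximum to 1, while the inverse version reverses that ordering.
#Illustrative check of the normalization functions on a toy vector
norm_directa(c(2,5,8)) #returns 0.0 0.5 1.0
norm_inversa(c(2,5,8)) #returns 1.0 0.5 0.0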
#Selecting the variables positively correlated with Financial Health
mat_A %>%
select(X1,X2,X3,X5,X6,X8) %>%
apply(MARGIN = 2,FUN = norm_directa) %>% as.data.frame()->variables_corr_positiva
#Selecting the variables negatively correlated with Financial Health
mat_A %>%
select(X4,X7) %>%
apply(MARGIN = 2,FUN = norm_inversa) %>% as.data.frame()->variables_corr_negativa
#Combining and reordering the variables
variables_corr_positiva %>%
bind_cols(variables_corr_negativa) %>%
select(X1,X2,X3,X4,X5,X6,X7,X8)->datos_normalizados
head(datos_normalizados)
## X1 X2 X3 X4 X5 X6 X7
## 1 0.19354839 0.000000000 0.0400000 0.8000000 0.0000000 0.00000000 1.0000000
## 2 0.22580645 0.017167382 0.5500000 0.5000000 0.4285714 0.21558704 0.9010989
## 3 0.22580645 0.077253219 0.4000000 0.5000000 0.5714286 0.14003945 0.8461538
## 4 0.16129032 0.004291845 0.3142857 0.5714286 0.1632653 0.07380457 0.6373626
## 5 0.12903226 0.021459227 0.7000000 0.2500000 0.8571429 0.49650350 0.9340659
## 6 0.09677419 0.047210300 0.1600000 0.7000000 0.3428571 0.44282744 0.7472527
## X8
## 1 0.1582266
## 2 0.5167488
## 3 0.4679803
## 4 0.4133709
## 5 0.7339901
## 6 0.2551724
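Because every variable was min-max normalized, each column of datos_normalizados should range exactly from 0 to 1; a quick illustrative check:
#Each normalized column should span exactly [0, 1]
sapply(datos_normalizados, range)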
#CORRELATION MATRIX AND THE BARTLETT AND KMO TESTS
#Correlation matrix
library(PerformanceAnalytics)
chart.Correlation(as.matrix(datos_normalizados),histogram = TRUE,pch=12)
The chart shows clear correlation among the variables proposed in the indicator battery, as marked by the asterisks, and several of the pairwise correlations are significant at the 1% level.
#KMO
library(rela)
KMO<-paf(as.matrix(datos_normalizados))$KMO
print(KMO)
## [1] 0.67931
The minimum KMO value for factor analysis to be considered acceptable is 0.5; the indicator battery scores 0.67931, so it is appropriate to continue with the analysis.
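As an optional cross-check, not part of the original output, the same statistic can be obtained from the psych package, which also reports the sampling adequacy of each individual variable:
#Optional cross-check of the KMO statistic (also gives per-variable MSA)
psych::KMO(cor(datos_normalizados))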
#Bartlett's test
library(psych)
options(scipen = 99999)
Barlett<-cortest.bartlett(datos_normalizados)
print(Barlett)
## $chisq
## [1] 1025.9
##
## $p.value
## [1] 0.0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000046951
##
## $df
## [1] 28
The p-value is essentially 0, so the null hypothesis that the correlation matrix is an identity matrix is rejected: there is evidence of population correlation among the proposed indicators.
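A minimal explicit decision rule at the usual 5% significance level (illustrative sketch, not part of the original script):
#Explicit decision rule: reject H0 (identity correlation matrix) when the p-value < 0.05
Barlett$p.value < 0.05 #TRUE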
#FACTOR ANALYSIS
library(FactoMineR)
library(factoextra)
library(kableExtra)
Rx<-cor(datos_normalizados)
PC<-princomp(x = datos_normalizados,cor = TRUE,fix_sign = FALSE)
variables_pca<-get_pca_var(PC)
factoextra::get_eig(PC) %>% kable(caption="PCA Summary",
align = "c",
digits = 2) %>%
kable_material(html_font = "sans-serif") %>%
kable_styling(bootstrap_options = c("hover"))
|  | eigenvalue | variance.percent | cumulative.variance.percent |
|---|---|---|---|
| Dim.1 | 3.90 | 48.72 | 48.72 |
| Dim.2 | 1.96 | 24.55 | 73.27 |
| Dim.3 | 0.84 | 10.52 | 83.78 |
| Dim.4 | 0.50 | 6.24 | 90.03 |
| Dim.5 | 0.45 | 5.68 | 95.70 |
| Dim.6 | 0.28 | 3.45 | 99.16 |
| Dim.7 | 0.07 | 0.82 | 99.98 |
| Dim.8 | 0.00 | 0.02 | 100.00 |
By the latent-root criterion (eigenvalues greater than 1) we would keep 2 components.
By the cumulative-variance criterion we would keep 3 components, since together they explain 83.78% of the total variance, which exceeds three quarters of the total.
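Both criteria can also be applied programmatically to the eigenvalue table already computed above (illustrative sketch; the object name autovalores is introduced here only for this example):
#Number of components suggested by each selection criterion
autovalores<-factoextra::get_eig(PC)
sum(autovalores$eigenvalue>1) #latent-root criterion: 2 components
which(autovalores$cumulative.variance.percent>=75)[1] #cumulative-variance criterion (75%): 3 components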
#SCREE PLOT
fviz_eig(PC,
choice = "eigenvalue",
barcolor = "red",
barfill = "red",
addlabels = TRUE,
)+labs(title = "Scree Plot",subtitle = "Using princomp, with Eigenvalues")+
xlab(label = "Components")+
ylab(label = "Eigenvalues")+geom_hline(yintercept = 1)
#ROTATING THE SOLUTION
library(corrplot)
#3-Factor Model (Rotated)
numero_de_factores<-3
modelo_factores<-principal(r = Rx,
nfactors = numero_de_factores,
covar = FALSE,
rotate = "varimax")
modelo_factores
## Principal Components Analysis
## Call: principal(r = Rx, nfactors = numero_de_factores, rotate = "varimax",
## covar = FALSE)
## Standardized loadings (pattern matrix) based upon correlation matrix
## RC1 RC2 RC3 h2 u2 com
## X1 -0.16 0.80 -0.03 0.67 0.3316 1.1
## X2 0.08 0.84 -0.03 0.71 0.2879 1.0
## X3 0.93 -0.09 0.28 0.95 0.0493 1.2
## X4 -0.95 0.05 -0.26 0.98 0.0208 1.2
## X5 0.43 -0.06 0.80 0.83 0.1742 1.5
## X6 0.25 -0.03 0.91 0.89 0.1142 1.2
## X7 0.07 -0.83 0.04 0.69 0.3107 1.0
## X8 0.96 -0.06 0.27 0.99 0.0087 1.2
##
## RC1 RC2 RC3
## SS loadings 2.97 2.05 1.68
## Proportion Var 0.37 0.26 0.21
## Cumulative Var 0.37 0.63 0.84
## Proportion Explained 0.44 0.31 0.25
## Cumulative Proportion 0.44 0.75 1.00
##
## Mean item complexity = 1.2
## Test of the hypothesis that 3 components are sufficient.
##
## The root mean square of the residuals (RMSR) is 0.06
##
## Fit based upon off diagonal values = 0.98
With 3 factors, 67% of the variance of the first variable is explained by the solution, 71% of the variance of the second variable is explained by the extraction, and 95% of the variance of the third variable is explained. The solution is therefore representative of the original data.
Among the extracted weights, the first factor that is constructed will carry a weight of 0.44, the second 0.31, and the third 0.25.
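These communalities and factor weights can be read directly off the fitted psych object (illustrative extraction):
#Communalities (h2) of each variable and variance accounted for by the rotated factors
round(modelo_factores$communality,2) #X1 = 0.67, X2 = 0.71, X3 = 0.95, ...
modelo_factores$Vaccounted #its Proportion Explained row gives 0.44, 0.31 and 0.25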
correlaciones_modelo<-variables_pca$coord
rotacion<-varimax(correlaciones_modelo[,1:numero_de_factores])
correlaciones_modelo_rotada<-rotacion$loadings
corrplot(correlaciones_modelo_rotada[,1:numero_de_factores],
is.corr = FALSE,
method = "square",
addCoef.col="black",
number.cex = 0.75)
Dimension 1 is represented by X3, X4, and X8. Dimension 2 is most associated with X1, X2, and X7. Dimension 3 is most associated with X5 and X6.
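The same grouping can be recovered programmatically by assigning each variable to the rotated factor on which it has the largest absolute loading (illustrative sketch):
#Assign each variable to the rotated factor with the largest absolute loading
apply(abs(correlaciones_modelo_rotada[,1:numero_de_factores]),MARGIN = 1,FUN = which.max)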
library(kableExtra)
cargas<-rotacion$loadings[1:8,1:numero_de_factores]
ponderadores<-prop.table(apply(cargas^2,MARGIN = 2,sum))
t(ponderadores) %>% kable(caption="Weights of the Extracted Factors",
align = "c",
digits = 2) %>%
kable_material(html_font = "sans-serif") %>%
kable_styling(bootstrap_options = c("striped", "hover"))
| Dim.1 | Dim.2 | Dim.3 |
|---|---|---|
| 0.44 | 0.31 | 0.25 |
contribuciones<-apply(cargas^2,MARGIN = 2,prop.table)
contribuciones %>% kable(caption="Contribution of the Variables to the Factors",
align = "c",
digits = 2) %>%
kable_material(html_font = "sans-serif") %>%
kable_styling(bootstrap_options = c("striped", "hover"))
|  | Dim.1 | Dim.2 | Dim.3 |
|---|---|---|---|
| X1 | 0.01 | 0.31 | 0.00 |
| X2 | 0.00 | 0.34 | 0.00 |
| X3 | 0.29 | 0.00 | 0.05 |
| X4 | 0.31 | 0.00 | 0.04 |
| X5 | 0.06 | 0.00 | 0.38 |
| X6 | 0.02 | 0.00 | 0.49 |
| X7 | 0.00 | 0.33 | 0.00 |
| X8 | 0.31 | 0.00 | 0.04 |
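By construction each column of the contribution matrix sums to 1, which can be verified directly (illustrative check):
#Each factor's contributions add up to 1
colSums(contribuciones)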
For factor 1, use the CRITIC method to obtain the normalized weights for each variable.
library(dplyr)
# Functions to normalize the data
norm_directa_a <- function(x){
return((x-min(x)) / (max(x)-min(x)))
}
norm_inverza_a <- function(x){
return((max(x)-x) / (max(x)-min(x)))
}
# Normalizing the data
datos_parcial_2 %>% dplyr::select(X3,X4,X8) %>%
dplyr::transmute(X3 = norm_directa_a(X3),
X4 = norm_inverza_a(X4),
X8 = norm_directa_a(X8)) -> data_factor_1
print(data_factor_1)
## # A tibble: 108 × 3
## X3 X4 X8
## <dbl> <dbl> <dbl>
## 1 0.04 0.8 0.158
## 2 0.55 0.5 0.517
## 3 0.4 0.5 0.468
## 4 0.314 0.571 0.413
## 5 0.7 0.25 0.734
## 6 0.16 0.7 0.255
## 7 0.673 0.273 0.714
## 8 0.55 0.5 0.446
## 9 0.4 0.563 0.433
## 10 0.68 0.467 0.552
## # … with 98 more rows
#Computing the standard deviation of each variable
data_factor_1 %>% dplyr::summarise(S3=sd(X3),S4=sd(X4),S8=sd(X8))-> sd_vector
print(sd_vector)
## # A tibble: 1 × 3
## S3 S4 S8
## <dbl> <dbl> <dbl>
## 1 0.246 0.201 0.209
#Computing the correlation matrix
cor(data_factor_1)->mat_R_F1
print(mat_R_F1)
## X3 X4 X8
## X3 1.00000 -0.93872 0.95904
## X4 -0.93872 1.00000 -0.99585
## X8 0.95904 -0.99585 1.00000
#Computing the raw weights
1-mat_R_F1->sum_data
colSums(sum_data)->sum_vector
sd_vector*sum_vector->vj
print(vj)
## S3 S4 S8
## 1 0.48755 0.79129 0.42517
#Computing the normalized (net) weights
vj/sum(vj)->wj
print(wj)
## S3 S4 S8
## 1 0.28612 0.46437 0.24951
print(round(wj*100,2))
## S3 S4 S8
## 1 28.61 46.44 24.95
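The CRITIC steps above can be collected into a small reusable helper that reproduces the same computation (a sketch; the function name critic_pesos is introduced here only for illustration and assumes the input data frame is already normalized):
#Illustrative wrapper of the CRITIC steps (input: an already-normalized data frame)
critic_pesos<-function(datos_norm){
  s<-apply(datos_norm,MARGIN = 2,FUN = sd) #standard deviation of each variable
  R<-cor(datos_norm) #correlation matrix
  vj<-s*colSums(1-R) #raw weights: dispersion times conflict with the other variables
  vj/sum(vj) #normalized weights
}
critic_pesos(data_factor_1) #reproduces wj: approximately 0.286, 0.464 and 0.250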
For factor 2, use the Entropy method to obtain the normalized weights for each variable.
mat_A %>% dplyr::select(X1,X2,X7)->data_norm
apply(data_norm,2,prop.table)->data_norm
print(data_norm)
## X1 X2 X7
## [1,] 0.0078125 0.00070734 0.0013986
## [2,] 0.0086806 0.00212202 0.0076923
## [3,] 0.0086806 0.00707339 0.0111888
## [4,] 0.0069444 0.00106101 0.0244755
## [5,] 0.0060764 0.00247569 0.0055944
## [6,] 0.0052083 0.00459770 0.0174825
## [7,] 0.0095486 0.00318302 0.0062937
## [8,] 0.0078125 0.00106101 0.0041958
## [9,] 0.0086806 0.00141468 0.0034965
## [10,] 0.0104167 0.00212202 0.0041958
## [11,] 0.0112847 0.00141468 0.0020979
## [12,] 0.0095486 0.00070734 0.0062937
## [13,] 0.0104167 0.00424403 0.0013986
## [14,] 0.0147569 0.00141468 0.0027972
## [15,] 0.0086806 0.00636605 0.0160839
## [16,] 0.0104167 0.00070734 0.0048951
## [17,] 0.0078125 0.00141468 0.0125874
## [18,] 0.0060764 0.00247569 0.0125874
## [19,] 0.0069444 0.00318302 0.0027972
## [20,] 0.0095486 0.00282935 0.0153846
## [21,] 0.0104167 0.00707339 0.0097902
## [22,] 0.0095486 0.00565871 0.0013986
## [23,] 0.0095486 0.00176835 0.0111888
## [24,] 0.0060764 0.01061008 0.0209790
## [25,] 0.0069444 0.00565871 0.0027972
## [26,] 0.0086806 0.00636605 0.0048951
## [27,] 0.0104167 0.00565871 0.0062937
## [28,] 0.0052083 0.01061008 0.0111888
## [29,] 0.0086806 0.06825818 0.0020979
## [30,] 0.0095486 0.00141468 0.0069930
## [31,] 0.0086806 0.00070734 0.0013986
## [32,] 0.0043403 0.00141468 0.0027972
## [33,] 0.0112847 0.01237843 0.0069930
## [34,] 0.0069444 0.00141468 0.0055944
## [35,] 0.0060764 0.00106101 0.0048951
## [36,] 0.0190972 0.00813439 0.0076923
## [37,] 0.0086806 0.00212202 0.0034965
## [38,] 0.0078125 0.00353669 0.0090909
## [39,] 0.0078125 0.01591512 0.0027972
## [40,] 0.0078125 0.00282935 0.0013986
## [41,] 0.0095486 0.00282935 0.0034965
## [42,] 0.0095486 0.00353669 0.0076923
## [43,] 0.0052083 0.00070734 0.0139860
## [44,] 0.0112847 0.00176835 0.0034965
## [45,] 0.0069444 0.04244032 0.0209790
## [46,] 0.0069444 0.00070734 0.0013986
## [47,] 0.0086806 0.00141468 0.0027972
## [48,] 0.0060764 0.00106101 0.0020979
## [49,] 0.0086806 0.00353669 0.0069930
## [50,] 0.0086806 0.00389036 0.0027972
## [51,] 0.0069444 0.00070734 0.0013986
## [52,] 0.0052083 0.00389036 0.0076923
## [53,] 0.0121528 0.00282935 0.0027972
## [54,] 0.0052083 0.00070734 0.0062937
## [55,] 0.0130208 0.00159151 0.0209790
## [56,] 0.0095486 0.00106101 0.0020979
## [57,] 0.0052083 0.00459770 0.0650350
## [58,] 0.0182292 0.00070734 0.0041958
## [59,] 0.0095486 0.00070734 0.0013986
## [60,] 0.0104167 0.00176835 0.0048951
## [61,] 0.0069444 0.00247569 0.0125874
## [62,] 0.0295139 0.08311229 0.0650350
## [63,] 0.0078125 0.00070734 0.0097902
## [64,] 0.0104167 0.06825818 0.0440559
## [65,] 0.0069444 0.00707339 0.0034965
## [66,] 0.0104167 0.01061008 0.0020979
## [67,] 0.0086806 0.06790451 0.0118881
## [68,] 0.0086806 0.00459770 0.0209790
## [69,] 0.0104167 0.00565871 0.0069930
## [70,] 0.0052083 0.00070734 0.0111888
## [71,] 0.0104167 0.01591512 0.0013986
## [72,] 0.0086806 0.00318302 0.0195804
## [73,] 0.0086806 0.00212202 0.0041958
## [74,] 0.0078125 0.00070734 0.0013986
## [75,] 0.0112847 0.00212202 0.0041958
## [76,] 0.0086806 0.00212202 0.0020979
## [77,] 0.0086806 0.06790451 0.0013986
## [78,] 0.0086806 0.00247569 0.0041958
## [79,] 0.0086806 0.00106101 0.0027972
## [80,] 0.0069444 0.00636605 0.0125874
## [81,] 0.0078125 0.01061008 0.0209790
## [82,] 0.0295139 0.08311229 0.0650350
## [83,] 0.0060764 0.06861185 0.0034965
## [84,] 0.0095486 0.00070734 0.0013986
## [85,] 0.0086806 0.00176835 0.0034965
## [86,] 0.0060764 0.00070734 0.0013986
## [87,] 0.0052083 0.00106101 0.0027972
## [88,] 0.0130208 0.00176835 0.0062937
## [89,] 0.0086806 0.00070734 0.0013986
## [90,] 0.0138889 0.00212202 0.0069930
## [91,] 0.0078125 0.00176835 0.0062937
## [92,] 0.0043403 0.00141468 0.0013986
## [93,] 0.0034722 0.00141468 0.0020979
## [94,] 0.0121528 0.05057471 0.0027972
## [95,] 0.0078125 0.00141468 0.0048951
## [96,] 0.0199653 0.00247569 0.0048951
## [97,] 0.0086806 0.00141468 0.0027972
## [98,] 0.0078125 0.00247569 0.0048951
## [99,] 0.0095486 0.00282935 0.0440559
## [100,] 0.0295139 0.08311229 0.0650350
## [101,] 0.0060764 0.00106101 0.0034965
## [102,] 0.0052083 0.00141468 0.0020979
## [103,] 0.0104167 0.00106101 0.0013986
## [104,] 0.0104167 0.00565871 0.0020979
## [105,] 0.0060764 0.00070734 0.0013986
## [106,] 0.0104167 0.00636605 0.0125874
## [107,] 0.0026042 0.00282935 0.0111888
## [108,] 0.0095486 0.00212202 0.0020979
#Entropy formula
entropy<-function(x){return(x*log(x))}
apply(data_norm,2,entropy)->data_norm_2
print(data_norm_2)
## X1 X2 X7
## [1,] -0.037906 -0.0051310 -0.009192
## [2,] -0.041204 -0.0130618 -0.037443
## [3,] -0.041204 -0.0350233 -0.050270
## [4,] -0.034513 -0.0072664 -0.090806
## [5,] -0.031010 -0.0148572 -0.029013
## [6,] -0.027383 -0.0247457 -0.070744
## [7,] -0.044414 -0.0183021 -0.031898
## [8,] -0.037906 -0.0072664 -0.022966
## [9,] -0.041204 -0.0092815 -0.019776
## [10,] -0.047545 -0.0130618 -0.022966
## [11,] -0.050604 -0.0092815 -0.012937
## [12,] -0.044414 -0.0051310 -0.031898
## [13,] -0.047545 -0.0231819 -0.009192
## [14,] -0.062216 -0.0092815 -0.016445
## [15,] -0.041204 -0.0321917 -0.066426
## [16,] -0.047545 -0.0051310 -0.026040
## [17,] -0.037906 -0.0092815 -0.055071
## [18,] -0.031010 -0.0148572 -0.055071
## [19,] -0.034513 -0.0183021 -0.016445
## [20,] -0.044414 -0.0166018 -0.064221
## [21,] -0.047545 -0.0350233 -0.045293
## [22,] -0.044414 -0.0292813 -0.009192
## [23,] -0.044414 -0.0112073 -0.050270
## [24,] -0.031010 -0.0482329 -0.081068
## [25,] -0.034513 -0.0292813 -0.016445
## [26,] -0.041204 -0.0321917 -0.026040
## [27,] -0.047545 -0.0292813 -0.031898
## [28,] -0.027383 -0.0482329 -0.050270
## [29,] -0.041204 -0.1832362 -0.012937
## [30,] -0.044414 -0.0092815 -0.034705
## [31,] -0.041204 -0.0051310 -0.009192
## [32,] -0.023610 -0.0092815 -0.016445
## [33,] -0.050604 -0.0543636 -0.034705
## [34,] -0.034513 -0.0092815 -0.029013
## [35,] -0.031010 -0.0072664 -0.026040
## [36,] -0.075591 -0.0391399 -0.037443
## [37,] -0.041204 -0.0130618 -0.019776
## [38,] -0.037906 -0.0199631 -0.042732
## [39,] -0.037906 -0.0658963 -0.016445
## [40,] -0.037906 -0.0166018 -0.009192
## [41,] -0.044414 -0.0166018 -0.019776
## [42,] -0.044414 -0.0199631 -0.037443
## [43,] -0.027383 -0.0051310 -0.059716
## [44,] -0.050604 -0.0112073 -0.019776
## [45,] -0.034513 -0.1340968 -0.081068
## [46,] -0.034513 -0.0051310 -0.009192
## [47,] -0.041204 -0.0092815 -0.016445
## [48,] -0.031010 -0.0072664 -0.012937
## [49,] -0.041204 -0.0199631 -0.034705
## [50,] -0.041204 -0.0215886 -0.016445
## [51,] -0.034513 -0.0051310 -0.009192
## [52,] -0.027383 -0.0215886 -0.037443
## [53,] -0.053596 -0.0166018 -0.016445
## [54,] -0.027383 -0.0051310 -0.031898
## [55,] -0.056526 -0.0102542 -0.081068
## [56,] -0.044414 -0.0072664 -0.012937
## [57,] -0.027383 -0.0247457 -0.177730
## [58,] -0.073003 -0.0051310 -0.022966
## [59,] -0.044414 -0.0051310 -0.009192
## [60,] -0.047545 -0.0112073 -0.026040
## [61,] -0.034513 -0.0148572 -0.055071
## [62,] -0.103974 -0.2067470 -0.177730
## [63,] -0.037906 -0.0051310 -0.045293
## [64,] -0.047545 -0.1832362 -0.137556
## [65,] -0.034513 -0.0350233 -0.019776
## [66,] -0.047545 -0.0482329 -0.012937
## [67,] -0.041204 -0.1826396 -0.052691
## [68,] -0.041204 -0.0247457 -0.081068
## [69,] -0.047545 -0.0292813 -0.034705
## [70,] -0.027383 -0.0051310 -0.050270
## [71,] -0.047545 -0.0658963 -0.009192
## [72,] -0.041204 -0.0183021 -0.077014
## [73,] -0.041204 -0.0130618 -0.022966
## [74,] -0.037906 -0.0051310 -0.009192
## [75,] -0.050604 -0.0130618 -0.022966
## [76,] -0.041204 -0.0130618 -0.012937
## [77,] -0.041204 -0.1826396 -0.009192
## [78,] -0.041204 -0.0148572 -0.022966
## [79,] -0.041204 -0.0072664 -0.016445
## [80,] -0.034513 -0.0321917 -0.055071
## [81,] -0.037906 -0.0482329 -0.081068
## [82,] -0.103974 -0.2067470 -0.177730
## [83,] -0.031010 -0.1838310 -0.019776
## [84,] -0.044414 -0.0051310 -0.009192
## [85,] -0.041204 -0.0112073 -0.019776
## [86,] -0.031010 -0.0051310 -0.009192
## [87,] -0.027383 -0.0072664 -0.016445
## [88,] -0.056526 -0.0112073 -0.031898
## [89,] -0.041204 -0.0051310 -0.009192
## [90,] -0.059398 -0.0130618 -0.034705
## [91,] -0.037906 -0.0112073 -0.031898
## [92,] -0.023610 -0.0092815 -0.009192
## [93,] -0.019663 -0.0092815 -0.012937
## [94,] -0.053596 -0.1509303 -0.016445
## [95,] -0.037906 -0.0092815 -0.026040
## [96,] -0.078139 -0.0148572 -0.026040
## [97,] -0.041204 -0.0092815 -0.016445
## [98,] -0.037906 -0.0148572 -0.026040
## [99,] -0.044414 -0.0166018 -0.137556
## [100,] -0.103974 -0.2067470 -0.177730
## [101,] -0.031010 -0.0072664 -0.019776
## [102,] -0.027383 -0.0092815 -0.012937
## [103,] -0.047545 -0.0072664 -0.009192
## [104,] -0.047545 -0.0292813 -0.012937
## [105,] -0.031010 -0.0051310 -0.009192
## [106,] -0.047545 -0.0321917 -0.055071
## [107,] -0.015496 -0.0166018 -0.050270
## [108,] -0.044414 -0.0130618 -0.012937
#Number of variables in the factor:
ncol(data_norm)->m
#Entropy constant:
-1/log(m)->K
print(K)
## [1] -0.91024
#Computing the entropies
K*colSums(data_norm_2)->Ej
print(Ej)
## X1 X2 X7
## 4.1805 3.2029 3.7019
#Computing the specificities:
1-Ej->vj
print(vj)
## X1 X2 X7
## -3.1805 -2.2029 -2.7019
#Computing the weights:
prop.table(vj)->wj #same as using vj/sum(vj)
print(wj)
## X1 X2 X7
## 0.39337 0.27245 0.33417
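As with CRITIC, the entropy steps can be wrapped into a single helper that reproduces exactly the computation above (a sketch; the name entropia_pesos is introduced here only for illustration and keeps the same constant K = -1/log(m), with m the number of variables, used in this script):
#Illustrative wrapper of the entropy steps above
entropia_pesos<-function(datos){
  p<-apply(datos,MARGIN = 2,FUN = prop.table) #column-wise proportions
  K<- -1/log(ncol(datos)) #entropy constant as defined in this script
  Ej<-K*colSums(p*log(p)) #entropy of each variable
  vj<-1-Ej #specificities
  vj/sum(vj) #normalized weights
}
entropia_pesos(dplyr::select(mat_A,X1,X2,X7)) #reproduces wj: approximately 0.393, 0.272 and 0.334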
For factor 3, use the Ranking method to obtain the normalized weights for each variable (use the variable numbering to establish the hierarchy).
library(magrittr)
#Hierarchy (rank) vector
rj<-c(1,2)
names(rj)<-c("X5","X6")
#Function to generate the weights
ponderadores_subjetivos_rank_suma<-function(vector_jerarquias){
n<-length(vector_jerarquias)
vector_pesos<-n-vector_jerarquias+1
list(w_brutos=vector_pesos,w_normalizados=vector_pesos/sum(vector_pesos))
}
#Applying the function:
pesos_ranking_suma<-ponderadores_subjetivos_rank_suma(rj)
#Raw weights
pesos_ranking_suma$w_brutos
## X5 X6
## 2 1
#Normalized weights
pesos_ranking_suma$w_normalizados %>% round(digits = 3)
## X5 X6
## 0.667 0.333
#Plot of the normalized weights
barplot(pesos_ranking_suma$w_normalizados,
main = "Rank-Sum Weights",
ylim = c(0,0.7),col = "green")
#Hierarchy (rank) vector
rj<-c(1,2)
names(rj)<-c("X5","X6")
#Function to generate the weights
ponderadores_subjetivos_rank_reciproco<-function(vector_jerarquias){
vector_pesos<-1/vector_jerarquias
list(w_brutos=vector_pesos,w_normalizados=vector_pesos/sum(vector_pesos))
}
#Applying the function:
pesos_ranking_reciproco<-ponderadores_subjetivos_rank_reciproco(rj)
#Raw weights
pesos_ranking_reciproco$w_brutos
## X5 X6
## 1.0 0.5
#Normalized weights
pesos_ranking_reciproco$w_normalizados %>% round(digits = 3)
## X5 X6
## 0.667 0.333
#Plot of the normalized weights
barplot(pesos_ranking_reciproco$w_normalizados,
main = "Reciprocal-Rank Weights",
ylim = c(0,0.7),col = "gray")
library(magrittr)
#Hierarchy (rank) vector
rj<-c(1,2)
names(rj)<-c("X5","X6")
#Function to generate the weights
ponderadores_subjetivos_rank_exponencial<-function(vector_jerarquias,p=2){
n<-length(vector_jerarquias)
vector_pesos<-(n-vector_jerarquias+1)^p
list(w_brutos=vector_pesos,w_normalizados=vector_pesos/sum(vector_pesos))
}
#Applying the function:
pesos_ranking_exponencial<-ponderadores_subjetivos_rank_exponencial(rj)
#Raw weights
pesos_ranking_exponencial$w_brutos
## X5 X6
## 4 1
#Normalized weights
pesos_ranking_exponencial$w_normalizados %>% round(digits = 3)
## X5 X6
## 0.8 0.2
#Plot of the normalized weights (by default p=2)
barplot(pesos_ranking_exponencial$w_normalizados,
main = "Exponential-Rank Weights",
ylim = c(0,0.9),col = "purple")
#Comparing different values of "p"
par(mfrow=c(1,3))
for(p in 2:4){
pesos<-ponderadores_subjetivos_rank_exponencial(vector_jerarquias = rj,p = p)
barplot(pesos$w_normalizados,main = paste0("p=",p),ylim = c(0,1),col = "blue",cex.main=3,cex.axis = 3)
}
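For completeness, the three ranking schemes applied to the same hierarchy can be compared side by side (illustrative sketch using the objects already computed above):
#Side-by-side comparison of the normalized weights from the three ranking schemes
rbind(suma = pesos_ranking_suma$w_normalizados,
      reciproco = pesos_ranking_reciproco$w_normalizados,
      exponencial = pesos_ranking_exponencial$w_normalizados)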