This is an R Markdown document. Markdown is a simple formatting syntax for authoring HTML, PDF, and MS Word documents. For more details on using R Markdown see http://rmarkdown.rstudio.com.
When you click the Knit button, a document will be generated that includes both the content and the output of any embedded R code chunks within the document. You can embed an R code chunk like this:
summary(cars)
## speed dist
## Min. : 4.0 Min. : 2.00
## 1st Qu.:12.0 1st Qu.: 26.00
## Median :15.0 Median : 36.00
## Mean :15.4 Mean : 42.98
## 3rd Qu.:19.0 3rd Qu.: 56.00
## Max. :25.0 Max. :120.00
You can also embed plots, for example:
Note that the echo = FALSE parameter was added to the code chunk to prevent printing of the R code that generated the plot.
#LOAD THE DATA
library(readr)
library(kableExtra)
library(dplyr)
##
## Attaching package: 'dplyr'
## The following object is masked from 'package:kableExtra':
##
## group_rows
## The following objects are masked from 'package:stats':
##
## filter, lag
## The following objects are masked from 'package:base':
##
## intersect, setdiff, setequal, union
load("C:/Users/naye/Downloads/data_parcial_2.RData")
datos_parcial_2
## # A tibble: 108 x 10
## ID Municipio X1 X2 X3 X4 X5 X6 X7 X8
## <dbl> <chr> <dbl> <dbl> <dbl> <dbl> <dbl> <dbl> <dbl> <dbl>
## 1 1 ATIQUIZAYA 9 2 20 20 0 0 2 56.4
## 2 2 EL CARMEN 10 6 62.5 50 37.5 3.95 11 147.
## 3 3 ALEGRIA 10 20 50 50 50 2.56 16 135
## 4 4 SAN JULIAN 8 3 42.9 42.9 14.3 1.35 35 121.
## 5 5 TEJUTLA 7 7 75 75 75 9.09 8 202.
## 6 6 PASAQUINA 6 13 30 30 30 8.11 25 81
## 7 7 JUAYUA 11 9 72.7 72.7 63.6 9.21 9 197.
## 8 8 SAN SALVADOR 9 3 62.5 50 25 2.70 6 129.
## 9 9 SAN PABLO TACACHICO 10 4 50 43.7 12.5 4.55 5 126
## 10 10 TEPECOYO 12 6 73.3 53.3 40 8.45 6 156.
## # ... with 98 more rows
#NORMALIZED WEIGHTS FOR EACH FACTOR
library(dplyr)
norm_directa<-function(x){(x-min(x))/(max(x)-min(x))}
norm_inversa<-function(x){(max(x)-x)/(max(x)-min(x))}
#Selecting the variables positively correlated with municipal security
datos_parcial_2 %>%
select(X1,X2,X3,X5,X7,X8) %>%
apply(MARGIN = 2,FUN = norm_directa) %>% as.data.frame()->variables_corr_positiva
#Selecting the variables negatively correlated with municipal security
datos_parcial_2 %>%
select(X4,X6) %>%
apply(MARGIN = 2,FUN = norm_inversa) %>% as.data.frame()->variables_corr_negativa
#Combining and reordering the variables
variables_corr_positiva %>%
bind_cols(variables_corr_negativa) %>%
select(X1,X2,X3,X4,X5,X6,X7,X8)->datos_parcial_2_normalizados
head(datos_parcial_2_normalizados)
## X1 X2 X3 X4 X5 X6 X7
## 1 0.19354839 0.000000000 0.0400000 0.8000000 0.0000000 1.0000000 0.00000000
## 2 0.22580645 0.017167382 0.5500000 0.5000000 0.4285714 0.7844130 0.09890110
## 3 0.22580645 0.077253219 0.4000000 0.5000000 0.5714286 0.8599606 0.15384615
## 4 0.16129032 0.004291845 0.3142857 0.5714286 0.1632653 0.9261954 0.36263736
## 5 0.12903226 0.021459227 0.7000000 0.2500000 0.8571429 0.5034965 0.06593407
## 6 0.09677419 0.047210300 0.1600000 0.7000000 0.3428571 0.5571726 0.25274725
## X8
## 1 0.1582266
## 2 0.5167488
## 3 0.4679803
## 4 0.4133709
## 5 0.7339901
## 6 0.2551724
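As a quick sanity check, the range of every min-max normalized column can be verified; each variable should span exactly the interval [0, 1]:
#Range of each normalized column (first row = minimum, second row = maximum)
apply(datos_parcial_2_normalizados, MARGIN = 2, FUN = range)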
#Correlation matrix
library(PerformanceAnalytics)
## Loading required package: xts
## Loading required package: zoo
##
## Attaching package: 'zoo'
## The following objects are masked from 'package:base':
##
## as.Date, as.Date.numeric
##
## Attaching package: 'xts'
## The following objects are masked from 'package:dplyr':
##
## first, last
##
## Attaching package: 'PerformanceAnalytics'
## The following object is masked from 'package:graphics':
##
## legend
chart.Correlation(as.matrix(datos_parcial_2_normalizados),histogram = TRUE,pch=12)
#KMO
library(rela)
KMO<-paf(as.matrix(datos_parcial_2_normalizados))$KMO
print(KMO)
## [1] 0.67931
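As a cross-check, the overall KMO statistic can also be obtained from psych::KMO(); minor differences from rela::paf() are possible due to rounding.
#Overall measure of sampling adequacy (MSA) from psych, as a cross-check
psych::KMO(cor(datos_parcial_2_normalizados))$MSA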
####################
#Bartlett's test
library(psych)
options(scipen = 99999)
Bartlett<-cortest.bartlett(datos_parcial_2_normalizados)
## R was not square, finding R from data
print(Bartlett)
## $chisq
## [1] 1025.9
##
## $p.value
## [1] 0.0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000046951
##
## $df
## [1] 28
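The message "R was not square, finding R from data" appears because the raw data were passed; an equivalent call (a sketch) supplies the correlation matrix and the sample size explicitly:
#Bartlett's test on the correlation matrix; the sample size must then be given explicitly
cortest.bartlett(cor(datos_parcial_2_normalizados),
                 n = nrow(datos_parcial_2_normalizados))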
#FACTOR ANALYSIS
library(FactoMineR)
library(factoextra)
## Loading required package: ggplot2
##
## Attaching package: 'ggplot2'
## The following objects are masked from 'package:psych':
##
## %+%, alpha
## Welcome! Want to learn more? See two factoextra-related books at https://goo.gl/ve3WBa
library(kableExtra)
Rx<-cor(datos_parcial_2_normalizados)
PC<-princomp(x = datos_parcial_2_normalizados,cor = TRUE,fix_sign = FALSE)
variables_pca<-get_pca_var(PC)
factoextra::get_eig(PC) %>% kable(caption="Resumen de PCA",
align = "c",
digits = 2) %>%
kable_material(html_font = "sans-serif") %>%
kable_styling(bootstrap_options = c("hover"))
|  | eigenvalue | variance.percent | cumulative.variance.percent |
|---|---|---|---|
| Dim.1 | 3.90 | 48.72 | 48.72 |
| Dim.2 | 1.96 | 24.55 | 73.27 |
| Dim.3 | 0.84 | 10.52 | 83.78 |
| Dim.4 | 0.50 | 6.24 | 90.03 |
| Dim.5 | 0.45 | 5.68 | 95.70 |
| Dim.6 | 0.28 | 3.45 | 99.16 |
| Dim.7 | 0.07 | 0.82 | 99.98 |
| Dim.8 | 0.00 | 0.02 | 100.00 |
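Under the Kaiser criterion (eigenvalue greater than 1), also marked by the horizontal line in the scree plot below, the number of components to retain can be counted directly:
#Number of components with eigenvalue > 1 (Kaiser criterion)
sum(factoextra::get_eig(PC)$eigenvalue > 1)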
fviz_eig(PC,
choice = "eigenvalue",
barcolor = "red",
barfill = "red",
addlabels = TRUE)+
  labs(title = "Gráfico de Sedimentación",subtitle = "Usando princomp, con Autovalores")+
xlab(label = "Componentes")+
ylab(label = "Autovalores")+geom_hline(yintercept = 1)
library(corrplot)
## corrplot 0.90 loaded
#2-Factor Model (Rotated)
numero_de_factores<-2
modelo_factores<-principal(r = Rx,
nfactors = numero_de_factores,
covar = FALSE,
rotate = "varimax")
modelo_factores
## Principal Components Analysis
## Call: principal(r = Rx, nfactors = numero_de_factores, rotate = "varimax",
## covar = FALSE)
## Standardized loadings (pattern matrix) based upon correlation matrix
## RC1 RC2 h2 u2 com
## X1 -0.14 0.80 0.67 0.335 1.1
## X2 0.06 0.84 0.71 0.293 1.0
## X3 0.93 -0.10 0.88 0.117 1.0
## X4 -0.94 0.06 0.90 0.104 1.0
## X5 0.79 -0.06 0.62 0.376 1.0
## X6 -0.69 0.03 0.48 0.516 1.0
## X7 -0.07 0.83 0.69 0.311 1.0
## X8 0.95 -0.07 0.91 0.088 1.0
##
## RC1 RC2
## SS loadings 3.80 2.06
## Proportion Var 0.48 0.26
## Cumulative Var 0.48 0.73
## Proportion Explained 0.65 0.35
## Cumulative Proportion 0.65 1.00
##
## Mean item complexity = 1
## Test of the hypothesis that 2 components are sufficient.
##
## The root mean square of the residuals (RMSR) is 0.1
##
## Fit based upon off diagonal values = 0.96
correlaciones_modelo<-variables_pca$coord
rotacion<-varimax(correlaciones_modelo[,1:numero_de_factores])
correlaciones_modelo_rotada<-rotacion$loadings
corrplot(correlaciones_modelo_rotada[,1:numero_de_factores],
is.corr = FALSE,
method = "square",
addCoef.col="grey",
number.cex = 0.75)
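If factor scores for each municipality are also needed, the same 2-factor varimax model can be fitted on the normalized data instead of the correlation matrix (a sketch; modelo_con_scores is an illustrative name, and the loadings should match the rotated solution above):
#Illustrative refit on the data itself so that factor scores become available
modelo_con_scores<-principal(r = datos_parcial_2_normalizados,
                             nfactors = numero_de_factores,
                             rotate = "varimax",
                             scores = TRUE)
head(modelo_con_scores$scores)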
library(kableExtra)
cargas<-rotacion$loadings[1:6,1:numero_de_factores]
ponderadores<-prop.table(apply(cargas^2,MARGIN = 2,sum))
t(ponderadores) %>% kable(caption="Ponderadores de los Factores Extraídos",
align = "c",
digits = 2) %>%
kable_material(html_font = "sans-serif") %>%
kable_styling(bootstrap_options = c("striped", "hover"))
| Dim.1 | Dim.2 |
|---|---|
| 0.68 | 0.32 |
#########################################################
contribuciones<-apply(cargas^2,MARGIN = 2,prop.table)
contribuciones %>% kable(caption="Contribución de las variables en los Factores",
align = "c",
digits = 2) %>%
kable_material(html_font = "sans-serif") %>%
kable_styling(bootstrap_options = c("striped", "hover"))
|  | Dim.1 | Dim.2 |
|---|---|---|
| X1 | 0.01 | 0.47 |
| X2 | 0.00 | 0.51 |
| X3 | 0.30 | 0.01 |
| X4 | 0.31 | 0.00 |
| X5 | 0.21 | 0.00 |
| X6 | 0.17 | 0.00 |
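One possible way (a sketch, not computed above) to collapse the factor weights and the variable contributions into a single weight per variable is to weight each variable's contribution by its factor's weight and renormalize; pesos_variables is an illustrative name:
#Illustrative per-variable weights: contributions weighted by the factor weights
pesos_variables<-prop.table(as.vector(contribuciones %*% ponderadores))
names(pesos_variables)<-rownames(contribuciones)
round(pesos_variables,3)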
#2 CRITIC METHOD
#Functions to normalize the data
norm_directa <- function(x){
return((x-min(x)) / (max(x)-min(x)))
}
norm_inversa <- function(x){
return((max(x)-x) / (max(x)-min(x)))
}
# Normalizing the data
library(dplyr)
datos_parcial_2 %>%
  dplyr::select(X1,X2,X3,X4,X7,X8) %>%
  dplyr::transmute(X3=norm_directa(X3),
                   X7=norm_directa(X7),
                   X2=norm_directa(X2),
                   X1=norm_directa(X1),
                   X4=norm_inversa(X4),
                   X8=norm_directa(X8))->data_factor_1
print(data_factor_1)
## # A tibble: 108 x 6
## X3 X7 X2 X1 X4 X8
## <dbl> <dbl> <dbl> <dbl> <dbl> <dbl>
## 1 0.04 0 0 0.194 0.8 0.158
## 2 0.55 0.0989 0.0172 0.226 0.5 0.517
## 3 0.4 0.154 0.0773 0.226 0.5 0.468
## 4 0.314 0.363 0.00429 0.161 0.571 0.413
## 5 0.7 0.0659 0.0215 0.129 0.25 0.734
## 6 0.16 0.253 0.0472 0.0968 0.7 0.255
## 7 0.673 0.0769 0.0300 0.258 0.273 0.714
## 8 0.55 0.0440 0.00429 0.194 0.5 0.446
## 9 0.4 0.0330 0.00858 0.226 0.563 0.433
## 10 0.68 0.0440 0.0172 0.290 0.467 0.552
## # ... with 98 more rows
#Computing the standard deviations of the variables
data_factor_1 %>% dplyr::summarise(S3=sd(X3),S7=sd(X7),S8=sd(X8))-> sd_vector
print(sd_vector)
## # A tibble: 1 x 3
## S3 S7 S8
## <dbl> <dbl> <dbl>
## 1 0.246 0.209 0.209
#####################
#Computing the correlation matrix
cor(data_factor_1)->mat_R_F1
print(mat_R_F1)
## X3 X7 X2 X1 X4 X8
## X3 1.000000 -0.16395 -0.0257521 -0.19978 -0.938716 0.9590445
## X7 -0.163948 1.00000 0.5399241 0.51164 0.111487 -0.1193517
## X2 -0.025752 0.53992 1.0000000 0.50899 -0.004519 -0.0078062
## X1 -0.199778 0.51164 0.5089902 1.00000 0.182149 -0.1867325
## X4 -0.938716 0.11149 -0.0045190 0.18215 1.000000 -0.9958479
## X8 0.959045 -0.11935 -0.0078062 -0.18673 -0.995848 1.0000000
#Computing the raw weights
1-mat_R_F1->sum_data
colSums(sum_data)->sum_vector
sd_vector*sum_vector->vj
print(vj)
## S3 S7 S8
## 1 1.3223 0.86001 0.8327
##############
#Computing the net (normalized) weights
vj/sum(vj)->wj
print(wj)
## S3 S7 S8
## 1 0.43857 0.28524 0.27618
#############
#Weights:
print(round(wj*100,2))
## S3 S7 S8
## 1 43.86 28.52 27.62
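The CRITIC steps above (dispersion, correlation and aggregation) can be wrapped in a small helper; this sketch applies the formula v_j = s_j * sum_k(1 - r_jk) to every column of the normalized matrix, so it covers all six columns rather than only the three weights reported above:
#Minimal CRITIC helper (assumes the input matrix is already normalized)
pesos_critic<-function(x){
  s<-apply(x,MARGIN = 2,FUN = sd)   #dispersion of each variable
  R<-cor(x)                         #correlation between variables
  vj<-s*colSums(1-R)                #raw weights
  vj/sum(vj)                        #normalized weights
}
round(pesos_critic(data_factor_1),3)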
#ENTROPY METHOD
#Normalizing the data
datos_parcial_2 %>% dplyr::select(X1,X2,X3,X4,X5,X6,X7,X8)->data_norm
apply(data_norm,2,prop.table)->data_norm
print(data_norm)
## X1 X2 X3 X4 X5 X6 X7
## [1,] 0.0078125 0.00070734 0.0031877 0.0034612 0.0000000 0.0000000 0.0013986
## [2,] 0.0086806 0.00212202 0.0099616 0.0086530 0.0114900 0.0066718 0.0076923
## [3,] 0.0086806 0.00707339 0.0079693 0.0086530 0.0153199 0.0043338 0.0111888
## [4,] 0.0069444 0.00106101 0.0068308 0.0074168 0.0043771 0.0022840 0.0244755
## [5,] 0.0060764 0.00247569 0.0119539 0.0129795 0.0229799 0.0153652 0.0055944
## [6,] 0.0052083 0.00459770 0.0047816 0.0051918 0.0091920 0.0137041 0.0174825
## [7,] 0.0095486 0.00318302 0.0115916 0.0125862 0.0194981 0.0155674 0.0062937
## [8,] 0.0078125 0.00106101 0.0099616 0.0086530 0.0076600 0.0045680 0.0041958
## [9,] 0.0086806 0.00141468 0.0079693 0.0075714 0.0038300 0.0076826 0.0034965
## [10,] 0.0104167 0.00212202 0.0116882 0.0092299 0.0122560 0.0142832 0.0041958
## [11,] 0.0112847 0.00141468 0.0143447 0.0121142 0.0137879 0.0197553 0.0020979
## [12,] 0.0095486 0.00070734 0.0101427 0.0094396 0.0139272 0.0114201 0.0062937
## [13,] 0.0104167 0.00424403 0.0139462 0.0151427 0.0229799 0.0130014 0.0013986
## [14,] 0.0147569 0.00141468 0.0039846 0.0043265 0.0025533 0.0024495 0.0027972
## [15,] 0.0086806 0.00636605 0.0079693 0.0086530 0.0000000 0.0000000 0.0160839
## [16,] 0.0104167 0.00070734 0.0112507 0.0122160 0.0072094 0.0100906 0.0048951
## [17,] 0.0078125 0.00141468 0.0119539 0.0086530 0.0000000 0.0000000 0.0125874
## [18,] 0.0060764 0.00247569 0.0095631 0.0103836 0.0091920 0.0123671 0.0125874
## [19,] 0.0069444 0.00318302 0.0139462 0.0151427 0.0268099 0.0149762 0.0027972
## [20,] 0.0095486 0.00282935 0.0127508 0.0138448 0.0183839 0.0095670 0.0153846
## [21,] 0.0104167 0.00707339 0.0073052 0.0064897 0.0063833 0.0027983 0.0097902
## [22,] 0.0095486 0.00565871 0.0063754 0.0069224 0.0000000 0.0000000 0.0013986
## [23,] 0.0095486 0.00176835 0.0057958 0.0047198 0.0055709 0.0047611 0.0111888
## [24,] 0.0060764 0.01061008 0.0053128 0.0057687 0.0051066 0.0034493 0.0209790
## [25,] 0.0069444 0.00565871 0.0099616 0.0086530 0.0153199 0.0088957 0.0027972
## [26,] 0.0086806 0.00636605 0.0132821 0.0144217 0.0204266 0.0086676 0.0048951
## [27,] 0.0104167 0.00565871 0.0063754 0.0069224 0.0000000 0.0000000 0.0062937
## [28,] 0.0052083 0.01061008 0.0079693 0.0086530 0.0153199 0.0021395 0.0111888
## [29,] 0.0086806 0.06825818 0.0073562 0.0079874 0.0070707 0.0072436 0.0020979
## [30,] 0.0095486 0.00141468 0.0039846 0.0000000 0.0000000 0.0000000 0.0069930
## [31,] 0.0086806 0.00070734 0.0026564 0.0028843 0.0000000 0.0000000 0.0013986
## [32,] 0.0043403 0.00141468 0.0119539 0.0129795 0.0229799 0.0153652 0.0027972
## [33,] 0.0112847 0.01237843 0.0132821 0.0144217 0.0204266 0.0086676 0.0069930
## [34,] 0.0069444 0.00141468 0.0123966 0.0115373 0.0170222 0.0111196 0.0055944
## [35,] 0.0060764 0.00106101 0.0119539 0.0129795 0.0134050 0.0166637 0.0048951
## [36,] 0.0190972 0.00813439 0.0059769 0.0064897 0.0038300 0.0023153 0.0076923
## [37,] 0.0086806 0.00212202 0.0106257 0.0096144 0.0102133 0.0133435 0.0034965
## [38,] 0.0078125 0.00353669 0.0109577 0.0097346 0.0153199 0.0193163 0.0090909
## [39,] 0.0078125 0.01591512 0.0039846 0.0043265 0.0000000 0.0000000 0.0027972
## [40,] 0.0078125 0.00282935 0.0029885 0.0032449 0.0057450 0.0075680 0.0013986
## [41,] 0.0095486 0.00282935 0.0119539 0.0086530 0.0153199 0.0042789 0.0034965
## [42,] 0.0095486 0.00353669 0.0101427 0.0094396 0.0055709 0.0023805 0.0076923
## [43,] 0.0052083 0.00070734 0.0095631 0.0103836 0.0122560 0.0091361 0.0139860
## [44,] 0.0112847 0.00176835 0.0117806 0.0127914 0.0159860 0.0293944 0.0034965
## [45,] 0.0069444 0.04244032 0.0079693 0.0086530 0.0091920 0.0133435 0.0209790
## [46,] 0.0069444 0.00070734 0.0110344 0.0106498 0.0094277 0.0095221 0.0013986
## [47,] 0.0086806 0.00141468 0.0074380 0.0080761 0.0040853 0.0051217 0.0027972
## [48,] 0.0060764 0.00106101 0.0117806 0.0127914 0.0186504 0.0309469 0.0020979
## [49,] 0.0086806 0.00353669 0.0057958 0.0062931 0.0083563 0.0046949 0.0069930
## [50,] 0.0086806 0.00389036 0.0095631 0.0103836 0.0122560 0.0142832 0.0027972
## [51,] 0.0069444 0.00070734 0.0073562 0.0053249 0.0023569 0.0024495 0.0013986
## [52,] 0.0052083 0.00389036 0.0099616 0.0108162 0.0076600 0.0130014 0.0076923
## [53,] 0.0121528 0.00282935 0.0079693 0.0086530 0.0000000 0.0000000 0.0027972
## [54,] 0.0052083 0.00070734 0.0079693 0.0086530 0.0051066 0.0022536 0.0062937
## [55,] 0.0130208 0.00159151 0.0079693 0.0086530 0.0000000 0.0000000 0.0209790
## [56,] 0.0095486 0.00106101 0.0079693 0.0060571 0.0061280 0.0079227 0.0020979
## [57,] 0.0052083 0.00459770 0.0043469 0.0047198 0.0055709 0.0047611 0.0650350
## [58,] 0.0182292 0.00070734 0.0053128 0.0057687 0.0102133 0.0025227 0.0041958
## [59,] 0.0095486 0.00070734 0.0146103 0.0115373 0.0102133 0.0093899 0.0013986
## [60,] 0.0104167 0.00176835 0.0075898 0.0065928 0.0102133 0.0179261 0.0048951
## [61,] 0.0069444 0.00247569 0.0053128 0.0057687 0.0000000 0.0000000 0.0125874
## [62,] 0.0295139 0.08311229 0.0088547 0.0096144 0.0068089 0.0046306 0.0650350
## [63,] 0.0078125 0.00070734 0.0106257 0.0115373 0.0127666 0.0115766 0.0097902
## [64,] 0.0104167 0.06825818 0.0111570 0.0121142 0.0122560 0.0091361 0.0440559
## [65,] 0.0069444 0.00707339 0.0092975 0.0072108 0.0102133 0.0143845 0.0034965
## [66,] 0.0104167 0.01061008 0.0073052 0.0064897 0.0051066 0.0137974 0.0020979
## [67,] 0.0086806 0.06790451 0.0095631 0.0080761 0.0102133 0.0120727 0.0118881
## [68,] 0.0086806 0.00459770 0.0100354 0.0070506 0.0124829 0.0106850 0.0209790
## [69,] 0.0104167 0.00565871 0.0061983 0.0067301 0.0085111 0.0128044 0.0069930
## [70,] 0.0052083 0.00070734 0.0127508 0.0138448 0.0061280 0.0091361 0.0111888
## [71,] 0.0104167 0.01591512 0.0059769 0.0064897 0.0076600 0.0022840 0.0013986
## [72,] 0.0086806 0.00318302 0.0124736 0.0135438 0.0146539 0.0273411 0.0195804
## [73,] 0.0086806 0.00212202 0.0053128 0.0057687 0.0000000 0.0000000 0.0041958
## [74,] 0.0078125 0.00070734 0.0087662 0.0069224 0.0015320 0.0027708 0.0013986
## [75,] 0.0112847 0.00212202 0.0141676 0.0134602 0.0136177 0.0193163 0.0041958
## [76,] 0.0086806 0.00212202 0.0159385 0.0173060 0.0102133 0.0068521 0.0020979
## [77,] 0.0086806 0.06790451 0.0106257 0.0103836 0.0102133 0.0120727 0.0013986
## [78,] 0.0086806 0.00247569 0.0043469 0.0047198 0.0027854 0.0021395 0.0041958
## [79,] 0.0086806 0.00106101 0.0123966 0.0124988 0.0136177 0.0193163 0.0027972
## [80,] 0.0069444 0.00636605 0.0079693 0.0086530 0.0153199 0.0059304 0.0125874
## [81,] 0.0078125 0.01061008 0.0132821 0.0144217 0.0102133 0.0059304 0.0209790
## [82,] 0.0295139 0.08311229 0.0047816 0.0051918 0.0061280 0.0104011 0.0650350
## [83,] 0.0060764 0.06861185 0.0139462 0.0151427 0.0114900 0.0187797 0.0034965
## [84,] 0.0095486 0.00070734 0.0159385 0.0173060 0.0000000 0.0000000 0.0013986
## [85,] 0.0086806 0.00176835 0.0085823 0.0079874 0.0047138 0.0048991 0.0034965
## [86,] 0.0060764 0.00070734 0.0061302 0.0053249 0.0023569 0.0024856 0.0013986
## [87,] 0.0052083 0.00106101 0.0152140 0.0133728 0.0167127 0.0289745 0.0027972
## [88,] 0.0130208 0.00176835 0.0035419 0.0038458 0.0034044 0.0023475 0.0062937
## [89,] 0.0086806 0.00070734 0.0084380 0.0091620 0.0144188 0.0166637 0.0013986
## [90,] 0.0138889 0.00212202 0.0026564 0.0028843 0.0000000 0.0000000 0.0069930
## [91,] 0.0078125 0.00176835 0.0123966 0.0134602 0.0204266 0.0131702 0.0062937
## [92,] 0.0043403 0.00141468 0.0127508 0.0115373 0.0142986 0.0231985 0.0013986
## [93,] 0.0034722 0.00141468 0.0053128 0.0057687 0.0000000 0.0000000 0.0020979
## [94,] 0.0121528 0.05057471 0.0119539 0.0100952 0.0025533 0.0024495 0.0027972
## [95,] 0.0078125 0.00141468 0.0079693 0.0086530 0.0000000 0.0000000 0.0048951
## [96,] 0.0199653 0.00247569 0.0083157 0.0082768 0.0093252 0.0184863 0.0048951
## [97,] 0.0086806 0.00141468 0.0097402 0.0105759 0.0102133 0.0160969 0.0027972
## [98,] 0.0078125 0.00247569 0.0110877 0.0105341 0.0093252 0.0184863 0.0048951
## [99,] 0.0095486 0.00282935 0.0089068 0.0081440 0.0099129 0.0094375 0.0440559
## [100,] 0.0295139 0.08311229 0.0079693 0.0086530 0.0065657 0.0073486 0.0650350
## [101,] 0.0060764 0.00106101 0.0159385 0.0173060 0.0000000 0.0000000 0.0034965
## [102,] 0.0052083 0.00141468 0.0119539 0.0129795 0.0153199 0.0137041 0.0020979
## [103,] 0.0104167 0.00106101 0.0111570 0.0112489 0.0183839 0.0281696 0.0013986
## [104,] 0.0104167 0.00565871 0.0121436 0.0123614 0.0189675 0.0305171 0.0020979
## [105,] 0.0060764 0.00070734 0.0106257 0.0115373 0.0102133 0.0044478 0.0013986
## [106,] 0.0104167 0.00636605 0.0079693 0.0086530 0.0065657 0.0073486 0.0125874
## [107,] 0.0026042 0.00282935 0.0113847 0.0123614 0.0218856 0.0108345 0.0111888
## [108,] 0.0095486 0.00212202 0.0065203 0.0070797 0.0083563 0.0158454 0.0020979
## X8
## [1,] 0.0035170
## [2,] 0.0091902
## [3,] 0.0084185
## [4,] 0.0075543
## [5,] 0.0126277
## [6,] 0.0050511
## [7,] 0.0123131
## [8,] 0.0080677
## [9,] 0.0078572
## [10,] 0.0097405
## [11,] 0.0126183
## [12,] 0.0097677
## [13,] 0.0147323
## [14,] 0.0043288
## [15,] 0.0084185
## [16,] 0.0114814
## [17,] 0.0093694
## [18,] 0.0102456
## [19,] 0.0147323
## [20,] 0.0135194
## [21,] 0.0068101
## [22,] 0.0067847
## [23,] 0.0050908
## [24,] 0.0056539
## [25,] 0.0090421
## [26,] 0.0140723
## [27,] 0.0070902
## [28,] 0.0084185
## [29,] 0.0079915
## [30,] 0.0010133
## [31,] 0.0029724
## [32,] 0.0126277
## [33,] 0.0142282
## [34,] 0.0116473
## [35,] 0.0127173
## [36,] 0.0064074
## [37,] 0.0097765
## [38,] 0.0101684
## [39,] 0.0045054
## [40,] 0.0034297
## [41,] 0.0093694
## [42,] 0.0095523
## [43,] 0.0103890
## [44,] 0.0124555
## [45,] 0.0084185
## [46,] 0.0106730
## [47,] 0.0078739
## [48,] 0.0125992
## [49,] 0.0062983
## [50,] 0.0102310
## [51,] 0.0058617
## [52,] 0.0107024
## [53,] 0.0084185
## [54,] 0.0086991
## [55,] 0.0086991
## [56,] 0.0067971
## [57,] 0.0049604
## [58,] 0.0060904
## [59,] 0.0121756
## [60,] 0.0070495
## [61,] 0.0059137
## [62,] 0.0095132
## [63,] 0.0113234
## [64,] 0.0117858
## [65,] 0.0076909
## [66,] 0.0068803
## [67,] 0.0084642
## [68,] 0.0078826
## [69,] 0.0065893
## [70,] 0.0134695
## [71,] 0.0066412
## [72,] 0.0125586
## [73,] 0.0056539
## [74,] 0.0074238
## [75,] 0.0135180
## [76,] 0.0168369
## [77,] 0.0099483
## [78,] 0.0046826
## [79,] 0.0120699
## [80,] 0.0087147
## [81,] 0.0142282
## [82,] 0.0051259
## [83,] 0.0147323
## [84,] 0.0168369
## [85,] 0.0077373
## [86,] 0.0054732
## [87,] 0.0137133
## [88,] 0.0043236
## [89,] 0.0089283
## [90,] 0.0028893
## [91,] 0.0130954
## [92,] 0.0118440
## [93,] 0.0058271
## [94,] 0.0098839
## [95,] 0.0084185
## [96,] 0.0083534
## [97,] 0.0104486
## [98,] 0.0106119
## [99,] 0.0084423
## [100,] 0.0086590
## [101,] 0.0168369
## [102,] 0.0126485
## [103,] 0.0113369
## [104,] 0.0122194
## [105,] 0.0114637
## [106,] 0.0079641
## [107,] 0.0120976
## [108,] 0.0069870
#Number of variables in the factor:
ncol(data_norm)->m
#Entropy constant:
-1/log(m)->K
print(K)
## [1] -0.4809
#Computing the entropies
K*colSums(data_norm)->Ej
print(Ej)
## X1 X2 X3 X4 X5 X6 X7 X8
## -0.4809 -0.4809 -0.4809 -0.4809 -0.4809 -0.4809 -0.4809 -0.4809
#Computing the specificities:
1-Ej->vj
print(vj)
## X1 X2 X3 X4 X5 X6 X7 X8
## 1.4809 1.4809 1.4809 1.4809 1.4809 1.4809 1.4809 1.4809
################
#Computing the weights:
prop.table(vj)->wj #equivalent to using vj/sum(vj)
print(wj)
## X1 X2 X3 X4 X5 X6 X7 X8
## 0.125 0.125 0.125 0.125 0.125 0.125 0.125 0.125
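Because each column of proportions sums to 1, the calculation above yields the same entropy for every variable and therefore equal weights of 0.125. For reference, the textbook Shannon-entropy weighting uses E_j = -K * sum_i(p_ij * log(p_ij)) with K = 1/log(n), where n is the number of municipalities; a minimal sketch of that formulation (entropia_shannon is an illustrative name):
#Sketch of the standard Shannon-entropy weights (0*log(0) is treated as 0 via na.rm)
entropia_shannon<-function(p){
  n<-nrow(p)                                      #number of alternatives (municipalities)
  K<-1/log(n)                                     #entropy constant
  Ej<- -K*colSums(p*log(p),na.rm = TRUE)          #entropy of each variable
  dj<-1-Ej                                        #degree of diversification
  dj/sum(dj)                                      #normalized weights
}
round(entropia_shannon(data_norm),3)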
#RANKING METHOD
#Rank-sum
library(magrittr)
#Vector of ranks
rj<-c(1,2,3,4,5,6,7,8)
names(rj)<-c("X1","X2","X3","X4","X5","X6","X7","X8")
#Function to generate the weights
ponderadores_subjetivos_rank_suma<-function(vector_jerarquias){
n<-length(vector_jerarquias)
vector_pesos<-n-vector_jerarquias+1
list(w_brutos=vector_pesos,w_normalizados=vector_pesos/sum(vector_pesos))
}
#Applying the function:
pesos_ranking_suma<-ponderadores_subjetivos_rank_suma(rj)
#Raw weights
pesos_ranking_suma$w_brutos
## X1 X2 X3 X4 X5 X6 X7 X8
## 8 7 6 5 4 3 2 1
#Normalized weights
pesos_ranking_suma$w_normalizados %>% round(digits = 3)
## X1 X2 X3 X4 X5 X6 X7 X8
## 0.222 0.194 0.167 0.139 0.111 0.083 0.056 0.028
#Plot of the normalized weights
barplot(pesos_ranking_suma$w_normalizados,
main = "Ponderadores Ranking de Suma",
ylim = c(0,0.5),col = "Blue")
library(magrittr)
#Vector of ranks
rj<-c(1,2,3,4,5,6,7,8)
names(rj)<-c("X1","X2","X3","X4","X5","X6","X7","X8")
#Function to generate the weights
ponderadores_subjetivos_rank_reciproco<-function(vector_jerarquias){
vector_pesos<-1/vector_jerarquias
list(w_brutos=vector_pesos,w_normalizados=vector_pesos/sum(vector_pesos))
}
#Applying the function:
pesos_ranking_reciproco<-ponderadores_subjetivos_rank_reciproco(rj)
#Raw weights
pesos_ranking_reciproco$w_brutos
## X1 X2 X3 X4 X5 X6 X7 X8
## 1.00000 0.50000 0.33333 0.25000 0.20000 0.16667 0.14286 0.12500
#Normalized weights
pesos_ranking_reciproco$w_normalizados %>% round(digits = 3)
## X1 X2 X3 X4 X5 X6 X7 X8
## 0.368 0.184 0.123 0.092 0.074 0.061 0.053 0.046
#Plot of the normalized weights
barplot(pesos_ranking_reciproco$w_normalizados,
main = "Ponderadores Ranking Recíproco",
ylim = c(0,0.5),col = "green")
library(magrittr)
#Vector of ranks
rj<-c(1,2,3,4,5,6,7,8)
names(rj)<-c("X1","X2","X3","X4","X5","X6","X7","X8")
#Function to generate the weights
ponderadores_subjetivos_rank_exponencial<-function(vector_jerarquias,p=2){
n<-length(vector_jerarquias)
vector_pesos<-(n-vector_jerarquias+1)^p
list(w_brutos=vector_pesos,w_normalizados=vector_pesos/sum(vector_pesos))
}
#Applying the function:
pesos_ranking_exponencial<-ponderadores_subjetivos_rank_exponencial(rj)
#Raw weights
pesos_ranking_exponencial$w_brutos
## X1 X2 X3 X4 X5 X6 X7 X8
## 64 49 36 25 16 9 4 1
#Normalized weights
pesos_ranking_exponencial$w_normalizados %>% round(digits = 3)
## X1 X2 X3 X4 X5 X6 X7 X8
## 0.314 0.240 0.176 0.123 0.078 0.044 0.020 0.005
#Plot of the normalized weights (p=2 by default)
barplot(pesos_ranking_exponencial$w_normalizados,
main = "Ponderadores Ranking Exponencial",
ylim = c(0,0.5),col = "Blue")
library(FuzzyAHP)
## Loading required package: MASS
##
## Attaching package: 'MASS'
## The following object is masked from 'package:dplyr':
##
## select
valores_matriz_comparacion = c(1,4,7,
NA,1,5,
NA,NA,1)
matriz_comparacion<-matrix(valores_matriz_comparacion,
nrow = 3, ncol = 3, byrow = TRUE)
matriz_comparacion<-pairwiseComparisonMatrix(matriz_comparacion)
matriz_comparacion@variableNames<-c("price","slope","view")
show(matriz_comparacion)
## An object of class "PairwiseComparisonMatrix"
## Slot "valuesChar":
## [,1] [,2] [,3]
## [1,] "1" "4" "7"
## [2,] "1/4" "1" "5"
## [3,] "1/7" "1/5" "1"
##
## Slot "values":
## [,1] [,2] [,3]
## [1,] 1.00000 4.0 7
## [2,] 0.25000 1.0 5
## [3,] 0.14286 0.2 1
##
## Slot "variableNames":
## [1] "price" "slope" "view"
# Computing the weights:
pesos_normalizados = calculateWeights(matriz_comparacion)
show(pesos_normalizados)
## An object of class "Weights"
## Slot "weights":
## w_price w_slope w_view
## 0.687086 0.243741 0.069173
barplot(pesos_normalizados@weights,
main = "Ponderadores por comparación de pares",
ylim = c(0,0.7),col = "blue")
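Since AHP weights are only meaningful when the pairwise judgments are coherent, the consistency of the matrix can also be checked; a sketch assuming FuzzyAHP::consistencyRatio() as the entry point (a consistency ratio below 0.1 is conventionally considered acceptable):
#Consistency check of the pairwise comparison matrix (assumed entry point: consistencyRatio)
consistencyRatio(matriz_comparacion)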