Objective 2: ANCOVA

Published

September 2, 2024

ANCOVA

rm(list = ls())

library(tidyverse)
── Attaching core tidyverse packages ──────────────────────── tidyverse 2.0.0 ──
✔ dplyr     1.1.4     ✔ readr     2.1.5
✔ forcats   1.0.0     ✔ stringr   1.5.1
✔ ggplot2   3.5.1     ✔ tibble    3.2.1
✔ lubridate 1.9.3     ✔ tidyr     1.3.1
✔ purrr     1.0.2     
── Conflicts ────────────────────────────────────────── tidyverse_conflicts() ──
✖ dplyr::filter() masks stats::filter()
✖ dplyr::lag()    masks stats::lag()
ℹ Use the conflicted package (<http://conflicted.r-lib.org/>) to force all conflicts to become errors
library(ggpubr)
library(rstatix)

Attaching package: 'rstatix'

The following object is masked from 'package:stats':

    filter
library(broom)
library(tuneR)
library(readxl)
library(tidyverse)
library(egg)
Loading required package: gridExtra

Attaching package: 'gridExtra'

The following object is masked from 'package:dplyr':

    combine


Attaching package: 'egg'

The following object is masked from 'package:ggpubr':

    ggarrange
library(ggpmisc)
Loading required package: ggpp
Registered S3 methods overwritten by 'ggpp':
  method                  from   
  heightDetails.titleGrob ggplot2
  widthDetails.titleGrob  ggplot2

Attaching package: 'ggpp'

The following objects are masked from 'package:ggpubr':

    as_npc, as_npcx, as_npcy

The following object is masked from 'package:ggplot2':

    annotate
library(patchwork)


dat_clean=read.csv("dat_clean_modified_zscore_anchoveta.csv")


dat_clean$group <- factor(dat_clean$group,   # Reordering group factor levels
                          levels = c("3.5", "4", "5", "7.5", "10.5", "11", "12", "12.5", "13.5"),
                          labels = c("3.5", "4", "5", "7.5", "10.5", "11", "12", "12.5", "13.5"))

dat_clean$Banda <- factor(dat_clean$Banda,
                          levels = c("35-45", "45-90", "90-170", "170-260"),
                          labels = c("35-45", "45-90", "90-170", "170-260"))

dat <- dat_clean

dat <- dat %>%
  select(-Value) %>%
  rename(Value = "Value_linear")

anxiety <- dat %>%
  select(group, Frequency, Value) %>%
  mutate(Frecuencia = round(as.numeric(Frequency), 0),
         Sv = round(Value, 20),
         Class = group) %>%
  #rename(pretest = Frecuencia, posttest = Valor, group = Class) %>%
  drop_na() %>%
  mutate(Frecuencia = as.numeric(Frecuencia))
library(dplyr)

# Specify the size of the balanced subset
tamaño_subconjunto <- 10000

# Draw a random, balanced sample (with replacement) within each class
subset_balanceado <- anxiety %>%
  group_by(Class) %>%
  sample_n(tamaño_subconjunto, replace = TRUE) %>%
  ungroup()

# Check the size of the balanced subset
nrow(subset_balanceado)
[1] 90000
anxiety=subset_balanceado

# `subset_balanceado` now contains a random, balanced subset of the data.
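
Note that the random subsample above is drawn without a fixed seed, so it changes on every render. A minimal sketch of a reproducible version (the seed value 123 is only an illustrative choice):

set.seed(123)   # hypothetical seed; any fixed integer makes the draw reproducible
subset_balanceado <- anxiety %>%
  group_by(Class) %>%
  sample_n(tamaño_subconjunto, replace = TRUE) %>%
  ungroup()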

Check assumptions

Linearity assumption

Create a scatter plot between the covariate (i.e., Frecuencia) and the outcome variable (i.e., Sv). Add regression lines, and show the corresponding equations and the R² for each group.

ggscatter(
  anxiety, x = "Frecuencia", y = "Sv",
  color = "Class", add = "reg.line", size = 0.5, alpha = 0.5
  ) +
  stat_regline_equation(
    aes(label = paste(after_stat(eq.label), after_stat(rr.label), sep = "~~~~"), color = Class)
    ) +
  theme_bw()

library(ggpubr)   # For ggscatter and stat_regline_equation
library(dplyr)    # For data manipulation

# Assuming 'anxiety' is your data frame and already contains 'Frecuencia', 'Sv' and 'Class'

# Apply the logarithmic transformation for the Y axis (back to dB)
anxiety <- anxiety %>%
  mutate(Log_Sv = 10 * log10(Sv))

# Create the ggscatter plot with the transformed Y axis
ggscatter(
  anxiety, x = "Frecuencia", y = "Log_Sv",  # Use Log_Sv instead of Sv
  color = "Class", add = "reg.line", size = 0.5, alpha = 0.5
) +
  stat_regline_equation(
    aes(label = paste(after_stat(eq.label), after_stat(rr.label), sep = "~~~~"), color = Class)
  ) +
  theme_bw()

There was a linear relationship between the covariate (Frecuencia) and the outcome (Sv) within each length class, as assessed by visual inspection of the scatter plots.

Homogeneity of regression slopes

This assumption checks that there is no significant interaction between the covariate and the grouping variable. It can be assessed as follows:

anxiety %>% anova_test(Sv ~ Class*Frecuencia)
ANOVA Table (type II tests)

            Effect DFn   DFd        F        p p<.05   ges
1            Class   8 89982 2283.109  0.0e+00     * 0.169
2       Frecuencia   1 89982 1228.202 2.9e-267     * 0.013
3 Class:Frecuencia   8 89982  478.533  0.0e+00     * 0.041

The assumption of homogeneity of regression slopes was violated: the Class:Frecuencia interaction term was statistically significant, F(8, 89982) = 478.53, p < 0.0001.
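
Because the interaction is significant, one way to quantify how much the separate-slopes model improves on the additive ANCOVA is a direct model comparison. A minimal sketch on the same data (not part of the original workflow):

m_additive    <- lm(Sv ~ Frecuencia + Class, data = anxiety)   # common slope for all classes
m_interaction <- lm(Sv ~ Frecuencia * Class, data = anxiety)   # one slope per class
anova(m_additive, m_interaction)   # F-test of the extra interaction terms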

Normality of residuals

You first need to compute the model using lm(). In R, you can easily augment your data to add fitted values and residuals by using the function augment(model) [broom package]. Let's call the output model.metrics because it contains several metrics useful for regression diagnostics.

# Fit the model, the covariate goes first
model <- lm(Sv ~ Frecuencia + Class, data = anxiety)
model

Call:
lm(formula = Sv ~ Frecuencia + Class, data = anxiety)

Coefficients:
(Intercept)   Frecuencia       Class4       Class5     Class7.5    Class10.5  
  2.972e-05   -1.718e-07   -3.677e-07   -6.366e-08    1.887e-06    1.012e-06  
    Class11      Class12    Class12.5    Class13.5  
  5.807e-06    1.719e-05    4.820e-05    1.373e-04  
# Inspect the model diagnostic metrics
model.metrics <- augment(model) %>%
  select(-.hat, -.sigma, -.fitted) # Remove details

model.metrics 
# A tibble: 90,000 × 6
            Sv Frecuencia Class      .resid       .cooksd .std.resid
         <dbl>      <dbl> <fct>       <dbl>         <dbl>      <dbl>
 1 0.00000175         198 3.5    0.00000605 0.0000000406      0.0622
 2 0.00000215         224 3.5    0.0000109  0.000000142       0.112 
 3 0.00000331         234 3.5    0.0000138  0.000000235       0.142 
 4 0.00000472         199 3.5    0.00000920 0.0000000940      0.0946
 5 0.00000211         148 3.5   -0.00000218 0.00000000503    -0.0224
 6 0.00000142         230 3.5    0.0000112  0.000000153       0.115 
 7 0.000000998         92 3.5   -0.0000129  0.000000195      -0.133 
 8 0.00000512         259 3.5    0.0000199  0.000000539       0.205 
 9 0.000000853         90 3.5   -0.0000134  0.000000211      -0.138 
10 0.00000662         161 3.5    0.00000457 0.0000000221      0.0470
# ℹ 89,990 more rows
#Assess normality of residuals using shapiro wilk test
#shapiro_test(model.metrics$.resid)

The Shapiro-Wilk test is left commented out here: with 90,000 rows the data exceed the 5,000-observation limit of shapiro.test(), so at this sample size normality of the residuals is better assessed graphically (see the sketch below).
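
A minimal graphical check, assuming the model.metrics tibble created above with augment():

# QQ plot of the standardized residuals; points close to the line suggest
# approximate normality (more informative than a formal test at this sample size)
ggplot(model.metrics, aes(sample = .std.resid)) +
  stat_qq() +
  stat_qq_line() +
  theme_bw()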

Homogeneity of variances

ANCOVA assumes that the variance of the residuals is equal for all groups. This can be checked using Levene's test:

model.metrics %>% levene_test(.resid ~ Class)
# A tibble: 1 × 4
    df1   df2 statistic     p
  <int> <int>     <dbl> <dbl>
1     8 89991     1219.     0

Levene's test was significant, F(8, 89991) = 1219, p < 0.0001, so the assumption of homogeneity of the residual variances across groups is not met. A heteroscedasticity-robust check is sketched below.
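
Because the residual variances differ across groups, a heteroscedasticity-consistent version of the ANCOVA table is one possible follow-up. This is a sketch, not part of the original analysis; it reuses the lm fit model from above (car is already a dependency of rstatix):

library(car)
Anova(model, type = "II", white.adjust = TRUE)   # White-adjusted (HC) type II tests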

Outliers

An outlier is a point that has an extreme outcome variable value. The presence of outliers may affect the interpretation of the model.

Outliers can be identified by examining the standardized residual (or studentized residual), which is the residual divided by its estimated standard error. Standardized residuals can be interpreted as the number of standard errors away from the regression line.

Observations whose standardized residual exceeds 3 in absolute value are possible outliers; given the very large sample, the code below applies a stricter cutoff of 7.

model.metrics %>% 
  filter(abs(.std.resid) > 7) %>%
  as.data.frame()
              Sv Frecuencia Class       .resid      .cooksd .std.resid
1   0.0011271975         46    12 0.0010881997 0.0016591610  11.193276
2   0.0008016781         91    12 0.0007704121 0.0006996141   7.924414
3   0.0035727284         46    12 0.0035337306 0.0174959770  36.348127
4   0.0007396053         92    12 0.0007085112 0.0005898728   7.287703
5   0.0010000000         46    12 0.0009610023 0.0012939579   9.884916
6   0.0008016781         91    12 0.0007704121 0.0006996141   7.924414
7   0.0007550922        160    12 0.0007356817 0.0005728510   7.567136
8   0.0007888601         47    12 0.0007500342 0.0007847200   7.714886
9   0.0011271975         46    12 0.0010881997 0.0016591610  11.193276
10  0.0014354894         46    12 0.0013964917 0.0027324236  14.364382
11  0.0011376273        160  12.5 0.0010872010 0.0012511092  11.182822
12  0.0020701413         46  12.5 0.0020001278 0.0056009157  20.573412
13  0.0014962357         46  12.5 0.0014262221 0.0028478550  14.670190
14  0.0007603263         46  12.5 0.0006903127 0.0006671682   7.100590
15  0.0013273945         46  12.5 0.0012573809 0.0022134885  12.933481
16  0.0015739829         46  12.5 0.0015039693 0.0031668063  15.469901
17  0.0013273945         46  12.5 0.0012573809 0.0022134885  12.933481
18  0.0007943282         46  12.5 0.0007243147 0.0007345107   7.450336
19  0.0007943282         46  12.5 0.0007243147 0.0007345107   7.450336
20  0.0007943282         46  12.5 0.0007243147 0.0007345107   7.450336
21  0.0008375293         46  12.5 0.0007675157 0.0008247420   7.894704
22  0.0013273945         46  12.5 0.0012573809 0.0022134885  12.933481
23  0.0007603263         46  12.5 0.0006903127 0.0006671682   7.100590
24  0.0009885531         47  12.5 0.0009187114 0.0011764799   9.449907
25  0.0007762471        160  12.5 0.0007258208 0.0005576149   7.465708
26  0.0008222426        160  12.5 0.0007718164 0.0006305267   7.938813
27  0.0008222426        160  12.5 0.0007718164 0.0006305267   7.938813
28  0.0013489629         46  12.5 0.0012789493 0.0022900777  13.155335
29  0.0007603263         46  12.5 0.0006903127 0.0006671682   7.100590
30  0.0007943282         46  12.5 0.0007243147 0.0007345107   7.450336
31  0.0007603263         46  12.5 0.0006903127 0.0006671682   7.100590
32  0.0020701413         46  12.5 0.0020001278 0.0056009157  20.573412
33  0.0010069317         47  12.5 0.0009370900 0.0012240211   9.638950
34  0.0015739829         46  12.5 0.0015039693 0.0031668063  15.469901
35  0.0012793813         46  12.5 0.0012093678 0.0020476716  12.439616
36  0.0010375284         47  12.5 0.0009676867 0.0013052565   9.953669
37  0.0019054607         46  12.5 0.0018354472 0.0047165813  18.879499
38  0.0008689604         47  13.5 0.0007100247 0.0007054275   7.303348
39  0.0009354057        178  13.5 0.0007989781 0.0006831440   8.218199
40  0.0019543395        160  13.5 0.0018148191 0.0034856077  18.667016
41  0.0011481536        107  13.5 0.0009995269 0.0011279485  10.281051
42  0.0011350108        259  13.5 0.0010125005 0.0013823070  10.414604
43  0.0020606299        160  13.5 0.0019211096 0.0039058547  19.760307
44  0.0008892011         84  13.5 0.0007366226 0.0006558784   7.576868
45  0.0026730064         46  13.5 0.0025138988 0.0088823140  25.858092
46  0.0013677288         35  13.5 0.0012067312 0.0021516537  12.412542
47  0.0008689604         47  13.5 0.0007100247 0.0007054275   7.303348
48  0.0008336812        162  13.5 0.0006945045 0.0005106617   7.143592
49  0.0008472274         35  13.5 0.0006862298 0.0006958096   7.058619
50  0.0009057326        163  13.5 0.0007667277 0.0006225663   7.886472
51  0.0017060824         45  13.5 0.0015468030 0.0033778031  15.910499
52  0.0010495424         44  13.5 0.0008900912 0.0011235069   9.155529
53  0.0009705100        117  13.5 0.0008236015 0.0007486014   8.471486
54  0.0008452788         35  13.5 0.0006842813 0.0006918637   7.038576
55  0.0011350108        259  13.5 0.0010125005 0.0013823070  10.414604
56  0.0011668096         41  13.5 0.0010068429 0.0014571612  10.356454
57  0.0026730064         46  13.5 0.0025138988 0.0088823140  25.858092
58  0.0019142559        165  13.5 0.0017755947 0.0033411821  18.263560
59  0.0011668096         41  13.5 0.0010068429 0.0014571612  10.356454
60  0.0009484185        162  13.5 0.0008092418 0.0006933296   8.323766
61  0.0008689604         47  13.5 0.0007100247 0.0007054275   7.303348
62  0.0010209395        129  13.5 0.0008760928 0.0008293508   9.011397
63  0.0009057326         72  13.5 0.0007510923 0.0007120120   7.725721
64  0.0009375620         85  13.5 0.0007851553 0.0007426338   8.076071
65  0.0019860949         90  13.5 0.0018345473 0.0039884671  18.870051
66  0.0009931160        120  13.5 0.0008467230 0.0007865438   8.709309
67  0.0012793813        162  13.5 0.0011402046 0.0013764129  11.728011
68  0.0026001596         46  13.5 0.0024410520 0.0083749956  25.108786
69  0.0009440609         71  13.5 0.0007892487 0.0007891899   8.118200
70  0.0018197009         95  13.5 0.0016690124 0.0032505425  17.167353
71  0.0009440609         60  13.5 0.0007873587 0.0008205324   8.098781
72  0.0027039584        168  13.5 0.0025658126 0.0069871156  26.391650
73  0.0010423174        112  13.5 0.0008945498 0.0008927350   9.201259
74  0.0009885531        258  13.5 0.0008658709 0.0010066700   8.906366
75  0.0016982437         94  13.5 0.0015473833 0.0028024737  15.916287
76  0.0009908319        124  13.5 0.0008451262 0.0007779250   8.692881
77  0.0010568175         34  13.5 0.0008956481 0.0011908208   9.212717
78  0.0016865530         35  13.5 0.0015255554 0.0034388026  15.691995
79  0.0009571941        103  13.5 0.0008078801 0.0007445332   8.309792
80  0.0009705100        128  13.5 0.0008254915 0.0007374145   8.490918
81  0.0011885022         65  13.5 0.0010326592 0.0013830907  10.621931
82  0.0008689604         47  13.5 0.0007100247 0.0007054275   7.303348
83  0.0009931160        120  13.5 0.0008467230 0.0007865438   8.709309
84  0.0015703628         37  13.5 0.0014097089 0.0029093167  14.500378
85  0.0008472274         35  13.5 0.0006862298 0.0006958096   7.058619
86  0.0011481536        107  13.5 0.0009995269 0.0011279485  10.281051
87  0.0009354057        178  13.5 0.0007989781 0.0006831440   8.218199
88  0.0012331048        166  13.5 0.0010946154 0.0012703535  11.259087
89  0.0011534533         49  13.5 0.0009948611 0.0013728002  10.233184
90  0.0036728230        170  13.5 0.0035350209 0.0132791387  36.360816
91  0.0010715193        126  13.5 0.0009261572 0.0009311450   9.526356
92  0.0031477483         46  13.5 0.0029886407 0.0125538794  30.741311
93  0.0010447202        160  13.5 0.0009051999 0.0008671631   9.310780
94  0.0011350108        170  13.5 0.0009972087 0.0010567154  10.257173
95  0.0010185914         75  13.5 0.0008644665 0.0009326467   8.891881
96  0.0009749896        118  13.5 0.0008282530 0.0007555500   8.519330
97  0.0009749896        118  13.5 0.0008282530 0.0007555500   8.519330
98  0.0012331048        166  13.5 0.0010946154 0.0012703535  11.259087
99  0.0010568175         64  13.5 0.0009008027 0.0010566609   9.265657
100 0.0010568175         34  13.5 0.0008956481 0.0011908208   9.212717
101 0.0011040786         67  13.5 0.0009485792 0.0011578179   9.757080
102 0.0009594006         46  13.5 0.0008002931 0.0009001785   8.231855
103 0.0012331048         44  13.5 0.0010736536 0.0016346889  11.043663
104 0.0036559479         91  13.5 0.0035045721 0.0145091815  36.047827
105 0.0026424088         92  13.5 0.0024912048 0.0073085770  25.624384
106 0.0011481536        107  13.5 0.0009995269 0.0011279485  10.281051
107 0.0012941958        160  13.5 0.0011546755 0.0014110153  11.876857
108 0.0009571941        103  13.5 0.0008078801 0.0007445332   8.309792
109 0.0009931160         45  13.5 0.0008338366 0.0009815813   8.576889
110 0.0014521116         35  13.5 0.0012911140 0.0024630910  13.280510
111 0.0011246050        106  13.5 0.0009758065 0.0010777603  10.037066
112 0.0023173946        167  13.5 0.0021790771 0.0050368441  22.413733
113 0.0009462372        131  13.5 0.0008017341 0.0006925748   8.246550
114 0.0012502590        163  13.5 0.0011112542 0.0013077661  11.430230
115 0.0026001596         46  13.5 0.0024410520 0.0083749956  25.108786
116 0.0008491805        168  13.5 0.0007110347 0.0005365740   7.313620
117 0.0011668096         41  13.5 0.0010068429 0.0014571612  10.356454
118 0.0011402498         47  13.5 0.0009813140 0.0013474775  10.093843
119 0.0010209395        127  13.5 0.0008757492 0.0008312200   9.007864
120 0.0009817479        161  13.5 0.0008423994 0.0007511421   8.664822
121 0.0030269134        169  13.5 0.0028889395 0.0088630358  29.715297
122 0.0021877616         48  13.5 0.0020289977 0.0057352521  20.870362
123 0.0012502590        163  13.5 0.0011112542 0.0013077661  11.430230
124 0.0010139114        109  13.5 0.0008656283 0.0008418425   8.903779
125 0.0010889301         69  13.5 0.0009337743 0.0011132229   9.604792
126 0.0019543395        160  13.5 0.0018148191 0.0034856077  18.667016
127 0.0011246050        106  13.5 0.0009758065 0.0010777603  10.037066
128 0.0011668096        115  13.5 0.0010195575 0.0011520168  10.487073
129 0.0014521116         35  13.5 0.0012911140 0.0024630910  13.280510
130 0.0021037784         35  13.5 0.0019427809 0.0055769774  19.983611
131 0.0009594006         46  13.5 0.0008002931 0.0009001785   8.231855
132 0.0024774221         47  13.5 0.0023184863 0.0075216752  23.848062
133 0.0011534533         49  13.5 0.0009948611 0.0013728002  10.233184
134 0.0013061709         44  13.5 0.0011467197 0.0018647528  11.795224
135 0.0008241381        180  13.5 0.0006880542 0.0005077803   7.077249
136 0.0010889301         69  13.5 0.0009337743 0.0011132229   9.604792
137 0.0011402498         47  13.5 0.0009813140 0.0013474775  10.093843
138 0.0009682779         48  13.5 0.0008095139 0.0009129312   8.326697
139 0.0011534533        113  13.5 0.0010058575 0.0011261786  10.346159
140 0.0009908319        124  13.5 0.0008451262 0.0007779250   8.692881
141 0.0026001596         46  13.5 0.0024410520 0.0083749956  25.108786
142 0.0009120108         47  13.5 0.0007530751 0.0007935642   7.746166
143 0.0018197009         95  13.5 0.0016690124 0.0032505425  17.167353
144 0.0009727472         39  13.5 0.0008124369 0.0009574630   8.356785
145 0.0011402498        101  13.5 0.0009905922 0.0011254733  10.189158
146 0.0008892011         84  13.5 0.0007366226 0.0006558784   7.576868
147 0.0011040786         67  13.5 0.0009485792 0.0011578179   9.757080
148 0.0009057326        169  13.5 0.0007677586 0.0006259722   7.897077
149 0.0031477483         46  13.5 0.0029886407 0.0125538794  30.741311
150 0.0008241381        180  13.5 0.0006880542 0.0005077803   7.077249
151 0.0011668096        115  13.5 0.0010195575 0.0011520168  10.487073
152 0.0009682779         48  13.5 0.0008095139 0.0009129312   8.326697
153 0.0009817479        161  13.5 0.0008423994 0.0007511421   8.664822
154 0.0009484185         61  13.5 0.0007918882 0.0008265947   8.145368
155 0.0011561122        105  13.5 0.0010071419 0.0011510374  10.359381
156 0.0026730064         46  13.5 0.0025138988 0.0088823140  25.858092
157 0.0018197009         95  13.5 0.0016690124 0.0032505425  17.167353
158 0.0009931160         45  13.5 0.0008338366 0.0009815813   8.576889
159 0.0008669619        168  13.5 0.0007288161 0.0005637467   7.496518
160 0.0009057326        169  13.5 0.0007677586 0.0006259722   7.897077
161 0.0010399202        125  13.5 0.0008943862 0.0008697829   9.199564
162 0.0013677288         35  13.5 0.0012067312 0.0021516537  12.412542
163 0.0010641430         68  13.5 0.0009088154 0.0010586219   9.348067
164 0.0021037784         35  13.5 0.0019427809 0.0055769774  19.983611
165 0.0015523870         88  13.5 0.0014004958 0.0023394331  14.405427
166 0.0009462372        131  13.5 0.0008017341 0.0006925748   8.246550
167 0.0014962357         40  13.5 0.0013360972 0.0025777190  13.743189
168 0.0027039584        168  13.5 0.0025658126 0.0069871156  26.391650
169 0.0020606299        160  13.5 0.0019211096 0.0039058547  19.760307
170 0.0009484185         61  13.5 0.0007918882 0.0008265947   8.145368
171 0.0010568175         34  13.5 0.0008956481 0.0011908208   9.212717
172 0.0011142945        116  13.5 0.0009672142 0.0010345731   9.948674
173 0.0010399202        125  13.5 0.0008943862 0.0008697829   9.199564
174 0.0009375620         85  13.5 0.0007851553 0.0007426338   8.076071
175 0.0010375284         50  13.5 0.0008791081 0.0010672575   9.042540
176 0.0031477483         46  13.5 0.0029886407 0.0125538794  30.741311
177 0.0011534533         49  13.5 0.0009948611 0.0013728002  10.233184
178 0.0016865530         35  13.5 0.0015255554 0.0034388026  15.691995
179 0.0010519619        100  13.5 0.0009021325 0.0009360318   9.279269
180 0.0029512092         46  13.5 0.0027921016 0.0109570332  28.719700
181 0.0009594006         46  13.5 0.0008002931 0.0009001785   8.231855
182 0.0012764388         86  13.5 0.0011242040 0.0015173975  11.563507
183 0.0011481536        107  13.5 0.0009995269 0.0011279485  10.281051
184 0.0027039584        168  13.5 0.0025658126 0.0069871156  26.391650
185 0.0010185914         48  13.5 0.0008598274 0.0010299402   8.844224
186 0.0009440609         60  13.5 0.0007873587 0.0008205324   8.098781
187 0.0010715193        122  13.5 0.0009254699 0.0009361604   9.519290
188 0.0011481536        107  13.5 0.0009995269 0.0011279485  10.281051
189 0.0009705100        117  13.5 0.0008236015 0.0007486014   8.471486
190 0.0014723125         46  13.5 0.0013132049 0.0024237930  13.507693
191 0.0008472274         35  13.5 0.0006862298 0.0006958096   7.058619
192 0.0009225714         62  13.5 0.0007662129 0.0007707075   7.881271
193 0.0010185914         75  13.5 0.0008644665 0.0009326467   8.891881
194 0.0012941958        160  13.5 0.0011546755 0.0014110153  11.876857
195 0.0010423174        112  13.5 0.0008945498 0.0008927350   9.201259
196 0.0012331048        166  13.5 0.0010946154 0.0012703535  11.259087
197 0.0010665961        123  13.5 0.0009207185 0.0009249184   9.470417
198 0.0031477483         46  13.5 0.0029886407 0.0125538794  30.741311
199 0.0016865530         35  13.5 0.0015255554 0.0034388026  15.691995
200 0.0009462372        131  13.5 0.0008017341 0.0006925748   8.246550
201 0.0022961486         93  13.5 0.0021451165 0.0054022326  22.064537
202 0.0012502590         47  13.5 0.0010913233 0.0016665269  11.225404
203 0.0019142559        165  13.5 0.0017755947 0.0033411821  18.263560
204 0.0013772095        162  13.5 0.0012380328 0.0016227343  12.734260
205 0.0009462372        131  13.5 0.0008017341 0.0006925748   8.246550
206 0.0031477483         46  13.5 0.0029886407 0.0125538794  30.741311
207 0.0008669619        168  13.5 0.0007288161 0.0005637467   7.496518
208 0.0010423174        112  13.5 0.0008945498 0.0008927350   9.201259
209 0.0010715193        126  13.5 0.0009261572 0.0009311450   9.526356
210 0.0011428783         70  13.5 0.0009878944 0.0012411969  10.161468
211 0.0013001696         87  13.5 0.0011481065 0.0015773739  11.809366
212 0.0011402498        162  13.5 0.0010010731 0.0010609984  10.296920
213 0.0029922646        163  13.5 0.0028532598 0.0086215611  29.348296
214 0.0013001696         87  13.5 0.0011481065 0.0015773739  11.809366
215 0.0027039584        168  13.5 0.0025658126 0.0069871156  26.391650
216 0.0011455129        259  13.5 0.0010230026 0.0014111316  10.522629
217 0.0008472274         35  13.5 0.0006862298 0.0006958096   7.058619
218 0.0010375284        111  13.5 0.0008895890 0.0008848936   9.150234
219 0.0009354057        178  13.5 0.0007989781 0.0006831440   8.218199
220 0.0011246050        106  13.5 0.0009758065 0.0010777603  10.037066
221 0.0011091748         91  13.5 0.0009577990 0.0010837315   9.851866
222 0.0026001596         46  13.5 0.0024410520 0.0083749956  25.108786
223 0.0036728230        170  13.5 0.0035350209 0.0132791387  36.360816
224 0.0010046158         73  13.5 0.0008501473 0.0009087598   8.744597
225 0.0010665961        123  13.5 0.0009207185 0.0009249184   9.470417
226 0.0009397233        160  13.5 0.0008002030 0.0006776605   8.230794
227 0.0009527962         76  13.5 0.0007988431 0.0007934948   8.216878
228 0.0009527962         46  13.5 0.0007936886 0.0008853822   8.163921
229 0.0010764652         44  13.5 0.0009170140 0.0011925008   9.432458
230 0.0010715193        122  13.5 0.0009254699 0.0009361604   9.519290
231 0.0014962357         40  13.5 0.0013360972 0.0025777190  13.743189
232 0.0011561122         99  13.5 0.0010061110 0.0011675196  10.348786
233 0.0020844909         46  13.5 0.0019253833 0.0052103264  19.804591
234 0.0010715193        126  13.5 0.0009261572 0.0009311450   9.526356
235 0.0016865530         35  13.5 0.0015255554 0.0034388026  15.691995
236 0.0012302688        163  13.5 0.0010912639 0.0012611387  11.224613
237 0.0009571941        103  13.5 0.0008078801 0.0007445332   8.309792
238 0.0011668096         41  13.5 0.0010068429 0.0014571612  10.356454
239 0.0015031420        163  13.5 0.0013641371 0.0019706946  14.031354
240 0.0011040786         67  13.5 0.0009485792 0.0011578179   9.757080
241 0.0021037784         35  13.5 0.0019427809 0.0055769774  19.983611
242 0.0011142945        116  13.5 0.0009672142 0.0010345731   9.948674
243 0.0010665961        102  13.5 0.0009171104 0.0009620595   9.433327
244 0.0009057326        169  13.5 0.0007677586 0.0006259722   7.897077
245 0.0011402498        162  13.5 0.0010010731 0.0010609984  10.296920
246 0.0020606299        160  13.5 0.0019211096 0.0039058547  19.760307
247 0.0009354057        178  13.5 0.0007989781 0.0006831440   8.218199
248 0.0012331048        166  13.5 0.0010946154 0.0012703535  11.259087
249 0.0008491805        168  13.5 0.0007110347 0.0005365740   7.313620
250 0.0017179084         44  13.5 0.0015584572 0.0034442643  16.030380
251 0.0022130947        166  13.5 0.0020746053 0.0045632307  21.339148
252 0.0009527962         76  13.5 0.0007988431 0.0007934948   8.216878
253 0.0011246050        106  13.5 0.0009758065 0.0010777603  10.037066
254 0.0015381546         89  13.5 0.0013864352 0.0022852766  14.260798
255 0.0008452788         35  13.5 0.0006842813 0.0006918637   7.038576
256 0.0023173946        167  13.5 0.0021790771 0.0050368441  22.413733
257 0.0012589254         46  13.5 0.0010998178 0.0017000905  11.312782
258 0.0009440609         60  13.5 0.0007873587 0.0008205324   8.098781
259 0.0019408859        165  13.5 0.0018022246 0.0034421542  18.537472
260 0.0012502590         47  13.5 0.0010913233 0.0016665269  11.225404
261 0.0011561122        114  13.5 0.0010086883 0.0011300272  10.375275
262 0.0009594006         46  13.5 0.0008002931 0.0009001785   8.231855
263 0.0010092529        110  13.5 0.0008611417 0.0008311504   8.857628
264 0.0014893611         97  13.5 0.0013390162 0.0020798959  13.773030
265 0.0010375284        111  13.5 0.0008895890 0.0008848936   9.150234
266 0.0010665961        102  13.5 0.0009171104 0.0009620595   9.433327
267 0.0009727472        119  13.5 0.0008261824 0.0007502920   8.498031
268 0.0022542392         36  13.5 0.0020934135 0.0064453733  21.533024
269 0.0021978599         46  13.5 0.0020387523 0.0058419718  20.970710
270 0.0011246050        106  13.5 0.0009758065 0.0010777603  10.037066
271 0.0013899526         46  13.5 0.0012308451 0.0021293022  12.660535
272 0.0011402498         47  13.5 0.0009813140 0.0013474775  10.093843
273 0.0029922646        163  13.5 0.0028532598 0.0086215611  29.348296
274 0.0008394600        135  13.5 0.0006956442 0.0005187744   7.155319
275 0.0009527962         46  13.5 0.0007936886 0.0008853822   8.163921
276 0.0011668096         41  13.5 0.0010068429 0.0014571612  10.356454
277 0.0011481536        107  13.5 0.0009995269 0.0011279485  10.281051
278 0.0013772095        162  13.5 0.0012380328 0.0016227343  12.734260
279 0.0010665961        102  13.5 0.0009171104 0.0009620595   9.433327
280 0.0009354057        178  13.5 0.0007989781 0.0006831440   8.218199
281 0.0024774221         47  13.5 0.0023184863 0.0075216752  23.848062
282 0.0019408859        165  13.5 0.0018022246 0.0034421542  18.537472
283 0.0015381546         89  13.5 0.0013864352 0.0022852766  14.260798
284 0.0010964782         35  13.5 0.0009354806 0.0012930664   9.622434
285 0.0013001696         87  13.5 0.0011481065 0.0015773739  11.809366
286 0.0009484185         45  13.5 0.0007891391 0.0008791671   8.117127
287 0.0008689604         47  13.5 0.0007100247 0.0007054275   7.303348
288 0.0010139114        109  13.5 0.0008656283 0.0008418425   8.903779
289 0.0012793813        162  13.5 0.0011402046 0.0013764129  11.728011
290 0.0010641430         47  13.5 0.0009052073 0.0011465726   9.311005
291 0.0009727472         63  13.5 0.0008165606 0.0008717752   8.399145
292 0.0011402498         47  13.5 0.0009813140 0.0013474775  10.093843
293 0.0013867558        166  13.5 0.0012482664 0.0016520228  12.839523
294 0.0008394600        135  13.5 0.0006956442 0.0005187744   7.155319
295 0.0013614447         42  13.5 0.0012016498 0.0020662014  12.360246
296 0.0009594006         46  13.5 0.0008002931 0.0009001785   8.231855
297 0.0030269134        169  13.5 0.0028889395 0.0088630358  29.715297
298 0.0008491805        133  13.5 0.0007050211 0.0005341526   7.251769
299 0.0015031420        163  13.5 0.0013641371 0.0019706946  14.031354
300 0.0010641430         68  13.5 0.0009088154 0.0010586219   9.348067
301 0.0020558906         46  13.5 0.0018967830 0.0050566842  19.510407
302 0.0021037784         35  13.5 0.0019427809 0.0055769774  19.983611
303 0.0010641430         68  13.5 0.0009088154 0.0010586219   9.348067
304 0.0013772095        162  13.5 0.0012380328 0.0016227343  12.734260
305 0.0011561122         99  13.5 0.0010061110 0.0011675196  10.348786
306 0.0009727472        119  13.5 0.0008261824 0.0007502920   8.498031
307 0.0008336812        132  13.5 0.0006893500 0.0005113310   7.090579
308 0.0009506048        163  13.5 0.0008115999 0.0006975692   8.348022
309 0.0013614447         42  13.5 0.0012016498 0.0020662014  12.360246
310 0.0009908319        124  13.5 0.0008451262 0.0007779250   8.692881
311 0.0014521116         35  13.5 0.0012911140 0.0024630910  13.280510
312 0.0009484185         61  13.5 0.0007918882 0.0008265947   8.145368
313 0.0014521116         35  13.5 0.0012911140 0.0024630910  13.280510
314 0.0022130947        166  13.5 0.0020746053 0.0045632307  21.339148
315 0.0012589254         46  13.5 0.0010998178 0.0017000905  11.312782
316 0.0009015711        162  13.5 0.0007623945 0.0006153789   7.841900
317 0.0010423174        112  13.5 0.0008945498 0.0008927350   9.201259
318 0.0013001696         87  13.5 0.0011481065 0.0015773739  11.809366
319 0.0012331048         44  13.5 0.0010736536 0.0016346889  11.043663
320 0.0014893611         97  13.5 0.0013390162 0.0020798959  13.773030
321 0.0010185914         48  13.5 0.0008598274 0.0010299402   8.844224
322 0.0009571941        103  13.5 0.0008078801 0.0007445332   8.309792
323 0.0009727472        119  13.5 0.0008261824 0.0007502920   8.498031
324 0.0021037784         35  13.5 0.0019427809 0.0055769774  19.983611
325 0.0012941958        160  13.5 0.0011546755 0.0014110153  11.876857
326 0.0017701090         46  13.5 0.0016110014 0.0036477272  16.570843
327 0.0013772095        162  13.5 0.0012380328 0.0016227343  12.734260
328 0.0013772095        162  13.5 0.0012380328 0.0016227343  12.734260
329 0.0009375620         85  13.5 0.0007851553 0.0007426338   8.076071
330 0.0011534533         49  13.5 0.0009948611 0.0013728002  10.233184
331 0.0009705100        117  13.5 0.0008236015 0.0007486014   8.471486
332 0.0009749896        118  13.5 0.0008282530 0.0007555500   8.519330
333 0.0023014418         47  13.5 0.0021425061 0.0064231731  22.037920
334 0.0008241381        180  13.5 0.0006880542 0.0005077803   7.077249
335 0.0017060824         45  13.5 0.0015468030 0.0033778031  15.910499
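
If one decided to drop these extreme points before refitting (the original analysis keeps them), a minimal sketch of the exclusion step, using the same 7-SD cutoff, would be:

anxiety_no_out <- augment(model) %>%
  filter(abs(.std.resid) <= 7) %>%      # keep only non-extreme residuals
  select(Sv, Frecuencia, Class)
model_no_out <- lm(Sv ~ Frecuencia + Class, data = anxiety_no_out)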

COMPUTATION

The order of the variables matters when computing an ANCOVA. You want to remove the effect of the covariate first, that is, control for it, before entering your variable of main interest.

The covariate goes first (and there is no interaction term)! If you do not follow this order, you will get different results. A base-R sketch illustrating this order dependence follows the ANOVA table below.

#COMPUTATION
res.aov <- anxiety %>%
anova_test(Sv ~ Frecuencia + Class)
get_anova_table(res.aov)
ANOVA Table (type II tests)

      Effect DFn   DFd        F         p p<.05   ges
1 Frecuencia   1 89990 1178.185 1.55e-256     * 0.013
2      Class   8 89990 2190.134  0.00e+00     * 0.163
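
For reference, the order dependence shows up with sequential (Type I) sums of squares in base R; the Type II table above is not order-dependent. A minimal sketch on the same data:

summary(aov(Sv ~ Frecuencia + Class, data = anxiety))   # covariate entered first
summary(aov(Sv ~ Class + Frecuencia, data = anxiety))   # covariate entered last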

After adjustment for Frecuencia, there was a statistically significant difference in Sv between the length classes, F(8, 89990) = 2190.13, p < 0.0001.

POST HOC TEST

Pairwise comparisons can be performed to identify which groups are different. The Bonferroni multiple-testing correction is applied. This can be done easily with the function emmeans_test() [rstatix package], a wrapper around the emmeans package, which needs to be installed. Emmeans stands for estimated marginal means (also known as least-squares means or adjusted means).

#POST HOC TEST
# Pairwise comparisons
library(emmeans)
Welcome to emmeans.
Caution: You lose important information if you filter this package's results.
See '? untidy'
pwc <- anxiety %>% 
  emmeans_test(
    Sv ~ Class, covariate = Frecuencia,
    p.adjust.method = "bonferroni"
    )
pwc
# A tibble: 36 × 9
   term     .y.   group1 group2    df statistic         p     p.adj p.adj.signif
 * <chr>    <chr> <chr>  <chr>  <dbl>     <dbl>     <dbl>     <dbl> <chr>       
 1 Frecuen… Sv    3.5    4      89990    0.267  7.89e-  1 1   e+  0 ns          
 2 Frecuen… Sv    3.5    5      89990    0.0463 9.63e-  1 1   e+  0 ns          
 3 Frecuen… Sv    3.5    7.5    89990   -1.37   1.70e-  1 1   e+  0 ns          
 4 Frecuen… Sv    3.5    10.5   89990   -0.736  4.62e-  1 1   e+  0 ns          
 5 Frecuen… Sv    3.5    11     89990   -4.22   2.41e-  5 8.68e-  4 ***         
 6 Frecuen… Sv    3.5    12     89990  -12.5    8.12e- 36 2.92e- 34 ****        
 7 Frecuen… Sv    3.5    12.5   89990  -35.1    2.04e-267 7.33e-266 ****        
 8 Frecuen… Sv    3.5    13.5   89990  -99.9    0         0         ****        
 9 Frecuen… Sv    4      5      89990   -0.221  8.25e-  1 1   e+  0 ns          
10 Frecuen… Sv    4      7.5    89990   -1.64   1.01e-  1 1   e+  0 ns          
# ℹ 26 more rows
# Display the adjusted means of each group
# Also called the estimated marginal means (emmeans)
emm=get_emmeans(pwc)
emm
# A tibble: 9 × 8
  Frecuencia Class     emmean          se    df    conf.low  conf.high method   
       <dbl> <fct>      <dbl>       <dbl> <dbl>       <dbl>      <dbl> <chr>    
1       157. 3.5   0.00000281 0.000000972 89990 0.000000905 0.00000472 Emmeans …
2       157. 4     0.00000244 0.000000972 89990 0.000000538 0.00000435 Emmeans …
3       157. 5     0.00000275 0.000000972 89990 0.000000842 0.00000465 Emmeans …
4       157. 7.5   0.00000470 0.000000972 89990 0.00000279  0.00000660 Emmeans …
5       157. 10.5  0.00000382 0.000000972 89990 0.00000192  0.00000573 Emmeans …
6       157. 11    0.00000862 0.000000972 89990 0.00000671  0.0000105  Emmeans …
7       157. 12    0.0000200  0.000000972 89990 0.0000181   0.0000219  Emmeans …
8       157. 12.5  0.0000510  0.000000972 89990 0.0000491   0.0000529  Emmeans …
9       157. 13.5  0.000140   0.000000972 89990 0.000138    0.000142   Emmeans …
emm2 <- emm %>%
  mutate(emmean_abs = abs(emmean),
         sv = 10 * log10(emmean_abs),
         conf.low.abs = abs(conf.low),
         conf.high.abs = abs(conf.high)) %>%
  mutate(low = 10 * log10(conf.low.abs),
         high = 10 * log10(conf.high.abs)) %>%
  mutate(low.res = 10 * log10(abs(emmean_abs - se)),
         hig.res = 10 * log10(abs(emmean_abs + se)))

as.data.frame(emm2$se)
       emm2$se
1 9.722770e-07
2 9.722828e-07
3 9.722592e-07
4 9.722632e-07
5 9.722578e-07
6 9.722786e-07
7 9.722550e-07
8 9.722554e-07
9 9.722611e-07
library(dplyr)

emm2_resumen <- emm2 %>%
  select(Frecuencia, Class, sv, se, df, low.res, hig.res) %>%
  mutate_at(vars(sv, se, df, low.res, hig.res), ~ round(., 2))


emm2_resumen
# A tibble: 9 × 7
  Frecuencia Class    sv    se    df low.res hig.res
       <dbl> <fct> <dbl> <dbl> <dbl>   <dbl>   <dbl>
1       157. 3.5   -55.5     0 89990   -57.4   -54.2
2       157. 4     -56.1     0 89990   -58.3   -54.7
3       157. 5     -55.6     0 89990   -57.5   -54.3
4       157. 7.5   -53.3     0 89990   -54.3   -52.5
5       157. 10.5  -54.2     0 89990   -55.4   -53.2
6       157. 11    -50.6     0 89990   -51.2   -50.2
7       157. 12    -47.0     0 89990   -47.2   -46.8
8       157. 12.5  -42.9     0 89990   -43.0   -42.8
9       157. 13.5  -38.5     0 89990   -38.6   -38.5

Data are presented as the adjusted mean +/- standard error (in dB). The adjusted mean Sv increased with length class: the 13.5 cm class (-38.5 dB) was higher than the 12.5 cm (-42.9 dB) and 12 cm (-47.0 dB) classes, and the comparisons of the 11, 12, 12.5 and 13.5 cm classes against the 3.5 cm class were statistically significant (adjusted p < 0.001).

Report

An ANCOVA was run to determine the effect of length class on Sv after controlling for frequency (Frecuencia).

After adjustment for Frecuencia, there was a statistically significant difference in Sv between the length classes, F(8, 89990) = 2190.13, p < 0.0001.

Post hoc analysis was performed with a Bonferroni adjustment. The adjusted mean Sv of the larger classes (11, 12, 12.5 and 13.5 cm) was statistically significantly higher than that of the 3.5 cm class (adjusted p < 0.001); see the pairwise comparison table above for the full set of contrasts.

# Visualization: line plots with p-values

# Function for a custom transformation (log10 and multiplication by 10)

pwc <- pwc %>% add_xy_position(x = "Class", fun = "mean_se")

ggline(get_emmeans(pwc), x = "Class", y = "emmean") +
  geom_errorbar(aes(ymin = conf.low, ymax = conf.high, color = Class), width = 1) +
  stat_pvalue_manual(pwc, hide.ns = TRUE, tip.length = 0) +
  labs(
    subtitle = get_test_label(res.aov, detailed = TRUE),
    caption = get_pwc_label(pwc)
  ) +
  theme_presentation(base_size = 11) +
  scale_color_manual(name = "Longitud (cm)",
                     values = c("#5f5f5f", "#0000ff", "#000080", "#00bf00", "#ffff00",
                                "#ff8000", "#ff00bf", "#ff0000", "#a6533c")) +
  #scale_color_viridis_d(option = "C") +
  # Adjust the x-axis label orientation
  theme(axis.text.x = element_text(angle = 0, vjust = 0.5, hjust = 1),
        legend.position = "none") +
  labs(x = "Longitud (cm)", y = "Emmean (Sv, dB)")

References to look up on the web: ANCOVA

1-s2.0-S0022191021001335-mmc1.docx (live.com)

Acclimation, duration and intensity of cold exposure determine the rate of cold stress accumulation and mortality in Drosophila suzukii - ScienceDirect

library(ggplot2)

# Plot the adjusted means (in dB) with error bars
Ancova01 <- ggplot(data = emm2) +
  geom_point(alpha = 0.5, size = 4, aes(x = Class, y = sv, fill = Class, color = Class)) +
  labs(
    subtitle = get_test_label(res.aov, detailed = TRUE),
    caption = get_pwc_label(pwc)
  ) +
  geom_errorbar(aes(x = as.factor(Class), ymin = low.res, ymax = hig.res, color = Class), width = 0.5) +
  theme_presentation(base_size = 11) +
  # scale_y_continuous(limits = c(-60, 150)) +
  scale_fill_manual(name = "Longitud (cm)",
                    values = c("#5f5f5f", "#0000ff", "#000080", "#00bf00", "#ffff00",
                               "#ff8000", "#ff00bf", "#ff0000", "#a6533c")) +
  scale_color_manual(name = "Longitud (cm)",
                     values = c("#5f5f5f", "#0000ff", "#000080", "#00bf00", "#ffff00",
                                "#ff8000", "#ff00bf", "#ff0000", "#a6533c")) +
  theme(axis.text.x = element_text(angle = 0, vjust = 0.5, hjust = 1),
        legend.position = "none") +
  labs(x = "Longitud (cm)", y = "Emmean (Sv, dB)")

Ancova01

ggsave(filename = "Ancova_modal.png",
       plot = Ancova01,
       height = 4,            # Height of the plot in inches
       width = 4.25,          # Width of the plot in inches
       dpi = 1000,            # Resolution in dots per inch
       path = "F:/Tesis abordo/Tesis abordo/Figuras/Objetivo02/",
       device = "png")