iterations_immediate

Quarto

Quarto enables you to weave together content and executable code into a finished document. To learn more about Quarto see https://quarto.org.

Running Code

When you click the Render button a document will be generated that includes both content and the output of embedded code. You can embed code like this:

library(broom)
library(lme4)
Loading required package: Matrix
library(dplyr)

Attaching package: 'dplyr'
The following objects are masked from 'package:stats':

    filter, lag
The following objects are masked from 'package:base':

    intersect, setdiff, setequal, union
library(lme4)
library(lmerTest)

Attaching package: 'lmerTest'
The following object is masked from 'package:lme4':

    lmer
The following object is masked from 'package:stats':

    step
library(emmeans)
library(car)
Loading required package: carData

Attaching package: 'car'
The following object is masked from 'package:dplyr':

    recode
library(tidyverse)
-- Attaching core tidyverse packages ------------------------ tidyverse 2.0.0 --
v forcats   1.0.0     v readr     2.1.4
v ggplot2   3.4.3     v stringr   1.5.0
v lubridate 1.9.2     v tibble    3.2.1
v purrr     1.0.1     v tidyr     1.3.0
-- Conflicts ------------------------------------------ tidyverse_conflicts() --
x tidyr::expand() masks Matrix::expand()
x dplyr::filter() masks stats::filter()
x dplyr::lag()    masks stats::lag()
x tidyr::pack()   masks Matrix::pack()
x car::recode()   masks dplyr::recode()
x purrr::some()   masks car::some()
x tidyr::unpack() masks Matrix::unpack()
i Use the conflicted package (<http://conflicted.r-lib.org/>) to force all conflicts to become errors
library(corrplot)
corrplot 0.92 loaded
library(RColorBrewer)
library(ggplot2)
library(MASS)

Attaching package: 'MASS'

The following object is masked from 'package:dplyr':

    select
library(agricolae)
library(vegan)
Loading required package: permute
Loading required package: lattice
Registered S3 methods overwritten by 'vegan':
  method      from
  plot.rda    klaR
  predict.rda klaR
  print.rda   klaR
This is vegan 2.6-4
library(dplyr)
library(readr)
library(DT)
library(ggplot2)
library(quantreg)
Loading required package: SparseM

Attaching package: 'SparseM'

The following object is masked from 'package:base':

    backsolve
library(broom.mixed)
library(pedigree)
library(pedigreemm)
library(pedtools)

Data Reading

rm(list = ls())

setwd("C:/Users/anune/OneDrive/Desktop/PIC_DataAnalysis_files")

data_PIC <- read.csv("PIC_65_FIRE.AN.1.csv")

head(data_PIC$ENTRY_TIME)
[1] "10/27/2022 12:51" "10/27/2022 12:00" "10/27/2022 13:03" "10/27/2022 7:27" 
[5] "10/27/2022 8:04"  "10/27/2022 8:41" 
data_PIC <- mutate(data_PIC,
                   ENTRY_DATE = as_date(mdy_hm(ENTRY_TIME, tz = "UTC")),
                   ENTRY      = mdy_hm(ENTRY_TIME, tz = "UTC"),
                   EXIT_DATE  = as_date(mdy_hm(EXIT_TIME, tz = "UTC")),
                   EXIT       = mdy_hm(EXIT_TIME, tz = "UTC"))

summary(data_PIC$ENTRY_DATE)
        Min.      1st Qu.       Median         Mean      3rd Qu.         Max. 
"2022-04-06" "2023-01-02" "2023-02-19" "2023-02-18" "2023-04-08" "2023-06-19" 

Data Arrangement

#| warning: true
#| echo: true

class(data_PIC)
[1] "data.frame"
data_PIC$PEN <- as.factor(data_PIC$PEN)

data_PIC$Social_Group <- paste(data_PIC$PEN, data_PIC$START_DAY, data_PIC$OFFTEST_DAY, sep = "_")

head(data_PIC$Social_Group)
[1] "B0113_27-Oct-22_2-Jan-23" "B0113_27-Oct-22_2-Jan-23"
[3] "B0113_27-Oct-22_2-Jan-23" "B0113_27-Oct-22_2-Jan-23"
[5] "B0113_27-Oct-22_2-Jan-23" "B0113_27-Oct-22_2-Jan-23"
data_PIC <- group_by(data_PIC, Social_Group)


data_PIC.arrange <- arrange(data_PIC, Social_Group, ENTRY, .by_group = TRUE) %>%
  mutate(line = row_number())


data_PIC.arrange
# A tibble: 114,263 x 22
# Groups:   Social_Group [36]
         ID  LINE    SIRE    DAM LITTER PEN    FARM ENTRY_TIME EXIT_TIME STAY_IN
      <int> <int>   <int>  <int>  <int> <fct> <int> <chr>      <chr>       <int>
 1 97900600    65  8.82e7 9.22e7 7.90e7 B0111   774 3/15/2023~ 3/15/202~     156
 2 97900500    65  9.20e7 9.15e7 7.86e7 B0111   774 3/15/2023~ 3/15/202~     416
 3 97900600    65  8.82e7 9.22e7 7.90e7 B0111   774 3/15/2023~ 3/15/202~     469
 4 97887849    65  8.83e7 9.34e7 7.90e7 B0111   774 3/15/2023~ 3/15/202~      48
 5 97900500    65  9.20e7 9.15e7 7.86e7 B0111   774 3/15/2023~ 3/15/202~     166
 6 97900600    65  8.82e7 9.22e7 7.90e7 B0111   774 3/15/2023~ 3/15/202~     113
 7 97887847    65  8.83e7 9.34e7 7.90e7 B0111   774 3/15/2023~ 3/15/202~       7
 8 97900600    65  8.82e7 9.22e7 7.90e7 B0111   774 3/15/2023~ 3/15/202~     103
 9 97887847    65  8.83e7 9.34e7 7.90e7 B0111   774 3/15/2023~ 3/15/202~      24
10 97900231    65  8.83e7 9.37e7 7.90e7 B0111   774 3/15/2023~ 3/15/202~    1862
# i 114,253 more rows
# i 12 more variables: FEED_INTK <int>, ENTRY_WT <int>, EXIT_WT <int>,
#   FEEDER_NO <int>, START_DAY <chr>, OFFTEST_DAY <chr>, ENTRY_DATE <date>,
#   ENTRY <dttm>, EXIT_DATE <date>, EXIT <dttm>, Social_Group <chr>, line <int>
data_PIC.arrange %>%
  dplyr::select(ID, ENTRY, Social_Group)
# A tibble: 114,263 x 3
# Groups:   Social_Group [36]
         ID ENTRY               Social_Group             
      <int> <dttm>              <chr>                    
 1 97900600 2023-03-15 08:06:00 B0111_16-Mar-23_15-May-23
 2 97900500 2023-03-15 08:12:00 B0111_16-Mar-23_15-May-23
 3 97900600 2023-03-15 08:19:00 B0111_16-Mar-23_15-May-23
 4 97887849 2023-03-15 08:34:00 B0111_16-Mar-23_15-May-23
 5 97900500 2023-03-15 08:36:00 B0111_16-Mar-23_15-May-23
 6 97900600 2023-03-15 08:39:00 B0111_16-Mar-23_15-May-23
 7 97887847 2023-03-15 08:44:00 B0111_16-Mar-23_15-May-23
 8 97900600 2023-03-15 08:49:00 B0111_16-Mar-23_15-May-23
 9 97887847 2023-03-15 08:52:00 B0111_16-Mar-23_15-May-23
10 97900231 2023-03-15 09:04:00 B0111_16-Mar-23_15-May-23
# i 114,253 more rows

Creating Time Between

data_PIC <- data_PIC %>%
  arrange(Social_Group, ENTRY) %>%
  group_by(Social_Group) %>%
  mutate(Follower_ID = lead(ID),
         Follower_Time = lead(ENTRY),
         Follower_Social_Group = lead(Social_Group),
         line = row_number(),
         Hour_ENTRY = hour(ENTRY),
         time_between = as.numeric(Follower_Time - EXIT, units = "secs")) %>%
  filter(time_between < 36000, time_between >= 0)
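
The pairing logic above relies on lead(): within each Social_Group, ordered by ENTRY, every visit is matched to the next visit in the same group, and time_between is the gap in seconds between the current animal's EXIT and the follower's ENTRY. A minimal sketch on made-up data (toy IDs and times, not taken from the data set) showing the same idea:

toy <- tibble(Social_Group = "A",
              ID    = c(1, 2, 1),
              ENTRY = ymd_hm(c("2023-03-15 08:00", "2023-03-15 08:10", "2023-03-15 08:25")),
              EXIT  = ymd_hm(c("2023-03-15 08:05", "2023-03-15 08:20", "2023-03-15 08:30")))

toy %>%
  group_by(Social_Group) %>%
  arrange(ENTRY, .by_group = TRUE) %>%
  mutate(Follower_ID   = lead(ID),
         Follower_Time = lead(ENTRY),
         time_between  = as.numeric(Follower_Time - EXIT, units = "secs"))
# time_between is 300, 300, NA: five minutes from each EXIT to the next ENTRY,
# and NA for the last visit in the group (it has no follower)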

Filters

data_PIC %>%
  mutate(time_between = as.numeric(Follower_Time - ENTRY, units = "secs"),
         lapse_Time = seconds(Follower_Time - ENTRY)) %>%
  dplyr::select(time_between, lapse_Time)
Adding missing grouping variables: `Social_Group`
# A tibble: 113,056 x 3
# Groups:   Social_Group [36]
   Social_Group              time_between lapse_Time
   <chr>                            <dbl> <Period>  
 1 B0111_16-Mar-23_15-May-23          360 360S      
 2 B0111_16-Mar-23_15-May-23          420 420S      
 3 B0111_16-Mar-23_15-May-23          900 900S      
 4 B0111_16-Mar-23_15-May-23          120 120S      
 5 B0111_16-Mar-23_15-May-23          180 180S      
 6 B0111_16-Mar-23_15-May-23          300 300S      
 7 B0111_16-Mar-23_15-May-23          300 300S      
 8 B0111_16-Mar-23_15-May-23          180 180S      
 9 B0111_16-Mar-23_15-May-23          720 720S      
10 B0111_16-Mar-23_15-May-23         1920 1920S     
# i 113,046 more rows
data_PIC_pvalues_lessorequal_60 <- filter(data_PIC, time_between <= 60) %>%
  mutate(TIME_FEEDER = as.numeric(STAY_IN))


dim(data_PIC_pvalues_lessorequal_60)
[1] 80528    28
summary(data_PIC_pvalues_lessorequal_60$TIME_FEEDER)
   Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
      5     599    1355    1426    2068   10169 

Fixed Effect L_Time

head(data_PIC_pvalues_lessorequal_60)
# A tibble: 6 x 28
# Groups:   Social_Group [1]
        ID  LINE     SIRE    DAM LITTER PEN    FARM ENTRY_TIME EXIT_TIME STAY_IN
     <int> <int>    <int>  <int>  <int> <fct> <int> <chr>      <chr>       <int>
1 97900500    65 92013924 9.15e7 7.86e7 B0111   774 3/15/2023~ 3/15/202~     416
2 97887849    65 88291968 9.34e7 7.90e7 B0111   774 3/15/2023~ 3/15/202~      48
3 97900500    65 92013924 9.15e7 7.86e7 B0111   774 3/15/2023~ 3/15/202~     166
4 97900231    65 88291968 9.37e7 7.90e7 B0111   774 3/15/2023~ 3/15/202~    1862
5 97900500    65 92013924 9.15e7 7.86e7 B0111   774 3/15/2023~ 3/15/202~     965
6 97900600    65 88153118 9.22e7 7.90e7 B0111   774 3/15/2023~ 3/15/202~     500
# i 18 more variables: FEED_INTK <int>, ENTRY_WT <int>, EXIT_WT <int>,
#   FEEDER_NO <int>, START_DAY <chr>, OFFTEST_DAY <chr>, ENTRY_DATE <date>,
#   ENTRY <dttm>, EXIT_DATE <date>, EXIT <dttm>, Social_Group <chr>,
#   Follower_ID <int>, Follower_Time <dttm>, Follower_Social_Group <chr>,
#   line <int>, Hour_ENTRY <int>, time_between <dbl>, TIME_FEEDER <dbl>
data_PIC_pvalues60 <- data_PIC_pvalues_lessorequal_60 %>%
  mutate(L_time = log(TIME_FEEDER))
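
TIME_FEEDER (the STAY_IN occupancy time, in seconds) is strongly right-skewed in the summary above (median 1,355 s against a maximum of 10,169 s), which is why the models below are fitted on L_time = log(TIME_FEEDER). A quick hedged check of that skew, assuming the objects created above are still in memory:

par(mfrow = c(1, 2))
hist(data_PIC_pvalues_lessorequal_60$TIME_FEEDER, main = "TIME_FEEDER", xlab = "seconds")
hist(data_PIC_pvalues60$L_time, main = "log(TIME_FEEDER)", xlab = "log(seconds)")
par(mfrow = c(1, 1))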

Thresholds

umbral <- 60  # umbral = threshold (seconds)

data_PIC_pvalues_0 <- data_PIC_pvalues60 %>%
  mutate(time_between_group = case_when(
    time_between <= umbral ~ "immediate",
    time_between > umbral ~ "distant"
  )) %>%
  group_by(time_between_group) %>%
  group_modify(~ broom.mixed::tidy(lme4::lmer(L_time ~ (1 | ID) + (1 | Follower_ID) +
                                                (1 | Social_Group) + as.factor(Hour_ENTRY),
                                              data = .x))) %>%
  filter(effect == "ran_pars")
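
Note that broom.mixed::tidy() reports the 'ran_pars' rows as standard deviations (and correlations) by default. If variance-scale estimates are wanted instead, to line up with the VarCorr() output shown later, the tidy() call can be adjusted; a sketch of that variant (same model, only the tidy() arguments differ; scales = "vcov" is an assumption to verify against your installed broom.mixed version):

data_PIC_pvalues60 %>%
  mutate(time_between_group = if_else(time_between <= umbral, "immediate", "distant")) %>%
  group_by(time_between_group) %>%
  group_modify(~ broom.mixed::tidy(
    lme4::lmer(L_time ~ (1 | ID) + (1 | Follower_ID) + (1 | Social_Group) +
                 as.factor(Hour_ENTRY), data = .x),
    effects = "ran_pars", scales = "vcov"))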

total_counts1 <- data_PIC_pvalues60 %>%
  mutate(time_between_group = case_when(
    time_between <= umbral ~ "immediate",
    time_between > umbral ~ "distant"
  )) %>%
  group_by(time_between_group) %>%
  summarise(total = n())  


print(total_counts1)
# A tibble: 1 x 2
  time_between_group total
  <chr>              <int>
1 immediate          80528

Linear Mixed Model (lmer)

## model without pedigree: original model with direct and indirect effects

reduced_model_Follower.lmer <- lmer(
  L_time ~ as.factor(Hour_ENTRY) +  (1 | ID) +  (1 | Follower_ID) + (1 | Social_Group),
  data = data_PIC_pvalues60
)

Model Summary

summary(reduced_model_Follower.lmer)
Linear mixed model fit by REML. t-tests use Satterthwaite's method [
lmerModLmerTest]
Formula: L_time ~ as.factor(Hour_ENTRY) + (1 | ID) + (1 | Follower_ID) +  
    (1 | Social_Group)
   Data: data_PIC_pvalues60

REML criterion at convergence: 267122.6

Scaled residuals: 
    Min      1Q  Median      3Q     Max 
-5.0141 -0.2914  0.2587  0.6128  3.0933 

Random effects:
 Groups       Name        Variance Std.Dev.
 ID           (Intercept) 0.10010  0.3164  
 Follower_ID  (Intercept) 0.07067  0.2658  
 Social_Group (Intercept) 0.03209  0.1791  
 Residual                 1.56642  1.2516  
Number of obs: 80528, groups:  ID, 548; Follower_ID, 548; Social_Group, 36

Fixed effects:
                          Estimate Std. Error         df t value Pr(>|t|)    
(Intercept)              7.040e+00  4.562e-02  1.007e+02 154.335  < 2e-16 ***
as.factor(Hour_ENTRY)1   6.279e-02  4.210e-02  7.973e+04   1.492 0.135811    
as.factor(Hour_ENTRY)2   2.292e-02  4.100e-02  7.973e+04   0.559 0.576104    
as.factor(Hour_ENTRY)3   1.177e-02  4.057e-02  7.977e+04   0.290 0.771666    
as.factor(Hour_ENTRY)4   2.749e-02  3.936e-02  7.987e+04   0.698 0.484975    
as.factor(Hour_ENTRY)5  -1.754e-02  3.730e-02  7.995e+04  -0.470 0.638238    
as.factor(Hour_ENTRY)6  -2.320e-01  3.552e-02  8.009e+04  -6.531 6.56e-11 ***
as.factor(Hour_ENTRY)7  -1.946e-01  3.538e-02  8.012e+04  -5.501 3.78e-08 ***
as.factor(Hour_ENTRY)8  -2.055e-01  3.545e-02  8.010e+04  -5.796 6.81e-09 ***
as.factor(Hour_ENTRY)9  -1.971e-01  3.542e-02  8.010e+04  -5.563 2.65e-08 ***
as.factor(Hour_ENTRY)10 -9.709e-02  3.586e-02  8.012e+04  -2.707 0.006781 ** 
as.factor(Hour_ENTRY)11 -1.261e-01  3.576e-02  8.016e+04  -3.525 0.000424 ***
as.factor(Hour_ENTRY)12 -2.448e-01  3.538e-02  8.018e+04  -6.918 4.61e-12 ***
as.factor(Hour_ENTRY)13 -3.431e-01  3.516e-02  8.022e+04  -9.757  < 2e-16 ***
as.factor(Hour_ENTRY)14 -4.902e-01  3.483e-02  8.024e+04 -14.074  < 2e-16 ***
as.factor(Hour_ENTRY)15 -3.889e-01  3.497e-02  8.025e+04 -11.122  < 2e-16 ***
as.factor(Hour_ENTRY)16 -3.524e-01  3.524e-02  8.020e+04  -9.999  < 2e-16 ***
as.factor(Hour_ENTRY)17 -2.651e-01  3.618e-02  8.013e+04  -7.328 2.36e-13 ***
as.factor(Hour_ENTRY)18 -1.758e-01  3.736e-02  8.003e+04  -4.706 2.53e-06 ***
as.factor(Hour_ENTRY)19 -1.488e-01  3.799e-02  7.993e+04  -3.915 9.04e-05 ***
as.factor(Hour_ENTRY)20 -1.121e-01  3.929e-02  7.985e+04  -2.854 0.004316 ** 
as.factor(Hour_ENTRY)21 -8.209e-02  4.060e-02  7.981e+04  -2.022 0.043180 *  
as.factor(Hour_ENTRY)22 -4.094e-02  4.127e-02  7.971e+04  -0.992 0.321284    
as.factor(Hour_ENTRY)23 -2.708e-02  4.146e-02  7.969e+04  -0.653 0.513764    
---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Correlation matrix not shown by default, as p = 24 > 12.
Use print(x, correlation=TRUE)  or
    vcov(x)        if you need it
print(VarCorr(reduced_model_Follower.lmer), comp = "Variance")
 Groups       Name        Variance
 ID           (Intercept) 0.100102
 Follower_ID  (Intercept) 0.070668
 Social_Group (Intercept) 0.032086
 Residual                 1.566416
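
The random-effect variances above can also be expressed as proportions of the total variance, which makes the relative size of the animal, follower, and social-group effects easier to read. A short sketch, assuming reduced_model_Follower.lmer is still in memory:

vc <- as.data.frame(VarCorr(reduced_model_Follower.lmer))
# the vcov column holds the variance of each random term plus the residual
data.frame(term       = vc$grp,
           variance   = round(vc$vcov, 4),
           proportion = round(vc$vcov / sum(vc$vcov), 3))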

Checking results

table(data_PIC$time_between<=60)

FALSE  TRUE 
32528 80528 
hist(data_PIC$time_between)

hist(data_PIC$time_between[data_PIC$time_between<7200])

hist(data_PIC$time_between[data_PIC$time_between<3600])

table(data_PIC$time_between<=3600)

 FALSE   TRUE 
  2431 110625 
hist(data_PIC_pvalues_lessorequal_60$time_between)

table(data_PIC_pvalues_lessorequal_60$time_between)

    0    60 
45306 35222 
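
Within the <= 60 s subset, time_between takes only the values 0 and 60, which is consistent with ENTRY_TIME and EXIT_TIME being recorded at one-minute resolution (see the head() output under Data Reading). A quick hedged check of that resolution:

# if the feeder timestamps are minute-resolution, both checks should return TRUE
all(second(data_PIC$ENTRY) == 0)
all(data_PIC$time_between %% 60 == 0)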

<= 120 Threshold

#| warning: true
#| echo: true

rm(list = ls())

setwd("C:/Users/anune/OneDrive/Desktop/PIC_DataAnalysis_files")

data_PIC <- read.csv("PIC_65_FIRE.AN.1.csv")

head(data_PIC$ENTRY_TIME)
[1] "10/27/2022 12:51" "10/27/2022 12:00" "10/27/2022 13:03" "10/27/2022 7:27" 
[5] "10/27/2022 8:04"  "10/27/2022 8:41" 
data_PIC <- mutate(data_PIC,
                   ENTRY_DATE = as_date(mdy_hm(ENTRY_TIME, tz = "UTC")),
                   ENTRY      = mdy_hm(ENTRY_TIME, tz = "UTC"),
                   EXIT_DATE  = as_date(mdy_hm(EXIT_TIME, tz = "UTC")),
                   EXIT       = mdy_hm(EXIT_TIME, tz = "UTC"))

summary(data_PIC$ENTRY_DATE)
        Min.      1st Qu.       Median         Mean      3rd Qu.         Max. 
"2022-04-06" "2023-01-02" "2023-02-19" "2023-02-18" "2023-04-08" "2023-06-19" 
data_PIC$PEN <- as.factor(data_PIC$PEN)

data_PIC$Social_Group <- paste(data_PIC$PEN, data_PIC$START_DAY, data_PIC$OFFTEST_DAY, sep = "_")

head(data_PIC$Social_Group)
[1] "B0113_27-Oct-22_2-Jan-23" "B0113_27-Oct-22_2-Jan-23"
[3] "B0113_27-Oct-22_2-Jan-23" "B0113_27-Oct-22_2-Jan-23"
[5] "B0113_27-Oct-22_2-Jan-23" "B0113_27-Oct-22_2-Jan-23"
data_PIC <- group_by(data_PIC, Social_Group)


data_PIC.arrange <- arrange(data_PIC, Social_Group, ENTRY, .by_group = TRUE) %>%
  mutate(line = row_number())


data_PIC.arrange
# A tibble: 114,263 x 22
# Groups:   Social_Group [36]
         ID  LINE    SIRE    DAM LITTER PEN    FARM ENTRY_TIME EXIT_TIME STAY_IN
      <int> <int>   <int>  <int>  <int> <fct> <int> <chr>      <chr>       <int>
 1 97900600    65  8.82e7 9.22e7 7.90e7 B0111   774 3/15/2023~ 3/15/202~     156
 2 97900500    65  9.20e7 9.15e7 7.86e7 B0111   774 3/15/2023~ 3/15/202~     416
 3 97900600    65  8.82e7 9.22e7 7.90e7 B0111   774 3/15/2023~ 3/15/202~     469
 4 97887849    65  8.83e7 9.34e7 7.90e7 B0111   774 3/15/2023~ 3/15/202~      48
 5 97900500    65  9.20e7 9.15e7 7.86e7 B0111   774 3/15/2023~ 3/15/202~     166
 6 97900600    65  8.82e7 9.22e7 7.90e7 B0111   774 3/15/2023~ 3/15/202~     113
 7 97887847    65  8.83e7 9.34e7 7.90e7 B0111   774 3/15/2023~ 3/15/202~       7
 8 97900600    65  8.82e7 9.22e7 7.90e7 B0111   774 3/15/2023~ 3/15/202~     103
 9 97887847    65  8.83e7 9.34e7 7.90e7 B0111   774 3/15/2023~ 3/15/202~      24
10 97900231    65  8.83e7 9.37e7 7.90e7 B0111   774 3/15/2023~ 3/15/202~    1862
# i 114,253 more rows
# i 12 more variables: FEED_INTK <int>, ENTRY_WT <int>, EXIT_WT <int>,
#   FEEDER_NO <int>, START_DAY <chr>, OFFTEST_DAY <chr>, ENTRY_DATE <date>,
#   ENTRY <dttm>, EXIT_DATE <date>, EXIT <dttm>, Social_Group <chr>, line <int>
data_PIC.arrange %>%
  dplyr::select(ID, ENTRY, Social_Group)
# A tibble: 114,263 x 3
# Groups:   Social_Group [36]
         ID ENTRY               Social_Group             
      <int> <dttm>              <chr>                    
 1 97900600 2023-03-15 08:06:00 B0111_16-Mar-23_15-May-23
 2 97900500 2023-03-15 08:12:00 B0111_16-Mar-23_15-May-23
 3 97900600 2023-03-15 08:19:00 B0111_16-Mar-23_15-May-23
 4 97887849 2023-03-15 08:34:00 B0111_16-Mar-23_15-May-23
 5 97900500 2023-03-15 08:36:00 B0111_16-Mar-23_15-May-23
 6 97900600 2023-03-15 08:39:00 B0111_16-Mar-23_15-May-23
 7 97887847 2023-03-15 08:44:00 B0111_16-Mar-23_15-May-23
 8 97900600 2023-03-15 08:49:00 B0111_16-Mar-23_15-May-23
 9 97887847 2023-03-15 08:52:00 B0111_16-Mar-23_15-May-23
10 97900231 2023-03-15 09:04:00 B0111_16-Mar-23_15-May-23
# i 114,253 more rows
data_PIC <- data_PIC %>%
  arrange(Social_Group, ENTRY) %>%
  group_by(Social_Group) %>%
  mutate(Follower_ID = lead(ID),
         Follower_Time = lead(ENTRY),
         Follower_Social_Group = lead(Social_Group),
         line = row_number(),
         Hour_ENTRY = hour(ENTRY),
         time_between = as.numeric(Follower_Time - EXIT, units = "secs")) %>%
  filter(time_between < 36000, time_between >= 0)

data_PIC %>%
  mutate(time_between = as.numeric(Follower_Time - ENTRY, units = "secs"),
         lapse_Time = seconds(Follower_Time - ENTRY)) %>%
  dplyr::select(time_between, lapse_Time)
Adding missing grouping variables: `Social_Group`
# A tibble: 113,056 x 3
# Groups:   Social_Group [36]
   Social_Group              time_between lapse_Time
   <chr>                            <dbl> <Period>  
 1 B0111_16-Mar-23_15-May-23          360 360S      
 2 B0111_16-Mar-23_15-May-23          420 420S      
 3 B0111_16-Mar-23_15-May-23          900 900S      
 4 B0111_16-Mar-23_15-May-23          120 120S      
 5 B0111_16-Mar-23_15-May-23          180 180S      
 6 B0111_16-Mar-23_15-May-23          300 300S      
 7 B0111_16-Mar-23_15-May-23          300 300S      
 8 B0111_16-Mar-23_15-May-23          180 180S      
 9 B0111_16-Mar-23_15-May-23          720 720S      
10 B0111_16-Mar-23_15-May-23         1920 1920S     
# i 113,046 more rows
class(data_PIC)
[1] "grouped_df" "tbl_df"     "tbl"        "data.frame"
data_PIC_pvalues_lessorequal_120 <- filter(data_PIC, time_between <= 120) %>%
  mutate(TIME_FEEDER = as.numeric(STAY_IN))


dim(data_PIC_pvalues_lessorequal_120)
[1] 86045    28
summary(data_PIC_pvalues_lessorequal_120$TIME_FEEDER)
   Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
      5     595    1352    1423    2065   10169 
head(data_PIC_pvalues_lessorequal_120)
# A tibble: 6 x 28
# Groups:   Social_Group [1]
        ID  LINE     SIRE    DAM LITTER PEN    FARM ENTRY_TIME EXIT_TIME STAY_IN
     <int> <int>    <int>  <int>  <int> <fct> <int> <chr>      <chr>       <int>
1 97900500    65 92013924 9.15e7 7.86e7 B0111   774 3/15/2023~ 3/15/202~     416
2 97887849    65 88291968 9.34e7 7.90e7 B0111   774 3/15/2023~ 3/15/202~      48
3 97900500    65 92013924 9.15e7 7.86e7 B0111   774 3/15/2023~ 3/15/202~     166
4 97900600    65 88153118 9.22e7 7.90e7 B0111   774 3/15/2023~ 3/15/202~     103
5 97900231    65 88291968 9.37e7 7.90e7 B0111   774 3/15/2023~ 3/15/202~    1862
6 97900500    65 92013924 9.15e7 7.86e7 B0111   774 3/15/2023~ 3/15/202~     965
# i 18 more variables: FEED_INTK <int>, ENTRY_WT <int>, EXIT_WT <int>,
#   FEEDER_NO <int>, START_DAY <chr>, OFFTEST_DAY <chr>, ENTRY_DATE <date>,
#   ENTRY <dttm>, EXIT_DATE <date>, EXIT <dttm>, Social_Group <chr>,
#   Follower_ID <int>, Follower_Time <dttm>, Follower_Social_Group <chr>,
#   line <int>, Hour_ENTRY <int>, time_between <dbl>, TIME_FEEDER <dbl>
data_PIC_pvalues120 <- data_PIC_pvalues_lessorequal_120 %>%
  mutate(L_time = log(TIME_FEEDER))
umbral <- 120  # umbral = threshold (seconds)

data_PIC_pvalues_120 <- data_PIC_pvalues120 %>%
  mutate(time_between_group = case_when(
    time_between <= umbral ~ "immediate",
    time_between > umbral ~ "distant"
  )) %>%
  group_by(time_between_group) %>%
  group_modify(~ broom.mixed::tidy(lme4::lmer(L_time ~ (1 | ID) + (1 | Follower_ID) +
                                                (1 | Social_Group) + as.factor(Hour_ENTRY),
                                              data = .x))) %>%
  filter(effect == "ran_pars")

total_counts2 <- data_PIC_pvalues120 %>%
  mutate(time_between_group = case_when(
    time_between <= umbral ~ "immediate",
    time_between > umbral ~ "distant"
  )) %>%
  group_by(time_between_group) %>%
  summarise(total = n())  

print(total_counts2)
# A tibble: 1 x 2
  time_between_group total
  <chr>              <int>
1 immediate          86045
reduced_model_Follower.lmer_120 <- lmer(
  L_time ~ as.factor(Hour_ENTRY) +  (1 | ID) +  (1 | Follower_ID) + (1 | Social_Group),
  data = data_PIC_pvalues120
)

summary(reduced_model_Follower.lmer_120)
Linear mixed model fit by REML. t-tests use Satterthwaite's method [
lmerModLmerTest]
Formula: L_time ~ as.factor(Hour_ENTRY) + (1 | ID) + (1 | Follower_ID) +  
    (1 | Social_Group)
   Data: data_PIC_pvalues120

REML criterion at convergence: 286819

Scaled residuals: 
    Min      1Q  Median      3Q     Max 
-4.8100 -0.2893  0.2617  0.6139  3.0470 

Random effects:
 Groups       Name        Variance Std.Dev.
 ID           (Intercept) 0.09828  0.3135  
 Follower_ID  (Intercept) 0.06853  0.2618  
 Social_Group (Intercept) 0.03112  0.1764  
 Residual                 1.59463  1.2628  
Number of obs: 86045, groups:  ID, 548; Follower_ID, 548; Social_Group, 36

Fixed effects:
                          Estimate Std. Error         df t value Pr(>|t|)    
(Intercept)              7.031e+00  4.443e-02  9.615e+01 158.255  < 2e-16 ***
as.factor(Hour_ENTRY)1   5.540e-02  4.010e-02  8.524e+04   1.382 0.167092    
as.factor(Hour_ENTRY)2   2.718e-02  3.916e-02  8.524e+04   0.694 0.487573    
as.factor(Hour_ENTRY)3   1.496e-02  3.881e-02  8.528e+04   0.385 0.699879    
as.factor(Hour_ENTRY)4   2.491e-02  3.776e-02  8.538e+04   0.660 0.509412    
as.factor(Hour_ENTRY)5  -4.531e-02  3.575e-02  8.548e+04  -1.268 0.204963    
as.factor(Hour_ENTRY)6  -2.662e-01  3.405e-02  8.562e+04  -7.820 5.34e-15 ***
as.factor(Hour_ENTRY)7  -2.120e-01  3.408e-02  8.565e+04  -6.221 4.96e-10 ***
as.factor(Hour_ENTRY)8  -2.105e-01  3.423e-02  8.563e+04  -6.150 7.78e-10 ***
as.factor(Hour_ENTRY)9  -2.041e-01  3.416e-02  8.562e+04  -5.976 2.29e-09 ***
as.factor(Hour_ENTRY)10 -1.022e-01  3.463e-02  8.564e+04  -2.952 0.003160 ** 
as.factor(Hour_ENTRY)11 -1.366e-01  3.454e-02  8.569e+04  -3.956 7.63e-05 ***
as.factor(Hour_ENTRY)12 -2.432e-01  3.422e-02  8.571e+04  -7.108 1.19e-12 ***
as.factor(Hour_ENTRY)13 -3.496e-01  3.398e-02  8.575e+04 -10.287  < 2e-16 ***
as.factor(Hour_ENTRY)14 -4.930e-01  3.367e-02  8.578e+04 -14.644  < 2e-16 ***
as.factor(Hour_ENTRY)15 -3.828e-01  3.379e-02  8.578e+04 -11.329  < 2e-16 ***
as.factor(Hour_ENTRY)16 -3.483e-01  3.404e-02  8.573e+04 -10.233  < 2e-16 ***
as.factor(Hour_ENTRY)17 -2.606e-01  3.491e-02  8.565e+04  -7.465 8.38e-14 ***
as.factor(Hour_ENTRY)18 -1.731e-01  3.601e-02  8.555e+04  -4.808 1.53e-06 ***
as.factor(Hour_ENTRY)19 -1.389e-01  3.662e-02  8.544e+04  -3.793 0.000149 ***
as.factor(Hour_ENTRY)20 -1.079e-01  3.769e-02  8.536e+04  -2.863 0.004198 ** 
as.factor(Hour_ENTRY)21 -8.429e-02  3.888e-02  8.531e+04  -2.168 0.030143 *  
as.factor(Hour_ENTRY)22 -3.660e-02  3.941e-02  8.523e+04  -0.929 0.353117    
as.factor(Hour_ENTRY)23 -1.321e-02  3.967e-02  8.520e+04  -0.333 0.739115    
---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Correlation matrix not shown by default, as p = 24 > 12.
Use print(x, correlation=TRUE)  or
    vcov(x)        if you need it
print(VarCorr(reduced_model_Follower.lmer_120), comp = "Variance")
 Groups       Name        Variance
 ID           (Intercept) 0.098279
 Follower_ID  (Intercept) 0.068531
 Social_Group (Intercept) 0.031115
 Residual                 1.594629
hist(data_PIC$time_between)

hist(data_PIC$time_between[data_PIC$time_between<7200])

hist(data_PIC$time_between[data_PIC$time_between<3600])

table(data_PIC$time_between<=3600)

 FALSE   TRUE 
  2431 110625 
table(data_PIC$time_between > 120)

FALSE  TRUE 
86045 27011 
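
Because rm(list = ls()) at the start of this section cleared the earlier <= 60 s fit, a direct side-by-side comparison of the variance components requires keeping both model objects in one session (or re-fitting the first model). A sketch under that assumption, reusing the object names from both sections:

vc60  <- as.data.frame(VarCorr(reduced_model_Follower.lmer))      # <= 60 s model (assumed still available)
vc120 <- as.data.frame(VarCorr(reduced_model_Follower.lmer_120))  # <= 120 s model

data.frame(term         = vc60$grp,
           variance_60  = round(vc60$vcov, 4),
           variance_120 = round(vc120$vcov, 4))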

The echo: false option disables the printing of code (only output is displayed).
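
For example, a chunk written as below (a generic illustration of the Quarto chunk-option syntax, not a chunk taken from this document) would still run, but only its output would appear in the rendered file:

```{r}
#| echo: false
summary(data_PIC$ENTRY_DATE)
```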