Using the probability mass function of the geometric distribution (the number of trials needed to obtain the first success),
\[ f(x) = p(1 - p)^{x-1}, \qquad x = 1, 2, \dots \]
we build the likelihood function of a sample of n observations:
\[ L(p)=p(1 - p)^{x_1-1} \cdot p(1 - p)^{x_2-1} \cdot p(1 - p)^{x_3-1} \cdots p(1 - p)^{x_n-1} \]
\[ L(p)=p^{n}(1-p)^{\sum^n_{i=1}x_i-n} \]
Taking logarithms gives the log-likelihood:
\[ \log L(p) = n\log(p) + \left(\sum^n_{i=1}x_i - n\right)\log(1-p) \]
Setting its derivative with respect to p equal to zero yields the closed-form maximum likelihood estimator
\[ \hat{p} = \frac{n}{\sum^n_{i=1}x_i}, \]
which serves as a check on the numerical optimization below.

### Step 1: Generate the data

library(pacman)
p_load(data.table, fixest, lattice, magrittr, ggplot2, knitr, kableExtra, dplyr)

N = 500  # sample size
p = 0.6  # true success probability
# rgeom() returns the number of failures before the first success (support 0, 1, 2, ...);
# add 1 so the data follow f(x) = p(1-p)^(x-1) with support x = 1, 2, ...
data = rgeom(N, p) + 1
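
Before writing the likelihood it is worth checking that the simulated draws behave as expected. The sketch below is an addition (not in the original code): for the trials parametrisation the theoretical mean is 1/p, so with p = 0.6 the sample mean should be close to 1.67.

```r
# Quick sanity check on the simulated sample (added here, not original code)
mean(data)         # should be close to 1/p = 1/0.6
head(table(data))  # frequencies of the smallest counts
```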

### Step 2: Define the log-likelihood function

# Log-likelihood of a geometric sample: n*log(p) + (sum(x) - n)*log(1 - p)
log_like = function(p, x, n){
  return(n*log(p) + (sum(x) - n)*log(1 - p))
}

# Evaluate the log-likelihood at 30 random values of p for a first look
value = runif(30, 0, 1)

plot(value, log_like(value, data, N), main = "Log-likelihood function")
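
The random grid above produces a scatter of points; an evenly spaced grid makes the shape of the curve easier to see. This is only a sketch of an alternative plot, and the helper object `p_grid` is a name introduced here, not part of the original code.

```r
# Evaluate the log-likelihood on an evenly spaced grid (p_grid is a helper
# introduced here) and mark the true value of p
p_grid = seq(0.01, 0.99, by = 0.01)
plot(p_grid, log_like(p_grid, data, N), type = "l",
     main = "Log-likelihood function", xlab = "p", ylab = "log L(p)")
abline(v = p, lty = 2)  # true value used to simulate the data
```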

### Step 3: Maximum likelihood estimation

# optim() minimises by default, so pass the negative log-likelihood;
# p must stay strictly inside (0, 1) for the logarithms to be defined
MLE_estimates = optim(fn = function(p, x, n) -log_like(p, x, n), par = 0.5,
                      lower = 1e-6, upper = 1 - 1e-6, hessian = TRUE,
                      method = "L-BFGS-B", n = N, x = data)

MLE_par = MLE_estimates$par
# The Hessian of the negative log-likelihood is the observed information,
# so its inverse approximates the variance of the estimator
MLE_SE = sqrt(diag(solve(MLE_estimates$hessian)))

MLE = data.table(param = "p",
                 estimate = MLE_par,
                 std_error = MLE_SE)
kable(MLE) %>%
  kable_styling(full_width = F)
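
Because the first-order condition has a closed form, the numerical result can be cross-checked directly. The snippet below is an addition, not part of the original code: it compares the optim() estimate with the closed-form estimator n / sum(x_i) and with the asymptotic standard error implied by the Fisher information of this parametrisation, I(p) = 1 / (p^2 (1 - p)).

```r
# Cross-check (added here, not part of the original code)
p_closed  = N / sum(data)                          # closed-form MLE
se_closed = sqrt(p_closed^2 * (1 - p_closed) / N)  # sqrt of 1 / (n * I(p_hat))
data.table(method    = c("optim", "closed form"),
           estimate  = c(MLE_par, p_closed),
           std_error = c(MLE_SE, se_closed))
```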

### Step 4: Plot the estimates

# Wrapper around log_like so stat_function() can evaluate it over a grid of p,
# holding the data fixed
log_like_graph = function(p, x = data, n = N){
  return(log_like(p, x, n))
}
log_like_graph = Vectorize(log_like_graph, vectorize.args = "p")

p_graph = ggplot(data = data.frame(p = c(0, 1)), mapping = aes(x = p)) +
  stat_function(fun = log_like_graph) +
  xlim(0, 1) + theme_bw() + xlab("p") + ylab("Log-likelihood")

p_graph
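
To make the figure easier to read, the estimate can be marked on the curve. The geom_vline layer below is an addition and was not part of the original figure.

```r
# Highlight the maximum likelihood estimate on the log-likelihood curve
p_graph + geom_vline(xintercept = MLE_par, linetype = "dashed", colour = "red")
```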
