Let’s see if we can roll our own meta-regression using stats4’s mle function and our own definition of the likelihood.
library(dplyr)
set.seed(1)
tau <- 2
n <- 10
data <- data.frame(
  study = 1:n
) %>% mutate(
  x = 1:n,
  sem = rnorm(n)^2,                       # per-study standard error
  y = x + rnorm(n, sem) + rnorm(n, tau)   # observed effect: linear in x plus noise
)
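Not in the original walk-through, but a quick look at the simulated data frame helps anchor what follows: each study has a covariate x, a per-study standard error sem, and an observed effect y.
# quick peek at the simulated studies (sketch, not part of the original post)
head(data, 3)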
library(ggplot2)
ggplot(data, aes(x, y)) +
  geom_linerange(aes(ymin = y - sem - tau,
                     ymax = y + sem + tau),
                 color = 'black') +
  geom_linerange(aes(ymin = y - sem,
                     ymax = y + sem),
                 size = 2,
                 color = 'grey') +
  geom_point(size = 2, color = 'red') +
  theme_bw()
model <- lm(y ~ x,
            data = data)
coefficients(summary(model))[,1:2]
Estimate Std. Error
(Intercept) 3.213 0.845
x 0.903 0.136
library(stats4)
negative_log_likelihood <- function(i, x) {
  n <- nrow(data)
  df <- length(start)   # number of estimated coefficients (start is defined below)
  prediction <- x * data$x + i
  e <- data$y - prediction
  s2 <- sum(e^2)
  # A rescaled version of the profiled Gaussian negative log-likelihood:
  # using (n - df)/2 instead of n/2 leaves the point estimates unchanged but
  # makes the Hessian-based standard errors match lm's, which use the
  # unbiased residual variance s2 / (n - df).
  ((n - df)/2) * (log(2*pi) + log(s2) + 1)
}
start <- list(
  i = 0,
  x = 0
)
model <- mle(negative_log_likelihood, start)
summary(model)@coef
Estimate Std. Error
i 3.213 0.845
x 0.903 0.136
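As an extra sanity check (not part of the original comparison), the full covariance matrices should agree too: scaling the profiled likelihood by (n - df)/2 means the Hessian that mle inverts reproduces lm’s vcov, which uses the unbiased residual variance. The ols name below is just for this sketch.
# cross-check: compare the mle covariance matrix with lm's
ols <- lm(y ~ x, data = data)
round(vcov(model), 4)
round(vcov(ols), 4)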
library(metafor)
model <- rma(y ~ x,
             sem^2,          # sampling variances
             method = "FE",
             data = data)
coefficients(summary(model))[,1:2]
estimate se
intrcpt 3.280 0.0475
x 0.983 0.0115
library(stats4)
negative_log_likelihood <- function(i, x) {
  prediction <- x * data$x + i
  error <- data$y - prediction
  # fixed-effect model: each study's error is normal with sd equal to its
  # own standard error
  -sum(dnorm(error, 0, data$sem, log = TRUE))
}
start <- list(
  i = 0,
  x = 0
)
model <- mle(negative_log_likelihood, start)
summary(model)@coef
Estimate Std. Error
i 3.280 0.0475
x 0.983 0.0115
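Another way to see the fixed-effect fit, not shown in the original post: it is just weighted least squares with weights 1/sem^2. The point estimates from lm should match rma’s; the standard errors will not, because lm rescales them by its estimated residual variance while rma treats the sampling variances as known.
# weighted least squares sketch: same point estimates as the FE meta-regression
wls <- lm(y ~ x, weights = 1/sem^2, data = data)
coefficients(summary(wls))[, 1:2]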
library(metafor)
model <- rma(y ~ x,
             sem^2,           # sampling variances
             data = data,
             method = "ML")   # random-effects model, tau^2 estimated by ML
model$tau2   # estimated between-study variance
[1] 0.401
coefficients(summary(model))[,1:2]
estimate se
intrcpt 3.82 0.5126
x 0.86 0.0787
library(stats4)
negative_log_likelihood <- function(i, x, tau2) {
  prediction <- x * data$x + i
  error <- data$y - prediction
  tau2[tau2 < 0] <- 0    # keep the variance non-negative during optimization
  # total sd combines the sampling variance with the between-study variance
  total_sd <- (data$sem^2 + tau2)^0.5
  -sum(dnorm(error, 0, total_sd, log = TRUE))
}
start <- list(
  i = 0,
  x = 0,
  tau2 = 1
)
model <- mle(negative_log_likelihood, start)
summary(model)@coef
Estimate Std. Error
i 3.815 0.5133
x 0.860 0.0788
tau2 0.401 0.2483
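One last cross-check, again not in the original post: both routes maximize the same marginal normal likelihood, so the maximized log-likelihoods should agree up to optimizer tolerance. The rma_ml name is only for this sketch.
# compare maximized log-likelihoods
rma_ml <- rma(y ~ x, sem^2, data = data, method = "ML")
c(metafor = as.numeric(logLik(rma_ml)), mle = as.numeric(logLik(model)))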
Yes, we can.