lme4/inst/CITATION
bibentry(bibtype = "Article",
title = "Fitting Linear Mixed-Effects Models Using {lme4}",
author = c(person(given = "Douglas",
family = "Bates"),
person(given = "Martin",
family = "M{\\\"a}chler"),
person(given = "Ben",
family = "Bolker"),
person(given = "Steve",
family = "Walker")),
journal = "Journal of Statistical Software",
year = "2015",
volume = "67",
number = "1",
pages = "1--48",
doi = "10.18637/jss.v067.i01",
header = "To cite lme4 in publications use:",
textVersion =
paste("Douglas Bates, Martin Maechler, Ben Bolker, Steve Walker (2015).",
"Fitting Linear Mixed-Effects Models Using lme4.",
"Journal of Statistical Software, 67(1), 1-48.",
"doi:10.18637/jss.v067.i01.")
)
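
For reference, an illustrative sketch of how this entry is retrieved once the
package is installed (citation() and toBibtex() are standard utils functions):

    citation("lme4")              ## prints the formatted reference above
    toBibtex(citation("lme4"))    ## renders the entry as BibTeX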

lme4/inst/tests/test-resids.R
library("testthat")
library("lme4")
context("residuals")
test_that("lmer", {
fm1 <- lmer(Reaction ~ Days + (Days|Subject),sleepstudy)
## fm2 <- lmer(Reaction ~ Days + (Days|Subject),sleepstudy,
## control=lmerControl(calc.derivs=FALSE))
## all.equal(resid(fm1),resid(fm2))
expect_equal(range(resid(fm1)), c(-101.1789,132.5466), tolerance=1e-6)
expect_equal(range(resid(fm1, scaled=TRUE)), c(-3.953567,5.179260), tolerance=1e-6)
expect_equal(resid(fm1,"response"),resid(fm1))
expect_equal(resid(fm1,"response"),resid(fm1,type="working"))
expect_equal(resid(fm1,"deviance"),resid(fm1,type="pearson"))
expect_equal(resid(fm1),resid(fm1,type="pearson")) ## because no weights given
expect_error(residuals(fm1,"partial"),
"partial residuals are not implemented yet")
sleepstudyNA <- sleepstudy
na_ind <- c(10,50)
sleepstudyNA[na_ind,"Days"] <- NA
fm1NA <- update(fm1,data=sleepstudyNA)
fm1NA_exclude <- update(fm1,data=sleepstudyNA,na.action="na.exclude")
expect_equal(length(resid(fm1)),length(resid(fm1NA_exclude)))
expect_true(all(is.na(resid(fm1NA_exclude)[na_ind])))
expect_true(!any(is.na(resid(fm1NA_exclude)[-na_ind])))
})
test_that("glmer", {
gm1 <- glmer(incidence/size ~ period + (1|herd), cbpp,
family=binomial, weights=size)
gm2 <- update(gm1,control=glmerControl(calc.derivs=FALSE))
gm1.old <- update(gm1,control=glmerControl(calc.derivs=FALSE,
use.last.params=TRUE))
expect_equal(resid(gm1),resid(gm2))
## y, wtres, mu change ??
## FIXME: why does turning on derivative calculation make these tests fail?
expect_equal(range(resid(gm1.old)), c(-3.197512,2.356677), tolerance=1e-6)
expect_equal(range(resid(gm1)), c(-3.1975034,2.35668826), tolerance=1e-6)
expect_equal(range(resid(gm1.old, "response")), c(-0.1946736,0.3184579), tolerance=1e-6)
expect_equal(range(resid(gm1,"response")),c(-0.194674747774946, 0.318458889275477))
expect_equal(range(resid(gm1.old, "pearson")), c(-2.381643,2.879069),tolerance=1e-5)
expect_equal(range(resid(gm1,"pearson")), c(-2.38163599828335, 2.87908806084918))
expect_equal(range(resid(gm1.old, "working")), c(-1.241733,5.410587),tolerance=1e-5)
expect_equal(range(resid(gm1, "working")), c(-1.24173431447365, 5.41064465283686))
expect_equal(resid(gm1),resid(gm1,scaled=TRUE)) ## since sigma==1
expect_error(resid(gm1,"partial"),
"partial residuals are not implemented yet")
cbppNA <- cbpp
na_ind <- c(10,50)
cbppNA[na_ind,"period"] <- NA
gm1NA <- update(gm1,data=cbppNA)
gm1NA_exclude <- update(gm1,data=cbppNA,na.action="na.exclude")
expect_equal(length(resid(gm1)),length(resid(gm1NA_exclude)))
expect_true(all(is.na(resid(gm1NA_exclude)[na_ind])))
expect_true(!any(is.na(resid(gm1NA_exclude)[-na_ind])))
})

lme4/inst/tests/test-utils.R
library("testthat")
library("lme4")
context("Utilities (including *non*-exported ones")
test_that("namedList", {
nList <- lme4:::namedList
a <- b <- c <- 1
expect_identical(nList(a,b,c), list(a = 1, b = 1, c = 1))
expect_identical(nList(a,b,d=c),list(a = 1, b = 1, d = 1))
expect_identical(nList(a, d=pi, c), list(a = 1, d = pi, c = 1))
})
test_that("Var-Cov factor conversions", { ## from ../../R/vcconv.R
mlist2vec <- lme4:::mlist2vec
Cv_to_Vv <- lme4:::Cv_to_Vv
Cv_to_Sv <- lme4:::Cv_to_Sv
Sv_to_Cv <- lme4:::Sv_to_Cv
Vv_to_Cv <- lme4:::Vv_to_Cv
##
set.seed(1); cvec1 <- sample(10, 6)
v1 <- Cv_to_Vv(cvec1)
expect_equal(unname(v1), structure(c(9, 12, 15, 65, 34, 93), clen = 3))
expect_equal(2, as.vector(Vv_to_Cv(Cv_to_Vv(2))))
expect_equivalent(c(v1, 1), Cv_to_Vv(cvec1, s=3) / 3^2)
expect_equal(as.vector(ss1 <- Sv_to_Cv(Cv_to_Sv(cvec1))), cvec1)
expect_equal(as.vector(vv1 <- Vv_to_Cv(Cv_to_Vv(cvec1))), cvec1)
## for length-1 matrices, Cv_to_Sv should be equivalent
## to multiplying Cv by sigma and appending sigma ....
clist2 <- list(matrix(1),matrix(2),matrix(3))
cvec2 <- mlist2vec(clist2)
expect_equal(cvec2, structure(1:3, clen = rep(1,3)), tolerance=0)
expect_true(all((cvec3 <- Cv_to_Sv(cvec2, s=2)) == c(cvec2*2,2)))
n3 <- length(cvec3)
expect_equivalent(Sv_to_Cv(cvec3, n=rep(1,3), s=2), cvec3[-n3]/cvec3[n3])
})
test_that("nobar", {
rr <- lme4:::RHSForm
expect_equal(nobars(y~1+(1|g)), y~1)
expect_equal(nobars(y~1|g), y~1)
expect_equal(nobars(y~1+(1||g)), y~1)
expect_equal(nobars(y~1||g), y~1)
expect_equal(nobars(y~1+(x:z|g)), y~1)
expect_equal(nobars(y~1+(x*z|g/h)), y~1)
expect_equal(nobars(y~(1|g)+x+(x|h)), y~x)
expect_equal(nobars(y~(1|g)+x+(x+z|h)), y~x)
expect_equal(nobars(~1+(1|g)), ~1)
expect_equal(nobars(~(1|g)), ~1)
expect_equal(nobars(rr(y~1+(1|g))), 1)
expect_equal(nobars(rr(y~(1|g))), 1)
})

lme4/inst/tests/test-doubleVertNotation.R
library("lme4")
library("testthat")
context("testing '||' notation for independent ranefs")
test_that("basic intercept + slope '||' works", {
expect_equivalent(
lFormula(Reaction ~ Days + (Days||Subject), sleepstudy)$reTrms,
lFormula(Reaction ~ Days + (1|Subject) + (0 + Days|Subject), sleepstudy)$reTrms)
expect_equivalent(
fitted(lmer(Reaction ~ Days + (Days||Subject), sleepstudy)),
fitted(lmer(Reaction ~ Days + (1|Subject) + (0 + Days|Subject), sleepstudy)))
})
test_that("'||' works with nested, multiple, or interaction terms" , {
#works with nested
expect_equivalent(findbars(y ~ (x || id / id2)),
findbars(y ~ (1 | id / id2) + (0 + x | id / id2)))
#works with multiple
expect_equivalent(findbars(y ~ (x1 + x2 || id / id2) + (x3 | id3) + (x4 || id4)),
findbars(y ~ (1 | id / id2) + (0 + x1 | id / id2) +
(0 + x2 | id / id2) + (x3 | id3) + (1 | id4) +
(0 + x4| id4)))
#interactions:
expect_equivalent(findbars(y ~ (x1*x2 || id)),
findbars(y ~ (1 | id) + (0+x1 | id) + (0 + x2 | id) +
(0 + x1:x2 | id)))
})
test_that("quoted terms work", {
## used to fail in test-oldRZXFailure.R
f <- quote(crab.speciesS + crab.sizeS +
crab.speciesS:crab.sizeS + (snail.size | plot))
expect_equivalent(findbars(f)[[1]], (~(snail.size|plot))[[2]][[2]] )
})
test_that("leaves superfluous '||' alone", {
expect_equivalent(findbars(y ~ z + (0 + x || id)),
findbars(y ~ z + (0 + x | id)))
})
test_that("plays nice with parens in fixed or random formulas", {
expect_equivalent(findbars(y ~ (z + x)^2 + (x || id)),
findbars(y ~ (z + x)^2 + (1 | id) + (0 + x | id)))
expect_equivalent(findbars(y ~ ((x || id)) + (x2|id)),
findbars(y ~ (1 | id) + (0 + x | id) + (x2|id)))
})
test_that("update works as expected", {
m <- lmer(Reaction ~ Days + (Days || Subject), sleepstudy)
expect_equivalent(fitted(update(m, .~.-(0 + Days | Subject))),
fitted(lmer(Reaction ~ Days + (1|Subject), sleepstudy)))
})

lme4/inst/tests/napredict2.R
## refit with missing
library(lme4)
library(testthat)
## baseline model
sleepstudy2 <- sleepstudy
rownames(sleepstudy2) <- paste0("a",rownames(sleepstudy2))
fm1 <- lmer(Reaction~Days+(Days|Subject),sleepstudy2)
sleepstudyNA <- sleepstudy2
sleepstudyNA$Reaction[1:3] = NA
## na.omit
fm2 <- update(fm1,data=sleepstudyNA,
control=lmerControl(check.conv.grad="ignore"))
expect_equal(head(names(fitted(fm1))),paste0("a",1:6))
expect_equal(head(names(fitted(fm2))),paste0("a",4:9))
expect_equal(names(predict(fm2)),names(fitted(fm2)))
expect_equal(length(p1 <- predict(fm2)),177)
expect_equal(length(p2 <- predict(fm2,na.action=na.exclude)),180)
expect_equal(length((s1 <- simulate(fm1,1))[[1]]),180)
expect_equal(length((s2 <- simulate(fm2,1))[[1]]),177)
expect_equal(head(rownames(s1)),paste0("a",1:6))
expect_equal(head(rownames(s2)),paste0("a",4:9))
## na.pass (pretty messed up)
fm3 <- update(fm1,data=sleepstudyNA,
control=lmerControl(check.conv.grad="ignore"),
na.action=na.pass)
sleepstudyNA2 <- sleepstudy2
sleepstudyNA2$Days[1:3] = NA
library(testthat)
expect_error(fm4 <- update(fm1,data=sleepstudyNA2,
control=lmerControl(check.conv.grad="ignore"),
na.action=na.pass),"NA in Z")
expect_is(suppressWarnings(confint(fm2,method="boot",nsim=3)),"matrix")
## fit.na.action <- attr(mfnew, "na.action") ## line 270
cake2 <- rbind(cake,tail(cake,1))
cake2[nrow(cake2),"angle"] <- NA
fm0 <- lmer(angle ~ recipe * temperature + (1|recipe:replicate), cake)
fm1 <- update(fm0,data=cake2)
expect_that(update(fm1,na.action=na.fail),
throws_error("missing values in object"))
fm1_omit <- update(fm1,na.action=na.omit)
## check equal:
expect_true(all.equal(fixef(fm0),fixef(fm1)))
expect_true(all.equal(VarCorr(fm0),VarCorr(fm1)))
expect_true(all.equal(ranef(fm0),ranef(fm1)))
## works, but doesn't make much sense
fm1_pass <- update(fm1,na.action=na.pass)
expect_true(all(is.na(fitted(fm1_pass))))
fm1_exclude <- update(fm1,na.action=na.exclude)
expect_equal(length(fitted(fm1_omit)),270)
expect_equal(length(fitted(fm1_exclude)),271)
## FIXME: fails on Windows (not on Linux!)
expect_true(is.na(tail(predict(fm1_exclude),1)))
## test predict.lm
d <- data.frame(x=1:10,y=c(rnorm(9),NA))
lm1 <- lm(y~x,data=d,na.action=na.exclude)
predict(lm1)
predict(lm1,newdata=data.frame(x=c(1:4,NA)))
## Triq examples ...
m.lmer <- lmer (angle ~ temp + (1 | recipe) + (1 | replicate), data=cake)
# Create new data frame with some NAs in fixed effect
cake2 <- cake
cake2$temp[1:5] <- NA
p1_pass <- predict(m.lmer, newdata=cake2, re.form=NA, na.action=na.pass)
expect_true(length(p1_pass)==nrow(cake2))
expect_true(all(is.na(p1_pass[1:5])))
p1_omit <- predict(m.lmer, newdata=cake2, re.form=NA, na.action=na.omit)
p1_exclude <- predict(m.lmer, newdata=cake2, re.form=NA, na.action=na.exclude)
expect_true(length(p1_omit)==nrow(na.omit(cake2)))
expect_true(length(p1_exclude)==nrow(cake2))
expect_true(all.equal(c(na.omit(p1_exclude)),p1_omit))
expect_that(predict(m.lmer, newdata=cake2, re.form=NA, na.action=na.fail),
throws_error("missing values in object"))
## now try it with re.form==NULL
p2_pass <- predict(m.lmer, newdata=cake2, re.form=NULL, na.action=na.pass)
expect_true(length(p2_pass)==nrow(cake2))
expect_true(all(is.na(p2_pass[1:5])))
p2_omit <- predict(m.lmer, newdata=cake2, re.form=NULL, na.action=na.omit)
p2_exclude <- predict(m.lmer, newdata=cake2, re.form=NULL, na.action=na.exclude)
expect_true(length(p2_omit)==nrow(na.omit(cake2)))
expect_true(all.equal(c(na.omit(p2_exclude)),p2_omit))
expect_that(predict(m.lmer, newdata=cake2, re.form=NULL, na.action=na.fail),
throws_error("missing values in object"))
## experiment with NA values in random effects -- should get
## treated as new levels when allow.new.levels=TRUE
cake3 <- cake
cake3$replicate[1:5] <- NA
expect_that(predict(m.lmer, newdata=cake3, re.form=NULL),
throws_error("NAs are not allowed in prediction data"))
p4 <- predict(m.lmer, newdata=cake3, re.form=NULL, allow.new.levels=TRUE)
p4B <- predict(m.lmer, newdata=cake3, re.form=~1|recipe, allow.new.levels=TRUE)
expect_true(all.equal(p4[1:5],p4B[1:5]))
p4C <- predict(m.lmer, newdata=cake3, re.form=NA)
library(lme4)
library(testthat)
d <- data.frame(x=runif(100),f=factor(rep(1:10,10)))
set.seed(101)
u <- rnorm(10)
d <- transform(d,y=rnorm(100,1+2*x+u[f],0.2))
d0 <- d
d[c(3,5,7),"x"] <- NA
## 'omit' and 'exclude' are the only choices under which
## we will see NA values in the results
fm0 <- lmer(y~x+(1|f),data=d0)
## no 'na.action' attribute because no NAs in this data set
stopifnot(is.null(attr(model.frame(fm0),"na.action")))
fm1 <- update(fm0,data=d)
## no NAs in predict or residuals because na.omit
stopifnot(!any(is.na(predict(fm1))))
stopifnot(!any(is.na(residuals(fm1))))
fm2 <- update(fm1,na.action="na.exclude")
## NAs in predict and residuals because na.exclude
nNA <- sum(is.na(d$x))
stopifnot(sum(is.na(predict(fm2)))==nNA)
stopifnot(sum(is.na(residuals(fm2)))==nNA)
expect_error(fm3 <- lmer(y~x+(1|f),data=d,na.action="na.pass"),
"Error in qr.default")
refit(fm0)
refit(fm1)
refit(fm2)
refit(fm0,runif(100))
refit(fm1,runif(100))
refit(fm2,runif(100))

lme4/inst/tests/test-factors.R
library("testthat")
library("lme4")
context("factor handling in grouping variables")
test_that("factors", {
set.seed(101)
d <- data.frame(x=runif(1000),y=runif(1000),f1=rep(1:10,each=100),f2=rep(1:10,100))
d2 <- transform(d,f1=factor(f1),f2=factor(f2))
expect_that(lm1 <- lmer(y~x+(1|f1/f2),data=d), is_a("lmerMod"))
expect_that(lm2 <- lmer(y~x+(1|f1/f2),data=d2),is_a("lmerMod"))
expect_equivalent(lm1,lm2)
})

lme4/inst/tests/test-oldRZXfailure.R
library(lme4)
library(testthat)
load(system.file("testdata","crabs_randdata00.Rda",package="lme4"))
test_that('RZX is being calculated properly', {
# this is a test for an old problem, documented here:
# http://stevencarlislewalker.github.io/notebook/RZX_problems.html
fr <- cbind(final.snail.density, snails.lost) ~ crab.speciesS + crab.sizeS +
crab.speciesS:crab.sizeS + (snail.size | plot)
m <- glmer(fr, data = randdata00, family = binomial)
expect_that(m, is_a("glmerMod"))
})

lme4/inst/tests/test-glmer.R
library("testthat")
library("lme4")
testLevel <- if (nzchar(s <- Sys.getenv("LME4_TEST_LEVEL")))
as.numeric(s) else 1
gives_error_or_warning <- function (regexp = NULL, all = FALSE, ...)
{
function(expr) {
res <- try(evaluate_promise(expr),silent=TRUE)
no_error <- !inherits(res, "try-error")
if (no_error) {
warnings <- res$warnings
if (!is.null(regexp) && length(warnings) > 0) {
return(matches(regexp, all = FALSE, ...)(warnings))
} else {
return(expectation(length(warnings) > 0, "no warnings or errors given",
paste0(length(warnings), " warnings created")))
}
}
if (!is.null(regexp)) {
return(matches(regexp, ...)(res))
}
else {
expectation(TRUE, "no error thrown", "threw an error")
}
}
}
## expect_that(stop("foo"),gives_error_or_warning("foo"))
## expect_that(warning("foo"),gives_error_or_warning("foo"))
## expect_that(TRUE,gives_error_or_warning("foo"))
## expect_that(stop("bar"),gives_error_or_warning("foo"))
## expect_that(warning("bar"),gives_error_or_warning("foo"))
context("fitting glmer models")
test_that("glmer", {
set.seed(101)
d <- data.frame(z=rbinom(200,size=1,prob=0.5),
f=factor(sample(1:10,200,replace=TRUE)))
expect_warning(glmer(z~ 1|f, d, family=binomial, method="abc"),"Use the nAGQ argument")
expect_warning(glmer(z~ 1|f, d, family=binomial, method="Laplace"),"Use the nAGQ argument")
expect_warning(glmer(z~ 1|f, d, sparseX=TRUE),"has no effect at present")
expect_that(gm1 <- glmer(cbind(incidence, size - incidence) ~ period + (1 | herd),
data = cbpp, family = binomial), is_a("glmerMod"))
expect_that(gm1@resp, is_a("glmResp"))
expect_that(gm1@pp, is_a("merPredD"))
expect_equal(ge1 <- unname(fixef(gm1)), c(-1.39854982537216, -0.992335519118859,
-1.12867532780426, -1.58030423764517),
tolerance=5e-4)
expect_equal(c(VarCorr(gm1)[[1]]), 0.41245527438386, tolerance=6e-4)
### expect_that(family(gm1), equals(binomial()))
### ?? binomial() has an 'initialize' component ... and the order is different
expect_equal(deviance(gm1), 73.47428, tolerance=1e-5)
## was -2L = 184.05267459802
expect_equal(sigma(gm1), 1)
expect_equal(extractAIC(gm1), c(5, 194.052674598026), tolerance=1e-5)
expect_equal(theta <- unname(getME(gm1, "theta")), 0.642226809144453, tolerance=6e-4)
expect_that(X <- getME(gm1, "X"), is_equivalent_to(
model.matrix(model.frame(~ period, data=cbpp), cbpp)))
expect_that(Zt <- getME(gm1, "Zt"), is_a("dgCMatrix"))
expect_equal(dim(Zt), c(15L, 56L))
expect_equal(Zt@x, rep.int(1, 56L))
expect_that(Lambdat <- getME(gm1, "Lambdat"), is_a("dgCMatrix"))
expect_equivalent(as(Lambdat, "matrix"), diag(theta, 15L, 15L))
expect_is(gm1_probit <- update(gm1,family=binomial(link="probit")),"merMod")
expect_equal(family(gm1_probit)$link,"probit")
## FIXME: test user-specified/custom family?
expect_error(glFormula(cbind(incidence, size - incidence) ~ period + (1 | herd),
data = subset(cbpp, herd==levels(herd)[1]), family = binomial),
"must have > 1")
expect_warning(glmer(cbind(incidence, size - incidence) ~ period + (1 | herd),
data = subset(cbpp, herd %in% levels(herd)[1:4]),
family = binomial,
control=glmerControl(check.nlev.gtreq.5="warning")),
"< 5 sampled levels")
expect_warning(fm1. <- glmer(Reaction ~ Days + (Days|Subject), sleepstudy),
regexp="calling .* with family=gaussian .* as a shortcut")
options(warn=2)
options(glmerControl=list(junk=1,check.conv.grad="ignore"))
expect_warning(glmer(z~ 1|f, d, family=binomial),
"some options")
options(glmerControl=NULL)
cbppX <- transform(cbpp,prop=incidence/size)
expect_is(glmer(prop ~ period + (1 | herd),
data = cbppX, family = binomial, weights=size),
"glmerMod")
expect_is(glmer(prop ~ period + (1 | herd),
data = cbppX, family = binomial, weights=size, start=NULL),
"glmerMod")
expect_is(glmer(prop ~ period + (1 | herd),
data = cbppX, family = binomial, weights=size, verbose=0L),
"glmerMod")
expect_is(glmer(prop ~ period + (1 | herd),
data = cbppX, family = binomial, weights=size, subset=TRUE),
"glmerMod")
expect_is(glmer(prop ~ period + (1 | herd),
data = cbppX, family = binomial, weights=size, na.action="na.exclude"),
"glmerMod")
expect_is(glmer(prop ~ period + (1 | herd),
data = cbppX, family = binomial, weights=size, offset=rep(0,nrow(cbppX))),
"glmerMod")
expect_is(glmer(prop ~ period + (1 | herd),
data = cbppX, family = binomial, weights=size, contrasts=NULL),
"glmerMod")
expect_is(glmer(prop ~ period + (1 | herd),
data = cbppX, family = binomial, weights=size, devFunOnly=FALSE),
"glmerMod")
expect_is(glmer(prop ~ period + (1 | herd),
data = cbppX, family = binomial, weights=size,
control=glmerControl(optimizer="Nelder_Mead")),
"glmerMod")
expect_is(glmer(prop ~ period + (1 | herd),
data = cbppX, family = binomial, weights=size, control=glmerControl()),
"glmerMod")
options(warn=0)
expect_warning(glmer(prop ~ period + (1 | herd),
data = cbppX, family = binomial, weights=size, junkArg=TRUE),
"extra argument.*disregarded")
if(FALSE) { ## Hadley broke this
expect_warning(glmer(cbind(incidence, size - incidence) ~ period + (1 | herd),
data = cbpp, family = binomial,
control=list()),
"instead of passing a list of class")
expect_warning(glmer(cbind(incidence, size - incidence) ~ period + (1 | herd),
data = cbpp, family = binomial,
control=lmerControl()),
"instead of passing a list of class")
}
##
load(system.file("testdata","radinger_dat.RData",package="lme4"))
mod <- glmer(presabs~predictor+(1|species),family=binomial,
radinger_dat)
expect_is(mod,"merMod")
## tolerance: 32-bit Windows (CRAN) reported ave.diff of 5.33e-8
expect_equal(unname(fixef(mod)), c(0.5425528,6.4289962), tolerance = 4e-7)
set.seed(101)
## complete separation case
d <- data.frame(y=rbinom(1000,size=1,p=0.5),
x=runif(1000),
f=factor(rep(1:20,each=50)),
x2=rep(0:1,c(999,1)))
mod2 <- glmer(y~x+x2+(1|f),data=d,family=binomial,
control=glmerControl(check.conv.hess="ignore",
check.conv.grad="ignore"))
expect_equal(unname(fixef(mod2))[1:2],
c(-0.10036244,0.03548523), tolerance=1e-4)
expect_true(unname(fixef(mod2)[3] < -10))
mod3 <- update(mod2, family=binomial(link="probit"))
# singular Hessian warning
expect_equal(unname(fixef(mod3))[1:2], c(-0.062889, 0.022241), tolerance=1e-4)
expect_true(fixef(mod3)[3] < -4)
mod4 <- update(mod2, family=binomial(link="cauchit"),
control=glmerControl(check.conv.hess="ignore",
check.conv.grad="ignore"))#--> singular Hessian warning
## on-the-fly creation of index variables
if (FALSE) {
## FIXME: fails in testthat context -- 'd' is not found
## in the parent environment of glmer() -- but works fine
## otherwise ...
set.seed(101)
d <- data.frame(y1=rpois(100,1), x=rnorm(100), ID=1:100)
fit1 <- glmer(y1 ~ x+(1|ID),data=d,family=poisson)
fit2 <- update(fit1, .~ x+(1|rownames(d)))
expect_equal(unname(unlist(VarCorr(fit1))),
unname(unlist(VarCorr(fit2))))
}
##
if(testLevel > 1) {
load(system.file("testdata","mastitis.rda",package="lme4"))
t1 <- system.time(g1 <-
glmer(NCM ~ birth + calvingYear + (1|sire) + (1|herd),
mastitis, poisson,
## current (2014-04-24) default:
control=glmerControl(optimizer=c("bobyqa","Nelder_Mead"))))
t2 <- system.time(g2 <- update(g1,
control=glmerControl(optimizer="bobyqa")))
## 20 (then 13.0) seconds N-M vs 8 (then 4.8) seconds bobyqa ...
## problem is fairly ill-conditioned so parameters
## are relatively far apart even though likelihoods are OK
expect_equal(logLik(g1),logLik(g2),tolerance=1e-7)
}
## test bootstrap/refit with nAGQ>1
gm1AGQ <- update(gm1,nAGQ=2)
s1 <- simulate(gm1AGQ)
expect_equal(attr(bootMer(gm1AGQ,fixef),"bootFail"),0)
## do.call(new,...) bug
new <- "foo"
expect_that(gm1 <- glmer(cbind(incidence, size - incidence) ~ period + (1 | herd),
data = cbpp, family = binomial), is_a("glmerMod"))
rm("new")
## test issue #47, from Wolfgang Viechtbauer
## create some data
n <- 100
ai <- rep(0:1, each = n/2)
bi <- 1-ai
ci <- c(rep(0,42), rep(1,8), rep(0,18), rep(1,32))
di <- 1-ci
event <- c(rbind(ai,ci))
group <- rep(c(1,0), times=n)
id <- rep(1:n, each=2)
gm3 <- glmer(event ~ group + (1 | id), family=binomial, nAGQ=21)
sd3 <- sqrt(diag(vcov(gm3)))
expect_equal(sd3, c(0.4254254, 0.424922), tolerance=1e-5)
expect_warning(vcov(gm3,use.hessian=FALSE), "finite-difference Hessian")
expect_equal(suppressWarnings(sqrt(diag(vcov(gm3,use.hessian=FALSE)))),
c(0.3840921, 0.3768747), tolerance=1e-7)
expect_equal(sd3, unname(coef(summary(gm3))[,"Std. Error"]))
## test non-pos-def finite-difference Hessian ...
if(getRversion() > "3.0.0") {
## saved fits are not safe with old R versions
L <- load(system.file("testdata","polytomous_vcov_ex.RData",
package="lme4", mustWork=TRUE))
expect_warning(vcov(polytomous_vcov_ex),"falling back to var-cov")
}
## damage Hessian to make it singular
## (example thanks to J. Dushoff)
gm1H <- gm1
gm1H@optinfo$derivs$Hessian[5,] <- 0
expect_warning(vcov(gm1H),"falling back to var-cov")
## test convergence warnings
L <- load(system.file("testdata","gopherdat2.RData",
package="lme4", mustWork=TRUE))
g0 <- glmer(shells~prev + (1|Site)+offset(log(Area)),
family=poisson, data=Gdat)
## fit year as factor: OK
gc <- glmerControl(check.conv.grad="stop")
expect_is(update(g0,.~.+factor(year), control=gc), "glmerMod")
## error/warning with year as numeric:
## don't have full knowledge of which platforms lead to which
## results, and can't detect whether we're running on valgrind,
## which changes the result on 32-bit linux ...
## SEGFAULT on MacOS? why?
if (FALSE) {
expect_that(update(g0,.~.+year),
gives_error_or_warning("(failed to converge|pwrssUpdate did not converge)"))
}
## ("(failed to converge|pwrssUpdate did not converge in)"))
## if (sessionInfo()$platform=="i686-pc-linux-gnu (32-bit)") {
## expect_warning(update(g0, .~. +year), "failed to converge")
## } else {
## ## MacOS x86_64-apple-darwin10.8.0 (64-bit)
## ## MM's platform
## ## "pwrssUpdate did not converge in (maxit) iterations"
## expect_error(update(g0, .~. +year), "pwrssUpdate did not converge in")
## }
## OK if we scale & center it
expect_is(update(g0,.~. + scale(year), control=gc), "glmerMod")
## not OK if we scale and don't center
expect_warning(update(g0,.~. + scale(year,center=FALSE)),
"failed to converge with max|grad|")
## OK if center and don't scale
expect_is(update(g0,.~. + scale(year,center=TRUE,scale=FALSE),
control=gc),
"glmerMod")
## try higher-order AGQ
expect_is(update(gm1,nAGQ=90),"glmerMod")
expect_error(update(gm1,nAGQ=101),"ord < 101L")
## non-numeric response variables
ss <- transform(sleepstudy, Reaction = as.character(Reaction))
expect_error(glmer(Reaction~(1|Days), family="poisson", data=ss),
"response must be numeric")
expect_error(glmer(Reaction~(1|Days), family="binomial", data=ss),
"response must be numeric or factor")
ss2 <- transform(ss,rr=rep(c(TRUE,FALSE),length.out=nrow(ss)))
## should work OK with logical too
expect_is(glmer(rr~(1|Days),family="binomial",data=ss2),"merMod")
## starting values with log(.) link -- thanks to Eric Weese @ Yale:
grp <- rep(letters[1:5], 20); set.seed(1); x <- rnorm(100)
expect_error(glmer(x ~ 1 + (1|grp), family=gaussian(link="log")),
"valid starting values")
## related to GH 231
## fails on some platforms, skip for now
if (FALSE) {
rr <- gm1@resp$copy()
ff <- setdiff(ls(gm1@resp),c("copy","initialize","initialize#lmResp","ptr",
"updateMu","updateWts","resDev","setOffset","wrss"))
for (i in ff) {
expect_equal(gm1@resp[[i]],rr[[i]])
}
}
})

lme4/inst/tests/test-summary.R
library("testthat")
library("lme4")
context("summarizing/printing models")
test_that("lmer", {
set.seed(0)
J <- 8
n <- 10
N <- J * n
beta <- c(5, 2, 4)
u <- matrix(rnorm(J * 3), J, 3)
x.1 <- rnorm(N)
x.2 <- rnorm(N)
g <- rep(1:J, rep(n, J))
y <- 1 * (beta[1] + u[g,1]) +
x.1 * (beta[2] + u[g,2]) +
x.2 * (beta[3] + u[g,3]) +
rnorm(N)
summary(lmer(y ~ x.1 + x.2 + (1 + x.1 | g)))
summary(lmer(y ~ x.1 + x.2 + (1 + x.1 + x.2 | g)))
})

lme4/inst/tests/tmp.html
lme4 News
CHANGES IN VERSION 1.1-7
BUG FIXES
change gradient testing from absolute to relative
CHANGES IN VERSION 1.1-6
This version incorporates no changes in functionality, just
modifications to testing and dependencies for CRAN/backward compatibility.
BUG FIXES
change drop1 example to prevent use of old/incompatible
pbkrtest versions, for 2.15.3 compatibility
explicitly require(mlmRev) for tests to prevent cyclic
dependency
bump RcppEigen Imports: requirement from >0.3.1.2.3 to
>=0.3.2.0; Rcpp dependency to >= 0.10.5
CHANGES IN VERSION 1.1-5
BUG FIXES
improved NA handling in simulate and refit
made internal handling of weights/offset
arguments slightly more robust (Github #191)
handle non-positive-definite estimated fixed effect
variance-covariance matrices slightly more generally/robustly
(fall back on RX approximation, with a warning,
if finite-difference Hessian
is non-PD; return NA matrix if RX approximation is
also bad)
MINOR USER-VISIBLE CHANGES
Added output specifying when Gauss-Hermite quadrature
was used to fit the model, and specifying number of GHQ points
(Github #190)
CHANGES IN VERSION 1.1-4
BUG FIXES
Models with prior weights returned an incorrect sigma and
deviance (Github issue #155). The deviance bug was only a
practical issue in model comparisons, not with inferences given a
particular model. Both bugs are now fixed.
Profiling failed in some cases for models with vector random
effects (Github issue #172)
Standard errors of fixed effects are now computed
from the approximate Hessian by default (see the
use.hessian argument in vcov.merMod); this
gives better (correct) answers when the estimates of
the random- and fixed-effect parameters are correlated
(Github #47)
MAJOR USER-VISIBLE CHANGES
The default optimizer for lmer fits has been
switched from "Nelder_Mead" to "bobyqa" because we have
generally found the latter to be more reliable. To switch
back to the old behaviour,
use control=lmerControl(optimizer="Nelder_Mead").
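
For example, an illustrative sketch using the built-in sleepstudy data (the
particular model is hypothetical, chosen only to show the control argument):

    fm_nm <- lmer(Reaction ~ Days + (Days | Subject), sleepstudy,
                  control = lmerControl(optimizer = "Nelder_Mead"))
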
Better handling of rank-deficient/overparameterized
fixed-effect model matrices; see check.rankX option
to [g]lmerControl. The default value is
"message+drop.cols", which automatically drops redundant
columns and issues a message (not a warning). (Github #144)
MINOR USER-VISIBLE CHANGES
slight changes in convergence checking; tolerances can
be specified where appropriate, and some default tolerances
have changed (e.g., check.conv.grad)
improved warning messages about rank-deficiency in X and Z
etc. (warnings now try to indicate whether the unidentifiability
is in the fixed- or random-effects part of the model)
predict and simulate now prefer
re.form as the argument to specify which random effects
to condition on, but allow ReForm, REForm, or
REform, giving a message (not a warning) that they are
deprecated (addresses Github #170)
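
An illustrative sketch, where fm1 stands for any fitted merMod object:

    predict(fm1, re.form = NULL)   ## condition on all random effects (default)
    predict(fm1, re.form = NA)     ## population level: random effects set to zero
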
small fixes for printing consistency in models with no
fixed effects
we previously exported a fortify function identical
to the one found in ggplot2 in order to be able to define a
fortify.merMod S3 method without inducing a dependency on
ggplot2. This has now been unexported to avoid masking
ggplot2's own fortify methods; if you want to
add diagnostic information to the results of a model, use
fortify.merMod explicitly.
simulate.formula now checks for names associated
with the theta and beta parameter vectors. If
missing, it prints a message (not a warning); otherwise, it
re-orders the parameter vectors to match the internal
representation.
preliminary implementation of a check.scaleX argument
in [g]lmerControl that warns about scaling if some columns
of the fixed-effect model matrix have large standard
deviations (relative to 1, or to each other)
CHANGES IN VERSION 1.1-3
NEW FEATURES
The gradient and Hessian are now computed via finite
differencing after the nonlinear fit is done, and the results
are used for additional convergence tests. Control of the
behaviour is available through the check.conv.* options
in [g]lmerControl. Singular fits (fits with estimated
variances of zero or correlations of +/- 1) can also be tested for,
although the current default value of the check.conv.singular
option is "ignore"; this may be changed to "warning"
in the future. The results are stored in @optinfo$derivs.
(Github issue #120; based on code by Rune Christensen.)
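
An illustrative sketch of relaxing these checks for a single fit, using the
string shortcuts that also appear in the package's own tests:

    fm_quiet <- lmer(Reaction ~ Days + (Days | Subject), sleepstudy,
                     control = lmerControl(check.conv.grad = "ignore",
                                           check.conv.hess = "ignore"))
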
The simulate method will now work to generate
simulations "from scratch" by providing a model formula,
a data frame holding the predictor variables, and a list
containing the values of the model parameters:
see ?simulate.merMod. (Github issue #115)
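
An illustrative sketch; the data frame and parameter values below are made up
purely for illustration:

    dd <- expand.grid(f = factor(1:10), x = seq(0, 1, length.out = 15))
    ss <- simulate(~ x + (1 | f), newdata = dd, family = gaussian,
                   newparams = list(beta = c(1, 2), theta = 1, sigma = 0.5))
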
VarCorr.merMod objects now have an as.data.frame
method, converting the list of matrices to a more
convenient form for reporting and post-processing. (Github issue #129)
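
For example, with fm1 denoting a fitted merMod object:

    as.data.frame(VarCorr(fm1))   ## one row per variance/covariance/residual component
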
MINOR USER-VISIBLE CHANGES
results of fitted(), predict(),
and residuals() now have
names in all cases (previously results were unnamed, or
named only when predicting from new data)
the anova method now has a refit argument
that controls whether objects of class lmerMod should be
refitted with ML before producing the anova table.
(Github issues #141, #165; contributed by Henrik Singmann.)
the print method for VarCorr objects
now has a formatter argument for finer control
of standard deviation and variance formats
the optinfo slot now stores slightly more
information, including the number of function evaluations
($feval).
dotplot.ranef.mer now adds titles to sub-plots by default,
like qqmath.ranef.mer
BUG FIXES
fitted now respects na.action settings (Github
issue #149)
confint(.,method="boot") now works when there are
NA values in the original data set (Github issue #158)
previously, the code stored the results (parameter values,
residuals, etc.) based on the last set of parameters
evaluated, rather than the optimal parameters. These were
not always the same, but were almost always very close,
but some previous results will change slightly
(Github issue #166)
CHANGES IN VERSION 1.1-0
MINOR USER-VISIBLE CHANGES
when using the default method="profile",
confint now returns appropriate upper/lower bounds
(-1/1 for correlations, 0/Inf for standard deviations)
rather than NA when appropriate
BUG FIXES
in a previous development version, ranef returned
incorrect conditional variances (Github issue #148); this is
now fixed
CHANGES IN VERSION 1.0-6 (2013-10-27)
BUG FIXES
prediction now works when new data have fewer factor
levels than are present in the original data (Github issue #143,
reported by Rune Haubo)
the existence of a variable "new" in the global environment
would mess lme4 up: reported at http://stackoverflow.com/questions/19801070/error-message-glmer-using-r-what-must-be-a-character-string-or-a-function
CHANGES IN VERSION 1.0-5 (2013-10-24)
USER-VISIBLE CHANGES
confint.merMod and vcov.merMod are
now exported, for downstream package-author convenience
the package now depends on Matrix >=1.1-0 and RcppEigen
>=0.3.1.2.3
new rename.response option for refit (see BUG
FIXES section)
BUG FIXES
eliminated redundant messages about suppressed
fixed-effect correlation matrices when p>20
most inverse-link functions are now bounded where
appropriate by .Machine$double.eps, allowing fitting
of GLMMs with extreme parameter values
merMod objects created with refit did not
work with update: optional
rename.response option added to refit.merMod, to allow
this (but the default is still FALSE, for
back-compatibility) (reported by A. Kuznetsova)
fixed buglet preventing on-the-fly creation of index variables,
e.g. y~1+(1|rownames(data)) (reported by J. Dushoff)
predict now works properly for glmer models
with basis-creating terms (e.g. poly, ns)
step sizes determined from fixed effect coefficient standard
errors after the first stage of glmer fitting are now bounded,
allowing some additional models to be fitted
CHANGES IN VERSION 1.0-4 (2013-09-08)
BUG FIXES
refit() now works, again, with lists of
length 1, so that e.g. refit(.,simulate(.)) works.
(Reported by Gustaf Granath)
getME(.,"ST") was returning a list
containing the Cholesky factorizations that get repeated in
Lambda. But this was inconsistent with what ST represents in
lme4.0. This inconsistency has now been fixed and
getME(.,"ST") is now consistent with the definition of the
ST matrix in lme4.0. See
https://github.com/lme4/lme4/issues/111 for more
detail. Thanks to Vince Dorie.
Corrected order of unpacking of standard
deviation/correlation components, which affected results
from confint(.,method="boot"). (Reported by Reinhold
Kliegl)
fixed a copying bug that made refitML()
modify the original model
CHANGES IN VERSION 1.0-1 (2013-08-17)
MINOR USER-VISIBLE CHANGES
check.numobs.* and check.numlev.* in
(g)lmerControl have been changed (from recent development
versions) to check.nobs.* and
check.nlev.* respectively, and the default values of
check.nlev.gtreq.5 and check.nobs.vs.rankZ
have been changed to "ignore" and "warningSmall"
respectively
in (g)lmerControl, arguments to the optimizer
should be passed as a list called optCtrl, rather than
specified as additional (ungrouped) arguments
the postVar argument to ranef has been
changed to the (more sensible) condVar ("posterior variance"
was a misnomer, "conditional variance" – short for "variance of the
conditional mode" – is preferred)
the REform argument to predict has been changed
to ReForm for consistency
the tnames function, briefly exported, has been
unexported
getME(.,"cnms") added
print method for merMod objects is now more
terse, and different from summary.merMod
the objective method for the respMod
reference class now takes an optional sigma.sq parameter
(defaulting to NULL) to allow calculation of the
objective function with a residual variance different from
the profiled value (Vince Dorie)
CHANGES IN VERSION 1.0-0 (2013-08-01)
MAJOR USER-VISIBLE CHANGES
Because the internal computational machinery has changed,
results from the newest version of lme4 will not be numerically
identical to those from previous versions. For reasonably well-
defined fits, they will be extremely close (within numerical
tolerances of 1e-4 or so), but for unstable or poorly-defined fits
the results may change, and very unstable fits may fail when they
(apparently) succeeded with previous versions. Similarly, some fits
may be slower with the new version, although on average the new
version should be faster and more stable. More numerical
tuning options are now available (see below); non-default settings
may restore the speed and/or ability to fit a particular model without
an error. If you notice significant or disturbing changes when fitting
a model with the new version of lme4, please notify the maintainers.
VarCorr returns its results in the same format as before (as a
list of variance-covariance matrices with correlation and stddev
attributes, plus a sc attribute giving the residual standard
deviation/scale parameter when appropriate), but prints them in a
different (nicer) way.
By default residuals gives deviance (rather than Pearson)
residuals when applied to glmer fits (a side effect of matching glm
behaviour more closely).
As another side effect of matching glm
behaviour, reported log-likelihoods from glmer models
are no longer consistent with those from pre-1.0 lme4,
but are consistent with glm; see glmer
examples.
MINOR USER-VISIBLE CHANGES
More use is made of S3 rather than S4 classes and methods: one
side effect is that the nlme and lme4 packages are now much more
compatible; methods such as fixef no longer conflict.
The internal optimizer has changed. [gn]lmer now has an
optimizer argument; "Nelder_Mead" is the default for [n]lmer,
while a combination of "bobyqa" (an alternative derivative-free
method) and "Nelder_Mead" is the default for glmer. To use the
nlminb optimizer as in the old version of lme4, you can use
optimizer="optimx" with control=list(method="nlminb") (you will
need the optimx package to be installed and loaded). See
lmerControl for details.
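
An illustrative sketch of the optimx route, written with the optCtrl list
described in the 1.0-1 entry above (requires the optimx package):

    fm_nlminb <- lmer(Reaction ~ Days + (Days | Subject), sleepstudy,
                      control = lmerControl(optimizer = "optimx",
                                            optCtrl = list(method = "nlminb")))
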
Families in GLMMs are no longer restricted to built-in/hard-
coded families; any family described in family, or following that
design, is usable (although there are some hard-coded families, which
will be faster).
[gn]lmer now produces objects of class merMod rather than
class mer as before.
the structure of the Zt (transposed random effect
design matrix) as returned by getME(.,"Zt"), and the
corresponding order of the random effects vector
(getME(.,"u")) have changed. To retrieve Zt
in the old format, use do.call(Matrix::rBind,getME(.,"Ztlist")).
the package checks input more thoroughly for
non-identifiable or otherwise problematic cases: see
lmerControl for fine control of the test behaviour.
NEW FEATURES
A general-purpose getME accessor method allows
extraction of a wide variety of components of a mixed-model
fit. getME also allows a vector of objects to be returned as
a list of mixed-model components. This has been backported to
be compatible with older versions of lme4 that still produce mer
objects rather than merMod objects. However, backporting is incomplete;
some objects are only extractable in newer versions of lme4.
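
For example, with fm1 denoting a fitted merMod object:

    getME(fm1, "theta")               ## relative covariance factor parameters
    getME(fm1, c("theta", "beta"))    ## several components, returned as a named list
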
Optimization information (convergence codes, warnings, etc.)
is now stored in an @optinfo slot.
bootMer provides a framework for obtaining parameter confidence
intervals by parametric bootstrapping.
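
An illustrative sketch (fm1 is again a hypothetical fitted model; nsim is kept
small here only for speed):

    bb <- bootMer(fm1, FUN = fixef, nsim = 100)   ## parametric bootstrap of fixed effects
    apply(bb$t, 2, quantile, c(0.025, 0.975))     ## crude percentile intervals
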
plot.merMod provides diagnostic plotting
methods similar to those from the nlme package
(although missing augPred).
A predict.merMod method gives predictions;
it allows an effect-specific choice of conditional prediction or prediction at the
population level (i.e., with random effects set to zero).
Likelihood profiling for lmer and glmer results (see
link{profile-methods}).
Confidence intervals by likelihood profiling (default),
parametric bootstrap, or Wald approximation (fixed effects only):
see confint.merMod
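
For instance, with fm1 a fitted merMod object:

    confint(fm1)                                ## profile likelihood (default)
    confint(fm1, method = "Wald")
    confint(fm1, method = "boot", nsim = 500)   ## parametric bootstrap
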
nAGQ=0, an option to do fast (but inaccurate) fitting of GLMMs.
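
An illustrative sketch with the built-in cbpp data:

    gm_fast <- glmer(cbind(incidence, size - incidence) ~ period + (1 | herd),
                     data = cbpp, family = binomial, nAGQ = 0)
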
Using devFunOnly=TRUE allows the user to extract a deviance
function for the model, allowing further diagnostics/customization of
model results.
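
An illustrative sketch; the returned object is a function of the covariance
parameter vector theta:

    devf <- lmer(Reaction ~ Days + (Days | Subject), sleepstudy, devFunOnly = TRUE)
    devf(c(1, 0, 1))   ## evaluate the objective at the default starting value of theta
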
The internal structure of [gn]lmer is now more modular, allowing
finer control of the different steps of argument checking; construction
of design matrices and data structures; parameter estimation; and construction
of the final merMod object (see ?modular).
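
An illustrative sketch of the modular steps for a linear mixed model, along the
lines of the ?modular example:

    lmod   <- lFormula(Reaction ~ Days + (Days | Subject), sleepstudy)
    devfun <- do.call(mkLmerDevfun, lmod)
    opt    <- optimizeLmer(devfun)
    fm     <- mkMerMod(environment(devfun), opt, lmod$reTrms, fr = lmod$fr)
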
the formula, model.frame, and terms
methods return full versions (including random effect terms and
input variables) by default, but a fixed.only argument
allows access to the fixed effect submodel.
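
A short sketch, with fm1 a fitted merMod object:

    formula(fm1, fixed.only = TRUE)   ## fixed-effect part of the formula only
    terms(fm1, fixed.only = TRUE)
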
EXPERIMENTAL FEATURES
glmer.nb provides an embryonic negative
binomial fitting capability.
STILL NON-EXISTENT FEATURES
Adaptive Gaussian quadrature (AGQ) is not available for multiple and/or
non-scalar random effects.
Posterior variances of conditional models for non-scalar random effects.
Standard errors for predict.merMod results.
Automatic MCMC sampling based on the fit turns out to be very difficult
to implement in a way that is really broadly reliable and robust; mcmcsamp
will not be implemented in the near future. See
pvalues for alternatives.
"R-side" structures (within-block correlation and heteroscedasticity) are
not on the current timetable.
BUG FIXES
In a development version, prior weights were not being used properly in
the calculation of the residual standard deviation, but this has been fixed.
Thanks to Simon Wood for pointing this out.
In a development version, the step-halving component of the penalized
iteratively reweighted least squares algorithm was not working, but
this is now fixed.
In a development version, square RZX matrices would lead to a
pwrssUpdate did not converge in 30 iterations error. This has been fixed
by adding an extra column of zeros to RZX.
DEPRECATED AND DEFUNCT
Previous versions of lme4 provided
the mcmcsamp function, which efficiently generated
a Markov chain Monte Carlo sample from the posterior
distribution of the parameters, assuming flat (scaled
likelihood) priors. Due to difficulty in constructing a
version of mcmcsamp that was reliable even in
cases where the estimated random effect variances were
near zero (e.g.
https://stat.ethz.ch/pipermail/r-sig-mixed-models/2009q4/003115.html),
mcmcsamp has been withdrawn (or more precisely,
not updated to work with lme4 versions >=1.0).
Calling glmer with the default gaussian family
redirects to lmer, but this is deprecated
(in the future glmer(...,family="gaussian") may
fit a LMM using the penalized iteratively reweighted least squares
algorithm). Please call lmer directly.
Calling lmer with a family argument redirects
to glmer; this is deprecated. Please call glmer directly.
CHANGES IN VERSION 0.999375-16 (2008-06-23)
MAJOR USER-VISIBLE CHANGES
The underlying algorithms and representations for all the
mixed-effects models fit by this package have changed - for
the better, we hope. The class "mer" is a common
mixed-effects model representation for linear, generalized
linear, nonlinear and generalized nonlinear mixed-effects
models.
ECME iterations are no longer used at all, nor are analytic
gradients. Components named 'niterEM', 'EMverbose', or
'gradient' can be included in the 'control' argument to
lmer(), glmer() or nlmer() but have no effect.
PQL iterations are no longer used in glmer() and nlmer().
Only the Laplace approximation is currently available. AGQ,
for certain classes of GLMMs or NLMMs, is being added.
The 'method' argument to lmer(), glmer() or nlmer() is
deprecated. Use the 'REML = FALSE' in lmer() to obtain ML
estimates. Selection of AGQ in glmer() and nlmer() will be
controlled by the argument 'nAGQ', when completed.
NEW FEATURES
The representation of mixed-effects models has been
dramatically changed to allow for smooth evaluation of the
objective as the variance-covariance matrices for the random
effects approach singularity. Beta testers found this
representation to be more robust and usually faster than
previous versions of lme4.
The mcmcsamp function uses a new sampling method for the
variance-covariance parameters that allows recovery from
singularity. The update is not based on a sample from the
Wishart distribution. It uses a redundant parameter
representation and a linear least squares update.
CAUTION: Currently the results from mcmcsamp look peculiar and
are probably incorrect. I hope it is just a matter of my
omitting a scaling factor but I have seen patterns such as
the parameter estimate for some variance-covariance parameters
being the maximum value in the chain, which is highly
unlikely.
The 'verbose' argument to lmer(), glmer() and nlmer() can be
used instead of 'control = list(msVerbose = TRUE)'.