2014-01-24 9 views
24

Ich versuche, eine Power-Analyse für ein Mixed-Effects-Modell mit der Entwicklungsversion von lme4 und diesem Tutorial zu machen. Ich stelle fest, dass lme4 in dem Tutorial einen Konvergenzfehler (als Warnung) meldet: Konvergenzfehler in der Entwicklungsversion von lme4

## Warning: Model failed to converge with max|grad| = 0.00187101 (tol = 
## 0.001) 

Die gleiche Warnung kommt, wenn ich den Code für meine Daten-Set ausführen, mit:

## Warning message: In checkConv(attr(opt, "derivs"), opt$par, checkCtrl = 
control$checkConv, : 
Model failed to converge with max|grad| = 0.774131 (tol = 0.001) 

Die Schätzungen aus einem regulären glmer-Aufruf mit dieser aktualisierten Version unterscheiden sich außerdem etwas von denen mit der aktuellen CRAN-Version (in diesem Fall gibt es keine Warnungen). Irgendeine Idee, warum das passieren könnte?

EDIT

Das Modell, das ich anzugeben versuchte:

## Binary response modeled with a three-way fixed-effect interaction
## (age x similarity x percSem) and a by-subject random slope for similarity.
glmer(resp ~ months.c * similarity * percSem + (similarity | subj), family = binomial, data = myData) 

Der Datensatz, den ich habe, enthält eine Zwischensubjekt-Variable (Alter, zentriert) und zwei Innersubjekt-Variablen (Ähnlichkeit: 2 Stufen, percSem: 3 Stufen), die ein binäres Ergebnis (falsche Erinnerung/Raten) vorhersagen. Zusätzlich hat jede Zelle innerhalb der Versuchsperson 3 wiederholte Messungen. Somit gibt es insgesamt 2 x 3 x 3 = 18 binäre Antworten pro Person und insgesamt 38 Teilnehmer.

structure(list(subj = structure(c(1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 3L, 3L, 3L, 3L, 3L, 3L, 3L, 3L, 3L, 3L, 3L, 3L, 3L, 3L, 3L, 3L, 3L, 3L, 4L, 4L, 4L, 4L, 4L, 4L, 4L, 4L, 4L, 4L, 4L, 4L, 4L, 4L, 4L, 4L, 4L, 4L, 5L, 5L, 5L, 5L, 5L, 5L, 5L, 5L, 5L, 5L, 5L, 5L, 5L, 5L, 5L, 5L, 5L, 5L, 6L, 6L, 6L, 6L, 6L, 6L, 6L, 6L, 6L, 6L, 6L, 6L, 6L, 6L, 6L, 6L, 6L, 6L, 7L, 7L, 7L, 7L, 7L, 7L, 7L, 7L, 7L, 7L, 7L, 7L, 7L, 7L, 7L, 7L, 7L, 7L, 8L, 8L, 8L, 8L, 8L, 8L, 8L, 8L, 8L, 8L, 8L, 8L, 8L, 8L, 8L, 8L, 8L, 8L, 9L, 9L, 9L, 9L, 9L, 9L, 9L, 9L, 9L, 9L, 9L, 9L, 9L, 9L, 9L, 9L, 9L, 9L, 10L, 10L, 10L, 10L, 10L, 10L, 10L, 10L, 10L, 10L, 10L, 10L, 10L, 10L, 10L, 10L, 10L, 10L, 11L, 11L, 11L, 11L, 11L, 11L, 11L, 11L, 11L, 11L, 11L, 11L, 11L, 11L, 11L, 11L, 11L, 11L, 12L, 12L, 12L, 12L, 12L, 12L, 12L, 12L, 12L, 12L, 12L, 12L, 12L, 12L, 12L, 12L, 12L, 12L, 13L, 13L, 13L, 13L, 13L, 13L, 13L, 13L, 13L, 13L, 13L, 13L, 13L, 13L, 13L, 13L, 13L, 13L, 14L, 14L, 14L, 14L, 14L, 14L, 14L, 14L, 14L, 14L, 14L, 14L, 14L, 14L, 14L, 14L, 14L, 14L, 15L, 15L, 15L, 15L, 15L, 15L, 15L, 15L, 15L, 15L, 15L, 15L, 15L, 15L, 15L, 15L, 15L, 15L, 16L, 16L, 16L, 16L, 16L, 16L, 16L, 16L, 16L, 16L, 16L, 16L, 16L, 16L, 16L, 16L, 16L, 16L, 17L, 17L, 17L, 17L, 17L, 17L, 17L, 17L, 17L, 17L, 17L, 17L, 17L, 17L, 17L, 17L, 17L, 17L, 18L, 18L, 18L, 18L, 18L, 18L, 18L, 18L, 18L, 18L, 18L, 18L, 18L, 18L, 18L, 18L, 18L, 18L, 19L, 19L, 19L, 19L, 19L, 19L, 19L, 19L, 19L, 19L, 19L, 19L, 19L, 19L, 19L, 19L, 19L, 19L, 20L, 20L, 20L, 20L, 20L, 20L, 20L, 20L, 20L, 20L, 20L, 20L, 20L, 20L, 20L, 20L, 20L, 20L, 21L, 21L, 21L, 21L, 21L, 21L, 21L, 21L, 21L, 21L, 21L, 21L, 21L, 21L, 21L, 21L, 21L, 21L, 22L, 22L, 22L, 22L, 22L, 22L, 22L, 22L, 22L, 22L, 22L, 22L, 22L, 22L, 22L, 22L, 22L, 22L, 23L, 23L, 23L, 23L, 23L, 23L, 23L, 23L, 23L, 23L, 23L, 23L, 23L, 23L, 23L, 23L, 23L, 23L, 24L, 24L, 24L, 24L, 24L, 24L, 24L, 24L, 24L, 24L, 24L, 
24L, 24L, 24L, 24L, 24L, 24L, 24L, 25L, 25L, 25L, 25L, 25L, 25L, 25L, 25L, 25L, 25L, 25L, 25L, 25L, 25L, 25L, 25L, 25L, 25L, 26L, 26L, 26L, 26L, 26L, 26L, 26L, 26L, 26L, 26L, 26L, 26L, 26L, 26L, 26L, 26L, 26L, 26L, 27L, 27L, 27L, 27L, 27L, 27L, 27L, 27L, 27L, 27L, 27L, 27L, 27L, 27L, 27L, 27L, 27L, 27L, 28L, 28L, 28L, 28L, 28L, 28L, 28L, 28L, 28L, 28L, 28L, 28L, 28L, 28L, 28L, 28L, 28L, 28L, 29L, 29L, 29L, 29L, 29L, 29L, 29L, 29L, 29L, 29L, 29L, 29L, 29L, 29L, 29L, 29L, 29L, 29L, 30L, 30L, 30L, 30L, 30L, 30L, 30L, 30L, 30L, 30L, 30L, 30L, 30L, 30L, 30L, 30L, 30L, 30L, 31L, 31L, 31L, 31L, 31L, 31L, 31L, 31L, 31L, 31L, 31L, 31L, 31L, 31L, 31L, 31L, 31L, 31L, 32L, 32L, 32L, 32L, 32L, 32L, 32L, 32L, 32L, 32L, 32L, 32L, 32L, 32L, 32L, 32L, 32L, 32L, 33L, 33L, 33L, 33L, 33L, 33L, 33L, 33L, 33L, 33L, 33L, 33L, 33L, 33L, 33L, 33L, 33L, 33L, 34L, 34L, 34L, 34L, 34L, 34L, 34L, 34L, 34L, 34L, 34L, 34L, 34L, 34L, 34L, 34L, 34L, 34L, 35L, 35L, 35L, 35L, 35L, 35L, 35L, 35L, 35L, 35L, 35L, 35L, 35L, 35L, 35L, 35L, 35L, 35L, 36L, 36L, 36L, 36L, 36L, 36L, 36L, 36L, 36L, 36L, 36L, 36L, 36L, 36L, 36L, 36L, 36L, 36L, 37L, 37L, 37L, 37L, 37L, 37L, 37L, 37L, 37L, 37L, 37L, 37L, 37L, 37L, 37L, 37L, 37L, 37L, 38L, 38L, 38L, 38L, 38L, 38L, 38L, 38L, 38L, 38L, 38L, 38L, 38L, 38L, 38L, 38L, 38L, 38L), .Label = c("09A", "10", "11", "12", "12A", "13", "14", "14A", "15", "15A", "16", "17", "18", "19", "1A", "2", "20", "21", "22", "22A", "23", "24", "25", "26", "27", "28", "29", "3", "30", "31", "32A", "32B", "33", "4B", "5", "6", "7", "8"), class = "factor"), months.c = structure(c(-9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, 2.18421052631579, 2.18421052631579, 2.18421052631579, 2.18421052631579, 
2.18421052631579, 2.18421052631579, 2.18421052631579, 2.18421052631579, 2.18421052631579, 2.18421052631579, 2.18421052631579, 2.18421052631579, 2.18421052631579, 2.18421052631579, 2.18421052631579, 2.18421052631579, 2.18421052631579, 2.18421052631579, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 6.18421052631579, 6.18421052631579, 6.18421052631579, 6.18421052631579, 6.18421052631579, 
6.18421052631579, 6.18421052631579, 6.18421052631579, 6.18421052631579, 6.18421052631579, 6.18421052631579, 6.18421052631579, 6.18421052631579, 6.18421052631579, 6.18421052631579, 6.18421052631579, 6.18421052631579, 6.18421052631579, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, -7.81578947368421, 9.18421052631579, 9.18421052631579, 9.18421052631579, 9.18421052631579, 9.18421052631579, 9.18421052631579, 9.18421052631579, 9.18421052631579, 9.18421052631579, 9.18421052631579, 9.18421052631579, 9.18421052631579, 9.18421052631579, 9.18421052631579, 9.18421052631579, 9.18421052631579, 9.18421052631579, 9.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 6.18421052631579, 6.18421052631579, 6.18421052631579, 6.18421052631579, 6.18421052631579, 6.18421052631579, 6.18421052631579, 6.18421052631579, 6.18421052631579, 6.18421052631579, 6.18421052631579, 6.18421052631579, 6.18421052631579, 6.18421052631579, 6.18421052631579, 6.18421052631579, 6.18421052631579, 6.18421052631579, -2.81578947368421, -2.81578947368421, -2.81578947368421, -2.81578947368421, -2.81578947368421, 
-2.81578947368421, -2.81578947368421, -2.81578947368421, -2.81578947368421, -2.81578947368421, -2.81578947368421, -2.81578947368421, -2.81578947368421, -2.81578947368421, -2.81578947368421, -2.81578947368421, -2.81578947368421, -2.81578947368421, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, -9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, -9.81578947368421, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 
5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, -6.81578947368421, -6.81578947368421, -6.81578947368421, -6.81578947368421, -6.81578947368421, -6.81578947368421, -6.81578947368421, -6.81578947368421, -6.81578947368421, -6.81578947368421, -6.81578947368421, -6.81578947368421, -6.81578947368421, -6.81578947368421, -6.81578947368421, -6.81578947368421, -6.81578947368421, -6.81578947368421, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 5.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, -1.81578947368421, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 
1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, 1.18421052631579, -8.81578947368421, -8.81578947368421, -8.81578947368421, -8.81578947368421, -8.81578947368421, -8.81578947368421, -8.81578947368421, -8.81578947368421, -8.81578947368421, -8.81578947368421, -8.81578947368421, -8.81578947368421, -8.81578947368421, -8.81578947368421, -8.81578947368421, -8.81578947368421, -8.81578947368421, -8.81578947368421, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 3.18421052631579, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, 11.1842105263158, 0.184210526315795, 0.184210526315795, 0.184210526315795, 0.184210526315795, 0.184210526315795, 0.184210526315795, 0.184210526315795, 0.184210526315795, 0.184210526315795, 0.184210526315795, 0.184210526315795, 0.184210526315795, 0.184210526315795, 0.184210526315795, 0.184210526315795, 0.184210526315795, 0.184210526315795, 0.184210526315795, -4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, 
-4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, -4.81578947368421, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -2.81578947368421, -2.81578947368421, -2.81578947368421, -2.81578947368421, -2.81578947368421, -2.81578947368421, -2.81578947368421, -2.81578947368421, -2.81578947368421, -2.81578947368421, -2.81578947368421, -2.81578947368421, -2.81578947368421, -2.81578947368421, -2.81578947368421, -2.81578947368421, -2.81578947368421, -2.81578947368421, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -10.8157894736842, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, -0.815789473684205, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 8.18421052631579, 
8.18421052631579, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421, -3.81578947368421), "`scaled:center`" = 70.8157894736842), similarity = structure(c(2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 
1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L), .Label = c("Dissim", "Sim"), class = "factor"), percSem = structure(c(2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 
2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L, 2L, 2L, 2L, 3L, 3L, 3L, 1L, 1L, 1L), .Label = c("Both", "Perc", 
"Sem"), class = "factor"), resp = structure(c(2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 2L, 1L, 2L, 1L, 2L, 1L, 1L, 1L, 2L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 2L, 2L, 2L, 1L, 2L, 1L, 2L, 1L, 2L, 1L, 2L, 2L, 2L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 2L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 2L, 2L, 2L, 1L, 1L, 2L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 1L, 2L, 2L, 2L, 1L, 2L, 2L, 1L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 1L, 2L, 1L, 1L, 1L, 2L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 1L, 1L, 1L, 1L, 1L, 2L, 1L, 2L, 2L, 2L, 2L, 1L, 1L, 2L, 2L, 1L, 1L, 2L, 1L, 1L, 2L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 2L, 2L, 2L, 2L, 1L, 1L, 2L, 2L, 1L, 2L, 2L, 1L, 2L, 1L, 2L, 2L, 1L, 1L, 1L, 2L, 2L, 2L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 2L, 2L, 2L, 2L, 2L, 1L, 2L, 1L, 2L, 2L, 1L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 1L, 2L, 1L, 2L, 1L, 2L, 2L, 1L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 1L, 2L, 1L, 2L, 2L, 2L, 2L, 1L, 2L, 2L, 2L, 1L, 2L, 2L, 1L, 1L, 2L, 1L, 2L, 1L, 1L, 2L, 1L, 1L, 1L, 2L, 2L, 1L, 2L, 2L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 2L, 2L, 2L, 2L, 2L, 1L, 2L, 1L, 1L, 1L, 2L, 1L, 2L, 1L, 2L, 2L, 2L, 2L, 1L, 2L, 1L, 2L, 1L, 1L, 2L, 1L, 1L, 1L, 2L, 2L, 1L, 2L, 1L, 2L, 1L, 2L, 1L, 1L, 2L, 2L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 
2L, 1L, 1L, 2L, 1L, 2L, 2L, 2L, 2L, 2L, 1L, 2L, 2L, 2L, 1L, 1L, 2L, 2L, 2L, 1L, 2L, 2L, 1L, 2L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 2L, 1L, 1L, 2L, 1L, 2L, 2L, 1L, 2L, 2L, 2L, 2L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 2L, 1L, 2L, 1L, 1L, 2L, 1L, 2L, 2L, 1L, 2L, 2L, 1L, 2L, 2L, 1L, 2L, 1L, 1L, 1L, 1L, 2L, 1L, 2L, 1L, 2L, 1L, 1L, 2L, 2L, 1L, 1L, 1L, 2L, 2L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 1L, 2L, 2L, 1L, 2L, 1L, 2L, 1L, 2L, 1L, 2L, 2L, 2L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 2L, 2L, 1L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 2L, 2L, 1L, 2L, 1L, 2L, 2L, 2L, 1L), .Label = c("false memory", "guess"), class = "factor")), .Names = c("subj", "months.c", "similarity", "percSem", "resp"), row.names = c(NA, -684L), class = "data.frame") 
+2

Wir arbeiten immer noch an der Verfeinerung der Konvergenzprüfungen, die erst kürzlich hinzugefügt wurden. Ein paar Fragen: (1) Kannst du bitte einen Hinweis auf das Tutorial geben, dem du folgst? (2) Ist das Modell, das du angepasst hast (mit dem viel größeren max|grad|), singulär? D.h., wenn du mit control = lmerControl(check.conv.singular = "warning") anpasst, gibt es eine Warnung (bzw. hast du Varianz-Schätzungen von Null oder Korrelations-Schätzungen von +/- 1,0)? Ein reproduzierbares Beispiel wäre für Entwicklungszwecke sehr hilfreich ... –

+0

(1) Das Tutorial ist eines, das Sie auf Rpubs geschrieben haben. Es sollte (?) In der Frage verlinkt sein. (2) Es wurden keine Warnungen mit diesem Code gegeben, und das Modell hatte keine geschätzte Nullvarianz oder Korrelation geschätzt. Ich habe den kompletten Datensatz in die Frage aufgenommen (ich entschuldige mich für die Länge) für Entwicklungs- und Hilfszwecke. – dmartin

+6

Mein einziger weiterer Kommentar ist, dass es wahrscheinlich gut wäre zu sagen, dass dies ** Warnungen ** sind - in der R-Welt gibt es eine klare Unterscheidung zwischen Fehlern (die als schwerwiegend angesehen werden und dazu führen, dass das Programm aufhört, ohne eine Antwort zu geben) und Warnungen (die als beratend betrachtet werden und eine Antwort liefern) –

Antwort

41

tl;dr: Das sieht wie ein falsch-positives Ergebnis aus — ich sehe keine besonders wichtigen Unterschiede zwischen den Modell-Fits mit einer Vielzahl verschiedener Optimierer, obwohl die Ausreißer der eingebaute Nelder-Mead-Optimierer und nlminb zu sein scheinen; das eingebaute bobyqa sowie bobyqa und Nelder-Mead aus dem nloptr-Paket liefern extrem ähnliche Ergebnisse und keine Warnungen.

Mein allgemeiner Rat in diesen Fällen wäre, das Modell mit control=glmerControl(optimizer="bobyqa") neu anzupassen; wir überlegen, bobyqa zum Standard-Optimierer zu machen (diese Frage erhöht das Gewicht der Belege zu seinen Gunsten).

Ich habe die dput-Ausgabe in eine separate Datei gelegt:

## Load the data set (the dput() output above, saved as convdat.R).
source("convdat.R") 

Wir probieren die gesamte Bandbreite möglicher Optimierer aus: eingebautes Nelder-Mead und bobyqa; nlminb und L-BFGS-B aus Basis-R über das optimx-Paket; sowie die nloptr-Versionen von Nelder-Mead und bobyqa.

library(lme4) 
## Reference fit: built-in bobyqa optimizer (gives no convergence warning).
g0.bobyqa <- glmer(resp ~ months.c * similarity * percSem + 
       (similarity | subj), 
     family = binomial, data = myData, 
        control=glmerControl(optimizer="bobyqa")) 
## Same model, built-in Nelder-Mead optimizer (the lme4 default at the time).
g0.NM <- update(g0.bobyqa,control=glmerControl(optimizer="Nelder_Mead")) 
library(optimx) 
## nlminb and L-BFGS-B from base R, accessed through the optimx package.
g0.nlminb <- update(g0.bobyqa,control=glmerControl(optimizer="optimx", 
           optCtrl=list(method="nlminb"))) 
g0.LBFGSB <- update(g0.bobyqa,control=glmerControl(optimizer="optimx", 
           optCtrl=list(method="L-BFGS-B"))) 

library(nloptr) 
## from https://github.com/lme4/lme4/issues/98: 
## Default nloptr options: BOBYQA algorithm, tight relative x-tolerance,
## and a generous evaluation budget.
defaultControl <- list(algorithm="NLOPT_LN_BOBYQA",xtol_rel=1e-6,maxeval=1e5) 
## Adapter so glmer() can drive optimizers from the nloptr package.
## Signature follows lme4's optimizer-wrapper convention
## (fn, par, lower, upper, control, ...); the result list is repackaged
## into the fields lme4 expects (par, fval, feval, conv, message).
nloptwrap2 <- function(fn, par, lower, upper, control = list(), ...) {
    ## Fill in any option the caller left unset with its default value.
    for (opt in names(defaultControl)) {
        if (is.null(control[[opt]])) {
            control[[opt]] <- defaultControl[[opt]]
        }
    }
    res <- nloptr(x0 = par, eval_f = fn, lb = lower, ub = upper,
                  opts = control, ...)
    ## nloptr uses positive status codes for success; lme4 expects 0.
    list(par     = res$solution,
         fval    = res$objective,
         feval   = res$iterations,
         conv    = if (res$status > 0) 0 else res$status,
         message = res$message)
}
## Refit with the nloptr versions of bobyqa and Nelder-Mead via the wrapper.
g0.bobyqa2 <- update(g0.bobyqa,control=glmerControl(optimizer=nloptwrap2)) 
g0.NM2 <- update(g0.bobyqa,control=glmerControl(optimizer=nloptwrap2, 
          optCtrl=list(algorithm="NLOPT_LN_NELDERMEAD"))) 

Zusammenfassung der Ergebnisse: Wir erhalten Warnungen von nlminb, L-BFGS-B und Nelder-Mead (wobei der maximale Absolutbetrag des Gradienten bei Nelder-Mead am größten ist).

## Extract all estimated parameters from a fit: random-effect parameters
## (theta, the Cholesky factor entries) followed by the fixed effects.
getpar <- function(x) c(getME(x,c("theta")),fixef(x)) 
## Collect the six fits and build a parameter-by-optimizer matrix.
modList <- list(bobyqa=g0.bobyqa,NM=g0.NM,nlminb=g0.nlminb, 
       bobyqa2=g0.bobyqa2,NM2=g0.NM2,LBFGSB=g0.LBFGSB) 
ctab <- sapply(modList,getpar) 
library(reshape2) 
## Melt the parameter matrix to long format for plotting:
## Var1 = parameter name, Var2 = optimizer, value = estimate.
mtab <- melt(ctab) 
library(ggplot2) 
theme_set(theme_bw()) 
## One facet per parameter, points colored by optimizer.
## Spell out the `scales` argument in full — the original relied on
## partial matching of `scale=`, which facet_wrap() happens to accept.
ggplot(mtab,aes(x=Var2,y=value,colour=Var2))+ 
    geom_point()+facet_wrap(~Var1,scales="free") 

Nur die 'guten' Fits:

## Same plot, restricted to the three fits that produced no warnings.
## `scales` is the full argument name — the original's `scale=` worked
## only through partial argument matching.
ggplot(subset(mtab,Var2 %in% c("NM2","bobyqa","bobyqa2")), 
     aes(x=Var2,y=value,colour=Var2))+ 
    geom_point()+facet_wrap(~Var1,scales="free") 

Variationskoeffizient der Schätzungen über die Optimierer hinweg:

## Coefficient of variation (sd/mean) of each parameter across optimizers.
summary(cvvec <- apply(ctab,1,function(x) sd(x)/mean(x))) 

Die höchste CV ist für months.c, die nach wie vor nur etwa 4% ist ...

Die Log-Likelihoods unterscheiden sich nicht sehr: NM2 liefert das Log-Likelihood-Maximum, und alle 'guten' Fits liegen ganz nahe daran (selbst die 'schlechten' weichen höchstens um 1 % ab).

## Compare log-likelihoods: log10 of each fit's distance from the best
## (NM2 is the maximum, hence its -Inf below).
likList <- sapply(modList,logLik) 
round(log10(max(likList)-likList),1) 
## bobyqa  NM nlminb bobyqa2  NM2 LBFGSB 
## -8.5 -2.9 -2.0 -11.4 -Inf -5.0 
+0

Unglaublich hilfreich, danke !! – dmartin

+0

@Ben - Für was es wert ist, hatte ich das gleiche Problem mit meinen Daten und NM produziert eine Warnung und sehr unterschiedliche Schätzungen. LBFGSB hatte Warnungen und kleine Unterschiede. Die anderen erzeugten keine Warnungen und hatten nahezu identische Schätzungen für alle Parameter. Vielen Dank! – djhocking