
I have the following longitudinal (3 timepoints) dataset:

# A tibble: 504 x 6
      ID   Age Age_group Sex    Timepoint  outcome
   <int> <int> <fct>     <chr>  <chr>        <dbl>
 1 33714    15 Young     Male   bl        0.00103 
 2 35377    15 Young     Female bl        0.00106 
 3 38623    45 Older     Female bl        0.00103 
 4 38806    66 Older     Female bl        0.00114 
 5 39593    69 Older     Female bl        0.00113 
 6 39820    60 Older     Female bl        0.00113 
 7 39951    46 Older     Male   bl        0.000986
 8 40286    68 Older     Female bl        0.00107 
 9 40556     9 Young     Male   bl        0.00114 
10 40798    11 Young     Male   bl        0.00111 
# ... with 494 more rows

Data (`dput()` output):

DF <- structure(list(ID = c(33714L, 35377L, 38623L, 38806L, 39593L, 
39820L, 39951L, 40286L, 40556L, 40798L, 40800L, 40815L, 43762L, 
50848L, 52183L, 52461L, 52577L, 53320L, 53873L, 54153L, 54206L, 
54581L, 55122L, 55267L, 55462L, 55612L, 55920L, 56022L, 56307L, 
56420L, 56679L, 57405L, 57445L, 57480L, 57725L, 57809L, 58004L, 
58215L, 58229L, 58503L, 59326L, 59327L, 59344L, 59361L, 59865L, 
60099L, 60100L, 60280L, 60384L, 60429L, 60493L, 60503L, 60575L, 
60603L, 60664L, 60846L, 61415L, 61656L, 61749L, 61883L, 62081L, 
62210L, 62285L, 62937L, 62983L, 63327L, 63329L, 64081L, 64328L, 
64418L, 64507L, 64596L, 65178L, 65250L, 65302L, 65478L, 65480L, 
65487L, 65572L, 65802L, 65935L, 65974L, 65975L, 65978L, 65991L, 
65995L, 66013L, 66154L, 66237L, 66245L, 66389L, 66396L, 66460L, 
66572L, 66589L, 67174L, 73230L, 73525L, 73539L, 73677L, 73942L, 
73953L, 74034L, 74113L, 74114L, 74427L, 74439L, 74607L, 74641L, 
74657L, 74794L, 74800L, 74836L, 74942L, 74952L, 74962L, 74969L, 
74977L, 74985L, 74989L, 75220L, 75229L, 75407L, 75653L, 75732L, 
75735L, 75757L, 75895L, 75898L, 76381L, 76559L, 76574L, 76594L, 
76595L, 76746L, 76751L, 76755L, 76759L, 76775L, 77088L, 77091L, 
77099L, 77134L, 77188L, 77203L, 77252L, 77304L, 77413L, 77453L, 
77528L, 77556L, 77585L, 77668L, 78262L, 79724L, 79730L, 79850L, 
79977L, 80052L, 80819L, 80901L, 80932L, 81064L, 81065L, 81071L, 
81098L, 81142L, 81175L, 33714L, 35377L, 38623L, 38806L, 39593L, 
39820L, 39951L, 40286L, 40556L, 40798L, 40800L, 40815L, 43762L, 
50848L, 52183L, 52461L, 52577L, 53320L, 53873L, 54153L, 54206L, 
54581L, 55122L, 55267L, 55462L, 55612L, 55920L, 56022L, 56307L, 
56420L, 56679L, 57405L, 57445L, 57480L, 57725L, 57809L, 58004L, 
58215L, 58229L, 58503L, 59326L, 59327L, 59344L, 59361L, 59865L, 
60099L, 60100L, 60280L, 60384L, 60429L, 60493L, 60503L, 60575L, 
60603L, 60664L, 60846L, 61415L, 61656L, 61749L, 61883L, 62081L, 
62210L, 62285L, 62937L, 62983L, 63327L, 63329L, 64081L, 64328L, 
64418L, 64507L, 64596L, 65178L, 65250L, 65302L, 65478L, 65480L, 
65487L, 65572L, 65802L, 65935L, 65974L, 65975L, 65978L, 65991L, 
65995L, 66013L, 66154L, 66237L, 66245L, 66389L, 66396L, 66460L, 
66572L, 66589L, 67174L, 73230L, 73525L, 73539L, 73677L, 73942L, 
73953L, 74034L, 74113L, 74114L, 74427L, 74439L, 74607L, 74641L, 
74657L, 74794L, 74800L, 74836L, 74942L, 74952L, 74962L, 74969L, 
74977L, 74985L, 74989L, 75220L, 75229L, 75407L, 75653L, 75732L, 
75735L, 75757L, 75895L, 75898L, 76381L, 76559L, 76574L, 76594L, 
76595L, 76746L, 76751L, 76755L, 76759L, 76775L, 77088L, 77091L, 
77099L, 77134L, 77188L, 77203L, 77252L, 77304L, 77413L, 77453L, 
77528L, 77556L, 77585L, 77668L, 78262L, 79724L, 79730L, 79850L, 
79977L, 80052L, 80819L, 80901L, 80932L, 81064L, 81065L, 81071L, 
81098L, 81142L, 81175L, 33714L, 35377L, 38623L, 38806L, 39593L, 
39820L, 39951L, 40286L, 40556L, 40798L, 40800L, 40815L, 43762L, 
50848L, 52183L, 52461L, 52577L, 53320L, 53873L, 54153L, 54206L, 
54581L, 55122L, 55267L, 55462L, 55612L, 55920L, 56022L, 56307L, 
56420L, 56679L, 57405L, 57445L, 57480L, 57725L, 57809L, 58004L, 
58215L, 58229L, 58503L, 59326L, 59327L, 59344L, 59361L, 59865L, 
60099L, 60100L, 60280L, 60384L, 60429L, 60493L, 60503L, 60575L, 
60603L, 60664L, 60846L, 61415L, 61656L, 61749L, 61883L, 62081L, 
62210L, 62285L, 62937L, 62983L, 63327L, 63329L, 64081L, 64328L, 
64418L, 64507L, 64596L, 65178L, 65250L, 65302L, 65478L, 65480L, 
65487L, 65572L, 65802L, 65935L, 65974L, 65975L, 65978L, 65991L, 
65995L, 66013L, 66154L, 66237L, 66245L, 66389L, 66396L, 66460L, 
66572L, 66589L, 67174L, 73230L, 73525L, 73539L, 73677L, 73942L, 
73953L, 74034L, 74113L, 74114L, 74427L, 74439L, 74607L, 74641L, 
74657L, 74794L, 74800L, 74836L, 74942L, 74952L, 74962L, 74969L, 
74977L, 74985L, 74989L, 75220L, 75229L, 75407L, 75653L, 75732L, 
75735L, 75757L, 75895L, 75898L, 76381L, 76559L, 76574L, 76594L, 
76595L, 76746L, 76751L, 76755L, 76759L, 76775L, 77088L, 77091L, 
77099L, 77134L, 77188L, 77203L, 77252L, 77304L, 77413L, 77453L, 
77528L, 77556L, 77585L, 77668L, 78262L, 79724L, 79730L, 79850L, 
79977L, 80052L, 80819L, 80901L, 80932L, 81064L, 81065L, 81071L, 
81098L, 81142L, 81175L), Age = c(15L, 15L, 45L, 66L, 69L, 60L, 
46L, 68L, 9L, 11L, 16L, 9L, 56L, 16L, 16L, 14L, 53L, 8L, 6L, 
63L, 14L, 10L, 15L, 13L, 15L, 8L, 9L, 9L, 8L, 9L, 9L, 13L, 58L, 
10L, 7L, 8L, 8L, 6L, 15L, 43L, 8L, 11L, 44L, 70L, 14L, 12L, 10L, 
16L, 12L, 10L, 6L, 13L, 67L, 11L, 12L, 13L, 10L, 66L, 13L, 14L, 
12L, 45L, 52L, 64L, 17L, 9L, 12L, 44L, 69L, 11L, 10L, 12L, 10L, 
10L, 70L, 54L, 45L, 43L, 54L, 14L, 42L, 44L, 16L, 15L, 43L, 45L, 
50L, 53L, 53L, 49L, 69L, 14L, 65L, 14L, 13L, 67L, 59L, 52L, 54L, 
44L, 62L, 69L, 10L, 63L, 57L, 12L, 62L, 9L, 53L, 54L, 66L, 49L, 
63L, 51L, 9L, 45L, 49L, 49L, 61L, 62L, 57L, 67L, 65L, 45L, 16L, 
55L, 64L, 67L, 56L, 52L, 63L, 10L, 62L, 14L, 66L, 68L, 15L, 13L, 
43L, 47L, 55L, 69L, 67L, 52L, 15L, 64L, 55L, 44L, 13L, 48L, 71L, 
64L, 13L, 50L, 61L, 70L, 57L, 51L, 46L, 57L, 69L, 46L, 8L, 11L, 
46L, 71L, 38L, 56L, 16L, 16L, 46L, 67L, 70L, 61L, 47L, 69L, 11L, 
13L, 18L, 10L, 57L, 18L, 18L, 15L, 54L, 10L, 8L, 64L, 15L, 12L, 
16L, 14L, 16L, 9L, 11L, 11L, 10L, 10L, 11L, 14L, 59L, 12L, 8L, 
9L, 9L, 8L, 16L, 44L, 9L, 13L, 45L, 71L, 16L, 13L, 12L, 18L, 
13L, 11L, 8L, 14L, 68L, 12L, 13L, 14L, 11L, 67L, 14L, 15L, 14L, 
46L, 53L, 65L, 18L, 11L, 14L, 46L, 70L, 12L, 12L, 13L, 11L, 11L, 
71L, 55L, 46L, 44L, 55L, 15L, 43L, 45L, 17L, 16L, 44L, 46L, 51L, 
55L, 54L, 50L, 70L, 15L, 66L, 15L, 14L, 68L, 60L, 53L, 55L, 46L, 
63L, 70L, 11L, 64L, 58L, 13L, 63L, 10L, 54L, 55L, 67L, 50L, 64L, 
52L, 11L, 46L, 50L, 50L, 62L, 63L, 58L, 68L, 66L, 46L, 18L, 56L, 
65L, 68L, 57L, 53L, 64L, 11L, 63L, 15L, 67L, 69L, 16L, 14L, 44L, 
48L, 56L, 70L, 68L, 53L, 17L, 65L, 56L, 45L, 14L, 49L, 73L, 65L, 
14L, 50L, 62L, 71L, 58L, 52L, 47L, 58L, 70L, 47L, 9L, 12L, 47L, 
72L, 39L, 57L, 18L, 18L, 47L, 68L, 71L, 62L, 48L, 70L, 12L, 14L, 
19L, 11L, 58L, 19L, 19L, 16L, 55L, 11L, 9L, 65L, 17L, 13L, 18L, 
16L, 18L, 11L, 12L, 12L, 11L, 11L, 12L, 16L, 60L, 13L, 9L, 11L, 
10L, 9L, 17L, 45L, 11L, 14L, 46L, 72L, 17L, 14L, 13L, 19L, 15L, 
12L, 9L, 15L, 69L, 14L, 14L, 15L, 12L, 68L, 16L, 17L, 15L, 47L, 
54L, 66L, 20L, 12L, 15L, 47L, 71L, 13L, 13L, 14L, 12L, 12L, 72L, 
56L, 47L, 45L, 56L, 16L, 44L, 46L, 19L, 18L, 44L, 47L, 52L, 56L, 
55L, 51L, 71L, 16L, 67L, 16L, 15L, 69L, 60L, 54L, 56L, 46L, 64L, 
71L, 12L, 65L, 59L, 14L, 64L, 11L, 55L, 57L, 68L, 51L, 65L, 53L, 
11L, 47L, 51L, 51L, 63L, 64L, 59L, 69L, 67L, 48L, 19L, 57L, 66L, 
69L, 59L, 54L, 65L, 12L, 64L, 16L, 68L, 70L, 17L, 15L, 45L, 48L, 
57L, 71L, 69L, 54L, 18L, 66L, 57L, 50L, 15L, 50L, 74L, 66L, 15L, 
51L, 63L, 72L, 59L, 53L, 48L, 59L, 71L, 48L, 10L, 13L, 48L, 73L, 
40L, 58L), Age_group = structure(c(1L, 1L, 2L, 2L, 2L, 2L, 2L, 
2L, 1L, 1L, 1L, 1L, 2L, 1L, 1L, 1L, 2L, 1L, 1L, 2L, 1L, 1L, 1L, 
1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 
2L, 1L, 1L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 1L, 1L, 
1L, 1L, 2L, 1L, 1L, 1L, 2L, 2L, 2L, 1L, 1L, 1L, 2L, 2L, 1L, 1L, 
1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 1L, 2L, 2L, 1L, 1L, 2L, 2L, 2L, 
2L, 2L, 2L, 2L, 1L, 2L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 
2L, 2L, 1L, 2L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 2L, 2L, 2L, 2L, 
2L, 2L, 2L, 2L, 2L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 2L, 1L, 2L, 
2L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 2L, 2L, 2L, 1L, 2L, 2L, 
2L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 2L, 2L, 2L, 
2L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 1L, 1L, 2L, 1L, 1L, 
1L, 2L, 1L, 1L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 
1L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 1L, 1L, 2L, 2L, 1L, 1L, 1L, 
1L, 1L, 1L, 1L, 1L, 2L, 1L, 1L, 1L, 1L, 2L, 1L, 1L, 1L, 2L, 2L, 
2L, 1L, 1L, 1L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 
1L, 2L, 2L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 2L, 1L, 1L, 
2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 2L, 2L, 1L, 2L, 1L, 2L, 2L, 2L, 
2L, 2L, 2L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 2L, 2L, 
2L, 2L, 2L, 2L, 1L, 2L, 1L, 2L, 2L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 
2L, 1L, 2L, 2L, 2L, 1L, 2L, 2L, 2L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 
2L, 2L, 2L, 1L, 1L, 2L, 2L, 2L, 2L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 
2L, 1L, 1L, 1L, 1L, 2L, 1L, 1L, 1L, 2L, 1L, 1L, 2L, 1L, 1L, 1L, 
1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 
2L, 1L, 1L, 2L, 2L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 2L, 1L, 1L, 
1L, 1L, 2L, 1L, 1L, 1L, 2L, 2L, 2L, 1L, 1L, 1L, 2L, 2L, 1L, 1L, 
1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 1L, 2L, 2L, 1L, 1L, 2L, 2L, 2L, 
2L, 2L, 2L, 2L, 1L, 2L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 
2L, 2L, 1L, 2L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 2L, 2L, 2L, 2L, 
2L, 2L, 2L, 2L, 2L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 2L, 1L, 2L, 
2L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 2L, 2L, 2L, 1L, 2L, 2L, 
2L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 2L, 1L, 1L, 2L, 2L, 2L, 
2L), .Label = c("Young", "Older"), class = "factor"), Sex = c("Male", 
"Female", "Female", "Female", "Female", "Female", "Male", "Female", 
"Male", "Male", "Male", "Male", "Female", "Male", "Male", "Male", 
"Female", "Male", "Male", "Male", "Male", "Male", "Male", "Male", 
"Male", "Female", "Female", "Male", "Female", "Male", "Female", 
"Female", "Female", "Male", "Female", "Female", "Male", "Female", 
"Male", "Female", "Male", "Female", "Male", "Male", "Female", 
"Male", "Male", "Male", "Female", "Female", "Female", "Male", 
"Female", "Male", "Female", "Female", "Male", "Male", "Female", 
"Male", "Male", "Female", "Female", "Male", "Male", "Female", 
"Female", "Female", "Female", "Female", "Male", "Male", "Male", 
"Male", "Female", "Female", "Female", "Female", "Female", "Male", 
"Female", "Female", "Male", "Male", "Female", "Male", "Female", 
"Female", "Female", "Female", "Female", "Male", "Male", "Female", 
"Female", "Male", "Female", "Female", "Female", "Female", "Female", 
"Female", "Female", "Female", "Female", "Female", "Female", "Female", 
"Female", "Female", "Male", "Female", "Male", "Female", "Male", 
"Female", "Female", "Female", "Female", "Female", "Male", "Male", 
"Female", "Female", "Male", "Female", "Male", "Female", "Female", 
"Male", "Female", "Female", "Female", "Male", "Female", "Male", 
"Male", "Male", "Female", "Female", "Male", "Male", "Male", "Female", 
"Female", "Male", "Female", "Female", "Male", "Female", "Female", 
"Male", "Female", "Male", "Male", "Male", "Male", "Female", "Male", 
"Male", "Female", "Male", "Male", "Male", "Male", "Male", "Male", 
"Female", "Male", "Female", "Female", "Female", "Female", "Female", 
"Male", "Female", "Male", "Male", "Male", "Male", "Female", "Male", 
"Male", "Male", "Female", "Male", "Male", "Male", "Male", "Male", 
"Male", "Male", "Male", "Female", "Female", "Male", "Female", 
"Male", "Female", "Female", "Female", "Male", "Female", "Female", 
"Male", "Female", "Male", "Female", "Male", "Female", "Male", 
"Male", "Female", "Male", "Male", "Male", "Female", "Female", 
"Female", "Male", "Female", "Male", "Female", "Female", "Male", 
"Male", "Female", "Male", "Male", "Female", "Female", "Male", 
"Male", "Female", "Female", "Female", "Female", "Female", "Male", 
"Male", "Male", "Male", "Female", "Female", "Female", "Female", 
"Female", "Male", "Female", "Female", "Male", "Male", "Female", 
"Male", "Female", "Female", "Female", "Female", "Female", "Male", 
"Male", "Female", "Female", "Male", "Female", "Female", "Female", 
"Female", "Female", "Female", "Female", "Female", "Female", "Female", 
"Female", "Female", "Female", "Female", "Male", "Female", "Male", 
"Female", "Male", "Female", "Female", "Female", "Female", "Female", 
"Male", "Male", "Female", "Female", "Male", "Female", "Male", 
"Female", "Female", "Male", "Female", "Female", "Female", "Male", 
"Female", "Male", "Male", "Male", "Female", "Female", "Male", 
"Male", "Male", "Female", "Female", "Male", "Female", "Female", 
"Male", "Female", "Female", "Male", "Female", "Male", "Male", 
"Male", "Male", "Female", "Male", "Male", "Female", "Male", "Male", 
"Male", "Male", "Male", "Male", "Female", "Male", "Female", "Female", 
"Female", "Female", "Female", "Male", "Female", "Male", "Male", 
"Male", "Male", "Female", "Male", "Male", "Male", "Female", "Male", 
"Male", "Male", "Male", "Male", "Male", "Male", "Male", "Female", 
"Female", "Male", "Female", "Male", "Female", "Female", "Female", 
"Male", "Female", "Female", "Male", "Female", "Male", "Female", 
"Male", "Female", "Male", "Male", "Female", "Male", "Male", "Male", 
"Female", "Female", "Female", "Male", "Female", "Male", "Female", 
"Female", "Male", "Male", "Female", "Male", "Male", "Female", 
"Female", "Male", "Male", "Female", "Female", "Female", "Female", 
"Female", "Male", "Male", "Male", "Male", "Female", "Female", 
"Female", "Female", "Female", "Male", "Female", "Female", "Male", 
"Male", "Female", "Male", "Female", "Female", "Female", "Female", 
"Female", "Male", "Male", "Female", "Female", "Male", "Female", 
"Female", "Female", "Female", "Female", "Female", "Female", "Female", 
"Female", "Female", "Female", "Female", "Female", "Female", "Male", 
"Female", "Male", "Female", "Male", "Female", "Female", "Female", 
"Female", "Female", "Male", "Male", "Female", "Female", "Male", 
"Female", "Male", "Female", "Female", "Male", "Female", "Female", 
"Female", "Male", "Female", "Male", "Male", "Male", "Female", 
"Female", "Male", "Male", "Male", "Female", "Female", "Male", 
"Female", "Female", "Male", "Female", "Female", "Male", "Female", 
"Male", "Male", "Male", "Male", "Female", "Male", "Male", "Female", 
"Male", "Male", "Male", "Male", "Male", "Male", "Female"), Timepoint = c("bl", 
"bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", 
"bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", 
"bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", 
"bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", 
"bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", 
"bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", 
"bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", 
"bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", 
"bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", 
"bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", 
"bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", 
"bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", 
"bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", 
"bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", 
"bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", "bl", 
"bl", "bl", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", 
"flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", 
"flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", 
"flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", 
"flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", 
"flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", 
"flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", 
"flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", 
"flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", 
"flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", 
"flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", 
"flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", 
"flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", 
"flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", 
"flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", 
"flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", 
"flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", 
"flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", 
"flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", 
"flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", 
"flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", "flu1", 
"flu1", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", 
"flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", 
"flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", 
"flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", 
"flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", 
"flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", 
"flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", 
"flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", 
"flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", 
"flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", 
"flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", 
"flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", 
"flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", 
"flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", 
"flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", 
"flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", 
"flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", 
"flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", 
"flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", 
"flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", 
"flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", "flu2", 
"flu2"), outcome = c(0.0010333, 0.00105981, 0.00103209, 0.001136335, 
0.001130695, 0.00113139, 0.000986063, 0.0010712, 0.00113736, 
0.001108715, 0.00104864, 0.00110772, 0.00110197, 0.00109096, 
0.00109855, 0.00104169, 0.001090875, 0.00112465, 0.001096525, 
0.001030151, 0.000985059, 0.001098955, 0.001069465, 0.00105376, 
0.00106878, 0.00110388, 0.00108702, 0.001162835, 0.001070955, 
0.0010971, 0.00111695, 0.001060525, 0.001035065, 0.00108797, 
0.00103262, 0.001117605, 0.001061707, 0.001156365, 0.00104431, 
0.001056565, 0.00109114, 0.001053765, 0.001064622, 0.001114025, 
0.001045395, 0.00106441, 0.00108481, 0.0011145, 0.001095115, 
0.001099075, 0.001057, 0.001096125, 0.001087175, 0.00109696, 
0.001064795, 0.00108024, 0.00102137, 0.001087645, 0.001103185, 
0.00111948, 0.001110965, 0.0010694, 0.001089425, 0.001129075, 
0.00103784, 0.00104419, 0.00101302, 0.00110335, 0.00111867, 0.00108785, 
0.001098765, 0.001052415, 0.0010976, 0.001064385, 0.00108407, 
0.001076165, 0.00112799, 0.00106542, 0.00105824, 0.001129705, 
0.001051684, 0.001073525, 0.001076575, 0.001049785, 0.00102377, 
0.0011, 0.001039055, 0.00105945, 0.001083505, 0.0010723, 0.001123395, 
0.00103181, 0.00110417, 0.001078155, 0.001129015, 0.00114891, 
0.001115405, 0.00110057, 0.001054205, 0.00105165, 0.001095601, 
0.001112195, 0.001024814, 0.001061645, 0.001081505, 0.00109171, 
0.00112992, 0.001007862, 0.001069345, 0.00108842, 0.001066665, 
0.00106081, 0.001065165, 0.00108621, 0.001099885, 0.00103779, 
0.00103329, 0.00107046, 0.001094945, 0.001063225, 0.001077665, 
0.0011242, 0.001161425, 0.001092915, 0.00109162, 0.001120835, 
0.001060845, 0.001072238, 0.00109486, 0.00111587, 0.001166475, 
0.001060665, 0.00107877, 0.00106572, 0.00103524, 0.001112615, 
0.00106803, 0.00113409, 0.000992363, 0.001057025, 0.001055855, 
0.001165165, 0.001046216, 0.001015101, 0.001052505, 0.001094015, 
0.00104152, 0.00104361, 0.001138575, 0.001056924, 0.0011807, 
0.001090905, 0.00108723, 0.00106178, 0.00113283, 0.001154635, 
0.00110195, 0.001115265, 0.001102255, 0.00106396, 0.001073585, 
0.001047345, 0.001046765, 0.001090765, 0.001022798, 0.001187635, 
0.001033095, 0.00112301, 0.001060205, 0.0010143, 0.00102627, 
0.00110609, 0.001158665, 0.00112353, 0.00097819, 0.001078495, 
0.001106365, 0.00113328, 0.001045545, 0.001081185, 0.00109504, 
0.001102495, 0.00108775, 0.001055425, 0.001056945, 0.001130705, 
0.00115777, 0.001034427, 0.00105974, 0.001089395, 0.001069105, 
0.001054605, 0.00107229, 0.001128765, 0.001111455, 0.001111155, 
0.001095955, 0.001108905, 0.001077275, 0.00104525, 0.00105191, 
0.00111458, 0.001110655, 0.001114555, 0.00110557, 0.001167755, 
0.0010552, 0.001014149, 0.00110002, 0.00104991, 0.001018892, 
0.00112666, 0.001069695, 0.001101185, 0.001066815, 0.00109388, 
0.001079455, 0.001097655, 0.001106855, 0.00108804, 0.001024891, 
0.001102495, 0.001077445, 0.00104224, 0.001064015, 0.001071736, 
0.00110557, 0.00112483, 0.001093865, 0.001075945, 0.001031211, 
0.001122935, 0.00105965, 0.001048305, 0.0010719, 0.00107988, 
0.001161715, 0.001098505, 0.001073105, 0.001049325, 0.001097175, 
0.00111584, 0.00112603, 0.00107674, 0.001147125, 0.00104971, 
0.00108423, 0.001122035, 0.00103364, 0.001071835, 0.001036422, 
0.001016249, 0.00101976, 0.00107818, 0.001067145, 0.001062425, 
0.00110346, 0.00108456, 0.00113592, 0.001015195, 0.001110565, 
0.00110143, 0.001121325, 0.00110386, 0.00112389, 0.001091155, 
0.00105652, 0.001030636, 0.001106515, 0.00109062, 0.001047755, 
0.001030305, 0.00108161, 0.001063235, 0.00112294, 0.001079355, 
0.001052327, 0.00107416, 0.001091, 0.001057725, 0.00107489, 0.0010597, 
0.001086495, 0.001047792, 0.000995212, 0.001092055, 0.00106338, 
0.001078475, 0.00110247, 0.00111765, 0.00113914, 0.001073185, 
0.001074005, 0.001091145, 0.001040767, 0.00106448, 0.00105502, 
0.00108874, 0.00117407, 0.00109106, 0.001044455, 0.00103907, 
0.00104921, 0.001136435, 0.0010907, 0.001101685, 0.001009874, 
0.00106717, 0.001055338, 0.001157375, 0.001061175, 0.000983892, 
0.001017475, 0.001094945, 0.001018795, 0.00107304, 0.00113138, 
0.0010772, 0.00114321, 0.00111869, 0.001086615, 0.0010709, 0.00116752, 
0.001175765, 0.001113535, 0.001123455, 0.001081295, 0.001084425, 
0.00107189, 0.001030513, 0.001076375, 0.001088265, 0.001043751, 
0.001157355, 0.001039899, 0.00116496, 0.00106309, 0.00106822, 
0.001076605, 0.001113025, 0.00115085, 0.00115274, 0.001005614, 
0.00111759, 0.001052205, 0.001106125, 0.001060125, 0.00106945, 
0.00107761, 0.001092695, 0.00107696, 0.001063425, 0.00111791, 
0.001116725, 0.00105891, 0.001033532, 0.001054205, 0.00109295, 
0.00109387, 0.00101807, 0.001066195, 0.001120295, 0.001111565, 
0.001088595, 0.00102183, 0.0010934, 0.00111935, 0.00105371, 0.001038104, 
0.00108314, 0.0011006, 0.001079585, 0.001127775, 0.001140825, 
0.00106203, 0.001059563, 0.001118035, 0.00103535, 0.001027819, 
0.001120055, 0.00099512, 0.001078955, 0.00108867, 0.0010789, 
0.001030445, 0.00106243, 0.001028545, 0.00108679, 0.00108158, 
0.00105624, 0.001110145, 0.00107318, 0.00106523, 0.001099325, 
0.001103515, 0.00112404, 0.001064455, 0.001086375, 0.001109065, 
0.001077765, 0.001040425, 0.001059305, 0.00106362, 0.00109264, 
0.00116222, 0.001079395, 0.00107183, 0.0010652, 0.00106983, 0.00111722, 
0.00112626, 0.001091765, 0.00113837, 0.00104618, 0.00109727, 
0.00114111, 0.001055565, 0.001115175, 0.001059649, 0.001029902, 
0.000986792, 0.00107968, 0.00106097, 0.001067105, 0.001083625, 
0.00109082, 0.00114355, 0.001062825, 0.001099115, 0.001102155, 
0.001122135, 0.001125253, 0.001131355, 0.001104175, 0.001104315, 
0.00103858, 0.001072123, 0.00109497, 0.00103623, 0.00107783, 
0.00107379, 0.00108648, 0.00114186, 0.001081035, 0.00110761, 
0.001102845, 0.001090495, 0.00105551, 0.00108908, 0.001082845, 
0.001110075, 0.0010599, 0.001042808, 0.00108644, 0.00106482, 
0.00111119, 0.001077005, 0.001183845, 0.001134605, 0.00107317, 
0.001039397, 0.00112268, 0.00108716, 0.00108437, 0.0010827, 0.00111043, 
0.001177305, 0.001057715, 0.00110218, 0.0010338, 0.00103996, 
0.00111301, 0.001071455, 0.001072065, 0.00100762, 0.00106678, 
0.001030939, 0.001161045, 0.001080915, 0.001053775, 0.001032233, 
0.001072078, 0.00103878, 0.00112049, 0.00111996, 0.001085025, 
0.001165745, 0.00114263, 0.00106407, 0.00109784, 0.001187105, 
0.001194925, 0.001139195, 0.0011009, 0.00110112, 0.00107563, 
0.001055985, 0.00100656, 0.0010693, 0.001104395, 0.001022506, 
0.00114974, 0.00105716, 0.00113243)), row.names = c(NA, -504L
), class = c("tbl_df", "tbl", "data.frame"))

`outcome` is a physiological measurement sampled from each subject (`ID`) at Timepoint bl (1), flu1 (2), and flu2 (3). `Age`, `Age_group`, and `Sex` are recorded for each subject as well. `Age` is a predictor of `outcome`, and `Sex` is used as an interaction variable.

Goals:

  1. I am trying to estimate whether the slope of the `outcome` variable across the 3 Timepoints differs significantly between `Young` and `Older` adults.

  2. I am trying to estimate the individual (`ID`) variance in `outcome` across timepoints within each `Age_group`.

Is this the correct way to model my goals? The plot does not look right to me. I am new to linear mixed models, so any advice is appreciated.

#mixed
DF_mixed <- lmer(outcome ~ Age * Sex + (1 + Age | Age_group:Timepoint) + (1 | ID), data = DF) 
# identify age*sex interactions to predict outcome value between timepoints, nested in age_group that have random slope and intercepts

#plot
ggplot(DF, aes(x = Age, y = outcome, color = Timepoint)) +
      geom_point(alpha = 0.7) +
      theme_classic() +
      geom_line(data = cbind(DF, pred = predict(DF_mixed)), aes(y = pred), size = 1)   # adding predicted line from mixed model 




1 Answer


Below, I'll walk through suggestions on (1) preparing the data, (2) specifying the model, and (3) examining the model results.

In addition to the tidyverse packages and lme4, I'll use `broom.mixed::glance()` and `tidy()` to examine the model parameters, and `ggeffects::ggpredict()` to generate model predictions for plotting.

library(tidyverse)
library(lme4)
library(broom.mixed)
library(ggeffects)

Data preparation and variable selection

`Timepoint` is a character and would be treated as an unordered category in the model, which is probably not what you want. Using the actual time elapsed since baseline is usually better than a generic "timepoint", especially when the spacing between timepoints varies. For this answer, I computed a substitute, `Timepoint_yrs`, from each subject's change in `Age` relative to baseline; but if you have more precise timing records, you may want to use those instead.
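
If your dataset does include actual visit dates, you could compute the elapsed time from those rather than from `Age`. A minimal sketch, assuming a hypothetical `visit_date` column of class `Date` (not present in the posted data), would replace the age-based calculation in the preparation code below:

# hypothetical: years since baseline computed from real visit dates
# (`visit_date` is an assumed column, not in the posted data)
DF <- DF %>% 
  group_by(ID) %>% 
  mutate(Timepoint_yrs = as.numeric(difftime(visit_date, min(visit_date), units = "days")) / 365.25) %>% 
  ungroup()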

In addition, `outcome` consists of values in a very narrow range around 1e-3; this can cause `lmer` to misbehave, so I scaled it.

DF <- DF %>% 
  group_by(ID) %>% 
  mutate(Timepoint_yrs = Age - min(Age)) %>% 
  ungroup() %>% 
  mutate(outcome_scaled = as.numeric(scale(outcome)))
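
As a quick sanity check, you can confirm that the computed `Timepoint_yrs` increases across the timepoints:

# mean elapsed years should rise from bl to flu1 to flu2
DF %>% 
  group_by(Timepoint) %>% 
  summarise(mean_yrs = mean(Timepoint_yrs), n = n())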

Including both `Age` and `Age_group` in the model would cause multicollinearity problems, since one is a function of the other. Given the extremely bimodal distribution of `Age`, I used only `Age_group`. As noted above, change in age over time is now captured by `Timepoint_yrs`.
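
If you want to see that redundancy for yourself, a quick exploratory sketch (not part of the modelling) shows the two well-separated age clusters:

# histogram of Age coloured by Age_group: two separated clusters
ggplot(DF, aes(x = Age, fill = Age_group)) +
  geom_histogram(binwidth = 2, position = "identity", alpha = 0.6) +
  theme_classic()

# age range within each group
DF %>% group_by(Age_group) %>% summarise(min_age = min(Age), max_age = max(Age))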

Model specification

You write that you are "trying to estimate whether the slope of the outcome variable across the 3 Timepoints differs significantly between Young and Older adults" — i.e., an `Age_group` by `Timepoint_yrs` interaction. You also write that "Sex is used as an interaction variable". Putting these together, you are looking at a 3-way `Age_group` × `Sex` × `Timepoint_yrs` interaction. Because `Age_group` and `Sex` vary only between subjects, we specify this at the level of the fixed effects:

outcome_scaled ~ Age_group * Sex * Timepoint_yrs

Because each subject (`ID`) has multiple measurements, we include a random intercept for `ID`:

outcome_scaled ~ Age_group * Sex * Timepoint_yrs + (1 | ID)

We could also allow the slope of `Timepoint_yrs` to vary across individuals. From your post, I can't see a strong theoretical reason for or against including random slopes — so we'll fit both and see which model fits better.

# model 1 - random intercepts
rnd_intercept <- lmer(outcome_scaled ~ Age_group * Sex * Timepoint_yrs + (1 | ID), data = DF) 

# model 2 - random intercepts and slopes
rnd_int_slope <- lmer(outcome_scaled ~ Age_group * Sex * Timepoint_yrs + (Timepoint_yrs | ID), data = DF)

Model results

glance(rnd_intercept)
# # A tibble: 1 x 6
# sigma logLik   AIC   BIC REMLcrit df.residual
#   <dbl>  <dbl> <dbl> <dbl>    <dbl>       <int>
# 1 0.510  -575. 1170. 1212.    1150.         494

glance(rnd_int_slope)
# A tibble: 1 x 6
#   sigma logLik   AIC   BIC REMLcrit df.residual
#   <dbl>  <dbl> <dbl> <dbl>    <dbl>       <int>
# 1 0.487  -575. 1173. 1224.    1149.         492

Looking at the AIC and BIC, the random-intercept model appears to be the better / more parsimonious one; absent any theoretical reason to prefer the random-slope model, we'll stick with the random-intercept model.
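
If you prefer a formal comparison to eyeballing AIC/BIC, a likelihood-ratio test is one option; `anova()` on merMod objects refits the REML fits with ML before comparing:

# likelihood-ratio test: random intercepts vs. random intercepts + slopes
anova(rnd_intercept, rnd_int_slope)

Keep in mind that this test is conservative when comparing variance components, since the null value sits on the boundary of the parameter space.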

tidy(rnd_intercept, conf.int = TRUE)
# # A tibble: 10 x 8
#    effect   group    term               estimate std.error statistic conf.low conf.high
#    <chr>    <chr>    <chr>                 <dbl>     <dbl>     <dbl>    <dbl>     <dbl>
#  1 fixed    NA       (Intercept)         -0.123     0.122    -1.01    -0.361     0.116 
#  2 fixed    NA       Age_groupYoung      -0.0142    0.221    -0.0643  -0.448     0.419 
#  3 fixed    NA       SexMale              0.248     0.214     1.16    -0.171     0.667 
#  4 fixed    NA       Timepoint_yrs        0.131     0.0407    3.21     0.0509    0.210 
#  5 fixed    NA       Age_groupYoung:Se~  -0.0704    0.317    -0.222   -0.692     0.551 
#  6 fixed    NA       Age_groupYoung:Ti~  -0.152     0.0654   -2.32    -0.280    -0.0234
#  7 fixed    NA       SexMale:Timepoint~  -0.0547    0.0771   -0.709   -0.206     0.0964
#  8 fixed    NA       Age_groupYoung:Se~   0.0192    0.101     0.190   -0.178     0.217 
#  9 ran_pars ID       sd__(Intercept)      0.862    NA        NA       NA        NA     
# 10 ran_pars Residual sd__Observation      0.510    NA        NA       NA        NA     
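
lme4 deliberately does not report p-values for the fixed effects. If you want them, one common option (assuming you are willing to install lmerTest) is to refit the same formula with `lmerTest::lmer()`, which adds Satterthwaite degrees of freedom and p-values to `summary()`:

# optional: p-values via the Satterthwaite approximation (requires lmerTest)
library(lmerTest)
rnd_intercept_p <- lmer(outcome_scaled ~ Age_group * Sex * Timepoint_yrs + (1 | ID), data = DF)
summary(rnd_intercept_p)   # fixed-effects table now includes df and Pr(>|t|)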

Finally, we can plot predicted values from the model. Your original code predicted a data point for every row of the raw data and then essentially traced a line between rows, which, as you noted, isn't very informative. Instead, we want to plot just one predicted outcome value (+/- error) for each unique combination of your predictors. `ggeffects::ggpredict()` is a handy shortcut for this; `emmeans::emmeans()` would be another option.

plot_data <- ggpredict(rnd_intercept, terms = c("Timepoint_yrs", "Age_group", "Sex"))
# col names: x = Timepoint_yrs, group = Age_group, facet = Sex, predicted = model-predicted outcome_scaled

ggplot(plot_data, aes(x, predicted)) +
  geom_line(aes(color = group)) +
  geom_ribbon(aes(ymin = conf.low, ymax = conf.high, fill = group), alpha = .1) +
  facet_wrap(vars(facet)) +
  labs(x = "Years Since Baseline", y = "Outcome\n(scaled, model-predicted)") +
  theme_classic()

[Figure: faceted plot of model-predicted values by Age_group (colour) and Sex (panels) against years since baseline, with 95% confidence ribbons; predicted values generally increase over time.]

Based on your model coefficients, you may also want to look at `Age_group` crossed with `Timepoint_yrs`, as well as the main effect of `Timepoint_yrs` collapsing over the other variables. You can do that by changing the `terms` argument to `ggpredict()` and modifying the ggplot specification accordingly.
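
For example, a sketch of what those calls might look like (ggeffects objects also have a built-in `plot()` method):

# Age_group x Timepoint_yrs interaction, collapsing over Sex
plot(ggpredict(rnd_intercept, terms = c("Timepoint_yrs", "Age_group")))

# main effect of Timepoint_yrs alone
plot(ggpredict(rnd_intercept, terms = "Timepoint_yrs"))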

Answered 2022-02-09T03:33:48.000