
I have two experimentally determined variables (y1_exp, y2_exp) that vary with x, where y2_exp changes in response to changes in y1_exp (but y1_exp is not influenced by y2_exp). Here is the data:

library(tidyverse)
library(minpack.lm)

data <- read_csv("x,y1_exp,y2_exp
      0,0.00553214,0.006103046
     5,0.005526549,0.006079372
     10,0.005521163,0.006092119
     15,0.005528608,0.006065714
     20,0.005526002,0.006062165
     25,0.005523214,0.006057619
     30,0.005523214,0.006048626
     35,0.005523214,0.006044087
     40,0.005523214,0.006044087
     45,0.005520607,0.006048626
     50,0.005512257,0.006025025
     55,0.005520607,0.006013361
     60,0.005518752,0.006007923
     65,0.005524318,0.006025025
     70,0.005520607,0.006016856
     75,0.005513185,0.006002485
     80,0.005515968,0.005995235
     85,0.005788141,0.006016856
     90,0.005733146,0.006033983
     95,0.005683015,0.006028386
     100,0.005644958,0.006013984
     105,0.005618042,0.006004894
     110,0.005604063,0.006010348
     115,0.005590122,0.006010348
     120,0.005572465,0.005997623
     125,0.005572465,0.006000349
     130,0.005567818,0.00599944
     135,0.005567818,0.006000349
     140,0.005569677,0.005997623
     145,0.005567818,0.005990351
     150,0.005567818,0.00598308
     155,0.00555401,0.005977626
     160,0.005569677,0.00598308
     165,0.005569677,0.005977626
     170,0.00556503,0.00598287
     175,0.005562242,0.00598287
     180,0.005572465,0.005974678
     185,0.005578041,0.00598287
     190,0.005586308,0.005971947
     195,0.005585377,0.005966486
     200,0.005588169,0.00598287
     205,0.005602975,0.005978088
     210,0.005628144,0.005991762
     215,0.005642127,0.005992449
     220,0.005644924,0.005986971
     225,0.00565609,0.005984232
     230,0.00565609,0.005994952
     235,0.005667294,0.005997695
     240,0.005672896,0.006000438
     245,0.005684107,0.006001111
     250,0.005696264,0.006009352
     255,0.005707486,0.005998364
     260,0.005710292,0.006012776
     265,0.005724372,0.006003606
     270,0.005732801,0.006018033
     275,0.005758179,0.006018033
     280,0.005769437,0.006018033
     285,0.005780694,0.006026969
     290,0.005791952,0.006028809
     295,0.00581447,0.006026969
     300,0.005828544,0.006035918
     305,0.005845434,0.006037761
     310,0.005856694,0.006037761
     315,0.005870771,0.006042366
     320,0.005886987,0.006040524
     325,0.005892626,0.006040524
     330,0.005909544,0.006054105
     335,0.005922704,0.006054105
     340,0.005922704,0.00605964
     345,0.005934302,0.006062408
     350,0.00593995,0.006076028
     355,0.005942775,0.006076028
     360,0.005960063,0.006090599
     365,0.005982692,0.006087822
     370,0.005998722,0.006090599
     375,0.006004379,0.006095226
     380,0.005998722,0.006107052
     385,0.006010037,0.006104272
     390,0.006012866,0.006104272
     395,0.006018524,0.006112613
     400,0.006041606,0.00612354
     405,0.006047273,0.006117247
     410,0.006047273,0.006117247
     415,0.006039717,0.006122808
     420,0.006048663,0.006131894
     425,0.006041606,0.006134679
     430,0.006058607,0.006137464
     435,0.006067108,0.00613115
     440,0.006067108,0.006140249
     445,0.006064274,0.006136711
     450,0.006061441,0.00613115
     455,0.006076094,0.006139492
     460,0.00605623,0.006147834
     465,0.006067581,0.006159743
     470,0.006078932,0.006159743
     475,0.006085093,0.006164385
     480,0.006104989,0.00617911
     485,0.006139098,0.00617911
     490,0.00614194,0.0061819
     495,0.006147049,0.006184689
     500,0.00614194,0.006186548
     505,0.006127728,0.006190268
     510,0.006147241,0.006203177
     515,0.006158628,0.006208764
     520,0.006163373,0.006211558
     525,0.006171914,0.006220773
     530,0.006171914,0.006220773
     535,0.006188997,0.006223571
     540,0.006194692,0.006226369
     545,0.006201014,0.006231965
     550,0.006206717,0.006244011
     555,0.006206717,0.00623756
     560,0.006205767,0.006249616
     565,0.006203533,0.006249616
     570,0.006189255,0.00625522
     575,0.00619211,0.006264498
     580,0.006177832,0.006265434
     585,0.00617554,0.00626824
     590,0.00617268,0.006271047
     595,0.00617554,0.006271047
     600,0.006186979,0.00627666
     605,0.006189838,0.006280347
     610,0.00617554,0.006285969
     615,0.006181259,0.006285969
     620,0.00617268,0.006283158
     625,0.006173226,0.006283158
     630,0.00621487,0.006283158
     635,0.00662509,0.006246725
     640,0.006882793,0.006060563
     645,0.007245471,0.005928867
     650,0.008473559,0.006350231
     655,0.009247253,0.006737696
     660,0.009883996,0.007087109
     665,0.010312034,0.007251846
     670,0.010135539,0.00731662
     675,0.009749611,0.007002351
     680,0.009783859,0.006887831
     685,0.010029351,0.006935397
     690,0.01016614,0.007080142
     695,0.010375834,0.007156219
     700,0.010447978,0.007188014
     705,0.010649555,0.007368508
     710,0.010884113,0.007595682
     715,0.010975526,0.007633395
     720,0.011137886,0.007690818
     725,0.011303968,0.007845451
     730,0.011504503,0.008027457
     735,0.011601149,0.008080757
     740,0.011728738,0.008191208
     745,0.011446146,0.008243414
     750,0.010941854,0.008023407
     755,0.010871756,0.007969683
     760,0.010906964,0.00798611
     765,0.010976065,0.008050875
     770,0.011007462,0.008234035
     775,0.011119685,0.008294425
     780,0.011392084,0.008469461
     785,0.011416786,0.008598086
     790,0.011426212,0.008616628
     795,0.011443019,0.008600298
     800,0.011487009,0.008696896
     805,0.011484189,0.008789979
     810,0.011547174,0.008854289
     815,0.011329945,0.008795137
     820,0.01118489,0.008819941
     825,0.01122512,0.008823249
     830,0.011235391,0.008928913
     835,0.011128169,0.00898777
     840,0.011073376,0.008993363
     845,0.010992715,0.009012943
     850,0.01111164,0.009076926
     855,0.011218571,0.009180769
     860,0.01169673,0.009473429
     865,0.011927231,0.009710296
     870,0.012101153,0.009861681
     875,0.011931468,0.009703051
     880,0.01179021,0.009621127
     885,0.011744728,0.009595428
     890,0.011687705,0.009624336
     895,0.011663317,0.009649306
     900,0.011775132,0.009686622
     905,0.011606612,0.009745953
     910,0.011546408,0.009753433
     915,0.011493062,0.009792618
     920,0.01154355,0.009824813
     925,0.01148163,0.009869676
     930,0.011417726,0.009874449
     935,0.0113977,0.009890256
     940,0.011315945,0.009891715
     945,0.011219218,0.009903602
     950,0.011128907,0.009908787
     955,0.011021509,0.009873616
     960,0.010931127,0.009864977
     965,0.010870579,0.009871005
     970,0.011099307,0.009939085
     975,0.011633858,0.010239904
     980,0.012124254,0.010485295
     985,0.012055284,0.010508016
     990,0.011873916,0.010467112
     995,0.011669026,0.010341881
     1000,0.01151546,0.01029192
     1005,0.011362137,0.010227859
     1010,0.011214379,0.010158445
     1015,0.01141196,0.010189825
     1020,0.011609125,0.010348048
     1025,0.011478788,0.010374885
     1030,0.011285931,0.010258824
     1035,0.011100027,0.010180051
     1040,0.010949988,0.010139262
     1045,0.010811721,0.010089286
     1050,0.011463506,0.010302578
     1055,0.011543184,0.010475958
     1060,0.011519785,0.010444628
     1065,0.011597506,0.010448
     1070,0.011625158,0.010433885
     1075,0.011673233,0.010499426
     1080,0.011623437,0.010488115
     1085,0.011556184,0.010473977
     1090,0.011455804,0.010461175
     1095,0.011468944,0.010469645
     1100,0.011446254,0.010480337
     1105,0.01139332,0.010475958
     1110,0.011295969,0.010475958
     1115,0.011206453,0.010441904
     1120,0.011113431,0.010435657
     1125,0.011085183,0.010434068
     1130,0.010985393,0.010430596
     1135,0.011187642,0.010504032
     1140,0.011678679,0.010704961
     1145,0.011937,0.010935956
     1150,0.012063323,0.011025534
     1155,0.012075523,0.011070395
     1160,0.011995448,0.011079496
     1165,0.011926247,0.011053494
     1170,0.011872694,0.011043828
     1175,0.011765039,0.011006141
     1180,0.011667538,0.010975287
     1185,0.011552344,0.010921466
     1190,0.011420588,0.010858289
     1195,0.011286696,0.010794546
     1200,0.011520687,0.010848888
     1205,0.011868758,0.011034132
     1210,0.011766439,0.011116272
     1215,0.011543698,0.011015375
     1220,0.011364646,0.010913784
     1225,0.011217294,0.010832708
     1230,0.011090832,0.010772
     1235,0.010976536,0.010722906
     1240,0.010882105,0.010685284
     1245,0.010790014,0.010637493
     1250,0.010675908,0.01059083
     1255,0.010611841,0.010551785
     1260,0.010520118,0.01049439
     1265,0.010436363,0.010462116
     1270,0.010365772,0.010416168
     1275,0.010283637,0.010364018
     1280,0.010210809,0.010336655
     1285,0.010150285,0.010270346
     1290,0.0100988,0.010250441
     1295,0.010036937,0.010209833
     1300,0.009969535,0.010162162
     1305,0.0098893,0.010102105
     1310,0.009811587,0.010069935
     1315,0.00975446,0.010028163
     1320,0.009710339,0.009973177
     1325,0.009659878,0.009910431
     1330,0.009597342,0.009903557
     1335,0.009534978,0.009862383
     1340,0.009441767,0.009811664
     1345,0.009402898,0.009765549
     1350,0.009555879,0.0097702
     1355,0.009819939,0.009819344
     1360,0.009979651,0.009902473
     1365,0.010059698,0.00993667
     1370,0.010138605,0.009974397
     1375,0.010146579,0.009986544
     1380,0.010124895,0.009972081
     1385,0.010105922,0.009951555
     1390,0.010063733,0.009927461
     1395,0.010055871,0.009923
     1400,0.010130486,0.009923
     1405,0.01025683,0.009958444
     1410,0.010352893,0.009993078
     1415,0.010406697,0.010036551
     1420,0.010394966,0.010040813
     1425,0.01037857,0.010059473
     1430,0.010364907,0.010071851
     1435,0.010320278,0.010053811
     1440,0.010245734,0.010034871
     1445,0.010159804,0.010004539
     1450,0.010079454,0.009978771
     1455,0.009973549,0.009944079
     1460,0.009914505,0.009903577
     1465,0.009818433,0.009862329
     1470,0.009757147,0.009836862
     1475,0.009727332,0.009806513
     1480,0.0098109,0.009814869
     1485,0.009880298,0.009838812
     1490,0.009905426,0.009868682
     1495,0.009898913,0.009880827
     1500,0.009881097,0.009897439
     1505,0.009843015,0.009896232
     1510,0.010190072,0.009962645
     1515,0.010599498,0.010127028
     1520,0.010942795,0.010263008
     1525,0.011170186,0.010381296
     1530,0.011395782,0.010486056
     1535,0.011548349,0.010574059
     1540,0.01176725,0.010667444
     1545,0.011854934,0.010749314
     1550,0.011749957,0.010777671
     1555,0.011645574,0.010769029
     1560,0.01153174,0.010751923
     1565,0.01142172,0.010713116
     1570,0.01132999,0.010701874
     1575,0.011643676,0.010777677
     1580,0.012083217,0.010966761
     1585,0.012234384,0.011089843
     1590,0.01230373,0.011191853
     1595,0.012350221,0.011229474
     1600,0.012288471,0.011247202
     1605,0.01223059,0.011238763
     1610,0.012172839,0.011201678
     1615,0.012078904,0.01119393
     1620,0.011996912,0.011158662
     1625,0.011895883,0.011159118
     1630,0.011740926,0.011088439
     1635,0.011676492,0.011087635
     1640,0.011528284,0.011017164
     1645,0.011790256,0.011085255
     1650,0.011953398,0.011129903
     1655,0.012046214,0.011223616
     1660,0.012033716,0.011271508
     1665,0.011999419,0.011260053
     1670,0.011980367,0.011262917
     1675,0.011934646,0.011261007
     1680,0.011831799,0.011251461
     1685,0.011778945,0.011223616
     1690,0.011643044,0.011198312
     1695,0.011550501,0.011172109
     1700,0.011461248,0.011135939
     1705,0.011361141,0.011090447
     1710,0.011220276,0.011014545
     1715,0.011088828,0.0109503
     1720,0.010983128,0.010912937
     1725,0.010792283,0.010821904
     1730,0.010785869,0.0107725
     1735,0.010750802,0.010723305
     1740,0.010724394,0.010727408
     1745,0.011020584,0.01077812
     1750,0.01146445,0.011004902
     1755,0.011635392,0.011139519
     1760,0.011612069,0.011182085
     1765,0.011540083,0.011148072
     1770,0.011446144,0.011115929
     1775,0.011369663,0.011066511
     1780,0.011327267,0.011051513
     1785,0.011728344,0.01111432
     1790,0.011871734,0.011236811
     1795,0.011881626,0.01120834
     1800,0.011876553,0.011194875
     1805,0.011841874,0.01116778
     1810,0.011800381,0.01116778
     1815,0.011834656,0.011162121
     1820,0.011859992,0.01115675
     1825,0.011904479,0.01120268
     1830,0.011887852,0.01121683
     1835,0.011930542,0.01120834
     1840,0.011897012,0.01121683
     1845,0.011772847,0.011230774
     1850,0.011648206,0.01120268
     1855,0.011587653,0.011161459
     1860,0.011609475,0.011155129
     1865,0.011606612,0.011167355
     1870,0.01163715,0.011165474
     1875,0.011702345,0.011181461
     1880,0.011630658,0.011184283
     1885,0.011704526,0.011198191
     1890,0.011687294,0.011195365
     1895,0.011753668,0.011204784
     1900,0.011730171,0.01128841
     1905,0.01169363,0.011272173
     1910,0.011702284,0.011255141
     1915,0.011734749,0.011251356
     1920,0.011730171,0.011282397
     1925,0.011712211,0.011309709
     1930,0.011755791,0.011301904
     1935,0.011715105,0.011313311
     1940,0.011647555,0.011296201
     1945,0.011616716,0.01129335
     1950,0.011570587,0.011280993
     1955,0.011526489,0.011261305
     1960,0.011478671,0.011255611
     1965,0.011398653,0.011232835
     1970,0.011357835,0.011207538
     1975,0.011266999,0.01117283
     1980,0.011228383,0.011140089
     1985,0.011168425,0.011120624
     1990,0.011365904,0.011122848
     1995,0.011927272,0.01128274
     2000,0.012269557,0.011463536
     2005,0.012610168,0.011623446
     2010,0.012178617,0.011607966
     2015,0.012009799,0.011539489
     2020,0.011862455,0.011453127
     2025,0.011738504,0.011402674
     2030,0.011623752,0.011331657
     2035,0.011528515,0.011290242
     2040,0.011422442,0.011249917
     2045,0.011297187,0.011192985
     2050,0.011224108,0.011134442
     2055,0.01113363,0.01109932
     2060,0.011045227,0.011065237
     2065,0.011351941,0.011120562
     2070,0.011576883,0.01120359
     2075,0.011567037,0.011223171
     2080,0.011478605,0.011215587
     2085,0.011391364,0.011166053
     2090,0.011298781,0.01111384
     2095,0.011181323,0.011080428
     2100,0.011093248,0.011002934
     2105,0.010982501,0.010963566
     2110,0.010883381,0.01090522
     2115,0.010814714,0.010859742
     2120,0.010721912,0.010796415
     2125,0.010683917,0.010766847
     2130,0.010580852,0.010725531
     2135,0.010116913,0.010564735
     2140,0.009553532,0.010407921
     2145,0.009338262,0.010417143
     2150,0.009155087,0.010448563
     2155,0.008953537,0.010404576
     2160,0.008783894,0.010359066
     2165,0.008633234,0.010342206
     2170,0.008503224,0.010270291
     2175,0.008354554,0.010227032
     2180,0.00827382,0.010135737
     2185,0.008141687,0.010084595
     2190,0.008032464,0.010089726
     2195,0.007964806,0.010009439
     2200,0.007898842,0.009945284
     2205,0.008107711,0.009910075
     2210,0.007992595,0.009844644
     2215,0.007960512,0.009825585
     2220,0.007998377,0.009843155
     2225,0.008159075,0.009845001
     2230,0.007983655,0.009766526
     2235,0.00784587,0.009570522
     2240,0.007507169,0.009276039
     2245,0.007153126,0.009055356
     2250,0.006969728,0.008921778
     2255,0.006750942,0.008829258
     2260,0.006683758,0.008575119
     2265,0.006760147,0.008532829
     2270,0.006586153,0.008526863
     2275,0.006708381,0.008433861
     2280,0.006529335,0.008342521
     2285,0.00642451,0.008397442
     2290,0.006863021,0.008618997
     2295,0.006855758,0.008662227
     2300,0.006786828,0.008711552
     2305,0.006929332,0.008715441
     2310,0.006950265,0.008713845
     2315,0.006885391,0.008718283
     2320,0.006809618,0.008671087
     2325,0.007080174,0.008741003
     2330,0.007041501,0.008670396
     2335,0.006937773,0.008681892
     2340,0.006842371,0.00864445
     2345,0.007110533,0.008602165
     2350,0.006981684,0.008549515
     2355,0.006870447,0.008522203
     2360,0.007093723,0.00856686
     2365,0.006991019,0.008483845
     2370,0.006858281,0.008487377
     2375,0.007004398,0.008393025
     2380,0.006777853,0.008164761
     2385,0.006505572,0.007962258
     2390,0.006511813,0.007893978
     2395,0.006374155,0.007896467
     2400,0.006662963,0.007973094
     2405,0.006567388,0.008032516
     2410,0.006718302,0.008016534
     2415,0.006707408,0.008036541
     2420,0.006590093,0.008069528
     2425,0.006832484,0.008044899
     2430,0.006708111,0.008031266
     2435,0.006611892,0.008018998
     2440,0.006838235,0.008003074
     2445,0.006704078,0.007986184
     2450,0.006720489,0.008019776
     2455,0.006792322,0.007947677
     2460,0.006655819,0.007975837
     2465,0.006824524,0.007934253
     2470,0.006750867,0.007881679
     2475,0.006636794,0.007900702
     2480,0.006825441,0.007883455
     2485,0.006679557,0.007879763
     2490,0.006826358,0.007877862
     2495,0.006758137,0.007808681
     2500,0.006748794,0.00787422
     2505,0.006825446,0.007796849
     2510,0.006683937,0.007843116
     2515,0.006848461,0.007754702
     2520,0.006694326,0.007813946
     2525,0.006884377,0.007754702
     2530,0.006710943,0.007776575
     2535,0.006896459,0.007763526
     2540,0.006738777,0.007731582
     2545,0.006737715,0.007694166
     2550,0.006769278,0.007683216
     2555,0.006593724,0.007674512
     2560,0.006824524,0.007680479
     2565,0.006655819,0.007632121
     2570,0.006802394,0.007624822
     2575,0.00670024,0.007622085
     2580,0.006649097,0.007594717
     2585,0.00672338,0.007587419
     2590,0.006545242,0.007602927
     2595,0.006748197,0.007557317
     2600,0.00658554,0.007505998
     2605,0.00677122,0.007597453
     2610,0.006589524,0.007501445
     2615,0.006753831,0.007505998
     2620,0.006610886,0.007479591
     2625,0.006735563,0.007493008
     2630,0.006612046,0.007471186
     2635,0.006672271,0.007464822
     2640,0.006638575,0.007433002
     2645,0.006616383,0.00740573
     2650,0.006638023,0.007382001
     2655,0.006551678,0.007362941
     2660,0.006662652,0.007389262
     2665,0.006506334,0.007340253
     2670,0.006646377,0.007321173
     2675,0.006493024,0.007338389
     2680,0.006673791,0.007323891
     2685,0.006502498,0.007306655
     2690,0.006682945,0.007293989
     2695,0.006525259,0.007284942
     2700,0.006685669,0.007274087
     2705,0.006504655,0.007274087
     2710,0.006666662,0.007239773
     2715,0.006492267,0.007250612
     2720,0.006675731,0.007263257
     2725,0.006494083,0.007228935
     2730,0.006674824,0.007228994
     2735,0.006500747,0.007202841
     2740,0.00669387,0.007210056
     2745,0.006525221,0.007160461
     2750,0.006675768,0.00717579
     2755,0.006520048,0.007168576
     2760,0.006678484,0.007168576
     2765,0.006530909,0.007160461
     2770,0.006665841,0.007155052
     2775,0.006518489,0.007144232
     2780,0.006665841,0.007136118
     2785,0.00650674,0.007107489
     2790,0.006645094,0.007124593
     2795,0.00647541,0.007098488
     2800,0.006618141,0.007079829
     2805,0.006423782,0.007053133
     2810,0.006581413,0.007050441
     2815,0.006414786,0.007037252
     2820,0.006592181,0.007041732
     2825,0.006425581,0.007030084
     2830,0.006597565,0.007034564
     2835,0.006422882,0.007020229
     2840,0.006592181,0.007020229
     2845,0.006393199,0.006994247
     2850,0.006545637,0.006973096
     2855,0.006386386,0.006975779
     2860,0.006564452,0.006975779
     2865,0.006395719,0.006981146
     2870,0.006551013,0.006963256
     2875,0.006397163,0.006944473
     2880,0.006542949,0.006963256
     2885,0.006417243,0.00694179
     2890,0.006514557,0.006926092
     2895,0.006457966,0.006978463
     2900,0.006682091,0.007014853
     2905,0.006552118,0.00700109
     2910,0.006646853,0.006994989
     2915,0.006498219,0.006966195
     2920,0.006601762,0.006965296
     2925,0.006512636,0.006952699
     2930,0.006603662,0.006952699
     2935,0.006500251,0.006948201
     2940,0.006593839,0.006921687
     2945,0.006481357,0.006929772
     2950,0.006628942,0.006913603
     2955,0.00650565,0.006934705
     2960,0.006644226,0.00692391
     2965,0.006534263,0.006930207
     2970,0.006581761,0.006930627
     2975,0.006547781,0.006917112
     2980,0.006563851,0.006911707
     2985,0.006491254,0.006883428
     2990,0.006572239,0.006867237
     2995,0.006467225,0.006865997
     3000,0.006593916,0.006865997
     3005,0.006437582,0.006874081
     3010,0.006567953,0.006876775
     3015,0.006378303,0.006848601
     3020,0.006519657,0.00683784
     3025,0.006446841,0.006814229
     3030,0.006451147,0.006794532
     3035,0.006496538,0.006797824
     3040,0.006402035,0.006795142
     3045,0.006494157,0.006798413
     3050,0.006330998,0.006779667
     3055,0.006438382,0.006760657
     3060,0.006432846,0.006731899
     3065,0.006380863,0.006732532
     3070,0.006461641,0.006743195
     3075,0.006338263,0.006737863
     3080,0.006430743,0.006751192
     3085,0.00632229,0.006725162
     3090,0.006428971,0.006738471
     3095,0.006386188,0.00669766
     3100,0.00638774,0.006708306
     3105,0.006428971,0.006700967
     3110,0.00638134,0.006698309
     3115,0.006457336,0.00671629
     3120,0.00638111,0.006703624
     3125,0.006449358,0.006718952
     3130,0.006365158,0.006700321
     3135,0.006455563,0.006702983
     3140,0.006343889,0.006692337
     3145,0.006432671,0.006708306
     3150,0.006322623,0.00667705
     3155,0.006394614,0.006687679
     3160,0.006297282,0.006659335
     3165,0.006393041,0.00667705
     3170,0.006306454,0.006649411
     3175,0.006369399,0.006669752
     3180,0.006373601,0.00665737
     3185,0.006370481,0.006661577
     3190,0.006395693,0.006660023
     3195,0.006381053,0.006652064
     3200,0.006412483,0.006649411
     3205,0.006389863,0.006634201
     3210,0.006420436,0.00665737
     3215,0.006389863,0.006649411
     3220,0.00642839,0.006644104
     3225,0.006375767,0.006633492
     3230,0.006393223,0.006618307
     3235,0.006373124,0.006623605
     3240,0.006375767,0.006614775
     3245,0.00635131,0.006614775
     3250,0.006381053,0.00659698
     3255,0.006341844,0.00659698
     3260,0.006366265,0.00659698
     3265,0.006321643,0.006587282
     3270,0.006358348,0.006577462
     3275,0.006316604,0.006577462
     3280,0.006374182,0.006577462
     3285,0.006313739,0.006577462
     3290,0.00638034,0.006577462
     3295,0.006304692,0.006580103
     3300,0.006366265,0.006595068
     3305,0.006280946,0.006574821
     3310,0.006381053,0.00660756
     3315,0.006289741,0.006584637
     3320,0.006360987,0.00659698")

Using a first-order difference equation (a recursively computed function), I calculate a modelled value y2_mod for every x. The parameter I want to fit is a (the parameter b is fixed). I use the nls.lm function for the fitting. This is what the code looks like:

fitparms <- c(a = .008)

fn_residuals <- function(fitparms, data) {
  b <- .00111  # fixed parameter

  t  <- data$x
  dT <- t[2] - t[1]

  y1_exp <- data$y1_exp

  # recursive computation of the modelled y2 (first-order difference equation)
  y2_mod <- numeric(length(t))
  y2_mod[1] <- data$y2_exp[1]

  for (h in seq_len(length(t) - 1)) {
    y2_mod[h + 1] <- (fitparms[1] * (y1_exp[h + 1] - y2_mod[h]) +
                      b * (y1_exp[h + 1] - y2_mod[h])) * dT + y2_mod[h]
  }

  # keep the modelled values for later plotting (creates data_2 in the global environment)
  data_2 <<- data %>% 
    add_column(y2_mod = y2_mod)

  # return the predicted vs. experimental residuals
  ssqres <- data_2$y2_mod - data_2$y2_exp
  return(ssqres)
}
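
Written out, the recursion inside fn_residuals is simply (same names as in the code, just restating the loop above):

y2_mod[h+1] = y2_mod[h] + (a + b) * (y1_exp[h+1] - y2_mod[h]) * dT,   with   y2_mod[1] = y2_exp[1]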

# Plotting the data (data_2 is created inside fn_residuals via <<-,
# so fn_residuals must have been run at least once before this step)
data_2 %>% 
  gather(y1_exp, y2_exp, y2_mod, key = "y", value = "value") %>% 
  ggplot(aes(x = x)) +
  geom_line(aes(y = value, color = y))

# Fitting
fitval <- nls.lm(par = fitparms, 
                 fn = fn_residuals, 
                 lower = rep(0, 1), # lower limit for the fitted parameter
                 control = nls.lm.control(nprint = 1), 
                 data = data)

Here is my output:

It.    0, RSS = 0.00020291, Par. =      0.008
It.    1, RSS = 8.10975e-05, Par. =  0.00326637
It.    2, RSS = 7.9177e-05, Par. =  0.00349277
It.    3, RSS = 7.90245e-05, Par. =  0.00356081
It.    4, RSS = 7.90137e-05, Par. =  0.00357927
It.    5, RSS = 7.90129e-05, Par. =  0.00358413
It.    6, RSS = 7.90129e-05, Par. =  0.00358539
It.    7, RSS = 7.90129e-05, Par. =  0.00358572
It.    8, RSS = 7.90129e-05, Par. =  0.0035858

# Summary of fit
summary(fitval)

Parameters:
   Estimate Std. Error t value Pr(>|t|)    
a 3.586e-03  6.921e-05   51.81   <2e-16 ***
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Residual standard error: 0.000345 on 664 degrees of freedom
Number of iterations to termination: 8 
Reason for termination: Relative error in the sum of squares is at most `ftol'. 


# Estimated parameter
parest <- as.list(coef(fitval))
parest

$a
[1] 0.003585804
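
To visualise the result with the estimated parameter, data_2 can be refreshed by running fn_residuals once more with the fitted value (a small sketch reusing the objects defined above):

# refresh data_2 with the fitted parameter, then plot experiment vs. model
invisible(fn_residuals(coef(fitval), data))

data_2 %>% 
  gather(y1_exp, y2_exp, y2_mod, key = "y", value = "value") %>% 
  ggplot(aes(x = x)) +
  geom_line(aes(y = value, color = y))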

I am new to this field, so my questions are:

  • Is this the right approach to this kind of problem (i.e. is using nls.lm appropriate)?
  • Is there another, better way to find the optimal parameter value for this kind of curve fit?

Also, I could not find any package that could be used to implement the difference equation for my problem, i.e. one in which y1_exp at every x is used to compute y2_mod. For example, the deSolve package has a dedicated solver for discrete-time models / difference equations (method = "iteration"; see p. 30 of the vignette "Solving Initial Value Differential Equations in R"), but with that solver you can only supply an initial value (e.g. y1_exp[1]).

  • So, is there a better way to code my difference-equation-like problem, or does a package that handles this kind of problem already exist?

Thank you! Jenei


1 Answer


I have not checked whether everything is correct, but nls.lm is indeed a suitable approach. Another, more advanced and more flexible package is FME.
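
For example, the same residual function can be passed to FME::modFit, which uses a Levenberg-Marquardt routine by default (a minimal, untested sketch that assumes fn_residuals and data from the question are already defined):

library(FME)

fit_fme <- modFit(f = fn_residuals,
                  p = c(a = 0.008),
                  data = data,   # extra arguments are passed on to fn_residuals
                  lower = 0)
summary(fit_fme)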

You are right that the deSolve package can also handle difference equations. Depending on how the equation is specified, you can use method = "euler" (if the model returns the differences) or method = "iteration" (if the model returns the new values). The model of course starts from the (vector of) initial value(s), but if you want to force your model with an external input, you can use forcing functions, see ?forcings.

One possible approach is to combine forcings with "iteration".
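
A rough, untested sketch of that idea for the recursion in the question (for an R-code model the forcing is implemented with approxfun, as described in ?forcings; y1_force and step_fun are just illustrative names):

library(deSolve)

# forcing function: interpolate the measured y1_exp over x
y1_force <- approxfun(data$x, data$y1_exp, rule = 2)
dT <- diff(data$x)[1]

# with method = "iteration" the model function returns the *new* state value
step_fun <- function(t, y, parms) {
  with(as.list(parms), {
    y2_new <- y[1] + (a + b) * (y1_force(t + dT) - y[1]) * dT
    list(y2_new)
  })
}

out <- ode(y      = c(y2 = data$y2_exp[1]),
           times  = data$x,
           func   = step_fun,
           parms  = c(a = 0.00359, b = 0.00111),  # e.g. the fitted a and the fixed b
           method = "iteration")
head(out)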

Thomas

Answered 2019-11-03T15:06:20.407