Search code examples
Tags: r, neural-network

How to set startweights in R with neuralnet()?


I can't seem to get neuralnet() to use desired set of startweights in R. It's telling me what it used is different than what I gave it to use. What am I doing wrong?

Here's a look at my data:

> summary(traindata)
   Y1               X1               X2               X3               X4               X5               X6               X7        
Min.   :0.0000   Min.   :0.0000   Min.   :0.0000   Min.   :0.0000   Min.   :0.0000   Min.   :0.0000   Min.   :0.0000   Min.   :0.0000  
 1st Qu.:0.1127   1st Qu.:0.2966   1st Qu.:0.2364   1st Qu.:0.1000   1st Qu.:0.0000   1st Qu.:1.0000   1st Qu.:0.4871   1st Qu.:0.2769  
 Median :0.2081   Median :0.4602   Median :0.4000   Median :0.1800   Median :1.0000   Median :1.0000   Median :0.5554   Median :0.3684  
 Mean   :0.2266   Mean   :0.4494   Mean   :0.3895   Mean   :0.2058   Mean   :0.5427   Mean   :0.9528   Mean   :0.5572   Mean   :0.3532  
 3rd Qu.:0.3179   3rd Qu.:0.6128   3rd Qu.:0.5455   3rd Qu.:0.2904   3rd Qu.:1.0000   3rd Qu.:1.0000   3rd Qu.:0.6261   3rd Qu.:0.4259  
 Max.   :1.0000   Max.   :1.0000   Max.   :1.0000   Max.   :1.0000   Max.   :1.0000   Max.   :1.0000   Max.   :1.0000   Max.   :1.0000  
       X8                X9              X10         
 Min.   :0.00000   Min.   :0.0400   Min.   :0.00000  
 1st Qu.:0.07353   1st Qu.:0.3240   1st Qu.:0.08475  
 Median :0.12325   Median :0.3733   Median :0.15254  
 Mean   :0.14481   Mean   :0.3815   Mean   :0.18383  
 3rd Qu.:0.19377   3rd Qu.:0.4308   3rd Qu.:0.25424  
 Max.   :1.00000   Max.   :1.0000   Max.   :1.00000  
> str(traindata)
'data.frame':   99851 obs. of  11 variables:
 $ Y1 : num  0.3295 0.1705 0.0983 0.3526 0.078 ...
 $ X1 : num  0.766 0.234 0.234 0.362 0.447 ...
 $ X2 : num  0.655 0.2 0.2 0.309 0.382 ...
 $ X3 : num  0.68 0.08 0.08 0.2 0.28 0.24 0.16 0.24 0.32 0.68 ...
 $ X4 : num  1 0 0 0 1 0 1 1 1 1 ...
 $ X5 : num  1 1 1 1 1 1 1 1 1 1 ...
 $ X6 : num  0.691 0.691 0.691 0.691 0.691 ...
 $ X7 : num  0.516 0.516 0.516 0.516 0.516 ...
 $ X8 : num  0.2941 0 0.0588 0.0588 0.0588 ...
 $ X9 : num  0.587 0.587 0.587 0.587 0.587 ...
 $ X10: num  0.559 0.559 0.559 0.559 0.559 ...
 - attr(*, "na.action")= 'omit' Named int [1:10993] 1 2 3 4 5 6 7 8 9 10 ...
  ..- attr(*, "names")= chr [1:10993] "1" "2" "3" "4" ...

Here are my first set of weights and their derivation:

> nn1 <- neuralnet(as.formula(paste(vars[1], paste(vars[-1], collapse = " + "), sep = " ~ ")), traindata, hidden = c(10), threshold = 1, rep = 1, linear.output = TRUE, likelihood = TRUE, act.fct = "logistic", err.fct = "sse", learningrate.factor = list(minus = 0.5, plus = 1.2), lifesign = "full")
hidden: 10    thresh: 1    rep: 1/1    steps:    1000   min thresh: 2.7393808337981
                                                 2000   min thresh: 2.12567204477669
                                                 3000   min thresh: 1.13638473824821
                                                 3743   error: 480.83856    aic: 1203.67712 bic: 2354.56067 time: 4.11 mins
> print(nn1$weights)
[[1]]
[[1]][[1]]
            [,1]        [,2]        [,3]       [,4]        [,5]        [,6]        [,7]       [,8]       [,9]      [,10]
 [1,] -0.2014786  0.18227349  0.42386997  0.4607535 -0.39450924  0.26347907  1.35077384 -0.8789595 -1.3539250 -1.9073238
 [2,] -0.3138759  0.49426852  1.81863245  0.3458816  0.50622031 -0.31646315  0.23761338  1.2262221  0.7223506 -0.2711352
 [3,]  4.9401520 -0.12948452 -0.07537522  0.5795735 -1.29174636  1.84254635 -0.01407708 -0.4445504  0.1804914  1.8145001
 [4,]  5.8923391 -0.04022121  0.13038399 -1.5092823  0.40777519 -1.45995638  1.30011068  1.5655756  0.8829988  1.3954805
 [5,] -2.2528145 -1.38591431 -0.71961962  0.5355580 -3.17343121 -3.07299792  0.39655175  0.3208737 -1.4468927  0.1433068
 [6,] -4.6941619  0.77248242  1.30488285 -0.9680374 -0.23188218 -0.67685608  0.23555409 -0.4077941 -0.8229579 -0.9883259
 [7,]  0.4100560 -0.70637286 -1.88565770  0.8880590  0.91697183 -1.30062889 -0.78349558  0.5051081  0.3398119 -0.3897786
 [8,] -2.4235714  0.46652541  0.91892126  1.2754564  0.07831533 -0.25483017  0.49876237 -0.8897812 -1.0529990  0.1073808
 [9,]  0.6887370 -0.04701979  1.16011774  1.0705425 -0.51051715  3.21512739 -0.16291760  1.9464262  1.1197134  0.9003921
[10,]  0.8101476  1.02021087  0.21620719 -1.2889352 -0.41192141 -0.05808716 -1.42630951 -1.7183708  1.2418989 -0.4668008
[11,]  0.3647498  1.20825730  0.65285143 -0.1963329  0.18026556 -0.05677131 -2.40956470 -1.1985816  2.0340285 -0.7114733

[[1]][[2]]
            [,1]
 [1,] -0.7520664
 [2,] -0.1585614
 [3,]  1.3487849
 [4,] -0.6733687
 [5,]  0.3307345
 [6,] -0.6471139
 [7,] -0.3186048
 [8,]  0.5249488
 [9,]  0.3694480
[10,]  0.5088863
[11,]  0.8333604

Finally, I train a second, identical, model using the weights from nn1 as the startweights for this one:

> nn2 <- neuralnet(as.formula(paste(nn1$model.list$response, paste(nn1$model.list$variables, collapse = " + "), sep = " ~ ")), traindata, hidden = c(10), threshold = 1, rep = 1, startweights = nn1$weights, linear.output = TRUE, likelihood = TRUE, act.fct = "logistic", err.fct = "sse", learningrate.factor = list(minus = 0.5, plus = 1.2), lifesign = "full")
hidden: 10    thresh: 1    rep: 1/1    steps:    1000   min thresh: 4.57507785044985
                                                 2000   min thresh: 2.08275094219317
                                                 3000   min thresh: 1.81643516394141
                                                 4000   min thresh: 1.38511662675027
                                                 5000   min thresh: 1.2384995622333
                                                 6000   min thresh: 1.05033117773123
                                                 6140   error: 480.46492    aic: 1202.92984 bic: 2353.8134  time: 6.71 mins
> print(nn2$startweights)
[[1]]
[[1]][[1]]
            [,1]        [,2]       [,3]        [,4]       [,5]       [,6]       [,7]        [,8]        [,9]      [,10]
 [1,]  0.5893646 -0.81292669 -0.5629438  1.51903906 -1.4257453  0.1275882 -0.1057396 -0.05577868  0.22800704 -0.4595900
 [2,]  0.3707842  0.62274320 -0.0422689 -1.34965018 -0.5878436  0.7691974  1.5898764  0.11876721 -0.58689336 -1.1698612
 [3,]  0.6279824  0.22326940  1.7347023  1.51156918  0.2334499  0.3158411  0.4289562 -0.91420679 -0.05893476 -0.1374126
 [4,] -0.9799132 -0.73397255 -0.9325482  0.59750513  0.9198289  0.7040063 -2.0303020  0.06785993 -1.00338522  0.2185066
 [5,]  1.0546680  0.05642049  2.4998288  0.25655327  0.7573550 -0.3371682  0.7030798 -1.72373603 -1.55635173  0.5687848
 [6,] -2.7430174 -0.05536320  0.6252137 -0.42381818 -0.7434616 -0.7051810 -1.8649875 -0.87884095 -1.44110361 -0.9061813
 [7,]  0.6724057 -0.46366009  0.8204684  0.17219848  1.6992790  1.1190887 -0.7160276 -1.84172069 -0.45989825  1.0582524
 [8,] -1.0323476 -1.00524666  0.6220114 -0.04176771 -0.7908566 -1.9329564 -0.4563705  1.35359872  0.97917593 -0.2789364
 [9,]  0.1849783 -0.68836195 -1.1701419  1.91795922  1.1785882 -1.3456765 -0.8279595  0.81416043  0.45596681  0.1533143
[10,] -1.4070175  0.39805001  0.5166913  0.28639229  2.3440190  0.7993813 -1.1532419  0.26576989 -1.25017931 -0.3226559
[11,] -0.9816999 -0.65578411 -1.9905986 -0.45488073 -1.7489129 -0.5705796 -1.4147741 -0.35974793  0.78863263 -0.2901202

[[1]][[2]]
            [,1]
 [1,] -1.9173234
 [2,]  1.3536004
 [3,] -0.3369278
 [4,] -0.5524846
 [5,]  0.3954354
 [6,]  0.1652088
 [7,] -0.9997065
 [8,] -2.9073613
 [9,]  0.4071850
[10,]  1.8160924
[11,] -0.1539692

As you can see, the startweights from nn2 do not match the weights from nn1. Why is this the case? Am I missing something?

EDIT:

Hopefully this set of data will be more reproducible. As you can see, the same problem persists, even on a whole new dataset.

> Y1 <- rnorm(100)
> X1 <- rnorm(100)
> X2 <- rnorm(100)
> X3 <- rnorm(100)
> X4 <- rnorm(100)
> X5 <- rnorm(100)
> X6 <- rnorm(100)
> X7 <- rnorm(100)
> X8 <- rnorm(100)
> X9 <- rnorm(100)
> X10 <- rnorm(100)
> traindata <- data.frame(Y1, X1, X2, X3, X4, X5, X6, X7, X8, X9, X10)
> vars <- c("Y1", "X1", "X2", "X3", "X4", "X5", "X6", "X7", "X8", "X9", "X10")
> #
> summary(traindata)
       Y1                X1                 X2                 X3                X4                 X5                 X6          
 Min.   :-2.6282   Min.   :-2.31081   Min.   :-2.84703   Min.   :-2.9574   Min.   :-3.06652   Min.   :-2.92643   Min.   :-2.68786  
 1st Qu.:-0.8128   1st Qu.:-0.77061   1st Qu.:-0.77318   1st Qu.:-0.8722   1st Qu.:-0.72283   1st Qu.:-0.64498   1st Qu.:-0.84654  
 Median :-0.1805   Median :-0.01352   Median :-0.03076   Median :-0.1707   Median :-0.06016   Median :-0.02921   Median : 0.03849  
 Mean   :-0.1996   Mean   :-0.05143   Mean   :-0.13527   Mean   :-0.1927   Mean   :-0.07680   Mean   :-0.06514   Mean   :-0.01596  
 3rd Qu.: 0.5022   3rd Qu.: 0.62631   3rd Qu.: 0.57501   3rd Qu.: 0.4927   3rd Qu.: 0.48539   3rd Qu.: 0.57380   3rd Qu.: 0.68568  
 Max.   : 2.0202   Max.   : 2.06222   Max.   : 2.30610   Max.   : 1.8968   Max.   : 2.76293   Max.   : 2.37159   Max.   : 2.51357  
       X7                 X8                 X9                X10          
 Min.   :-2.76740   Min.   :-2.21274   Min.   :-2.25566   Min.   :-3.18281  
 1st Qu.:-0.56095   1st Qu.:-0.66163   1st Qu.:-0.53334   1st Qu.:-0.82798  
 Median :-0.01693   Median : 0.03079   Median : 0.04594   Median :-0.09057  
 Mean   :-0.02820   Mean   : 0.02596   Mean   : 0.13564   Mean   :-0.21482  
 3rd Qu.: 0.67220   3rd Qu.: 0.59838   3rd Qu.: 0.73141   3rd Qu.: 0.43741  
 Max.   : 1.82162   Max.   : 2.81257   Max.   : 2.56385   Max.   : 2.15635  
> str(traindata)
'data.frame':   100 obs. of  11 variables:
 $ Y1 : num  0.7019 -0.7081 0.0401 -1.4108 0.0186 ...
 $ X1 : num  1.2737 -0.4937 0.8092 1.6592 0.0524 ...
 $ X2 : num  0.5711 1.1013 1.5813 0.0494 -1.4142 ...
 $ X3 : num  -2.1977 0.0186 0.013 -0.7717 -0.024 ...
 $ X4 : num  0.478 -0.193 -0.918 0.674 0.654 ...
 $ X5 : num  0.481 0.392 0.411 -1.211 -1.801 ...
 $ X6 : num  1.4 -0.783 1.57 -1.658 -1.214 ...
 $ X7 : num  0.202 0.387 1.601 -0.279 0.366 ...
 $ X8 : num  -0.5429 2.8126 -0.0973 -0.7661 -2.1521 ...
 $ X9 : num  1.6332 -0.0177 -1.6318 0.9238 -1.8879 ...
 $ X10: num  -0.472 -0.089 0.488 -3.183 0.775 ...
> nn1 <- neuralnet(as.formula(paste(vars[1], paste(vars[-1], collapse = " + "), sep = " ~ ")), traindata, hidden = c(10), threshold = 1, rep = 1, linear.output = TRUE, likelihood = TRUE, act.fct = "logistic", err.fct = "sse", learningrate.factor = list(minus = 0.5, plus = 1.2), lifesign = "full")
hidden: 10    thresh: 1    rep: 1/1    steps:      73   error: 3.87651  aic: 249.75302  bic: 564.97861  time: 0.03 secs
> print(nn1$weights)
[[1]]
[[1]][[1]]
            [,1]       [,2]        [,3]       [,4]       [,5]       [,6]       [,7]       [,8]        [,9]      [,10]
 [1,]  2.0561478  2.1246898 -1.76213815 -0.9579672  1.7365645 -3.6917655 -2.8679099 -2.3354355 -0.78416048 -0.8883347
 [2,] -1.1735772  0.6258137 -1.59676588  0.5743075 -4.1081508  1.2250868 -2.2711916  2.6963711 -2.85434918  0.1391520
 [3,]  1.6164354 -0.1167470 -1.86684971 -3.5957692  0.6213768 -2.0814941  1.5481369 -0.4647491 -1.71029782  1.0826877
 [4,] -0.6293219 -0.6560375 -1.35948106 -1.5444962 -4.4668895  0.2072900 -0.5671789 -1.5835564  2.48531032 -1.6700781
 [5,]  2.0008226 -5.5525417 -0.01907481 -4.2084540  1.5691657 -0.6965300  0.3713269  0.6354120 -0.50617525 -5.9433049
 [6,] -1.2389025 -0.8341130 -3.46508109 -0.4022277 -0.8010767 -1.6026804  0.6800117 -0.4354258  2.87640948 -1.6238975
 [7,]  1.1897863 -1.7753309  3.74799195 -1.3946734  2.4358772  0.5671369  0.8298011 -2.4104047  3.81490744  0.2342864
 [8,] -2.8655656  1.7017802 -2.90380747  1.1301706 -1.4243256  0.3095138  0.7930108  2.8188700 -0.03151904 -0.1262049
 [9,]  1.1732202  0.8257122 -2.47857455  1.9630549 -3.1291087  3.4779904 -0.2160225  0.4892494 -0.73072162  0.4053342
[10,] -3.6661128  0.5989946 -0.88542239  2.9123770  1.1520419 -0.8132918 -3.4014543 -0.2657552 -1.42833485 -1.7101058
[11,]  1.5582244  0.2356327 -0.05400474  0.8694207 -1.4107504  2.5518053  2.0864136 -1.3973193 -0.71283726  4.1963821

[[1]][[2]]
            [,1]
 [1,]  0.9961668
 [2,] -0.7378463
 [3,] -0.8060499
 [4,]  1.0650543
 [5,]  1.4801685
 [6,] -1.1207186
 [7,] -1.0321750
 [8,]  2.2744946
 [9,] -0.8224604
[10,] -0.9745411
[11,] -0.8840396


> nn2 <- neuralnet(as.formula(paste(nn1$model.list$response, paste(nn1$model.list$variables, collapse = " + "), sep = " ~ ")), traindata, hidden = c(10), threshold = 1, rep = 1, startweights = nn1$weights, linear.output = TRUE, likelihood = TRUE, act.fct = "logistic", err.fct = "sse", learningrate.factor = list(minus = 0.5, plus = 1.2), lifesign = "full")
hidden: 10    thresh: 1    rep: 1/1    steps:      72   error: 4.43824  aic: 250.87649  bic: 566.10208  time: 0.03 secs
> print(nn2$startweights)
[[1]]
[[1]][[1]]
             [,1]       [,2]        [,3]       [,4]         [,5]        [,6]       [,7]        [,8]        [,9]         [,10]
 [1,] -1.69644421 -0.4333522 -1.30154641  0.4536848  0.291446600  0.67304726  1.1416902  2.38808056  0.83386911 -0.0009094806
 [2,]  1.61174965  1.9336793 -1.15244916 -1.0842433 -0.150961924 -1.33579414  1.7830017 -1.00851612 -0.04383156 -0.1297548803
 [3,] -0.06963176  0.9042027 -0.48823105  0.7636997  1.603469276  0.14116076 -0.2406283 -1.41095608 -0.88344106  0.7069823815
 [4,]  1.54570246  1.1687791  1.23740145  1.3748013 -1.299009433  0.18957076  1.0178625  0.79229782 -1.49661956 -0.5379553541
 [5,]  0.41870651  0.7305606  0.01583256 -0.6306494  0.562912456 -0.30008432  1.8717705  0.42124102 -0.15980324  0.2513703357
 [6,]  1.16461680  2.0595952 -0.76574309 -0.1111576  2.220310268  0.75662937  1.2029788 -0.39538609  0.49934000  2.0005942684
 [7,]  0.15716435 -0.2566108 -1.27468440 -0.5930131  0.818792043  0.66329768  0.7452777 -0.03777932  1.20430254 -0.5595158877
 [8,] -1.58467560  1.4673691 -0.83996793 -0.4187563  0.705109624 -1.24442801  0.1704734 -0.58777674 -0.40027652 -2.4756589392
 [9,] -0.89337191  0.6316128 -0.82507999  1.5183803 -0.287585827 -2.41368786  2.1250179  0.12195748 -1.53103716  0.4412285905
[10,] -0.01346646  0.7372053  1.49649858 -1.9613378 -0.003319936 -0.79324517  3.2320651 -0.16751147  0.90642655  0.6876876521
[11,]  0.28453268 -0.3871505 -0.41599417  1.5926093 -0.662442433 -0.02527895 -0.4075339  0.29115879 -0.43122825 -1.5403539978

[[1]][[2]]
            [,1]
 [1,]  0.3947711
 [2,]  0.4984659
 [3,] -0.8605949
 [4,]  0.2138002
 [5,] -0.7624599
 [6,] -0.2855559
 [7,] -1.7123571
 [8,] -0.9662195
 [9,] -0.5196383
[10,]  0.3078660
[11,] -1.0751191

Here is an exact sample of this randomly generated dataset:

> dput(head(traindata, 30))
structure(list(Y1 = c(0.701922182356232, -0.708123531581225, 
0.0400587622254783, -1.41080737945381, 0.0185917925803016, -0.756094808213198, 
0.229827153393527, 1.03359044931556, -0.325979966122, 1.94639172458108, 
-0.0858880338034102, 1.30192478542873, -1.29456974769679, 0.424863652008442, 
0.137431331087742, 0.967718619114868, 0.712884633842502, 1.09660017836771, 
1.42251489219274, -1.3932494645714, 0.132517063221073, -0.586773257455408, 
-1.04867834765241, -1.48298842777259, -1.18811202193506, -0.0764177818781156, 
0.749556896109999, -1.3529282263906, -0.199434984963032, -0.670278221235389
), X1 = c(1.27366502736629, -0.493684689890589, 0.809158629404917, 
1.65915843651466, 0.0524406431564457, 1.99834071163273, -2.31081184205956, 
-1.17120485025769, -0.993257451630739, 0.153530041415676, 0.131742575531519, 
1.0854692238377, -1.22117812015096, 1.20784952618741, -1.60309580657283, 
0.275992514430696, 0.465355658798675, -0.58324043244759, -0.110548854772808, 
0.714089052476505, 0.8291456053507, -0.19349333985983, 0.503401775891749, 
0.334556651206673, -0.119878011897093, 0.195156112996898, 0.0636604084639662, 
1.34475958603765, -0.355289744059907, -0.960344976220009), X2 = c(0.571072453624444, 
1.1013233917443, 1.58134119122358, 0.0494181005667735, -1.41424073791614, 
-0.317851997994474, -0.124667161289762, 0.845029910205487, -1.1392281731113, 
-2.84702736919688, -0.321182929319084, -0.612686484466993, 0.272155530547179, 
-2.06371852987761, -0.484717082770012, 1.10674403379459, -1.2154536411017, 
0.258139113555222, -0.761462644039399, -0.179388697566508, -0.457896931695907, 
-0.317076825483547, -1.52671258748914, -0.931004767208564, 1.04596630098729, 
0.563468202434267, -0.46893437559372, 1.28328427263904, 0.356878807333198, 
-0.714834159650127), X3 = c(-2.19767598984247, 0.0185884088546519, 
0.0129525883347238, -0.771733202114285, -0.0239773350187926, 
0.778822293534904, -0.443666145403694, -1.23534078334349, -0.621178446436027, 
1.07558407906203, 0.184032149735252, -0.396860023019889, 0.989731378110675, 
0.593591189965602, -0.631541708619503, -1.38424024942674, 0.199560535550159, 
0.58295391877909, 0.306646068780785, -0.214125096089182, 0.211904502773658, 
1.18389005375035, 0.479384174006876, -1.57515519003181, -0.950589131657951, 
0.570587858431722, 0.770643652985999, -0.0781891019601696, 0.165283974675214, 
-0.482431104312909), X4 = c(0.47762074365917, -0.19280124334945, 
-0.917797642044604, 0.673904599272376, 0.654197442468486, -0.0697760044808261, 
2.27076384456501, 0.679999811887382, 0.508491060290448, 1.16295246888786, 
-0.445411117332417, -0.325811531846809, 1.50163109930028, 1.83535005124084, 
0.47586332354473, 1.87962386913725, 0.870235116234204, 0.411485039619166, 
-1.35871567422559, 0.0076564730844747, 1.06777549450363, 1.50987449933043, 
0.213161153472195, 0.186115557477568, -0.157791280579259, -1.23709397773273, 
-0.0505373473885883, -1.00068684118648, 0.287353480148877, 0.830433773553478
), X5 = c(0.481174226063212, 0.391844037883607, 0.410876615099086, 
-1.21147781618028, -1.80075974845901, 0.800546029642124, -0.476361769636676, 
-0.944402072158152, -0.79949012101078, 0.615609119547131, -0.332089606424653, 
-1.28611162784317, -0.126324999116017, -0.850079074926488, -1.16303552579073, 
1.11694000035649, -0.642213432814981, -0.428477829678032, 0.270059019091907, 
0.578167082173589, -0.961260659353575, 1.01356399631725, -0.434149676049041, 
0.454191770775709, 0.945539987554406, -0.653287973061059, 0.975212345830591, 
-2.92643275651569, 0.131537200351334, -0.144696759170823), X6 = c(1.39986578192177, 
-0.783303464282605, 1.57043416479322, -1.65818976109745, -1.21385993362988, 
-1.26704173775851, 0.483189914381612, 0.683337644958036, 1.79018891817461, 
-0.0110654691673887, -0.32034328626619, 1.47261067198678, -0.0630799680643052, 
-1.35355116067763, -0.0892303482657638, 0.669675744222314, 1.13749863964793, 
0.468890086305808, -1.56688842791645, 1.29022138572065, -1.34920827872719, 
-0.825007844187256, 0.237314361839962, 1.53662062038759, 1.20652616894649, 
-1.1377851987682, 0.773646609261767, 0.94323579506706, 0.420605406590486, 
0.0657302585327347), X7 = c(0.201587687432359, 0.387221536601237, 
1.60114696197582, -0.278897031389238, 0.365790676008811, -0.993046118293481, 
1.62143648572453, 0.665967320401928, 0.994410345825827, -0.170991304653852, 
-0.227964877787297, -0.486532886674193, 0.042032489279269, 0.824612159718666, 
-0.642117241988523, 0.312476614136496, -1.03961891897275, -0.548667354933751, 
-0.0521273232669943, 1.51521890875494, 1.51535213926008, 0.253539571229083, 
0.64234449708054, 1.04420862461711, 1.53756512387198, -1.26057851363841, 
0.742294745891574, -1.42184455296825, 0.587000367973472, -0.349451731018588
), X8 = c(-0.542914358047845, 2.81257202395158, -0.0972695380162739, 
-0.7661342677977, -2.15214352701854, 0.946717716211487, 0.99993003028658, 
0.614546386898964, -0.859811405490715, -0.670453616234801, -0.17346947446889, 
-1.07727894279311, 0.592312115423768, -0.256019200500554, 0.319958459484677, 
0.321687427347929, 1.98242781790999, -0.846064232318548, -0.736994211412997, 
-0.115411197421152, -0.754459840834644, -0.456395622597785, 0.74266667766863, 
0.968610205824152, -0.72767394430833, -0.0482633104925355, 0.51270022238816, 
-0.621368764086511, -0.0138749896213129, -0.658688294139246), 
    X9 = c(1.63317823978059, -0.01774256677172, -1.63177241040162, 
    0.923776797289243, -1.88791456589611, 0.532970416239549, 
    -0.311699268395948, 2.43527710160233, 0.038332347572289, 
    0.264960926473119, -1.08530924957301, 1.24640634922912, -0.116226724474104, 
    -0.0224132482158086, -0.0169490420992843, 0.311908689778931, 
    0.729741399504258, -0.562042837815365, 0.344104286394718, 
    -0.630555188617343, 0.779501553691767, 2.56384810481512, 
    -0.406218268893801, 0.825111666168507, 0.86430780344585, 
    -1.05849420512847, 0.473063258062104, -0.523766744743483, 
    -1.28900820575187, 0.322806836954206), X10 = c(-0.471741246765894, 
    -0.0890225241186955, 0.487785473582504, -3.18280694963509, 
    0.775294242648606, 0.908032736784392, -0.870849535156648, 
    -1.8729895069865, -0.924248866769357, 0.064378071447313, 
    0.396570183961503, -0.62260644614774, -1.55366000366709, 
    1.3514088664078, -1.06920245196414, 0.521276074417584, -0.413882376032191, 
    0.17396816534063, 0.601746726940767, 0.797912945936916, -0.254266428410621, 
    -1.37234718769371, 0.133105253457835, 0.387347519836891, 
    0.133134442347685, -0.541749317358317, 0.129941549831559, 
    -0.235276238863235, -1.19889658129318, -0.45975918808316)), row.names = c(NA, 
30L), class = "data.frame")

Solution

  • First of all, let's check what the argument startweights means:

    a vector containing starting values for the weights. Set to NULL for random initialization.

    So this says that it should contain a vector of the weights. Now, what does the model's "weights" output contain, according to the documentation?

    a list containing the fitted weights of the neural network for every repetition.

    So the first problem is that the weights assigned to the second model should be unlisted (the model stores them as a nested list, but startweights expects a vector). Another thing to keep in mind is that the startweights are randomly assigned if you don't supply weights. To make the example reproducible you can use set.seed(). Here is a reproducible example:

    library(neuralnet)
    # First model has no startweights which means random weights
    set.seed(1)
    nn1 <- neuralnet(Y1 ~., data = traindata, hidden = c(10),
              threshold = 1, rep = 1, linear.output = TRUE, 
              likelihood = TRUE, act.fct = "logistic", err.fct = "sse", 
              learningrate.factor = list(minus = 0.5, plus = 1.2), 
              lifesign = "full")
    #> hidden: 10    thresh: 1    rep: 1/1    steps:      17    error: 1.89992  aic: 245.79983  bic: 415.34472  time: 0.01 secs
    
    nn1$weights
    #> [[1]]
    #> [[1]][[1]]
    #>             [,1]       [,2]        [,3]        [,4]       [,5]        [,6]
    #>  [1,] -0.7227675  1.2573204 -0.03581002  0.04368123 -1.5562328  0.96199990
    #>  [2,] -0.4847567 -0.2692571 -1.98935170 -1.98953668  0.8925048 -1.64922148
    #>  [3,] -2.4356286 -0.9326999 -0.22842225 -0.39146624  0.2601260 -2.31253463
    #>  [4,]  1.6077579 -0.2250691 -1.45612874  0.34318717 -0.4814671  0.94907976
    #>  [5,]  0.6814913 -0.3509336 -0.15109551  0.27767013 -1.3807462 -0.94753172
    #>  [6,]  0.7795316 -0.2249663 -1.99415238  0.63162537  0.5441242  3.27001776
    #>  [7,]  1.0999062  1.4993562 -0.13079006 -0.50522425  0.9845059 -1.30764000
    #>  [8,]  0.9048786  1.6336983  2.01794156 -0.81200072  0.6563736  1.77973936
    #>  [9,]  1.9257814  0.2853293  1.41354955 -0.33146952  1.0095197 -0.03261211
    #> [10,]  0.6288116  0.3259574  0.63801227  0.69449410 -0.7977631  0.52512679
    #> [11,]  0.5333812  1.5946134 -0.96232839  0.21967967  2.0455008 -0.11171042
    #>             [,7]        [,8]        [,9]      [,10]
    #>  [1,] -3.0733586  0.07785035  0.06689381 -1.7418006
    #>  [2,]  1.3927334  0.88681844  1.33549879  0.4480333
    #>  [3,]  1.4216533 -0.56786495 -1.81092003  0.1195839
    #>  [4,]  1.9902517  0.04380839  0.53946781 -0.6588192
    #>  [5,]  0.5065405  0.13220711  0.09200262 -1.1103712
    #>  [6,] -0.3253608  0.25968700  0.17773653 -0.9760197
    #>  [7,] -0.9892736 -2.13604392  1.78431057  1.2382873
    #>  [8,] -0.4024976 -0.67445381  1.47688643  1.5841846
    #>  [9,]  0.3463666  0.38295037 -2.21339221  0.5597742
    #> [10,]  0.4914462  2.66309984 -0.57522941 -1.2158146
    #> [11,] -1.1532919 -0.11150264 -2.03708973  2.5682161
    #> 
    #> [[1]][[2]]
    #>             [,1]
    #>  [1,] -0.3389621
    #>  [2,] -0.3848997
    #>  [3,]  1.4422322
    #>  [4,] -0.8231826
    #>  [5,]  0.6600964
    #>  [6,] -0.4152942
    #>  [7,] -0.3007173
    #>  [8,] -0.4515996
    #>  [9,]  0.5973076
    #> [10,] -0.1580549
    #> [11,] -0.6659437
    
    # Second model 
    nn2 <- neuralnet(Y1~., data = traindata, hidden = c(10), 
                     threshold = 1, rep = 1, startweights = unlist(nn1$weights), 
                     linear.output = TRUE, likelihood = TRUE, act.fct = "logistic", 
                     err.fct = "sse", learningrate.factor = list(minus = 0.5, plus = 1.2), 
                     lifesign = "full")
    #> hidden: 10    thresh: 1    rep: 1/1    steps:       1    error: 1.89992  aic: 245.79983  bic: 415.34472  time: 0 secs
    
    nn2$startweights
    #> [[1]]
    #> [[1]][[1]]
    #>             [,1]       [,2]        [,3]        [,4]       [,5]        [,6]
    #>  [1,] -0.7227675  1.2573204 -0.03581002  0.04368123 -1.5562328  0.96199990
    #>  [2,] -0.4847567 -0.2692571 -1.98935170 -1.98953668  0.8925048 -1.64922148
    #>  [3,] -2.4356286 -0.9326999 -0.22842225 -0.39146624  0.2601260 -2.31253463
    #>  [4,]  1.6077579 -0.2250691 -1.45612874  0.34318717 -0.4814671  0.94907976
    #>  [5,]  0.6814913 -0.3509336 -0.15109551  0.27767013 -1.3807462 -0.94753172
    #>  [6,]  0.7795316 -0.2249663 -1.99415238  0.63162537  0.5441242  3.27001776
    #>  [7,]  1.0999062  1.4993562 -0.13079006 -0.50522425  0.9845059 -1.30764000
    #>  [8,]  0.9048786  1.6336983  2.01794156 -0.81200072  0.6563736  1.77973936
    #>  [9,]  1.9257814  0.2853293  1.41354955 -0.33146952  1.0095197 -0.03261211
    #> [10,]  0.6288116  0.3259574  0.63801227  0.69449410 -0.7977631  0.52512679
    #> [11,]  0.5333812  1.5946134 -0.96232839  0.21967967  2.0455008 -0.11171042
    #>             [,7]        [,8]        [,9]      [,10]
    #>  [1,] -3.0733586  0.07785035  0.06689381 -1.7418006
    #>  [2,]  1.3927334  0.88681844  1.33549879  0.4480333
    #>  [3,]  1.4216533 -0.56786495 -1.81092003  0.1195839
    #>  [4,]  1.9902517  0.04380839  0.53946781 -0.6588192
    #>  [5,]  0.5065405  0.13220711  0.09200262 -1.1103712
    #>  [6,] -0.3253608  0.25968700  0.17773653 -0.9760197
    #>  [7,] -0.9892736 -2.13604392  1.78431057  1.2382873
    #>  [8,] -0.4024976 -0.67445381  1.47688643  1.5841846
    #>  [9,]  0.3463666  0.38295037 -2.21339221  0.5597742
    #> [10,]  0.4914462  2.66309984 -0.57522941 -1.2158146
    #> [11,] -1.1532919 -0.11150264 -2.03708973  2.5682161
    #> 
    #> [[1]][[2]]
    #>             [,1]
    #>  [1,] -0.3389621
    #>  [2,] -0.3848997
    #>  [3,]  1.4422322
    #>  [4,] -0.8231826
    #>  [5,]  0.6600964
    #>  [6,] -0.4152942
    #>  [7,] -0.3007173
    #>  [8,] -0.4515996
    #>  [9,]  0.5973076
    #> [10,] -0.1580549
    #> [11,] -0.6659437
    

    Created on 2022-07-27 by the reprex package (v2.0.1)

    As you can see, the weights of nn1 are the same as the startweights of nn2.