Chapter 7, Moving Beyond Linearity - Question 12

In [182]:
# Simulate data for a high-dimensional linear model (ISLR Ch. 7, Q12):
# y = beta_0 + X %*% betas + noise, whose coefficients we will recover
# by backfitting one predictor at a time.
p = 100                                   # number of predictors
n = 1000                                  # number of observations
itr = 5                                   # backfitting sweeps to perform
set.seed(1)                               # reproducible simulation
x = matrix(10*rnorm(p*n), ncol=p,nrow=n)  # n x p design matrix (sd = 10)
betas = rnorm(p)                          # true coefficients
beta_0 = rep(0.7,n)                       # true intercept (0.7), one per observation
err = rnorm(n)                            # observation noise
# NOTE(review): `err` is overwritten by the backfitting cell below to hold
# per-iteration MSEs -- a distinct name (e.g. `noise`) would be clearer.
betas_hat = rnorm(p)                      # random starting guess for backfitting
In [183]:
# Generate the response from the true linear model plus Gaussian noise.
y <- beta_0 + (x %*% betas) + err
In [196]:
# Backfitting: repeatedly cycle through the predictors, refitting each
# coefficient by simple regression against the partial residual that
# removes the contribution of all the other predictors.
# `err` records the training MSE after each full sweep.
err <- numeric(0)
for (sweep in seq_len(itr)) {
    for (k in seq_len(p)) {
        partial_resid <- y - x[, -k] %*% betas_hat[-k]
        fit <- lm(partial_resid ~ x[, k])
        betas_hat[k] <- fit$coef[2]
    }
    # Intercept is taken from the last simple regression of the sweep.
    intercept <- fit$coef[1]
    fitted_vals <- intercept + x %*% betas_hat
    err[sweep] <- mean((fitted_vals - y)^2)
}
In [199]:
# Intercept recovered by backfitting (the true intercept is 0.7).
intercept
(Intercept): 0.728456642750864
In [202]:
# Training MSE after each backfitting sweep. The curve is essentially flat
# after the first sweep -- that is what triggers the "relative range of
# values ... is small" warning: convergence is almost immediate here.
# Fix: add a y-axis label so the figure stands alone.
plot(err, xlab="Iterations", ylab="Mean squared error", type="l")
Warning message in plot.window(...):
“relative range of values =  20 * EPS, is small (axis 2)”

Four iterations are enough to get a good result.

In [203]:
# Coefficient estimates after backfitting; compare with the true `betas`.
betas_hat
  1. 0.78904972821621
  2. 0.398025667870602
  3. -0.478601726943252
  4. -0.456971169593605
  5. -0.171866895741717
  6. 0.585481499813396
  7. -0.986952509457391
  8. -0.0113490947751034
  9. -0.425024345037258
  10. 0.767906845158076
  11. 0.576692579667505
  12. -0.162829405081136
  13. 0.646346450010619
  14. 1.20867981152633
  15. 0.49635195918127
  16. -1.4856507202645
  17. -0.274325930072918
  18. 0.491522576918006
  19. 0.303017050506738
  20. 1.3296241523634
  21. -0.796823734415401
  22. -0.33734862550676
  23. -1.17174702293784
  24. 0.117211235174471
  25. -0.855784522456645
  26. -0.131142518854906
  27. -1.77660088560071
  28. 0.149354836922176
  29. -0.700232192843031
  30. 0.092058137786866
  31. 1.17582231424104
  32. -0.294952571041857
  33. -0.490815549452812
  34. 0.597107131878528
  35. -1.18314266470227
  36. -1.68597587821795
  37. 0.203188517745116
  38. -0.97976621435147
  39. 0.419588657190686
  40. -0.0604901080323164
  41. 0.101253714168027
  42. -1.88212854192151
  43. -0.136543689329343
  44. -0.324182303643097
  45. 1.37196687567549
  46. 0.208923145826943
  47. -0.035094579426044
  48. 1.11950548007022
  49. 2.71103531118445
  50. 0.118267668659416
  51. -0.531223107395963
  52. 0.311156175263165
  53. 1.27528453017404
  54. 0.139122133033436
  55. 0.265045524037119
  56. 1.67853843122377
  57. 1.07843323973864
  58. 0.577281309743731
  59. -0.344498320322516
  60. -0.522876336177289
  61. 0.540598494008423
  62. 0.438995659189366
  63. 0.326192617946531
  64. 0.287581338989821
  65. -1.42634014342476
  66. 2.81844908058733
  67. 1.01786501478883
  68. -1.65768714045028
  69. 0.198761659257675
  70. -0.757589657294584
  71. -1.40393149125298
  72. -0.680715301891702
  73. 0.587783397847698
  74. 0.823523517400078
  75. -0.752332438002472
  76. 0.446796060020726
  77. 0.137376631130461
  78. -0.0949529302345419
  79. 0.309247232322589
  80. -1.09136136209182
  81. 0.206745025723267
  82. -1.11535876385148
  83. 0.0327100295374082
  84. -1.32529093965771
  85. -1.865182509019
  86. 1.11175221817808
  87. -0.715279254817081
  88. 1.00639535395508
  89. 1.24979597283924
  90. 1.26808157050232
  91. -0.402965681446655
  92. -0.414255093754902
  93. -0.0976974909661235
  94. -0.0802301724808517
  95. -0.784233550453272
  96. -0.127481901732062
  97. -0.377162696037618
  98. -1.53756898905165
  99. -0.409399821993766
  100. 1.31803226108823
In [204]:
# True coefficients used to simulate the data, for comparison with `betas_hat`.
betas
  1. 0.791441548555919
  2. 0.392167936565892
  3. -0.472666950919845
  4. -0.457951668291923
  5. -0.168131940956006
  6. 0.585673742669619
  7. -0.984240596349714
  8. -0.0118171164760513
  9. -0.430931463506555
  10. 0.772530816021421
  11. 0.582699457541187
  12. -0.163434307215286
  13. 0.644005248415
  14. 1.214677161866
  15. 0.494540683601913
  16. -1.48111532288417
  17. -0.271891311460555
  18. 0.496633610440363
  19. 0.306742731013944
  20. 1.33698979486453
  21. -0.791516761183682
  22. -0.332609714201949
  23. -1.16660444055229
  24. 0.111365298935233
  25. -0.853005958189069
  26. -0.134938715070741
  27. -1.77873013892415
  28. 0.152452288990984
  29. -0.701344878572895
  30. 0.0936090345496135
  31. 1.17277990607221
  32. -0.295498176152151
  33. -0.497457892634564
  34. 0.596221159447781
  35. -1.18102387140881
  36. -1.68766244310306
  37. 0.206248671785439
  38. -0.982847611505839
  39. 0.414036699599061
  40. -0.0560230519420651
  41. 0.100736335595924
  42. -1.88318471405727
  43. -0.142570133850648
  44. -0.323754868692718
  45. 1.3702803314843
  46. 0.209558322664202
  47. -0.0328393805305169
  48. 1.1197473006727
  49. 2.71241485218744
  50. 0.119571503636972
  51. -0.534331297319944
  52. 0.306963244537574
  53. 1.27745896110207
  54. 0.137213251395063
  55. 0.265709909005729
  56. 1.67249920253258
  57. 1.07992488753015
  58. 0.576918556731345
  59. -0.341493957863896
  60. -0.521205323250182
  61. 0.534011977424934
  62. 0.4391999572627
  63. 0.326687709389682
  64. 0.282196340740335
  65. -1.43049010050394
  66. 2.82000443854223
  67. 1.01622234315481
  68. -1.6614158078359
  69. 0.19974114722406
  70. -0.760662303275383
  71. -1.40114560560663
  72. -0.685969432827368
  73. 0.583813136228156
  74. 0.822383015489095
  75. -0.751934152141699
  76. 0.448208391433813
  77. 0.135123085883382
  78. -0.0994906211506443
  79. 0.307107468091999
  80. -1.09147137963389
  81. 0.204915213420387
  82. -1.1085730843388
  83. 0.0363676511085154
  84. -1.31905148307393
  85. -1.8690517095895
  86. 1.11547641984262
  87. -0.713880686223729
  88. 1.00765947842367
  89. 1.24942420087063
  90. 1.26443516068977
  91. -0.400507009996167
  92. -0.413222151097689
  93. -0.097744436722045
  94. -0.0803918272847573
  95. -0.779481836285578
  96. -0.124705552857127
  97. -0.376065669248682
  98. -1.54362322739795
  99. -0.406955390662438
  100. 1.31743271263665
In [205]:
# NOTE(review): duplicate of the earlier `intercept` display cell -- could be removed.
intercept
(Intercept): 0.728456642750864
In [ ]: