setwd("D:/Users/hpeng/Desktop/Math4826")

library(R.matlab)
## R.matlab v3.6.2 (2018-09-26) successfully loaded. See ?R.matlab for help.
## 
## Attaching package: 'R.matlab'
## The following objects are masked from 'package:base':
## 
##     getOption, isOpen
library(tidyverse)
## -- Attaching packages --------------------------------------- tidyverse 1.3.0 --
## v ggplot2 3.3.3     v purrr   0.3.4
## v tibble  3.0.4     v dplyr   1.0.4
## v tidyr   1.1.2     v stringr 1.4.0
## v readr   1.4.0     v forcats 0.5.0
## -- Conflicts ------------------------------------------ tidyverse_conflicts() --
## x dplyr::filter() masks stats::filter()
## x dplyr::lag()    masks stats::lag()
library(fpp2)
## Registered S3 method overwritten by 'quantmod':
##   method            from
##   as.zoo.data.frame zoo
## -- Attaching packages ---------------------------------------------- fpp2 2.4 --
## v forecast  8.13     v expsmooth 2.3 
## v fma       2.4

Example: Quarterly Iowa Nonfarm Income (Lecture Note 3)

iowa <- readMat("Iowa_income.mat")

# quarterly growth rate in percent: 100 times the first difference of the logs
iowa.ts <- 100*diff(log(ts(iowa$Iowa.Income)))

plot(iowa.ts)

iowa_simple_exponential <-ses(iowa.ts, initial="optimal", alpha=0.11)

summary(iowa_simple_exponential)
## 
## Forecast method: Simple exponential smoothing
## 
## Model Information:
## Simple exponential smoothing 
## 
## Call:
##  ses(y = iowa.ts, initial = "optimal", alpha = 0.11) 
## 
##   Smoothing parameters:
##     alpha = 0.11 
## 
##   Initial states:
##     l = 1.4606 
## 
##   sigma:  0.9529
## 
##      AIC     AICc      BIC 
## 604.9520 605.0488 610.6404 
## 
## Error measures:
##                      ME      RMSE       MAE  MPE MAPE      MASE        ACF1
## Training set 0.08288983 0.9454061 0.7267637 -Inf  Inf 0.7640854 -0.02818975
## 
## Forecasts:
##     Point Forecast    Lo 80    Hi 80     Lo 95    Hi 95
## 129        2.61855 1.397309 3.839791 0.7508230 4.486277
## 130        2.61855 1.389943 3.847157 0.7395573 4.497542
## 131        2.61855 1.382620 3.854479 0.7283586 4.508741
## 132        2.61855 1.375341 3.861759 0.7172260 4.519874
## 133        2.61855 1.368104 3.868996 0.7061581 4.530941
## 134        2.61855 1.360909 3.876191 0.6951539 4.541946
## 135        2.61855 1.353755 3.883345 0.6842124 4.552887
## 136        2.61855 1.346640 3.890459 0.6733323 4.563767
## 137        2.61855 1.339566 3.897534 0.6625128 4.574587
## 138        2.61855 1.332530 3.904569 0.6517528 4.585347
iowa_simple_exponential$fitted
## Time Series:
## Start = 2 
## End = 128 
## Frequency = 1 
##               y
##   [1,] 1.460579
##   [2,] 1.354687
##   [3,] 1.493270
##   [4,] 1.434950
##   [5,] 1.537575
##   [6,] 1.385589
##   [7,] 1.284457
##   [8,] 1.312400
##   [9,] 1.612375
##  [10,] 1.370308
##  [11,] 1.444399
##  [12,] 1.521407
##  [13,] 1.796246
##  [14,] 1.849845
##  [15,] 1.791483
##  [16,] 1.766075
##  [17,] 1.571807
##  [18,] 1.511874
##  [19,] 1.499027
##  [20,] 1.594258
##  [21,] 1.553365
##  [22,] 1.422520
##  [23,] 1.305922
##  [24,] 1.188776
##  [25,] 1.044766
##  [26,] 1.035358
##  [27,] 1.129513
##  [28,] 1.234707
##  [29,] 1.224318
##  [30,] 1.348478
##  [31,] 1.393345
##  [32,] 1.453446
##  [33,] 1.175540
##  [34,] 1.453900
##  [35,] 1.441625
##  [36,] 1.506404
##  [37,] 1.406832
##  [38,] 1.383164
##  [39,] 1.317546
##  [40,] 1.247775
##  [41,] 1.099814
##  [42,] 1.201492
##  [43,] 1.297854
##  [44,] 1.409143
##  [45,] 1.433489
##  [46,] 1.568376
##  [47,] 1.501210
##  [48,] 1.515711
##  [49,] 1.395772
##  [50,] 1.288827
##  [51,] 1.349742
##  [52,] 1.228622
##  [53,] 1.102576
##  [54,] 1.107939
##  [55,] 1.120159
##  [56,] 1.058963
##  [57,] 1.056741
##  [58,] 1.114005
##  [59,] 1.128325
##  [60,] 1.172924
##  [61,] 1.118988
##  [62,] 1.103459
##  [63,] 1.137414
##  [64,] 1.189462
##  [65,] 1.365838
##  [66,] 1.323818
##  [67,] 1.368844
##  [68,] 1.405670
##  [69,] 1.500890
##  [70,] 1.558748
##  [71,] 1.696177
##  [72,] 1.816784
##  [73,] 1.850068
##  [74,] 1.945633
##  [75,] 2.016522
##  [76,] 2.114996
##  [77,] 1.864119
##  [78,] 1.773997
##  [79,] 1.834614
##  [80,] 1.755582
##  [81,] 1.872024
##  [82,] 1.873285
##  [83,] 1.832392
##  [84,] 1.804317
##  [85,] 1.621967
##  [86,] 1.703635
##  [87,] 1.739514
##  [88,] 1.706370
##  [89,] 1.704561
##  [90,] 1.831367
##  [91,] 1.817134
##  [92,] 1.796653
##  [93,] 1.659730
##  [94,] 1.843555
##  [95,] 1.806181
##  [96,] 1.822806
##  [97,] 1.807829
##  [98,] 1.875263
##  [99,] 1.875835
## [100,] 2.095560
## [101,] 2.252412
## [102,] 2.282762
## [103,] 2.471790
## [104,] 2.460635
## [105,] 2.458272
## [106,] 2.591546
## [107,] 2.727442
## [108,] 2.735201
## [109,] 2.544147
## [110,] 2.548931
## [111,] 2.566180
## [112,] 2.604490
## [113,] 2.577742
## [114,] 2.566608
## [115,] 2.484911
## [116,] 2.581959
## [117,] 2.582571
## [118,] 2.626291
## [119,] 2.637106
## [120,] 2.581958
## [121,] 2.550258
## [122,] 2.551176
## [123,] 2.620935
## [124,] 2.698678
## [125,] 2.571368
## [126,] 2.606350
## [127,] 2.655015
checkresiduals(iowa_simple_exponential)

## 
##  Ljung-Box test
## 
## data:  Residuals from Simple exponential smoothing
## Q* = 10.313, df = 8, p-value = 0.2438
## 
## Model df: 2.   Total lags used: 10
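
As a check on the mechanics (an add-on, not part of the original notes), the fitted values above can be reproduced by hand from the SES recursion l[t] = alpha*y[t] + (1 - alpha)*l[t-1], since each fitted value is just the previous smoothed level. A minimal sketch, assuming iowa.ts and the alpha = 0.11 fit above are still in the workspace:

# Sketch: rebuild the SES fitted values from the recursion.
alpha <- 0.11
l0    <- 1.4606                      # initial level reported by ses() above
y     <- as.numeric(iowa.ts)
level <- numeric(length(y))
level[1] <- alpha * y[1] + (1 - alpha) * l0
for (t in 2:length(y)) {
  level[t] <- alpha * y[t] + (1 - alpha) * level[t - 1]
}
# c(l0, head(level, -1)) should agree with iowa_simple_exponential$fitted,
# up to the rounding of the reported initial level.
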
# now let ses() choose both alpha and the initial level automatically

iowa_simple_exponential <-ses(iowa.ts, initial="optimal")

summary(iowa_simple_exponential)
## 
## Forecast method: Simple exponential smoothing
## 
## Model Information:
## Simple exponential smoothing 
## 
## Call:
##  ses(y = iowa.ts, initial = "optimal") 
## 
##   Smoothing parameters:
##     alpha = 0.1027 
## 
##   Initial states:
##     l = 1.4597 
## 
##   sigma:  0.9528
## 
##      AIC     AICc      BIC 
## 606.9234 607.1185 615.4559 
## 
## Error measures:
##                      ME      RMSE      MAE  MPE MAPE      MASE        ACF1
## Training set 0.08855563 0.9452994 0.727797 -Inf  Inf 0.7651718 -0.02206369
## 
## Forecasts:
##     Point Forecast    Lo 80    Hi 80     Lo 95    Hi 95
## 129       2.614261 1.393158 3.835364 0.7467450 4.481777
## 130       2.614261 1.386740 3.841782 0.7369299 4.491592
## 131       2.614261 1.380356 3.848166 0.7271658 4.501356
## 132       2.614261 1.374004 3.854518 0.7174519 4.511070
## 133       2.614261 1.367685 3.860837 0.7077875 4.520734
## 134       2.614261 1.361398 3.867124 0.6981719 4.530350
## 135       2.614261 1.355142 3.873380 0.6886043 4.539918
## 136       2.614261 1.348917 3.879605 0.6790840 4.549438
## 137       2.614261 1.342722 3.885800 0.6696103 4.558912
## 138       2.614261 1.336558 3.891964 0.6601826 4.568339
iowa_simple_exponential$fitted
## Time Series:
## Start = 2 
## End = 128 
## Frequency = 1 
##               y
##   [1,] 1.459688
##   [2,] 1.360953
##   [3,] 1.489646
##   [4,] 1.435589
##   [5,] 1.531301
##   [6,] 1.390101
##   [7,] 1.295253
##   [8,] 1.320224
##   [9,] 1.599379
##  [10,] 1.374798
##  [11,] 1.443485
##  [12,] 1.515448
##  [13,] 1.772560
##  [14,] 1.825014
##  [15,] 1.773096
##  [16,] 1.751271
##  [17,] 1.571485
##  [18,] 1.515585
##  [19,] 1.503213
##  [20,] 1.591660
##  [21,] 1.553763
##  [22,] 1.431607
##  [23,] 1.321857
##  [24,] 1.210892
##  [25,] 1.074221
##  [26,] 1.062417
##  [27,] 1.147511
##  [28,] 1.243838
##  [29,] 1.233205
##  [30,] 1.348168
##  [31,] 1.390073
##  [32,] 1.446499
##  [33,] 1.187850
##  [34,] 1.446373
##  [35,] 1.435689
##  [36,] 1.496755
##  [37,] 1.404818
##  [38,] 1.382936
##  [39,] 1.321720
##  [40,] 1.256176
##  [41,] 1.117225
##  [42,] 1.210332
##  [43,] 1.299356
##  [44,] 1.403065
##  [45,] 1.426410
##  [46,] 1.553023
##  [47,] 1.491915
##  [48,] 1.506403
##  [49,] 1.395422
##  [50,] 1.295650
##  [51,] 1.351800
##  [52,] 1.238550
##  [53,] 1.119896
##  [54,] 1.123123
##  [55,] 1.132969
##  [56,] 1.074541
##  [57,] 1.070868
##  [58,] 1.122861
##  [59,] 1.135316
##  [60,] 1.176221
##  [61,] 1.125546
##  [62,] 1.110380
##  [63,] 1.141359
##  [64,] 1.189529
##  [65,] 1.354128
##  [66,] 1.316115
##  [67,] 1.358928
##  [68,] 1.394314
##  [69,] 1.484346
##  [70,] 1.540042
##  [71,] 1.670221
##  [72,] 1.785445
##  [73,] 1.819725
##  [74,] 1.912029
##  [75,] 1.981637
##  [76,] 2.077121
##  [77,] 1.846873
##  [78,] 1.764535
##  [79,] 1.822078
##  [80,] 1.749607
##  [81,] 1.858893
##  [82,] 1.861418
##  [83,] 1.824472
##  [84,] 1.799083
##  [85,] 1.629438
##  [86,] 1.704890
##  [87,] 1.738246
##  [88,] 1.707444
##  [89,] 1.705645
##  [90,] 1.823878
##  [91,] 1.811364
##  [92,] 1.792842
##  [93,] 1.665447
##  [94,] 1.836419
##  [95,] 1.802271
##  [96,] 1.818188
##  [97,] 1.804684
##  [98,] 1.867942
##  [99,] 1.869227
## [100,] 2.074969
## [101,] 2.223469
## [102,] 2.254765
## [103,] 2.434054
## [104,] 2.427518
## [105,] 2.428711
## [106,] 2.556128
## [107,] 2.686591
## [108,] 2.698026
## [109,] 2.523537
## [110,] 2.530118
## [111,] 2.548147
## [112,] 2.585752
## [113,] 2.562712
## [114,] 2.553865
## [115,] 2.478927
## [116,] 2.570113
## [117,] 2.571901
## [118,] 2.613799
## [119,] 2.625175
## [120,] 2.574932
## [121,] 2.546068
## [122,] 2.547355
## [123,] 2.612852
## [124,] 2.686237
## [125,] 2.568699
## [126,] 2.601621
## [127,] 2.647524
checkresiduals(iowa_simple_exponential)

## 
##  Ljung-Box test
## 
## data:  Residuals from Simple exponential smoothing
## Q* = 10.194, df = 8, p-value = 0.2517
## 
## Model df: 2.   Total lags used: 10
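
The estimated alpha (0.1027) is close to the hand-picked 0.11, and the training error measures are nearly identical; the fixed-alpha fit even reports the slightly smaller AICc, which is expected since it estimates one fewer parameter for essentially the same likelihood. A minimal side-by-side comparison (an add-on sketch, not from the original notes), assuming both calls above are re-run:

# Sketch: compare the fixed-alpha and optimized SES fits on the training set.
fit_fixed <- ses(iowa.ts, initial = "optimal", alpha = 0.11)
fit_auto  <- ses(iowa.ts, initial = "optimal")
rbind(fixed_alpha = accuracy(fit_fixed)[1, c("RMSE", "MAE")],
      auto_alpha  = accuracy(fit_auto)[1, c("RMSE", "MAE")])
# autoplot(iowa.ts) + autolayer(fitted(fit_auto), series = "SES fitted")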

Example: Weekly Thermostat Sales (Lecture Note 3)

therm <- readMat("Thermostat_Sales.mat")

# transpose the MATLAB row vector and convert it to a time series
therm <- ts(t(therm$Thermostat.Sales))

plot(therm)

# Holt's linear trend method (double exponential smoothing)
therm_Double_Exponential <- holt(therm, initial="optimal")

summary(therm_Double_Exponential)
## 
## Forecast method: Holt's method
## 
## Model Information:
## Holt's method 
## 
## Call:
##  holt(y = therm, initial = "optimal") 
## 
##   Smoothing parameters:
##     alpha = 0.2467 
##     beta  = 0.0226 
## 
##   Initial states:
##     l = 199.0721 
##     b = -0.1157 
## 
##   sigma:  28.4509
## 
##      AIC     AICc      BIC 
## 559.5132 560.8175 569.2694 
## 
## Error measures:
##                    ME     RMSE      MAE       MPE     MAPE      MASE       ACF1
## Training set 3.898555 27.33474 21.73306 0.3575591 9.938754 0.8049281 0.08858778
## 
## Forecasts:
##    Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## 53       320.2519 283.7906 356.7132 264.4892 376.0146
## 54       324.7216 286.9611 362.4820 266.9719 382.4712
## 55       329.1913 289.9592 368.4234 269.1910 389.1916
## 56       333.6609 292.7868 374.5351 271.1493 396.1726
## 57       338.1306 295.4477 380.8136 272.8526 403.4086
## 58       342.6003 297.9468 387.2538 274.3087 410.8919
## 59       347.0700 300.2902 393.8498 275.5265 418.6135
## 60       351.5397 302.4842 400.5952 276.5157 426.5636
## 61       356.0094 304.5353 407.4834 277.2866 434.7321
## 62       360.4791 306.4503 414.5079 277.8491 443.1090
therm_Double_Exponential$fitted
## Time Series:
## Start = 1 
## End = 52 
## Frequency = 1 
##              y
##  [1,] 198.9564
##  [2,] 200.7376
##  [3,] 212.7019
##  [4,] 206.2860
##  [5,] 196.6623
##  [6,] 186.9019
##  [7,] 183.0259
##  [8,] 188.0493
##  [9,] 194.6860
## [10,] 193.9732
## [11,] 203.3790
## [12,] 206.2188
## [13,] 203.1025
## [14,] 192.4242
## [15,] 190.9637
## [16,] 204.6316
## [17,] 206.3920
## [18,] 207.2384
## [19,] 208.9479
## [20,] 210.0128
## [21,] 200.8499
## [22,] 198.9732
## [23,] 208.2198
## [24,] 194.7615
## [25,] 197.2125
## [26,] 194.4099
## [27,] 185.1515
## [28,] 180.3466
## [29,] 186.7724
## [30,] 190.7917
## [31,] 204.6443
## [32,] 208.9674
## [33,] 202.7328
## [34,] 204.0309
## [35,] 206.4000
## [36,] 224.9864
## [37,] 233.3406
## [38,] 243.7358
## [39,] 250.9024
## [40,] 249.7286
## [41,] 254.3910
## [42,] 270.8442
## [43,] 278.3101
## [44,] 286.4415
## [45,] 289.7075
## [46,] 285.2152
## [47,] 296.4989
## [48,] 301.0404
## [49,] 307.3100
## [50,] 305.0236
## [51,] 310.0295
## [52,] 306.2136
checkresiduals(therm_Double_Exponential)

## 
##  Ljung-Box test
## 
## data:  Residuals from Holt's method
## Q* = 12.313, df = 6, p-value = 0.05534
## 
## Model df: 4.   Total lags used: 10
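
Holt's method extrapolates the fitted linear trend indefinitely, so the point forecasts above keep climbing at a roughly constant rate. A damped-trend variant is a common alternative worth checking; the sketch below (an add-on, not from the original notes) fits it and compares AICc values, assuming therm and the Holt fit above are still in the workspace:

# Sketch: damped-trend Holt as a comparison to the linear-trend fit above.
therm_damped <- holt(therm, damped = TRUE)
c(holt   = therm_Double_Exponential$model$aicc,
  damped = therm_damped$model$aicc)
# autoplot(therm) +
#   autolayer(therm_Double_Exponential, PI = FALSE, series = "Holt") +
#   autolayer(therm_damped, PI = FALSE, series = "Damped Holt")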

Example: Car Sales (Lecture Note 3)

car <- readMat("Car_Sales.mat")

# first as a plain (non-seasonal) time series, just to look at the data
car.ts <- ts(t(car$Car.Sales))

plot(car.ts)

# redefine as a monthly series (frequency = 12) so hw() can fit a seasonal model
car.ts <- ts(t(car$Car.Sales), frequency=12)

# Holt-Winters' additive seasonal method, forecasting 12 months ahead
car_seasonal_indication <- hw(car.ts, h=12, seasonal="additive")

summary(car_seasonal_indication)
## 
## Forecast method: Holt-Winters' additive method
## 
## Model Information:
## Holt-Winters' additive method 
## 
## Call:
##  hw(y = car.ts, h = 12, seasonal = "additive") 
## 
##   Smoothing parameters:
##     alpha = 0.1724 
##     beta  = 1e-04 
##     gamma = 1e-04 
## 
##   Initial states:
##     l = 9.8447 
##     b = 0.0879 
##     s = -2.5166 -0.133 -0.5023 -4.6066 -3.3596 -0.9236
##            3.7924 6.5123 5.0313 2.9607 -2.7787 -3.4762
## 
##   sigma:  1.4534
## 
##      AIC     AICc      BIC 
## 603.1170 609.9170 648.7133 
## 
## Error measures:
##                       ME     RMSE      MAE        MPE     MAPE     MASE
## Training set -0.03300989 1.341429 1.097267 -0.7587655 8.322427 0.687933
##                   ACF1
## Training set 0.1055962
## 
## Forecasts:
##        Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## Jan 10       15.33793 13.47532 17.20054 12.48932 18.18655
## Feb 10       16.12280 14.23268 18.01291 13.23212 19.01347
## Mar 10       21.94936 20.03210 23.86662 19.01717 24.88155
## Apr 10       24.10752 22.16346 26.05158 21.13434 27.08071
## May 10       25.67613 23.70559 27.64666 22.66245 28.68980
## Jun 10       23.04376 21.04706 25.04046 19.99008 26.09744
## Jul 10       18.41532 16.39277 20.43788 15.32209 21.50855
## Aug 10       16.06710 14.01898 18.11522 12.93477 19.19943
## Sep 10       14.90731 12.83390 16.98071 11.73631 18.07831
## Oct 10       19.09938 17.00096 21.19781 15.89012 22.30864
## Nov 10       19.55579 17.43261 21.67896 16.30867 22.80291
## Dec 10       17.25979 15.11211 19.40747 13.97519 20.54438
car_seasonal_indication$fitted
##         Jan       Feb       Mar       Apr       May       Jun       Jul
## 1  6.456474  7.258028 13.338884 15.271140 16.689018 13.694392  9.082699
## 2  7.293317  8.069029 14.120946 15.885574 17.091647 14.258318  9.554229
## 3  8.188404  9.403104 15.496028 17.603777 19.070483 16.754484 12.032279
## 4  9.341072 10.388831 16.314588 18.143833 20.098384 17.471656 12.903455
## 5 11.002945 12.006715 17.913117 20.250372 21.993856 19.366285 14.603720
## 6 11.788467 12.641638 18.523673 20.935717 22.882468 20.364625 15.889806
## 7 14.258887 14.771967 20.252076 22.411222 23.932841 20.739865 15.973031
## 8 14.106488 14.567518 19.883540 22.232172 23.362114 20.853837 16.442892
## 9 13.804235 14.486229 20.271799 22.406682 23.856930 21.611046 16.891255
##         Aug       Sep       Oct       Nov       Dec
## 1  6.805959  5.895884 10.286887 10.616174  8.104570
## 2  7.479598  6.405434 10.804723 11.125331  9.111276
## 3  9.703585  8.422996 12.122543 12.467953 10.135552
## 4 10.445842  8.975543 12.992598 13.611816 11.716982
## 5 12.354562 10.842733 15.026633 15.437288 12.851622
## 6 13.421104 12.494976 16.412052 16.993976 14.820309
## 7 13.523844 12.638677 16.962819 17.277773 15.031453
## 8 13.711043 12.503300 16.883766 17.393139 14.877315
## 9 14.737984 13.920366 18.192328 19.192660 16.549992
plot(car_seasonal_indication$fitted)

checkresiduals(car_seasonal_indication)

## 
##  Ljung-Box test
## 
## data:  Residuals from Holt-Winters' additive method
## Q* = 30.28, df = 6, p-value = 3.478e-05
## 
## Model df: 16.   Total lags used: 22
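
The Ljung-Box p-value above is tiny, so the additive Holt-Winters model leaves clear autocorrelation in the residuals. When the seasonal swings grow with the level of the series, multiplicative seasonality is the usual alternative to try; a minimal sketch (an add-on, not from the original notes), assuming car.ts is still defined with frequency 12:

# Sketch: multiplicative Holt-Winters as a comparison to the additive fit above.
car_hw_mult <- hw(car.ts, h = 12, seasonal = "multiplicative")
c(additive       = car_seasonal_indication$model$aicc,
  multiplicative = car_hw_mult$model$aicc)
# checkresiduals(car_hw_mult)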

Example: New Plant and Equipment Expenditures (Lecture Note 3)

New_Plant <- readMat("New_Plant.mat")

# training set: the first 44 quarters of log expenditures, as a quarterly series
plant_training <- ts(t(log(New_Plant$New.Plant))[1:44], frequency=4)

# additive Holt-Winters on the log scale, forecasting the next 8 quarters
plant_seasonal <- hw(plant_training, seasonal="additive", h=8)

summary(plant_seasonal)
## 
## Forecast method: Holt-Winters' additive method
## 
## Model Information:
## Holt-Winters' additive method 
## 
## Call:
##  hw(y = plant_training, h = 8, seasonal = "additive") 
## 
##   Smoothing parameters:
##     alpha = 0.7256 
##     beta  = 0.0042 
##     gamma = 0.2744 
## 
##   Initial states:
##     l = 2.3931 
##     b = 0.0212 
##     s = 0.0989 -0.0065 0.0261 -0.1184
## 
##   sigma:  0.0245
## 
##       AIC      AICc       BIC 
## -150.5891 -145.2950 -134.5314 
## 
## Error measures:
##                        ME       RMSE        MAE        MPE      MAPE      MASE
## Training set 0.0009514271 0.02219517 0.01822413 0.03969571 0.6348927 0.2073649
##                  ACF1
## Training set 0.409508
## 
## Forecasts:
##       Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## 12 Q1       3.286563 3.255116 3.318009 3.238470 3.334656
## 12 Q2       3.427194 3.388264 3.466125 3.367655 3.486733
## 12 Q3       3.426368 3.381108 3.471627 3.357149 3.495586
## 12 Q4       3.548935 3.498068 3.599801 3.471141 3.626728
## 13 Q1       3.372265 3.312181 3.432348 3.280375 3.464154
## 13 Q2       3.512896 3.448387 3.577405 3.414237 3.611555
## 13 Q3       3.512070 3.443373 3.580766 3.407008 3.617131
## 13 Q4       3.634636 3.561951 3.707322 3.523474 3.745799
plant_seasonal$fitted
##        Qtr1     Qtr2     Qtr3     Qtr4
## 1  2.295939 2.466550 2.459491 2.586201
## 2  2.399725 2.577159 2.590090 2.737117
## 3  2.567543 2.755206 2.754803 2.896347
## 4  2.706594 2.851881 2.807356 2.921233
## 5  2.697572 2.880037 2.825686 2.951419
## 6  2.752546 2.918779 2.930212 3.079427
## 7  2.882758 3.014605 3.016055 3.128773
## 8  2.900188 3.033097 3.028950 3.114920
## 9  2.940084 3.116139 3.094653 3.202745
## 10 3.040680 3.205749 3.207993 3.342029
## 11 3.168789 3.316997 3.335795 3.462843
checkresiduals(plant_seasonal)

## 
##  Ljung-Box test
## 
## data:  Residuals from Holt-Winters' additive method
## Q* = 31.596, df = 3, p-value = 6.366e-07
## 
## Model df: 8.   Total lags used: 11
# training- and test-set accuracy, using the held-out quarters 45-52 (log scale)
accuracy(plant_seasonal, log(New_Plant$New.Plant)[45:52])
##                         ME       RMSE        MAE         MPE      MAPE
## Training set  0.0009514271 0.02219517 0.01822413  0.03969571 0.6348927
## Test set     -0.0964436478 0.10020992 0.09644365 -2.85795794 2.8579579
##                   MASE     ACF1
## Training set 0.1562626 0.409508
## Test set     0.8269550       NA
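
The model was fit to log expenditures, so the forecasts and the test-set errors above are on the log scale. To report forecasts in the original units they have to be back-transformed with exp(); a minimal sketch (an add-on, not from the original notes), assuming New_Plant and plant_seasonal from above and ignoring any bias adjustment:

# Sketch: back-transform the log-scale point forecasts to the original scale
# and compute a test-set RMSE in the original units.
plant_test <- New_Plant$New.Plant[45:52]      # held-out quarters, original scale
plant_fc   <- exp(plant_seasonal$mean)        # point forecasts, back-transformed
sqrt(mean((as.numeric(plant_fc) - plant_test)^2))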