###################################################
# -- must source ss2 first --
###################################################
# Example 3.33 using Chapter 6 techniques
#
y = scan("/mydata/ar1boot.dat")
#
nboot = 200                        # number of bootstrap reps
#
# -- set up --
# the model is
#   x(t+1) = phi*x(t) + phi*v(t)   <-- note w(t) = phi*v(t)
#   y(t)   = x(t) + muy + v(t)
# with phi=.95, muy=50, E[v(t)]=0, Var[v(t)]=8
# and A(t)=1, u(t)=1
num   = 100
A     = array(1, dim=c(1,1,100))   # A is 100 1x1 matrices of 1s
input = matrix(1,100,1)            # input is a 100x1 vector of 1s
phi = .95; muy = 50; cR = sqrt(8)  # initial parameter values
mu1 = 0; Sigma1 = (phi^2*8)/(1-phi^2)
initpar = c(phi,muy,cR)

# -- function to calculate the likelihood --
Linn = function(para){
  phi = para[1]
  muy = para[2]
  cR  = para[3]
  cQ  = phi*cR                     # chol(Q), since w(t) = phi*v(t)
  S   = phi*cR^2                   # cov(w,v) = phi*Var[v]
  Sigma1 = (phi^2)*(cR^2)/(1-phi^2); Sigma1[Sigma1<0] = 0   # guard against |phi|>1 during the search
  kf = Kfilter2(num,y,A,mu1,Sigma1,phi,0,muy,cQ,cR,S,input)
  return(kf$like)
}

# estimate the parameters by MLE (in Example 3.33, Yule-Walker was used)
est = optim(initpar,Linn,NULL,method="BFGS",hessian=TRUE,control=list(reltol=.001))
stderr = sqrt(diag(solve(est$hessian)))
cbind(est$par,stderr)              # display results (phi, muy, sigmav)
# if you want the results to be pretty, do this:
#  estimate = est$par
#  u = cbind(estimate,stderr)
#  rownames(u) = c("phi","muy","sigmav")
#  u

# obtain the innovations
phi = est$par[1]
muy = est$par[2]
cR  = est$par[3]
cQ  = phi*cR
S   = phi*cR^2
Sigma1 = (phi^2)*(cR^2)/(1-phi^2)  # evaluate Sigma1 at the MLEs
kf  = Kfilter2(num,y,A,mu1,Sigma1,phi,0,muy,cQ,cR,S,input)

# -- function to calculate the likelihood for the bootstrapped data --
#  (same as Linn except that it uses the bootstrapped data y.star)
Linn2 = function(para){
  phi = para[1]
  muy = para[2]
  cR  = para[3]
  cQ  = phi*cR
  S   = phi*cR^2
  Sigma1 = (phi^2)*(cR^2)/(1-phi^2); Sigma1[Sigma1<0] = 0
  kf = Kfilter2(num,y.star,A,mu1,Sigma1,phi,0,muy,cQ,cR,S,input)
  return(kf$like)
}

# -- bootstrap --
phi = est$par[1]; muy = est$par[2]; cR = est$par[3]   # initialize the parameters at the MLEs
cQ = phi*cR; S = phi*cR^2; mux = 0
Sigma1 = (phi^2)*(cR^2)/(1-phi^2)                     # use the estimated cR^2 so the filter matches Linn
#
kf = Kfilter2(num,y,A,mux,Sigma1,phi,0,muy,cQ,cR,S,input)   # run the filter at the MLEs
#
# pull the necessary values out of the filter
xp    = kf$xp
innov = kf$innov
sig   = kf$sig
K     = kf$K
#
# set up the bootstrap
e       = innov/sqrt(sig)     # standardized innovations
e.star  = e                   # initialize resampled errors (just so e.star has the proper dimensions)
y.star  = y                   # initialize data - the 1st value of y is held fixed: y.star[1]=y[1]
xp.star = xp                  # initialize state predictions (xp is x_t^{t-1})
para.star = matrix(0, nboot, 3)   # a place to put the bootstrapped estimates
initpar = c(phi,muy,cR)
#
for (i in 1:nboot){
  cat("iteration:", i, "\n")                        # write the iteration number to the screen
  e.star[2:100] = sample(e[2:100], replace=TRUE)    # resample the standardized innovations
  for (j in 2:100){
    xp.star[j] = phi*xp.star[j-1] + K[j]*sqrt(sig[j])*e.star[j]   # generate the bootstrapped states
  }
  y.star[2:100] = xp.star[2:100] + muy + e.star[2:100]*sqrt(sig[2:100])   # generate the bootstrapped obs
  #
  # estimation with the bootstrapped data... note that reltol=.1 gives crude estimates but saves time;
  # if you set reltol to a smaller value, you may be waiting a while for the analysis to finish
  est.star = optim(initpar,Linn2,NULL,method="BFGS",control=list(reltol=.1))
  para.star[i,] = est.star$par                      # record the estimates (phi, muy, sigmav)
}
#
# the bootstrap estimates are in para.star; for example,
hist(para.star[,1], 15, main="phi")   # a histogram of the phi estimates
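#
# a minimal sketch (not in the original script): one common way to summarize the
# bootstrap output is a percentile interval for each parameter; this assumes
# para.star has been filled in by the loop above
boot.ci = apply(para.star, 2, quantile, probs=c(.025,.975))   # 95% percentile intervals
colnames(boot.ci) = c("phi","muy","sigmav")
boot.ci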