
BAS-package    R Documentation

Bayesian Model Averaging using Bayesian Adaptive Sampling

Description

Package for Bayesian Model Averaging in linear models using stochastic or deterministic sampling without replacement from posterior distributions. Prior distributions on coefficients take the form of Zellner's g-prior or mixtures of g-priors. Options include the Zellner-Siow Cauchy prior, the hyper-g priors of Liang et al. (2008), local and global empirical Bayes estimates of g, and other default model selection criteria such as AIC and BIC. Sampling probabilities may be updated based on the models sampled so far.
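
For orientation, here is a brief sketch (not part of the package's shipped examples) of how the coefficient priors described above are selected via the prior argument of bas.lm. The UScrime data from MASS and the hyperparameter values are illustrative assumptions; the call style mirrors the transcript in the Results section below.

library(BAS)
library(MASS)                        # UScrime data, as used in the demos below
data(UScrime)
UScrime[, -2] <- log(UScrime[, -2])  # log-transform all columns except the indicator So

# Zellner-Siow Cauchy prior on coefficients with a uniform prior over models
crime.zs <- bas.lm(y ~ ., data = UScrime, prior = "ZS-null",
                   modelprior = uniform(), initprobs = "eplogp")

# hyper-g prior of Liang et al. (2008); alpha = 3 is an illustrative choice
# of the hyperparameter, not a default asserted here
crime.hg <- bas.lm(y ~ ., data = UScrime, prior = "hyper-g", alpha = 3,
                   modelprior = beta.binomial(1, 1))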

Details

Package: BAS
Depends: R (>= 2.8)
License: GPL-2
URL: http://www.stat.duke.edu/~clyde


Author(s)

Merlise Clyde
Maintainer: Merlise Clyde <clyde@stat.duke.edu>

References

Clyde, M., Ghosh, J., and Littman, M. (2010) Bayesian Adaptive Sampling for Variable Selection and Model Averaging. Journal of Computational and Graphical Statistics, 20, 80-101.
http://dx.doi.org/10.1198/jcgs.2010.09049

Clyde, M. and George, E. I. (2004) Model uncertainty. Statistical Science, 19, 81-94.
http://dx.doi.org/10.1214/088342304000000035

Clyde, M. (1999) Bayesian Model Averaging and Model Search Strategies (with discussion). In Bayesian Statistics 6, J.M. Bernardo, A.P. Dawid, J.O. Berger, and A.F.M. Smith (eds.), Oxford University Press, pp. 157-185.

Li, Y. and Clyde, M. (2015) Mixtures of g-priors in Generalized Linear Models. http://arxiv.org/abs/1503.06913

Liang, F., Paulo, R., Molina, G., Clyde, M., and Berger, J.O. (2008) Mixtures of g-priors for Bayesian Variable Selection. Journal of the American Statistical Association, 103, 410-423.
http://dx.doi.org/10.1198/016214507000001337

See Also

bas

Examples

demo(BAS.USCrime)
demo(BAS.hald)
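
If the demos are not available in a particular installation, the g-prior fit from the second demo can be reproduced directly; the following sketch simply restates the call captured in the Results transcript below.

data(Hald)
hald.gprior <- bas.lm(Y ~ ., data = Hald, prior = "g-prior", alpha = 13,
                      modelprior = beta.binomial(1, 1), initprobs = "eplogp")
summary(hald.gprior)
plot(hald.gprior)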

Results


R version 3.3.1 (2016-06-21) -- "Bug in Your Hair"
Copyright (C) 2016 The R Foundation for Statistical Computing
Platform: x86_64-pc-linux-gnu (64-bit)

> library(BAS)
> png(filename="/home/ddbj/snapshot/RGM3/R_CC/result/BAS/BAS-package.Rd_%03d_medium.png", width=480, height=480)
> ### Name: BAS-package
> ### Title: Bayesian Model Averaging using Bayesian Adaptive Sampling
> ### Aliases: BAS-package BAS
> ### Keywords: package regression
> 
> ### ** Examples
> 
> demo(BAS.USCrime)


	demo(BAS.USCrime)
	---- ~~~~~~~~~~~

> require(MASS)
Loading required package: MASS

> library(MASS)

> data(UScrime)

> UScrime[,-2] = log(UScrime[,-2])

> crime.bic =  bas.lm(y ~ ., data=UScrime, n.models=2^15, prior="BIC",
+                     modelprior=beta.binomial(1,1),
+                     initprobs= "eplogp") 

> summary(crime.bic)
     Intercept M So Ed Po1 Po2 LF M.F Pop NW U1 U2 GDP Ineq Prob Time
[1,]         1 1  0  1   1   0  0   0   0  1  0  1   0    1    1    1
[2,]         1 1  1  1   1   1  1   1   1  1  1  1   1    1    1    1
[3,]         1 1  0  1   1   0  0   0   0  1  0  1   0    1    1    0
[4,]         1 1  0  1   1   0  0   0   0  1  0  1   1    1    1    1
[5,]         1 1  0  1   1   0  0   0   1  1  0  1   0    1    1    0
               BF PostProbs     R2 dim   logmarg
[1,] 1.0000000000    0.0191 0.8420   9 -22.15855
[2,] 0.0001267935    0.0156 0.8695  16 -31.13150
[3,] 0.7609295192    0.0145 0.8265   8 -22.43176
[4,] 0.5431578292    0.0133 0.8506  10 -22.76890
[5,] 0.5203178750    0.0099 0.8375   9 -22.81186

> plot(crime.bic)

> image(crime.bic, subset=-1)

> # takes a while to run:
> # crime.coef = coefficients(crime.bic)
> # crime.coef
> # par(mfrow=c(3,2))
> # plot(crime.coef, ask=FALSE)
> 
> # see update
> #crime.aic = update(crime.bic, newprior="AIC")
> #crime.zs = update(crime.bic, newprior="ZS-null")
> 
> #crime.EBG = EB.global.bma(crime.bic)
> # same as update(crime.bic, newprior="EB-global")
> #image(crime.EBG, subset=-1)
> 
> demo(BAS.hald)


	demo(BAS.hald)
	---- ~~~~~~~~

> data(Hald)

> hald.gprior =  bas.lm(Y~ ., data=Hald, prior="g-prior", alpha=13,
+                       modelprior=beta.binomial(1,1),
+                       initprobs="eplogp")

> hald.gprior

Call:
bas.lm(formula = Y ~ ., data = Hald, prior = "g-prior", alpha = 13,     modelprior = beta.binomial(1, 1), initprobs = "eplogp")


 Marginal Posterior Inclusion Probabilities: 
Intercept         X1         X2         X3         X4  
   1.0000     0.9019     0.6896     0.4653     0.6329  

> plot(hald.gprior)

> summary(hald.gprior)
     Intercept X1 X2 X3 X4         BF PostProbs     R2 dim   logmarg
[1,]         1  1  1  0  0 1.00000000    0.2432 0.9787   3 11.727354
[2,]         1  1  0  0  1 0.69239442    0.1684 0.9725   3 11.359755
[3,]         1  1  1  1  1 0.08991408    0.1312 0.9824   5  9.318453
[4,]         1  1  1  0  1 0.33557136    0.1224 0.9823   4 10.635434
[5,]         1  1  1  1  0 0.33449263    0.1220 0.9823   4 10.632214

> image(hald.gprior, subset=-1, vlas=0)

> hald.coef = coefficients(hald.gprior)

> hald.coef

 Marginal Posterior Summaries of Coefficients: 
           post mean  post SD  post p(B != 0)
Intercept  95.4231     0.7107   1.0000       
X1          1.2150     0.5190   0.9019       
X2          0.2756     0.4832   0.6896       
X3         -0.1271     0.4976   0.4653       
X4         -0.3269     0.4717   0.6329       

> plot(hald.coef)

> predict(hald.gprior, hald.gprior$X[,-1], top=5)  # do not include the intercept in the design matrix
$Ybma
           [,1]
 [1,]  79.74246
 [2,]  74.50010
 [3,] 105.29268
 [4,]  89.88693
 [5,]  95.57177
 [6,] 104.56409
 [7,] 103.40145
 [8,]  77.13668
 [9,]  91.99731
[10,] 114.21325
[11,]  82.78446
[12,] 111.00723
[13,] 110.40160

$Ypred
         [,1]     [,2]     [,3]     [,4]     [,5]     [,6]     [,7]     [,8]
[1,] 81.17036 74.83464 105.0725 89.69881 97.15898 104.4575 103.3893 76.06454
[2,] 77.70296 74.24113 105.8554 90.46267 93.09565 104.7152 103.1399 78.80193
[3,] 79.70437 74.40553 105.2175 89.76253 95.63309 104.5709 103.5254 77.08557
[4,] 79.65151 74.47846 105.4218 89.83174 95.62799 104.5962 103.5068 77.00839
[5,] 79.84321 74.31409 104.9063 89.65651 95.70301 104.5285 103.5476 77.15919
         [,9]    [,10]    [,11]    [,12]    [,13]
[1,] 91.57174 113.1722 81.59906 111.2219 111.0884
[2,] 92.68123 115.8058 84.50293 110.4162 109.0791
[3,] 91.98604 114.1759 82.78145 111.1196 110.5321
[4,] 92.07571 114.1088 82.68233 111.0429 110.4674
[5,] 91.83513 114.2353 82.88128 111.2384 110.6515

$postprobs
[1] 0.3089304 0.2139017 0.1666632 0.1555023 0.1550024

$best
[1]  2 15 12  9  3


> fitted(hald.gprior, type="HPM")
 [1]  81.17036  74.83464 105.07248  89.69881  97.15898 104.45753 103.38927
 [8]  76.06454  91.57174 113.17222  81.59906 111.22195 111.08841

> hald.gprior =  bas.lm(Y~ ., data=Hald, n.models=2^4,
+                       prior="g-prior", alpha=13, modelprior=uniform(),
+                       initprobs="eplogp")

> hald.EB = update(hald.gprior, newprior="EB-global")

> hald.bic = update(hald.gprior,newprior="BIC")

> hald.zs = update(hald.bic, newprior="ZS-null")
> dev.off()
null device 
          1 
>