Day 2: Bayes Rule

p=c(.3,.5,.7)                             # candidate values of the proportion p
prior=c(.5,.3,.2)                         # prior probabilities on those values
# data: n=27, y=11
products=prior*dbinom(11,size=27,prob=p)  # prior times binomial likelihood
posterior=products/sum(products)          # normalize to obtain the posterior
cbind(Proportion=p,Prior=prior,Posterior=round(posterior,4))
##      Proportion Prior Posterior
## [1,]        0.3   0.5    0.5665
## [2,]        0.5   0.3    0.4302
## [3,]        0.7   0.2    0.0033
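
In symbols, the table applies Bayes' rule over the three candidate proportions; the likelihood of each \(p_i\) is the binomial probability of \(y=11\) successes in \(n=27\) trials, and the binomial coefficient cancels in the normalization:

\[
P(p_i \mid y) = \frac{P(p_i)\,\binom{27}{11} p_i^{11}(1-p_i)^{16}}{\sum_j P(p_j)\,\binom{27}{11} p_j^{11}(1-p_j)^{16}}
\]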

Alternatively, you can use the function ‘pdisc(p, prior, data)’ in the LearnBayes package, where data is the vector (number of successes, number of failures):

library(LearnBayes)
posterior2=pdisc(p,prior,c(11,16))
round(posterior2, 4)
## [1] 0.5665 0.4302 0.0033

The Bayes estimate can be obtained by computing the expected value of the posterior distribution.

p.hat=sum(p*posterior2) 
round(p.hat,4)
## [1] 0.3873
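
Using the rounded posterior probabilities from the table above, this is simply a weighted average of the three candidate proportions:

\[
\hat{p} = \sum_i p_i \, P(p_i \mid y) \approx 0.3(0.5665) + 0.5(0.4302) + 0.7(0.0033) \approx 0.387
\]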

To get a better estimate of the population proportion, let’s use a finer grid of values for \(p\):

p=seq(0.05,0.95,by=.1)              # finer grid of 10 candidate proportions
prior.beta=dbeta(p,2,2)             # Beta(2,2) density evaluated on the grid
prior=prior.beta/sum(prior.beta)    # normalize to a discrete prior
p.hat2=sum(p*prior)                 # prior mean
posterior3=pdisc(p,prior,c(11,16))  # posterior after 11 successes and 16 failures
p.hat3=sum(p*posterior3)            # posterior mean
cbind(PriorMean=p.hat2,PosteriorMean=p.hat3)
##      PriorMean PosteriorMean
## [1,]       0.5     0.4193529
plot(p,posterior3,type="h")
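
As a quick sanity check, the grid answer can be compared with the exact conjugate result: a \(Beta(2,2)\) prior combined with 11 successes and 16 failures gives a \(Beta(13,18)\) posterior, whose mean is \(13/31 \approx 0.4194\). A short sketch:

a=2+11; b=2+16   # exact conjugate posterior is Beta(13,18)
a/(a+b)          # about 0.4194, in close agreement with the grid posterior mean above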

Day 5: Predictive Distributions

p=seq(0.05,0.95,by=.1)                    # grid of candidate proportions
prior=c(1,5.2,8,7.2,4.6,2.1,0.7,0.1,0,0)  # prior weights on the grid
prior=prior/sum(prior)                    # normalize to a discrete prior
m=20; ys=0:20                             # future sample size and possible success counts
pred=pdiscp(p,prior,m,ys)                 # prior predictive probabilities for y=0,...,20
# round(cbind(0:20,pred),3)
# sum(prior*dbinom(0,size=20,prob=p))         # 0.02030242
# sum(prior*dbinom(2,size=20,prob=p))         # 0.06894572
product=prior*dbinom(35,size=50,prob=p)   # observed data: y=35 successes in n=50 trials
posterior=product/sum(product)            # discrete posterior over the grid
pred.post=pdiscp(p,posterior,m,ys)        # posterior predictive probabilities for y=0,...,20
round(cbind(SuccessCount=0:20,PriorPredictive=pred,PosteriorPredictive=pred.post),3)
##       SuccessCount PriorPredictive PosteriorPredictive
##  [1,]            0           0.020               0.000
##  [2,]            1           0.044               0.000
##  [3,]            2           0.069               0.000
##  [4,]            3           0.092               0.000
##  [5,]            4           0.106               0.000
##  [6,]            5           0.112               0.002
##  [7,]            6           0.110               0.005
##  [8,]            7           0.102               0.013
##  [9,]            8           0.089               0.028
## [10,]            9           0.074               0.053
## [11,]           10           0.059               0.087
## [12,]           11           0.044               0.123
## [13,]           12           0.031               0.152
## [14,]           13           0.021               0.161
## [15,]           14           0.013               0.146
## [16,]           15           0.007               0.110
## [17,]           16           0.004               0.069
## [18,]           17           0.002               0.034
## [19,]           18           0.001               0.013
## [20,]           19           0.000               0.003
## [21,]           20           0.000               0.000
#sum(posterior*dbinom(10,size=20,prob=p))    # 0.08653378
#sum(posterior*dbinom(13,size=20,prob=p))    # 0.1611527
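
Each entry returned by pdiscp is a weighted average of binomial probabilities, with weights given by the prior (or posterior); the commented checks above verify two entries by hand. A short sketch reproduces the whole prior predictive vector the same way:

pred.check=sapply(ys,function(y) sum(prior*dbinom(y,size=m,prob=p)))  # prior predictive by hand
max(abs(pred.check-pred))                                             # essentially zero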

You can also obtain the prior predictive distribution using Monte Carlo simulation.

# (with a continuous Beta prior you could instead draw p.sim using rbeta)
p.sim=sample(p,10000,replace=T,prob=prior)  # draw p from the discrete prior
y.sim=rbinom(10000,20,p.sim)                # draw a future y for each simulated p
freq=table(factor(y.sim,levels=ys))         # counts for every y in 0:20 (zeros kept)

ys.sim=as.integer(names(freq))

pred.prob=freq/sum(freq)
cbind(ys.sim,Theoretical=round(pred,3),Simulation=pred.prob)
##       ys.sim Theoretical Simulation
##  [1,]      0       0.020     0.0206
##  [2,]      1       0.044     0.0451
##  [3,]      2       0.069     0.0700
##  [4,]      3       0.092     0.0922
##  [5,]      4       0.106     0.1067
##  [6,]      5       0.112     0.1107
##  [7,]      6       0.110     0.1104
##  [8,]      7       0.102     0.1054
##  [9,]      8       0.089     0.0876
## [10,]      9       0.074     0.0759
## [11,]     10       0.059     0.0559
## [12,]     11       0.044     0.0439
## [13,]     12       0.031     0.0295
## [14,]     13       0.021     0.0187
## [15,]     14       0.013     0.0144
## [16,]     15       0.007     0.0071
## [17,]     16       0.004     0.0038
## [18,]     17       0.002     0.0016
## [19,]     18       0.001     0.0003
## [20,]     19       0.000     0.0002
## [21,]     20       0.000     0.0000
plot(ys.sim,pred.prob, type="h")
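
Since every value of \(y\) from 0 to 20 now appears in the simulated table, the agreement between the theoretical and simulated columns can be summarized in a single number:

max(abs(pred-as.numeric(pred.prob)))   # largest discrepancy between theory and simulation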

You can also obtain the posterior predictive distribution using Monte Carlo simulation.

p.sim.post=sample(p,1000000,replace=T,prob=posterior)  # draw p from the discrete posterior
y.sim.post=rbinom(1000000,20,p.sim.post)               # draw a future y for each simulated p
freq.post=table(factor(y.sim.post,levels=ys))          # counts for every y in 0:20 (zeros kept)

ys.sim.post=as.integer(names(freq.post))

pred.prob.post=freq.post/sum(freq.post)
cbind(ys.sim.post,Theoretical=round(pred.post,5),Simulation=pred.prob.post)
##       ys.sim.post Theoretical Simulation
##  [1,]           0     0.00000   0.000000
##  [2,]           1     0.00000   0.000002
##  [3,]           2     0.00002   0.000019
##  [4,]           3     0.00010   0.000107
##  [5,]           4     0.00047   0.000496
##  [6,]           5     0.00174   0.001800
##  [7,]           6     0.00521   0.005146
##  [8,]           7     0.01315   0.013006
##  [9,]           8     0.02842   0.028491
## [10,]           9     0.05313   0.053248
## [11,]          10     0.08653   0.086166
## [12,]          11     0.12293   0.122837
## [13,]          12     0.15168   0.151452
## [14,]          13     0.16115   0.160607
## [15,]          14     0.14570   0.146255
## [16,]          15     0.11045   0.110264
## [17,]          16     0.06880   0.069589
## [18,]          17     0.03414   0.034046
## [19,]          18     0.01276   0.012809
## [20,]          19     0.00320   0.003254
## [21,]          20     0.00041   0.000406
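
A further sanity check (output not shown) compares the mean of the theoretical posterior predictive distribution with its Monte Carlo estimate:

sum(ys*pred.post)   # theoretical posterior predictive mean
mean(y.sim.post)    # Monte Carlo estimate of the same quantity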

If you use a continuous \(Beta(a,b)\) prior, you can use the function ‘pbetap(ab,m,ys)’ in LearnBayes to obtain the predictive distribution of the number of successes ys in a future sample of size \(m\).

ab=c(37,17)                 # Beta prior parameters a=37, b=17
m=20; ys=0:20               # future sample size and possible success counts
predictive=pbetap(ab,m,ys)  # predictive probabilities for y=0,...,20
cbind(ys,Predictive=round(predictive,4))
##       ys Predictive
##  [1,]  0     0.0000
##  [2,]  1     0.0000
##  [3,]  2     0.0000
##  [4,]  3     0.0000
##  [5,]  4     0.0001
##  [6,]  5     0.0005
##  [7,]  6     0.0018
##  [8,]  7     0.0051
##  [9,]  8     0.0125
## [10,]  9     0.0269
## [11,] 10     0.0504
## [12,] 11     0.0828
## [13,] 12     0.1192
## [14,] 13     0.1498
## [15,] 14     0.1628
## [16,] 15     0.1510
## [17,] 16     0.1168
## [18,] 17     0.0728
## [19,] 18     0.0345
## [20,] 19     0.0111
## [21,] 20     0.0018
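
The same Monte Carlo idea applies to the continuous prior. A short sketch (using the ab, m, and ys defined above): draw p from the Beta(37,17) distribution, draw a future y for each p, and compare the simulated frequencies with the pbetap values.

p.sim.beta=rbeta(10000,ab[1],ab[2])                  # draw p from the Beta(37,17) prior
y.sim.beta=rbinom(10000,m,p.sim.beta)                # draw a future y for each simulated p
round(table(factor(y.sim.beta,levels=ys))/10000,4)   # simulated predictive probabilities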