commit 66f686ce3b283fb68292a350e9f97404f514590b
parent beeb0d0bb77da493152e18f8c1e500b61d186ad5
Author: Jared Tobin <jared@jtobin.ca>
Date: Sat, 19 Oct 2013 23:41:16 +1300
Add Bayesian stuff.
Diffstat:
2 files changed, 47 insertions(+), 3 deletions(-)
diff --git a/src/Measurable.hs b/src/Measurable.hs
@@ -43,8 +43,10 @@ import Numeric.Integration.TanhSinh
--
-- This is equivalent to the type that forms the Continuation monad, albeit
-- with constant (Double) result type. The functor and monad instances follow
--- suit. The strength of the Monad instance is that it allows us to meld
--- together measures with different input types.
+-- suit.
+--
+-- The strength of the Monad instance is that it allows us to do Bayesian
+-- inference: prior >>= likelihood == posterior.
newtype Measure a = Measure { measure :: (a -> Double) -> Double }
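For context, the continuation-style instances that the comment alludes to look roughly like the standalone sketch below (they are not part of this patch, and the library's actual definitions may differ in detail). Note how (>>=) feeds each point of the first measure through a kernel of type a -> Measure b, which is what lets prior >>= likelihood express Bayesian inference.

    import Control.Applicative  -- needed on older GHCs; harmless on 7.10+

    newtype Measure a = Measure { measure :: (a -> Double) -> Double }

    instance Functor Measure where
      fmap g (Measure m) = Measure $ \f -> m (f . g)

    instance Applicative Measure where
      pure x = Measure $ \f -> f x
      Measure mg <*> Measure mx = Measure $ \f -> mg (\g -> mx (f . g))

    instance Monad Measure where
      return = pure

      -- 'k' plays the role of a likelihood kernel: bind integrates the
      -- continuation over the first measure, pointwise through 'k'.
      Measure m >>= k = Measure $ \f -> m (\x -> measure (k x) f)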
@@ -95,6 +97,11 @@ fromDensity :: (Double -> Double) -> Measure Double
fromDensity d = Measure $ \f -> quadratureTanhSinh $ liftA2 (*) f d
where quadratureTanhSinh = result . last . everywhere trap
+-- | Create a measure from a mass function.
+fromMassFunction :: (a -> Double) -> [a] -> Measure a
+fromMassFunction p support = Measure $ \f ->
+ sum . map (liftA2 (*) f p) $ support
+
-- | The (sum) identity measure.
identityMeasure :: Fractional a => Measure a
identityMeasure = fromObservations []
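Integration against a measure built by fromMassFunction is just a mass-weighted sum over the support. A minimal illustration, assuming the patched Measurable module is in scope:

    import Measurable

    -- A fair six-sided die as a discrete measure.
    die :: Measure Double
    die = fromMassFunction (const (1 / 6)) [1..6]

    -- Integrating 'id' against the die recovers its mean:
    --   (1 + 2 + 3 + 4 + 5 + 6) / 6 == 3.5
    dieMean :: Double
    dieMean = measure die id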
diff --git a/tests/Test.hs b/tests/Test.hs
@@ -1,11 +1,14 @@
--- Simple examples that demonstrate some measure-foo.
+-- Simple examples that demonstrate some measure-fu.
import Control.Applicative
import Control.Monad
import Control.Monad.Trans
import Measurable
+import Numeric.SpecFunctions
import Statistics.Distribution hiding (mean, variance)
import Statistics.Distribution.Normal
+import Statistics.Distribution.Beta
+import Statistics.Distribution.Binomial
import Statistics.Distribution.ChiSquared
import System.Random.MWC
import System.Random.MWC.Distributions
@@ -13,6 +16,17 @@ import System.Random.MWC.Distributions
standardNormal = density $ normalDistr 0 1
genLocationNormal m = density $ normalDistr m 1
+basicBeta a b = density $ betaDistr a b
+betaMeasure a b = fromDensity $ basicBeta a b
+
+binom p n k | n <= 0 = 0
+ | k < 0 = 0
+ | n < k = 0
+ | otherwise = n `choose` k * p ^ k * (1 - p) ^ (n - k)
+
+binomMeasure n p = fromMassFunction (\x -> binom p n (truncate x))
+ (map fromIntegral [0..n] :: [Double])
+
main = do
expSamples <- withSystemRandom . asGenIO $ \g ->
replicateM 100 $ exponential 1 g
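A quick sanity check on the betaMeasure and binomMeasure helpers defined above; this is a sketch under the assumption, consistent with the tests below, that expectation integrates the identity function, and the quoted values are what the mass-function sum and the quadrature should approximate:

    checkBinom :: Double
    checkBinom = expectation (binomMeasure 10 0.5)  -- ~ 5.0, the binomial mean n * p

    checkBeta :: Double
    checkBeta = expectation (betaMeasure 1 4)       -- ~ 0.2, the beta mean a / (a + b)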
@@ -75,4 +89,27 @@ main = do
putStrLn $ "let X ~ N(0, 1), Y ~ observed. P(0 < X < 0.8): " ++
show (expectation $ 0 `to` 0.8 <$> (mu + nu))
+ putStrLn ""
+ putStrLn "Creating from a mass function:"
+ putStrLn ""
+
+ let kappa = fromMassFunction (\x -> binom 0.5 10 (truncate x)) [0..10]
+ putStrLn $ "let X ~ binom(10, 0.5). mean of X (should be 5): " ++
+ show (expectation kappa)
+ putStrLn $ "let X ~ binom(10, 0.5). variance of X (should be 2.5): " ++
+ show (variance kappa)
+
+ putStrLn ""
+ putStrLn "Bayesian inference"
+ putStrLn ""
+
+ let omega = (betaMeasure 1 4) >>= binomMeasure 10
+
+ putStrLn $
+ "let X | p ~ binomial(10, p), p ~ beta(1, 4). mean of posterior pred.:\n "
+ ++ show (expectation omega)
+ putStrLn $
+ "let X | p ~ binomial(10, p), p ~ beta(1, 4). variance of posterior pred:\n"
+ ++ show (variance omega)
+
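For reference, compounding a beta(1, 4) measure with a binomial(10, p) kernel yields a beta-binomial measure, so the two figures printed above should land near its closed-form moments. A small cross-check sketch, not part of the patch:

    -- Closed-form beta-binomial moments for comparison with the
    -- quadrature-based expectation and variance printed by Test.hs.
    betaBinomialMean :: Double -> Double -> Double -> Double
    betaBinomialMean n a b = n * a / (a + b)
    -- betaBinomialMean 10 1 4 == 2.0

    betaBinomialVariance :: Double -> Double -> Double -> Double
    betaBinomialVariance n a b =
      n * a * b * (a + b + n) / ((a + b) ^ 2 * (a + b + 1))
    -- betaBinomialVariance 10 1 4 == 4.0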