MUQ  0.4.3
MonteCarlo.cpp
#include <boost/property_tree/ptree.hpp>

namespace pt = boost::property_tree;
using namespace muq::Modeling;
using namespace muq::SamplingAlgorithms;
using namespace muq::Utilities;

#include "MCSampleProposal.h" // TODO: Move into muq

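/***
MySamplingProblem is a minimal AbstractSamplingProblem implementation used by this
Monte Carlo example: its LogDensity is constant (it only records the state at which
it was last evaluated), and its QOI returns twice that state with unit weight. The
distribution of the samples therefore comes entirely from the proposal defined in
main(), not from this density.
*/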
class MySamplingProblem : public AbstractSamplingProblem {
public:
  MySamplingProblem()
   : AbstractSamplingProblem(Eigen::VectorXi::Constant(1,1), Eigen::VectorXi::Constant(1,1))
  {}

  virtual ~MySamplingProblem() = default;

  virtual double LogDensity(std::shared_ptr<SamplingState> const& state) override {
    lastState = state;
    return 0;
  }

  virtual std::shared_ptr<SamplingState> QOI() override {
    assert (lastState != nullptr);
    return std::make_shared<SamplingState>(lastState->state[0] * 2, 1.0);
  }

private:
  std::shared_ptr<SamplingState> lastState = nullptr;
};


int main(){

  /***
  ### 1. Define the target density and set up the sampling problem
  MUQ has extensive tools for combining many model components into larger,
  more complicated models. The AbstractSamplingProblem base class and its
  children, like the SamplingProblem class, define the interface between
  sampling algorithms like MCMC and the models and densities they work with.

  Here, we create a very simple target density. A SamplingProblem could be
  constructed directly from that density (see the commented-out line below);
  this Monte Carlo variant of the example instead uses the custom
  MySamplingProblem defined above.
  */
  /***
  Define the Target Density:
  */
  Eigen::VectorXd mu(2);
  mu << 1.0, 2.0;

  Eigen::MatrixXd cov(2,2);
  cov << 1.0, 0.8,
         0.8, 1.5;

  auto targetDensity = std::make_shared<Gaussian>(mu, cov)->AsDensity(); // Gaussian density with mean mu and covariance cov

  /***
  Create the Sampling Problem:
  */
  //auto problem = std::make_shared<SamplingProblem>(targetDensity, targetDensity);

  auto problem = std::make_shared<MySamplingProblem>();

  /***
  ### 2. Construct the RWM algorithm
  One of the easiest ways to define an MCMC algorithm is to put all of the algorithm
  parameters, including the kernel and proposal definitions, in a property tree
  and then let MUQ construct each of the algorithm components: chain, kernel, and
  proposal.

  The boost property tree will have the following entries:

  - NumSamples : 10000
  - KernelList : "Kernel1"
  - Kernel1
    * Method : "MHKernel"
    * Proposal : "MyProposal"
    * MyProposal
      + Method : "MHProposal"
      + ProposalVariance : 0.5

  At the base level, we specify the number of steps in the chain with the entry "NumSamples".
  Note that this number includes any burn-in samples. The kernel is then defined
  in the "KernelList" entry. The value, "Kernel1", specifies a block in the
  property tree with the kernel definition. In the "Kernel1" block, we set the
  kernel to "MHKernel", which specifies that we want to use the Metropolis-Hastings
  kernel. We also tell MUQ to use the "MyProposal" block to define the proposal.
  The proposal method is specified as "MHProposal", which is the random walk
  proposal used in the RWM algorithm, and the proposal variance is set to 0.5.
  In this Monte Carlo variant of the example, the kernel and proposal entries are
  left commented out below and the corresponding objects are constructed
  programmatically instead; a sketch of the full property-tree setup follows
  this comment block.
  */
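
  /*
  For reference only: a sketch of the fully property-tree-driven RWM setup
  described above, using the entries listed in the comment (the variable names
  rwmOpts and rwm are illustrative). It is not used in this Monte Carlo
  variant, so it stays commented out here.

  pt::ptree rwmOpts;
  rwmOpts.put("NumSamples", 1e4);                          // total chain length, including burn-in
  rwmOpts.put("KernelList", "Kernel1");                    // name of the block defining the transition kernel
  rwmOpts.put("Kernel1.Method", "MHKernel");               // Metropolis-Hastings transition kernel
  rwmOpts.put("Kernel1.Proposal", "MyProposal");           // name of the block defining the proposal
  rwmOpts.put("Kernel1.MyProposal.Method", "MHProposal");  // random walk proposal
  rwmOpts.put("Kernel1.MyProposal.ProposalVariance", 0.5); // proposal variance
  auto rwm = MCMCFactory::CreateSingleChain(rwmOpts, problem);
  */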

  // parameters for the sampler
  pt::ptree pt;
  pt.put("NumSamples", 1e4); // number of MCMC steps
  pt.put("BurnIn", 1e3);     // number of initial samples to discard as burn-in
  pt.put("PrintLevel", 3);   // verbosity of the chain's console output
  /*pt.put("KernelList", "Kernel1"); // Name of block that defines the transition kernel
  pt.put("Kernel1.Method","MHKernel"); // Name of the transition kernel class
  pt.put("Kernel1.Proposal", "MyProposal"); // Name of block defining the proposal distribution
  pt.put("Kernel1.MyProposal.Method", "MHProposal");*/ // Name of proposal class

  /***
  Once the algorithm parameters are specified, we could pass them to the CreateSingleChain
  function of the MCMCFactory class to create an instance of the MCMC algorithm defined in
  the property tree. In this example that call is left commented out, and the chain is
  assembled by hand below.
  */
  //auto mcmc = MCMCFactory::CreateSingleChain(pt, problem);
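
  /***
  Instead, the proposal and transition kernel are constructed programmatically.
  MCSampleProposal draws each proposal independently from the Gaussian
  proposalDensity defined below, and the DummyKernel (as its name suggests)
  simply forwards every proposed state without an accept/reject step, so the
  resulting "chain" is a sequence of independent Monte Carlo samples.
  */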
  Eigen::VectorXd startPt = mu;

  Eigen::VectorXd mu_prop(2);
  mu_prop << 4.0, 2.0;

  Eigen::MatrixXd cov_prop(2,2);
  cov_prop << 1.0, 0.8,
              0.8, 1.5;
  auto proposalDensity = std::make_shared<Gaussian>(mu_prop, cov_prop);

  auto proposal = std::make_shared<MCSampleProposal>(pt, problem, proposalDensity);

  std::vector<std::shared_ptr<TransitionKernel> > kernels = {std::make_shared<DummyKernel>(pt, problem, proposal)};

  auto mcmc = std::make_shared<SingleChainMCMC>(pt, kernels);

  /***
  ### 3. Run the MCMC algorithm
  We are now ready to run the MCMC algorithm. Here we start the chain at the
  target density's mean. The resulting samples are held in instances of the
  SampleCollection class, which internally stores the steps of the MCMC chain
  as a vector of weighted SamplingStates. Below we work with the
  quantity-of-interest samples returned by GetQOIs().
  */
  mcmc->Run(startPt);
  std::shared_ptr<SampleCollection> samps = mcmc->GetQOIs();

  /***
  ### 4. Analyze the results

  When looking at the entries in a SampleCollection, it is important to note that
  the states stored by a SampleCollection are weighted even in the MCMC setting.
  When a proposal $x^\prime$ is rejected, instead of making a copy of $x^{(k-1)}$
  for $x^{(k)}$, the weight on $x^{(k-1)}$ is simply incremented. For example, if
  three consecutive proposals from $x^{(k-1)}$ are rejected, the collection stores
  $x^{(k-1)}$ once with weight four rather than four identical states. This is
  useful for long chains in high-dimensional parameter spaces, where storing all
  duplicates could quickly consume the available memory.

  The SampleCollection class provides several functions for computing sample moments.
  For example, here we compute the mean, variance, covariance, and third central moment.
  While the third moment is actually a tensor, here we only return the marginal
  values, i.e., $\mathbb{E}_x[(x_i-\mu_i)^3]$ for each $i$.
  */
  Eigen::VectorXd sampMean = samps->Mean();
  std::cout << "\nSample Mean = \n" << sampMean.transpose() << std::endl;

  Eigen::VectorXd sampVar = samps->Variance();
  std::cout << "\nSample Variance = \n" << sampVar.transpose() << std::endl;

  Eigen::MatrixXd sampCov = samps->Covariance();
  std::cout << "\nSample Covariance = \n" << sampCov << std::endl;

  Eigen::VectorXd sampMom3 = samps->CentralMoment(3);
  std::cout << "\nSample Third Moment = \n" << sampMom3 << std::endl << std::endl;
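
  /***
  As a rough sanity check: assuming the DummyKernel passes every proposed state
  through, the QOI samples are essentially independent draws of 2*x with
  x ~ N(mu_prop, cov_prop). The sample mean should therefore be near
  2*mu_prop = (8, 4), the sample covariance near 4*cov_prop, and the third
  central moments near zero, since the Gaussian is symmetric.
  */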

  return 0;
}