MUQ  0.4.3
MALASampler.cpp
Go to the documentation of this file.
5 
8 
12 
13 namespace pt = boost::property_tree;
14 using namespace muq::Modeling;
15 using namespace muq::SamplingAlgorithms;
16 using namespace muq::Utilities;
17 
18 class Model : public ModPiece {
19 public:
20  inline static const unsigned int dim = 2;
21 
22  inline Model() : ModPiece(Eigen::VectorXi::Constant(1, dim), Eigen::VectorXi::Constant(1, dim)) {}
23 
24  virtual ~Model() = default;
25 private:
26 
27  inline virtual void EvaluateImpl(ref_vector<Eigen::VectorXd> const& inputs) override {
28  const Eigen::VectorXd& in = inputs[0].get();
29 
30  outputs.resize(1);
31  outputs[0] = Eigen::VectorXd::Zero(dim);
32  for( unsigned int i=0; i<dim; ++i ) {
33  outputs[0][i] = 0.5*in[i]*in[i] + std::sin(M_PI*in[(i+1)%dim]);
34  }
35  }
36 
37  inline virtual void JacobianImpl(unsigned int const inwrt, unsigned int const outwrt, ref_vector<Eigen::VectorXd> const& inputs) override {
38  const Eigen::VectorXd& in = inputs[0].get();
39 
40  // since we only have one in/output
41  assert(inwrt==0); assert(outwrt==0);
42 
43  jacobian = Eigen::MatrixXd::Zero(dim, dim);
44  for( unsigned int i=0; i<dim; ++i ) {
45  jacobian(i, i) = in[i];
46  jacobian(i, (i+1)%dim) = M_PI*std::cos(M_PI*in[(i+1)%dim]);
47  }
48  }
49 };
50 
51 int main() {
52  // create the forward model
53  auto model = std::make_shared<Model>();
54 
55  // check the forward model implementation
56  {
57  // example input
58  //const Eigen::VectorXd input = Eigen::VectorXd::Random(Model::dim);
59  const Eigen::VectorXd input = Eigen::VectorXd::Zero(Model::dim);
60  std::cout << "input vector: " << input.transpose() << std::endl;
61 
62  // evaluate the model
63  const Eigen::VectorXd output = model->Evaluate(input) [0];
64  std::cout << "output vector: " << output.transpose() << std::endl;
65 
66  const Eigen::MatrixXd jac = model->Jacobian(0, 0, input);
67  std::cout << "Jacobian: " << std::endl << jac << std::endl;
68  const Eigen::MatrixXd jacfd = model->JacobianByFD(0, 0, input);
69  std::cout << "Jacobian (using finite differences): " << std::endl << jacfd << std::endl;
70 
71  const Eigen::VectorXd sens = Eigen::VectorXd::Random(Model::dim);
72  const Eigen::VectorXd grad = model->Gradient(0, 0, input, sens);
73  std::cout << "Gradient: " << grad.transpose() << std::endl;
74  const Eigen::VectorXd gradfd = model->GradientByFD(0, 0, std::vector<Eigen::VectorXd>({input}), sens);
75  std::cout << "Gradient (using finite differences): " << gradfd.transpose() << std::endl;
76 
77  const Eigen::VectorXd vec = Eigen::VectorXd::Random(Model::dim);
78  const Eigen::VectorXd jacapp = model->ApplyJacobian(0, 0, input, vec);
79  std::cout << "Jacobian action: " << jacapp.transpose() << std::endl;
80  const Eigen::VectorXd jacappfd = model->ApplyJacobianByFD(0, 0, std::vector<Eigen::VectorXd>({input}), vec);
81  std::cout << "Jacobian action: (using finite differences): " << jacappfd.transpose() << std::endl;
82 
83  std::cout << std::endl << "-----------" << std::endl << std::endl;
84  }
85 
86  // number of Monte Carlo samples
87  const unsigned int N = 1e6;
88 
89  // parameters for the sampler
90  pt::ptree pt;
91  pt.put("NumSamples", N);
92  pt.put("BurnIn", (unsigned int)(0.1*N));
93  pt.put("PrintLevel", 2);
94  pt.put("KernelList", "Kernel1");
95  pt.put("Kernel1.Method","MHKernel");
96  pt.put("Kernel1.Proposal", "MyProposal");
97  pt.put("Kernel1.MyProposal.Method", "InfMALAProposal");
98  pt.put("Kernel1.MyProposal.Beta", 0.5);
99  pt.put("Kernel1.MyProposal.StepSize", 0.5);
100  pt.put("Kernel1.MyProposal.PriorNode", "Prior");
101 
102  // create a Gaussian distribution---the sampling problem is built around characterizing this distribution
103  const Eigen::VectorXd mu = Eigen::VectorXd::Zero(Model::dim);
104  const Eigen::MatrixXd priorCov = Eigen::MatrixXd::Identity(Model::dim, Model::dim);
105  auto prior = std::make_shared<Gaussian>(mu, priorCov);
106 
107  Eigen::VectorXd data = Eigen::VectorXd::Zero(Model::dim) + 0.01*Eigen::VectorXd::Random(Model::dim);
108  Eigen::MatrixXd likeCov = 0.1*Eigen::MatrixXd::Identity(Model::dim, Model::dim);
109  auto likelihood = std::make_shared<Gaussian>(data, likeCov);
110 
111  auto graph = std::make_shared<WorkGraph>();
112  graph->AddNode(std::make_shared<IdentityOperator>(Model::dim), "Parameters");
113  graph->AddNode(prior->AsDensity(), "Prior");
114  graph->AddNode(likelihood->AsDensity(), "Likelihood");
115  graph->AddNode(model, "Model");
116 
117  graph->AddEdge("Parameters", 0, "Prior", 0);
118  graph->AddEdge("Parameters", 0, "Model", 0);
119  graph->AddEdge("Model", 0, "Likelihood", 0);
120 
121  graph->AddNode(std::make_shared<DensityProduct>(2), "Posterior");
122  graph->AddEdge("Prior", 0, "Posterior", 0);
123  graph->AddEdge("Likelihood", 0, "Posterior", 1);
124 
125  auto posterior = graph->CreateModPiece("Posterior");
126 
127  // create a sampling problem
128  auto problem = std::make_shared<SamplingProblem>(posterior);
129 
130  // starting point for the MCMC chain
131  std::vector<Eigen::VectorXd> start(1);
132  start.at(0) = mu;
133 
134  // create an instance of MCMC and do the sampling
135  auto mcmc = std::make_shared<SingleChainMCMC>(pt, problem);
136  std::shared_ptr<SampleCollection> samps = mcmc->Run(start);
137 
138  const Eigen::VectorXd sampMean = samps->Mean();
139  const Eigen::MatrixXd sampCov = samps->Covariance();
140 
141  std::cout << std::endl;
142  std::cout << "Mean: " << sampMean.transpose() << std::endl;
143  std::cout << "Cov: " << std::endl << sampCov << std::endl;
144  std::cout << std::endl;
145  std::cout << std::endl;
146 }
int main()
Definition: MALASampler.cpp:51
Provides an abstract interface for defining vector-valued model components.
Definition: ModPiece.h:148
std::vector< std::reference_wrapper< const T > > ref_vector
A vector of const reference wrappers to objects of type T.
Definition: WorkPiece.h:37