MUQ  0.4.3
ExpensiveSamplingProblem.cpp
#include "MUQ/SamplingAlgorithms/ExpensiveSamplingProblem.h"

#include "MUQ/Utilities/RandomGenerator.h"
#include "MUQ/Utilities/AnyHelpers.h"

#include "MUQ/Modeling/Distributions/Gaussian.h"

namespace pt = boost::property_tree;
using namespace muq::Utilities;
using namespace muq::Modeling;
using namespace muq::Approximation;
using namespace muq::SamplingAlgorithms;

ExpensiveSamplingProblem::ExpensiveSamplingProblem(std::shared_ptr<muq::Modeling::ModPiece> const& target, std::shared_ptr<muq::Modeling::ModPiece> const& qoi, Eigen::VectorXd const& centroid, pt::ptree pt) : SamplingProblem(target, qoi), centroid(centroid) {
  // create the local regressor
  const std::string regOptions = pt.get<std::string>("RegressionOptions");
  reg = std::make_shared<LocalRegression>(target, pt.get_child(regOptions));

  regQoI = std::make_shared<LocalRegression>(qoi, pt.get_child(pt.get<std::string>("QoIRegressionOptions", regOptions)));

  // must be called after reg is created
  SetUp(pt);
}

ExpensiveSamplingProblem::ExpensiveSamplingProblem(std::shared_ptr<muq::Modeling::ModPiece> const& target, std::shared_ptr<muq::Modeling::ModPiece> const& qoi, pt::ptree pt) : SamplingProblem(target, qoi), centroid(Eigen::VectorXd::Zero(target->inputSizes(0))) {
  // create the local regressor
  const std::string regOptions = pt.get<std::string>("RegressionOptions");
  reg = std::make_shared<LocalRegression>(target, pt.get_child(regOptions));

  regQoI = std::make_shared<LocalRegression>(qoi, pt.get_child(pt.get<std::string>("QoIRegressionOptions", regOptions)));

  // must be called after reg is created
  SetUp(pt);
}

ExpensiveSamplingProblem::ExpensiveSamplingProblem(std::shared_ptr<muq::Modeling::ModPiece> const& target, pt::ptree pt) : SamplingProblem(target), centroid(Eigen::VectorXd::Zero(target->inputSizes(0))) {
  // create the local regressor
  reg = std::make_shared<LocalRegression>(target, pt.get_child(pt.get<std::string>("RegressionOptions")));

  // must be called after reg is created
  SetUp(pt);
}

ExpensiveSamplingProblem::ExpensiveSamplingProblem(std::shared_ptr<muq::Modeling::ModPiece> const& target, Eigen::VectorXd const& centroid, pt::ptree pt) : SamplingProblem(target), centroid(centroid) {
  // create the local regressor
  reg = std::make_shared<LocalRegression>(target, pt.get_child(pt.get<std::string>("RegressionOptions")));

  // must be called after reg is created
  SetUp(pt);
}
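
A minimal construction sketch (not part of this file), assuming a user-supplied ModPiece named logTarget that evaluates the log-target density; the options child name "MyRegression" is hypothetical and is left empty here, which assumes LocalRegression can fall back on default options:

  pt::ptree options;
  options.put("RegressionOptions", "MyRegression");  // name of the child holding the LocalRegression options (hypothetical)
  options.put_child("MyRegression", pt::ptree());    // empty child; assumes LocalRegression has usable defaults
  auto problem = std::make_shared<ExpensiveSamplingProblem>(logTarget, options);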

#if MUQ_HAS_PARCER
ExpensiveSamplingProblem::ExpensiveSamplingProblem(std::shared_ptr<muq::Modeling::ModPiece> const& target, boost::property_tree::ptree pt, std::shared_ptr<parcer::Communicator> comm) : SamplingProblem(target), centroid(Eigen::VectorXd::Zero(target->inputSizes(0))) {
  // create the local regressor
  reg = std::make_shared<LocalRegression>(target, pt.get_child(pt.get<std::string>("RegressionOptions")), comm);

  // must be called after reg is created
  SetUp(pt);
}

ExpensiveSamplingProblem::ExpensiveSamplingProblem(std::shared_ptr<muq::Modeling::ModPiece> const& target, Eigen::VectorXd const& centroid, boost::property_tree::ptree pt, std::shared_ptr<parcer::Communicator> comm) : SamplingProblem(target), centroid(centroid) {
  // create the local regressor
  reg = std::make_shared<LocalRegression>(target, pt.get_child(pt.get<std::string>("RegressionOptions")), comm);

  // must be called after reg is created
  SetUp(pt);
}
#endif

void ExpensiveSamplingProblem::SetUp(boost::property_tree::ptree& pt) {
  assert(reg);

  // can only have one input
  assert(target->numInputs==1);

  beta = std::pair<double, double>(pt.get<double>("BetaScale", 0.0), -pt.get<double>("BetaExponent", RAND_MAX));
  assert(beta.second<0.0);

  tau0 = pt.get<double>("FirstLevelLength", 1.0);

  gamma = std::pair<double, double>(std::log(pt.get<double>("GammaScale", 1.0)), pt.get<double>("GammaExponent", 1.0));
  assert(gamma.second>0.0);

  lyapunovPara.first = pt.get<double>("LyapunovScale", 1.0);
  lyapunovPara.second = pt.get<double>("LyapunovExponent", 1.0);
  eta = pt.get<double>("TailCorrection", 0.0);
}
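
SetUp reads a flat set of keys from the same property tree that also names the regressor options child in the constructors. A minimal sketch of a tree that exercises every option above, with illustrative values; the child name "MyRegression" is hypothetical:

  pt::ptree options;
  options.put("RegressionOptions", "MyRegression");
  options.put_child("MyRegression", pt::ptree());  // LocalRegression options, assumed to have defaults
  options.put("BetaScale", 0.0);                   // scale for random refinement; 0 disables it
  options.put("BetaExponent", 1.0);                // stored negated, so it must be positive
  options.put("FirstLevelLength", 1.0);            // tau0, length of the first error-threshold level
  options.put("GammaScale", 1.0);                  // stored as log(GammaScale)
  options.put("GammaExponent", 1.0);               // must be positive
  options.put("LyapunovScale", 1.0);
  options.put("LyapunovExponent", 1.0);
  options.put("TailCorrection", 0.0);              // eta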

double ExpensiveSamplingProblem::LogDensity(std::shared_ptr<SamplingState> const& state) {
  lastState = state;

  std::vector<Eigen::VectorXd> neighbors, results;

  assert(state->HasMeta("iteration"));
  assert(state->HasMeta("IsProposal"));

  step = AnyCast(state->meta["iteration"]);
  bool addThreshold = AnyCast(state->meta["IsProposal"]);

  double threshold = RefineSurrogate(step, state, neighbors, results);

  if( !addThreshold ) {
    lastLyapunov = LogLyapunovFunction(state->state[0]);
    lastThreshold = threshold;
  }

  assert(neighbors.size()==results.size());
  assert(neighbors.size()>=reg->kn);

  // augment the threshold: flip its sign if the Lyapunov function has increased relative to the last accepted point
  threshold *= (lastLyapunov<LogLyapunovFunction(state->state[0]) ? -1.0 : 1.0);

  // set cumulative refinement
  if( beta.first>0.0 ) { state->meta["cumulative random refinement"] = cumbeta; }
  state->meta["cumulative structural refinement"] = cumgamma;

  // evaluate the local regressor at the current state using the kn nearest neighbors
  const double logtarg = reg->EvaluateRegressor(state->state[0],
                                                std::vector<Eigen::VectorXd>(neighbors.begin(), neighbors.begin()+reg->kn),
                                                std::vector<Eigen::VectorXd>(results.begin(), results.begin()+reg->kn))(0);

  // proposals get a tail correction proportional to eta, the combined error thresholds, the state norm, and |logtarg|
  return logtarg + (addThreshold ? eta*(lastThreshold+threshold)*state->state[0].norm()*std::fabs(logtarg) : 0.0);
}

double ExpensiveSamplingProblem::LogLyapunovFunction(Eigen::VectorXd const& x) const {
  return lyapunovPara.first*std::pow((x-centroid).norm(), lyapunovPara.second);
}

void ExpensiveSamplingProblem::CheckNumNeighbors(std::shared_ptr<SamplingState> const& state) {
  while( reg->CacheSize()<reg->kn ) {
    auto gauss = std::make_shared<muq::Modeling::Gaussian>(state->state[0], std::pow(std::exp(gamma.first), 1.0/(1.0+reg->Order()))*Eigen::VectorXd::Ones(state->state[0].size()));
    const Eigen::VectorXd& x = gauss->Sample();
    reg->Add(x);
    if( regQoI ) { regQoI->Add(x); }
    ++cumkappa;
  }
}

void ExpensiveSamplingProblem::CheckNeighbors(std::shared_ptr<SamplingState> const& state,
                                              std::vector<Eigen::VectorXd>& neighbors,
                                              std::vector<Eigen::VectorXd>& results) const {
  reg->NearestNeighbors(state->state[0], neighbors, results);
  assert(neighbors.size()==results.size());
}

double ExpensiveSamplingProblem::ErrorThreshold(Eigen::VectorXd const& x) const {
  // log-scale threshold: LogLyapunovFunction(x) + log(GammaScale) - GammaExponent*log(level)
  return LogLyapunovFunction(x) + gamma.first - gamma.second*std::log((double)level);
}

double ExpensiveSamplingProblem::RefineSurrogate(std::shared_ptr<SamplingState> const& state,
                                                 double const radius,
                                                 std::vector<Eigen::VectorXd>& neighbors,
                                                 std::vector<Eigen::VectorXd>& results) {
  // compute the poisedness constant
  const std::tuple<Eigen::VectorXd, double, unsigned int>& lambda = reg->PoisednessConstant(state->state[0], neighbors);

  // if the point is already in the cache
  if( reg->InCache(std::get<0>(lambda)) ) {
    // choose a random point within the given radius of the current state
    Eigen::VectorXd point = Eigen::VectorXd::Random(state->state[0].size());
    point *= RandomGenerator::GetUniform()*radius/point.norm();
    point += state->state[0];
    // find the nearest cached neighbor to the new point
    int index = 0;
    double dist = RAND_MAX;
    for( unsigned int i=0; i<reg->kn; ++i ) {
      const double newdist = (point-neighbors[i]).norm();
      if( newdist<dist ) { dist = newdist; index = i; }
    }

    assert(dist>0.0);
    assert(dist<=2.0*radius); // the maximum possible distance is the diameter of the neighbor ball

    RefineSurrogate(point, index, neighbors, results);
  } else {
    RefineSurrogate(std::get<0>(lambda), std::get<2>(lambda), neighbors, results);
  }

  return std::get<1>(lambda);
}

double ExpensiveSamplingProblem::RefineSurrogate(unsigned int const step, std::shared_ptr<SamplingState> const& state, std::vector<Eigen::VectorXd>& neighbors, std::vector<Eigen::VectorXd>& results) {
  // make sure we have enough points
  CheckNumNeighbors(state);

  // get the nearest neighbors
  CheckNeighbors(state, neighbors, results);

  // get the error indicator (using the first kn neighbors)
  double error, radius;
  std::tie(error, radius) = reg->ErrorIndicator(state->state[0], neighbors);

  // random refinement
  if( RandomGenerator::GetUniform()<beta.first*std::pow((double)step, beta.second) ) {
    RefineSurrogate(state, radius, neighbors, results);
    ++cumbeta;

    // recompute the error indicator
    std::tie(error, radius) = reg->ErrorIndicator(state->state[0], std::vector<Eigen::VectorXd>(neighbors.begin(), neighbors.begin()+reg->kn));
  }

  // check to see if we should increment the level
  if( step>tau0*std::pow((double)level, 2.0*gamma.second) ) { ++level; }

  // compute (and store) the error threshold
  const double threshold = ErrorThreshold(state->state[0]);
  state->meta["error threshold"] = std::exp(threshold);
  state->meta["error indicator"] = std::exp(error);

  // structural refinement
  if( error>threshold ) {
    RefineSurrogate(state, radius, neighbors, results);
    ++cumgamma;
  }

  return std::exp(threshold);
}

void ExpensiveSamplingProblem::RefineSurrogate(Eigen::VectorXd const& point, unsigned int const index, std::vector<Eigen::VectorXd>& neighbors, std::vector<Eigen::VectorXd>& results) {
  assert(!reg->InCache(point));
  const Eigen::VectorXd& result = reg->Add(point);
  if( regQoI ) { regQoI->Add(point); }

  assert(neighbors.size()==results.size());
  assert(index<neighbors.size());

  // swap the newly evaluated point into position index; the displaced neighbor moves to the back
  neighbors.push_back(point);
  results.push_back(result);
  std::iter_swap(neighbors.end()-1, neighbors.begin()+index);
  std::iter_swap(results.end()-1, results.begin()+index);
}

unsigned int ExpensiveSamplingProblem::CacheSize() const { return reg->CacheSize(); }

void ExpensiveSamplingProblem::AddOptions(boost::property_tree::ptree & pt) const {
  pt.put("ReevaluateAcceptedDensity", true);
}

std::shared_ptr<SamplingState> ExpensiveSamplingProblem::QOI() {
  if( qoi==nullptr ) { return nullptr; }
  assert(regQoI);

  return std::make_shared<SamplingState>(regQoI->Evaluate(lastState->state));
}
Referenced declarations:

std::pair<double, double> beta - Parameters for random refinement.
unsigned int step - The current step in the MCMC chain.
std::pair<double, double> gamma - Parameters for structural refinement.
unsigned int cumkappa - Cumulative kappa refinements.
virtual void AddOptions(boost::property_tree::ptree &pt) const override
virtual std::shared_ptr<SamplingState> QOI() override
double lastLyapunov - The Lyapunov function at the most recently accepted point.
std::shared_ptr<muq::Approximation::LocalRegression> regQoI - The regression for the quantity of interest.
std::shared_ptr<muq::Approximation::LocalRegression> reg - The regression for the log-likelihood.
unsigned int cumbeta - Cumulative beta refinements.
double lastThreshold - The error threshold at the most recently accepted point.
const Eigen::VectorXd centroid - The centroid of the distribution (input parameter).
virtual double LogDensity(std::shared_ptr<SamplingState> const &state) override
double RefineSurrogate(unsigned int const step, std::shared_ptr<SamplingState> const &state, std::vector<Eigen::VectorXd> &neighbors, std::vector<Eigen::VectorXd> &results)
void CheckNumNeighbors(std::shared_ptr<SamplingState> const &state) - Check to make sure we have enough model evaluations.
double LogLyapunovFunction(Eigen::VectorXd const &x) const - Estimate the Lyapunov function.
ExpensiveSamplingProblem(std::shared_ptr<muq::Modeling::ModPiece> const &target, std::shared_ptr<muq::Modeling::ModPiece> const &qoi, Eigen::VectorXd const &centroid, boost::property_tree::ptree pt)
unsigned int level - The current error threshold level.
double ErrorThreshold(Eigen::VectorXd const &x) const - Compute the error threshold.
void SetUp(boost::property_tree::ptree &pt) - Set up the sampling problem.
void CheckNeighbors(std::shared_ptr<SamplingState> const &state, std::vector<Eigen::VectorXd> &neighbors, std::vector<Eigen::VectorXd> &results) const - Check to make sure we have enough model evaluations.
unsigned int cumgamma - Cumulative gamma refinements.
std::pair<double, double> lyapunovPara - The scaling and exponent for the Lyapunov function.
SamplingProblem - Class for sampling problems based purely on a density function.
std::shared_ptr<muq::Modeling::ModPiece> qoi
std::shared_ptr<muq::Modeling::ModPiece> target - The target distribution (the prior in the inference case).
std::shared_ptr<SamplingState> lastState
AnyCast - Class for easily casting boost::any's in assignment operations (AnyHelpers.h:34).