This repository was archived by the owner on Oct 31, 2023. It is now read-only.

Commit

Testing filtering
rmcantin committed Jan 30, 2018
1 parent 9102f65 commit 10b22ae
Showing 7 changed files with 23 additions and 5 deletions.
6 changes: 6 additions & 0 deletions examples/bo_branin.cpp
@@ -38,6 +38,12 @@ int main(int nargs, char *args[])
par.random_seed = 0;
par.verbose_level = 1;
par.noise = 1e-10;
par.filtering_startup = 10;
par.filtering_interval = 2;
par.up_margin = 5;
par.low_margin = 1e-20;
par.kernel.name = "kSum(kNoise,kMaternARD5)";

//bayesopt::utils::ParamLoader::save("bo_branin.txt", par);
}

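For reference, the two margin parameters set above are rescaled in src/robust_filtering.cpp further down in this commit (up_margin = (100.0 - parameters.up_margin) / 100.0 and low_margin = parameters.low_margin / 100.0). With the values used in this example, a quick check gives:

\[
\text{up\_margin} = \frac{100 - 5}{100} = 0.95,
\qquad
\text{low\_margin} = \frac{10^{-20}}{100} = 10^{-22}.
\]

How these two levels turn into the thresholds f_low and f_up used during filtering is not visible in this diff; reading them as quantile levels of the robust model is an assumption based on the variable names.
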
2 changes: 1 addition & 1 deletion include/gauss_distribution.hpp
@@ -58,7 +58,7 @@ namespace bayesopt
double quantile(double p)
{
double x = boost::math::quantile(d_,p);
- return (x - mean_) / std_;
+ return x * std_ + mean_;
};


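The quantile fix above (and the matching one in student_t_distribution.hpp below) is the standard location-scale transformation. Assuming d_ is the standard zero-mean, unit-scale distribution and mean_, std_ hold the stored location and scale, which is what the corrected code implies: with \(\mu\) = mean_, \(\sigma\) = std_, and \(F_Z^{-1}(p)\) the standard quantile returned by boost::math::quantile(d_, p),

\[
x_p = \mu + \sigma \, F_Z^{-1}(p).
\]

The old expression (x - mean_) / std_ applied the inverse (standardizing) map and therefore returned a z-score rather than a quantile on the original scale.
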
3 changes: 2 additions & 1 deletion include/student_t_distribution.hpp
@@ -29,6 +29,7 @@
// for student t distribution
#include <boost/math/distributions/students_t.hpp>
#include "prob_distribution.hpp"
#include "log.hpp"

namespace bayesopt
{
@@ -69,7 +70,7 @@ namespace bayesopt
double quantile(double p)
{
double x = boost::math::quantile(d_,p);
- return (x - mean_) / std_;
+ return x * std_ + mean_;
};

/**
4 changes: 4 additions & 0 deletions python/demo_quad.py
@@ -50,6 +50,10 @@ def evaluateSample(self,Xin):
params['n_iterations'] = 50
params['n_iter_relearn'] = 5
params['n_init_samples'] = 2
params['filtering_startup'] = 10
params['filtering_interval'] = 2
params['up_margin'] = 5
params['low_margin'] = 1e-10

print("Callback implementation")

4 changes: 2 additions & 2 deletions src/posterior_empirical.cpp
@@ -50,10 +50,10 @@ namespace bayesopt
else
{
kOptimizer->setAlgorithm(COMBINED);
- kOptimizer->setMaxEvals(20*nhp);
+ kOptimizer->setMaxEvals(100*nhp);
}
//Limits in log space
- kOptimizer->setLimits(svectord(nhp,-6.0),svectord(nhp,1.0));
+ kOptimizer->setLimits(svectord(nhp,-6.0),svectord(nhp,6.0));
}

EmpiricalBayes::~EmpiricalBayes()
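The two changes here widen the empirical-Bayes hyperparameter search: the COMBINED optimizer budget grows from 20 to 100 evaluations per hyperparameter, and the upper limit in log space rises from 1.0 to 6.0. Assuming natural logarithms (the base is not visible in this diff), the per-hyperparameter range changes roughly as:

\[
[e^{-6},\, e^{1}] \approx [2.5 \times 10^{-3},\; 2.7]
\;\longrightarrow\;
[e^{-6},\, e^{6}] \approx [2.5 \times 10^{-3},\; 4.0 \times 10^{2}].
\]
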
7 changes: 7 additions & 0 deletions src/robust_filtering.cpp
@@ -30,6 +30,7 @@ namespace bayesopt
up_margin = (100.0 - parameters.up_margin) / 100.0;
low_margin = parameters.low_margin / 100.0;
par2.surr_name = "sStudentTProcessNIG";
par2.noise = 1e-3;
mRobustModel.reset(PosteriorModel::create(dim,par2,eng));
}

@@ -55,12 +56,18 @@
{
mFilteredData->mX.push_back(XX[i]);
utils::append(mFilteredData->mY, YY[i]);
FILE_LOG(logINFO) << "Keeped value:" << YY[i] << " with thresholds:" << f_low << ", " << f_up << " and mean:" << pd->getMean();
}
else
{
FILE_LOG(logINFO) << "REMOVED value:" << YY[i] << " with thresholds:" << f_low << ", " << f_up;
}
}
if (mFilteredData->getNSamples() <= n_points * 0.5)
{
mFilteredData->mX = XX;
mFilteredData->mY = YY;
FILE_LOG(logINFO) << "TOO MANY POINTS REMOVED.";
}

return mFilteredData.get();
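The hunk above only shows the keep/discard bookkeeping; how the thresholds f_low and f_up are derived from the robust model is not part of this diff. Below is a minimal standalone sketch of that bookkeeping, not the BayesOpt implementation itself: the keep condition (value inside [f_low, f_up]) is an assumption inferred from the log messages, and Dataset and filterSamples are hypothetical names used only for illustration.

#include <cstddef>
#include <iostream>
#include <vector>

// Hypothetical container standing in for the (mX, mY) members in the diff.
struct Dataset
{
  std::vector<std::vector<double> > X;  // sample locations
  std::vector<double> Y;                // observed values
};

// Keep samples whose value lies inside [f_low, f_up] (assumed condition);
// if half or more of the data would be dropped, fall back to the full set,
// mirroring the safety net added at the end of the hunk above.
Dataset filterSamples(const Dataset& data, double f_low, double f_up)
{
  Dataset filtered;
  for (std::size_t i = 0; i < data.Y.size(); ++i)
  {
    if (data.Y[i] >= f_low && data.Y[i] <= f_up)
    {
      filtered.X.push_back(data.X[i]);
      filtered.Y.push_back(data.Y[i]);
    }
  }
  if (filtered.Y.size() <= data.Y.size() * 0.5)  // too many points removed
  {
    filtered = data;                             // revert to the full data
  }
  return filtered;
}

int main()
{
  Dataset data;
  const double values[] = {0.1, 0.2, 5.0, 0.3, 40.0, 0.25};
  for (std::size_t i = 0; i < 6; ++i)
  {
    data.X.push_back(std::vector<double>(2, values[i]));  // dummy 2-D points
    data.Y.push_back(values[i]);
  }
  Dataset kept = filterSamples(data, 0.0, 1.0);
  std::cout << "kept " << kept.Y.size() << " of " << data.Y.size()
            << " samples" << std::endl;
  return 0;
}
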
2 changes: 1 addition & 1 deletion src/student_t_distribution.cpp
@@ -33,7 +33,7 @@ namespace bayesopt
double StudentTDistribution::negativeProbabilityOfImprovement(double min,
double epsilon)
{
- return -cdf(d_,(min - mean_ + epsilon)/std_);
+ return -boost::math::cdf(d_,(min - mean_ + epsilon)/std_);
} // negativeProbabilityOfImprovement


