Skip to content
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 7 additions & 4 deletions src/stan/services/optimize/bfgs.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -83,13 +83,15 @@ int bfgs(Model& model, const stan::io::var_context& init,
bfgs._conv_opts.maxIts = num_iterations;

double lp = bfgs.logp();
int ret = 0;

std::stringstream initial_msg;
initial_msg << "Initial log joint probability = " << lp;
logger.info(initial_msg);

std::vector<std::string> names;
names.push_back("lp__");
names.push_back("converged__");
model.constrained_param_names(names, true, true);
parameter_writer(names);

Expand All @@ -109,10 +111,9 @@ int bfgs(Model& model, const stan::io::var_context& init,
if (msg.str().length() > 0)
logger.info(msg);

values.insert(values.begin(), lp);
values.insert(values.begin(), {lp, static_cast<double>(ret)});
parameter_writer(values);
}
int ret = 0;

try {
while (ret == 0) {
Expand Down Expand Up @@ -168,7 +169,7 @@ int bfgs(Model& model, const stan::io::var_context& init,
if (msg.str().length() > 0)
logger.info(msg);

values.insert(values.begin(), lp);
values.insert(values.begin(), {lp, static_cast<double>(ret)});
parameter_writer(values);
}
}
Expand All @@ -192,7 +193,7 @@ int bfgs(Model& model, const stan::io::var_context& init,
}
if (msg.str().length() > 0)
logger.info(msg);
values.insert(values.begin(), lp);
values.insert(values.begin(), {lp, static_cast<double>(ret)});
parameter_writer(values);
}

Expand All @@ -201,10 +202,12 @@ int bfgs(Model& model, const stan::io::var_context& init,
if (ret >= 0) {
logger.info("Optimization terminated normally: ");
logger.info(" " + error_string);
parameter_writer("Optimization terminated normally: " + error_string);
return_code = error_codes::OK;
} else {
logger.error("Optimization terminated with error: ");
logger.error(" " + error_string);
parameter_writer("Optimization terminated with error: " + error_string);
return_code = error_codes::SOFTWARE;
}

Expand Down
11 changes: 7 additions & 4 deletions src/stan/services/optimize/lbfgs.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -87,13 +87,15 @@ int lbfgs(Model& model, const stan::io::var_context& init,
lbfgs._conv_opts.maxIts = num_iterations;

double lp = lbfgs.logp();
int ret = 0;

std::stringstream initial_msg;
initial_msg << "Initial log joint probability = " << lp;
logger.info(initial_msg);

std::vector<std::string> names;
names.push_back("lp__");
names.push_back("converged__");
model.constrained_param_names(names, true, true);
parameter_writer(names);

Expand All @@ -104,10 +106,9 @@ int lbfgs(Model& model, const stan::io::var_context& init,
if (msg.str().length() > 0)
logger.info(msg);

values.insert(values.begin(), lp);
values.insert(values.begin(), {lp, static_cast<double>(ret)});
parameter_writer(values);
}
int ret = 0;

try {
while (ret == 0) {
Expand Down Expand Up @@ -161,7 +162,7 @@ int lbfgs(Model& model, const stan::io::var_context& init,
if (msg.str().length() > 0)
logger.info(msg);

values.insert(values.begin(), lp);
values.insert(values.begin(), {lp, static_cast<double>(ret)});
parameter_writer(values);
}
}
Expand All @@ -186,7 +187,7 @@ int lbfgs(Model& model, const stan::io::var_context& init,
if (msg.str().length() > 0)
logger.info(msg);

values.insert(values.begin(), lp);
values.insert(values.begin(), {lp, static_cast<double>(ret)});
parameter_writer(values);
}

Expand All @@ -196,10 +197,12 @@ int lbfgs(Model& model, const stan::io::var_context& init,
if (ret >= 0) {
logger.info("Optimization terminated normally: ");
logger.info(" " + error_string);
parameter_writer("Optimization terminated normally: " + error_string);
return_code = error_codes::OK;
} else {
logger.error("Optimization terminated with error: ");
logger.error(" " + error_string);
parameter_writer("Optimization terminated with error: " + error_string);
return_code = error_codes::SOFTWARE;
}

Expand Down
6 changes: 4 additions & 2 deletions src/stan/services/optimize/newton.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,9 @@ int newton(Model& model, const stan::io::var_context& init,
logger.info(msg);

std::vector<std::string> names;

names.push_back("lp__");
names.push_back("converged__");
model.constrained_param_names(names, true, true);
parameter_writer(names);

Expand All @@ -97,7 +99,7 @@ int newton(Model& model, const stan::io::var_context& init,
model.write_array(rng, cont_vector, disc_vector, values, true, true, &ss);
if (ss.str().length() > 0)
logger.info(ss);
values.insert(values.begin(), lp);
values.insert(values.begin(), {lp, 0});
parameter_writer(values);
}
interrupt();
Expand All @@ -121,7 +123,7 @@ int newton(Model& model, const stan::io::var_context& init,
model.write_array(rng, cont_vector, disc_vector, values, true, true, &ss);
if (ss.str().length() > 0)
logger.info(ss);
values.insert(values.begin(), lp);
values.insert(values.begin(), {lp, 0});
Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Newton converges whenever the difference in lp is less than 1e-8. So why don't we just have an `int ret = -1` at the beginning of the loop, like in lbfgs? Then, whenever `if (std::fabs(lp - lastlp) <= 1e-8)` goes off, we set it to 0, so we know that Newton converged.

Copy link
Copy Markdown
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Good suggestion; I've updated it to use the same numeric codes as lbfgs/bfgs for the two possible outcomes (absolute tolerance reached and max iterations reached).

parameter_writer(values);
}
return error_codes::OK;
Expand Down
14 changes: 8 additions & 6 deletions src/test/unit/services/optimize/bfgs_jacobian_test.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -34,10 +34,11 @@ TEST_F(ServicesOptimize, withJacobian) {
EXPECT_TRUE(logger.find("Optimization terminated normally: "));
EXPECT_FLOAT_EQ(return_code, 0);

ASSERT_EQ(2, parameter.names_.size());
ASSERT_EQ(3, parameter.names_.size());
EXPECT_EQ("lp__", parameter.names_[0]);
EXPECT_EQ("sigma", parameter.names_[1]);
EXPECT_NEAR((3 + std::sqrt(13)) / 2, parameter.states_.back()[1], 0.0001);
EXPECT_EQ("converged__", parameter.names_[1]);
EXPECT_EQ("sigma", parameter.names_[2]);
EXPECT_NEAR((3 + std::sqrt(13)) / 2, parameter.states_.back()[2], 0.0001);
EXPECT_GT(interrupt.call_count(), 0);
}

Expand All @@ -58,9 +59,10 @@ TEST_F(ServicesOptimize, withoutJacobian) {
EXPECT_TRUE(logger.find("Optimization terminated normally: "));
EXPECT_FLOAT_EQ(return_code, 0);

ASSERT_EQ(2, parameter.names_.size());
ASSERT_EQ(3, parameter.names_.size());
EXPECT_EQ("lp__", parameter.names_[0]);
EXPECT_EQ("sigma", parameter.names_[1]);
EXPECT_NEAR(3, parameter.states_.back()[1], 0.0001);
EXPECT_EQ("converged__", parameter.names_[1]);
EXPECT_EQ("sigma", parameter.names_[2]);
EXPECT_NEAR(3, parameter.states_.back()[2], 0.0001);
EXPECT_GT(interrupt.call_count(), 1);
}
15 changes: 8 additions & 7 deletions src/test/unit/services/optimize/bfgs_test.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -40,20 +40,21 @@ TEST_F(ServicesOptimize, rosenbrock) {

EXPECT_EQ("0,0\n", init_ss.str());

ASSERT_EQ(3, parameter.names_.size());
ASSERT_EQ(4, parameter.names_.size());
EXPECT_EQ("lp__", parameter.names_[0]);
EXPECT_EQ("x", parameter.names_[1]);
EXPECT_EQ("y", parameter.names_[2]);
EXPECT_EQ("converged__", parameter.names_[1]);
EXPECT_EQ("x", parameter.names_[2]);
EXPECT_EQ("y", parameter.names_[3]);

EXPECT_EQ(20, parameter.states_.size());
EXPECT_FLOAT_EQ(0, parameter.states_.front()[1])
<< "initial value should be (0, 0)";
EXPECT_FLOAT_EQ(0, parameter.states_.front()[2])
<< "initial value should be (0, 0)";
EXPECT_FLOAT_EQ(1, parameter.states_.back()[1])
<< "optimal value should be (1, 1)";
EXPECT_FLOAT_EQ(0, parameter.states_.front()[3])
<< "initial value should be (0, 0)";
EXPECT_FLOAT_EQ(1, parameter.states_.back()[2])
<< "optimal value should be (1, 1)";
EXPECT_FLOAT_EQ(1, parameter.states_.back()[3])
<< "optimal value should be (1, 1)";
EXPECT_FLOAT_EQ(return_code, 0);
EXPECT_EQ(19, interrupt.call_count());
}
14 changes: 8 additions & 6 deletions src/test/unit/services/optimize/lbfgs_jacobian_test.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -35,10 +35,11 @@ TEST_F(ServicesOptimize, with_jacobian) {
EXPECT_TRUE(logger.find("Optimization terminated normally: "));
EXPECT_FLOAT_EQ(return_code, 0);

ASSERT_EQ(2, parameter.names_.size());
ASSERT_EQ(3, parameter.names_.size());
EXPECT_EQ("lp__", parameter.names_[0]);
EXPECT_EQ("sigma", parameter.names_[1]);
EXPECT_NEAR((3 + std::sqrt(13)) / 2, parameter.states_.back()[1], 0.0001);
EXPECT_EQ("converged__", parameter.names_[1]);
EXPECT_EQ("sigma", parameter.names_[2]);
EXPECT_NEAR((3 + std::sqrt(13)) / 2, parameter.states_.back()[2], 0.0001);
}

TEST_F(ServicesOptimize, without_jacobian) {
Expand All @@ -58,8 +59,9 @@ TEST_F(ServicesOptimize, without_jacobian) {
EXPECT_TRUE(logger.find("Optimization terminated normally: "));
EXPECT_FLOAT_EQ(return_code, 0);

ASSERT_EQ(2, parameter.names_.size());
ASSERT_EQ(3, parameter.names_.size());
EXPECT_EQ("lp__", parameter.names_[0]);
EXPECT_EQ("sigma", parameter.names_[1]);
EXPECT_NEAR(3, parameter.states_.back()[1], 0.0001);
EXPECT_EQ("converged__", parameter.names_[1]);
EXPECT_EQ("sigma", parameter.names_[2]);
EXPECT_NEAR(3, parameter.states_.back()[2], 0.0001);
}
15 changes: 8 additions & 7 deletions src/test/unit/services/optimize/lbfgs_test.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -40,19 +40,20 @@ TEST_F(ServicesOptimize, rosenbrock) {

EXPECT_EQ("0,0\n", init_ss.str());

ASSERT_EQ(3, parameter.names_.size());
ASSERT_EQ(4, parameter.names_.size());
EXPECT_EQ("lp__", parameter.names_[0]);
EXPECT_EQ("x", parameter.names_[1]);
EXPECT_EQ("y", parameter.names_[2]);
EXPECT_EQ("converged__", parameter.names_[1]);
EXPECT_EQ("x", parameter.names_[2]);
EXPECT_EQ("y", parameter.names_[3]);

EXPECT_EQ(23, parameter.states_.size());
EXPECT_FLOAT_EQ(0, parameter.states_.front()[1])
<< "initial value should be (0, 0)";
EXPECT_FLOAT_EQ(0, parameter.states_.front()[2])
<< "initial value should be (0, 0)";
EXPECT_FLOAT_EQ(0.99998301, parameter.states_.back()[1])
EXPECT_FLOAT_EQ(0, parameter.states_.front()[3])
<< "initial value should be (0, 0)";
EXPECT_FLOAT_EQ(0.99998301, parameter.states_.back()[2])
<< "optimal value should be (1, 1)";
EXPECT_FLOAT_EQ(0.99996597, parameter.states_.back()[2])
EXPECT_FLOAT_EQ(0.99996597, parameter.states_.back()[3])
<< "optimal value should be (1, 1)";
EXPECT_FLOAT_EQ(return_code, 0);
EXPECT_EQ(22, interrupt.call_count());
Expand Down
14 changes: 8 additions & 6 deletions src/test/unit/services/optimize/newton_jacobian_test.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -32,10 +32,11 @@ TEST_F(ServicesOptimize, withJacobian) {

EXPECT_FLOAT_EQ(return_code, 0);

ASSERT_EQ(2, parameter.names_.size());
ASSERT_EQ(3, parameter.names_.size());
EXPECT_EQ("lp__", parameter.names_[0]);
EXPECT_EQ("sigma", parameter.names_[1]);
EXPECT_NEAR((3 + std::sqrt(13)) / 2, parameter.states_.back()[1], 0.001);
EXPECT_EQ("converged__", parameter.names_[1]);
EXPECT_EQ("sigma", parameter.names_[2]);
EXPECT_NEAR((3 + std::sqrt(13)) / 2, parameter.states_.back()[2], 0.001);
EXPECT_GT(interrupt.call_count(), 0);
}

Expand All @@ -54,9 +55,10 @@ TEST_F(ServicesOptimize, withoutJacobian) {

EXPECT_FLOAT_EQ(return_code, 0);

ASSERT_EQ(2, parameter.names_.size());
ASSERT_EQ(3, parameter.names_.size());
EXPECT_EQ("lp__", parameter.names_[0]);
EXPECT_EQ("sigma", parameter.names_[1]);
EXPECT_NEAR(3, parameter.states_.back()[1], 0.001);
EXPECT_EQ("converged__", parameter.names_[1]);
EXPECT_EQ("sigma", parameter.names_[2]);
EXPECT_NEAR(3, parameter.states_.back()[2], 0.001);
EXPECT_GT(interrupt.call_count(), 0);
}
26 changes: 14 additions & 12 deletions src/test/unit/services/optimize/newton_test.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -36,20 +36,21 @@ TEST_F(ServicesOptimize, rosenbrock) {
EXPECT_EQ(1, logger.find("Initial log joint probability = -1"));
EXPECT_EQ(1, logger.find("Iteration 1. Log joint probability ="));

ASSERT_EQ(3, parameter.names_.size());
ASSERT_EQ(4, parameter.names_.size());
EXPECT_EQ("lp__", parameter.names_[0]);
EXPECT_EQ("x", parameter.names_[1]);
EXPECT_EQ("y", parameter.names_[2]);
EXPECT_EQ("converged__", parameter.names_[1]);
EXPECT_EQ("x", parameter.names_[2]);
EXPECT_EQ("y", parameter.names_[3]);

EXPECT_GT(parameter.states_.size(), 0);
EXPECT_FLOAT_EQ(0, parameter.states_.front()[1])
<< "initial value should be (0, 0)";
EXPECT_FLOAT_EQ(0, parameter.states_.front()[2])
<< "initial value should be (0, 0)";
EXPECT_NEAR(1, parameter.states_.back()[1], 1e-3)
<< "optimal value should be (1, 1)";
EXPECT_FLOAT_EQ(0, parameter.states_.front()[3])
<< "initial value should be (0, 0)";
EXPECT_NEAR(1, parameter.states_.back()[2], 1e-3)
<< "optimal value should be (1, 1)";
EXPECT_NEAR(1, parameter.states_.back()[3], 1e-3)
<< "optimal value should be (1, 1)";
EXPECT_FLOAT_EQ(return_code, 0);
EXPECT_LT(0, interrupt.call_count());
}
Expand All @@ -75,16 +76,17 @@ TEST_F(ServicesOptimize, rosenbrock_no_save_iterations) {

EXPECT_EQ("0,0\n", init_ss.str());

ASSERT_EQ(3, parameter.names_.size());
ASSERT_EQ(4, parameter.names_.size());
EXPECT_EQ("lp__", parameter.names_[0]);
EXPECT_EQ("x", parameter.names_[1]);
EXPECT_EQ("y", parameter.names_[2]);
EXPECT_EQ("converged__", parameter.names_[1]);
EXPECT_EQ("x", parameter.names_[2]);
EXPECT_EQ("y", parameter.names_[3]);

EXPECT_EQ(1, parameter.states_.size());
EXPECT_NEAR(1, parameter.states_.back()[1], 1e-3)
<< "optimal value should be (1, 1)";
EXPECT_NEAR(1, parameter.states_.back()[2], 1e-3)
<< "optimal value should be (1, 1)";
EXPECT_NEAR(1, parameter.states_.back()[3], 1e-3)
<< "optimal value should be (1, 1)";
EXPECT_FLOAT_EQ(return_code, 0);
EXPECT_LT(0, interrupt.call_count());
}
Loading