diff --git a/src/cmdstan/command.hpp b/src/cmdstan/command.hpp
index a16eeb8b34..ef33b2f429 100644
--- a/src/cmdstan/command.hpp
+++ b/src/cmdstan/command.hpp
@@ -449,8 +449,7 @@ int command(int argc, const char *argv[]) {
       }
     }
     try {
-      services_log_prob_grad(model, jacobian, params_r_ind, sig_figs,
-                             sample_writers[0].get_stream());
+      services_log_prob_grad(model, jacobian, params_r_ind, sample_writers[0]);
       return_code = return_codes::OK;
     } catch (const std::exception &e) {
       msg << "Error during log_prob calculation:" << std::endl;
diff --git a/src/cmdstan/command_helper.hpp b/src/cmdstan/command_helper.hpp
index ff5cbdcc23..2fa29e7062 100644
--- a/src/cmdstan/command_helper.hpp
+++ b/src/cmdstan/command_helper.hpp
@@ -4,7 +4,6 @@
 #include
 #include
 #include
-#include
 #include
 #include
 #include
@@ -580,18 +579,14 @@ std::vector<std::vector<double>> get_uparams_r(
  */
 void services_log_prob_grad(const stan::model::model_base &model, bool jacobian,
                             std::vector<std::vector<double>> &params_set,
-                            int sig_figs, std::ostream &output_stream) {
-  // header row
-  output_stream << std::setprecision(sig_figs) << "lp__,";
-  std::vector<std::string> p_names;
+                            stan::callbacks::writer &output) {
+  // header
+  std::vector<std::string> p_names{"lp__"};
   model.unconstrained_param_names(p_names, false, false);
-  for (size_t i = 0; i < p_names.size(); ++i) {
-    output_stream << "g_" << p_names[i];
-    if (i == p_names.size() - 1)
-      output_stream << "\n";
-    else
-      output_stream << ",";
-  }
+  std::transform(p_names.begin() + 1, p_names.end(), p_names.begin() + 1,
+                 [](std::string s) { return "g_" + s; });
+  output(p_names);
+
   // data row(s)
   std::vector<int> dummy_params_i;
   double lp;
@@ -604,10 +599,9 @@ void services_log_prob_grad(const stan::model::model_base &model, bool jacobian,
       lp = stan::model::log_prob_grad(model, params, dummy_params_i,
                                       gradients);
     }
-    output_stream << lp << ",";
-    std::copy(gradients.begin(), gradients.end() - 1,
-              std::ostream_iterator<double>(output_stream, ","));
-    output_stream << gradients.back() << "\n";
+    // unfortunate: var.grad clears the vector, so need to insert lp afterwards
+    gradients.insert(gradients.begin(), lp);
+    output(gradients);
   }
 }
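
Note on the interface this targets: stan::callbacks::writer exposes operator() overloads taking a vector of strings (a header row) and a vector of doubles (a data row), which is why services_log_prob_grad can now hand off p_names and gradients directly and drop all CSV formatting, with the sig_figs/std::setprecision handling presumably applied to the writer's underlying stream instead. Below is a rough standalone sketch of that contract; ToyWriter is a hypothetical stand-in for the CSV writer behind sample_writers[0], not code from this PR.

// Sketch only: mimics the two writer overloads the new code path relies on.
#include <iostream>
#include <string>
#include <vector>

struct ToyWriter {
  std::ostream &out;
  // header row: receives {"lp__", "g_<param>", ...} as built by std::transform
  void operator()(const std::vector<std::string> &names) {
    for (size_t i = 0; i < names.size(); ++i)
      out << names[i] << (i + 1 < names.size() ? "," : "\n");
  }
  // data row: receives {lp, gradients...} once per parameter set
  void operator()(const std::vector<double> &row) {
    for (size_t i = 0; i < row.size(); ++i)
      out << row[i] << (i + 1 < row.size() ? "," : "\n");
  }
};

int main() {
  ToyWriter writer{std::cout};
  writer({"lp__", "g_theta"});              // header row, as built in the diff
  std::vector<double> gradients{0.25};      // as filled by log_prob_grad
  double lp = -1.3;
  gradients.insert(gradients.begin(), lp);  // same lp-prepend trick as the diff
  writer(gradients);                        // prints "-1.3,0.25"
}

One benefit of this shape: the service code no longer cares whether rows end up as CSV, JSON, or anything else; any writer implementing these overloads works unchanged.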