/// squared difference between y and yhat (element-wise)
VectorXf squared_difference(const VectorXf& y, const VectorXf& yhat)
{
    return (yhat - y).array().pow(2);
}

VectorXf squared_difference(const VectorXf& y, shared_ptr<CLabels>& labels,
                            const vector<float>& weights)
{
    SGVector<double> _tmp =
        dynamic_pointer_cast<sh::CRegressionLabels>(labels)->get_labels();
    SGVector<float> tmp(_tmp.begin(), _tmp.end());
    Map<VectorXf> yhat(tmp.data(), tmp.size());
    return squared_difference(y, yhat);
}
/// derivative of the squared difference with respect to yhat
VectorXf d_squared_difference(const VectorXf& y, const VectorXf& yhat)
{
    return 2 * (yhat - y);
}

VectorXf d_squared_difference(const VectorXf& y, shared_ptr<CLabels>& labels,
                              const vector<float>& weights)
{
    SGVector<double> _tmp =
        dynamic_pointer_cast<sh::CRegressionLabels>(labels)->get_labels();
    SGVector<float> tmp(_tmp.begin(), _tmp.end());
    Map<VectorXf> yhat(tmp.data(), tmp.size());
    return d_squared_difference(y, yhat);
}
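/// Usage sketch added for this listing (illustrative only, not part of the
/// original sources): the element-wise squared residuals and their derivative
/// with respect to yhat.
/// @code
/// VectorXf y(3), yhat(3);
/// y    << 1.0, 2.0, 3.0;
/// yhat << 1.5, 1.5, 3.0;
/// VectorXf sq   = squared_difference(y, yhat);    // [0.25, 0.25, 0.0]
/// VectorXf grad = d_squared_difference(y, yhat);  // [1.0, -1.0, 0.0]
/// @endcode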
/// mean squared error
float mse(const VectorXf& y, const VectorXf& yhat, VectorXf& loss,
          const vector<float>& weights)
{
    loss = (yhat - y).array().pow(2);
    return loss.mean();
}

float mse_label(const VectorXf& y,
                const shared_ptr<CLabels>& labels, VectorXf& loss,
                const vector<float>& weights)
{
    SGVector<double> _tmp =
        dynamic_pointer_cast<sh::CRegressionLabels>(labels)->get_labels();
    SGVector<float> tmp(_tmp.begin(), _tmp.end());
    Map<VectorXf> yhat(tmp.data(), tmp.size());
    return mse(y, yhat, loss, weights);
}
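/// Usage sketch added for this listing (illustrative only): mse() fills `loss`
/// with the per-sample squared errors and returns their mean.
/// @code
/// VectorXf y(2), yhat(2), loss;
/// y    << 0.0, 2.0;
/// yhat << 1.0, 4.0;
/// float m = mse(y, yhat, loss, {});  // loss = [1, 4], m = 2.5
/// @endcode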
/// log loss (binary)
VectorXf log_loss(const VectorXf& y, const VectorXf& yhat,
                  const vector<float>& class_weights)
{
    float eps = pow(10,-10);

    VectorXf loss;
    float sum_weights = 0;
    loss.resize(y.rows());
    for (unsigned i = 0; i < y.rows(); ++i)
    {
        // clip predictions away from 0 and 1, where the log is undefined
        if (yhat(i) < eps || 1 - yhat(i) < eps)
            loss(i) = -(y(i)*log(eps) + (1-y(i))*log(1-eps));
        else
            loss(i) = -(y(i)*log(yhat(i)) + (1-y(i))*log(1-yhat(i)));

        if (!class_weights.empty())
        {
            loss(i) = loss(i) * class_weights.at(y(i));
            sum_weights += class_weights.at(y(i));
        }
    }
    // rescale weighted losses so their overall scale matches the unweighted case
    if (sum_weights > 0)
        loss = loss.array() / sum_weights * y.size();

    return loss;
}
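/// Usage sketch added for this listing (illustrative only): per-sample binary
/// cross entropy; predictions are clipped to [eps, 1-eps] before taking logs.
/// @code
/// VectorXf y(3), yhat(3);
/// y    << 0, 1, 1;
/// yhat << 0.1, 0.9, 0.5;
/// VectorXf l = log_loss(y, yhat, {});
/// // l = [-log(0.9), -log(0.9), -log(0.5)] ~= [0.105, 0.105, 0.693]
/// @endcode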
VectorXf log_loss(const VectorXf& y, shared_ptr<CLabels>& labels,
                  const vector<float>& class_weights)
{
    SGVector<double> _tmp =
        dynamic_pointer_cast<sh::CBinaryLabels>(labels)->get_values();
    SGVector<float> tmp(_tmp.begin(), _tmp.end());
    Map<VectorXf> yhat(tmp.data(), tmp.size());

    VectorXf loss = log_loss(y, yhat, class_weights);
    return loss;
}
/// log loss
float mean_log_loss(const VectorXf& y, const VectorXf& yhat, VectorXf& loss,
                    const vector<float>& class_weights)
{
    loss = log_loss(y, yhat, class_weights);
    return loss.mean();
}

/// log loss
float log_loss_label(const VectorXf& y, const shared_ptr<CLabels>& labels,
                     VectorXf& loss, const vector<float>& class_weights)
{
    SGVector<double> _tmp =
        dynamic_pointer_cast<sh::CBinaryLabels>(labels)->get_values();
    SGVector<float> tmp(_tmp.begin(), _tmp.end());
    Map<VectorXf> yhat(tmp.data(), tmp.size());
    return mean_log_loss(y, yhat, loss, class_weights);
}
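/// Usage sketch added for this listing (illustrative only): mean_log_loss()
/// is the scalar score; it stores the per-sample losses in `loss` as a side
/// effect and equals log_loss(y, yhat, class_weights).mean().
/// @code
/// VectorXf y(2), yhat(2), loss;
/// y    << 0, 1;
/// yhat << 0.2, 0.7;
/// float score = mean_log_loss(y, yhat, loss, {});
/// @endcode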
/// derivative of the log loss
VectorXf d_log_loss(const VectorXf& y, const VectorXf& yhat,
                    const vector<float>& class_weights)
{
    VectorXf dll(y.size());
    for (int i = 0; i < y.size(); ++i)
    {
        // assumes a logistic form for yhat, i.e. yhat = 1/(1+exp(-o))
        dll(i) = (yhat(i) - y(i));
        // weight compensation
        if (!class_weights.empty())
            dll(i) = dll(i) * class_weights.at(y(i));
    }
    return dll;
}

VectorXf d_log_loss(const VectorXf& y, shared_ptr<CLabels>& labels,
                    const vector<float>& class_weights)
{
    SGVector<double> _tmp =
        dynamic_pointer_cast<sh::CBinaryLabels>(labels)->get_values();
    SGVector<float> tmp(_tmp.begin(), _tmp.end());
    Map<VectorXf> yhat(tmp.data(), tmp.size());
    return d_log_loss(y, yhat, class_weights);
}
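/// Note added for this listing: under the logistic assumption above,
/// yhat = 1/(1+exp(-o)), the chain rule gives
///   d/do [ -(y*log(yhat) + (1-y)*log(1-yhat)) ] = yhat - y,
/// which is exactly the per-sample value returned here (optionally scaled by
/// the class weight of the true label).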
/// multinomial log loss
VectorXf multi_log_loss(const VectorXf& y, const ArrayXXf& confidences,
                        const vector<float>& class_weights)
{
    VectorXf loss = VectorXf::Zero(y.rows());

    // get class labels
    vector<float> uc = unique(y);

    float eps = pow(10,-10);
    float sum_weights = 0;
    for (unsigned i = 0; i < y.rows(); ++i)
    {
        for (const auto& c : uc)
        {
            ArrayXf yhat = confidences.col(int(c));
            // only the confidence assigned to the true class contributes
            if (y(i) == c)
            {
                // clip confidences away from 0 and 1, where the log is undefined
                if (yhat(i) < eps || 1 - yhat(i) < eps)
                    loss(i) += -log(eps);
                else
                    loss(i) += -log(yhat(i));
            }
        }
        if (!class_weights.empty()){
            loss(i) = loss(i)*class_weights.at(y(i));
            sum_weights += class_weights.at(y(i));
        }
    }
    if (sum_weights > 0)
        loss = loss.array() / sum_weights * y.size();

    return loss;
}

/// mean multinomial log loss
float mean_multi_log_loss(const VectorXf& y,
                          const ArrayXXf& confidences, VectorXf& loss,
                          const vector<float>& class_weights)
{
    loss = multi_log_loss(y, confidences, class_weights);
    return loss.mean();
}
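/// Usage sketch added for this listing (illustrative only): rows of
/// `confidences` are samples and columns are classes, indexed by the label
/// value itself.
/// @code
/// VectorXf y(3);  y << 0, 1, 1;
/// ArrayXXf confidences(3, 2);
/// confidences << 0.8, 0.2,
///                0.3, 0.7,
///                0.5, 0.5;
/// VectorXf l = multi_log_loss(y, confidences, {});
/// // l = [-log(0.8), -log(0.7), -log(0.5)]
/// VectorXf loss;
/// float m = mean_multi_log_loss(y, confidences, loss, {});  // mean of the above
/// @endcode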
/// multinomial log loss
float multi_log_loss_label(const VectorXf& y,
                           const shared_ptr<CLabels>& labels, VectorXf& loss,
                           const vector<float>& class_weights)
{
    ArrayXXf confidences(y.size(), unique(y).size());
    for (unsigned i = 0; i < y.size(); ++i)
    {
        SGVector<double> _tmp =
            dynamic_pointer_cast<sh::CMulticlassLabels>(labels)->
                get_multiclass_confidences(int(i));
        SGVector<float> tmp(_tmp.begin(), _tmp.end());
        // guard against labels carrying the wrong number of confidences
        if (confidences.cols() != tmp.size())
            confidences.row(i) = 0;
        else
            confidences.row(i) = Map<ArrayXf>(tmp.data(), tmp.size());
    }
    return mean_multi_log_loss(y, confidences, loss, class_weights);
}
VectorXf multi_log_loss(const VectorXf& y, shared_ptr<CLabels>& labels,
                        const vector<float>& class_weights)
{
    ArrayXXf confidences(y.size(), unique(y).size());
    for (unsigned i = 0; i < y.size(); ++i)
    {
        SGVector<double> _tmp =
            dynamic_pointer_cast<sh::CMulticlassLabels>(labels)->
                get_multiclass_confidences(int(i));
        SGVector<float> tmp(_tmp.begin(), _tmp.end());
        confidences.row(i) = Map<ArrayXf>(tmp.data(), tmp.size());
    }
    return multi_log_loss(y, confidences, class_weights);
}
/// derivative of multinomial log loss
VectorXf d_multi_log_loss(const VectorXf& y,
                          shared_ptr<CLabels>& labels,
                          const vector<float>& class_weights)
{
    ArrayXXf confidences(y.size(), unique(y).size());
    for (unsigned i = 0; i < y.size(); ++i)
    {
        SGVector<double> _tmp =
            dynamic_pointer_cast<sh::CMulticlassLabels>(labels)->
                get_multiclass_confidences(int(i));
        SGVector<float> tmp(_tmp.begin(), _tmp.end());
        confidences.row(i) = Map<ArrayXf>(tmp.data(), tmp.size());
    }

    VectorXf loss = VectorXf::Zero(y.rows());

    // get class labels
    vector<float> uc = unique(y);

    float eps = pow(10,-10);
    float sum_weights = 0;
    for (unsigned i = 0; i < y.rows(); ++i)
    {
        for (const auto& c : uc)
        {
            ArrayXf yhat = confidences.col(int(c));
            // only the confidence of the true class contributes to the gradient
            if (y(i) == c)
            {
                // clip tiny confidences so the gradient stays finite
                if (yhat(i) < eps)
                    loss(i) += -1/eps;
                else
                    loss(i) += -1/yhat(i);
            }
        }
        if (!class_weights.empty())
        {
            loss(i) = loss(i)*class_weights.at(y(i));
            sum_weights += class_weights.at(y(i));
        }
    }
    if (sum_weights > 0)
        loss = loss.array() / sum_weights * y.size();

    return loss;
}
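/// Note added for this listing: for sample i with true class c, the loss term
/// is -log(confidences(i, c)), so its derivative with respect to that
/// confidence is -1/confidences(i, c); that is the quantity accumulated above,
/// with the same class-weight rescaling as multi_log_loss().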
/// 1 - balanced accuracy
float bal_zero_one_loss(const VectorXf& y, const VectorXf& yhat,
                        VectorXf& loss,
                        const vector<float>& class_weights)
{
    vector<float> uc = unique(y);
    vector<int> c;
    for (const auto& i : uc)
        c.push_back(int(i));

    // per-class true positives, true negatives, positives, and negatives
    vector<float> TP(c.size(),0.0), TN(c.size(), 0.0), P(c.size(),0.0), N(c.size(),0.0);
    ArrayXf class_accuracies(c.size());

    // get class counts
    for (unsigned i = 0; i < c.size(); ++i)
    {
        P.at(i) = (y.array().cast<int>() == c.at(i)).count();
        N.at(i) = (y.array().cast<int>() != c.at(i)).count();
    }

    for (unsigned i = 0; i < y.rows(); ++i)
    {
        if (yhat(i) == y(i))
            // true positive; the ternary maps a -1 label onto index 0
            ++TP.at(y(i) == -1 ? 0 : y(i));
        else
        {
            for (unsigned j = 0; j < c.size(); ++j)
                if ( y(i) != c.at(j) && yhat(i) != c.at(j) )
                    ++TN.at(j);    // true negative for class j
        }
    }

    // class-wise balanced accuracy
    for (unsigned i = 0; i < c.size(); ++i){
        class_accuracies(i) = (TP.at(i)/P.at(i) + TN.at(i)/N.at(i))/2;
    }

    // per-sample zero-one errors
    loss = (yhat.cast<int>().array() != y.cast<int>().array()).cast<float>();

    return 1.0 - class_accuracies.mean();
}
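/// Usage sketch added for this listing (illustrative only): balanced accuracy
/// averages per-class accuracies, so a model that always predicts the
/// majority class is penalized more heavily than by plain accuracy.
/// @code
/// VectorXf y(4), yhat(4), loss;
/// y    << 0, 0, 0, 1;
/// yhat << 0, 0, 0, 0;   // always predicts the majority class
/// float bal = bal_zero_one_loss(y, yhat, loss, {});  // misses class 1 entirely
/// @endcode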
float bal_zero_one_loss_label(const VectorXf& y,
                              const shared_ptr<CLabels>& labels,
                              VectorXf& loss,
                              const vector<float>& class_weights)
{
    SGVector<double> _tmp;
    // labels may be binary or multiclass; try the multiclass cast first
    auto ptrmulticast = dynamic_pointer_cast<sh::CMulticlassLabels>(labels);
    if (ptrmulticast == NULL)
    {
        auto ptrbinary = dynamic_pointer_cast<sh::CBinaryLabels>(labels);
        _tmp = ptrbinary->get_labels();
    }
    else
        _tmp = ptrmulticast->get_labels();

    SGVector<float> tmp(_tmp.begin(), _tmp.end());
    Map<VectorXf> yhat(tmp.data(), tmp.size());
    return bal_zero_one_loss(y, yhat, loss, class_weights);
}
/// 1 - accuracy
float zero_one_loss(const VectorXf& y, const VectorXf& yhat, VectorXf& loss,
                    const vector<float>& class_weights)
{
    loss = (yhat.cast<int>().array() != y.cast<int>().array()).cast<float>();
    return loss.mean();
}
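/// Usage sketch added for this listing (illustrative only): zero_one_loss()
/// is 1 - accuracy, with the per-sample errors stored in `loss`.
/// @code
/// VectorXf y(4), yhat(4), loss;
/// y    << 0, 1, 1, 0;
/// yhat << 0, 1, 0, 0;
/// float err = zero_one_loss(y, yhat, loss, {});  // loss = [0,0,1,0], err = 0.25
/// @endcode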
float zero_one_loss_label(const VectorXf& y,
                          const shared_ptr<CLabels>& labels, VectorXf& loss,
                          const vector<float>& class_weights)
{
    SGVector<double> _tmp =
        dynamic_pointer_cast<sh::CBinaryLabels>(labels)->get_labels();
    SGVector<float> tmp(_tmp.begin(), _tmp.end());
    Map<VectorXf> yhat(tmp.data(), tmp.size());
    return zero_one_loss(y, yhat, loss, class_weights);
}
/// false positive rate
float false_positive_loss(const VectorXf& y,
                          const VectorXf& yhat, VectorXf& loss,
                          const vector<float>& class_weights)
{
    // mark samples predicted positive that are actually negative
    ArrayXb ybool = y.cast<bool>();
    loss = (yhat.cast<bool>().select(!ybool, false)).cast<float>();
    // normalize by the number of actual negatives in y
    return loss.sum()/float((y.size() - ybool.count()));
}
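/// Usage sketch added for this listing (illustrative only): the return value
/// is the false positive rate, i.e. false positives divided by the number of
/// actual negatives in y.
/// @code
/// VectorXf y(4), yhat(4), loss;
/// y    << 0, 0, 1, 1;
/// yhat << 1, 0, 1, 0;
/// float fpr = false_positive_loss(y, yhat, loss, {});  // loss = [1,0,0,0], fpr = 0.5
/// @endcode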
float false_positive_loss_label(const VectorXf& y,
                                const shared_ptr<CLabels>& labels, VectorXf& loss,
                                const vector<float>& class_weights)
{
    SGVector<double> _tmp =
        dynamic_pointer_cast<sh::CBinaryLabels>(labels)->get_labels();
    SGVector<float> tmp(_tmp.begin(), _tmp.end());
    Map<VectorXf> yhat(tmp.data(), tmp.size());
    return false_positive_loss(y, yhat, loss, class_weights);
}
// Declarations from elsewhere in the codebase that this file relies on:
//   typedef Eigen::Array<bool, Eigen::Dynamic, 1> ArrayXb;        // boolean Eigen array used by the zero-one losses
//   #define THROW_RUNTIME_ERROR(err)                              // error-reporting macro
//   template<typename T> vector<T> unique(vector<T> w);           // returns unique elements in vector
//   template<typename T> std::string to_string(const T& value);   // template function to convert objects to string for logging