1 #ifndef AUTO_BACKPROP_H
2 #define AUTO_BACKPROP_H
8 #include "../pop/nodevector.h"
9 #include "../dat/state.h"
10 #include "../pop/op/n_Dx.h"
11 #include "../eval/metrics.h"
12 #include "../pop/individual.h"
13 #include "../model/ml.h"
15 #include "../params.h"
18 #include <shogun/labels/Labels.h>
20 using shogun::CLabels;
21 using Eigen::MatrixXf;
22 using Eigen::VectorXf;
23 typedef Eigen::Array<bool,Eigen::Dynamic,1>
ArrayXb;
69 typedef VectorXf (*
callback)(
const VectorXf&, shared_ptr<CLabels>&,
const vector<float>&);
104 void next_branch(vector<BP_NODE>& executing, vector<Node*>& bp_program,
105 vector<ArrayXf>& derivatives);
void backprop(Trace& f_stack, NodeVector& program, int start, int end,
              float Beta, shared_ptr<CLabels>& yhat,
              const Data& d, vector<float> sw);
void backprop2(Trace& f_stack, NodeVector& program, int start, int end,
               float Beta, const VectorXf& yhat,
               const Data& d, vector<float> sw);
Eigen::Array< bool, Eigen::Dynamic, 1 > ArrayXb
data object holding X, y, and Z
void run(Individual &ind, const Data &d, const Parameters &params)
Adapt the weights.
std::map< string, callback > score_hash
VectorXf(* callback)(const VectorXf &, shared_ptr< CLabels > &, const vector< float > &)
vector< Trace > forward_prop(Individual &ind, const Data &d, MatrixXf &Phi, const Parameters &params)
Return the f_stack.
void backprop(Trace &f_stack, NodeVector &program, int start, int end, float Beta, shared_ptr< CLabels > &yhat, const Data &d, vector< float > sw)
Compute gradients and update weights.
std::map< string, callback > d_score_hash
void next_branch(vector< BP_NODE > &executing, vector< Node * > &bp_program, vector< ArrayXf > &derivatives)
Updates stacks to have proper value on top.
void print_weights(NodeVector &program)
void backprop2(Trace &f_stack, NodeVector &program, int start, int end, float Beta, const VectorXf &yhat, const Data &d, vector< float > sw)
Compute gradients and update weights.
AutoBackProp(string scorer, int iters=1000, float n=0.1, float a=0.9)
individual programs in the population
T pop_front(vector< T > *v)
used for tracing stack outputs for the backprop algorithm.
vector< ArrayXf > deriv_list
holds the hyperparameters for Feat.
an extension of a vector of unique pointers to nodes