forpy  2
regression_opt.h
Go to the documentation of this file.
1 /* Author: Christoph Lassner. */
2 #pragma once
3 #ifndef FORPY_THRESHOLD_OPTIMIZERS_REGOPT_H_
4 #define FORPY_THRESHOLD_OPTIMIZERS_REGOPT_H_
5 
6 #include "../global.h"
7 #include "../util/serialization/basics.h"
8 
9 #include "../types.h"
10 #include "../util/desk.h"
11 #include "./ithreshopt.h"
12 
13 namespace forpy {
14 
#pragma clang diagnostic push
// These header-scope constants are intentionally unused in some translation
// units; silence the resulting warning for clang builds.
#pragma clang diagnostic ignored "-Wunused-variable"

// Variables to control debugging and log output for the forpy::RegressionOpt.
const int DLOG_ROPT_V = 1;          // Debug log verbosity level (presumably for DLOG-style macros — TODO confirm).
const size_t LOG_ROPT_NID = 12043;  // NOTE(review): looks like the single node id to emit logs for — confirm against logging call sites.
const bool LOG_ROPT_ALLN = false;   // Presumably: if true, log for all nodes instead of only LOG_ROPT_NID — verify.

// Regression epsilon. Differences smaller than this are treated as
// nonexistent. The sklearn-compat build uses a double literal (narrowed to
// float) to mirror sklearn's arithmetic; the default build uses a float
// literal directly.
#ifdef FORPY_SKLEARN_COMPAT
const float REGOPT_EPS = 1E-7;
#else
const float REGOPT_EPS = 1E-7f;
#endif
#pragma clang diagnostic pop
39 
54 class RegressionOpt : public IThreshOpt {
55  public:
63  RegressionOpt(const size_t &n_thresholds = 0,
64  const float &gain_threshold = 1E-7f);
65 
67  virtual std::shared_ptr<IThreshOpt> create_duplicate(
69  const uint & /*random_seed*/) const {
70  return std::make_shared<RegressionOpt>(n_thresholds, gain_threshold);
71  }
72  void check_annotations(IDataProvider *dprov);
73  inline void transfer_or_run_check(IThreshOpt *other, IDataProvider *dprov) {
74  auto *ot_ropt = dynamic_cast<RegressionOpt *>(other);
75  if (ot_ropt == nullptr) ot_ropt->check_annotations(dprov);
76  };
77  void full_entropy(const IDataProvider &dprov, Desk *) const;
78  void optimize(Desk *) const;
79  float get_gain_threshold_for(const size_t & /*node_id*/) {
80  return gain_threshold;
81  };
83 
84  inline friend std::ostream &operator<<(std::ostream &stream,
85  const RegressionOpt & /*self*/) {
86  stream << "forpy::RegressionOpt";
87  return stream;
88  };
89  bool operator==(const IThreshOpt &rhs) const;
90 
91  private:
93  inline void optimize__sort(DeciderDesk &d) const;
94  inline std::unique_ptr<std::vector<float>> optimize__thresholds(
95  Desk *d) const;
96  friend class cereal::access;
97  template <class Archive>
98  void serialize(Archive &ar, const uint &) {
99  ar(cereal::make_nvp("base", cereal::base_class<IThreshOpt>(this)),
100  CEREAL_NVP(n_thresholds), CEREAL_NVP(gain_threshold));
101  }
102 
103  size_t n_thresholds;
105 
107 };
108 } // namespace forpy
109 
111 #endif // FORPY_THRESHOLD_OPTIMIZERS_REGOPT_H_
Find an optimal threshold.
Definition: ithreshopt.h:23
Desk for decider training.
Definition: desk.h:61
SplitOptRes< float > & optimize__setup(DeciderDesk &d) const
A data provider for the training of one tree.
Definition: idataprovider.h:22
void serialize(Archive &ar, const uint &)
RegressionOpt(const size_t &n_thresholds=0, const float &gain_threshold=1E-7f)
float get_gain_threshold_for(const size_t &)
Interface implementation.
Optimize split thresholds to optimize regression results (MSE).
const bool LOG_ROPT_ALLN
Variables to control debugging and log output for the forpy::RegressionOpt.
CEREAL_REGISTER_TYPE(forpy::RegressionOpt)
void check_annotations(IDataProvider *dprov)
Interface implementation.
void transfer_or_run_check(IThreshOpt *other, IDataProvider *dprov)
Interface implementation.
friend std::ostream & operator<<(std::ostream &stream, const RegressionOpt &)
void full_entropy(const IDataProvider &dprov, Desk *) const
Interface implementation.
void optimize(Desk *) const
Interface implementation.
virtual std::shared_ptr< IThreshOpt > create_duplicate(const uint &) const
Interface implementation.
DISALLOW_COPY_AND_ASSIGN(RegressionOpt)
bool operator==(const IThreshOpt &rhs) const
void optimize__sort(DeciderDesk &d) const
const size_t LOG_ROPT_NID
Variables to control debugging and log output for the forpy::RegressionOpt.
std::unique_ptr< std::vector< float > > optimize__thresholds(Desk *d) const
Main thread desk object.
Definition: desk.h:201
const float REGOPT_EPS
Regression epsilon. Differences smaller than this are treated as nonexistent. This is relevant for: ...
friend class cereal::access
const int DLOG_ROPT_V
Variables to control debugging and log output for the forpy::RegressionOpt.
unsigned int uint
Convenience typedef for unsigned int.
Definition: types.h:113