Medial Code Documentation
Loading...
Searching...
No Matches
regression_loss_oneapi.h
1
4#ifndef XGBOOST_OBJECTIVE_REGRESSION_LOSS_ONEAPI_H_
5#define XGBOOST_OBJECTIVE_REGRESSION_LOSS_ONEAPI_H_
6
7#include <dmlc/omp.h>
8#include <xgboost/logging.h>
9#include <algorithm>
10
11#include "CL/sycl.hpp"
12
13namespace xgboost {
14namespace obj {
15
21inline float SigmoidOneAPI(float x) {
22 return 1.0f / (1.0f + cl::sycl::exp(-x));
23}
24
25// common regressions
26// linear regression
28 static bst_float PredTransform(bst_float x) { return x; }
29 static bool CheckLabel(bst_float x) { return true; }
30 static bst_float FirstOrderGradient(bst_float predt, bst_float label) {
31 return predt - label;
32 }
33 static bst_float SecondOrderGradient(bst_float predt, bst_float label) {
34 return 1.0f;
35 }
36 static bst_float ProbToMargin(bst_float base_score) { return base_score; }
37 static const char* LabelErrorMsg() { return ""; }
38 static const char* DefaultEvalMetric() { return "rmse"; }
39
40 static const char* Name() { return "reg:squarederror_oneapi"; }
41};
42
43// TODO: DPC++ does not fully support std math inside offloaded kernels
45 static bst_float PredTransform(bst_float x) { return x; }
46 static bool CheckLabel(bst_float label) {
47 return label > -1;
48 }
49 static bst_float FirstOrderGradient(bst_float predt, bst_float label) {
50 predt = std::max(predt, (bst_float)(-1 + 1e-6)); // ensure correct value for log1p
51 return (cl::sycl::log1p(predt) - cl::sycl::log1p(label)) / (predt + 1);
52 }
53 static bst_float SecondOrderGradient(bst_float predt, bst_float label) {
54 predt = std::max(predt, (bst_float)(-1 + 1e-6));
55 float res = (-cl::sycl::log1p(predt) + cl::sycl::log1p(label) + 1) /
56 cl::sycl::pow(predt + 1, (bst_float)2);
57 res = std::max(res, (bst_float)1e-6f);
58 return res;
59 }
60 static bst_float ProbToMargin(bst_float base_score) { return base_score; }
61 static const char* LabelErrorMsg() {
62 return "label must be greater than -1 for rmsle so that log(label + 1) can be valid.";
63 }
64 static const char* DefaultEvalMetric() { return "rmsle"; }
65
66 static const char* Name() { return "reg:squaredlogerror_oneapi"; }
67};
68
69// logistic loss for probability regression task
71 // duplication is necessary, as __device__ specifier
72 // cannot be made conditional on template parameter
73 static bst_float PredTransform(bst_float x) { return SigmoidOneAPI(x); }
74 static bool CheckLabel(bst_float x) { return x >= 0.0f && x <= 1.0f; }
75 static bst_float FirstOrderGradient(bst_float predt, bst_float label) {
76 return predt - label;
77 }
78 static bst_float SecondOrderGradient(bst_float predt, bst_float label) {
79 const bst_float eps = 1e-16f;
80 return std::max(predt * (1.0f - predt), eps);
81 }
82 template <typename T>
83 static T PredTransform(T x) { return SigmoidOneAPI(x); }
84 template <typename T>
85 static T FirstOrderGradient(T predt, T label) { return predt - label; }
86 template <typename T>
87 static T SecondOrderGradient(T predt, T label) {
88 const T eps = T(1e-16f);
89 return std::max(predt * (T(1.0f) - predt), eps);
90 }
91 static bst_float ProbToMargin(bst_float base_score) {
92 CHECK(base_score > 0.0f && base_score < 1.0f)
93 << "base_score must be in (0,1) for logistic loss, got: " << base_score;
94 return -logf(1.0f / base_score - 1.0f);
95 }
96 static const char* LabelErrorMsg() {
97 return "label must be in [0,1] for logistic regression";
98 }
99 static const char* DefaultEvalMetric() { return "rmse"; }
100
101 static const char* Name() { return "reg:logistic_oneapi"; }
102};
103
104// logistic loss for binary classification task
106 static const char* DefaultEvalMetric() { return "logloss"; }
107 static const char* Name() { return "binary:logistic_oneapi"; }
108};
109
110// logistic loss, but predict un-transformed margin
112 // duplication is necessary, as __device__ specifier
113 // cannot be made conditional on template parameter
114 static bst_float PredTransform(bst_float x) { return x; }
115 static bst_float FirstOrderGradient(bst_float predt, bst_float label) {
116 predt = SigmoidOneAPI(predt);
117 return predt - label;
118 }
119 static bst_float SecondOrderGradient(bst_float predt, bst_float label) {
120 const bst_float eps = 1e-16f;
121 predt = SigmoidOneAPI(predt);
122 return std::max(predt * (1.0f - predt), eps);
123 }
124 template <typename T>
125 static T PredTransform(T x) { return x; }
126 template <typename T>
127 static T FirstOrderGradient(T predt, T label) {
128 predt = SigmoidOneAPI(predt);
129 return predt - label;
130 }
131 template <typename T>
132 static T SecondOrderGradient(T predt, T label) {
133 const T eps = T(1e-16f);
134 predt = SigmoidOneAPI(predt);
135 return std::max(predt * (T(1.0f) - predt), eps);
136 }
137 static const char* DefaultEvalMetric() { return "logloss"; }
138
139 static const char* Name() { return "binary:logitraw_oneapi"; }
140};
141
142} // namespace obj
143} // namespace xgboost
144
145#endif // XGBOOST_OBJECTIVE_REGRESSION_LOSS_ONEAPI_H_
defines console logging options for xgboost. Use to enforce unified print behavior.
float SigmoidOneAPI(float x)
Calculates the sigmoid (logistic function) of the input.
Definition regression_loss_oneapi.h:21
namespace of xgboost
Definition base.h:90
float bst_float
float type, used for storing statistics
Definition base.h:97
header to handle OpenMP compatibility issues
Definition regression_loss_oneapi.h:27
Definition regression_loss_oneapi.h:105
Definition regression_loss_oneapi.h:111
Definition regression_loss_oneapi.h:70
Definition regression_loss_oneapi.h:44