Skip to content

Commit 3d4af4f

Browse files
authored
Add tests for loss-guided expand (#62)
* minor refactoring; adding tests
* remove dead comments

Co-authored-by: Dmitry Razdoburdin <>
1 parent bcb4472 commit 3d4af4f

File tree

3 files changed

+74
-22
lines changed

3 files changed

+74
-22
lines changed

plugin/sycl/tree/hist_updater.cc

Lines changed: 8 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -112,13 +112,12 @@ void HistUpdater<GradientSumT>::BuildLocalHistograms(
112112
template<typename GradientSumT>
113113
void HistUpdater<GradientSumT>::BuildNodeStats(
114114
const common::GHistIndexMatrix &gmat,
115-
DMatrix *p_fmat,
116115
RegTree *p_tree,
117116
const USMVector<GradientPair, MemoryType::on_device> &gpair) {
118117
builder_monitor_.Start("BuildNodeStats");
119118
for (auto const& entry : qexpand_depth_wise_) {
120119
int nid = entry.nid;
121-
this->InitNewNode(nid, gmat, gpair, *p_fmat, *p_tree);
120+
this->InitNewNode(nid, gmat, gpair, *p_tree);
122121
// add constraints
123122
if (!(*p_tree)[nid].IsLeftChild() && !(*p_tree)[nid].IsRoot()) {
124123
// it's a right child
@@ -232,7 +231,6 @@ void HistUpdater<GradientSumT>::SplitSiblings(
232231
template<typename GradientSumT>
233232
void HistUpdater<GradientSumT>::ExpandWithDepthWise(
234233
const common::GHistIndexMatrix &gmat,
235-
DMatrix *p_fmat,
236234
RegTree *p_tree,
237235
const USMVector<GradientPair, MemoryType::on_device> &gpair) {
238236
int num_leaves = 0;
@@ -249,7 +247,7 @@ void HistUpdater<GradientSumT>::ExpandWithDepthWise(
249247
hist_rows_adder_->AddHistRows(this, &sync_ids, p_tree);
250248
BuildLocalHistograms(gmat, p_tree, gpair);
251249
hist_synchronizer_->SyncHistograms(this, sync_ids, p_tree);
252-
BuildNodeStats(gmat, p_fmat, p_tree, gpair);
250+
BuildNodeStats(gmat, p_tree, gpair);
253251

254252
EvaluateAndApplySplits(gmat, p_tree, &num_leaves, depth,
255253
&temp_qexpand_depth);
@@ -270,7 +268,6 @@ void HistUpdater<GradientSumT>::ExpandWithDepthWise(
270268
template<typename GradientSumT>
271269
void HistUpdater<GradientSumT>::ExpandWithLossGuide(
272270
const common::GHistIndexMatrix& gmat,
273-
DMatrix* p_fmat,
274271
RegTree* p_tree,
275272
const USMVector<GradientPair, MemoryType::on_device> &gpair) {
276273
builder_monitor_.Start("ExpandWithLossGuide");
@@ -280,10 +277,10 @@ void HistUpdater<GradientSumT>::ExpandWithLossGuide(
280277
ExpandEntry node(ExpandEntry::kRootNid, p_tree->GetDepth(ExpandEntry::kRootNid));
281278
BuildHistogramsLossGuide(node, gmat, p_tree, gpair);
282279

283-
this->InitNewNode(ExpandEntry::kRootNid, gmat, gpair, *p_fmat, *p_tree);
284-
280+
this->InitNewNode(ExpandEntry::kRootNid, gmat, gpair, *p_tree);
285281
this->EvaluateSplits({node}, gmat, *p_tree);
286282
node.split.loss_chg = snode_host_[ExpandEntry::kRootNid].best.loss_chg;
283+
// LOG(FATAL) << node.split.loss_chg;
287284

288285
qexpand_loss_guided_->push(node);
289286
++num_leaves;
@@ -305,9 +302,7 @@ void HistUpdater<GradientSumT>::ExpandWithLossGuide(
305302
e.best.DefaultLeft(), e.weight, left_leaf_weight,
306303
right_leaf_weight, e.best.loss_chg, e.stats.GetHess(),
307304
e.best.left_sum.GetHess(), e.best.right_sum.GetHess());
308-
309305
this->ApplySplit({candidate}, gmat, p_tree);
310-
311306
const int cleft = (*p_tree)[nid].LeftChild();
312307
const int cright = (*p_tree)[nid].RightChild();
313308

@@ -320,8 +315,8 @@ void HistUpdater<GradientSumT>::ExpandWithLossGuide(
320315
BuildHistogramsLossGuide(right_node, gmat, p_tree, gpair);
321316
}
322317

323-
this->InitNewNode(cleft, gmat, gpair, *p_fmat, *p_tree);
324-
this->InitNewNode(cright, gmat, gpair, *p_fmat, *p_tree);
318+
this->InitNewNode(cleft, gmat, gpair, *p_tree);
319+
this->InitNewNode(cright, gmat, gpair, *p_tree);
325320
bst_uint featureid = snode_host_[nid].best.SplitIndex();
326321
tree_evaluator_.AddSplit(nid, cleft, cright, featureid,
327322
snode_host_[cleft].weight, snode_host_[cright].weight);
@@ -356,9 +351,9 @@ void HistUpdater<GradientSumT>::Update(
356351

357352
this->InitData(gmat, gpair_device, *p_fmat, *p_tree);
358353
if (param_.grow_policy == xgboost::tree::TrainParam::kLossGuide) {
359-
ExpandWithLossGuide(gmat, p_fmat, p_tree, gpair_device);
354+
ExpandWithLossGuide(gmat, p_tree, gpair_device);
360355
} else {
361-
ExpandWithDepthWise(gmat, p_fmat, p_tree, gpair_device);
356+
ExpandWithDepthWise(gmat, p_tree, gpair_device);
362357
}
363358

364359
for (int nid = 0; nid < p_tree->NumNodes(); ++nid) {
@@ -842,7 +837,6 @@ void HistUpdater<GradientSumT>::InitNewNode(int nid,
842837
const common::GHistIndexMatrix& gmat,
843838
const USMVector<GradientPair,
844839
MemoryType::on_device> &gpair,
845-
const DMatrix& fmat,
846840
const RegTree& tree) {
847841
builder_monitor_.Start("InitNewNode");
848842

plugin/sycl/tree/hist_updater.h

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -142,11 +142,9 @@ class HistUpdater {
142142
void InitNewNode(int nid,
143143
const common::GHistIndexMatrix& gmat,
144144
const USMVector<GradientPair, MemoryType::on_device> &gpair,
145-
const DMatrix& fmat,
146145
const RegTree& tree);
147146

148147
void ExpandWithDepthWise(const common::GHistIndexMatrix &gmat,
149-
DMatrix *p_fmat,
150148
RegTree *p_tree,
151149
const USMVector<GradientPair, MemoryType::on_device> &gpair);
152150

@@ -169,7 +167,6 @@ class HistUpdater {
169167
RegTree *p_tree);
170168

171169
void BuildNodeStats(const common::GHistIndexMatrix &gmat,
172-
DMatrix *p_fmat,
173170
RegTree *p_tree,
174171
const USMVector<GradientPair, MemoryType::on_device> &gpair);
175172

@@ -188,7 +185,6 @@ class HistUpdater {
188185
std::vector<ExpandEntry>* temp_qexpand_depth);
189186

190187
void ExpandWithLossGuide(const common::GHistIndexMatrix& gmat,
191-
DMatrix* p_fmat,
192188
RegTree* p_tree,
193189
const USMVector<GradientPair, MemoryType::on_device>& gpair);
194190

tests/cpp/plugin/test_sycl_hist_updater.cc

Lines changed: 66 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -49,9 +49,8 @@ class TestHistUpdater : public HistUpdater<GradientSumT> {
4949
auto TestInitNewNode(int nid,
5050
const common::GHistIndexMatrix& gmat,
5151
const USMVector<GradientPair, MemoryType::on_device> &gpair,
52-
const DMatrix& fmat,
5352
const RegTree& tree) {
54-
HistUpdater<GradientSumT>::InitNewNode(nid, gmat, gpair, fmat, tree);
53+
HistUpdater<GradientSumT>::InitNewNode(nid, gmat, gpair, tree);
5554
return HistUpdater<GradientSumT>::snode_host_[nid];
5655
}
5756

@@ -67,6 +66,13 @@ class TestHistUpdater : public HistUpdater<GradientSumT> {
6766
RegTree* p_tree) {
6867
HistUpdater<GradientSumT>::ApplySplit(nodes, gmat, p_tree);
6968
}
69+
70+
auto TestExpandWithLossGuide(const common::GHistIndexMatrix& gmat,
71+
DMatrix *p_fmat,
72+
RegTree* p_tree,
73+
const USMVector<GradientPair, MemoryType::on_device> &gpair) {
74+
HistUpdater<GradientSumT>::ExpandWithLossGuide(gmat, p_tree, gpair);
75+
}
7076
};
7177

7278
void GenerateRandomGPairs(::sycl::queue* qu, GradientPair* gpair_ptr, size_t num_rows, bool has_neg_hess) {
@@ -295,7 +301,7 @@ void TestHistUpdaterInitNewNode(const xgboost::tree::TrainParam& param, float sp
295301
auto& row_idxs = row_set_collection->Data();
296302
const size_t* row_idxs_ptr = row_idxs.DataConst();
297303
updater.TestBuildHistogramsLossGuide(node, gmat, &tree, gpair);
298-
const auto snode = updater.TestInitNewNode(ExpandEntry::kRootNid, gmat, gpair, *p_fmat, tree);
304+
const auto snode = updater.TestInitNewNode(ExpandEntry::kRootNid, gmat, gpair, tree);
299305

300306
GradStats<GradientSumT> grad_stat;
301307
{
@@ -354,7 +360,7 @@ void TestHistUpdaterEvaluateSplits(const xgboost::tree::TrainParam& param) {
354360
auto& row_idxs = row_set_collection->Data();
355361
const size_t* row_idxs_ptr = row_idxs.DataConst();
356362
const auto* hist = updater.TestBuildHistogramsLossGuide(node, gmat, &tree, gpair);
357-
const auto snode_init = updater.TestInitNewNode(ExpandEntry::kRootNid, gmat, gpair, *p_fmat, tree);
363+
const auto snode_init = updater.TestInitNewNode(ExpandEntry::kRootNid, gmat, gpair, tree);
358364

359365
const auto snode_updated = updater.TestEvaluateSplits({node}, gmat, tree);
360366
auto best_loss_chg = snode_updated[0].best.loss_chg;
@@ -479,6 +485,53 @@ void TestHistUpdaterApplySplit(const xgboost::tree::TrainParam& param, float spa
479485

480486
}
481487

488+
template <typename GradientSumT>
489+
void TestHistUpdaterExpandWithLossGuide(const xgboost::tree::TrainParam& param) {
490+
const size_t num_rows = 3;
491+
const size_t num_columns = 1;
492+
const size_t n_bins = 16;
493+
494+
Context ctx;
495+
ctx.UpdateAllowUnknown(Args{{"device", "sycl"}});
496+
497+
DeviceManager device_manager;
498+
auto qu = device_manager.GetQueue(ctx.Device());
499+
500+
std::vector<float> data = {7, 3, 15};
501+
auto p_fmat = GetDMatrixFromData(data, num_rows, num_columns);
502+
503+
DeviceMatrix dmat;
504+
dmat.Init(qu, p_fmat.get());
505+
common::GHistIndexMatrix gmat;
506+
gmat.Init(qu, &ctx, dmat, n_bins);
507+
508+
std::vector<GradientPair> gpair_host = {{1, 2}, {3, 1}, {1, 1}};
509+
USMVector<GradientPair, MemoryType::on_device> gpair(&qu, gpair_host);
510+
511+
RegTree tree;
512+
FeatureInteractionConstraintHost int_constraints;
513+
TestHistUpdater<GradientSumT> updater(&ctx, qu, param, int_constraints, p_fmat.get());
514+
updater.SetHistSynchronizer(new BatchHistSynchronizer<GradientSumT>());
515+
updater.SetHistRowsAdder(new BatchHistRowsAdder<GradientSumT>());
516+
auto* row_set_collection = updater.TestInitData(gmat, gpair, *p_fmat, tree);
517+
518+
updater.TestExpandWithLossGuide(gmat, p_fmat.get(), &tree, gpair);
519+
520+
const auto& nodes = tree.GetNodes();
521+
std::vector<float> ans(data.size());
522+
for (size_t data_idx = 0; data_idx < data.size(); ++data_idx) {
523+
size_t node_idx = 0;
524+
while (!nodes[node_idx].IsLeaf()) {
525+
node_idx = data[data_idx] < nodes[node_idx].SplitCond() ? nodes[node_idx].LeftChild() : nodes[node_idx].RightChild();
526+
}
527+
ans[data_idx] = nodes[node_idx].LeafValue();
528+
}
529+
530+
ASSERT_NEAR(ans[0], -0.15, 1e-6);
531+
ASSERT_NEAR(ans[1], -0.45, 1e-6);
532+
ASSERT_NEAR(ans[2], -0.15, 1e-6);
533+
}
534+
482535
TEST(SyclHistUpdater, Sampling) {
483536
xgboost::tree::TrainParam param;
484537
param.UpdateAllowUnknown(Args{{"subsample", "0.7"}});
@@ -546,4 +599,13 @@ TEST(SyclHistUpdater, ApplySplitDence) {
546599
TestHistUpdaterApplySplit<double>(param, 0.0, (1u << 16) + 1);
547600
}
548601

602+
TEST(SyclHistUpdater, ExpandWithLossGuide) {
603+
xgboost::tree::TrainParam param;
604+
param.UpdateAllowUnknown(Args{{"max_depth", "2"},
605+
{"grow_policy", "lossguide"}});
606+
607+
TestHistUpdaterExpandWithLossGuide<float>(param);
608+
TestHistUpdaterExpandWithLossGuide<double>(param);
609+
}
610+
549611
} // namespace xgboost::sycl::tree

0 commit comments

Comments (0)