Skip to content

Commit

Permalink
SYCL. Add more test for sycl-objectives (#11064)
Browse files Browse the repository at this point in the history
  • Loading branch information
razdoburdin authored Dec 12, 2024
1 parent 3162e0d commit a2a348b
Show file tree
Hide file tree
Showing 14 changed files with 416 additions and 180 deletions.
6 changes: 4 additions & 2 deletions src/common/ranking_utils.h
Original file line number Diff line number Diff line change
Expand Up @@ -320,7 +320,9 @@ class NDCGCache : public RankingCache {
}

linalg::VectorView<double const> InvIDCG(Context const* ctx) const {
return inv_idcg_.View(ctx->Device());
// This function doesn't have a SYCL-specific implementation yet.
// For that reason, the data is transferred to the host when SYCL is used, to ensure proper execution.
return inv_idcg_.View(ctx->Device().IsSycl() ? DeviceOrd::CPU() : ctx->Device());
}
common::Span<double const> Discount(Context const* ctx) const {
return ctx->IsCUDA() ? discounts_.ConstDeviceSpan() : discounts_.ConstHostSpan();
Expand All @@ -330,7 +332,7 @@ class NDCGCache : public RankingCache {
dcg_.SetDevice(ctx->Device());
dcg_.Reshape(this->Groups());
}
return dcg_.View(ctx->Device());
return dcg_.View(ctx->Device().IsSycl() ? DeviceOrd::CPU() : ctx->Device());
}
};

Expand Down
7 changes: 5 additions & 2 deletions src/objective/lambdarank_obj.cc
Original file line number Diff line number Diff line change
Expand Up @@ -112,8 +112,11 @@ class LambdaRankObj : public FitIntercept {
lj_full_.View(ctx_->Device()), &ti_plus_, &tj_minus_,
&li_, &lj_, p_cache_);
} else {
cpu_impl::LambdaRankUpdatePositionBias(ctx_, li_full_.View(ctx_->Device()),
lj_full_.View(ctx_->Device()), &ti_plus_, &tj_minus_,
// This function doesn't have a SYCL-specific implementation yet.
// For that reason, the data is transferred to the host when SYCL is used, to ensure proper execution.
auto device = ctx_->Device().IsSycl() ? DeviceOrd::CPU() : ctx_->Device();
cpu_impl::LambdaRankUpdatePositionBias(ctx_, li_full_.View(device),
lj_full_.View(device), &ti_plus_, &tj_minus_,
&li_, &lj_, p_cache_);
}

Expand Down
26 changes: 11 additions & 15 deletions tests/cpp/objective/test_aft_obj.cc
Original file line number Diff line number Diff line change
Expand Up @@ -10,11 +10,11 @@
#include "xgboost/objective.h"
#include "xgboost/logging.h"
#include "../helpers.h"
#include "test_aft_obj.h"

namespace xgboost::common {
TEST(Objective, DeclareUnifiedTest(AFTObjConfiguration)) {
auto ctx = MakeCUDACtx(GPUIDX);
std::unique_ptr<ObjFunction> objective(ObjFunction::Create("survival:aft", &ctx));
void TestAFTObjConfiguration(const Context* ctx) {
std::unique_ptr<ObjFunction> objective(ObjFunction::Create("survival:aft", ctx));
objective->Configure({ {"aft_loss_distribution", "logistic"},
{"aft_loss_distribution_scale", "5"} });

Expand Down Expand Up @@ -73,9 +73,8 @@ static inline void CheckGPairOverGridPoints(
}
}

TEST(Objective, DeclareUnifiedTest(AFTObjGPairUncensoredLabels)) {
auto ctx = MakeCUDACtx(GPUIDX);
std::unique_ptr<ObjFunction> obj(ObjFunction::Create("survival:aft", &ctx));
void TestAFTObjGPairUncensoredLabels(const Context* ctx) {
std::unique_ptr<ObjFunction> obj(ObjFunction::Create("survival:aft", ctx));

CheckGPairOverGridPoints(obj.get(), 100.0f, 100.0f, "normal",
{ -3.9120f, -3.4013f, -2.8905f, -2.3798f, -1.8691f, -1.3583f, -0.8476f, -0.3368f, 0.1739f,
Expand All @@ -97,9 +96,8 @@ TEST(Objective, DeclareUnifiedTest(AFTObjGPairUncensoredLabels)) {
0.3026f, 0.1816f, 0.1090f, 0.0654f, 0.0392f, 0.0235f, 0.0141f, 0.0085f, 0.0051f, 0.0031f });
}

TEST(Objective, DeclareUnifiedTest(AFTObjGPairLeftCensoredLabels)) {
auto ctx = MakeCUDACtx(GPUIDX);
std::unique_ptr<ObjFunction> obj(ObjFunction::Create("survival:aft", &ctx));
void TestAFTObjGPairLeftCensoredLabels(const Context* ctx) {
std::unique_ptr<ObjFunction> obj(ObjFunction::Create("survival:aft", ctx));

CheckGPairOverGridPoints(obj.get(), 0.0f, 20.0f, "normal",
{ 0.0285f, 0.0832f, 0.1951f, 0.3804f, 0.6403f, 0.9643f, 1.3379f, 1.7475f, 2.1828f, 2.6361f,
Expand All @@ -118,9 +116,8 @@ TEST(Objective, DeclareUnifiedTest(AFTObjGPairLeftCensoredLabels)) {
0.0296f, 0.0179f, 0.0108f, 0.0065f, 0.0039f, 0.0024f, 0.0014f, 0.0008f, 0.0005f, 0.0003f });
}

TEST(Objective, DeclareUnifiedTest(AFTObjGPairRightCensoredLabels)) {
auto ctx = MakeCUDACtx(GPUIDX);
std::unique_ptr<ObjFunction> obj(ObjFunction::Create("survival:aft", &ctx));
void TestAFTObjGPairRightCensoredLabels(const Context* ctx) {
std::unique_ptr<ObjFunction> obj(ObjFunction::Create("survival:aft", ctx));

CheckGPairOverGridPoints(obj.get(), 60.0f, std::numeric_limits<float>::infinity(), "normal",
{ -3.6583f, -3.1815f, -2.7135f, -2.2577f, -1.8190f, -1.4044f, -1.0239f, -0.6905f, -0.4190f,
Expand All @@ -142,9 +139,8 @@ TEST(Objective, DeclareUnifiedTest(AFTObjGPairRightCensoredLabels)) {
0.1816f, 0.1089f, 0.0654f, 0.0392f, 0.0235f, 0.0141f, 0.0085f, 0.0051f, 0.0031f, 0.0018f });
}

TEST(Objective, DeclareUnifiedTest(AFTObjGPairIntervalCensoredLabels)) {
auto ctx = MakeCUDACtx(GPUIDX);
std::unique_ptr<ObjFunction> obj(ObjFunction::Create("survival:aft", &ctx));
void TestAFTObjGPairIntervalCensoredLabels(const Context* ctx) {
std::unique_ptr<ObjFunction> obj(ObjFunction::Create("survival:aft", ctx));

CheckGPairOverGridPoints(obj.get(), 16.0f, 200.0f, "normal",
{ -2.4435f, -1.9965f, -1.5691f, -1.1679f, -0.7990f, -0.4649f, -0.1596f, 0.1336f, 0.4370f,
Expand Down
2 changes: 1 addition & 1 deletion tests/cpp/objective/test_aft_obj.cu
Original file line number Diff line number Diff line change
Expand Up @@ -3,4 +3,4 @@
*/
// Dummy file to keep the CUDA tests.

#include "test_aft_obj.cc"
#include "test_aft_obj_cpu.cc"
23 changes: 23 additions & 0 deletions tests/cpp/objective/test_aft_obj.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
/**
 * Copyright 2020-2024 by XGBoost Contributors
 */
#ifndef XGBOOST_TEST_AFT_OBJ_H_
#define XGBOOST_TEST_AFT_OBJ_H_

#include <xgboost/context.h> // for Context

namespace xgboost::common {

// Device-agnostic test bodies for the "survival:aft" objective.  Each takes a
// fully-constructed Context so the same checks can be registered for CPU,
// CUDA, or SYCL from separate translation units.

// Checks that "survival:aft" accepts configuration (distribution type and
// scale parameters) via Configure().
void TestAFTObjConfiguration(const Context* ctx);

// Verifies gradient pairs for uncensored labels (lower bound == upper bound).
void TestAFTObjGPairUncensoredLabels(const Context* ctx);

// Verifies gradient pairs for left-censored labels (lower bound is 0).
void TestAFTObjGPairLeftCensoredLabels(const Context* ctx);

// Verifies gradient pairs for right-censored labels (upper bound is +infinity).
void TestAFTObjGPairRightCensoredLabels(const Context* ctx);

// Verifies gradient pairs for interval-censored labels (finite lower < upper).
void TestAFTObjGPairIntervalCensoredLabels(const Context* ctx);

}  // namespace xgboost::common

#endif // XGBOOST_TEST_AFT_OBJ_H_
41 changes: 41 additions & 0 deletions tests/cpp/objective/test_aft_obj_cpu.cc
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
/**
 * Copyright 2020-2024, XGBoost Contributors
 */
#include <gtest/gtest.h>
#include <memory>
#include <vector>
#include <limits>
#include <cmath>

#include "xgboost/objective.h"
#include "xgboost/logging.h"
#include "../helpers.h"
#include "test_aft_obj.h"  // declarations of the TestAFTObj* bodies invoked below

// Thin gtest wrappers around the device-agnostic AFT objective checks declared
// in test_aft_obj.h.  This file is also #include'd from test_aft_obj.cu so the
// same tests are re-registered for CUDA; DeclareUnifiedTest/MakeCUDACtx
// (from ../helpers.h) presumably select the device per translation unit —
// confirm against helpers.h.
namespace xgboost::common {
TEST(Objective, DeclareUnifiedTest(AFTObjConfiguration)) {
  auto ctx = MakeCUDACtx(GPUIDX);
  TestAFTObjConfiguration(&ctx);
}

TEST(Objective, DeclareUnifiedTest(AFTObjGPairUncensoredLabels)) {
  auto ctx = MakeCUDACtx(GPUIDX);
  TestAFTObjGPairUncensoredLabels(&ctx);
}

TEST(Objective, DeclareUnifiedTest(AFTObjGPairLeftCensoredLabels)) {
  auto ctx = MakeCUDACtx(GPUIDX);
  TestAFTObjGPairLeftCensoredLabels(&ctx);
}

TEST(Objective, DeclareUnifiedTest(AFTObjGPairRightCensoredLabels)) {
  auto ctx = MakeCUDACtx(GPUIDX);
  TestAFTObjGPairRightCensoredLabels(&ctx);
}

TEST(Objective, DeclareUnifiedTest(AFTObjGPairIntervalCensoredLabels)) {
  auto ctx = MakeCUDACtx(GPUIDX);
  TestAFTObjGPairIntervalCensoredLabels(&ctx);
}

}  // namespace xgboost::common
8 changes: 4 additions & 4 deletions tests/cpp/objective/test_lambdarank_obj.cc
Original file line number Diff line number Diff line change
Expand Up @@ -246,9 +246,9 @@ void TestMAPStat(Context const* ctx) {

predt.SetDevice(ctx->Device());
auto rank_idx =
p_cache->SortedIdx(ctx, ctx->IsCPU() ? predt.ConstHostSpan() : predt.ConstDeviceSpan());
p_cache->SortedIdx(ctx, !ctx->IsCUDA() ? predt.ConstHostSpan() : predt.ConstDeviceSpan());

if (ctx->IsCPU()) {
if (!ctx->IsCUDA()) {
obj::cpu_impl::MAPStat(ctx, info.labels.HostView().Slice(linalg::All(), 0), rank_idx,
p_cache);
} else {
Expand Down Expand Up @@ -283,9 +283,9 @@ void TestMAPStat(Context const* ctx) {

predt.SetDevice(ctx->Device());
auto rank_idx =
p_cache->SortedIdx(ctx, ctx->IsCPU() ? predt.ConstHostSpan() : predt.ConstDeviceSpan());
p_cache->SortedIdx(ctx, !ctx->IsCUDA() ? predt.ConstHostSpan() : predt.ConstDeviceSpan());

if (ctx->IsCPU()) {
if (!ctx->IsCUDA()) {
obj::cpu_impl::MAPStat(ctx, info.labels.HostView().Slice(linalg::All(), 0), rank_idx,
p_cache);
} else {
Expand Down
28 changes: 12 additions & 16 deletions tests/cpp/objective/test_objective.cc
Original file line number Diff line number Diff line change
Expand Up @@ -29,23 +29,19 @@ TEST(Objective, PredTransform) {
size_t n = 100;

for (const auto& entry : ::dmlc::Registry<::xgboost::ObjFunctionReg>::List()) {
// SYCL implementations are skipped for this test
const std::string sycl_postfix = "sycl";
if ((entry->name.size() >= sycl_postfix.size()) && !std::equal(sycl_postfix.rbegin(), sycl_postfix.rend(), entry->name.rbegin())) {
std::unique_ptr<xgboost::ObjFunction> obj{xgboost::ObjFunction::Create(entry->name, &tparam)};
if (entry->name.find("multi") != std::string::npos) {
obj->Configure(Args{{"num_class", "2"}});
}
if (entry->name.find("quantile") != std::string::npos) {
obj->Configure(Args{{"quantile_alpha", "0.5"}});
}
HostDeviceVector<float> predts;
predts.Resize(n, 3.14f); // prediction is performed on host.
ASSERT_FALSE(predts.DeviceCanRead());
obj->PredTransform(&predts);
ASSERT_FALSE(predts.DeviceCanRead());
ASSERT_TRUE(predts.HostCanWrite());
std::unique_ptr<xgboost::ObjFunction> obj{xgboost::ObjFunction::Create(entry->name, &tparam)};
if (entry->name.find("multi") != std::string::npos) {
obj->Configure(Args{{"num_class", "2"}});
}
if (entry->name.find("quantile") != std::string::npos) {
obj->Configure(Args{{"quantile_alpha", "0.5"}});
}
HostDeviceVector<float> predts;
predts.Resize(n, 3.14f); // prediction is performed on host.
ASSERT_FALSE(predts.DeviceCanRead());
obj->PredTransform(&predts);
ASSERT_FALSE(predts.DeviceCanRead());
ASSERT_TRUE(predts.HostCanWrite());
}
}

Expand Down
161 changes: 161 additions & 0 deletions tests/cpp/objective/test_regression_obj.cc
Original file line number Diff line number Diff line change
Expand Up @@ -124,6 +124,167 @@ void TestsLogisticRawGPair(const Context* ctx) {
{0.25f, 0.24f, 0.20f, 0.19f, 0.25f, 0.24f, 0.20f, 0.19f});
}

// Spot-checks first- and second-order gradients of the Poisson count
// objective ("count:poisson") on a small grid of predictions and labels,
// once with explicit unit weights and once with an empty weight vector
// (which must behave like all-ones weights).
void TestPoissonRegressionGPair(const Context* ctx) {
  std::unique_ptr<ObjFunction> obj{ObjFunction::Create("count:poisson", ctx)};

  std::vector<std::pair<std::string, std::string>> config;
  config.emplace_back("max_delta_step", "0.1f");
  obj->Configure(config);

  std::vector<float> preds{ 0, 0.1f, 0.9f, 1, 0, 0.1f, 0.9f, 1};
  std::vector<float> labels{ 0, 0, 0, 0, 1, 1, 1, 1};
  std::vector<float> expected_grad{ 1, 1.10f, 2.45f, 2.71f, 0, 0.10f, 1.45f, 1.71f};
  std::vector<float> expected_hess{1.10f, 1.22f, 2.71f, 3.00f, 1.10f, 1.22f, 2.71f, 3.00f};

  // Explicit unit weights.
  CheckObjFunction(obj, preds, labels, {1, 1, 1, 1, 1, 1, 1, 1},
                   expected_grad, expected_hess);
  // Empty weight vector: identical expectations.
  CheckObjFunction(obj, preds, labels, {}, expected_grad, expected_hess);
}

// Exercises configuration round-trip, label validation, ProbToMargin and
// PredTransform for the Poisson objective ("count:poisson").
void TestPoissonRegressionBasic(const Context* ctx) {
  std::vector<std::pair<std::string, std::string>> args;
  std::unique_ptr<ObjFunction> obj {
    ObjFunction::Create("count:poisson", ctx)
  };

  obj->Configure(args);
  CheckConfigReload(obj, "count:poisson");

  // Labels must be non-negative counts.
  EXPECT_ANY_THROW(CheckObjFunction(obj, {0}, {-1}, {1}, {0}, {0}))
      << "Expected error when label < 0 for PoissonRegression";

  // ProbToMargin: expected values match log(p) within tolerance.
  EXPECT_NEAR(obj->ProbToMargin(0.1f), -2.30f, 0.01f);
  EXPECT_NEAR(obj->ProbToMargin(0.5f), -0.69f, 0.01f);
  EXPECT_NEAR(obj->ProbToMargin(0.9f), -0.10f, 0.01f);

  // PredTransform: expected values match exp(margin) within tolerance.
  HostDeviceVector<bst_float> io_preds = {0, 0.1f, 0.5f, 0.9f, 1};
  std::vector<bst_float> out_preds = {1, 1.10f, 1.64f, 2.45f, 2.71f};
  obj->PredTransform(&io_preds);
  auto& preds = io_preds.HostVector();
  // Use an unsigned index instead of the old int loop with a static_cast:
  // avoids the signed/unsigned mismatch entirely.
  for (std::size_t i = 0; i < io_preds.Size(); ++i) {
    EXPECT_NEAR(preds[i], out_preds[i], 0.01f);
  }
}

// Spot-checks gradients and hessians of the gamma deviance objective
// ("reg:gamma"), with explicit unit weights and again with an empty
// weight vector (expected to give identical results).
void TestGammaRegressionGPair(const Context* ctx) {
  std::unique_ptr<ObjFunction> obj{ObjFunction::Create("reg:gamma", ctx)};

  std::vector<std::pair<std::string, std::string>> config;
  obj->Configure(config);

  std::vector<float> preds{0, 0.1f, 0.9f, 1, 0, 0.1f, 0.9f, 1};
  std::vector<float> labels{2, 2, 2, 2, 1, 1, 1, 1};
  std::vector<float> expected_grad{-1, -0.809, 0.187, 0.264, 0, 0.09f, 0.59f, 0.63f};
  std::vector<float> expected_hess{2, 1.809, 0.813, 0.735, 1, 0.90f, 0.40f, 0.36f};

  // Explicit unit weights.
  CheckObjFunction(obj, preds, labels, {1, 1, 1, 1, 1, 1, 1, 1},
                   expected_grad, expected_hess);
  // Empty weight vector: identical expectations.
  CheckObjFunction(obj, preds, labels, {}, expected_grad, expected_hess);
}

// Exercises configuration round-trip, label validation, ProbToMargin and
// PredTransform for the gamma objective ("reg:gamma").
void TestGammaRegressionBasic(const Context* ctx) {
  std::vector<std::pair<std::string, std::string>> args;
  std::unique_ptr<ObjFunction> obj{ObjFunction::Create("reg:gamma", ctx)};

  obj->Configure(args);
  CheckConfigReload(obj, "reg:gamma");

  // Gamma labels must be strictly positive: zero and negative both throw.
  EXPECT_ANY_THROW(CheckObjFunction(obj, {0}, {0}, {1}, {0}, {0}))
      << "Expected error when label = 0 for GammaRegression";
  EXPECT_ANY_THROW(CheckObjFunction(obj, {-1}, {-1}, {1}, {-1}, {-3}))
      << "Expected error when label < 0 for GammaRegression";

  // ProbToMargin: expected values match log(p) within tolerance.
  EXPECT_NEAR(obj->ProbToMargin(0.1f), -2.30f, 0.01f);
  EXPECT_NEAR(obj->ProbToMargin(0.5f), -0.69f, 0.01f);
  EXPECT_NEAR(obj->ProbToMargin(0.9f), -0.10f, 0.01f);

  // PredTransform: expected values match exp(margin) within tolerance.
  HostDeviceVector<bst_float> io_preds = {0, 0.1f, 0.5f, 0.9f, 1};
  std::vector<bst_float> out_preds = {1, 1.10f, 1.64f, 2.45f, 2.71f};
  obj->PredTransform(&io_preds);
  auto& preds = io_preds.HostVector();
  // Use an unsigned index instead of the old int loop with a static_cast:
  // avoids the signed/unsigned mismatch entirely.
  for (std::size_t i = 0; i < io_preds.Size(); ++i) {
    EXPECT_NEAR(preds[i], out_preds[i], 0.01f);
  }
}

// Spot-checks gradients and hessians of the Tweedie objective
// ("reg:tweedie") with tweedie_variance_power = 1.1, using unit weights and
// an empty weight vector, and verifies that the default evaluation metric
// name embeds the configured variance power.
void TestTweedieRegressionGPair(const Context* ctx) {
  std::vector<std::pair<std::string, std::string>> args;
  std::unique_ptr<ObjFunction> obj{ObjFunction::Create("reg:tweedie", ctx)};

  args.emplace_back("tweedie_variance_power", "1.1f");
  obj->Configure(args);

  CheckObjFunction(obj,
                   { 0, 0.1f, 0.9f, 1, 0, 0.1f, 0.9f, 1},
                   { 0, 0, 0, 0, 1, 1, 1, 1},
                   { 1, 1, 1, 1, 1, 1, 1, 1},
                   { 1, 1.09f, 2.24f, 2.45f, 0, 0.10f, 1.33f, 1.55f},
                   {0.89f, 0.98f, 2.02f, 2.21f, 1, 1.08f, 2.11f, 2.30f});
  CheckObjFunction(obj,
                   { 0, 0.1f, 0.9f, 1, 0, 0.1f, 0.9f, 1},
                   { 0, 0, 0, 0, 1, 1, 1, 1},
                   {},  // Empty weight.
                   { 1, 1.09f, 2.24f, 2.45f, 0, 0.10f, 1.33f, 1.55f},
                   {0.89f, 0.98f, 2.02f, 2.21f, 1, 1.08f, 2.11f, 2.30f});
  // The default metric is "tweedie-nloglik@<variance_power>".  (The literal
  // had been mangled into an obfuscated "[email protected]" placeholder.)
  ASSERT_EQ(obj->DefaultEvalMetric(), std::string{"tweedie-nloglik@1.1"});
}

void TestTweedieRegressionBasic(const Context* ctx) {
std::vector<std::pair<std::string, std::string>> args;
std::unique_ptr<ObjFunction> obj{ObjFunction::Create("reg:tweedie", ctx)};

obj->Configure(args);
CheckConfigReload(obj, "reg:tweedie");

// test label validation
EXPECT_ANY_THROW(CheckObjFunction(obj, {0}, {-1}, {1}, {0}, {0}))
<< "Expected error when label < 0 for TweedieRegression";

// test ProbToMargin
EXPECT_NEAR(obj->ProbToMargin(0.1f), -2.30f, 0.01f);
EXPECT_NEAR(obj->ProbToMargin(0.5f), -0.69f, 0.01f);
EXPECT_NEAR(obj->ProbToMargin(0.9f), -0.10f, 0.01f);

// test PredTransform
HostDeviceVector<bst_float> io_preds = {0, 0.1f, 0.5f, 0.9f, 1};
std::vector<bst_float> out_preds = {1, 1.10f, 1.64f, 2.45f, 2.71f};
obj->PredTransform(&io_preds);
auto& preds = io_preds.HostVector();
for (int i = 0; i < static_cast<int>(io_preds.Size()); ++i) {
EXPECT_NEAR(preds[i], out_preds[i], 0.01f);
}
}

// Spot-checks gradients and hessians of the Cox proportional-hazards
// objective ("survival:cox") on a fixed prediction/label grid with unit
// weights.
void TestCoxRegressionGPair(const Context* ctx) {
  std::unique_ptr<ObjFunction> obj{ObjFunction::Create("survival:cox", ctx)};

  std::vector<std::pair<std::string, std::string>> config;
  obj->Configure(config);

  std::vector<float> preds{ 0, 0.1f, 0.9f, 1, 0, 0.1f, 0.9f, 1};
  std::vector<float> labels{ 0, -2, -2, 2, 3, 5, -10, 100};
  CheckObjFunction(obj, preds, labels,
                   { 1, 1, 1, 1, 1, 1, 1, 1},
                   { 0, 0, 0, -0.799f, -0.788f, -0.590f, 0.910f, 1.006f},
                   { 0, 0, 0, 0.160f, 0.186f, 0.348f, 0.610f, 0.639f});
}

void TestAbsoluteError(const Context* ctx) {
std::unique_ptr<ObjFunction> obj{ObjFunction::Create("reg:absoluteerror", ctx)};
obj->Configure({});
Expand Down
Loading

0 comments on commit a2a348b

Please sign in to comment.