From a2a348be8a947805321d2d59ebd182679796cc58 Mon Sep 17 00:00:00 2001
From: Dmitry Razdoburdin
Date: Thu, 12 Dec 2024 22:15:34 +0100
Subject: [PATCH] SYCL. Add more tests for SYCL objectives (#11064)

---
 src/common/ranking_utils.h                   |   6 +-
 src/objective/lambdarank_obj.cc              |   7 +-
 tests/cpp/objective/test_aft_obj.cc          |  26 ++-
 tests/cpp/objective/test_aft_obj.cu          |   2 +-
 tests/cpp/objective/test_aft_obj.h           |  23 +++
 tests/cpp/objective/test_aft_obj_cpu.cc      |  41 +++++
 tests/cpp/objective/test_lambdarank_obj.cc   |   8 +-
 tests/cpp/objective/test_objective.cc        |  28 ++-
 tests/cpp/objective/test_regression_obj.cc   | 161 ++++++++++++++++++
 tests/cpp/objective/test_regression_obj.h    |  14 ++
 .../cpp/objective/test_regression_obj_cpu.cc | 147 +---------------
 tests/cpp/plugin/test_sycl_aft_obj.cc        |  46 +++++
 tests/cpp/plugin/test_sycl_lambdarank_obj.cc |  45 +++++
 tests/cpp/plugin/test_sycl_regression_obj.cc |  42 +++++
 14 files changed, 416 insertions(+), 180 deletions(-)
 create mode 100644 tests/cpp/objective/test_aft_obj.h
 create mode 100644 tests/cpp/objective/test_aft_obj_cpu.cc
 create mode 100644 tests/cpp/plugin/test_sycl_aft_obj.cc
 create mode 100644 tests/cpp/plugin/test_sycl_lambdarank_obj.cc

diff --git a/src/common/ranking_utils.h b/src/common/ranking_utils.h
index acba0feeb2a4..784dafc8e172 100644
--- a/src/common/ranking_utils.h
+++ b/src/common/ranking_utils.h
@@ -320,7 +320,9 @@ class NDCGCache : public RankingCache {
   }
   linalg::VectorView InvIDCG(Context const* ctx) const {
-    return inv_idcg_.View(ctx->Device());
+    // This function doesn't have a SYCL-specific implementation yet.
+    // For that reason we transfer the data to host when SYCL is used, to ensure proper execution.
+    return inv_idcg_.View(ctx->Device().IsSycl() ? DeviceOrd::CPU() : ctx->Device());
   }
   common::Span Discount(Context const* ctx) const {
     return ctx->IsCUDA() ? discounts_.ConstDeviceSpan() : discounts_.ConstHostSpan();
@@ -330,7 +332,7 @@
       dcg_.SetDevice(ctx->Device());
       dcg_.Reshape(this->Groups());
     }
-    return dcg_.View(ctx->Device());
+    return dcg_.View(ctx->Device().IsSycl() ? DeviceOrd::CPU() : ctx->Device());
   }
 };

diff --git a/src/objective/lambdarank_obj.cc b/src/objective/lambdarank_obj.cc
index c50a55b3a17c..4a47de9bd46f 100644
--- a/src/objective/lambdarank_obj.cc
+++ b/src/objective/lambdarank_obj.cc
@@ -112,8 +112,11 @@ class LambdaRankObj : public FitIntercept {
                                        lj_full_.View(ctx_->Device()), &ti_plus_, &tj_minus_,
                                        &li_, &lj_, p_cache_);
     } else {
-      cpu_impl::LambdaRankUpdatePositionBias(ctx_, li_full_.View(ctx_->Device()),
-                                             lj_full_.View(ctx_->Device()), &ti_plus_, &tj_minus_,
+      // This function doesn't have a SYCL-specific implementation yet.
+      // For that reason we transfer the data to host when SYCL is used, to ensure proper execution.
+      auto device = ctx_->Device().IsSycl() ?
DeviceOrd::CPU() : ctx_->Device(); + cpu_impl::LambdaRankUpdatePositionBias(ctx_, li_full_.View(device), + lj_full_.View(device), &ti_plus_, &tj_minus_, &li_, &lj_, p_cache_); } diff --git a/tests/cpp/objective/test_aft_obj.cc b/tests/cpp/objective/test_aft_obj.cc index f31debb21af9..cd031b6bcdf5 100644 --- a/tests/cpp/objective/test_aft_obj.cc +++ b/tests/cpp/objective/test_aft_obj.cc @@ -10,11 +10,11 @@ #include "xgboost/objective.h" #include "xgboost/logging.h" #include "../helpers.h" +#include "test_aft_obj.h" namespace xgboost::common { -TEST(Objective, DeclareUnifiedTest(AFTObjConfiguration)) { - auto ctx = MakeCUDACtx(GPUIDX); - std::unique_ptr objective(ObjFunction::Create("survival:aft", &ctx)); +void TestAFTObjConfiguration(const Context* ctx) { + std::unique_ptr objective(ObjFunction::Create("survival:aft", ctx)); objective->Configure({ {"aft_loss_distribution", "logistic"}, {"aft_loss_distribution_scale", "5"} }); @@ -73,9 +73,8 @@ static inline void CheckGPairOverGridPoints( } } -TEST(Objective, DeclareUnifiedTest(AFTObjGPairUncensoredLabels)) { - auto ctx = MakeCUDACtx(GPUIDX); - std::unique_ptr obj(ObjFunction::Create("survival:aft", &ctx)); +void TestAFTObjGPairUncensoredLabels(const Context* ctx) { + std::unique_ptr obj(ObjFunction::Create("survival:aft", ctx)); CheckGPairOverGridPoints(obj.get(), 100.0f, 100.0f, "normal", { -3.9120f, -3.4013f, -2.8905f, -2.3798f, -1.8691f, -1.3583f, -0.8476f, -0.3368f, 0.1739f, @@ -97,9 +96,8 @@ TEST(Objective, DeclareUnifiedTest(AFTObjGPairUncensoredLabels)) { 0.3026f, 0.1816f, 0.1090f, 0.0654f, 0.0392f, 0.0235f, 0.0141f, 0.0085f, 0.0051f, 0.0031f }); } -TEST(Objective, DeclareUnifiedTest(AFTObjGPairLeftCensoredLabels)) { - auto ctx = MakeCUDACtx(GPUIDX); - std::unique_ptr obj(ObjFunction::Create("survival:aft", &ctx)); +void TestAFTObjGPairLeftCensoredLabels(const Context* ctx) { + std::unique_ptr obj(ObjFunction::Create("survival:aft", ctx)); CheckGPairOverGridPoints(obj.get(), 0.0f, 20.0f, "normal", { 0.0285f, 0.0832f, 0.1951f, 0.3804f, 0.6403f, 0.9643f, 1.3379f, 1.7475f, 2.1828f, 2.6361f, @@ -118,9 +116,8 @@ TEST(Objective, DeclareUnifiedTest(AFTObjGPairLeftCensoredLabels)) { 0.0296f, 0.0179f, 0.0108f, 0.0065f, 0.0039f, 0.0024f, 0.0014f, 0.0008f, 0.0005f, 0.0003f }); } -TEST(Objective, DeclareUnifiedTest(AFTObjGPairRightCensoredLabels)) { - auto ctx = MakeCUDACtx(GPUIDX); - std::unique_ptr obj(ObjFunction::Create("survival:aft", &ctx)); +void TestAFTObjGPairRightCensoredLabels(const Context* ctx) { + std::unique_ptr obj(ObjFunction::Create("survival:aft", ctx)); CheckGPairOverGridPoints(obj.get(), 60.0f, std::numeric_limits::infinity(), "normal", { -3.6583f, -3.1815f, -2.7135f, -2.2577f, -1.8190f, -1.4044f, -1.0239f, -0.6905f, -0.4190f, @@ -142,9 +139,8 @@ TEST(Objective, DeclareUnifiedTest(AFTObjGPairRightCensoredLabels)) { 0.1816f, 0.1089f, 0.0654f, 0.0392f, 0.0235f, 0.0141f, 0.0085f, 0.0051f, 0.0031f, 0.0018f }); } -TEST(Objective, DeclareUnifiedTest(AFTObjGPairIntervalCensoredLabels)) { - auto ctx = MakeCUDACtx(GPUIDX); - std::unique_ptr obj(ObjFunction::Create("survival:aft", &ctx)); +void TestAFTObjGPairIntervalCensoredLabels(const Context* ctx) { + std::unique_ptr obj(ObjFunction::Create("survival:aft", ctx)); CheckGPairOverGridPoints(obj.get(), 16.0f, 200.0f, "normal", { -2.4435f, -1.9965f, -1.5691f, -1.1679f, -0.7990f, -0.4649f, -0.1596f, 0.1336f, 0.4370f, diff --git a/tests/cpp/objective/test_aft_obj.cu b/tests/cpp/objective/test_aft_obj.cu index 3da6bc94b9f7..134d1a6b7e5e 100644 --- a/tests/cpp/objective/test_aft_obj.cu 
+++ b/tests/cpp/objective/test_aft_obj.cu @@ -3,4 +3,4 @@ */ // Dummy file to keep the CUDA tests. -#include "test_aft_obj.cc" +#include "test_aft_obj_cpu.cc" diff --git a/tests/cpp/objective/test_aft_obj.h b/tests/cpp/objective/test_aft_obj.h new file mode 100644 index 000000000000..3f959dc6eb52 --- /dev/null +++ b/tests/cpp/objective/test_aft_obj.h @@ -0,0 +1,23 @@ +/** + * Copyright 2020-2024 by XGBoost Contributors + */ +#ifndef XGBOOST_TEST_AFT_OBJ_H_ +#define XGBOOST_TEST_AFT_OBJ_H_ + +#include // for Context + +namespace xgboost::common { + +void TestAFTObjConfiguration(const Context* ctx); + +void TestAFTObjGPairUncensoredLabels(const Context* ctx); + +void TestAFTObjGPairLeftCensoredLabels(const Context* ctx); + +void TestAFTObjGPairRightCensoredLabels(const Context* ctx); + +void TestAFTObjGPairIntervalCensoredLabels(const Context* ctx); + +} // namespace xgboost::common + +#endif // XGBOOST_TEST_AFT_OBJ_H_ diff --git a/tests/cpp/objective/test_aft_obj_cpu.cc b/tests/cpp/objective/test_aft_obj_cpu.cc new file mode 100644 index 000000000000..f457e4f97f3b --- /dev/null +++ b/tests/cpp/objective/test_aft_obj_cpu.cc @@ -0,0 +1,41 @@ +/** + * Copyright 2020-2024, XGBoost Contributors + */ +#include +#include +#include +#include +#include + +#include "xgboost/objective.h" +#include "xgboost/logging.h" +#include "../helpers.h" +#include "test_aft_obj.h" + +namespace xgboost::common { +TEST(Objective, DeclareUnifiedTest(AFTObjConfiguration)) { + auto ctx = MakeCUDACtx(GPUIDX); + TestAFTObjConfiguration(&ctx); +} + +TEST(Objective, DeclareUnifiedTest(AFTObjGPairUncensoredLabels)) { + auto ctx = MakeCUDACtx(GPUIDX); + TestAFTObjGPairUncensoredLabels(&ctx); +} + +TEST(Objective, DeclareUnifiedTest(AFTObjGPairLeftCensoredLabels)) { + auto ctx = MakeCUDACtx(GPUIDX); + TestAFTObjGPairLeftCensoredLabels(&ctx); +} + +TEST(Objective, DeclareUnifiedTest(AFTObjGPairRightCensoredLabels)) { + auto ctx = MakeCUDACtx(GPUIDX); + TestAFTObjGPairRightCensoredLabels(&ctx); +} + +TEST(Objective, DeclareUnifiedTest(AFTObjGPairIntervalCensoredLabels)) { + auto ctx = MakeCUDACtx(GPUIDX); + TestAFTObjGPairIntervalCensoredLabels(&ctx); +} + +} // namespace xgboost::common diff --git a/tests/cpp/objective/test_lambdarank_obj.cc b/tests/cpp/objective/test_lambdarank_obj.cc index a9249fc284c4..82441ea310ec 100644 --- a/tests/cpp/objective/test_lambdarank_obj.cc +++ b/tests/cpp/objective/test_lambdarank_obj.cc @@ -246,9 +246,9 @@ void TestMAPStat(Context const* ctx) { predt.SetDevice(ctx->Device()); auto rank_idx = - p_cache->SortedIdx(ctx, ctx->IsCPU() ? predt.ConstHostSpan() : predt.ConstDeviceSpan()); + p_cache->SortedIdx(ctx, !ctx->IsCUDA() ? predt.ConstHostSpan() : predt.ConstDeviceSpan()); - if (ctx->IsCPU()) { + if (!ctx->IsCUDA()) { obj::cpu_impl::MAPStat(ctx, info.labels.HostView().Slice(linalg::All(), 0), rank_idx, p_cache); } else { @@ -283,9 +283,9 @@ void TestMAPStat(Context const* ctx) { predt.SetDevice(ctx->Device()); auto rank_idx = - p_cache->SortedIdx(ctx, ctx->IsCPU() ? predt.ConstHostSpan() : predt.ConstDeviceSpan()); + p_cache->SortedIdx(ctx, !ctx->IsCUDA() ? 
predt.ConstHostSpan() : predt.ConstDeviceSpan()); - if (ctx->IsCPU()) { + if (!ctx->IsCUDA()) { obj::cpu_impl::MAPStat(ctx, info.labels.HostView().Slice(linalg::All(), 0), rank_idx, p_cache); } else { diff --git a/tests/cpp/objective/test_objective.cc b/tests/cpp/objective/test_objective.cc index ab58013230f6..efdd03612a0f 100644 --- a/tests/cpp/objective/test_objective.cc +++ b/tests/cpp/objective/test_objective.cc @@ -29,23 +29,19 @@ TEST(Objective, PredTransform) { size_t n = 100; for (const auto& entry : ::dmlc::Registry<::xgboost::ObjFunctionReg>::List()) { - // SYCL implementations are skipped for this test - const std::string sycl_postfix = "sycl"; - if ((entry->name.size() >= sycl_postfix.size()) && !std::equal(sycl_postfix.rbegin(), sycl_postfix.rend(), entry->name.rbegin())) { - std::unique_ptr obj{xgboost::ObjFunction::Create(entry->name, &tparam)}; - if (entry->name.find("multi") != std::string::npos) { - obj->Configure(Args{{"num_class", "2"}}); - } - if (entry->name.find("quantile") != std::string::npos) { - obj->Configure(Args{{"quantile_alpha", "0.5"}}); - } - HostDeviceVector predts; - predts.Resize(n, 3.14f); // prediction is performed on host. - ASSERT_FALSE(predts.DeviceCanRead()); - obj->PredTransform(&predts); - ASSERT_FALSE(predts.DeviceCanRead()); - ASSERT_TRUE(predts.HostCanWrite()); + std::unique_ptr obj{xgboost::ObjFunction::Create(entry->name, &tparam)}; + if (entry->name.find("multi") != std::string::npos) { + obj->Configure(Args{{"num_class", "2"}}); } + if (entry->name.find("quantile") != std::string::npos) { + obj->Configure(Args{{"quantile_alpha", "0.5"}}); + } + HostDeviceVector predts; + predts.Resize(n, 3.14f); // prediction is performed on host. + ASSERT_FALSE(predts.DeviceCanRead()); + obj->PredTransform(&predts); + ASSERT_FALSE(predts.DeviceCanRead()); + ASSERT_TRUE(predts.HostCanWrite()); } } diff --git a/tests/cpp/objective/test_regression_obj.cc b/tests/cpp/objective/test_regression_obj.cc index f9d6c67004ed..2cb57a066391 100644 --- a/tests/cpp/objective/test_regression_obj.cc +++ b/tests/cpp/objective/test_regression_obj.cc @@ -124,6 +124,167 @@ void TestsLogisticRawGPair(const Context* ctx) { {0.25f, 0.24f, 0.20f, 0.19f, 0.25f, 0.24f, 0.20f, 0.19f}); } +void TestPoissonRegressionGPair(const Context* ctx) { + std::vector> args; + std::unique_ptr obj { + ObjFunction::Create("count:poisson", ctx) + }; + + args.emplace_back("max_delta_step", "0.1f"); + obj->Configure(args); + + CheckObjFunction(obj, + { 0, 0.1f, 0.9f, 1, 0, 0.1f, 0.9f, 1}, + { 0, 0, 0, 0, 1, 1, 1, 1}, + { 1, 1, 1, 1, 1, 1, 1, 1}, + { 1, 1.10f, 2.45f, 2.71f, 0, 0.10f, 1.45f, 1.71f}, + {1.10f, 1.22f, 2.71f, 3.00f, 1.10f, 1.22f, 2.71f, 3.00f}); + CheckObjFunction(obj, + { 0, 0.1f, 0.9f, 1, 0, 0.1f, 0.9f, 1}, + { 0, 0, 0, 0, 1, 1, 1, 1}, + {}, // Empty weight + { 1, 1.10f, 2.45f, 2.71f, 0, 0.10f, 1.45f, 1.71f}, + {1.10f, 1.22f, 2.71f, 3.00f, 1.10f, 1.22f, 2.71f, 3.00f}); +} + +void TestPoissonRegressionBasic(const Context* ctx) { + std::vector> args; + std::unique_ptr obj { + ObjFunction::Create("count:poisson", ctx) + }; + + obj->Configure(args); + CheckConfigReload(obj, "count:poisson"); + + // test label validation + EXPECT_ANY_THROW(CheckObjFunction(obj, {0}, {-1}, {1}, {0}, {0})) + << "Expected error when label < 0 for PoissonRegression"; + + // test ProbToMargin + EXPECT_NEAR(obj->ProbToMargin(0.1f), -2.30f, 0.01f); + EXPECT_NEAR(obj->ProbToMargin(0.5f), -0.69f, 0.01f); + EXPECT_NEAR(obj->ProbToMargin(0.9f), -0.10f, 0.01f); + + // test PredTransform + HostDeviceVector io_preds = 
{0, 0.1f, 0.5f, 0.9f, 1}; + std::vector out_preds = {1, 1.10f, 1.64f, 2.45f, 2.71f}; + obj->PredTransform(&io_preds); + auto& preds = io_preds.HostVector(); + for (int i = 0; i < static_cast(io_preds.Size()); ++i) { + EXPECT_NEAR(preds[i], out_preds[i], 0.01f); + } +} + +void TestGammaRegressionGPair(const Context* ctx) { + std::vector> args; + std::unique_ptr obj { + ObjFunction::Create("reg:gamma", ctx) + }; + + obj->Configure(args); + CheckObjFunction(obj, + {0, 0.1f, 0.9f, 1, 0, 0.1f, 0.9f, 1}, + {2, 2, 2, 2, 1, 1, 1, 1}, + {1, 1, 1, 1, 1, 1, 1, 1}, + {-1, -0.809, 0.187, 0.264, 0, 0.09f, 0.59f, 0.63f}, + {2, 1.809, 0.813, 0.735, 1, 0.90f, 0.40f, 0.36f}); + CheckObjFunction(obj, + {0, 0.1f, 0.9f, 1, 0, 0.1f, 0.9f, 1}, + {2, 2, 2, 2, 1, 1, 1, 1}, + {}, // Empty weight + {-1, -0.809, 0.187, 0.264, 0, 0.09f, 0.59f, 0.63f}, + {2, 1.809, 0.813, 0.735, 1, 0.90f, 0.40f, 0.36f}); +} + +void TestGammaRegressionBasic(const Context* ctx) { + std::vector> args; + std::unique_ptr obj{ObjFunction::Create("reg:gamma", ctx)}; + + obj->Configure(args); + CheckConfigReload(obj, "reg:gamma"); + + // test label validation + EXPECT_ANY_THROW(CheckObjFunction(obj, {0}, {0}, {1}, {0}, {0})) + << "Expected error when label = 0 for GammaRegression"; + EXPECT_ANY_THROW(CheckObjFunction(obj, {-1}, {-1}, {1}, {-1}, {-3})) + << "Expected error when label < 0 for GammaRegression"; + + // test ProbToMargin + EXPECT_NEAR(obj->ProbToMargin(0.1f), -2.30f, 0.01f); + EXPECT_NEAR(obj->ProbToMargin(0.5f), -0.69f, 0.01f); + EXPECT_NEAR(obj->ProbToMargin(0.9f), -0.10f, 0.01f); + + // test PredTransform + HostDeviceVector io_preds = {0, 0.1f, 0.5f, 0.9f, 1}; + std::vector out_preds = {1, 1.10f, 1.64f, 2.45f, 2.71f}; + obj->PredTransform(&io_preds); + auto& preds = io_preds.HostVector(); + for (int i = 0; i < static_cast(io_preds.Size()); ++i) { + EXPECT_NEAR(preds[i], out_preds[i], 0.01f); + } +} + +void TestTweedieRegressionGPair(const Context* ctx) { + std::vector> args; + std::unique_ptr obj{ObjFunction::Create("reg:tweedie", ctx)}; + + args.emplace_back("tweedie_variance_power", "1.1f"); + obj->Configure(args); + + CheckObjFunction(obj, + { 0, 0.1f, 0.9f, 1, 0, 0.1f, 0.9f, 1}, + { 0, 0, 0, 0, 1, 1, 1, 1}, + { 1, 1, 1, 1, 1, 1, 1, 1}, + { 1, 1.09f, 2.24f, 2.45f, 0, 0.10f, 1.33f, 1.55f}, + {0.89f, 0.98f, 2.02f, 2.21f, 1, 1.08f, 2.11f, 2.30f}); + CheckObjFunction(obj, + { 0, 0.1f, 0.9f, 1, 0, 0.1f, 0.9f, 1}, + { 0, 0, 0, 0, 1, 1, 1, 1}, + {}, // Empty weight. 
+ { 1, 1.09f, 2.24f, 2.45f, 0, 0.10f, 1.33f, 1.55f}, + {0.89f, 0.98f, 2.02f, 2.21f, 1, 1.08f, 2.11f, 2.30f}); + ASSERT_EQ(obj->DefaultEvalMetric(), std::string{"tweedie-nloglik@1.1"}); +} + +void TestTweedieRegressionBasic(const Context* ctx) { + std::vector> args; + std::unique_ptr obj{ObjFunction::Create("reg:tweedie", ctx)}; + + obj->Configure(args); + CheckConfigReload(obj, "reg:tweedie"); + + // test label validation + EXPECT_ANY_THROW(CheckObjFunction(obj, {0}, {-1}, {1}, {0}, {0})) + << "Expected error when label < 0 for TweedieRegression"; + + // test ProbToMargin + EXPECT_NEAR(obj->ProbToMargin(0.1f), -2.30f, 0.01f); + EXPECT_NEAR(obj->ProbToMargin(0.5f), -0.69f, 0.01f); + EXPECT_NEAR(obj->ProbToMargin(0.9f), -0.10f, 0.01f); + + // test PredTransform + HostDeviceVector io_preds = {0, 0.1f, 0.5f, 0.9f, 1}; + std::vector out_preds = {1, 1.10f, 1.64f, 2.45f, 2.71f}; + obj->PredTransform(&io_preds); + auto& preds = io_preds.HostVector(); + for (int i = 0; i < static_cast(io_preds.Size()); ++i) { + EXPECT_NEAR(preds[i], out_preds[i], 0.01f); + } +} + +void TestCoxRegressionGPair(const Context* ctx) { + std::vector> args; + std::unique_ptr obj{ObjFunction::Create("survival:cox", ctx)}; + + obj->Configure(args); + CheckObjFunction(obj, + { 0, 0.1f, 0.9f, 1, 0, 0.1f, 0.9f, 1}, + { 0, -2, -2, 2, 3, 5, -10, 100}, + { 1, 1, 1, 1, 1, 1, 1, 1}, + { 0, 0, 0, -0.799f, -0.788f, -0.590f, 0.910f, 1.006f}, + { 0, 0, 0, 0.160f, 0.186f, 0.348f, 0.610f, 0.639f}); +} + void TestAbsoluteError(const Context* ctx) { std::unique_ptr obj{ObjFunction::Create("reg:absoluteerror", ctx)}; obj->Configure({}); diff --git a/tests/cpp/objective/test_regression_obj.h b/tests/cpp/objective/test_regression_obj.h index 1fdb0dc65dba..521a9a318465 100644 --- a/tests/cpp/objective/test_regression_obj.h +++ b/tests/cpp/objective/test_regression_obj.h @@ -18,6 +18,20 @@ void TestLogisticRegressionBasic(const Context* ctx); void TestsLogisticRawGPair(const Context* ctx); +void TestPoissonRegressionGPair(const Context* ctx); + +void TestPoissonRegressionBasic(const Context* ctx); + +void TestGammaRegressionGPair(const Context* ctx); + +void TestGammaRegressionBasic(const Context* ctx); + +void TestTweedieRegressionGPair(const Context* ctx); + +void TestTweedieRegressionBasic(const Context* ctx); + +void TestCoxRegressionGPair(const Context* ctx); + void TestAbsoluteError(const Context* ctx); void TestAbsoluteErrorLeaf(const Context* ctx); diff --git a/tests/cpp/objective/test_regression_obj_cpu.cc b/tests/cpp/objective/test_regression_obj_cpu.cc index 5fc94f897e9d..4d8e83251714 100644 --- a/tests/cpp/objective/test_regression_obj_cpu.cc +++ b/tests/cpp/objective/test_regression_obj_cpu.cc @@ -43,129 +43,27 @@ TEST(Objective, DeclareUnifiedTest(LogisticRawGPair)) { TEST(Objective, DeclareUnifiedTest(PoissonRegressionGPair)) { Context ctx = MakeCUDACtx(GPUIDX); - std::vector> args; - std::unique_ptr obj { - ObjFunction::Create("count:poisson", &ctx) - }; - - args.emplace_back("max_delta_step", "0.1f"); - obj->Configure(args); - - CheckObjFunction(obj, - { 0, 0.1f, 0.9f, 1, 0, 0.1f, 0.9f, 1}, - { 0, 0, 0, 0, 1, 1, 1, 1}, - { 1, 1, 1, 1, 1, 1, 1, 1}, - { 1, 1.10f, 2.45f, 2.71f, 0, 0.10f, 1.45f, 1.71f}, - {1.10f, 1.22f, 2.71f, 3.00f, 1.10f, 1.22f, 2.71f, 3.00f}); - CheckObjFunction(obj, - { 0, 0.1f, 0.9f, 1, 0, 0.1f, 0.9f, 1}, - { 0, 0, 0, 0, 1, 1, 1, 1}, - {}, // Empty weight - { 1, 1.10f, 2.45f, 2.71f, 0, 0.10f, 1.45f, 1.71f}, - {1.10f, 1.22f, 2.71f, 3.00f, 1.10f, 1.22f, 2.71f, 3.00f}); + TestPoissonRegressionGPair(&ctx); } 
TEST(Objective, DeclareUnifiedTest(PoissonRegressionBasic)) { Context ctx = MakeCUDACtx(GPUIDX); - std::vector> args; - std::unique_ptr obj { - ObjFunction::Create("count:poisson", &ctx) - }; - - obj->Configure(args); - CheckConfigReload(obj, "count:poisson"); - - // test label validation - EXPECT_ANY_THROW(CheckObjFunction(obj, {0}, {-1}, {1}, {0}, {0})) - << "Expected error when label < 0 for PoissonRegression"; - - // test ProbToMargin - EXPECT_NEAR(obj->ProbToMargin(0.1f), -2.30f, 0.01f); - EXPECT_NEAR(obj->ProbToMargin(0.5f), -0.69f, 0.01f); - EXPECT_NEAR(obj->ProbToMargin(0.9f), -0.10f, 0.01f); - - // test PredTransform - HostDeviceVector io_preds = {0, 0.1f, 0.5f, 0.9f, 1}; - std::vector out_preds = {1, 1.10f, 1.64f, 2.45f, 2.71f}; - obj->PredTransform(&io_preds); - auto& preds = io_preds.HostVector(); - for (int i = 0; i < static_cast(io_preds.Size()); ++i) { - EXPECT_NEAR(preds[i], out_preds[i], 0.01f); - } + TestPoissonRegressionBasic(&ctx); } TEST(Objective, DeclareUnifiedTest(GammaRegressionGPair)) { Context ctx = MakeCUDACtx(GPUIDX); - std::vector> args; - std::unique_ptr obj { - ObjFunction::Create("reg:gamma", &ctx) - }; - - obj->Configure(args); - CheckObjFunction(obj, - {0, 0.1f, 0.9f, 1, 0, 0.1f, 0.9f, 1}, - {2, 2, 2, 2, 1, 1, 1, 1}, - {1, 1, 1, 1, 1, 1, 1, 1}, - {-1, -0.809, 0.187, 0.264, 0, 0.09f, 0.59f, 0.63f}, - {2, 1.809, 0.813, 0.735, 1, 0.90f, 0.40f, 0.36f}); - CheckObjFunction(obj, - {0, 0.1f, 0.9f, 1, 0, 0.1f, 0.9f, 1}, - {2, 2, 2, 2, 1, 1, 1, 1}, - {}, // Empty weight - {-1, -0.809, 0.187, 0.264, 0, 0.09f, 0.59f, 0.63f}, - {2, 1.809, 0.813, 0.735, 1, 0.90f, 0.40f, 0.36f}); + TestGammaRegressionGPair(&ctx); } TEST(Objective, DeclareUnifiedTest(GammaRegressionBasic)) { Context ctx = MakeCUDACtx(GPUIDX); - std::vector> args; - std::unique_ptr obj{ObjFunction::Create("reg:gamma", &ctx)}; - - obj->Configure(args); - CheckConfigReload(obj, "reg:gamma"); - - // test label validation - EXPECT_ANY_THROW(CheckObjFunction(obj, {0}, {0}, {1}, {0}, {0})) - << "Expected error when label = 0 for GammaRegression"; - EXPECT_ANY_THROW(CheckObjFunction(obj, {-1}, {-1}, {1}, {-1}, {-3})) - << "Expected error when label < 0 for GammaRegression"; - - // test ProbToMargin - EXPECT_NEAR(obj->ProbToMargin(0.1f), -2.30f, 0.01f); - EXPECT_NEAR(obj->ProbToMargin(0.5f), -0.69f, 0.01f); - EXPECT_NEAR(obj->ProbToMargin(0.9f), -0.10f, 0.01f); - - // test PredTransform - HostDeviceVector io_preds = {0, 0.1f, 0.5f, 0.9f, 1}; - std::vector out_preds = {1, 1.10f, 1.64f, 2.45f, 2.71f}; - obj->PredTransform(&io_preds); - auto& preds = io_preds.HostVector(); - for (int i = 0; i < static_cast(io_preds.Size()); ++i) { - EXPECT_NEAR(preds[i], out_preds[i], 0.01f); - } + TestGammaRegressionBasic(&ctx); } TEST(Objective, DeclareUnifiedTest(TweedieRegressionGPair)) { Context ctx = MakeCUDACtx(GPUIDX); - std::vector> args; - std::unique_ptr obj{ObjFunction::Create("reg:tweedie", &ctx)}; - - args.emplace_back("tweedie_variance_power", "1.1f"); - obj->Configure(args); - - CheckObjFunction(obj, - { 0, 0.1f, 0.9f, 1, 0, 0.1f, 0.9f, 1}, - { 0, 0, 0, 0, 1, 1, 1, 1}, - { 1, 1, 1, 1, 1, 1, 1, 1}, - { 1, 1.09f, 2.24f, 2.45f, 0, 0.10f, 1.33f, 1.55f}, - {0.89f, 0.98f, 2.02f, 2.21f, 1, 1.08f, 2.11f, 2.30f}); - CheckObjFunction(obj, - { 0, 0.1f, 0.9f, 1, 0, 0.1f, 0.9f, 1}, - { 0, 0, 0, 0, 1, 1, 1, 1}, - {}, // Empty weight. 
- { 1, 1.09f, 2.24f, 2.45f, 0, 0.10f, 1.33f, 1.55f}, - {0.89f, 0.98f, 2.02f, 2.21f, 1, 1.08f, 2.11f, 2.30f}); - ASSERT_EQ(obj->DefaultEvalMetric(), std::string{"tweedie-nloglik@1.1"}); + TestTweedieRegressionGPair(&ctx); } #if defined(__CUDACC__) @@ -220,45 +118,14 @@ TEST(Objective, CPU_vs_CUDA) { TEST(Objective, DeclareUnifiedTest(TweedieRegressionBasic)) { Context ctx = MakeCUDACtx(GPUIDX); - std::vector> args; - std::unique_ptr obj{ObjFunction::Create("reg:tweedie", &ctx)}; - - obj->Configure(args); - CheckConfigReload(obj, "reg:tweedie"); - - // test label validation - EXPECT_ANY_THROW(CheckObjFunction(obj, {0}, {-1}, {1}, {0}, {0})) - << "Expected error when label < 0 for TweedieRegression"; - - // test ProbToMargin - EXPECT_NEAR(obj->ProbToMargin(0.1f), -2.30f, 0.01f); - EXPECT_NEAR(obj->ProbToMargin(0.5f), -0.69f, 0.01f); - EXPECT_NEAR(obj->ProbToMargin(0.9f), -0.10f, 0.01f); - - // test PredTransform - HostDeviceVector io_preds = {0, 0.1f, 0.5f, 0.9f, 1}; - std::vector out_preds = {1, 1.10f, 1.64f, 2.45f, 2.71f}; - obj->PredTransform(&io_preds); - auto& preds = io_preds.HostVector(); - for (int i = 0; i < static_cast(io_preds.Size()); ++i) { - EXPECT_NEAR(preds[i], out_preds[i], 0.01f); - } + TestTweedieRegressionBasic(&ctx); } // CoxRegression not implemented in GPU code, no need for testing. #if !defined(__CUDACC__) TEST(Objective, CoxRegressionGPair) { Context ctx = MakeCUDACtx(GPUIDX); - std::vector> args; - std::unique_ptr obj{ObjFunction::Create("survival:cox", &ctx)}; - - obj->Configure(args); - CheckObjFunction(obj, - { 0, 0.1f, 0.9f, 1, 0, 0.1f, 0.9f, 1}, - { 0, -2, -2, 2, 3, 5, -10, 100}, - { 1, 1, 1, 1, 1, 1, 1, 1}, - { 0, 0, 0, -0.799f, -0.788f, -0.590f, 0.910f, 1.006f}, - { 0, 0, 0, 0.160f, 0.186f, 0.348f, 0.610f, 0.639f}); + TestCoxRegressionGPair(&ctx); } #endif diff --git a/tests/cpp/plugin/test_sycl_aft_obj.cc b/tests/cpp/plugin/test_sycl_aft_obj.cc new file mode 100644 index 000000000000..275b4e69fb82 --- /dev/null +++ b/tests/cpp/plugin/test_sycl_aft_obj.cc @@ -0,0 +1,46 @@ +/** + * Copyright 2024 by XGBoost contributors + */ +#include +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wtautological-constant-compare" +#pragma GCC diagnostic ignored "-W#pragma-messages" +#include +#pragma GCC diagnostic pop +#include + +#include "../helpers.h" +#include "../objective/test_aft_obj.h" + +namespace xgboost::common { +TEST(SyclObjective, DeclareUnifiedTest(AFTObjConfiguration)) { + Context ctx; + ctx.UpdateAllowUnknown(Args{{"device", "sycl"}}); + TestAFTObjConfiguration(&ctx); +} + +TEST(SyclObjective, DeclareUnifiedTest(AFTObjGPairUncensoredLabels)) { + Context ctx; + ctx.UpdateAllowUnknown(Args{{"device", "sycl"}}); + TestAFTObjGPairUncensoredLabels(&ctx); +} + +TEST(SyclObjective, DeclareUnifiedTest(AFTObjGPairLeftCensoredLabels)) { + Context ctx; + ctx.UpdateAllowUnknown(Args{{"device", "sycl"}}); + TestAFTObjGPairLeftCensoredLabels(&ctx); +} + +TEST(SyclObjective, DeclareUnifiedTest(AFTObjGPairRightCensoredLabels)) { + Context ctx; + ctx.UpdateAllowUnknown(Args{{"device", "sycl"}}); + TestAFTObjGPairRightCensoredLabels(&ctx); +} + +TEST(SyclObjective, DeclareUnifiedTest(AFTObjGPairIntervalCensoredLabels)) { + Context ctx; + ctx.UpdateAllowUnknown(Args{{"device", "sycl"}}); + TestAFTObjGPairIntervalCensoredLabels(&ctx); +} + +} // namespace xgboost::common diff --git a/tests/cpp/plugin/test_sycl_lambdarank_obj.cc b/tests/cpp/plugin/test_sycl_lambdarank_obj.cc new file mode 100644 index 000000000000..2129d8b3e4c3 --- /dev/null +++ 
b/tests/cpp/plugin/test_sycl_lambdarank_obj.cc @@ -0,0 +1,45 @@ +/** + * Copyright 2024 by XGBoost Contributors + */ +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-W#pragma-messages" +#include "../objective/test_lambdarank_obj.h" +#pragma GCC diagnostic pop + +#include + +#include "xgboost/context.h" + +namespace xgboost::obj { +TEST(SyclObjective, LambdaRankNDCGJsonIO) { + Context ctx; + ctx.UpdateAllowUnknown(Args{{"device", "sycl"}}); + TestNDCGJsonIO(&ctx); +} + +TEST(SyclObjective, LambdaRankTestNDCGGPair) { + Context ctx; + ctx.UpdateAllowUnknown(Args{{"device", "sycl"}}); + TestNDCGGPair(&ctx); +} + +TEST(SyclObjective, LambdaRankUnbiasedNDCG) { + Context ctx; + ctx.UpdateAllowUnknown(Args{{"device", "sycl"}}); + TestUnbiasedNDCG(&ctx); +} + +TEST(SyclObjective, LambdaRankMAPStat) { + Context ctx; + ctx.UpdateAllowUnknown(Args{{"device", "sycl"}}); + TestMAPStat(&ctx); +} + +TEST(SyclObjective, LambdaRankMAPGPair) { + Context ctx; + ctx.UpdateAllowUnknown(Args{{"device", "sycl"}}); + TestMAPGPair(&ctx); +} + +} // namespace xgboost::obj + diff --git a/tests/cpp/plugin/test_sycl_regression_obj.cc b/tests/cpp/plugin/test_sycl_regression_obj.cc index 56f842989c2d..d80fc0fb03f9 100644 --- a/tests/cpp/plugin/test_sycl_regression_obj.cc +++ b/tests/cpp/plugin/test_sycl_regression_obj.cc @@ -45,6 +45,48 @@ TEST(SyclObjective, LogisticRawGPair) { TestsLogisticRawGPair(&ctx); } +TEST(SyclObjective, PoissonRegressionGPair) { + Context ctx; + ctx.UpdateAllowUnknown(Args{{"device", "sycl"}}); + TestPoissonRegressionGPair(&ctx); +} + +TEST(SyclObjective, PoissonRegressionBasic) { + Context ctx; + ctx.UpdateAllowUnknown(Args{{"device", "sycl"}}); + TestPoissonRegressionBasic(&ctx); +} + +TEST(SyclObjective, GammaRegressionGPair) { + Context ctx; + ctx.UpdateAllowUnknown(Args{{"device", "sycl"}}); + TestGammaRegressionGPair(&ctx); +} + +TEST(SyclObjective, GammaRegressionBasic) { + Context ctx; + ctx.UpdateAllowUnknown(Args{{"device", "sycl"}}); + TestGammaRegressionBasic(&ctx); +} + +TEST(SyclObjective, TweedieRegressionGPair) { + Context ctx; + ctx.UpdateAllowUnknown(Args{{"device", "sycl"}}); + TestTweedieRegressionGPair(&ctx); +} + +TEST(SyclObjective, TweedieRegressionBasic) { + Context ctx; + ctx.UpdateAllowUnknown(Args{{"device", "sycl"}}); + TestTweedieRegressionBasic(&ctx); +} + +TEST(SyclObjective, CoxRegressionGPair) { + Context ctx; + ctx.UpdateAllowUnknown(Args{{"device", "sycl"}}); + TestCoxRegressionGPair(&ctx); +} + TEST(SyclObjective, AbsoluteError) { Context ctx; ctx.UpdateAllowUnknown(Args{{"device", "sycl"}});