[issue1091] Change landmark cost partitioning option from bool to enum.
Users of the landmark cost partitioning heuristic must specify which cost
partitioning strategy to use. Two strategies are currently available: uniform
and optimal cost partitioning. Previously, the command-line option to choose
between them was a boolean "optimal=(false|true)". It is now an enum option
"cost_partitioning=(uniform|optimal)", which prepares for further strategies
to be added in the future.
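
For illustration, a configuration that previously enabled optimal cost
partitioning through the boolean option would now select it through the enum
option. The sketch below assumes the heuristic is exposed under the plugin
name landmark_cost_partitioning and uses lm_factory as a placeholder for any
landmark factory; neither name is shown in this excerpt:

    before:  landmark_cost_partitioning(lm_factory, optimal=true)
    after:   landmark_cost_partitioning(lm_factory, cost_partitioning=optimal)

Both strategies remain available; only the way they are selected changes.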
ClemensBuechner authored Aug 31, 2023
1 parent 2f0ef82 commit aff6522
Showing 2 changed files with 25 additions and 8 deletions.
27 changes: 19 additions & 8 deletions src/search/landmarks/landmark_cost_partitioning_heuristic.cc
@@ -17,7 +17,8 @@ using namespace std;
namespace landmarks {
LandmarkCostPartitioningHeuristic::LandmarkCostPartitioningHeuristic(
const plugins::Options &opts)
: LandmarkHeuristic(opts) {
: LandmarkHeuristic(opts),
cost_partitioning_strategy(opts.get<CostPartitioningStrategy>("cost_partitioning")) {
if (log.is_at_least_normal()) {
log << "Initializing landmark cost partitioning heuristic..." << endl;
}
@@ -46,16 +47,18 @@ void LandmarkCostPartitioningHeuristic::check_unsupported_features(

void LandmarkCostPartitioningHeuristic::set_cost_assignment(
const plugins::Options &opts) {
if (opts.get<bool>("optimal")) {
if (cost_partitioning_strategy == CostPartitioningStrategy::OPTIMAL) {
lm_cost_assignment =
utils::make_unique_ptr<LandmarkEfficientOptimalSharedCostAssignment>(
task_properties::get_operator_costs(task_proxy),
*lm_graph, opts.get<lp::LPSolverType>("lpsolver"));
} else {
} else if (cost_partitioning_strategy == CostPartitioningStrategy::UNIFORM) {
lm_cost_assignment =
utils::make_unique_ptr<LandmarkUniformSharedCostAssignment>(
task_properties::get_operator_costs(task_proxy),
*lm_graph, opts.get<bool>("alm"));
} else {
ABORT("Unknown cost partitioning strategy");
}
}

@@ -104,10 +107,10 @@ class LandmarkCostPartitioningHeuristicFeature : public plugins::TypedFeature<Ev
"2010"));

LandmarkHeuristic::add_options_to_feature(*this);
add_option<bool>(
"optimal",
"use optimal (LP-based) cost sharing",
"false");
add_option<CostPartitioningStrategy>(
"cost_partitioning",
"strategy for partitioning operator costs among landmarks",
"uniform");
add_option<bool>("alm", "use action landmarks", "true");
lp::add_lp_solver_option_to_feature(*this);

@@ -127,7 +130,7 @@ class LandmarkCostPartitioningHeuristicFeature : public plugins::TypedFeature<Ev
"which point the above inequality might not hold anymore.");
document_note(
"Optimal Cost Partitioning",
"To use ``optimal=true``, you must build the planner with LP "
"To use ``cost_partitioning=optimal``, you must build the planner with LP "
"support. See LPBuildInstructions.");
document_note(
"Preferred operators",
@@ -151,4 +154,12 @@ class LandmarkCostPartitioningHeuristicFeature : public plugins::TypedFeature<Ev
};

static plugins::FeaturePlugin<LandmarkCostPartitioningHeuristicFeature> _plugin;

static plugins::TypedEnumPlugin<CostPartitioningStrategy> _enum_plugin({
{"optimal",
"use optimal (LP-based) cost partitioning"},
{"uniform",
"partition operator costs uniformly among all landmarks "
"achieved by that operator"},
});
}
6 changes: 6 additions & 0 deletions src/search/landmarks/landmark_cost_partitioning_heuristic.h
@@ -6,7 +6,13 @@
namespace landmarks {
class LandmarkCostAssignment;

enum class CostPartitioningStrategy {
OPTIMAL,
UNIFORM,
};

class LandmarkCostPartitioningHeuristic : public LandmarkHeuristic {
const CostPartitioningStrategy cost_partitioning_strategy;
std::unique_ptr<LandmarkCostAssignment> lm_cost_assignment;

void check_unsupported_features(const plugins::Options &opts);
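
As a supplement to the diff above (not part of the commit), the following is a
minimal, self-contained C++ sketch of the select-and-construct pattern that
set_cost_assignment() now follows: an enum value chooses which cost-assignment
object to build, and an unhandled value aborts. All class and function names
in the sketch are illustrative stand-ins rather than Fast Downward's.

// Sketch of the enum-based dispatch used by set_cost_assignment(); the
// names below are stand-ins, not Fast Downward classes.
#include <iostream>
#include <memory>
#include <stdexcept>

enum class CostPartitioningStrategy {
    OPTIMAL,
    UNIFORM,
};

// Stand-in for the LandmarkCostAssignment hierarchy.
struct CostAssignment {
    virtual ~CostAssignment() = default;
    virtual const char *name() const = 0;
};

struct OptimalCostAssignment : CostAssignment {
    const char *name() const override {return "optimal (LP-based)";}
};

struct UniformCostAssignment : CostAssignment {
    const char *name() const override {return "uniform";}
};

// Build the cost assignment selected by the enum value; an unhandled
// value aborts, mirroring the ABORT branch in the diff.
std::unique_ptr<CostAssignment> make_cost_assignment(
    CostPartitioningStrategy strategy) {
    switch (strategy) {
    case CostPartitioningStrategy::OPTIMAL:
        return std::make_unique<OptimalCostAssignment>();
    case CostPartitioningStrategy::UNIFORM:
        return std::make_unique<UniformCostAssignment>();
    default:
        throw std::logic_error("Unknown cost partitioning strategy");
    }
}

int main() {
    auto assignment = make_cost_assignment(CostPartitioningStrategy::UNIFORM);
    std::cout << "Selected cost partitioning: " << assignment->name() << "\n";
}

Adding a third strategy would then amount to a new enumerator, a new branch in
the dispatch, and a new entry in the enum plugin, which matches the commit's
stated goal of preparing for more choices.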
