Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Better HybridSmoother #1996

Merged
merged 18 commits into from
Jan 25, 2025
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
59 changes: 29 additions & 30 deletions gtsam/hybrid/HybridSmoother.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -24,17 +24,14 @@
namespace gtsam {

/* ************************************************************************* */
Ordering HybridSmoother::getOrdering(
const HybridGaussianFactorGraph &newFactors) {
HybridGaussianFactorGraph factors(hybridBayesNet());
factors.push_back(newFactors);

Ordering HybridSmoother::getOrdering(const HybridGaussianFactorGraph &factors,
const KeySet &newFactorKeys) {
// Get all the discrete keys from the factors
KeySet allDiscrete = factors.discreteKeySet();

// Create KeyVector with continuous keys followed by discrete keys.
KeyVector newKeysDiscreteLast;
const KeySet newFactorKeys = newFactors.keys();

// Insert continuous keys first.
for (auto &k : newFactorKeys) {
if (!allDiscrete.exists(k)) {
Expand All @@ -56,23 +53,29 @@ Ordering HybridSmoother::getOrdering(
}

/* ************************************************************************* */
void HybridSmoother::update(HybridGaussianFactorGraph graph,
void HybridSmoother::update(const HybridGaussianFactorGraph &graph,
std::optional<size_t> maxNrLeaves,
const std::optional<Ordering> given_ordering) {
HybridGaussianFactorGraph updatedGraph;
// Add the necessary conditionals from the previous timestep(s).
std::tie(updatedGraph, hybridBayesNet_) =
addConditionals(graph, hybridBayesNet_);

Ordering ordering;
// If no ordering provided, then we compute one
if (!given_ordering.has_value()) {
ordering = this->getOrdering(graph);
// Get the keys from the new factors
const KeySet newFactorKeys = graph.keys();

// Since updatedGraph now has all the connected conditionals,
// we can get the correct ordering.
ordering = this->getOrdering(updatedGraph, newFactorKeys);
} else {
ordering = *given_ordering;
}

// Add the necessary conditionals from the previous timestep(s).
std::tie(graph, hybridBayesNet_) =
addConditionals(graph, hybridBayesNet_, ordering);

// Eliminate.
HybridBayesNet bayesNetFragment = *graph.eliminateSequential(ordering);
HybridBayesNet bayesNetFragment = *updatedGraph.eliminateSequential(ordering);

/// Prune
if (maxNrLeaves) {
Expand All @@ -88,21 +91,18 @@ void HybridSmoother::update(HybridGaussianFactorGraph graph,
/* ************************************************************************* */
std::pair<HybridGaussianFactorGraph, HybridBayesNet>
HybridSmoother::addConditionals(const HybridGaussianFactorGraph &originalGraph,
const HybridBayesNet &originalHybridBayesNet,
const Ordering &ordering) const {
const HybridBayesNet &hybridBayesNet) const {
HybridGaussianFactorGraph graph(originalGraph);
HybridBayesNet hybridBayesNet(originalHybridBayesNet);
HybridBayesNet updatedHybridBayesNet(hybridBayesNet);

KeySet factorKeys = graph.keys();

// If hybridBayesNet is not empty,
// it means we have conditionals to add to the factor graph.
if (!hybridBayesNet.empty()) {
// We add all relevant hybrid conditionals on the last continuous variable
// in the previous `hybridBayesNet` to the graph

// Conditionals to remove from the bayes net
// since the conditional will be updated.
std::vector<HybridConditional::shared_ptr> conditionals_to_erase;

// New conditionals to add to the graph
gtsam::HybridBayesNet newConditionals;

Expand All @@ -112,25 +112,24 @@ HybridSmoother::addConditionals(const HybridGaussianFactorGraph &originalGraph,
auto conditional = hybridBayesNet.at(i);

for (auto &key : conditional->frontals()) {
if (std::find(ordering.begin(), ordering.end(), key) !=
ordering.end()) {
if (std::find(factorKeys.begin(), factorKeys.end(), key) !=
factorKeys.end()) {
newConditionals.push_back(conditional);
conditionals_to_erase.push_back(conditional);

// Remove the conditional from the updated Bayes net
auto it = find(updatedHybridBayesNet.begin(),
updatedHybridBayesNet.end(), conditional);
updatedHybridBayesNet.erase(it);

break;
}
}
}
// Remove conditionals at the end so we don't affect the order in the
// original bayes net.
for (auto &&conditional : conditionals_to_erase) {
auto it = find(hybridBayesNet.begin(), hybridBayesNet.end(), conditional);
hybridBayesNet.erase(it);
}

graph.push_back(newConditionals);
}
return {graph, hybridBayesNet};

return {graph, updatedHybridBayesNet};
}

/* ************************************************************************* */
Expand Down
30 changes: 27 additions & 3 deletions gtsam/hybrid/HybridSmoother.h
Original file line number Diff line number Diff line change
Expand Up @@ -49,11 +49,35 @@ class GTSAM_EXPORT HybridSmoother {
* @param given_ordering The (optional) ordering for elimination, only
* continuous variables are allowed
*/
void update(HybridGaussianFactorGraph graph,
void update(const HybridGaussianFactorGraph& graph,
std::optional<size_t> maxNrLeaves = {},
const std::optional<Ordering> given_ordering = {});

Ordering getOrdering(const HybridGaussianFactorGraph& newFactors);
/**
 * @brief Get an elimination ordering which eliminates continuous and then
 * discrete.
 *
 * Expects `newFactors` to already have the necessary conditionals connected
 * to the variables in the newly added factors.
 *
 * @param factors All the new factors and connected conditionals.
 * @return Ordering
 */

/**
* @brief Get an elimination ordering which eliminates continuous
* and then discrete.
*
* Expects `factors` to already have the necessary conditionals
* which were connected to the variables in the newly added factors.
* Those variables should be in `newFactorKeys`.
*
* @param factors All the new factors and connected conditionals.
* @param newFactorKeys The keys/variables in the newly added factors.
* @return Ordering
*/
Ordering getOrdering(const HybridGaussianFactorGraph& factors,
const KeySet& newFactorKeys);

/**
* @brief Add conditionals from previous timestep as part of liquefication.
Expand All @@ -66,7 +90,7 @@ class GTSAM_EXPORT HybridSmoother {
*/
std::pair<HybridGaussianFactorGraph, HybridBayesNet> addConditionals(
const HybridGaussianFactorGraph& graph,
const HybridBayesNet& hybridBayesNet, const Ordering& ordering) const;
const HybridBayesNet& hybridBayesNet) const;

/**
* @brief Get the hybrid Gaussian conditional from
Expand Down
10 changes: 4 additions & 6 deletions gtsam/hybrid/tests/testHybridSmoother.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -95,16 +95,15 @@ TEST(HybridSmoother, IncrementalSmoother) {
initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));

HybridGaussianFactorGraph linearized = *graph.linearize(initial);
Ordering ordering = smoother.getOrdering(linearized);

smoother.update(linearized, maxNrLeaves, ordering);
smoother.update(linearized, maxNrLeaves);

// Clear all the factors from the graph
graph.resize(0);
}

EXPECT_LONGS_EQUAL(11,
smoother.hybridBayesNet().at(0)->asDiscrete()->nrValues());
smoother.hybridBayesNet().at(3)->asDiscrete()->nrValues());

// Get the continuous delta update as well as
// the optimal discrete assignment.
Expand Down Expand Up @@ -150,16 +149,15 @@ TEST(HybridSmoother, ValidPruningError) {
initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));

HybridGaussianFactorGraph linearized = *graph.linearize(initial);
Ordering ordering = smoother.getOrdering(linearized);

smoother.update(linearized, maxNrLeaves, ordering);
smoother.update(linearized, maxNrLeaves);

// Clear all the factors from the graph
graph.resize(0);
}

EXPECT_LONGS_EQUAL(14,
smoother.hybridBayesNet().at(0)->asDiscrete()->nrValues());
smoother.hybridBayesNet().at(6)->asDiscrete()->nrValues());

// Get the continuous delta update as well as
// the optimal discrete assignment.
Expand Down
Loading