Merge pull request #1912 from borglab/profiling
varunagrawal authored Dec 2, 2024
2 parents bbdeaa4 + a9c75d8 commit bb5cfb2
Showing 11 changed files with 122 additions and 58 deletions.
54 changes: 25 additions & 29 deletions cmake/FindGooglePerfTools.cmake
@@ -1,42 +1,38 @@
# -*- cmake -*-

-# - Find Google perftools
-# Find the Google perftools includes and libraries
-# This module defines
-# GOOGLE_PERFTOOLS_INCLUDE_DIR, where to find heap-profiler.h, etc.
-# GOOGLE_PERFTOOLS_FOUND, If false, do not try to use Google perftools.
-# also defined for general use are
-# TCMALLOC_LIBRARY, where to find the tcmalloc library.
-
-FIND_PATH(GOOGLE_PERFTOOLS_INCLUDE_DIR google/heap-profiler.h
-/usr/local/include
-/usr/include
-)
+# - Find GPerfTools (formerly Google perftools)
+# Find the GPerfTools libraries
+# If false, do not try to use Google perftools.
+# Also defined for general use are
+# - GPERFTOOLS_TCMALLOC: where to find the tcmalloc library
+# - GPERFTOOLS_PROFILER: where to find the profiler library

SET(TCMALLOC_NAMES ${TCMALLOC_NAMES} tcmalloc)
-FIND_LIBRARY(TCMALLOC_LIBRARY
+find_library(GPERFTOOLS_TCMALLOC
NAMES ${TCMALLOC_NAMES}
PATHS /usr/lib /usr/local/lib
-)
+)
+find_library(GPERFTOOLS_PROFILER
+NAMES profiler
+PATHS /usr/lib /usr/local/lib
+)

-IF (TCMALLOC_LIBRARY AND GOOGLE_PERFTOOLS_INCLUDE_DIR)
-SET(TCMALLOC_LIBRARIES ${TCMALLOC_LIBRARY})
-SET(GOOGLE_PERFTOOLS_FOUND "YES")
-ELSE (TCMALLOC_LIBRARY AND GOOGLE_PERFTOOLS_INCLUDE_DIR)
-SET(GOOGLE_PERFTOOLS_FOUND "NO")
-ENDIF (TCMALLOC_LIBRARY AND GOOGLE_PERFTOOLS_INCLUDE_DIR)
+IF (GPERFTOOLS_TCMALLOC AND GPERFTOOLS_PROFILER)
+SET(TCMALLOC_LIBRARIES ${GPERFTOOLS_TCMALLOC})
+SET(GPERFTOOLS_FOUND "YES")
+ELSE (GPERFTOOLS_TCMALLOC AND GPERFTOOLS_PROFILER)
+SET(GPERFTOOLS_FOUND "NO")
+ENDIF (GPERFTOOLS_TCMALLOC AND GPERFTOOLS_PROFILER)

-IF (GOOGLE_PERFTOOLS_FOUND)
-IF (NOT GOOGLE_PERFTOOLS_FIND_QUIETLY)
-MESSAGE(STATUS "Found Google perftools: ${GOOGLE_PERFTOOLS_LIBRARIES}")
-ENDIF (NOT GOOGLE_PERFTOOLS_FIND_QUIETLY)
-ELSE (GOOGLE_PERFTOOLS_FOUND)
+IF (GPERFTOOLS_FOUND)
+MESSAGE(STATUS "Found Gperftools: ${GPERFTOOLS_PROFILER}")
+ELSE (GPERFTOOLS_FOUND)
IF (GOOGLE_PERFTOOLS_FIND_REQUIRED)
MESSAGE(FATAL_ERROR "Could not find Google perftools library")
ENDIF (GOOGLE_PERFTOOLS_FIND_REQUIRED)
-ENDIF (GOOGLE_PERFTOOLS_FOUND)
+ENDIF (GPERFTOOLS_FOUND)

MARK_AS_ADVANCED(
-TCMALLOC_LIBRARY
-GOOGLE_PERFTOOLS_INCLUDE_DIR
-)
+GPERFTOOLS_TCMALLOC
+GPERFTOOLS_PROFILER
+)
2 changes: 1 addition & 1 deletion cmake/HandleAllocators.cmake
@@ -7,7 +7,7 @@ else()
list(APPEND possible_allocators BoostPool STL)
set(preferred_allocator STL)
endif()
-if(GOOGLE_PERFTOOLS_FOUND)
+if(GPERFTOOLS_FOUND)
list(APPEND possible_allocators tcmalloc)
endif()

2 changes: 1 addition & 1 deletion cmake/HandlePerfTools.cmake
@@ -1,4 +1,4 @@

###############################################################################
# Find Google perftools
-find_package(GooglePerfTools)
+find_package(GooglePerfTools)
4 changes: 4 additions & 0 deletions gtsam/CMakeLists.txt
@@ -147,6 +147,10 @@ if (GTSAM_USE_EIGEN_MKL)
target_include_directories(gtsam PUBLIC ${MKL_INCLUDE_DIR})
endif()

if (GPERFTOOLS_FOUND)
target_link_libraries(gtsam PRIVATE ${GPERFTOOLS_TCMALLOC} ${GPERFTOOLS_PROFILER})
endif()

# Add includes for source directories 'BEFORE' boost and any system include
# paths so that the compiler uses GTSAM headers in our source directory instead
# of any previously installed GTSAM headers.
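With gtsam now linked against the profiler library, CPU profiles can be collected by setting the CPUPROFILE environment variable or, as in this sketch (not from this commit; the output filename is illustrative), programmatically:

#include <gperftools/profiler.h>

int main() {
  ProfilerStart("gtsam.prof");  // begin sampling; samples go to gtsam.prof

  // ... run the GTSAM code to be profiled ...

  ProfilerStop();  // flush the profile; inspect it with pprof
  return 0;
}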
30 changes: 21 additions & 9 deletions gtsam/discrete/DecisionTreeFactor.cpp
@@ -407,11 +407,9 @@ namespace gtsam {
};

/* ************************************************************************ */
-DecisionTreeFactor DecisionTreeFactor::prune(size_t maxNrAssignments) const {
-const size_t N = maxNrAssignments;
-
+double DecisionTreeFactor::computeThreshold(const size_t N) const {
// Set of all keys
-std::set<Key> allKeys(keys().begin(), keys().end());
+std::set<Key> allKeys = this->labels();
MinHeap min_heap;

auto op = [&](const Assignment<Key>& a, double p) {
@@ -433,26 +431,40 @@ namespace gtsam {
nrAssignments *= cardinalities_.at(k);
}

// If min-heap is empty, fill it initially.
// This is because there is nothing at the top.
if (min_heap.empty()) {
min_heap.push(p, std::min(nrAssignments, N));

} else {
-// If p is larger than the smallest element,
-// then we insert into the max heap.
-if (p > min_heap.top()) {
-for (size_t i = 0; i < std::min(nrAssignments, N); ++i) {
+for (size_t i = 0; i < std::min(nrAssignments, N); ++i) {
+// If p is larger than the smallest element,
+// then we insert into the min heap.
+// We check against the top each time because the
+// heap maintains the smallest element at the top.
+if (p > min_heap.top()) {
if (min_heap.size() == N) {
min_heap.pop();
}
min_heap.push(p);
+} else {
+// p is <= min value so move to the next one
+break;
}
}
}
return p;
};
this->visitWith(op);

-double threshold = min_heap.top();
+return min_heap.top();
}

/* ************************************************************************ */
DecisionTreeFactor DecisionTreeFactor::prune(size_t maxNrAssignments) const {
const size_t N = maxNrAssignments;

double threshold = computeThreshold(N);

// Now threshold the decision tree
size_t total = 0;
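The new computeThreshold visits every leaf and keeps the N largest values in a min-heap, so the heap's top ends up being the pruning threshold. A standalone sketch of that idea (illustrative only; the GTSAM code above additionally weights each leaf by its number of assignments):

#include <functional>
#include <queue>
#include <vector>

// Return the smallest of the N largest values, i.e. the pruning threshold.
double computeThresholdSketch(const std::vector<double>& leaves, size_t N) {
  // Min-heap holding at most N of the largest values seen so far.
  std::priority_queue<double, std::vector<double>, std::greater<double>> heap;
  for (double p : leaves) {
    if (heap.size() < N) {
      heap.push(p);
    } else if (p > heap.top()) {
      heap.pop();  // drop the smallest of the values kept so far
      heap.push(p);
    }
  }
  return heap.top();  // leaves >= this value survive pruning
}

With the leaf values used in the tests below (1 5 3 7 2 6 4 8) and N = 5 this returns 4, matching the expected threshold.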
11 changes: 11 additions & 0 deletions gtsam/discrete/DecisionTreeFactor.h
@@ -224,6 +224,17 @@ namespace gtsam {
/// Get all the probabilities in order of assignment values
std::vector<double> probabilities() const;

/**
* @brief Compute the probability value which is the threshold above which
* only `N` leaves are present.
*
* This is used for pruning out the smaller probabilities.
*
* @param N The number of leaves to keep post pruning.
* @return double
*/
double computeThreshold(const size_t N) const;

/**
* @brief Prune the decision tree of discrete variables.
*
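A short usage sketch of the new API (not from this commit), using the same factor as the tests below; per the .cpp change above, prune(N) computes this threshold internally:

#include <gtsam/discrete/DecisionTreeFactor.h>

using namespace gtsam;

int main() {
  DiscreteKey A(1, 2), B(2, 2), C(3, 2);
  DecisionTreeFactor f(A & B & C, "1 5 3 7 2 6 4 8");

  // Threshold that keeps only the 5 largest leaves (4.0 for this factor).
  double threshold = f.computeThreshold(5);

  // prune() uses the same threshold internally and zeroes the other leaves.
  DecisionTreeFactor pruned = f.prune(5);
  return 0;
}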
45 changes: 37 additions & 8 deletions gtsam/discrete/tests/testDecisionTreeFactor.cpp
@@ -140,11 +140,46 @@ TEST(DecisionTreeFactor, enumerate) {
EXPECT(actual == expected);
}

namespace pruning_fixture {

DiscreteKey A(1, 2), B(2, 2), C(3, 2);
DecisionTreeFactor f(A& B& C, "1 5 3 7 2 6 4 8");

DiscreteKey D(4, 2);
DecisionTreeFactor factor(
D& C & B & A,
"0.0 0.0 0.0 0.60658897 0.61241912 0.61241969 0.61247685 0.61247742 0.0 "
"0.0 0.0 0.99995287 1.0 1.0 1.0 1.0");

} // namespace pruning_fixture

/* ************************************************************************* */
// Check if computing the correct threshold works.
TEST(DecisionTreeFactor, ComputeThreshold) {
using namespace pruning_fixture;

// Only keep the leaves with the top 5 values.
double threshold = f.computeThreshold(5);
EXPECT_DOUBLES_EQUAL(4.0, threshold, 1e-9);

// Check for more extreme pruning where we only keep the top 2 leaves
threshold = f.computeThreshold(2);
EXPECT_DOUBLES_EQUAL(7.0, threshold, 1e-9);

threshold = factor.computeThreshold(5);
EXPECT_DOUBLES_EQUAL(0.99995287, threshold, 1e-9);

threshold = factor.computeThreshold(3);
EXPECT_DOUBLES_EQUAL(1.0, threshold, 1e-9);

threshold = factor.computeThreshold(6);
EXPECT_DOUBLES_EQUAL(0.61247742, threshold, 1e-9);
}

/* ************************************************************************* */
// Check pruning of the decision tree works as expected.
TEST(DecisionTreeFactor, Prune) {
-DiscreteKey A(1, 2), B(2, 2), C(3, 2);
-DecisionTreeFactor f(A & B & C, "1 5 3 7 2 6 4 8");
+using namespace pruning_fixture;

// Only keep the leaves with the top 5 values.
size_t maxNrAssignments = 5;
@@ -160,12 +195,6 @@ TEST(DecisionTreeFactor, Prune) {
DecisionTreeFactor expected2(A & B & C, "0 0 0 7 0 0 0 8");
EXPECT(assert_equal(expected2, pruned2));

-DiscreteKey D(4, 2);
-DecisionTreeFactor factor(
-D & C & B & A,
-"0.0 0.0 0.0 0.60658897 0.61241912 0.61241969 0.61247685 0.61247742 0.0 "
-"0.0 0.0 0.99995287 1.0 1.0 1.0 1.0");

DecisionTreeFactor expected3(D & C & B & A,
"0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 "
"0.999952870000 1.0 1.0 1.0 1.0");
1 change: 1 addition & 0 deletions gtsam/geometry/geometry.i
@@ -360,6 +360,7 @@ class Rot3 {
// Standard Interface
static gtsam::Rot3 Expmap(gtsam::Vector v);
static gtsam::Vector Logmap(const gtsam::Rot3& p);
gtsam::Rot3 expmap(const gtsam::Vector& v);
gtsam::Vector logmap(const gtsam::Rot3& p);
gtsam::Matrix matrix() const;
gtsam::Matrix transpose() const;
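For reference (not from this commit), the newly wrapped instance-level expmap complements the static Expmap/Logmap; a minimal C++ sketch of the corresponding calls:

#include <gtsam/geometry/Rot3.h>

using namespace gtsam;

int main() {
  // Static maps between the tangent space and the rotation group.
  Rot3 R = Rot3::Expmap(Vector3(0.1, 0.2, 0.3));
  Vector3 w = Rot3::Logmap(R);  // recovers the rotation vector

  // The instance versions apply a tangent-space increment relative to R.
  Rot3 R2 = R.expmap(Vector3(0.01, 0.0, 0.0));
  Vector3 delta = R.logmap(R2);  // approximately (0.01, 0, 0)
  return 0;
}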
8 changes: 5 additions & 3 deletions gtsam/hybrid/HybridBayesTree.cpp
@@ -210,9 +210,11 @@ void HybridBayesTree::prune(const size_t maxNrLeaves) {
if (conditional->isHybrid()) {
auto hybridGaussianCond = conditional->asHybrid();

-// Imperative
-clique->conditional() = std::make_shared<HybridConditional>(
-hybridGaussianCond->prune(parentData.prunedDiscreteProbs));
+if (!hybridGaussianCond->pruned()) {
+// Imperative
+clique->conditional() = std::make_shared<HybridConditional>(
+hybridGaussianCond->prune(parentData.prunedDiscreteProbs));
+}
}
return parentData;
}
12 changes: 7 additions & 5 deletions gtsam/hybrid/HybridGaussianConditional.cpp
@@ -120,7 +120,7 @@ struct HybridGaussianConditional::Helper {

/* *******************************************************************************/
HybridGaussianConditional::HybridGaussianConditional(
-const DiscreteKeys &discreteParents, Helper &&helper)
+const DiscreteKeys &discreteParents, Helper &&helper, bool pruned)
: BaseFactor(discreteParents,
FactorValuePairs(
[&](const GaussianFactorValuePair
@@ -130,7 +130,8 @@ HybridGaussianConditional::HybridGaussianConditional(
},
std::move(helper.pairs))),
BaseConditional(*helper.nrFrontals),
-negLogConstant_(helper.minNegLogConstant) {}
+negLogConstant_(helper.minNegLogConstant),
+pruned_(pruned) {}

HybridGaussianConditional::HybridGaussianConditional(
const DiscreteKey &discreteParent,
@@ -166,8 +167,9 @@ HybridGaussianConditional::HybridGaussianConditional(
: HybridGaussianConditional(discreteParents, Helper(conditionals)) {}

HybridGaussianConditional::HybridGaussianConditional(
-const DiscreteKeys &discreteParents, const FactorValuePairs &pairs)
-: HybridGaussianConditional(discreteParents, Helper(pairs)) {}
+const DiscreteKeys &discreteParents, const FactorValuePairs &pairs,
+bool pruned)
+: HybridGaussianConditional(discreteParents, Helper(pairs), pruned) {}

/* *******************************************************************************/
const HybridGaussianConditional::Conditionals
@@ -331,7 +333,7 @@ HybridGaussianConditional::shared_ptr HybridGaussianConditional::prune(

FactorValuePairs prunedConditionals = factors().apply(pruner);
return std::make_shared<HybridGaussianConditional>(discreteKeys(),
-prunedConditionals);
+prunedConditionals, true);
}

/* *******************************************************************************/
11 changes: 9 additions & 2 deletions gtsam/hybrid/HybridGaussianConditional.h
@@ -68,6 +68,9 @@ class GTSAM_EXPORT HybridGaussianConditional
///< Take advantage of the neg-log space so everything is a minimization
double negLogConstant_;

/// Flag to indicate if the conditional has been pruned.
bool pruned_ = false;

public:
/// @name Constructors
/// @{
@@ -150,9 +153,10 @@
*
* @param discreteParents the discrete parents. Will be placed last.
* @param conditionalPairs Decision tree of GaussianFactor/scalar pairs.
* @param pruned Flag indicating if conditional has been pruned.
*/
HybridGaussianConditional(const DiscreteKeys &discreteParents,
-const FactorValuePairs &pairs);
+const FactorValuePairs &pairs, bool pruned = false);

/// @}
/// @name Testable
@@ -233,6 +237,9 @@
HybridGaussianConditional::shared_ptr prune(
const DecisionTreeFactor &discreteProbs) const;

/// Return true if the conditional has already been pruned.
bool pruned() const { return pruned_; }

/// @}

private:
@@ -241,7 +248,7 @@

/// Private constructor that uses helper struct above.
HybridGaussianConditional(const DiscreteKeys &discreteParents,
-Helper &&helper);
+Helper &&helper, bool pruned = false);

/// Check whether `given` has values for all frontal keys.
bool allFrontalsGiven(const VectorValues &given) const;
