diff --git a/.gitignore b/.gitignore index d16c7dd..f711dc1 100644 --- a/.gitignore +++ b/.gitignore @@ -31,3 +31,4 @@ bin/test .la cmake-build-debug +logs/ diff --git a/CMakeLists.txt b/CMakeLists.txt index dcafcf2..9452a48 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,18 +1,16 @@ -# This file (c) 2016-2020 AlertAvert.com. All rights reserved. +# This file (c) 2016-2024 AlertAvert.com. All rights reserved. +cmake_minimum_required(VERSION 3.23) project(distlib) -cmake_minimum_required(VERSION 3.16) -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++17 -fPIC") - -# Conan Packaging support -include(${CMAKE_BINARY_DIR}/conanbuildinfo.cmake) -conan_basic_setup() +set(CMAKE_CXX_STANDARD 17) +set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fPIC") ## # Include common cmake utilities. -# See: https://bitbucket.org/marco/common-utils +# See: https://github.com/massenz/common-utils # include(${COMMON_UTILS_DIR}/commons.cmake OPTIONAL RESULT_VARIABLE COMMONS_FILE) +message(STATUS "Using Common Utilities: ${COMMONS_FILE}") ## # Installation directory; this is both used to access @@ -23,6 +21,7 @@ include(${COMMON_UTILS_DIR}/commons.cmake OPTIONAL RESULT_VARIABLE COMMONS_FILE) if(NOT DEFINED INSTALL_DIR) if(DEFINED ENV{INSTALL_DIR}) set(INSTALL_DIR $ENV{INSTALL_DIR}) + message(STATUS "INSTALL_DIR set to: ${INSTALL_DIR}") else() message(FATAL_ERROR "INSTALL_DIR should be defined as an environment variable, or via -D notation") @@ -33,7 +32,7 @@ endif() # Release version management # set(RELEASE_MAJOR 0) -set(RELEASE_MINOR 18) +set(RELEASE_MINOR 20) set(RELEASE_PATCH 0) set(RELEASE "${RELEASE_MAJOR}.${RELEASE_MINOR}.${RELEASE_PATCH}") @@ -51,11 +50,12 @@ message(STATUS "Building Release: ${RELEASE_STRING} (${CMAKE_BUILD_TYPE})") # Both the value of CMAKE_BUILD_TYPE and the `build_type` setting # in `~/.conan/profiles/default` need to match. 
# -if(${CMAKE_BUILD_TYPE} STREQUAL "Debug") - set(GLOG glogd) -else() - set(GLOG glog) -endif() +# TODO: review to confirm whether this is still necessary with Conan V2 +#if(${CMAKE_BUILD_TYPE} STREQUAL "Debug") +# set(GLOG glogd) +#else() +# set(GLOG glog) +#endif() set(SOURCE_DIR ${PROJECT_SOURCE_DIR}/src) set(INCLUDE_DIR ${PROJECT_SOURCE_DIR}/include) @@ -75,10 +75,10 @@ include_directories( ${INSTALL_DIR}/include ) -link_directories( - ${INSTALL_DIR}/lib - ${PROJECT_BINARY_DIR}/lib -) +#link_directories( +# ${INSTALL_DIR}/lib +# ${PROJECT_BINARY_DIR}/lib +#) set(UTILS_SOURCES ${SOURCE_DIR}/utils/misc.cpp @@ -91,9 +91,16 @@ set(SOURCES ${SOURCE_DIR}/View.cpp ) -set(UTILS_LIBS - crypto - ${GLOG} +# Add Conan build folder to CMAKE_PREFIX_PATH for dependency resolution +list(APPEND CMAKE_PREFIX_PATH "${CMAKE_BINARY_DIR}") + +find_package(cryptopp REQUIRED) +find_package(glog) +find_package(OpenSSL REQUIRED) +set(LIBS + cryptopp::cryptopp + glog::glog + openssl::openssl pthread ) @@ -106,7 +113,7 @@ add_library(distutils SHARED ${SOURCES} ${UTILS_SOURCES} ) -target_link_libraries(distutils ${UTILS_LIBS}) +target_link_libraries(distutils ${LIBS}) ## @@ -126,9 +133,10 @@ add_subdirectory(${TESTS_DIR}) set(EXAMPLES_DIR ${SOURCE_DIR}/examples) -# Examples will use the latest build for the shared libraries +message(STATUS "Libraries will be linked from + ${CMAKE_BINARY_DIR} and ${INSTALL_DIR}/lib") link_directories( - ${CMAKE_BINARY_DIR}/lib + ${CMAKE_BINARY_DIR} ${INSTALL_DIR}/lib ) @@ -136,10 +144,10 @@ link_directories( # Merkle Tree Demo # add_executable(merkle_demo ${EXAMPLES_DIR}/merkle_demo.cpp) -target_link_libraries(merkle_demo distutils ${UTILS_LIBS}) +target_link_libraries(merkle_demo distutils ${LIBS}) ## # KeyValue Store Demo # add_executable(keystore_demo ${EXAMPLES_DIR}/keystore_example.cpp) -target_link_libraries(keystore_demo distutils ${UTILS_LIBS}) +target_link_libraries(keystore_demo distutils ${LIBS}) diff --git a/CMakeLists.txt.example 
b/CMakeLists.txt.example new file mode 100644 index 0000000..6a74a6c --- /dev/null +++ b/CMakeLists.txt.example @@ -0,0 +1,9 @@ +cmake_minimum_required(VERSION 3.15) +project(MyProject) + +# Find the glog package installed by Conan +find_package(glog REQUIRED) + +# Set up the target and link glog +add_executable(use-glog src/examples/use_glog.cpp) +target_link_libraries(use-glog PRIVATE glog::glog) diff --git a/CMakeUserPresets.json b/CMakeUserPresets.json index 945b382..66dabec 100644 --- a/CMakeUserPresets.json +++ b/CMakeUserPresets.json @@ -4,6 +4,7 @@ "conan": {} }, "include": [ - "build/CMakePresets.json" + "build/Release/generators/CMakePresets.json", + "build/Debug/generators/CMakePresets.json" ] } \ No newline at end of file diff --git a/README.md b/README.md index 9ebad01..8999b70 100644 --- a/README.md +++ b/README.md @@ -29,21 +29,42 @@ A detailed description of the library's features follows the [Install & build](# ## Build & testing +The sequence of commands to build the project: + +```shell +conan install . -s build_type=Debug --build=missing +cmake --preset conan-debug -DCOMMON_UTILS=$COMMON_UTILS +cmake --build --preset conan-debug -DCOMMON_UTILS_DIR=$COMMON_UTILS +``` +The build targets are currently just `distutils` lib and the `merkle_demo` and +`keystore_demo` examples, in the `build/Debug` directory: + +```shell +./build/Debug/merkle_demo "this is the string to hash" +``` + +For the `Release` build, use the `conan-release` preset and `Release` build type. +See [#Common utilities](#common-utilities) for a simpler way. 
+ ### Common utilities -The build/test scripts in this repository take advantage of shared common utility functions in [this common utils repository](https://bitbucket.org/marco/common-utils): clone it -somewhere, and make `$COMMON_UTILS_DIR` point to it: +The build/test scripts in this repository take advantage of shared common utility functions in +[the Common Utils repository](https://bitbucket.org/marco/common-utils): follow the instructions there to install the utilities and +set the `COMMON_UTILS` environment variable to point to the directory where you installed them. + +That is done as part of the installation process anyway: ```shell -git clone git@bitbucket.org:marco/common-utils.git -export COMMON_UTILS_DIR="$(pwd)/common-utils" +export COMMON_UTILS=/path/to/common-utils +export VERSION=... +curl -s -L https://cdn.githubraw.com/massenz/common-utils/$VERSION/install.sh | zsh -s ``` To build/test the project, link to the scripts there: ```shell -ln -s ${COMMON_UTILS_DIR}/build.sh build && \ - ln -s ${COMMON_UTILS_DIR}/test.sh test +ln -s ${COMMON_UTILS_DIR}/build.sh bin/build && \ + ln -s ${COMMON_UTILS_DIR}/test.sh bin/test ``` ### Build & Install libdistutils.so diff --git a/conanfile.txt b/conanfile.txt index 05df069..1c64b35 100644 --- a/conanfile.txt +++ b/conanfile.txt @@ -9,12 +9,12 @@ glog/0.4.0 gtest/1.15.0 openssl/3.3.2 #protobuf/5.27.0 -zeromq/4.3.5 +#cppzmq/4.10.0 [options] glog/*:with_gflags=False gtest/*:shared=False -protobuf/*:shared=True +#protobuf/*:shared=True [imports] # @@ -24,10 +24,13 @@ protobuf/*:shared=True # This is useful for the `install` step to move them to $INSTALL_DIR # if defined. 
# -lib, *.dylib* -> ./lib -lib, *.so* -> ./lib -include, * -> ./include +#lib, *.dylib* -> ./lib +#lib, *.so* -> ./lib +#include, * -> ./include [generators] CMakeDeps CMakeToolchain + +[layout] +cmake_layout diff --git a/env.sh b/env.sh deleted file mode 100644 index e7c0f9c..0000000 --- a/env.sh +++ /dev/null @@ -1,19 +0,0 @@ -# Build environment for Distributed Libraries -# Uses the Common Utilities, see: https://github.com/massenz/common-utils -# -# Created MM, 2021-11-27 - -set -eu - -BUILDDIR=$(abspath "./build") -CLANG=$(which clang++) - -OS_NAME=$(uname -s) -msg "Build Platform: ${OS_NAME}" -if [[ ${OS_NAME} == "Linux" ]]; then - export LD_LIBRARY_PATH="${BUILDDIR}/lib":${LD_LIBRARY_PATH:-} -elif [[ ${OS_NAME} == "Darwin" ]]; then - export DYLD_LIBRARY_PATH="${BUILDDIR}/lib":${DYLD_LIBRARY_PATH:-} -else - fatal "Unsupported Linux variant: ${OS_NAME}" -fi diff --git a/include/Bucket.hpp b/include/Bucket.hpp index bc81533..fc5851e 100644 --- a/include/Bucket.hpp +++ b/include/Bucket.hpp @@ -3,7 +3,6 @@ #pragma once - #include #include #include @@ -14,6 +13,14 @@ #include "json.hpp" using json = nlohmann::json; +class Bucket; + +/** + * Utility function to stream a string representation of a bucket. + * + * @return the passed in stream, to which the bucket has been streamed to. + */ +std::ostream& operator<<(std::ostream& out, const Bucket &bucket); /** * A "bucket" abstracts the concept of a hashed partition, using consistent hashing. 
@@ -79,15 +86,15 @@ class Bucket { * * @return the set of {@link partitions()} points that define this bucket */ - std::vector partition_points() const { + [[nodiscard]] std::vector partition_points() const { return hash_points_; } - float partition_point(int i) const { + [[nodiscard]] float partition_point(int i) const { if (i < 0 || i >= partitions()) { std::ostringstream msg; - msg << "Out of bound: requesting partition point #" << i << ", when only " - << partitions() << " are available ('" << name_ << "')"; + msg << "Requesting partition point #" << i << ", when only " + << partitions() << " are available in " << *this; throw std::out_of_range(msg.str()); } return hash_points_[i]; @@ -103,17 +110,29 @@ class Bucket { * * @param x a point in the [0, 1] interval. * @return a pair of {index, point} values that determine which partition point is - * the immediately greater than `x`. + * the one immediately greater than `x`. */ - std::pair partition_point(float x) const; + [[nodiscard]] std::pair partition_point(float x) const; - int partitions() const { return hash_points_.size(); } + [[nodiscard]] int partitions() const { return hash_points_.size(); } }; + /** - * Utility function to stream a string representation of a bucket. + * Equality operator for `Bucket` objects. + * Uses the name and partition points to determine equality. * - * @return the passed in stream, to which the bucket has been streamed to. + * This method actually uses the ordering operator to compare the two buckets, using + * the usual approach that (a == b) iff !(a < b) && !(b < a). */ -std::ostream& operator<<(std::ostream& out, const Bucket &bucket); +bool operator==(const Bucket &lhs, const Bucket &rhs); + +/** + * Ordering operator for `Bucket` objects. + * Uses the name and partition points to define a total order on the Buckets. + * + * To compare partition points (which are floats), we use a tolerance of 1e-6.
+ * TODO: this should actually be a configurable (static) attribute of the class. + */ +bool operator<(const Bucket &lhs, const Bucket &rhs); diff --git a/include/ConsistentHash.hpp b/include/ConsistentHash.hpp index af05281..9465899 100644 --- a/include/ConsistentHash.hpp +++ b/include/ConsistentHash.hpp @@ -42,17 +42,15 @@ float consistent_hash(const std::string &msg); * * See Item 40 of Effective STL. */ -template -class FloatLessWithTolerance : - public std::binary_function { - - double epsilon_; - public: - FloatLessWithTolerance() { - epsilon_ = pow(10, -Tolerance); - } - - bool operator()( const float &left, const float &right ) const { - return (std::abs(left - right) > epsilon_) && (left < right); - } +template +class FloatLessWithTolerance { +public: + explicit FloatLessWithTolerance(double eps = pow(10, -Tolerance)) : epsilon_(eps) { } + + bool operator()(const float &left, const float &right) const { + return (std::abs(left - right) > epsilon_) && (left < right); + } + +private: + double epsilon_; }; diff --git a/include/View.hpp b/include/View.hpp index 8bf7ec8..0c55286 100644 --- a/include/View.hpp +++ b/include/View.hpp @@ -30,14 +30,16 @@ inline std::ostream& operator<<(std::ostream& out, const BucketPtr& ptr) { } /** - * Bucket pointer should be sorted (when in ordered collections) by the buckets' names. + * Bucket pointers should be sorted (when in ordered collections) by the buckets' + * ordering criteria. * * @param lhs * @param rhs - * @return whether the name of `lhs` precedes `rhs`'s name + * @return whether `lhs` precedes `rhs` in the ordering. + * @see Bucket::operator<(const Bucket&, const Bucket&) */ inline bool operator<(const BucketPtr &lhs, const BucketPtr &rhs) { - return lhs->name() < rhs->name(); + return *lhs < *rhs; } /** @@ -65,6 +67,9 @@ using MapWithTolerance = std::map>; * * For more details, see the paper on Consistent Hashing, referred to in the documentation for * the `consistent_hash()` method. 
+ * + * **Note**: in order to avoid issues with ordering and (crucially) when adding to the sets + * of buckets, we actually disallow two buckets with the same name to be added to the same view. */ class View { @@ -90,7 +95,11 @@ class View { View(const View&) = delete; View(View&&) = delete; - /** Adds a bucket to this `View` */ + /** + * Adds a bucket to this `View`, avoiding duplicates. + * + * @see Bucket::operator==(const BucketPtr&, const BucketPtr&) + */ void Add(const BucketPtr& bucket); /** diff --git a/src/Bucket.cpp b/src/Bucket.cpp index 8c51c6e..9b0c9f1 100644 --- a/src/Bucket.cpp +++ b/src/Bucket.cpp @@ -6,59 +6,80 @@ #include #include +#include #include +// Tolerance for comparing floating point numbers. +constexpr float kEpsilon = 1e-6; -std::ostream& operator<<(std::ostream& out, const Bucket &bucket) { - out << "'" << bucket.name() << "' ["; - out.setf(std::ios_base::fixed); - out.precision(5); +std::ostream &operator<<(std::ostream &out, const Bucket &bucket) { + out << "'" << bucket.name() << "' ["; + out.setf(std::ios_base::fixed); + out.precision(5); - for (int i = 0; i < bucket.partitions(); ++i) { - if (i > 0) out << ", "; - out << bucket.partition_point(i); - } - out << "]"; + for (int i = 0; i < bucket.partitions(); ++i) { + if (i > 0) out << ", "; + out << bucket.partition_point(i); + } + out << "]"; - return out; + return out; } Bucket::Bucket(std::string name, std::vector hash_points) : - name_(std::move(name)), hash_points_(std::move(hash_points)) { + name_(std::move(name)), hash_points_(std::move(hash_points)) { std::sort(hash_points_.begin(), hash_points_.end()); } std::pair Bucket::partition_point(float x) const { - auto pos = std::upper_bound(hash_points_.begin(), hash_points_.end(), x); - if (pos == hash_points_.end()) { - return std::make_pair(0, hash_points_[0]); - } - return std::make_pair(std::distance(hash_points_.cbegin(), pos), *pos); + auto pos = std::upper_bound(hash_points_.begin(), hash_points_.end(), x); + if (pos == 
hash_points_.end()) { + return std::make_pair(0, hash_points_[0]); + } + return std::make_pair(std::distance(hash_points_.cbegin(), pos), *pos); } void Bucket::add_partition_point(float point) { - auto pos = hash_points_.begin(); - for(auto x : hash_points_) { - if (x > point) { - break; + auto pos = hash_points_.begin(); + for (auto x: hash_points_) { + if (x > point) { + break; + } + pos++; + } + if (pos != hash_points_.end()) { + hash_points_.insert(pos, point); + } else { + hash_points_.push_back(point); } - pos++; - } - if (pos != hash_points_.end()) { - hash_points_.insert(pos, point); - } else { - hash_points_.push_back(point); - } } + void Bucket::remove_partition_point(unsigned int i) { - if (i < partitions()) { - hash_points_.erase(hash_points_.cbegin() + i); - } + if (i < partitions()) { + hash_points_.erase(hash_points_.cbegin() + i); + } } Bucket::operator json() const { - return nlohmann::json { - {"name", name()}, - {"partition_points", partition_points()} - }; + return nlohmann::json{ + {"name", name()}, + {"partition_points", partition_points()} + }; +} + +bool operator<(const Bucket &lhs, const Bucket &rhs) { + if (lhs.name() != rhs.name()) { + return lhs.name() < rhs.name(); + } + int count = std::min(lhs.partitions(), rhs.partitions()); + for (int i = 0; i < count; ++i) { + if (std::abs(lhs.partition_point(i) - rhs.partition_point(i)) > kEpsilon) { + return lhs.partition_point(i) < rhs.partition_point(i); + } + } + return false; +} + +bool operator==(const Bucket &lhs, const Bucket &rhs) { + return !(lhs < rhs) && !(rhs < lhs); } diff --git a/src/examples/use_glog.cpp b/src/examples/use_glog.cpp new file mode 100644 index 0000000..f199878 --- /dev/null +++ b/src/examples/use_glog.cpp @@ -0,0 +1,18 @@ +// +// Created by Marco Massenzio on 11/3/24. +// + +#include + +int main(int argc, char* argv[]) { + // Initialize Google logging. + FLAGS_log_dir = "./logs"; + google::InitGoogleLogging(argv[0]); + + // Log an info message. 
+ LOG(INFO) << "Hello, this is an info message from glog!"; + + // Shutdown Google logging. + google::ShutdownGoogleLogging(); + return 0; +} diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 4debd70..f2fc24d 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -4,19 +4,6 @@ project(distlib_test) enable_testing() set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++17 -fPIC") - -# Conan Packaging support -include(${CMAKE_BINARY_DIR}/conanbuildinfo.cmake) -conan_basic_setup() - - -find_library(GTEST gtest ${INSTALL_DIR}/lib) -find_library(GMOCK gmock ${INSTALL_DIR}/lib) - -if (${GTEST} STREQUAL GTEST-NOTFOUND) - message(FATAL_ERROR "Could not locate a valid Google Test library installed.") -endif () - set(UNIT_TESTS ${TESTS_DIR}/test_bucket.cpp ${TESTS_DIR}/test_hash.cpp @@ -28,19 +15,23 @@ set(UNIT_TESTS ${TESTS_DIR}/test_view.cpp ) -# Add the build directory to the library search path -link_directories(${CMAKE_BINARY_DIR}) - add_executable(unit_tests ${UNIT_TESTS} tests.h all_tests.cpp ) +message(STATUS "Libraries will be linked from + ${CMAKE_BINARY_DIR} and ${INSTALL_DIR}/lib") +link_directories( + ${CMAKE_BINARY_DIR} + ${INSTALL_DIR}/lib +) + +find_package(GTest REQUIRED) target_link_libraries(unit_tests - ${GTEST} - ${GMOCK} + gtest::gtest distutils ) - -add_test(unit_tests ${CMAKE_CURRENT_BINARY_DIR}/unit_tests) +add_test(unit_tests + ${CMAKE_CURRENT_BINARY_DIR}/unit_tests) diff --git a/tests/test_bucket.cpp b/tests/test_bucket.cpp index 76c2a3c..f665465 100644 --- a/tests/test_bucket.cpp +++ b/tests/test_bucket.cpp @@ -1,10 +1,6 @@ // Copyright (c) 2016-2020 AlertAvert.com. All rights reserved. // Created by M. Massenzio (marco@alertavert.com) on 10/7/16. -// Ignore CLion warning caused by GTest TEST() macro.
-#pragma ide diagnostic ignored "cert-err58-cpp" - - #include #include @@ -13,143 +9,138 @@ TEST(BucketTests, CanCreate) { - Bucket b("test_bucket", {0.3, 0.6, 0.9}); - - EXPECT_EQ(3, b.partitions()); - - // We verify that all partition points are within the unit circle and sorted. - float last = 0.0; - for (auto current : b.partition_points()) { - EXPECT_GT(current, 0.0); - EXPECT_LT(current, 1.0); - EXPECT_GT(current, last); - last = current; - } - EXPECT_EQ("test_bucket", b.name()); + Bucket b("test_bucket", {0.3, 0.6, 0.9}); + EXPECT_EQ(3, b.partitions()); + + // We verify that all partition points are within the unit circle and sorted. + float last = 0.0; + for (auto current: b.partition_points()) { + EXPECT_GT(current, 0.0); + EXPECT_LT(current, 1.0); + EXPECT_GT(current, last); + last = current; + } + EXPECT_EQ("test_bucket", b.name()); } TEST(BucketTests, CanCreateWithValues) { - std::vector points { 0.3, 0.6, 0.9 }; - Bucket b("with_points", points); - - ASSERT_EQ(points.size(), b.partitions()); - for (int i = 0; i < points.size(); ++i) { - ASSERT_FLOAT_EQ(points[i], b.partition_point(i)); - } + std::vector points{0.3, 0.6, 0.9}; + Bucket b("with_points", points); + ASSERT_EQ(points.size(), b.partitions()); + for (int i = 0; i < points.size(); ++i) { + ASSERT_FLOAT_EQ(points[i], b.partition_point(i)); + } } +TEST(BucketTests, Compare) { + std::vector points{0.3, 0.6, 0.9}; + Bucket b1("with_points", points); + Bucket b2("with_points", points); + ASSERT_TRUE(b1 == b2); + Bucket b3("aardvark", points); + ASSERT_FALSE(b1 < b3); + std::vector mo_points{0.3, 0.6, 0.8}; + Bucket b4("with_points", mo_points); + ASSERT_TRUE(b4 < b1) << "same name, same number of points"; + mo_points.push_back(0.9); + Bucket b5("with_points", mo_points); + ASSERT_TRUE(b5 < b1) << "same name, different points"; +} TEST(BucketTests, CanAddPoint) { - std::vector points { 0.2, 0.4, 0.6, 0.8 }; - Bucket b("with_points", points); - - ASSERT_EQ(points.size(), b.partitions()); - for (int i 
= 0; i < points.size(); ++i) { - ASSERT_FLOAT_EQ(points[i], b.partition_point(i)); - } - - b.add_partition_point(0.7); - ASSERT_FLOAT_EQ(0.7, b.partition_point(3)) << "Point was not added properly " << b; + std::vector points{0.2, 0.4, 0.6, 0.8}; + Bucket b("with_points", points); + ASSERT_EQ(points.size(), b.partitions()); + for (int i = 0; i < points.size(); ++i) { + ASSERT_FLOAT_EQ(points[i], b.partition_point(i)); + } + b.add_partition_point(0.7); + ASSERT_FLOAT_EQ(0.7, b.partition_point(3)) << "Point was not added properly " << b; } - TEST(BucketTests, CanRemovePoint) { - std::vector points { 0.2, 0.4, 0.6, 0.8 }; - Bucket b("with_points", points); - - b.remove_partition_point(2); - - ASSERT_EQ(points.size() - 1, b.partitions()); - for (auto x : b.partition_points()) { - ASSERT_NE(0.6, x); - } + std::vector points{0.2, 0.4, 0.6, 0.8}; + Bucket b("with_points", points); + b.remove_partition_point(2); + ASSERT_EQ(points.size() - 1, b.partitions()); + for (auto x: b.partition_points()) { + ASSERT_NE(0.6, x); + } } - TEST(BucketTests, CanFindNearest) { - Bucket b("abucket", {0.0422193, - 0.0592362, 0.119673, 0.215251, 0.90553}); - auto points = b.partition_points(); - - float hv = consistent_hash("the test string"); - - // we can take advantage of what we know about partition points: - // they are sorted and there's only 5 of them. 
- // - // During a test run, this is what we have: - // - // hv = 0.362957 - // i partition_point(i) - // 0 0.0422193 - // 1 0.0592362 - // 2 0.119673 - // 3 0.215251 - // 4 0.90553 - int i; - for (i = 0; i < 5; ++i) { - if ((points)[i] > hv) break; - } - ASSERT_EQ(std::make_pair(i, b.partition_point(i)), b.partition_point(hv)); - - Bucket b2("another_bucket", {0.065193, - 0.052362, 0.19673, 0.2551, 0.9553}); - points = b.partition_points(); - hv = consistent_hash("a different test string"); - - for (i = 0; i < 5; ++i) { - if ((points)[i] > hv) break; - } - ASSERT_EQ(std::make_pair(i, b.partition_point(i)), b.partition_point(hv)); + Bucket b("abucket", { + 0.0422193, 0.0592362, 0.119673, 0.215251, 0.90553}); + auto points = b.partition_points(); + float hv = consistent_hash("the test string"); + + // we can take advantage of what we know about partition points: + // they are sorted and there's only 5 of them. + // + // During a test run, this is what we have: + // + // hv = 0.362957 + // i partition_point(i) + // 0 0.0422193 + // 1 0.0592362 + // 2 0.119673 + // 3 0.215251 + // 4 0.90553 + int i; + for (i = 0; i < 5; ++i) { + if ((points)[i] > hv) break; + } + ASSERT_EQ(std::make_pair(i, b.partition_point(i)), b.partition_point(hv)); + Bucket b2("another_bucket", {0.065193, + 0.052362, 0.19673, 0.2551, 0.9553}); + points = b.partition_points(); + hv = consistent_hash("a different test string"); + for (i = 0; i < 5; ++i) { + if ((points)[i] > hv) break; + } + ASSERT_EQ(std::make_pair(i, b.partition_point(i)), b.partition_point(hv)); } - TEST(BucketTests, CanPrint) { - std::ostringstream os; - Bucket b("fancy bucket", {0.065193, 0.052362, 0.19673, 0.2551, 0.9553}); - os << b; - ASSERT_THAT(os.str(), ::testing::StartsWith("'fancy bucket' [")); + std::ostringstream os; + Bucket b("fancy bucket", {0.065193, 0.052362, 0.19673, 0.2551, 0.9553}); + os << b; + ASSERT_THAT(os.str(), ::testing::StartsWith("'fancy bucket' [")); } TEST(BucketTests, CanSetName) { - Bucket 
b("bucket", {0.065193, 0.052362, 0.19673, 0.2551, 0.9553}); - ASSERT_EQ(b.name(), "bucket"); - - b.set_name("another"); - ASSERT_EQ(b.name(), "another"); + Bucket b("bucket", {0.065193, 0.052362, 0.19673, 0.2551, 0.9553}); + ASSERT_EQ(b.name(), "bucket"); + b.set_name("another"); + ASSERT_EQ(b.name(), "another"); } - TEST(BucketTests, ThrowsOutOfRange) { - Bucket b("bucket", {0.065193, 0.052362}); - ASSERT_EQ(b.partitions(), 2); - ASSERT_THROW(b.partition_point(b.partitions() + 2), std::out_of_range); + Bucket b("bucket", {0.065193, 0.052362}); + ASSERT_EQ(b.partitions(), 2); + ASSERT_THROW( + static_cast(b.partition_point(b.partitions() + 2)), + std::out_of_range); } - TEST(BucketTests, Json) { - - json bj = Bucket {"my-bucket", {0.5f, 0.8f}}; - - ASSERT_EQ("my-bucket", bj["name"]); - ASSERT_TRUE(bj["partition_points"].is_array()); - - ASSERT_EQ(0.5f, bj["partition_points"][0]); - ASSERT_EQ(0.8f, bj["partition_points"][1]); + json bj = Bucket{"my-bucket", {0.5f, 0.8f}}; + ASSERT_EQ("my-bucket", bj["name"]); + ASSERT_TRUE(bj["partition_points"].is_array()); + ASSERT_EQ(0.5f, bj["partition_points"][0]); + ASSERT_EQ(0.8f, bj["partition_points"][1]); } - TEST(BucketTests, JsonArray) { - - std::vector buckets = { - Bucket {"my-bucket", {0.5f, 0.8f}}, - Bucket {"another", {0.6f, 0.9f}}, - Bucket {"last", {0.7f, 0.1f}}, - }; - - json myBuckets; - myBuckets["buckets"] = buckets; - - ASSERT_TRUE(myBuckets["buckets"].is_array()); - ASSERT_EQ(3, myBuckets["buckets"].size()); - ASSERT_EQ("another", myBuckets["buckets"][1]["name"]); + std::vector buckets = { + Bucket{"my-bucket", {0.5f, 0.8f}}, + Bucket{"another", {0.6f, 0.9f}}, + Bucket{"last", {0.7f, 0.1f}}, + }; + + json myBuckets; + myBuckets["buckets"] = buckets; + ASSERT_TRUE(myBuckets["buckets"].is_array()); + ASSERT_EQ(3, myBuckets["buckets"].size()); + ASSERT_EQ("another", myBuckets["buckets"][1]["name"]); } diff --git a/tests/test_view.cpp b/tests/test_view.cpp index a4c5ed5..adb70e5 100644 --- a/tests/test_view.cpp 
+++ b/tests/test_view.cpp @@ -185,21 +185,22 @@ TEST(ViewTests, CreateBalancedView) { TEST(ViewTests, CanGetBucketsAndUse) { auto pv = make_balanced_view(5, 15); auto buckets = pv->buckets(); - ASSERT_EQ(5, buckets.size()); + ASSERT_EQ(buckets.size(), 5); auto pos = buckets.begin(); - (*pos)->set_name("new bucket"); + (*pos)->set_name("test bucket"); pos++; pos++; - (*pos)->set_name("new bucket"); + (*pos)->set_name("test bucket"); - int count = 0; + ASSERT_EQ(buckets.size(), 5); auto new_buckets = pv->buckets(); - ASSERT_EQ(5, new_buckets.size()); + ASSERT_EQ(new_buckets.size(), 5); + int count = 0; std::for_each(new_buckets.begin(), new_buckets.end(), [&count](const BucketPtr &bucket_ptr) { - if (bucket_ptr->name() == "new bucket") { + if (bucket_ptr->name() == "test bucket") { count++; } });