diff --git a/wave_optimization/CMakeLists.txt b/wave_optimization/CMakeLists.txt index 84bf8096..8b92f597 100644 --- a/wave_optimization/CMakeLists.txt +++ b/wave_optimization/CMakeLists.txt @@ -19,7 +19,8 @@ WAVE_ADD_TEST(${PROJECT_NAME}_tests tests/ceres/ceres_examples_test.cpp tests/factor_graph/factor_test.cpp tests/factor_graph/graph_test.cpp - tests/factor_graph/variable_test.cpp) + tests/factor_graph/variable_test.cpp + tests/factor_graph/noise_test.cpp) TARGET_LINK_LIBRARIES(${PROJECT_NAME}_tests ${PROJECT_NAME} wave_utils diff --git a/wave_optimization/include/wave/optimization/ceres/graph_wrapper.hpp b/wave_optimization/include/wave/optimization/ceres/graph_wrapper.hpp index 57527f9e..55e38708 100644 --- a/wave_optimization/include/wave/optimization/ceres/graph_wrapper.hpp +++ b/wave_optimization/include/wave/optimization/ceres/graph_wrapper.hpp @@ -17,13 +17,37 @@ namespace wave { /** @addtogroup optimization * @{ */ -void addFactorToProblem(ceres::Problem &problem, FactorBase &factor) { +/** + * Ceres cost function wrapping `Factor::evaluateRaw`. + */ +class FactorCostFunction : public ceres::CostFunction { + public: + explicit FactorCostFunction(std::shared_ptr factor) + : factor{factor} { + this->set_num_residuals(factor->residualSize()); + for (const auto &var : factor->variables()) { + this->mutable_parameter_block_sizes()->push_back(var->size()); + } + } + + bool Evaluate(double const *const *parameters, + double *residuals, + double **jacobians) const override { + return factor->evaluateRaw(parameters, residuals, jacobians); + } + + private: + std::shared_ptr factor; +}; + +void addFactorToProblem(ceres::Problem &problem, + std::shared_ptr factor) { // We make a vector of residual block pointers and pass it to // AddResidualBlock because Ceres' implementation forms a vector // anyway. auto data_ptrs = std::vector{}; - for (const auto &v : factor.variables()) { + for (const auto &v : factor->variables()) { data_ptrs.push_back(v->data()); // Explicitly adding parameters "causes additional correctness @@ -31,15 +55,17 @@ void addFactorToProblem(ceres::Problem &problem, FactorBase &factor) { // @todo can add local parametrization in this call problem.AddParameterBlock(v->data(), v->size()); - // Set parameter blocks constant if the variable is so marked - if (v->isFixed()) { + // Set parameter blocks constant if the factor is a zero-noise prior + if (factor->isPerfectPrior()) { problem.SetParameterBlockConstant(v->data()); } } - // Give ceres the cost function and its parameter blocks. - problem.AddResidualBlock( - factor.costFunction().release(), nullptr, data_ptrs); + // Finally, give ceres the cost function and its parameter blocks. 
+ if (!factor->isPerfectPrior()) { + problem.AddResidualBlock( + new FactorCostFunction{std::move(factor)}, nullptr, data_ptrs); + } } /** @@ -52,7 +78,7 @@ void evaluateGraph(FactorGraph &graph) { ceres::Problem problem; for (const auto &ptr : graph) { - addFactorToProblem(problem, *ptr); + addFactorToProblem(problem, ptr); } // Initialize the solver diff --git a/wave_optimization/include/wave/optimization/factor_graph/Factor.hpp b/wave_optimization/include/wave/optimization/factor_graph/Factor.hpp index ee549df6..8d69f621 100644 --- a/wave_optimization/include/wave/optimization/factor_graph/Factor.hpp +++ b/wave_optimization/include/wave/optimization/factor_graph/Factor.hpp @@ -6,36 +6,16 @@ #ifndef WAVE_OPTIMIZATION_FACTOR_GRAPH_FACTOR_HPP #define WAVE_OPTIMIZATION_FACTOR_GRAPH_FACTOR_HPP -#include -#include - #include "wave/utils/math.hpp" #include "wave/optimization/factor_graph/FactorVariableBase.hpp" #include "wave/optimization/factor_graph/FactorBase.hpp" #include "wave/optimization/factor_graph/OutputMap.hpp" + namespace wave { /** @addtogroup optimization * @{ */ -template -class FactorCostFunction - : public ceres::SizedCostFunction { - public: - explicit FactorCostFunction( - std::function fn) - : f_evaluate{std::move(fn)} {} - - bool Evaluate(double const *const *parameters, - double *residuals, - double **jacobians) const override { - return f_evaluate(parameters, residuals, jacobians); - } - - private: - std::function f_evaluate; -}; - /** * Template for factors of different variable types. * @@ -98,33 +78,17 @@ class Factor : public FactorBase { using const_iterator = typename VarArrayType::const_iterator; /** Construct with the given function, measurement and variables. */ - explicit Factor(FuncType f, - MeasType meas, - std::shared_ptr... variable_ptrs) - : measurement_function{f}, - measurement{meas}, - variable_ptrs{{variable_ptrs...}} {} - - ~Factor() override = default; + explicit Factor(FuncType measurement_function, + MeasType measurement, + std::shared_ptr... variable_ptrs); int size() const override { return NumVars; } - int numResiduals() const override { + int residualSize() const override { return ResidualSize; } - std::unique_ptr costFunction() override { - auto fn = std::bind(&FactorType::evaluateRaw, - this, - std::placeholders::_1, - std::placeholders::_2, - std::placeholders::_3); - return std::unique_ptr{ - new FactorCostFunction{ - fn}}; - } - bool evaluateRaw(double const *const *parameters, double *residuals, double **jacobians) const noexcept override; @@ -135,6 +99,11 @@ class Factor : public FactorBase { return this->variable_ptrs; } + /** Return true if this factor is a zero-noise prior */ + bool isPerfectPrior() const noexcept override { + return false; + } + /** Print a representation for debugging. Used by operator<< */ void print(std::ostream &os) const override; @@ -153,4 +122,4 @@ class Factor : public FactorBase { #include "impl/Factor.hpp" -#endif +#endif // WAVE_OPTIMIZATION_FACTOR_GRAPH_FACTOR_HPP diff --git a/wave_optimization/include/wave/optimization/factor_graph/FactorBase.hpp b/wave_optimization/include/wave/optimization/factor_graph/FactorBase.hpp index 6bee8a1e..a5ae3c86 100644 --- a/wave_optimization/include/wave/optimization/factor_graph/FactorBase.hpp +++ b/wave_optimization/include/wave/optimization/factor_graph/FactorBase.hpp @@ -44,12 +44,17 @@ class FactorBase { * Called by `operator<<`. 
*/ virtual void print(std::ostream &os) const = 0; + /** The arity (number of variables connected) of the factor */ virtual int size() const = 0; - virtual int numResiduals() const = 0; - virtual std::unique_ptr costFunction() = 0; + + /** The dimension of the measurement and residual vector */ + virtual int residualSize() const = 0; /** Get a reference to the vector of variable pointers */ virtual const VarVectorType &variables() const noexcept = 0; + + /** Return true if this factor is a zero-noise prior */ + virtual bool isPerfectPrior() const noexcept = 0; }; diff --git a/wave_optimization/include/wave/optimization/factor_graph/FactorGraph.hpp b/wave_optimization/include/wave/optimization/factor_graph/FactorGraph.hpp index 708037b5..94e8b70d 100644 --- a/wave_optimization/include/wave/optimization/factor_graph/FactorGraph.hpp +++ b/wave_optimization/include/wave/optimization/factor_graph/FactorGraph.hpp @@ -53,9 +53,17 @@ class FactorGraph { */ template void addFactor(FuncType f, - const MeasType &meas, + const MeasType &measurement, std::shared_ptr... variables); + template + void addPrior(const MeasType &measurement, + std::shared_ptr variable); + + template + void addPerfectPrior(const typename VarType::MappedType &measured_value, + std::shared_ptr variable); + // Iterators /** Return iterator over factors */ diff --git a/wave_optimization/include/wave/optimization/factor_graph/FactorMeasurement.hpp b/wave_optimization/include/wave/optimization/factor_graph/FactorMeasurement.hpp index c0802bb8..7aa7615f 100644 --- a/wave_optimization/include/wave/optimization/factor_graph/FactorMeasurement.hpp +++ b/wave_optimization/include/wave/optimization/factor_graph/FactorMeasurement.hpp @@ -7,49 +7,88 @@ #define WAVE_OPTIMIZATION_FACTOR_GRAPH_MEASUREMENT_HPP #include "wave/optimization/factor_graph/FactorVariable.hpp" - +#include "wave/optimization/factor_graph/Noise.hpp" +#include "wave/optimization/factor_graph/OutputMap.hpp" namespace wave { /** @addtogroup optimization * @{ */ +/** The default NoiseType parameter to FactorMeasurement */ +template +using DefaultNoiseType = DiagonalNoise::Size>; + /** - * A measurement associated with a Factor - * + * A measurement, with associated noise, associated with a Factor * @tparam V the type of ValueView representing the measurement's value - * @todo a Noise parameter will be added here + * @tparam NoiseTmpl the type of noise */ -template +template > class FactorMeasurement : public FactorVariable { using Base = FactorVariable; public: + using VarType = Base; using ViewType = typename Base::ViewType; + using NoiseType = N; using MappedType = typename Base::MappedType; constexpr static int Size = Base::Size; - /** Construct with initial value */ - explicit FactorMeasurement(MappedType &&initial) - : Base{std::move(initial)} {} + /** Construct with initial value and initial noise value*/ + explicit FactorMeasurement(MappedType initial, + typename NoiseType::InitType noise_value) + : Base{std::move(initial)}, noise{std::move(noise_value)} {} + - /** Construct copying initial value */ - explicit FactorMeasurement(const MappedType &initial) : Base{initial} {} + /** Construct with initial value and initial noise object*/ + explicit FactorMeasurement(MappedType initial, NoiseType noise) + : Base{std::move(initial)}, noise{std::move(noise)} {} + + /** Construct with initial value and no noise + * Only allowed when NoiseType is void*/ + explicit FactorMeasurement(MappedType initial) : Base{std::move(initial)} { + static_assert(std::is_void::value, + "A 
noise value must be provided as the second argument"); + } + + NoiseType noise; }; -/** Subtract a measurement from the result of a measurement function - * @return the difference: the non-normalized residual + +/** + * Partially specialized FactorMeasurement with zero noise + * + * This specialization can be constructed with a measured value only, without + * needing to specify noise. */ template -inline Eigen::Matrix::Size> operator-( - const ResultOut::Size> &lhs, - const FactorMeasurement &rhs) { - // We need to accept the specific types and manually convert them, as Eigen - // 3.2 is not as great at automatically converting things. See #151 - return lhs - Eigen::Map< - const Eigen::Matrix::Size>>{ - rhs.data()}; +class FactorMeasurement : public FactorVariable { + using Base = FactorVariable; + + public: + using VarType = Base; + using ViewType = typename Base::ViewType; + using NoiseType = void; + using MappedType = typename Base::MappedType; + constexpr static int Size = Base::Size; + + /** Construct with initial value and no noise */ + explicit FactorMeasurement(MappedType initial) : Base{std::move(initial)} {} +}; + +template +inline Eigen::Matrix::Size, 1> operator-( + const ResultOut::Size> &lhs, + const FactorMeasurement &rhs) { + // We need to accept the specific ResultOut type (instead of a generic Eigen + // map) as for some reason they are not converted with Eigen 3.2. See #151 + return lhs - + Eigen::Map< + const Eigen::Matrix::Size, 1>>{ + rhs.data()}; } + /** @} group optimization */ } // namespace wave diff --git a/wave_optimization/include/wave/optimization/factor_graph/FactorVariable.hpp b/wave_optimization/include/wave/optimization/factor_graph/FactorVariable.hpp index 8c73c570..cf7efc66 100644 --- a/wave_optimization/include/wave/optimization/factor_graph/FactorVariable.hpp +++ b/wave_optimization/include/wave/optimization/factor_graph/FactorVariable.hpp @@ -48,19 +48,50 @@ class FactorVariable : public FactorVariableBase { using MappedType = typename ViewType::MappedType; constexpr static int Size = ViewType::Size; + // Constructors + /** Default construct with uninitialized estimate * Actually initializes to zero to avoid problems with garbage floats * @todo move setting of initial value elsewhere */ - FactorVariable() : storage{MappedType::Zero()}, value{storage} {} + FactorVariable() noexcept : storage{MappedType::Zero()}, value{storage} {} /** Construct with initial value */ - explicit FactorVariable(MappedType &&initial) + explicit FactorVariable(MappedType initial) : storage{std::move(initial)}, value{storage} {} - /** Construct copying initial value */ - explicit FactorVariable(const MappedType &initial) - : storage{initial}, value{storage} {} + // Because `value` is a map holding a pointer to another member, we must + // define a custom copy constructor (and the rest of the rule of five) + /** Copy constructor */ + FactorVariable(const FactorVariable &other) noexcept + : FactorVariable{other.storage} {} + + /** Move constructor */ + FactorVariable(FactorVariable &&other) noexcept + : FactorVariable{std::move(other.storage)} {} + + /** Copy assignment operator */ + FactorVariable &operator=(const FactorVariable &other) noexcept { + auto temp = FactorVariable{other}; + swap(*this, temp); + return *this; + } + + /** Move assignment operator */ + FactorVariable &operator=(FactorVariable &&other) noexcept { + auto temp = std::move(other); + swap(*this, temp); + return *this; + } + + friend void swap(FactorVariable &lhs, FactorVariable &rhs) noexcept { + 
std::swap(lhs.storage, rhs.storage); + // Note: don't swap value + } + + ~FactorVariable() override = default; + + // Access /** Return the number of scalar values in the variable. */ int size() const noexcept override { diff --git a/wave_optimization/include/wave/optimization/factor_graph/FactorVariableBase.hpp b/wave_optimization/include/wave/optimization/factor_graph/FactorVariableBase.hpp index c71726ed..8c59fdd2 100644 --- a/wave_optimization/include/wave/optimization/factor_graph/FactorVariableBase.hpp +++ b/wave_optimization/include/wave/optimization/factor_graph/FactorVariableBase.hpp @@ -22,26 +22,9 @@ class FactorVariableBase { virtual const double *data() const noexcept = 0; virtual double *data() noexcept = 0; - /** Marks as constant during optimization - * @todo replace with unary factors for priors - */ - void setFixed(bool c) noexcept { - this->fixed = c; - } - - /** Whether this has been marked constant during optimization - * @todo replace with unary factors for priors - */ - bool isFixed() const noexcept { - return this->fixed; - } - /** Print representation of the object for debugging. * Called by `operator<<`. */ virtual void print(std::ostream &os) const = 0; - - private: - bool fixed = false; }; inline std::ostream &operator<<(std::ostream &os, const FactorVariableBase &v) { diff --git a/wave_optimization/include/wave/optimization/factor_graph/Noise.hpp b/wave_optimization/include/wave/optimization/factor_graph/Noise.hpp new file mode 100644 index 00000000..5b3d7ded --- /dev/null +++ b/wave_optimization/include/wave/optimization/factor_graph/Noise.hpp @@ -0,0 +1,110 @@ +/** + * @file + * @ingroup optimization + */ + +#ifndef WAVE_OPTIMIZATION_FACTOR_GRAPH_NOISE_HPP +#define WAVE_OPTIMIZATION_FACTOR_GRAPH_NOISE_HPP + +#include + +#include "wave/utils/math.hpp" +#include "wave/optimization/factor_graph/FactorVariableBase.hpp" + +namespace wave { +/** @addtogroup optimization + * @{ */ + +template +struct traits {}; + +/** + * Gaussian noise with full covariance matrix + * @tparam S dimension of the value + */ +template +class FullNoise { + using SquareMat = Eigen::Matrix; + + public: + /** The type accepted by the constructor */ + using InitType = SquareMat; + + /** Construct with the given covariance matrix */ + explicit FullNoise(InitType cov) + : covariance_mat{cov}, + // Pre-calculate inverse sqrt covariance, used in normalization + inverse_sqrt_cov{SquareMat{cov.llt().matrixL()}.inverse()} {} + + Eigen::Matrix inverseSqrtCov() const { + return this->inverse_sqrt_cov; + }; + + Eigen::Matrix covariance() const { + return this->covariance_mat; + }; + + private: + const SquareMat covariance_mat; + const SquareMat inverse_sqrt_cov; +}; + +/** + * Gaussian noise with diagonal covariance matrix + * @tparam S dimension of the value + */ +template +class DiagonalNoise { + using DiagonalMat = Eigen::DiagonalMatrix; + + public: + /** The type accepted by the constructor */ + using InitType = Eigen::Matrix; + + /** Construct with the given vector of standard devations (sigmas) */ + explicit DiagonalNoise(const InitType &stddev) + : covariance_mat{stddev.cwiseProduct(stddev)}, + // Pre-calculate inverse sqrt covariance, used in normalization + inverse_sqrt_cov{stddev.cwiseInverse()} {} + + DiagonalMat covariance() const { + return this->covariance_mat; + }; + + DiagonalMat inverseSqrtCov() const { + return this->inverse_sqrt_cov; + }; + + private: + const DiagonalMat covariance_mat; + const DiagonalMat inverse_sqrt_cov; +}; + +/** + * Special case of noise for a single value + 
*/ +template <> +class DiagonalNoise<1> { + public: + /** The type accepted by the constructor */ + using InitType = double; + + /** Construct with the given standard devations (sigma) */ + explicit DiagonalNoise<1>(double stddev) : stddev{stddev} {} + + double covariance() const { + return this->stddev * this->stddev; + }; + + double inverseSqrtCov() const { + return 1. / this->stddev; + } + + private: + double stddev; +}; + +/** @} group optimization */ +} // namespace wave + +#endif // WAVE_OPTIMIZATION_FACTOR_GRAPH_NOISE_HPP diff --git a/wave_optimization/include/wave/optimization/factor_graph/OutputMap.hpp b/wave_optimization/include/wave/optimization/factor_graph/OutputMap.hpp index b332728c..df7b31e5 100644 --- a/wave_optimization/include/wave/optimization/factor_graph/OutputMap.hpp +++ b/wave_optimization/include/wave/optimization/factor_graph/OutputMap.hpp @@ -29,11 +29,37 @@ class OutputMap : public Eigen::Map { return this->data() != nullptr; } + /* Assign from an Eigen matrix */ template OutputMap &operator=(const Eigen::MatrixBase &other) { this->Eigen::Map::operator=(other); return *this; } + + /** + * Assign from a ValueView + * + * @tparam ViewType the type of ValueView + * @tparam MappedType specified only so this template is used only for + * ValueView types (see SFINAE) + */ + template + OutputMap &operator=(const ViewType &v) { + this->Eigen::Map::operator=(v.asVector()); + return *this; + } + + /** + * Assign from a double + * Allowed for values of size 1 only + */ + OutputMap &operator=(double v) { + static_assert(OutputMap::SizeAtCompileTime == 1, + "Only OutputMap of size 1 may be assigned doubles"); + *this->data() = v; + return *this; + }; }; /** Type of jacobian output parameters for `Factor::evaluate()`. */ diff --git a/wave_optimization/include/wave/optimization/factor_graph/PerfectPrior.hpp b/wave_optimization/include/wave/optimization/factor_graph/PerfectPrior.hpp new file mode 100644 index 00000000..3c20e2ad --- /dev/null +++ b/wave_optimization/include/wave/optimization/factor_graph/PerfectPrior.hpp @@ -0,0 +1,99 @@ +/** + * @file + * @ingroup optimization + */ + +#ifndef WAVE_OPTIMIZATION_FACTOR_GRAPH_PERFECT_PRIOR_HPP +#define WAVE_OPTIMIZATION_FACTOR_GRAPH_PERFECT_PRIOR_HPP + +#include "wave/optimization/factor_graph/FactorVariableBase.hpp" +#include "wave/optimization/factor_graph/FactorBase.hpp" +#include "wave/optimization/factor_graph/FactorMeasurement.hpp" + + +namespace wave { +/** @addtogroup optimization + * @{ */ + +/** + * A unary factor representing a noiseless prior + * + * A prior in a factor graph can be described as + * + * @f[ + * Z = f(\theta) + \epsilon + * @f] + * + * where @f$ f @$f is the measurement function, @f$ \theta @$f is a random + * variable and @f$ \epsilon @$f is random noise. @f$ \epsilon @f cannot be + * zero in general, as there may not be a solution for @f$ \Theta @$f. + * + * In libwave, noiseless priors are allowed in the case of a direct + * measurement (i.e., @f$ f(\theta) = \theta @$f). These are called perfect + * priors. Each FactorVariable may be connected to at most one perfect prior. 
+ * + * @tparam VarType The type of factor variable we have prior information on + */ +template +class PerfectPrior : public FactorBase { + using FactorType = PerfectPrior; + using ViewType = typename VarType::ViewType; + using MeasType = FactorMeasurement; + + public: + constexpr static int NumVars = 1; + constexpr static int ResidualSize = MeasType::Size; + using ResidualType = Eigen::Matrix; + using VarArrayType = FactorBase::VarVectorType; + using const_iterator = typename VarArrayType::const_iterator; + + /** Construct with the given measurement and variable. */ + explicit PerfectPrior(MeasType measurement, + std::shared_ptr variable_ptr) + : measurement{measurement}, variable_ptrs{variable_ptr} { + // Assign to the variable + variable_ptr->value = measurement.value; + } + + int size() const override { + return NumVars; + } + + int residualSize() const override { + return ResidualSize; + } + + bool evaluateRaw(double const *const *, double *, double **) const + noexcept override { + return false; + } + + /** Get a reference to the vector of variable pointers */ + const VarVectorType &variables() const noexcept override { + return this->variable_ptrs; + } + + /** Return true if this factor is a zero-noise prior */ + bool isPerfectPrior() const noexcept override { + return true; + } + + /** Print a representation for debugging. Used by operator<< */ + void print(std::ostream &os) const override { + os << "[Perfect prior for variable "; + const auto &v = this->variable_ptrs.front(); + os << *v << "(" << v << ")]"; + } + + private: + /** Storage of the measurement */ + MeasType measurement; + + /** Pointers to the variables this factor is linked to */ + VarArrayType variable_ptrs; +}; + +/** @} group optimization */ +} // namespace wave + +#endif // WAVE_OPTIMIZATION_FACTOR_GRAPH_PERFECT_PRIOR_HPP diff --git a/wave_optimization/include/wave/optimization/factor_graph/ValueView.hpp b/wave_optimization/include/wave/optimization/factor_graph/ValueView.hpp index 2f65f15c..95524c7a 100644 --- a/wave_optimization/include/wave/optimization/factor_graph/ValueView.hpp +++ b/wave_optimization/include/wave/optimization/factor_graph/ValueView.hpp @@ -50,11 +50,27 @@ class ValueView { /** Construct to map the given mapped Eigen matrix */ explicit ValueView(MappedType &m) : dataptr{m.data()} {} + /** Copy the value of another ValueView */ + ValueView &operator=(const ValueView &other) { + this->asVector() = other.asVector(); + return *this; + } + /** Return the size of the underlying value */ constexpr int size() const noexcept { return Size; } + /** Return an Eigen Map to the data */ + Eigen::Map asVector() const { + return Eigen::Map{this->dataptr}; + } + + /** Return an Eigen Map to the data */ + Eigen::Map asVector() { + return Eigen::Map{this->dataptr}; + } + /** Return a raw pointer to the start of the internal vector */ const double *data() const { return this->dataptr; diff --git a/wave_optimization/include/wave/optimization/factor_graph/impl/Factor.hpp b/wave_optimization/include/wave/optimization/factor_graph/impl/Factor.hpp index 88ba7015..d8a81007 100644 --- a/wave_optimization/include/wave/optimization/factor_graph/impl/Factor.hpp +++ b/wave_optimization/include/wave/optimization/factor_graph/impl/Factor.hpp @@ -48,7 +48,9 @@ struct make_index_sequence : make_index_sequence {}; template struct make_index_sequence<0, Indices...> : index_sequence {}; -/** Constructs a FactorValue mapping the given raw array. +/** Constructs a ValueView mapping the given raw array. 
+ * + * Applies for values of size > 1 * * @tparam V the type of FactorVariable * @tparam I the index of the variable in this factor @@ -56,7 +58,8 @@ struct make_index_sequence<0, Indices...> : index_sequence {}; * @return a FactorValue mapping the Ith array */ template -const typename V::ViewType make_value(double const *const *parameters) { +inline typename std::enable_if<(V::Size > 1), const typename V::ViewType>::type +make_value(double const *const *parameters) { /* The array is const, but ValueView maps non-const arrays. It would be * complicated to have two variants of ValueView, one for const and one * for non-const arrays. In this case, we know that the ValueView itself @@ -67,6 +70,16 @@ const typename V::ViewType make_value(double const *const *parameters) { return typename V::ViewType{ptr}; } +/** Constructs a ValueView mapping the given raw array. + * + * Specialization for values of size 1 + */ +template +inline typename std::enable_if<(V::Size == 1), const double &>::type make_value( + double const *const *parameters) { + return *parameters[I]; +} + /** Constructs a jacobian output object mapping the given raw array. * * @tparam R size of residuals (height of the jacobian matrix) @@ -84,40 +97,88 @@ JacobianOut make_jacobian(double **jacobians) { /** Calls Factor::evaluate() with the necessary arguments generated from the * Factor type (known at compile time) and the provided array pointers (known * at run time). + * + * Calling a separate function expands the given index sequence - this approach + * is explained above in the documentation for `index_sequence`. */ -template -bool callEvaluate(F measurement_function, +template +bool callEvaluate(typename Factor::FuncType measurement_function, double const *const *parameters, - const ResultOut &results, + ResultOut &residuals, double **jacobians, index_sequence) noexcept { // Call evaluate, generating the correct type and number of the three kinds // of arguments: input values, output residuals, and output jacobians. - return measurement_function(make_value(parameters)..., - results, - make_jacobian(jacobians)...); + return measurement_function( + make_value(parameters)..., + residuals, + make_jacobian(jacobians)...); } +template +void multiplyJacobian(const T &L, JacobianOut jac) { + if (jac) { + jac = L * jac; + } +}; + +/** Calls Factor::evaluate() with the necessary arguments generated from the + * Factor type (known at compile time) and the provided array pointers (known + * at run time). + * + * Calling a separate function expands the given index sequence - this approach + * is explained in the documentation for `index_sequence`. + */ +template +void normalize(const M &measurement, + ResultOut &residuals, + double **jacobians, + index_sequence) noexcept { + // Calculate the normalized residual + const auto &L = measurement.noise.inverseSqrtCov(); + residuals = L * (residuals - measurement); + // Call multiplyJacobian for each jacobian pointer + auto loop = { + (multiplyJacobian(L, make_jacobian(jacobians)), + 0)...}; + (void) loop; // ignore unused variable warning +} } // namespace internal +template +Factor::Factor(Factor::FuncType measurement_function, + M measurement, + std::shared_ptr... 
variable_ptrs) + : measurement_function{measurement_function}, + measurement{measurement}, + variable_ptrs{{variable_ptrs...}} {} + +// Definition of evaluateRaw for regular factor (not perfect prior) template bool Factor::evaluateRaw(double const *const *parameters, - double *residuals, + double *raw_residuals, double **jacobians) const noexcept { - auto results = ResultOut{residuals}; + // Map the raw residuals array to an OutputMap, allowing easy assignment + auto residuals = ResultOut{raw_residuals}; - // Call a helper function. We need to do this to expand the index sequence - // which we generate here. This approach is explained above. - auto ok = internal::callEvaluate::FuncType, M::Size, V...>( + // Call a helper function, specialized for this factor's type. This helps + // by expanding the index sequence we create here (see `index_sequence`). + bool ok = internal::callEvaluate( this->measurement_function, parameters, - results, + residuals, jacobians, internal::make_index_sequence()); + if (ok) { - results = results - this->measurement; + internal::normalize( + this->measurement, + residuals, + jacobians, + internal::make_index_sequence()); } + return ok; } diff --git a/wave_optimization/include/wave/optimization/factor_graph/impl/FactorGraph.hpp b/wave_optimization/include/wave/optimization/factor_graph/impl/FactorGraph.hpp index baedb1bb..4b3d1ba5 100644 --- a/wave_optimization/include/wave/optimization/factor_graph/impl/FactorGraph.hpp +++ b/wave_optimization/include/wave/optimization/factor_graph/impl/FactorGraph.hpp @@ -1,23 +1,48 @@ +#include "wave/optimization/factor_graph/FactorMeasurement.hpp" +#include "wave/optimization/factor_graph/PerfectPrior.hpp" + namespace wave { +namespace internal { + +/** + * Trivial measurement function for a prior, f(X) = X + */ +template +inline bool identityMeasurementFunction( + const V &variable, + ResultOut::Size> result, + JacobianOut::Size, FactorVariable::Size> jacobian) { + result = variable; + + if (jacobian) { + jacobian.setIdentity(); + } + + return true; +} + +} // namespace internal + // Constructors -FactorGraph::FactorGraph() {} +inline FactorGraph::FactorGraph() {} // Capacity -FactorGraph::size_type FactorGraph::countFactors() const noexcept { +inline FactorGraph::size_type FactorGraph::countFactors() const noexcept { return this->factors.size(); } -bool FactorGraph::empty() const noexcept { +inline bool FactorGraph::empty() const noexcept { return this->factors.empty(); } // Modifiers + template -void FactorGraph::addFactor(FuncType f, - const MeasType &meas, - std::shared_ptr... variables) { +inline void FactorGraph::addFactor(FuncType f, + const MeasType &measurement, + std::shared_ptr... 
variables) { using FactorType = Factor; // Give a nice error message if the function type is wrong @@ -26,28 +51,49 @@ void FactorGraph::addFactor(FuncType f, "The given measurement function is of incorrect type"); this->factors.emplace_back( - std::make_shared(f, meas, std::move(variables)...)); + std::make_shared(f, measurement, std::move(variables)...)); }; +template +inline void FactorGraph::addPrior( + const MeasType &measurement, + std::shared_ptr variable) { + return this->addFactor( + internal::identityMeasurementFunction, + measurement, + std::move(variable)); +} + +template +inline void FactorGraph::addPerfectPrior( + const typename VarType::MappedType &measured_value, + std::shared_ptr variable) { + using MeasType = FactorMeasurement; + + this->factors.emplace_back(std::make_shared>( + MeasType{measured_value}, std::move(variable))); +} + // Iterators -typename FactorGraph::iterator FactorGraph::begin() noexcept { +inline typename FactorGraph::iterator FactorGraph::begin() noexcept { return this->factors.begin(); } -typename FactorGraph::iterator FactorGraph::end() noexcept { +inline typename FactorGraph::iterator FactorGraph::end() noexcept { return this->factors.end(); } -typename FactorGraph::const_iterator FactorGraph::begin() const noexcept { +inline typename FactorGraph::const_iterator FactorGraph::begin() const + noexcept { return this->factors.begin(); } -typename FactorGraph::const_iterator FactorGraph::end() const noexcept { +inline typename FactorGraph::const_iterator FactorGraph::end() const noexcept { return this->factors.end(); } -std::ostream &operator<<(std::ostream &os, const FactorGraph &graph) { +inline std::ostream &operator<<(std::ostream &os, const FactorGraph &graph) { os << "FactorGraph " << graph.countFactors() << " factors ["; const auto N = graph.countFactors(); for (auto i = 0 * N; i < N; ++i) { diff --git a/wave_optimization/tests/factor_graph/example_instances.hpp b/wave_optimization/tests/factor_graph/example_instances.hpp index 870228e2..1842e2c0 100644 --- a/wave_optimization/tests/factor_graph/example_instances.hpp +++ b/wave_optimization/tests/factor_graph/example_instances.hpp @@ -32,7 +32,10 @@ struct Pose2D : public ValueView<3> { // (https://gcc.gnu.org/bugzilla/show_bug.cgi?id=67054) explicit Pose2D(double *d) : ValueView<3>{d} {} explicit Pose2D(MappedType &m) : ValueView<3>{m} {} - + Pose2D &operator=(const Pose2D &other) { + this->ValueView<3>::operator=(other); + return *this; + } using Vec1 = Eigen::Matrix; Eigen::Map position{dataptr}; @@ -51,6 +54,10 @@ struct Landmark2D : public ValueView<2> { // (https://gcc.gnu.org/bugzilla/show_bug.cgi?id=67054) explicit Landmark2D(double *d) : ValueView<2>{d} {} explicit Landmark2D(MappedType &m) : ValueView<2>{m} {} + Landmark2D &operator=(const Landmark2D &other) { + this->ValueView<2>::operator=(other); + return *this; + } Eigen::Map position{dataptr}; }; diff --git a/wave_optimization/tests/factor_graph/factor_test.cpp b/wave_optimization/tests/factor_graph/factor_test.cpp index e30f2ca1..c5fa913e 100644 --- a/wave_optimization/tests/factor_graph/factor_test.cpp +++ b/wave_optimization/tests/factor_graph/factor_test.cpp @@ -1,5 +1,7 @@ #include "wave/wave_test.hpp" #include "wave/optimization/factor_graph/Factor.hpp" +#include "wave/optimization/factor_graph/FactorVariable.hpp" +#include "wave/optimization/factor_graph/FactorGraph.hpp" #include "example_instances.hpp" namespace wave { @@ -11,7 +13,7 @@ TEST(FactorTest, evaluate) { using Landmark2DVar = FactorVariable; - 
DistanceToLandmarkFactor f{DistanceMeasurement{meas}, + DistanceToLandmarkFactor f{DistanceMeasurement{meas, 1.0}, std::make_shared(), std::make_shared()}; @@ -41,10 +43,68 @@ TEST(FactorTest, evaluate) { VectorsNear, expected_jac_landmark, Eigen::Map{out_jac_landmark}); } + +TEST(FactorTest, evaluateSize1) { + // Demonstrate construction of a simple unary factor with size-1 measurement + // This also shows that the measurement function can be a lambda + auto func = []( + const double &v, ResultOut<1> result, JacobianOut<1, 1> jac) -> bool { + result = v * 2; + jac(0, 0) = -1.1; + return true; + }; + const auto meas_val = 1.2; + const auto meas_stddev = 0.1; + auto meas = FactorMeasurement{meas_val, meas_stddev}; + auto var = std::make_shared>(); + auto factor = Factor, FactorVariable>{ + func, meas, var}; + EXPECT_EQ(1, factor.size()); + EXPECT_EQ(1, factor.residualSize()); + ASSERT_EQ(1u, factor.variables().size()); + EXPECT_EQ(var, factor.variables().front()); + EXPECT_FALSE(factor.isPerfectPrior()); + + + // Prepare sample C-style arrays as used by Ceres + double test_residual; + double test_jac; + double test_val = 1.09; + const double *const params[] = {&test_val}; + double *jacs[] = {&test_jac}; + + // evaluate the factor + EXPECT_TRUE(factor.evaluateRaw(params, &test_residual, jacs)); + + // Compare the result. We expect the residual to be L(f(X) - Z) + // In this case that is (2x - Z)/stddev + EXPECT_DOUBLE_EQ((test_val * 2 - meas_val) / meas_stddev, test_residual); + // We expect the jacobian to be normalized as well + EXPECT_DOUBLE_EQ(-1.1 / meas_stddev, test_jac); +} + + +TEST(FactorTest, constructPerfectPrior) { + // Test that using a perfect prior immediately sets the variable's value + // @todo this may change + + // Note explicitly constructing PerfectPrior is not intended for users - + // They should use FactorGraph::addPerfectPrior. That is why this test does + // some non-intuitive preparation (e.g. 
constructing a FactorMeasurement) + using MeasType = FactorMeasurement; + using VarType = FactorVariable; + auto v = std::make_shared(); + + auto factor = PerfectPrior{MeasType{1.2}, v}; + EXPECT_DOUBLE_EQ(1.2, v->value); + + EXPECT_TRUE(factor.isPerfectPrior()); +} + TEST(FactorTest, print) { auto v1 = std::make_shared(); auto v2 = std::make_shared(); - auto meas = DistanceMeasurement{0.0}; + auto meas = DistanceMeasurement{0.0, 1.0}; auto factor = DistanceToLandmarkFactor{meas, v1, v2}; @@ -57,36 +117,19 @@ TEST(FactorTest, print) { EXPECT_EQ(expected.str(), ss.str()); } -TEST(FactorTest, evaluateCostFunction) { - auto meas = DistanceMeasurement{1.23}; - DistanceToLandmarkFactor f{ - meas, std::make_shared(), std::make_shared()}; +TEST(FactorTest, printPerfectPrior) { + using MeasType = FactorMeasurement; + using VarType = FactorVariable; + auto v = std::make_shared(); + auto factor = PerfectPrior{MeasType{1.2}, v}; - // Prepare sample C-style arrays as used by Ceres - const double param_pose[3] = {1.1, 2.2, 3.3}; - const double param_landmark[2] = {4.4, 5.5}; - const double *const parameters[2] = {param_pose, param_landmark}; - double out_residuals[1]; - double out_jac_pose[3]; - double out_jac_landmark[2]; - double *out_jacobians[2] = {out_jac_pose, out_jac_landmark}; - - // Calculate expected values (use analytic jacobians) - auto dist = - std::sqrt((1.1 - 4.4) * (1.1 - 4.4) + (2.2 - 5.5) * (2.2 - 5.5)); - auto expected_residual = dist - 1.23; - Vec3 expected_jac_pose{(1.1 - 4.4) / dist, (2.2 - 5.5) / dist, 0}; - Vec2 expected_jac_landmark{(1.1 - 4.4) / dist, (2.2 - 5.5) / dist}; + std::stringstream expected; + expected << "[Perfect prior for variable "; + expected << *v << "(" << v << ")]"; - // Call and compare - auto p_costfn = f.costFunction(); - auto res = p_costfn->Evaluate(parameters, out_residuals, out_jacobians); - EXPECT_TRUE(res); - EXPECT_DOUBLE_EQ(expected_residual, out_residuals[0]); - EXPECT_PRED2( - VectorsNear, expected_jac_pose, Eigen::Map{out_jac_pose}); - EXPECT_PRED2( - VectorsNear, expected_jac_landmark, Eigen::Map{out_jac_landmark}); + std::stringstream ss; + ss << factor; + EXPECT_EQ(expected.str(), ss.str()); } } // namespace wave diff --git a/wave_optimization/tests/factor_graph/graph_test.cpp b/wave_optimization/tests/factor_graph/graph_test.cpp index 20decb56..6eed5e86 100644 --- a/wave_optimization/tests/factor_graph/graph_test.cpp +++ b/wave_optimization/tests/factor_graph/graph_test.cpp @@ -10,7 +10,7 @@ TEST(FactorGraph, add) { FactorGraph graph; auto p = std::make_shared(); auto l = std::make_shared(); - auto m = DistanceMeasurement{2.3}; + auto m = DistanceMeasurement{2.3, 1.0}; graph.addFactor(distanceMeasurementFunction, m, p, l); @@ -26,13 +26,138 @@ TEST(FactorGraph, capacity) { auto p = std::make_shared(); auto l = std::make_shared(); - auto m = DistanceMeasurement{2.3}; + auto m = DistanceMeasurement{2.3, 1.0}; graph.addFactor(distanceMeasurementFunction, m, p, l); EXPECT_EQ(1u, graph.countFactors()); EXPECT_FALSE(graph.empty()); } +TEST(FactorGraph, addPrior) { + const auto test_meas = Vec2{1.2, 3.4}; + const auto test_stddev = Vec2{0.01, 0.01}; + const auto test_val = Vec2{1.23, 3.38}; + // The expected results are normalized + const Vec2 expected_res = (test_val - test_meas).cwiseQuotient(test_stddev); + const Mat2 expected_jac = Mat2::Identity() / 0.01; + + // Prepare arguments to add unary factor + FactorGraph graph; + auto p = std::make_shared(); + auto m = FactorMeasurement{test_meas, test_stddev}; + + // Prepare arguments in a form 
matching ceres calls + Vec2 test_residual; + Mat2 test_jac; + const double *const params[] = {test_val.data()}; + double *jacs[] = {test_jac.data()}; + + // Add the factor, retrieve the pointer to it, and verity + graph.addPrior(m, p); + EXPECT_EQ(1u, graph.countFactors()); + + auto factor = *graph.begin(); + ASSERT_NE(nullptr, factor); + + EXPECT_TRUE(factor->evaluateRaw(params, test_residual.data(), jacs)); + EXPECT_PRED2(VectorsNear, expected_res, test_residual); + EXPECT_PRED2(MatricesNear, expected_jac, test_jac); +} + +TEST(FactorGraph, addPriorOfSize1) { + const auto test_meas = 1.2; + const auto test_stddev = 0.01; + const auto test_val = 1.23; + // The expected results are normalized + const auto expected_res = (test_val - test_meas) / test_stddev; + const auto expected_jac = 1.0 / test_stddev; + + // Prepare arguments to add unary factor + FactorGraph graph; + auto p = std::make_shared>(); + auto m = FactorMeasurement{test_meas, test_stddev}; + + // Prepare arguments in a form matching ceres calls + double test_residual; + double test_jac; + const double *const params[] = {&test_val}; + double *jacs[] = {&test_jac}; + + // Add the factor, retrieve the pointer to it, and verity + graph.addPrior(m, p); + EXPECT_EQ(1u, graph.countFactors()); + + auto factor = *graph.begin(); + ASSERT_NE(nullptr, factor); + + EXPECT_TRUE(factor->evaluateRaw(params, &test_residual, jacs)); + EXPECT_DOUBLE_EQ(expected_res, test_residual); + EXPECT_DOUBLE_EQ(expected_jac, test_jac); +} + +TEST(FactorGraph, addPerfectPrior) { + const auto test_meas = Vec2{1.2, 3.4}; + const auto test_val = Vec2{1.23, 3.38}; + + // Prepare arguments to add unary factor + FactorGraph graph; + auto p = std::make_shared(); + + // Prepare arguments in a form matching ceres calls + Vec2 test_residual; + Mat2 test_jac; + const double *const params[] = {test_val.data()}; + double *jacs[] = {test_jac.data()}; + + // Add the factor + graph.addPerfectPrior(test_meas, p); + EXPECT_EQ(1u, graph.countFactors()); + + // The variable should have the measured value immediately + // @todo this may change + EXPECT_PRED2(VectorsNear, test_meas, Eigen::Map(p->data())); + + // Retrieve a pointer to the factor we just (indirectly) added + auto factor = *graph.begin(); + ASSERT_NE(nullptr, factor); + + EXPECT_TRUE(factor->isPerfectPrior()); + + // We cannot evaluate a factor that is a perfect prior + EXPECT_FALSE(factor->evaluateRaw(params, test_residual.data(), jacs)); +} + +TEST(FactorGraph, addPerfectPriorOfSize1) { + const auto test_meas = 1.2; + const auto test_val = 1.23; + + // Prepare arguments to add unary factor + FactorGraph graph; + auto p = std::make_shared>(); + + // Prepare arguments in a form matching ceres calls + double test_residual; + double test_jac; + const double *const params[] = {&test_val}; + double *jacs[] = {&test_jac}; + + // Add the factor + graph.addPerfectPrior(test_meas, p); + EXPECT_EQ(1u, graph.countFactors()); + + // The variable should have the measured value immediately + // @todo this may change + EXPECT_DOUBLE_EQ(test_meas, *p->data()); + + auto factor = *graph.begin(); + ASSERT_NE(nullptr, factor); + + EXPECT_TRUE(factor->isPerfectPrior()); + + // We cannot evaluate a factor that is a perfect prior + EXPECT_FALSE(factor->evaluateRaw(params, &test_residual, jacs)); +} + TEST(FactorGraph, triangulationSim) { // Simulate a robot passing by a landmark // First generate "true" data @@ -46,25 +171,30 @@ TEST(FactorGraph, triangulationSim) { // Construct variables and add them to the graph auto landmark_vars = 
std::vector<std::shared_ptr<Landmark2DVar>>{}; auto pose_vars = std::vector<std::shared_ptr<Pose2DVar>>{}; + FactorGraph graph; - for (const auto &l : true_l_pos) { - landmark_vars.push_back(std::make_shared<Landmark2DVar>(l)); + + for (const auto &l_pos : true_l_pos) { + auto l = std::make_shared<Landmark2DVar>(); + landmark_vars.push_back(l); // For now, landmark positions are exactly known. - landmark_vars.back()->setFixed(true); + graph.addPerfectPrior(l_pos, l); } + for (const auto &pose : true_poses) { // Make uninitialized Variable auto p = std::make_shared<Pose2DVar>(); pose_vars.push_back(p); for (auto i = 0u; i < landmark_vars.size(); ++i) { auto distance = double{(true_l_pos[i] - pose.head<2>()).norm()}; - auto meas = DistanceMeasurement{distance}; + auto meas = DistanceMeasurement{distance, 1.0}; graph.addFactor( distanceMeasurementFunction, meas, p, landmark_vars[i]); } } - EXPECT_EQ(true_poses.size() * true_l_pos.size(), graph.countFactors()); + // Expect one factor for each measurement, plus the three priors + EXPECT_EQ(3 + true_poses.size() * true_l_pos.size(), graph.countFactors()); // Fill the variables with values evaluateGraph(graph); @@ -85,9 +215,9 @@ TEST(GraphTest, print) { auto graph = FactorGraph{}; - auto m1 = DistanceMeasurement{0.0}; - auto m2 = DistanceMeasurement{1.1}; - auto m3 = DistanceMeasurement{2.2}; + auto m1 = DistanceMeasurement{0.0, 1.0}; + auto m2 = DistanceMeasurement{1.1, 1.0}; + auto m3 = DistanceMeasurement{2.2, 1.0}; graph.addFactor(distanceMeasurementFunction, m1, v1, l); graph.addFactor(distanceMeasurementFunction, m2, v2, l); diff --git a/wave_optimization/tests/factor_graph/noise_test.cpp b/wave_optimization/tests/factor_graph/noise_test.cpp new file mode 100644 index 00000000..49425164 --- /dev/null +++ b/wave_optimization/tests/factor_graph/noise_test.cpp @@ -0,0 +1,46 @@ +#include "wave/wave_test.hpp" +#include "wave/optimization/factor_graph/Noise.hpp" + +namespace wave { + +TEST(NoiseTest, diagonalNoise) { + const DiagonalNoise<2>::InitType stddev = Vec2{1.1, 2.2}; + Eigen::Matrix2d expected_cov, expected_inv; + expected_cov << 1.1 * 1.1, 0, 0, 2.2 * 2.2; + expected_inv << 1 / 1.1, 0, 0, 1 / 2.2; + + auto n = DiagonalNoise<2>{stddev}; + + auto res = Eigen::MatrixXd{n.covariance()}; + EXPECT_PRED2(MatricesNear, expected_cov, res); + + res = Eigen::MatrixXd{n.inverseSqrtCov()}; + EXPECT_PRED2(MatricesNear, expected_inv, res); +} + +TEST(NoiseTest, singleNoise) { + const DiagonalNoise<1>::InitType stddev = 1.1; + + auto n = DiagonalNoise<1>{stddev}; + EXPECT_DOUBLE_EQ(stddev * stddev, n.covariance()); + EXPECT_DOUBLE_EQ(1.
/ stddev, n.inverseSqrtCov()); +} + +TEST(NoiseTest, fullNoise) { + FullNoise<2>::InitType cov = Mat2{}; + cov << 3.3, 1.1, 1.1, 4.4; + + auto n = FullNoise<2>{cov}; + + // inverse of cholesky of cov calculated separately + Mat2 expected_inv; + expected_inv << 0.550481882563180, 0, -0.165976532577323, 0.497929597731969; + + auto res = Eigen::MatrixXd{n.covariance()}; + EXPECT_PRED2(MatricesNear, cov, res); + + res = Eigen::MatrixXd{n.inverseSqrtCov()}; + EXPECT_PRED2(MatricesNear, expected_inv, res); +} + +} // namespace wave diff --git a/wave_optimization/tests/factor_graph/variable_test.cpp b/wave_optimization/tests/factor_graph/variable_test.cpp index 47d1dfce..d68881c6 100644 --- a/wave_optimization/tests/factor_graph/variable_test.cpp +++ b/wave_optimization/tests/factor_graph/variable_test.cpp @@ -1,6 +1,6 @@ #include "wave/wave_test.hpp" #include "wave/optimization/factor_graph/FactorVariable.hpp" - +#include "wave/optimization/factor_graph/FactorMeasurement.hpp" namespace wave { @@ -49,4 +49,50 @@ TEST(VariableTest, constructInitialRvalue) { EXPECT_PRED2(VectorsNear, expected, Eigen::Map<const Vec2>{var.value.data()}); } +TEST(VariableTest, copyConstruct) { + // Because FactorVariable works with pointers to its storage, it must have + // a custom copy constructor + FactorVariable<ValueView<2>> a{Vec2{1.1, 2.2}}; + FactorVariable<ValueView<2>> b{a}; + + ASSERT_NE(&a, &b); + EXPECT_NE(a.data(), b.data()); + EXPECT_DOUBLE_EQ(1.1, *a.data()); + EXPECT_DOUBLE_EQ(1.1, *b.data()); +} + +TEST(VariableTest, moveConstruct) { + FactorVariable<ValueView<2>> a; + auto a_ptr = a.data(); + FactorVariable<ValueView<2>> b{std::move(a)}; + + ASSERT_NE(&a, &b); + EXPECT_NE(a_ptr, b.data()); +} + +TEST(VariableTest, copyAssign) { + FactorVariable<ValueView<2>> a, b; + b = a; + + ASSERT_NE(&a, &b); + EXPECT_NE(a.data(), b.data()); +} + + +TEST(VariableTest, moveAssign) { + FactorVariable<ValueView<2>> a, b; + b = std::move(a); + + ASSERT_NE(&a, &b); + EXPECT_NE(a.data(), b.data()); +} + +TEST(MeasurementTest, constructFromRvalue) { + auto meas = FactorMeasurement<ValueView<2>>{Vec2{1.1, 2.2}, Vec2{0.1, 0.2}}; + const auto expected_val = Vec2{1.1, 2.2}; + EXPECT_EQ(2, meas.size()); + EXPECT_PRED2( + VectorsNear, expected_val, Eigen::Map<const Vec2>{meas.value.data()}); +} + } // namespace wave
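
Usage sketch: the tests above exercise the new API piecewise; the snippet below shows how the pieces are intended to fit together — a noisy binary factor added with addFactor, a Gaussian prior added with addPrior, and a zero-noise prior added with addPerfectPrior, solved through the Ceres wrapper. It assumes the Pose2DVar, Landmark2DVar, Landmark2D, DistanceMeasurement, and distanceMeasurementFunction fixtures from tests/factor_graph/example_instances.hpp, and the template arguments are inferred from the tests, so treat it as an illustrative sketch rather than code from this changeset.

#include <memory>

#include "wave/optimization/ceres/graph_wrapper.hpp"
#include "wave/optimization/factor_graph/FactorGraph.hpp"
#include "example_instances.hpp"  // test fixtures: Pose2DVar, Landmark2DVar, ...

int main() {
    wave::FactorGraph graph;

    // Variables start uninitialized; evaluateGraph() writes the solution
    // back into their storage.
    auto pose = std::make_shared<wave::Pose2DVar>();
    auto landmark = std::make_shared<wave::Landmark2DVar>();

    // Binary factor: a range measurement with standard deviation 0.1
    // (a DiagonalNoise<1> is constructed from the scalar sigma).
    auto distance = wave::DistanceMeasurement{2.3, 0.1};
    graph.addFactor(wave::distanceMeasurementFunction, distance, pose, landmark);

    // Unary factor: a Gaussian prior on the landmark, built from a measured
    // value and per-element standard deviations (DiagonalNoise<2>).
    auto landmark_prior = wave::FactorMeasurement<wave::Landmark2D>{
        wave::Vec2{4.4, 5.5}, wave::Vec2{0.1, 0.1}};
    graph.addPrior(landmark_prior, landmark);

    // Zero-noise prior: the pose is known exactly. The variable takes the
    // measured value immediately, and addFactorToProblem() marks its
    // parameter block constant instead of adding a residual block.
    graph.addPerfectPrior(wave::Vec3{0.0, 0.0, 0.0}, pose);

    // Build the ceres::Problem and solve. Residuals and jacobians of noisy
    // factors are normalized by the inverse square-root covariance.
    wave::evaluateGraph(graph);

    return 0;
}

Note that a perfect prior contributes no residual block at all: PerfectPrior::evaluateRaw returns false, and the Ceres wrapper instead holds the variable's parameter block constant.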