From 8127c1e92f539ac1952d677eadfb306d8a52a482 Mon Sep 17 00:00:00 2001
From: Easton Potokar
Date: Mon, 16 Dec 2024 20:38:26 -0500
Subject: [PATCH] Make core module to pair with trait module

---
 examples/g2o.rs                       | 13 ++++-----
 examples/gps.rs                       | 42 +++++++++++++--------------
 rustfmt.toml                          |  6 ++++
 src/containers/factor.rs              |  2 +-
 src/containers/graph.rs               |  2 +-
 src/containers/values.rs              |  4 +--
 src/lib.rs                            | 31 +++++++++++++++-----
 src/optimizers/gauss_newton.rs        |  8 ++---
 src/optimizers/levenberg_marquardt.rs |  8 ++---
 src/optimizers/macros.rs              | 22 +++++++++-----
 src/optimizers/mod.rs                 | 25 +++++++---------
 src/optimizers/traits.rs              |  9 +-----
 src/residuals/imu_preint/delta.rs     |  6 ++--
 src/residuals/imu_preint/residual.rs  |  8 ++---
 src/variables/imu_bias.rs             |  2 +-
 15 files changed, 101 insertions(+), 87 deletions(-)
 create mode 100644 rustfmt.toml

diff --git a/examples/g2o.rs b/examples/g2o.rs
index e66103b..d621c2c 100644
--- a/examples/g2o.rs
+++ b/examples/g2o.rs
@@ -1,17 +1,16 @@
+#[cfg(feature = "rerun")]
+use std::net::{SocketAddr, SocketAddrV4};
 use std::{env, time::Instant};
 
+#[cfg(feature = "rerun")]
+use factrs::rerun::RerunObserver;
 use factrs::{
-    optimizers::{GaussNewton, GraphOptimizer, Optimizer},
+    core::{GaussNewton, SE2, SE3},
+    traits::Optimizer,
     utils::load_g20,
-    variables::*,
 };
-
-#[cfg(feature = "rerun")]
-use factrs::rerun::RerunObserver;
 #[cfg(feature = "rerun")]
 use rerun::{Arrows2D, Arrows3D, Points2D, Points3D};
-#[cfg(feature = "rerun")]
-use std::net::{SocketAddr, SocketAddrV4};
 
 // Setups rerun and a callback for iteratively sending to rerun
 // Must run with --features rerun for it to work
diff --git a/examples/gps.rs b/examples/gps.rs
index 6a3be0e..7eaaa40 100644
--- a/examples/gps.rs
+++ b/examples/gps.rs
@@ -10,21 +10,18 @@
 A simple 2D pose slam example with "GPS" measurements
 */
 #![allow(unused_imports)]
+// Our state will be represented by SE2 -> theta, x, y
+// VectorVar2 is a newtype around Vector2 for optimization purposes
+use factrs::variables::{VectorVar2, SE2};
 use factrs::{
     assign_symbols,
-    containers::{Graph, Values},
+    core::{BetweenResidual, GaussNewton, GaussianNoise, Graph, Values},
     dtype, fac,
     linalg::{Const, ForwardProp, Numeric, NumericalDiff, VectorX},
-    noise::GaussianNoise,
-    optimizers::GaussNewton,
-    residuals::{BetweenResidual, Residual1},
-    traits::{GraphOptimizer, Optimizer, Variable},
+    residuals::Residual1,
+    traits::*,
 };
 
-// Our state will be represented by SE2 -> theta, x, y
-// VectorVar2 is a newtype around Vector2 for optimization purposes
-use factrs::variables::{VectorVar2, SE2};
-
 #[derive(Clone, Debug)]
 // Enable serialization if it's desired
 #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
@@ -45,10 +42,11 @@ impl GpsResidual {
 impl Residual1 for GpsResidual {
     // Use forward propagation for differentiation
     type Differ = ForwardProp<<Self as Residual1>::DimIn>;
-    // Alternatively, could use numerical differentiation (6 => 10^-6 as denominator)
-    // type Differ = NumericalDiff<6>;
+    // Alternatively, could use numerical differentiation (6 => 10^-6 as
+    // denominator) type Differ = NumericalDiff<6>;
 
-    // The input variable type, input dimension of variable(s), and output dimension of residual
+    // The input variable type, input dimension of variable(s), and output dimension
+    // of residual
     type V1 = SE2;
     type DimIn = Const<3>;
     type DimOut = Const<2>;
@@ -64,8 +62,8 @@ impl Residual1 for GpsResidual {
     }
 
     // You can also hand-code the jacobian by hand if extra efficiency is desired.
-    // fn residual1_jacobian(&self, values: &Values, keys: &[Key]) -> DiffResult<VectorX, MatrixX> {
-    //     let p: &SE2 = values
+    // fn residual1_jacobian(&self, values: &Values, keys: &[Key]) ->
+    // DiffResult<VectorX, MatrixX> { let p: &SE2 = values
     //         .get_unchecked(keys[0])
     //         .expect("got wrong variable type");
     //     let s = p.theta().sin();
@@ -76,8 +74,9 @@ impl Residual1 for GpsResidual {
     //         diff,
     //     }
     // }
-    // As a note - the above jacobian is only valid if running with the "left" feature disabled
-    // Switching to the left feature will change the jacobian used
+    // As a note - the above jacobian is only valid if running with the "left"
+    // feature disabled Switching to the left feature will change the jacobian
+    // used
 }
 
 // Here we assign X to always represent SE2 variables
@@ -111,11 +110,12 @@ fn main() {
 
     // These will all compile-time error
     // values.insert(X(5), VectorVar2::identity()); // wrong variable type
-    // let f = fac![GpsResidual::new(0.0, 0.0), (X(0), X(1))]; // wrong number of keys
-    // let n = GaussianNoise::<5>::from_scalar_sigma(0.1);
-    // let f = fac![GpsResidual::new(0.0, 0.0), X(0), n]; // wrong noise-model dimension
-    // assign_symbols!(Y : VectorVar2);
-    // let f = fac![GpsResidual::new(0.0, 0.0), Y(0), 0.1 as std]; // wrong variable type
+    // let f = fac![GpsResidual::new(0.0, 0.0), (X(0), X(1))]; // wrong number of
+    // keys let n = GaussianNoise::<5>::from_scalar_sigma(0.1);
+    // let f = fac![GpsResidual::new(0.0, 0.0), X(0), n]; // wrong noise-model
+    // dimension assign_symbols!(Y : VectorVar2);
+    // let f = fac![GpsResidual::new(0.0, 0.0), Y(0), 0.1 as std]; // wrong variable
+    // type
 
     // optimize
     let mut opt: GaussNewton = GaussNewton::new(graph);
diff --git a/rustfmt.toml b/rustfmt.toml
new file mode 100644
index 0000000..9e0fbee
--- /dev/null
+++ b/rustfmt.toml
@@ -0,0 +1,6 @@
+imports_granularity = "Crate"
+group_imports = "StdExternalCrate"
+# imports_layout = "HorizontalVertical"
+
+format_code_in_doc_comments = true
+wrap_comments = true
\ No newline at end of file
diff --git a/src/containers/factor.rs b/src/containers/factor.rs
index eb7e654..bce1712 100644
--- a/src/containers/factor.rs
+++ b/src/containers/factor.rs
@@ -136,7 +136,7 @@ impl<'f, KF> FactorFormatter<'f, KF> {
     }
 }
 
-impl<'f, KF: KeyFormatter> fmt::Debug for FactorFormatter<'f, KF> {
+impl<KF: KeyFormatter> fmt::Debug for FactorFormatter<'_, KF> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         if f.alternate() {
             f.write_str("Factor {\n")?;
diff --git a/src/containers/graph.rs b/src/containers/graph.rs
index 297e736..632b474 100644
--- a/src/containers/graph.rs
+++ b/src/containers/graph.rs
@@ -131,7 +131,7 @@ impl<'g, KF> GraphFormatter<'g, KF> {
     }
 }
 
-impl<'g, KF: KeyFormatter> Debug for GraphFormatter<'g, KF> {
+impl<KF: KeyFormatter> Debug for GraphFormatter<'_, KF> {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         if f.alternate() {
             f.write_str("Graph [\n")?;
diff --git a/src/containers/values.rs b/src/containers/values.rs
index 5b775e3..8d63b3b 100644
--- a/src/containers/values.rs
+++ b/src/containers/values.rs
@@ -225,7 +225,7 @@ impl<'v, KF> ValuesFormatter<'v, KF> {
     }
 }
 
-impl<'v, KF: KeyFormatter> fmt::Display for ValuesFormatter<'v, KF> {
+impl<KF: KeyFormatter> fmt::Display for ValuesFormatter<'_, KF> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         let precision = f.precision().unwrap_or(3);
         if f.alternate() {
@@ -246,7 +246,7 @@ impl<'v, KF: KeyFormatter> fmt::Display for ValuesFormatter<'v, KF> {
     }
 }
 
-impl<'v, KF: KeyFormatter> fmt::Debug for ValuesFormatter<'v, KF> {
+impl<KF: KeyFormatter> fmt::Debug for ValuesFormatter<'_, KF> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         let precision = f.precision().unwrap_or(3);
         if f.alternate() {
diff --git a/src/lib.rs b/src/lib.rs
index d865bee..1cef9aa 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -127,10 +127,26 @@ pub mod symbols {
 /// ```
 pub mod traits {
     pub use crate::{
-        linalg::Diff,
-        optimizers::{GraphOptimizer, Optimizer},
-        residuals::Residual,
-        variables::Variable,
+        linalg::Diff, noise::NoiseModel, optimizers::Optimizer, residuals::Residual,
+        robust::RobustCost, variables::Variable,
     };
 }
 
+/// Helper module to group together common types
+///
+/// Specifically, this contains everything that would be needed to implement a
+/// simple pose graph. While we recommend against it, it can be all imported
+/// using
+/// ```
+/// use factrs::core::*;
+/// ```
+pub mod core {
+    pub use crate::{
+        containers::{Factor, Graph, Values},
+        noise::{GaussianNoise, UnitNoise},
+        optimizers::{GaussNewton, LevenMarquardt},
+        residuals::{BetweenResidual, PriorResidual},
+        variables::{VectorVar, VectorVar1, VectorVar2, VectorVar3, SE2, SE3, SO2, SO3},
+    };
+}
+
@@ -139,8 +155,7 @@ pub mod rerun;
 
 #[cfg(feature = "serde")]
 pub mod serde {
-    pub use crate::noise::tag_noise;
-    pub use crate::residuals::tag_residual;
-    pub use crate::robust::tag_robust;
-    pub use crate::variables::tag_variable;
+    pub use crate::{
+        noise::tag_noise, residuals::tag_residual, robust::tag_robust, variables::tag_variable,
+    };
 }
diff --git a/src/optimizers/gauss_newton.rs b/src/optimizers/gauss_newton.rs
index 5d4efb7..39feefc 100644
--- a/src/optimizers/gauss_newton.rs
+++ b/src/optimizers/gauss_newton.rs
@@ -1,6 +1,6 @@
 use faer_ext::IntoNalgebra;
 
-use super::{GraphOptimizer, OptObserverVec, OptParams, OptResult, Optimizer};
+use super::{OptObserverVec, OptParams, OptResult, Optimizer};
 use crate::{
     containers::{Graph, GraphOrder, Values, ValuesOrder},
     linalg::DiffResult,
@@ -26,8 +26,8 @@ pub struct GaussNewton {
     graph_order: Option<GraphOrder>,
 }
 
-impl GraphOptimizer for GaussNewton {
-    fn new(graph: Graph) -> Self {
+impl GaussNewton {
+    pub fn new(graph: Graph) -> Self {
         Self {
             graph,
             solver: S::default(),
@@ -37,7 +37,7 @@ impl GraphOptimizer for GaussNewton {
         }
     }
 
-    fn graph(&self) -> &Graph {
+    pub fn graph(&self) -> &Graph {
         &self.graph
     }
 }
diff --git a/src/optimizers/levenberg_marquardt.rs b/src/optimizers/levenberg_marquardt.rs
index 43fa6ac..c8fba74 100644
--- a/src/optimizers/levenberg_marquardt.rs
+++ b/src/optimizers/levenberg_marquardt.rs
@@ -3,7 +3,7 @@ use std::ops::Mul;
 use faer::{scale, sparse::SparseColMat};
 use faer_ext::IntoNalgebra;
 
-use super::{GraphOptimizer, OptError, OptObserverVec, OptParams, OptResult, Optimizer};
+use super::{OptError, OptObserverVec, OptParams, OptResult, Optimizer};
 use crate::{
     containers::{Graph, GraphOrder, Values, ValuesOrder},
     dtype,
@@ -51,8 +51,8 @@ pub struct LevenMarquardt {
     graph_order: Option<GraphOrder>,
 }
 
-impl GraphOptimizer for LevenMarquardt {
-    fn new(graph: Graph) -> Self {
+impl LevenMarquardt {
+    pub fn new(graph: Graph) -> Self {
         Self {
             graph,
             solver: S::default(),
@@ -64,7 +64,7 @@ impl GraphOptimizer for LevenMarquardt {
         }
     }
 
-    fn graph(&self) -> &Graph {
+    pub fn graph(&self) -> &Graph {
         &self.graph
     }
 }
diff --git a/src/optimizers/macros.rs b/src/optimizers/macros.rs
index b13fdbc..cc81276 100644
--- a/src/optimizers/macros.rs
+++ b/src/optimizers/macros.rs
@@ -5,35 +5,43 @@
 /// - Between optimization for VectorVar3, SO3, and SE3
 #[macro_export]
 macro_rules! test_optimizer {
-    ($optimizer:ty) => {
+    ($o:ty) => {
         #[test]
         fn priorvector3() {
-            $crate::optimizers::test::optimize_prior::<$optimizer, 3, $crate::variables::VectorVar3>()
+            let f = |graph| <$o>::new(graph);
+            $crate::optimizers::test::optimize_prior::<$o, 3, $crate::variables::VectorVar3>(&f)
         }
 
         #[test]
         fn priorso3() {
-            $crate::optimizers::test::optimize_prior::<$optimizer, 3, $crate::variables::SO3>();
+            let f = |graph| <$o>::new(graph);
+            $crate::optimizers::test::optimize_prior::<$o, 3, $crate::variables::SO3>(&f);
        }
 
         #[test]
         fn priorse3() {
-            $crate::optimizers::test::optimize_prior::<$optimizer, 6, $crate::variables::SE3>();
+            let f = |graph| <$o>::new(graph);
+            $crate::optimizers::test::optimize_prior::<$o, 6, $crate::variables::SE3>(&f);
        }
 
         #[test]
         fn betweenvector3() {
-            $crate::optimizers::test::optimize_between::<$optimizer, 3, 6, $crate::variables::VectorVar3>();
+            let f = |graph| <$o>::new(graph);
+            $crate::optimizers::test::optimize_between::<$o, 3, 6, $crate::variables::VectorVar3>(
+                &f,
+            );
         }
 
         #[test]
         fn betweenso3() {
-            $crate::optimizers::test::optimize_between::<$optimizer, 3, 6, $crate::variables::SO3>();
+            let f = |graph| <$o>::new(graph);
+            $crate::optimizers::test::optimize_between::<$o, 3, 6, $crate::variables::SO3>(&f);
         }
 
         #[test]
         fn betweense3() {
-            $crate::optimizers::test::optimize_between::<$optimizer, 6, 12, $crate::variables::SE3>();
+            let f = |graph| <$o>::new(graph);
+            $crate::optimizers::test::optimize_between::<$o, 6, 12, $crate::variables::SE3>(&f);
         }
     };
 }
diff --git a/src/optimizers/mod.rs b/src/optimizers/mod.rs
index a79cd55..50083d7 100644
--- a/src/optimizers/mod.rs
+++ b/src/optimizers/mod.rs
@@ -35,9 +35,7 @@
 //! using the [test_optimizer](crate::test_optimizer) macro to run a handful of
 //! simple tests over a few different variable types to ensure correctness.
 mod traits;
-pub use traits::{
-    GraphOptimizer, OptError, OptObserver, OptObserverVec, OptParams, OptResult, Optimizer,
-};
+pub use traits::{OptError, OptObserver, OptObserverVec, OptParams, OptResult, Optimizer};
 
 mod macros;
 
@@ -58,7 +56,6 @@ pub mod test {
         containers::{FactorBuilder, Graph, Values},
         dtype,
         linalg::{AllocatorBuffer, Const, DualAllocator, DualVector, VectorX},
-        noise::{NoiseModel, UnitNoise},
         residuals::{BetweenResidual, PriorResidual, Residual},
         symbols::X,
         variables::VariableDtype,
@@ -69,11 +66,11 @@ pub mod test {
         const DIM: usize,
         #[cfg(feature = "serde")] T: VariableDtype> + 'static + typetag::Tagged,
         #[cfg(not(feature = "serde"))] T: VariableDtype> + 'static,
-    >()
-    where
-        UnitNoise<DIM>: NoiseModel,
+    >(
+        new: &dyn Fn(Graph) -> O,
+    ) where
         PriorResidual<T>: Residual,
-        O: Optimizer + GraphOptimizer,
+        O: Optimizer,
     {
         let t = VectorX::from_fn(T::DIM, |_, i| ((i + 1) as dtype) / 10.0);
         let p = T::exp(t.as_view());
@@ -86,7 +83,7 @@ pub mod test {
         let factor = FactorBuilder::new1_unchecked(res, X(0)).build();
         graph.add_factor(factor);
 
-        let mut opt = O::new(graph);
+        let mut opt = new(graph);
         values = opt.optimize(values).expect("Optimization failed");
 
         let out: &T = values.get_unchecked(X(0)).expect("Missing X(0)");
@@ -104,12 +101,12 @@ pub mod test {
         const DIM_DOUBLE: usize,
         #[cfg(feature = "serde")] T: VariableDtype> + 'static + typetag::Tagged,
         #[cfg(not(feature = "serde"))] T: VariableDtype> + 'static,
-    >()
-    where
-        UnitNoise<DIM>: NoiseModel,
+    >(
+        new: &dyn Fn(Graph) -> O,
+    ) where
         PriorResidual<T>: Residual,
         BetweenResidual<T>: Residual,
-        O: Optimizer + GraphOptimizer,
+        O: Optimizer,
         Const: ToTypenum,
         AllocatorBuffer, Const>>: Sync + Send,
         DefaultAllocator: DualAllocator, Const>>,
@@ -136,7 +133,7 @@ pub mod test {
         let factor = FactorBuilder::new2_unchecked(res, X(0), X(1)).build();
         graph.add_factor(factor);
 
-        let mut opt = O::new(graph);
+        let mut opt = new(graph);
         values = opt.optimize(values).expect("Optimization failed");
 
         let out1: &T = values.get_unchecked(X(0)).expect("Missing X(0)");
diff --git a/src/optimizers/traits.rs b/src/optimizers/traits.rs
index e173a69..78b5fad 100644
--- a/src/optimizers/traits.rs
+++ b/src/optimizers/traits.rs
@@ -1,4 +1,4 @@
-use crate::{containers::Graph, dtype};
+use crate::dtype;
 
 /// Error types for optimizers
 #[derive(Debug)]
@@ -165,10 +165,3 @@ pub trait Optimizer {
         Err(OptError::MaxIterations(values))
     }
 }
-
-/// Small trait for optimizers that work on a graph
-pub trait GraphOptimizer: Optimizer {
-    fn new(graph: Graph) -> Self;
-
-    fn graph(&self) -> &Graph;
-}
diff --git a/src/residuals/imu_preint/delta.rs b/src/residuals/imu_preint/delta.rs
index 0d156ea..4a30cfe 100644
--- a/src/residuals/imu_preint/delta.rs
+++ b/src/residuals/imu_preint/delta.rs
@@ -4,8 +4,7 @@ use super::{AccelUnbiased, Gravity, GyroUnbiased, ImuState};
 use crate::{
     dtype,
     linalg::{Matrix, Matrix3, Numeric, SupersetOf, Vector3},
-    traits::Variable,
-    variables::{ImuBias, MatrixLieGroup, SO3},
+    variables::{ImuBias, MatrixLieGroup, Variable, SO3},
 };
 
 /// Struct to hold the preintegrated Imu delta
@@ -187,9 +186,8 @@ mod test {
 
     use super::*;
     use crate::{
-        linalg::{DualVector, ForwardProp, Vector, VectorX},
+        linalg::{Diff, DualVector, ForwardProp, Vector, VectorX},
         residuals::{Accel, Gyro},
-        traits::*,
         variables::VectorVar,
     };
 
diff --git a/src/residuals/imu_preint/residual.rs b/src/residuals/imu_preint/residual.rs
index facafaf..5207878 100644
--- a/src/residuals/imu_preint/residual.rs
+++ b/src/residuals/imu_preint/residual.rs
@@ -5,8 +5,7 @@ use crate::{
     linalg::{Const, ForwardProp, Matrix, Matrix3, VectorX},
     noise::GaussianNoise,
     residuals::Residual6,
-    traits::*,
-    variables::{ImuBias, MatrixLieGroup, VectorVar3, SE3, SO3},
+    variables::{ImuBias, MatrixLieGroup, Variable, VectorVar3, SE3, SO3},
 };
 
 // ------------------------- Covariances ------------------------- //
@@ -350,18 +349,17 @@ impl Residual6 for ImuPreintegrationResidual {
 
 #[cfg(test)]
 mod test {
+    use super::*;
     use crate::{
         assert_variable_eq, assign_symbols,
         containers::{Graph, Values},
         fac,
         linalg::Vector3,
-        optimizers::GaussNewton,
+        optimizers::{GaussNewton, Optimizer},
         residuals::{Accel, Gyro, PriorResidual},
         variables::{ImuBias, VectorVar3, SE3},
     };
-
-    use super::*;
 
     assign_symbols!(X: SE3; V: VectorVar3; B: ImuBias);
 
     // Test using residual in an optimization problem
diff --git a/src/variables/imu_bias.rs b/src/variables/imu_bias.rs
index 57dcd65..25e72e7 100644
--- a/src/variables/imu_bias.rs
+++ b/src/variables/imu_bias.rs
@@ -154,7 +154,7 @@ impl ops::Sub for ImuBias {
     }
 }
 
-impl<'a, T: Numeric> ops::Sub for &'a ImuBias<T> {
+impl<T: Numeric> ops::Sub for &ImuBias<T> {
     type Output = ImuBias<T>;
 
     fn sub(self, rhs: Self) -> ImuBias<T> {
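
After this patch, downstream code pulls concrete types from `factrs::core` and traits from `factrs::traits`, and builds optimizers through the new inherent `new` instead of the removed `GraphOptimizer::new`. The sketch below is assembled only from the gps.rs example and the test helpers shown above; the identity measurements and the 0.1 sigma are illustrative placeholders, not values taken from the patch.

```rust
use factrs::{
    assign_symbols,
    core::{BetweenResidual, GaussNewton, Graph, PriorResidual, Values, SE2},
    fac,
    traits::*,
};

// X always holds SE2 poses, mirroring the gps.rs example
assign_symbols!(X: SE2);

fn main() {
    // Initial guesses for the two poses
    let mut values = Values::new();
    values.insert(X(0), SE2::identity());
    values.insert(X(1), SE2::identity());

    // Anchor the first pose with a prior, then tie the second pose to it
    let mut graph = Graph::new();
    graph.add_factor(fac![PriorResidual::new(SE2::identity()), X(0), 0.1 as std]);
    graph.add_factor(fac![
        BetweenResidual::new(SE2::identity()),
        (X(0), X(1)),
        0.1 as std
    ]);

    // GaussNewton::new is now an inherent constructor rather than GraphOptimizer::new
    let mut opt: GaussNewton = GaussNewton::new(graph);
    let values = opt.optimize(values).expect("optimization failed");

    let x0: &SE2 = values.get_unchecked(X(0)).expect("missing X(0)");
    println!("{:?}", x0);
}
```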