diff --git a/Cargo.lock b/Cargo.lock index 8a5018f906..fb597b1a9d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -660,6 +660,7 @@ dependencies = [ "filtering", "linear_algebra", "nalgebra", + "ordered-float", "path_serde", "serde", "types", @@ -732,7 +733,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "rustc-hash", + "rustc-hash 1.1.0", "shlex", "syn 1.0.109", "which", @@ -755,7 +756,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "rustc-hash", + "rustc-hash 1.1.0", "shlex", "syn 2.0.69", "which", @@ -778,7 +779,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "rustc-hash", + "rustc-hash 1.1.0", "shlex", "syn 2.0.69", "which", @@ -1451,12 +1452,14 @@ dependencies = [ "framework", "geometry", "hardware", + "hungarian_algorithm", "itertools 0.10.5", "kinematics", "linear_algebra", "log", "motionfile", "nalgebra", + "ndarray", "num-traits", "ordered-float", "path_serde", @@ -1635,6 +1638,18 @@ dependencies = [ "toml", ] +[[package]] +name = "deprecate-until" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a3767f826efbbe5a5ae093920b58b43b01734202be697e1354914e862e8e704" +dependencies = [ + "proc-macro2", + "quote", + "semver", + "syn 2.0.69", +] + [[package]] name = "deranged" version = "0.3.11" @@ -3146,6 +3161,15 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" +[[package]] +name = "hungarian_algorithm" +version = "0.1.0" +dependencies = [ + "ndarray", + "ordered-float", + "pathfinding", +] + [[package]] name = "hyper" version = "0.14.29" @@ -3325,6 +3349,15 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "integer-sqrt" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "276ec31bcb4a9ee45f58bec6f9ec700ae4cf4f4f8f2fa7e06cb406bd5ffdd770" +dependencies = [ + "num-traits", +] + [[package]] name = "io-kit-sys" version = "0.4.1" @@ 
-3786,7 +3819,7 @@ dependencies = [ "once_cell", "parking_lot", "pkg-config", - "rustc-hash", + "rustc-hash 1.1.0", "serde", ] @@ -3817,7 +3850,7 @@ dependencies = [ "indexmap 2.2.6", "log", "num-traits", - "rustc-hash", + "rustc-hash 1.1.0", "spirv", "termcolor", "thiserror", @@ -4569,6 +4602,20 @@ dependencies = [ "syn 2.0.69", ] +[[package]] +name = "pathfinding" +version = "4.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39103a901800b5711b9076f2474da1e7412f804c11a0f3b4e8dd3b1e59c58b12" +dependencies = [ + "deprecate-until", + "indexmap 2.2.6", + "integer-sqrt", + "num-traits", + "rustc-hash 2.0.0", + "thiserror", +] + [[package]] name = "peeking_take_while" version = "0.1.2" @@ -5101,6 +5148,12 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" +[[package]] +name = "rustc-hash" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152" + [[package]] name = "rustc_version" version = "0.4.0" @@ -6134,7 +6187,7 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "deb68604048ff8fa93347f02441e4487594adc20bb8a084f9e564d2b827a0a9f" dependencies = [ - "rustc-hash", + "rustc-hash 1.1.0", ] [[package]] @@ -6644,7 +6697,7 @@ dependencies = [ "parking_lot", "profiling", "raw-window-handle 0.6.2", - "rustc-hash", + "rustc-hash 1.1.0", "smallvec", "thiserror", "web-sys", @@ -6684,7 +6737,7 @@ dependencies = [ "profiling", "raw-window-handle 0.6.2", "renderdoc-sys", - "rustc-hash", + "rustc-hash 1.1.0", "smallvec", "thiserror", "wasm-bindgen", diff --git a/Cargo.toml b/Cargo.toml index 550dd06e8d..d3ecb17170 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -26,6 +26,7 @@ members = [ "crates/hulk_nao", "crates/hulk_replayer", "crates/hulk_webots", + "crates/hungarian_algorithm", 
"crates/kinematics", "crates/linear_algebra", "crates/motionfile", @@ -119,6 +120,7 @@ home = "0.5.4" hula-types = { path = "tools/hula/types" } hulk = { path = "crates/hulk" } hulk_manifest = { path = "crates/hulk_manifest" } +hungarian_algorithm = { path = "crates/hungarian_algorithm" } i2cdev = "0.5.1" image = "0.24.4" indicatif = "0.17.2" @@ -147,6 +149,7 @@ opusfile-ng = "0.1.0" ordered-float = "3.1.0" parameters = { path = "crates/parameters" } parking_lot = "0.12.1" +pathfinding = "4.10.0" path_serde = { path = "crates/path_serde" } path_serde_derive = { path = "crates/path_serde_derive" } petgraph = "0.6.2" diff --git a/crates/ball_filter/Cargo.toml b/crates/ball_filter/Cargo.toml index 798886027c..1d01296fcd 100644 --- a/crates/ball_filter/Cargo.toml +++ b/crates/ball_filter/Cargo.toml @@ -10,6 +10,7 @@ coordinate_systems = { workspace = true } filtering = { workspace = true } linear_algebra = { workspace = true } nalgebra = { workspace = true } +ordered-float = { workspace = true } path_serde = { workspace = true } serde = { workspace = true } types = { workspace = true } diff --git a/crates/ball_filter/src/hypothesis.rs b/crates/ball_filter/src/hypothesis.rs index 7187526bb0..4e40991a9b 100644 --- a/crates/ball_filter/src/hypothesis.rs +++ b/crates/ball_filter/src/hypothesis.rs @@ -1,4 +1,7 @@ -use std::time::{Duration, SystemTime}; +use std::{ + f32::consts::PI, + time::{Duration, SystemTime}, +}; use filtering::kalman_filter::KalmanFilter; use moving::{MovingPredict, MovingUpdate}; @@ -17,52 +20,50 @@ use types::{ pub mod moving; pub mod resting; +#[derive(Clone, Debug, Serialize, Deserialize, PathSerialize, PathDeserialize, PathIntrospect)] +pub enum BallMode { + Resting(MultivariateNormalDistribution<2>), + Moving(MultivariateNormalDistribution<4>), +} + #[derive(Clone, Debug, Serialize, Deserialize, PathSerialize, PathDeserialize, PathIntrospect)] pub struct BallHypothesis { - pub moving: MultivariateNormalDistribution<4>, - pub resting: 
MultivariateNormalDistribution<2>, + pub mode: BallMode, pub last_seen: SystemTime, pub validity: f32, } impl BallHypothesis { - pub fn new( - moving_hypothesis: MultivariateNormalDistribution<4>, - resting_hypothesis: MultivariateNormalDistribution<2>, - last_seen: SystemTime, - ) -> Self { + pub fn new(hypothesis: MultivariateNormalDistribution<4>, last_seen: SystemTime) -> Self { Self { - moving: moving_hypothesis, - resting: resting_hypothesis, + mode: BallMode::Moving(hypothesis), last_seen, validity: 1.0, } } - pub fn resting(&self) -> BallPosition { - BallPosition { - position: self.resting.mean.xy().framed().as_point(), - velocity: Vector2::zeros(), - last_seen: self.last_seen, + pub fn position(&self) -> BallPosition { + match self.mode { + BallMode::Resting(resting) => BallPosition { + position: resting.mean.framed().as_point(), + velocity: Vector2::zeros(), + last_seen: self.last_seen, + }, + BallMode::Moving(moving) => BallPosition { + position: moving.mean.xy().framed().as_point(), + velocity: vector![moving.mean.z, moving.mean.w], + last_seen: self.last_seen, + }, } } - pub fn moving(&self) -> BallPosition { - BallPosition { - position: self.moving.mean.xy().framed().as_point(), - velocity: vector![self.moving.mean.z, self.moving.mean.w], - last_seen: self.last_seen, + pub fn position_covariance(&self) -> Matrix2 { + match self.mode { + BallMode::Resting(resting) => resting.covariance, + BallMode::Moving(moving) => moving.covariance.fixed_view::<2, 2>(0, 0).into_owned(), } } - pub fn choose_ball(&self, velocity_threshold: f32) -> BallPosition { - let moving = self.moving(); - if moving.velocity.norm() < velocity_threshold { - return self.resting(); - }; - moving - } - pub fn predict( &mut self, delta_time: Duration, @@ -70,25 +71,42 @@ impl BallHypothesis { velocity_decay: f32, moving_process_noise: Matrix4, resting_process_noise: Matrix2, - velocity_threshold: f32, + log_likelihood_of_zero_velocity_threshold: f32, ) { - MovingPredict::predict( - &mut 
self.moving, - delta_time, - last_to_current_odometry, - velocity_decay, - moving_process_noise, - ); - RestingPredict::predict( - &mut self.resting, - last_to_current_odometry, - resting_process_noise, - ); - - let moving_velocity: Vector2 = vector![self.moving.mean.z, self.moving.mean.w]; - if moving_velocity.norm() < velocity_threshold { - self.resting.mean.x = self.moving.mean.x; - self.resting.mean.y = self.moving.mean.y; + match &mut self.mode { + BallMode::Resting(resting) => { + RestingPredict::predict(resting, last_to_current_odometry, resting_process_noise) + } + BallMode::Moving(moving) => { + MovingPredict::predict( + moving, + delta_time, + last_to_current_odometry, + velocity_decay, + moving_process_noise, + ); + + let velocity_covariance = moving.covariance.fixed_view::<2, 2>(0, 0); + let velocity = nalgebra::vector![moving.mean.z, moving.mean.w]; + + let exponent = -velocity.dot( + &velocity_covariance + .cholesky() + .expect("covariance not invertible") + .solve(&velocity), + ) / 2.; + let determinant = velocity_covariance.determinant(); + + let log_likelihood_of_zero_velocity = + exponent - (2. 
* PI * determinant.sqrt()).ln(); + + if log_likelihood_of_zero_velocity > log_likelihood_of_zero_velocity_threshold { + self.mode = BallMode::Resting(MultivariateNormalDistribution { + mean: moving.mean.xy(), + covariance: moving.covariance.fixed_view::<2, 2>(0, 0).into_owned(), + }) + } + } } } @@ -96,25 +114,38 @@ impl BallHypothesis { &mut self, detection_time: SystemTime, measurement: MultivariateNormalDistribution<2>, + validity_bonus: f32, ) { self.last_seen = detection_time; - MovingUpdate::update(&mut self.moving, measurement); - RestingUpdate::update(&mut self.resting, measurement); - self.validity += 1.0; + self.validity += validity_bonus; + + match &mut self.mode { + BallMode::Resting(resting) => RestingUpdate::update(resting, measurement), + BallMode::Moving(moving) => MovingUpdate::update(moving, measurement), + } } pub fn merge(&mut self, other: BallHypothesis) { - KalmanFilter::update( - &mut self.moving, - Matrix4::identity(), - other.moving.mean, - other.moving.covariance, - ); - KalmanFilter::update( - &mut self.resting, - Matrix2::identity(), - other.resting.mean, - other.resting.covariance, - ); + match (&mut self.mode, other.mode) { + (BallMode::Resting(resting), BallMode::Resting(distribution)) => { + KalmanFilter::update( + resting, + Matrix2::identity(), + distribution.mean, + distribution.covariance, + ); + self.validity = self.validity.max(other.validity); + } + (BallMode::Moving(moving), BallMode::Moving(distribution)) => { + KalmanFilter::update( + moving, + Matrix4::identity(), + distribution.mean, + distribution.covariance, + ); + self.validity = self.validity.max(other.validity); + } + _ => (), // deny merge + }; } } diff --git a/crates/ball_filter/src/lib.rs b/crates/ball_filter/src/lib.rs index c1548e97e4..18f95076e9 100644 --- a/crates/ball_filter/src/lib.rs +++ b/crates/ball_filter/src/lib.rs @@ -1,21 +1,23 @@ use std::time::{Duration, SystemTime}; use coordinate_systems::Ground; -use linear_algebra::{Isometry2, Point2}; -use 
nalgebra::{Matrix2, Matrix4}; +use filtering::kalman_filter::KalmanFilter; +use linear_algebra::{distance, IntoFramed, Isometry2}; +use nalgebra::{Matrix2, Matrix2x4, Matrix4}; +use ordered_float::NotNan; use path_serde::{PathDeserialize, PathIntrospect, PathSerialize}; use serde::{Deserialize, Serialize}; mod hypothesis; -pub use hypothesis::BallHypothesis; +pub use hypothesis::{BallHypothesis, BallMode}; use types::multivariate_normal_distribution::MultivariateNormalDistribution; #[derive( Debug, Default, Clone, Serialize, Deserialize, PathSerialize, PathDeserialize, PathIntrospect, )] pub struct BallFilter { - hypotheses: Vec, + pub hypotheses: Vec, } impl BallFilter { @@ -26,10 +28,6 @@ impl BallFilter { .max_by(|a, b| a.validity.partial_cmp(&b.validity).unwrap()) } - pub fn hypotheses(&self) -> &Vec { - &self.hypotheses - } - pub fn decay_hypotheses(&mut self, decay_factor_criterion: impl Fn(&BallHypothesis) -> f32) { for hypothesis in self.hypotheses.iter_mut() { let decay_factor = decay_factor_criterion(hypothesis); @@ -44,7 +42,7 @@ impl BallFilter { velocity_decay: f32, moving_process_noise: Matrix4, resting_process_noise: Matrix2, - velocity_threshold: f32, + log_likelihood_of_zero_velocity_threshold: f32, ) { for hypothesis in self.hypotheses.iter_mut() { hypothesis.predict( @@ -53,7 +51,7 @@ impl BallFilter { velocity_decay, moving_process_noise, resting_process_noise, - velocity_threshold, + log_likelihood_of_zero_velocity_threshold, ) } } @@ -62,26 +60,6 @@ impl BallFilter { self.hypotheses.clear() } - pub fn update( - &mut self, - detection_time: SystemTime, - measurement: MultivariateNormalDistribution<2>, - matching_criterion: impl Fn(&BallHypothesis) -> bool, - ) -> bool { - let mut number_of_matching_hypotheses = 0; - - for hypothesis in self - .hypotheses - .iter_mut() - .filter(|hypothesis| matching_criterion(hypothesis)) - { - number_of_matching_hypotheses += 1; - hypothesis.update(detection_time, measurement) - } - - 
number_of_matching_hypotheses > 0 - } - pub fn remove_hypotheses( &mut self, is_valid: impl Fn(&BallHypothesis) -> bool, @@ -111,25 +89,43 @@ impl BallFilter { pub fn spawn( &mut self, detection_time: SystemTime, - measurement: Point2, + measurement: MultivariateNormalDistribution<2>, initial_moving_covariance: Matrix4, - initial_resting_covariance: Matrix2, ) { - let initial_state = nalgebra::vector![measurement.x(), measurement.y(), 0.0, 0.0]; - - let moving_hypothesis = MultivariateNormalDistribution { - mean: initial_state, + let closest_hypothesis = self.hypotheses.iter().min_by_key(|hypothesis| { + NotNan::new(distance( + measurement.mean.framed().as_point(), + hypothesis.position().position, + )) + .expect("distance is nan") + }); + + let mut new_hypothesis = MultivariateNormalDistribution { + mean: closest_hypothesis.map_or( + nalgebra::vector![measurement.mean.x, measurement.mean.y, 0.0, 0.0], + |hypothesis| { + let old_position = hypothesis.position().position.inner.coords; + nalgebra::vector![old_position.x, old_position.y, 0.0, 0.0] + }, + ), covariance: initial_moving_covariance, }; - let resting_hypothesis = MultivariateNormalDistribution { - mean: initial_state.xy(), - covariance: initial_resting_covariance, + + if closest_hypothesis.is_some() { + KalmanFilter::update( + &mut new_hypothesis, + Matrix2x4::identity(), + measurement.mean, + measurement.covariance, + ) + } + + let new_hypothesis = BallHypothesis { + mode: BallMode::Moving(new_hypothesis), + last_seen: detection_time, + validity: 1.0, }; - self.hypotheses.push(BallHypothesis::new( - moving_hypothesis, - resting_hypothesis, - detection_time, - )) + self.hypotheses.push(new_hypothesis) } } diff --git a/crates/control/Cargo.toml b/crates/control/Cargo.toml index 69a06a98c8..2bcbad54d4 100644 --- a/crates/control/Cargo.toml +++ b/crates/control/Cargo.toml @@ -18,12 +18,14 @@ filtering = { workspace = true } framework = { workspace = true } geometry = { workspace = true } hardware = { 
workspace = true } +hungarian_algorithm = { workspace = true } itertools = { workspace = true } kinematics = { workspace = true } linear_algebra = { workspace = true } log = { workspace = true } motionfile = { workspace = true } nalgebra = { workspace = true } +ndarray = { workspace = true } num-traits = { workspace = true } ordered-float = { workspace = true } path_serde = { workspace = true } diff --git a/crates/control/src/ball_filter.rs b/crates/control/src/ball_filter.rs index 6958d67741..8beca5d630 100644 --- a/crates/control/src/ball_filter.rs +++ b/crates/control/src/ball_filter.rs @@ -1,7 +1,11 @@ use std::{collections::BTreeMap, time::SystemTime}; use color_eyre::Result; +use hungarian_algorithm::AssignmentProblem; +use itertools::Itertools; use nalgebra::{Matrix2, Matrix4}; +use ndarray::Array2; +use ordered_float::NotNan; use serde::{Deserialize, Serialize}; use ball_filter::{BallFilter as BallFiltering, BallHypothesis}; @@ -9,7 +13,7 @@ use context_attribute::context; use coordinate_systems::{Ground, Pixel}; use framework::{AdditionalOutput, HistoricInput, MainOutput, PerceptionInput}; use geometry::circle::Circle; -use linear_algebra::{distance, IntoFramed, IntoTransform, Isometry2, Point2}; +use linear_algebra::{IntoTransform, Isometry2, Point2}; use projection::{camera_matrices::CameraMatrices, camera_matrix::CameraMatrix, Projection}; use types::{ ball_detection::BallPercept, @@ -84,6 +88,10 @@ impl BallFilter { cycle_time: &CycleTime, ) -> Vec { for (detection_time, balls) in measurements { + self.ball_filter.hypotheses.retain(|hypothesis| { + hypothesis.validity > filter_parameters.validity_discard_threshold + }); + let delta_time = historic_cycle_times .get(&detection_time) .last_cycle_duration; @@ -99,15 +107,14 @@ impl BallFilter { filter_parameters.velocity_decay_factor, Matrix4::from_diagonal(&filter_parameters.noise.process_noise_moving), Matrix2::from_diagonal(&filter_parameters.noise.process_noise_resting), - 
filter_parameters.resting_ball_velocity_threshold, + filter_parameters.log_likelihood_of_zero_velocity_threshold, ); - let camera_matrices = camera_matrices.get(&detection_time); - if !had_ground_contact.get(&detection_time) { self.ball_filter.reset(); continue; } + let camera_matrices = camera_matrices.get(&detection_time); let projected_limbs_bottom = projected_limbs .persistent @@ -125,34 +132,52 @@ impl BallFilter { ) }); - for ball in balls { - let mean_position = ball.percept_in_ground.mean.framed().as_point(); - let is_hypothesis_detected = |hypothesis: &BallHypothesis| { - distance( - hypothesis - .choose_ball(filter_parameters.resting_ball_velocity_threshold) - .position, - mean_position, - ) < filter_parameters.measurement_matching_distance - }; - let is_any_hypothesis_updated = self.ball_filter.update( + let match_matrix = + mahalanobis_matrix_of_hypotheses_and_percepts(&self.ball_filter.hypotheses, &balls); + + let assignment = AssignmentProblem::from_costs(match_matrix).solve(); + + let mut used_percepts = vec![]; + + for (hypothesis, assigned_percept) in self + .ball_filter + .hypotheses + .iter_mut() + .zip_eq(assignment.iter()) + { + if let Some(assigned_percept) = assigned_percept { + let mahalanobis_distance = -assigned_percept.cost; + if mahalanobis_distance > filter_parameters.maximum_matching_cost { + hypothesis.validity *= 1.0 / 7.0; + continue; + } + let validity_increase = assigned_percept.cost.exp(); + let percept = balls[assigned_percept.to]; + used_percepts.push(assigned_percept.to); + hypothesis.update(detection_time, percept.percept_in_ground, validity_increase); + } + } + + let unused_percepts = { + let mut all_percepts = balls.clone(); + used_percepts.sort_unstable(); + for index in used_percepts.into_iter().rev() { + all_percepts.remove(index); + } + all_percepts + }; + + for percept in unused_percepts { + self.ball_filter.spawn( detection_time, - ball.percept_in_ground, - is_hypothesis_detected, + percept.percept_in_ground, + 
Matrix4::from_diagonal(&filter_parameters.noise.initial_covariance), ); - if !is_any_hypothesis_updated { - self.ball_filter.spawn( - detection_time, - mean_position, - Matrix4::from_diagonal(&filter_parameters.noise.initial_covariance), - Matrix2::from_diagonal(&filter_parameters.noise.initial_covariance.xy()), - ) - } } } let is_hypothesis_valid = |hypothesis: &BallHypothesis| { - let ball = hypothesis.choose_ball(filter_parameters.resting_ball_velocity_threshold); + let ball = hypothesis.position(); let duration_since_last_observation = cycle_time .start_time .duration_since(ball.last_seen) @@ -164,19 +189,26 @@ impl BallFilter { && duration_since_last_observation < filter_parameters.hypothesis_timeout }; + // TODO: this removes hypotheses if not one is resting and the other one is moving! let should_merge_hypotheses = - |hypothesis1: &BallHypothesis, hypothesis2: &BallHypothesis| { - let ball1 = - hypothesis1.choose_ball(filter_parameters.resting_ball_velocity_threshold); - let ball2 = - hypothesis2.choose_ball(filter_parameters.resting_ball_velocity_threshold); - - distance(ball1.position, ball2.position) - < filter_parameters.hypothesis_merge_distance + |_hypothesis1: &BallHypothesis, _hypothesis2: &BallHypothesis| { + // distance( + // hypothesis1.position().position, + // hypothesis2.position().position, + // ) < filter_parameters.hypothesis_merge_distance + false }; + let removed = self + .ball_filter + .remove_hypotheses(is_hypothesis_valid, should_merge_hypotheses); self.ball_filter - .remove_hypotheses(is_hypothesis_valid, should_merge_hypotheses) + .hypotheses + .sort_unstable_by(|a, b| b.validity.total_cmp(&a.validity)); + self.ball_filter + .hypotheses + .truncate(filter_parameters.maximum_number_of_hypotheses); + removed } pub fn cycle(&mut self, mut context: CycleContext) -> Result { @@ -198,8 +230,6 @@ impl BallFilter { context.cycle_time, ); - let velocity_threshold = filter_parameters.resting_ball_velocity_threshold; - context .filter_state 
.fill_if_subscribed(|| self.ball_filter.clone()); @@ -211,16 +241,15 @@ impl BallFilter { .best_ball_hypothesis .fill_if_subscribed(|| best_hypothesis.cloned()); - let filtered_ball = - best_hypothesis.map(|hypothesis| hypothesis.choose_ball(velocity_threshold)); + let filtered_ball = best_hypothesis.map(|hypothesis| hypothesis.position()); let output_balls: Vec<_> = self .ball_filter - .hypotheses() + .hypotheses .iter() .filter_map(|hypothesis| { if hypothesis.validity >= filter_parameters.validity_output_threshold { - Some(hypothesis.choose_ball(velocity_threshold)) + Some(hypothesis.position()) } else { None } @@ -246,33 +275,29 @@ impl BallFilter { .filter(|hypothesis| { hypothesis.validity >= context.ball_filter_configuration.validity_output_threshold }) - .map(|hypothesis| hypothesis.choose_ball(velocity_threshold).position) + .map(|hypothesis| hypothesis.position().position) .collect::>(); Ok(MainOutputs { ball_position: filtered_ball.into(), removed_ball_positions: removed_ball_positions.into(), hypothetical_ball_positions: self - .hypothetical_ball_positions( - velocity_threshold, - filter_parameters.validity_output_threshold, - ) + .hypothetical_ball_positions(filter_parameters.validity_output_threshold) .into(), }) } fn hypothetical_ball_positions( &self, - velocity_threshold: f32, validity_limit: f32, ) -> Vec> { self.ball_filter - .hypotheses() + .hypotheses .iter() .filter_map(|hypothesis| { if hypothesis.validity < validity_limit { Some(HypotheticalBallPosition { - position: hypothesis.choose_ball(velocity_threshold).position, + position: hypothesis.position().position, validity: hypothesis.validity, }) } else { @@ -283,6 +308,29 @@ impl BallFilter { } } +fn mahalanobis_matrix_of_hypotheses_and_percepts( + hypotheses: &[BallHypothesis], + percepts: &[&BallPercept], +) -> Array2> { + Array2::from_shape_fn((hypotheses.len(), percepts.len()), |(i, j)| { + let hypothesis = &hypotheses[i]; + let percept = &percepts[j]; + let ball = hypothesis.position(); 
+ + let residual = percept.percept_in_ground.mean - ball.position.inner.coords; + let covariance = hypothesis.position_covariance(); + + let mahalanobis_distance = residual.dot( + &covariance + .cholesky() + .expect("covariance not invertible") + .solve(&residual), + ); + + NotNan::new(-mahalanobis_distance).expect("mahalanobis distance is NaN") + }) +} + fn time_ordered_balls<'a>( balls_top: BTreeMap>>>, balls_bottom: BTreeMap>>>, @@ -309,7 +357,7 @@ fn decide_validity_decay_for_hypothesis( camera_matrices .zip(projected_limbs) .map_or(false, |(camera_matrices, projected_limbs)| { - let ball = hypothesis.choose_ball(configuration.resting_ball_velocity_threshold); + let ball = hypothesis.position(); is_visible_to_camera( &ball, &camera_matrices.bottom, @@ -365,3 +413,63 @@ fn is_visible_to_camera( && (0.0..480.0).contains(&position_in_image.y()) && is_above_limbs(position_in_image, projected_limbs) } + +#[cfg(test)] +mod tests { + use ball_filter::BallMode; + use linear_algebra::point; + use nalgebra::vector; + use types::multivariate_normal_distribution::MultivariateNormalDistribution; + + use super::*; + + #[test] + fn hypothesis_update_matching() { + let hypothesis1 = BallHypothesis { + mode: BallMode::Moving(MultivariateNormalDistribution { + mean: nalgebra::vector![0.0, 1.0, 0.0, 0.0], + covariance: Matrix4::identity(), + }), + last_seen: SystemTime::now(), + validity: 0.0, + }; + let hypothesis2 = BallHypothesis { + mode: BallMode::Moving(MultivariateNormalDistribution { + mean: nalgebra::vector![0.0, -1.0, 0.0, 0.0], + covariance: Matrix4::identity(), + }), + last_seen: SystemTime::now(), + validity: 0.0, + }; + + let percept1 = BallPercept { + percept_in_ground: MultivariateNormalDistribution { + mean: vector![0.0, 0.4], + covariance: Matrix2::identity(), + }, + image_location: Circle::new(point![0.0, 0.0], 1.0), + }; + let percept2 = BallPercept { + percept_in_ground: MultivariateNormalDistribution { + mean: vector![0.0, -0.6], + covariance: 
Matrix2::identity(), + }, + image_location: Circle::new(point![0.0, 0.0], 1.0), + }; + + let hypotheses = vec![hypothesis1, hypothesis2]; + let percepts = vec![&percept1, &percept2]; + + let costs = mahalanobis_matrix_of_hypotheses_and_percepts(&hypotheses, &percepts); + let assignment = AssignmentProblem::from_costs(costs).solve(); + + let percept_of_hypothesis1 = assignment[0].unwrap().to; + assert_eq!(percept_of_hypothesis1, 0); + + let percept_of_hypothesis2 = assignment[1].unwrap().to; + assert_eq!(percept_of_hypothesis2, 1); + + assert_eq!(assignment.len(), 2); + assert_eq!(assignment.into_iter().flatten().count(), 2); + } +} diff --git a/crates/hungarian_algorithm/Cargo.toml b/crates/hungarian_algorithm/Cargo.toml new file mode 100644 index 0000000000..ca82b6a53c --- /dev/null +++ b/crates/hungarian_algorithm/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "hungarian_algorithm" +version.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +pathfinding = { workspace = true } +ndarray = { workspace = true } +ordered-float = { workspace = true } diff --git a/crates/hungarian_algorithm/src/lib.rs b/crates/hungarian_algorithm/src/lib.rs new file mode 100644 index 0000000000..df5e962cb1 --- /dev/null +++ b/crates/hungarian_algorithm/src/lib.rs @@ -0,0 +1,149 @@ +use std::cmp::Ordering; + +use ndarray::{Array2, Axis}; +use ordered_float::NotNan; +use pathfinding::kuhn_munkres::{kuhn_munkres, Weights}; + +pub struct AssignmentProblem { + costs: Array2>, + + number_of_workers: usize, + number_of_tasks: usize, +} + +#[derive(Clone, Copy, Debug, PartialEq)] +pub struct Assignment { + pub to: usize, + pub cost: f32, +} + +impl Weights> for AssignmentProblem { + fn rows(&self) -> usize { + self.costs.nrows() + } + + fn columns(&self) -> usize { + self.costs.ncols() + } + + fn at(&self, row: usize, col: usize) -> 
NotNan { + self.costs[[row, col]] + } + + fn neg(&self) -> Self + where + Self: Sized, + NotNan: pathfinding::num_traits::Signed, + { + unimplemented!() + } +} + +impl AssignmentProblem { + pub fn from_costs(costs: Array2>) -> Self { + let (number_of_tasks, number_of_workers) = costs.dim(); + let costs = match number_of_tasks.cmp(&number_of_workers) { + Ordering::Less => { + let new_tasks = + Array2::zeros((number_of_workers - number_of_tasks, number_of_workers)); + + ndarray::concatenate![Axis(0), costs, new_tasks] + } + Ordering::Greater => { + let new_costs = + Array2::zeros((number_of_tasks, number_of_tasks - number_of_workers)); + ndarray::concatenate![Axis(1), costs, new_costs] + } + Ordering::Equal => costs, + }; + + Self { + costs, + number_of_workers, + number_of_tasks, + } + } + + pub fn solve(self) -> Vec> { + let (_, assignment) = kuhn_munkres(&self); + + assignment[..self.number_of_tasks] + .iter() + .enumerate() + .map(|(task_index, &job_assignment)| { + if job_assignment < self.number_of_workers { + let cost = self.costs[(task_index, job_assignment)]; + Some(Assignment { + to: job_assignment, + cost: cost.into(), + }) + } else { + None + } + }) + .collect::>() + } +} + +#[cfg(test)] +mod tests { + use ndarray::array; + + use super::*; + trait Convert { + fn convert(self) -> O; + } + + impl Convert>> for Array2 { + fn convert(self) -> Array2> { + self.mapv(|x| NotNan::new(x).unwrap()) + } + } + + #[test] + fn test_assignment_problem() { + let costs = array![[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]].convert(); + let problem = AssignmentProblem::from_costs(costs); + + let solution = problem.solve(); + assert_eq!( + solution, + vec![ + Some(Assignment { to: 0, cost: 1.0 }), + Some(Assignment { to: 1, cost: 1.0 }), + Some(Assignment { to: 2, cost: 1.0 }) + ] + ); + } + + #[test] + fn test_unbalanced_1() { + let costs = array![[1., 0.9, 0.], [0.8, 0., 1.]].convert(); + let problem = AssignmentProblem::from_costs(costs); + + let solution = problem.solve(); 
+ assert_eq!( + solution, + vec![ + Some(Assignment { to: 0, cost: 1.0 }), + Some(Assignment { to: 2, cost: 1.0 }) + ] + ); + } + + #[test] + fn test_unbalanced_2() { + let costs = array![[1., 0.], [0., 1.], [0., 2.]].convert(); + let problem = AssignmentProblem::from_costs(costs); + + let solution = problem.solve(); + assert_eq!( + solution, + vec![ + Some(Assignment { to: 0, cost: 1.0 }), + None, + Some(Assignment { to: 1, cost: 2.0 }) + ] + ); + } +} diff --git a/crates/types/src/parameters.rs b/crates/types/src/parameters.rs index 8717974419..f7e79ebea1 100644 --- a/crates/types/src/parameters.rs +++ b/crates/types/src/parameters.rs @@ -304,6 +304,8 @@ pub struct BallDetectionParameters { pub cluster_merge_radius_factor: f32, pub ball_radius_enlargement_factor: f32, pub detection_noise: Vector2, + pub noise_increase_slope: f32, + pub noise_increase_distance_threshold: f32, } #[derive( @@ -312,7 +314,6 @@ pub struct BallDetectionParameters { pub struct BallFilterNoise { pub process_noise_moving: nalgebra::Vector4, pub process_noise_resting: nalgebra::Vector2, - pub measurement_noise: nalgebra::Vector2, pub initial_covariance: nalgebra::Vector4, } @@ -321,15 +322,16 @@ pub struct BallFilterNoise { )] pub struct BallFilterParameters { pub hypothesis_timeout: Duration, - pub measurement_matching_distance: f32, + pub maximum_number_of_hypotheses: usize, + pub log_likelihood_of_zero_velocity_threshold: f32, pub hypothesis_merge_distance: f32, pub visible_validity_exponential_decay_factor: f32, pub hidden_validity_exponential_decay_factor: f32, pub validity_output_threshold: f32, pub validity_discard_threshold: f32, pub velocity_decay_factor: f32, - pub resting_ball_velocity_threshold: f32, pub noise: BallFilterNoise, + pub maximum_matching_cost: f32, } #[derive( diff --git a/crates/vision/src/ball_detection.rs b/crates/vision/src/ball_detection.rs index 272b2a2361..8536785601 100644 --- a/crates/vision/src/ball_detection.rs +++ b/crates/vision/src/ball_detection.rs 
@@ -1,5 +1,6 @@ use color_eyre::Result; use compiled_nn::CompiledNN; +use nalgebra::Matrix2; use serde::{Deserialize, Serialize}; use context_attribute::context; @@ -137,6 +138,8 @@ impl BallDetection { context.camera_matrix, context.parameters.detection_noise, *context.ball_radius, + context.parameters.noise_increase_slope, + context.parameters.noise_increase_distance_threshold, ); Ok(MainOutputs { @@ -320,6 +323,8 @@ fn project_balls_to_ground( camera_matrix: &CameraMatrix, measurement_noise: Vector2<Pixel>, ball_radius: f32, + noise_increase_slope: f32, + noise_increase_distance_threshold: f32, ) -> Vec<BallPercept> { clusters .iter() @@ -330,14 +335,22 @@ fn project_balls_to_ground( ball_radius, ) .ok()?; + let projected_covariance = { + let distance = position.coords().norm(); + let distance_noise_increase = 1.0 + + (distance - noise_increase_distance_threshold).max(0.0) + * noise_increase_slope; + let scaled_noise = measurement_noise .inner .map(|x| (cluster.circle.radius * x).powi(2)) .framed(); - camera_matrix.project_noise_to_ground(position, scaled_noise) - } - .ok()?; + camera_matrix + .project_noise_to_ground(position, scaled_noise) + .ok()?
+ * (Matrix2::identity() * distance_noise_increase.powi(2)) + }; Some(BallPercept { percept_in_ground: MultivariateNormalDistribution { @@ -490,6 +503,8 @@ mod tests { cluster_merge_radius_factor: 1.5, ball_radius_enlargement_factor: 2.0, detection_noise: vector![0.0, 0.0], + noise_increase_slope: 0.0, + noise_increase_distance_threshold: 0.0, }; let perspective_grid_candidates = PerspectiveGridCandidates { candidates: vec![Circle { diff --git a/etc/parameters/default.json b/etc/parameters/default.json index 5610234149..6f3e6a7da4 100644 --- a/etc/parameters/default.json +++ b/etc/parameters/default.json @@ -54,7 +54,9 @@ "image_containment_merge_factor": 1.0, "cluster_merge_radius_factor": 1.5, "ball_radius_enlargement_factor": 1.5, - "detection_noise": [4.0, 4.0] + "detection_noise": [8.0, 8.0], + "noise_increase_slope": 8.0, + "noise_increase_distance_threshold": 3.0 }, "vision_bottom": { "minimal_radius": 42.0, @@ -69,7 +71,9 @@ "image_containment_merge_factor": 1.0, "cluster_merge_radius_factor": 1.5, "ball_radius_enlargement_factor": 1.5, - "detection_noise": [4.0, 4.0] + "detection_noise": [4.0, 4.0], + "noise_increase_slope": 0.0, + "noise_increase_distance_threshold": 0.0 } }, "camera_matrix_parameters": { @@ -322,19 +326,19 @@ "nanos": 0, "secs": 20 }, - "measurement_matching_distance": 1.0, - "hypothesis_merge_distance": 1.0, - "resting_ball_velocity_threshold": 0.25, - "visible_validity_exponential_decay_factor": 0.96, - "hidden_validity_exponential_decay_factor": 0.999, - "validity_output_threshold": 3.0, + "maximum_matching_cost": 0.25, + "hypothesis_merge_distance": 0.1, + "log_likelihood_of_zero_velocity_threshold": 0.9, + "maximum_number_of_hypotheses": 10, + "visible_validity_exponential_decay_factor": 0.9, + "hidden_validity_exponential_decay_factor": 0.997, + "validity_output_threshold": 1.5, "validity_discard_threshold": 0.5, "velocity_decay_factor": 0.998, "noise": { - "process_noise_moving": [0.005, 0.005, 0.2, 0.2], + "process_noise_moving": 
[0.005, 0.005, 0.02, 0.02], "process_noise_resting": [0.001, 0.001], - "measurement_noise": [0.5, 2.0], - "initial_covariance": [0.5, 0.5, 10.0, 10.0] + "initial_covariance": [0.5, 0.5, 40.0, 40.0] } }, "button_filter": { diff --git a/tools/twix/src/panels/map/layers/ball_filter.rs b/tools/twix/src/panels/map/layers/ball_filter.rs index df5213296e..2ceef81aec 100644 --- a/tools/twix/src/panels/map/layers/ball_filter.rs +++ b/tools/twix/src/panels/map/layers/ball_filter.rs @@ -3,7 +3,7 @@ use std::sync::Arc; use color_eyre::Result; use eframe::epaint::{Color32, Stroke}; -use ball_filter::BallFilter as BallFiltering; +use ball_filter::{BallFilter as BallFiltering, BallMode}; use coordinate_systems::Ground; use linear_algebra::{vector, Point}; use types::field_dimensions::FieldDimensions; @@ -30,25 +30,28 @@ impl Layer for BallFilter { _field_dimensions: &FieldDimensions, ) -> Result<()> { if let Some(filter) = self.filter.get_last_value()?.flatten() { - for hypothesis in filter.hypotheses() { + for hypothesis in filter.hypotheses { let stroke = Stroke::new(0.01, Color32::BLACK); - - let state = hypothesis.resting; - let position = Point::from(state.mean.xy()); - let covariance = state.covariance.fixed_view::<2, 2>(0, 0).into_owned(); - let yellow = Color32::from_rgba_unmultiplied(255, 255, 0, 100); - painter.covariance(position, covariance, stroke, yellow); - painter.target(position, 0.02, stroke, yellow); - - let state = hypothesis.moving; - let position = Point::from(state.mean.xy()); - let covariance = state.covariance.fixed_view::<2, 2>(0, 0).into_owned(); - let pink = Color32::from_rgba_unmultiplied(255, 102, 204, 100); - painter.covariance(position, covariance, stroke, pink); - painter.target(position, 0.02, stroke, pink); - let velocity = vector![state.mean.z, state.mean.w]; - let velocity_target = position + velocity; - painter.line_segment(position, velocity_target, stroke) + match hypothesis.mode { + BallMode::Resting(resting) => { + let position = 
Point::from(resting.mean.xy()); + let covariance = resting.covariance.fixed_view::<2, 2>(0, 0).into_owned(); + let yellow = Color32::from_rgba_unmultiplied(255, 255, 0, 100); + painter.covariance(position, covariance, stroke, yellow); + painter.target(position, 0.02, stroke, yellow); + } + BallMode::Moving(moving) => { + let position = Point::from(moving.mean.xy()); + let covariance = moving.covariance.fixed_view::<2, 2>(0, 0).into_owned(); + let pink = Color32::from_rgba_unmultiplied(255, 102, 204, 100); + painter.covariance(position, covariance, stroke, pink); + painter.target(position, 0.02, stroke, pink); + + let velocity = vector![moving.mean.z, moving.mean.w]; + let velocity_target = position + velocity; + painter.line_segment(position, velocity_target, stroke) + } + } } }