Skip to content

Commit 0284ca2

Browse files
jumpinthefire and Hossam Rabbouh authored
Change error computation to converge in less steps (#42)
* Pre-compute symbolic matrix for jacobians
* Remove reserve
* Formatting
* Try to fix pipeline
* Try to fix pipeline
* Change error calculation
* Fix par iter, some cleanup
* Cleanup
* Bump Cargo.toml version
* Update LM
* Reset example
* Formatting

---------

Co-authored-by: Hossam Rabbouh <[email protected]>
1 parent 1785aa9 commit 0284ca2

File tree

6 files changed

+79
-76
lines changed

6 files changed

+79
-76
lines changed

Cargo.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[package]
22
name = "tiny-solver"
3-
version = "0.15.0"
3+
version = "0.15.1"
44
edition = "2021"
55
authors = ["Powei Lin <[email protected]>, Hossam R. <[email protected]>"]
66
readme = "README.md"

src/optimizer/common.rs

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -58,6 +58,13 @@ pub trait Optimizer {
5858
// for (key, param) in params.par_iter_mut() {
5959
// }
6060
}
61+
fn compute_error(
62+
&self,
63+
problem: &problem::Problem,
64+
params: &HashMap<String, ParameterBlock>,
65+
) -> f64 {
66+
problem.compute_residuals(params, true).squared_norm_l2()
67+
}
6168
}
6269

6370
#[derive(PartialEq, Debug)]

src/optimizer/gauss_newton_optimizer.rs

Lines changed: 34 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -49,22 +49,42 @@ impl optimizer::Optimizer for GaussNewtonOptimizer {
4949
&variable_name_to_col_idx_dict,
5050
);
5151

52-
let mut last_err: f64 = 1.0;
52+
let mut last_err;
53+
let mut current_error = self.compute_error(&problem, &parameter_blocks);
5354

5455
for i in 0..opt_option.max_iteration {
55-
let start = Instant::now();
56+
last_err = current_error;
57+
let mut start = Instant::now();
58+
5659
let (residuals, jac) = problem.compute_residual_and_jacobian(
5760
&parameter_blocks,
5861
&variable_name_to_col_idx_dict,
59-
total_variable_dimension,
6062
&symbolic_structure,
6163
);
62-
let current_error = residuals.norm_l2();
64+
let residual_and_jacobian_duration = start.elapsed();
65+
66+
start = Instant::now();
67+
let solving_duration;
68+
if let Some(dx) = linear_solver.solve(&residuals, &jac) {
69+
solving_duration = start.elapsed();
70+
let dx_na = dx.as_ref().into_nalgebra().column(0).clone_owned();
71+
self.apply_dx2(
72+
&dx_na,
73+
&mut parameter_blocks,
74+
&variable_name_to_col_idx_dict,
75+
);
76+
} else {
77+
log::debug!("solve ax=b failed");
78+
return None;
79+
}
80+
81+
current_error = self.compute_error(&problem, &parameter_blocks);
6382
trace!(
64-
"iter:{}, total err:{}, duration: {:?}",
83+
"iter:{}, total err:{}, residual + jacobian duration: {:?}, solving duration: {:?}",
6584
i,
6685
current_error,
67-
start.elapsed()
86+
residual_and_jacobian_duration,
87+
solving_duration
6888
);
6989

7090
if current_error < opt_option.min_error_threshold {
@@ -74,32 +94,15 @@ impl optimizer::Optimizer for GaussNewtonOptimizer {
7494
log::debug!("solve ax=b failed, current error is nan");
7595
return None;
7696
}
77-
if i > 0 {
78-
if (last_err - current_error).abs() < opt_option.min_abs_error_decrease_threshold {
79-
trace!("absolute error decreas low");
80-
break;
81-
} else if (last_err - current_error).abs() / last_err
82-
< opt_option.min_rel_error_decrease_threshold
83-
{
84-
trace!("reletive error decrease low");
85-
break;
86-
}
87-
}
88-
last_err = current_error;
8997

90-
let start = Instant::now();
91-
if let Some(dx) = linear_solver.solve(&residuals, &jac) {
92-
let duration = start.elapsed();
93-
trace!("Time elapsed in solve Ax=b is: {:?}", duration);
94-
let dx_na = dx.as_ref().into_nalgebra().column(0).clone_owned();
95-
self.apply_dx2(
96-
&dx_na,
97-
&mut parameter_blocks,
98-
&variable_name_to_col_idx_dict,
99-
);
100-
} else {
101-
log::debug!("solve ax=b failed");
102-
return None;
98+
if (last_err - current_error).abs() < opt_option.min_abs_error_decrease_threshold {
99+
trace!("absolute error decrease low");
100+
break;
101+
} else if (last_err - current_error).abs() / last_err
102+
< opt_option.min_rel_error_decrease_threshold
103+
{
104+
trace!("relative error decrease low");
105+
break;
103106
}
104107
}
105108
let params = parameter_blocks

src/optimizer/levenberg_marquardt_optimizer.rs

Lines changed: 27 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@ impl optimizer::Optimizer for LevenbergMarquardtOptimizer {
6767
// With LM, rather than solving A * dx = b for dx, we solve for (A + lambda * diag(A)) dx = b.
6868
let mut jacobi_scaling_diagonal: Option<faer::sparse::SparseColMat<usize, f64>> = None;
6969

70-
let s = problem.build_symbolic_structure(
70+
let symbolic_structure = problem.build_symbolic_structure(
7171
&parameter_blocks,
7272
total_variable_dimension,
7373
&variable_name_to_col_idx_dict,
@@ -76,13 +76,15 @@ impl optimizer::Optimizer for LevenbergMarquardtOptimizer {
7676
// Damping parameter (a.k.a lambda / Marquardt parameter)
7777
let mut u = 1.0 / self.initial_trust_region_radius;
7878

79-
let mut last_err: f64 = 1.0;
79+
let mut last_err;
80+
let mut current_error = self.compute_error(&problem, &parameter_blocks);
8081
for i in 0..opt_option.max_iteration {
82+
last_err = current_error;
83+
8184
let (residuals, mut jac) = problem.compute_residual_and_jacobian(
8285
&parameter_blocks,
8386
&variable_name_to_col_idx_dict,
84-
total_variable_dimension,
85-
&s,
87+
&symbolic_structure,
8688
);
8789

8890
if i == 0 {
@@ -110,29 +112,6 @@ impl optimizer::Optimizer for LevenbergMarquardtOptimizer {
110112
);
111113
}
112114

113-
let current_error = residuals.norm_l2();
114-
trace!("iter:{} total err:{}", i, current_error);
115-
116-
if current_error < opt_option.min_error_threshold {
117-
trace!("error too low");
118-
break;
119-
} else if current_error.is_nan() {
120-
log::debug!("solve ax=b failed, current error is nan");
121-
return None;
122-
}
123-
if i > 0 {
124-
if (last_err - current_error).abs() < opt_option.min_abs_error_decrease_threshold {
125-
trace!("absolute error decrease low");
126-
break;
127-
} else if (last_err - current_error).abs() / last_err
128-
< opt_option.min_rel_error_decrease_threshold
129-
{
130-
trace!("relative error decrease low");
131-
break;
132-
}
133-
}
134-
last_err = current_error;
135-
136115
// Scale the current jacobian by the diagonal matrix
137116
jac = jac * jacobi_scaling_diagonal.as_ref().unwrap();
138117

@@ -199,6 +178,27 @@ impl optimizer::Optimizer for LevenbergMarquardtOptimizer {
199178
log::debug!("solve ax=b failed");
200179
return None;
201180
}
181+
182+
current_error = self.compute_error(problem, &parameter_blocks);
183+
trace!("iter:{} total err:{}", i, current_error);
184+
185+
if current_error < opt_option.min_error_threshold {
186+
trace!("error too low");
187+
break;
188+
} else if current_error.is_nan() {
189+
log::debug!("solve ax=b failed, current error is nan");
190+
return None;
191+
}
192+
193+
if (last_err - current_error).abs() < opt_option.min_abs_error_decrease_threshold {
194+
trace!("absolute error decrease low");
195+
break;
196+
} else if (last_err - current_error).abs() / last_err
197+
< opt_option.min_rel_error_decrease_threshold
198+
{
199+
trace!("relative error decrease low");
200+
break;
201+
}
202202
}
203203
let params = parameter_blocks
204204
.iter()

src/problem.rs

Lines changed: 10 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -196,14 +196,16 @@ impl Problem {
196196
let total_residual = Arc::new(Mutex::new(na::DVector::<f64>::zeros(
197197
self.total_residual_dimension,
198198
)));
199-
self.residual_blocks.iter().for_each(|(_, residual_block)| {
200-
self.compute_residual_impl(
201-
residual_block,
202-
parameter_blocks,
203-
&total_residual,
204-
with_loss_fn,
205-
)
206-
});
199+
self.residual_blocks
200+
.par_iter()
201+
.for_each(|(_, residual_block)| {
202+
self.compute_residual_impl(
203+
residual_block,
204+
parameter_blocks,
205+
&total_residual,
206+
with_loss_fn,
207+
)
208+
});
207209
let total_residual = Arc::try_unwrap(total_residual)
208210
.unwrap()
209211
.into_inner()
@@ -216,15 +218,13 @@ impl Problem {
216218
&self,
217219
parameter_blocks: &HashMap<String, ParameterBlock>,
218220
variable_name_to_col_idx_dict: &HashMap<String, usize>,
219-
total_variable_dimension: usize,
220221
symbolic_structure: &SymbolicStructure,
221222
) -> (faer::Mat<f64>, SparseColMat<usize, f64>) {
222223
// multi
223224
let total_residual = Arc::new(Mutex::new(na::DVector::<f64>::zeros(
224225
self.total_residual_dimension,
225226
)));
226227

227-
let mut start = std::time::Instant::now();
228228
let jacobian_lists: Vec<JacobianValue> = self
229229
.residual_blocks
230230
.par_iter()
@@ -239,24 +239,18 @@ impl Problem {
239239
.flatten()
240240
.collect();
241241

242-
log::debug!("compute_residual_and_jacobian_impl: {:?}, total_variable_dim: {:?}, total_residual: {:?}", start.elapsed(), total_variable_dimension, self.total_residual_dimension);
243-
start = std::time::Instant::now();
244242
let total_residual = Arc::try_unwrap(total_residual)
245243
.unwrap()
246244
.into_inner()
247245
.unwrap();
248246

249247
let residual_faer = total_residual.view_range(.., ..).into_faer().to_owned();
250-
log::debug!("residual_faer: {:?}", start.elapsed());
251-
start = std::time::Instant::now();
252248
let jacobian_faer = SparseColMat::new_from_order_and_values(
253249
symbolic_structure.pattern.clone(),
254250
&symbolic_structure.order,
255251
jacobian_lists.as_slice(),
256252
)
257253
.unwrap();
258-
log::debug!("jacobian_faer: {:?}", start.elapsed());
259-
//log::debug!("rest of the function: {:?}", start.elapsed());
260254
(residual_faer, jacobian_faer)
261255
}
262256

tests/test_problem.rs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -129,7 +129,6 @@ mod tests {
129129
let (residuals, jac) = problem.compute_residual_and_jacobian(
130130
&parameter_blocks,
131131
&variable_name_to_col_idx_dict,
132-
total_variable_dimension,
133132
&symbolic_structure,
134133
);
135134

0 commit comments

Comments
 (0)