diff --git a/module/move/optimization_tools/Cargo.toml b/module/move/optimization_tools/Cargo.toml index b56414c847..dc422ba723 100644 --- a/module/move/optimization_tools/Cargo.toml +++ b/module/move/optimization_tools/Cargo.toml @@ -59,6 +59,7 @@ rayon = "1.8.0" thiserror = "1.0.56" rkyv = { version = "0.7.44", features = [ "validation" ] } ordered-float = "4.2.0" +markdown-table = "0.2.0" [dev-dependencies] test_tools = { workspace = true } diff --git a/module/move/optimization_tools/src/hybrid_optimizer/mod.rs b/module/move/optimization_tools/src/hybrid_optimizer/mod.rs index dc298931c2..e82477e11c 100644 --- a/module/move/optimization_tools/src/hybrid_optimizer/mod.rs +++ b/module/move/optimization_tools/src/hybrid_optimizer/mod.rs @@ -498,8 +498,8 @@ pub fn starting_params_for_hybrid() -> Result< OptimalProblem< RangeInclusive< f let opt_problem = OptimalProblem::new() .add( Some( String::from( "temperature decrease factor" ) ), Some( 0.0..=1.0 ), Some( 0.999 ), Some( 0.0002 ) )? .add( Some( String::from( "mutation per dynasty" ) ), Some( 10.0..=2000.0 ), Some( 300.0 ), Some( 20.0 ) )? - .add( Some( String::from( "mutation rate" ) ), Some( 0.0..=0.5 ), Some( 0.25 ), Some( 0.1 ) )? - .add( Some( String::from( "crossover rate" ) ), Some( 0.0..=0.5 ), Some( 0.5 ), Some( 0.2 ) )? + .add( Some( String::from( "mutation rate" ) ), Some( 0.0..=1.0 ), Some( 0.25 ), Some( 0.1 ) )? + .add( Some( String::from( "crossover rate" ) ), Some( 0.0..=1.0 ), Some( 0.5 ), Some( 0.2 ) )? .add( Some( String::from( "max stale iterations" ) ), Some( 1.0..=1000.0 ), Some( 30.0 ), Some( 5.0 ) )? .add( Some( String::from( "population size" ) ), Some( 1.0..=1000.0 ), Some( 300.0 ), Some( 200.0 ) )? .add( Some( String::from( "dynasties limit" ) ), Some( 100.0..=5000.0 ), Some( 1000.0 ), Some( 300.0 ) )? @@ -530,8 +530,8 @@ pub fn starting_params_for_ga() -> Result< OptimalProblem< RangeInclusive< f64 > let opt_problem = OptimalProblem::new() .add( Some( String::from( "temperature decrease factor" ) ), Some( 0.0..=1.0 ), Some( 0.999 ), Some( 0.0002 ) )? .add( Some( String::from( "mutation per dynasty" ) ), Some( 10.0..=2000.0 ), Some( 300.0 ), Some( 20.0 ) )? - .add( Some( String::from( "mutation rate" ) ), Some( 0.1..=0.5 ), Some( 0.25 ), Some( 0.1 ) )? - .add( Some( String::from( "crossover rate" ) ), Some( 0.1..=0.5 ), Some( 0.5 ), Some( 0.2 ) )? + .add( Some( String::from( "mutation rate" ) ), Some( 0.1..=1.0 ), Some( 0.25 ), Some( 0.1 ) )? + .add( Some( String::from( "crossover rate" ) ), Some( 0.1..=1.0 ), Some( 0.5 ), Some( 0.2 ) )? .add( Some( String::from( "max stale iterations" ) ), Some( 1.0..=1000.0 ), Some( 30.0 ), Some( 5.0 ) )? .add( Some( String::from( "population size" ) ), Some( 10.0..=5000.0 ), Some( 300.0 ), Some( 200.0 ) )? .add( Some( String::from( "dynasties limit" ) ), Some( 100.0..=5000.0 ), Some( 1000.0 ), Some( 300.0 ) )? diff --git a/module/move/optimization_tools/src/optimal_params_search/mod.rs b/module/move/optimization_tools/src/optimal_params_search/mod.rs index fdf3a28a7a..00b4ccd694 100644 --- a/module/move/optimization_tools/src/optimal_params_search/mod.rs +++ b/module/move/optimization_tools/src/optimal_params_search/mod.rs @@ -5,35 +5,10 @@ pub mod nelder_mead; pub mod sim_annealing; use std::ops::RangeBounds; use iter_tools::Itertools; - use crate::hybrid_optimizer::*; +use results_serialize::read_results; -use self::results_serialize::read_results; - -/// Level of difficulty of sudoku board. 
-#[ derive( Debug, Clone, Copy, PartialEq, Eq, Hash ) ] -pub enum Level -{ - /// Easy level with difficulty <= 2. - Easy, - /// Medium, 2 < difficulty <= 2.5. - Medium, - /// Hard level, 2.5 < difficulty <= 3. - Hard, - /// Expert level with difficulty > 3. - Expert, -} - -impl Level { - /// Iterates over sudoku difficulty levels. - pub fn iterator() -> impl Iterator< Item = Level > - { - use Level::*; - [ Easy, Medium, Hard, Expert ].iter().copied() - } -} - -/// +/// Configuration for optimal parameters search. #[ derive( Debug, Clone ) ] pub struct OptimalParamsConfig { @@ -49,7 +24,7 @@ pub struct OptimalParamsConfig impl Default for OptimalParamsConfig { - fn default() -> Self + fn default() -> Self { Self { @@ -93,12 +68,12 @@ impl< 'a, R : RangeBounds< f64 > > OptimalProblem< R > /// Add parameter to optimal parameters search problem. pub fn add - ( + ( mut self, - name : Option< String >, - bounds : Option< R >, - start_value : Option< f64 >, - simplex_size : Option< f64 >, + name : Option< String >, + bounds : Option< R >, + start_value : Option< f64 >, + simplex_size : Option< f64 >, ) -> Result< Self, Error > { if let Some( ref name ) = name @@ -134,14 +109,15 @@ pub fn find_hybrid_optimal_params< R, S, C, M > ( config : OptimalParamsConfig, problem : OptimalProblem< R >, - hybrid_problem : Problem< S, C, M > + hybrid_problem : Problem< S, C, M >, + intermediate_results_file : Option< String >, ) -> Result< nelder_mead::Solution, nelder_mead::Error > where R : RangeBounds< f64 > + Sync, S : InitialProblem + Sync + Clone, - C : CrossoverOperator::< Person = < S as InitialProblem>::Person > + Clone + Sync, + C : CrossoverOperator::< Person = < S as InitialProblem >::Person > + Clone + Sync, M : MutationOperator::< Person = < S as InitialProblem >::Person > + Sync, M : MutationOperator::< Problem = S > + Sync + Clone, - TournamentSelection: SelectionOperator<::Person> + TournamentSelection: SelectionOperator< < S as InitialProblem >::Person > { let seeder = hybrid_problem.seeder.clone(); let ga_crossover_operator = hybrid_problem.ga_crossover_operator.clone(); @@ -152,7 +128,7 @@ where R : RangeBounds< f64 > + Sync, log::info! ( "temp_decrease_coefficient : {:.4?}, max_mutations_per_dynasty: {}, mutation_rate: {:.2}, crossover_rate: {:.2};", - case.coords[ 0 ], case.coords[ 1 ].into_inner() as usize, case.coords[ 2 ], case.coords[ 3 ] + case.coords[ 0 ].into_inner(), case.coords[ 1 ].into_inner() as usize, case.coords[ 2 ], case.coords[ 3 ] ); log::info! @@ -192,7 +168,7 @@ where R : RangeBounds< f64 > + Sync, let ( _reason, _solution ) = optimizer.optimize(); }; - let res = optimize_by_time( config, problem, objective_function ); + let res = optimize_by_time( config, problem, objective_function, intermediate_results_file ); log::info!( "result: {:?}", res ); @@ -200,7 +176,13 @@ where R : RangeBounds< f64 > + Sync, } /// Wrapper for optimizing objective function by execution time instead of value. 
-pub fn optimize_by_time< F, R >( config : OptimalParamsConfig, problem : OptimalProblem< R >, objective_function : F ) -> Result< nelder_mead::Solution, nelder_mead::Error > +pub fn optimize_by_time< F, R > +( + config : OptimalParamsConfig, + problem : OptimalProblem< R >, + objective_function : F, + intermediate_results_file : Option< String >, +) -> Result< nelder_mead::Solution, nelder_mead::Error > where F : Fn( &nelder_mead::Point ) + Sync, R : RangeBounds< f64 > + Sync { let objective_function = | case : &nelder_mead::Point | @@ -233,25 +215,32 @@ where F : Fn( &nelder_mead::Point ) + Sync, R : RangeBounds< f64 > + Sync // objective_function : objective_function, // max_iterations : 50, // }; + let mut optimizer = nelder_mead::Optimizer::new( objective_function ); optimizer.bounds = problem.bounds; - optimizer.set_starting_point( problem.starting_point.clone() ); + optimizer.set_starting_point( problem.starting_point ); optimizer.set_simplex_size( problem.simplex_size ); + optimizer.add_constraint( | p : &nelder_mead::Point | p.coords[ 2 ] + p.coords[ 3 ] <= 1.0.into() ); optimizer.improvement_threshold = config.improvement_threshold; optimizer.max_iterations = config.max_iterations; optimizer.max_no_improvement_steps = config.max_no_improvement_steps; - let calculated_points = read_results(); - if let Ok( calculated_points ) = calculated_points + if let Some( results_file ) = intermediate_results_file { - optimizer.set_calculated_results( calculated_points ); + let calculated_points = read_results( &results_file ); + if let Ok( calculated_points ) = calculated_points + { + optimizer.set_calculated_results( calculated_points ); + } + + optimizer.set_save_results_file( results_file ); } optimizer.optimize_from_random_points() } -/// Possible error when building NMOptimizer. +/// Possible error when building OptimalProblem. #[ derive( thiserror::Error, Debug ) ] pub enum Error { diff --git a/module/move/optimization_tools/src/optimal_params_search/nelder_mead.rs b/module/move/optimization_tools/src/optimal_params_search/nelder_mead.rs index a8089536d9..f9eefc03b4 100644 --- a/module/move/optimization_tools/src/optimal_params_search/nelder_mead.rs +++ b/module/move/optimization_tools/src/optimal_params_search/nelder_mead.rs @@ -2,8 +2,13 @@ //! It operates by adjusting a simplex(geometric shape) to explore and converge toward the optimal solution. //! -use std::{ collections::HashMap, ops::{ Bound, RangeBounds } }; - +use std:: +{ + collections::HashMap, + fs::{ File, OpenOptions }, + ops::{ Bound, RangeBounds }, + sync::{ Arc, Mutex }, +}; use deterministic_rand::{ Hrng, Seed, Rng }; use iter_tools::Itertools; use ordered_float::OrderedFloat; @@ -27,6 +32,7 @@ impl Point Self { coords : coords.into_iter().map( | elem | elem.into() ).collect_vec() } } + /// Create new point from given coordinates. pub fn new_from_ordered( coords : Vec< OrderedFloat< f64 > > ) -> Self { Self { coords } @@ -35,15 +41,59 @@ impl Point /// Represents geometric shape formed by a set of n+1 points in a multidimensional space, where n is a number of dimensions. /// Simplex is used to navigate through solution space, adjusting its shape based on the performance of the objective function at different points. -#[ derive( Debug, Clone ) ] +#[ derive( Debug, Clone ) ] pub struct Simplex { /// Points of simplex. 
pub points : Vec< Point >, } -/// Struct which holds initial configuration for NelderMead optimization, and can perform optimization if all necessary information were provided during initialization process. +/// Constraints for points of optimization process. +#[ derive( Debug, Clone ) ] +pub enum Constraints +{ + NoConstraints, + WithConstraints( Vec< fn( &Point ) -> bool > ), +} + +impl Constraints +{ + /// Add constraint to constraints list. + pub fn add_constraint( &mut self, constraint : fn( &Point ) -> bool ) + { + match self + { + Self::NoConstraints => *self = Self::WithConstraints( vec![ constraint ] ), + Self::WithConstraints( constraints ) => constraints.push( constraint ), + } + } +} + #[ derive( Debug, Clone ) ] +pub struct Stats +{ + /// Sum of difference between starting value of parameter and new value, for every parameter. + pub diff_sum : Vec< f64 >, +} + +impl Stats +{ + pub fn new( dimensions : usize ) -> Self + { + Self { diff_sum : vec![ 0.0; dimensions ] } + } + + pub fn record_diff( &mut self, start_point : &Point, point : &Point ) + { + for i in 0..start_point.coords.len() + { + self.diff_sum[ i ] += ( start_point.coords[ i ] - point.coords[ i ] ).abs() + } + } +} + +/// Struct which holds initial configuration for NelderMead optimization, and can perform optimization if all necessary information were provided during initialization process. +#[ derive( Debug, Clone ) ] pub struct Optimizer< R, F > { /// Bounds for parameters of objective function, may be unbounded or bounded on one side. @@ -76,13 +126,20 @@ pub struct Optimizer< R, F > /// If previously calculated contraction point doesn't improve the objective function shrinking is performed to adjust simplex size. /// Shrinking involves reducing the distance between the vertices of the simplex, making it smaller. pub sigma : f64, - pub calculated_results : Option< HashMap< Point, f64 > > + /// Values of objective function calculated in previous executions. + pub calculated_results : Option< HashMap< Point, f64 > >, + /// File for saving values of objective function during optimization process. + pub save_results_file : Option< Arc< Mutex< File > > >, + /// Additional constraint for coordinates of function. + pub constraints : Constraints, } -impl< R : RangeBounds< f64 > + Sync, F : Fn( &Point ) -> f64 + Sync > Optimizer< R, F > +impl< R, F > Optimizer< R, F > +where R : RangeBounds< f64 > + Sync, + F : Fn( &Point ) -> f64 + Sync, { /// Create new instance of Nelder-Mead optimizer. - pub fn new( objective_function : F ) -> Self + pub fn new( objective_function : F ) -> Self { Self { @@ -98,16 +155,51 @@ impl< R : RangeBounds< f64 > + Sync, F : Fn( &Point ) -> f64 + Sync > Optimizer< rho : -0.5, sigma : 0.5, calculated_results : None, + save_results_file : None, + constraints : Constraints::NoConstraints, } } + /// Add set of previosly calculated values of objective function. pub fn set_calculated_results( &mut self, res : HashMap< Point, f64 > ) { self.calculated_results = Some( res ); } + /// Set file for saving results of calculations. + pub fn set_save_results_file( &mut self, file_path : String ) + { + let file_res = OpenOptions::new() + .write( true ) + .append( true ) + .create( true ) + .open( file_path ) + ; + + if let Ok( file ) = file_res + { + self.save_results_file = Some( Arc::new( Mutex::new( file ) ) ); + } + } + + /// Add constraint function. 
+ pub fn add_constraint( &mut self, constraint : fn( &Point ) -> bool ) + { + self.constraints.add_constraint( constraint ); + } + + /// Calculate value of objective function at given point or get previously calculated value if such exists. pub fn evaluate_point( &self, p : &Point ) -> f64 { + if let Constraints::WithConstraints( constraint_vec ) = &self.constraints + { + let valid = constraint_vec.iter().fold( true, | acc, constraint | acc && constraint( p ) ); + if !valid + { + return f64::INFINITY; + } + } + if let Some( points ) = &self.calculated_results { if let Some( value ) = points.get( &p ) @@ -116,7 +208,16 @@ impl< R : RangeBounds< f64 > + Sync, F : Fn( &Point ) -> f64 + Sync > Optimizer< } } let result = ( self.objective_function )( p ); - _ = save_result( p.coords.clone().into_iter().map( |val| val.into_inner() ).collect_vec(), result ); + + if let Some( file ) = &self.save_results_file + { + _ = save_result + ( + p.coords.clone().into_iter().map( | val | val.into_inner() ).collect_vec(), + result, + file.clone(), + ); + } result } @@ -186,11 +287,18 @@ impl< R : RangeBounds< f64 > + Sync, F : Fn( &Point ) -> f64 + Sync > Optimizer< res } + // fn update_diff( point : &Point ) + // { + // for coordinate in point + // { + + // } + // } + /// Checks if point left the domain, if so, performs projection: all coordinates that lie out of domain bounds are set to closest coordinate included in bounded space. /// Returns projected point. fn check_bounds( &self, point : Point ) -> Point { - let mut coords = point.coords; for i in 0..self.bounds.len() { @@ -200,16 +308,16 @@ impl< R : RangeBounds< f64 > + Sync, F : Fn( &Point ) -> f64 + Sync > Optimizer< { match bound.start_bound() { - Bound::Included( val ) => + Bound::Included( val ) => { - if val < &coords[ i ] + if val < &coords[ i ] { coords[ i ] = ( *val ).into(); } }, - Bound::Excluded( val ) => + Bound::Excluded( val ) => { - if val <= &coords[ i ] + if val <= &coords[ i ] { coords[ i ] = ( val + f64::EPSILON ).into(); } @@ -218,16 +326,16 @@ impl< R : RangeBounds< f64 > + Sync, F : Fn( &Point ) -> f64 + Sync > Optimizer< } match bound.end_bound() { - Bound::Included( val ) => + Bound::Included( val ) => { - if val > &coords[ i ] + if val > &coords[ i ] { coords[ i ] = ( *val ).into(); } }, - Bound::Excluded( val ) => + Bound::Excluded( val ) => { - if val >= &coords[ i ] + if val >= &coords[ i ] { coords[ i ] = ( val - f64::EPSILON ).into(); } @@ -307,7 +415,7 @@ impl< R : RangeBounds< f64 > + Sync, F : Fn( &Point ) -> f64 + Sync > Optimizer< new_coords.push( start_bound ) } } - else + else { if bound.end_bound() != Bound::Unbounded { @@ -356,8 +464,8 @@ impl< R : RangeBounds< f64 > + Sync, F : Fn( &Point ) -> f64 + Sync > Optimizer< Bound::Unbounded => unreachable!(), }; let end = match bound.end_bound() { - Bound::Included(end) => *end + f64::EPSILON, - Bound::Excluded(end) => *end, + Bound::Included( end ) => *end + f64::EPSILON, + Bound::Excluded( end ) => *end, Bound::Unbounded => unreachable!(), }; @@ -369,9 +477,11 @@ impl< R : RangeBounds< f64 > + Sync, F : Fn( &Point ) -> f64 + Sync > Optimizer< points.push( Point::new( point ) ); } - let results = points.into_par_iter().map( | point | { + let stats = Arc::new( Mutex::new( Stats::new( self.start_point.coords.len() ) ) ); + + let results = points.into_par_iter().map( | point | + { let x0 = point.clone(); - let dimensions = x0.coords.len(); let mut prev_best = self.evaluate_point( &x0 ); let mut steps_with_no_improv = 0; @@ -397,6 +507,7 @@ impl< R : RangeBounds< f64 > + 
Sync, F : Fn( &Point ) -> f64 + Sync > Optimizer< point : res[ 0 ].0.clone(), objective : res[ 0 ].1, reason : TerminationReason::MaxIterations, + stats : None, } ) } @@ -419,6 +530,7 @@ impl< R : RangeBounds< f64 > + Sync, F : Fn( &Point ) -> f64 + Sync > Optimizer< point : res[ 0 ].0.clone(), objective : res[ 0 ].1, reason : TerminationReason::NoImprovement, + stats : None, } ) } @@ -441,6 +553,7 @@ impl< R : RangeBounds< f64 > + Sync, F : Fn( &Point ) -> f64 + Sync > Optimizer< } // check if point left the domain, if so, perform projection let x_ref = self.check_bounds( Point::new_from_ordered( x_ref ) ); + stats.lock().unwrap().record_diff( &self.start_point, &x_ref ); let reflection_score = self.evaluate_point( &x_ref ); let second_worst = res[ res.len() - 2 ].1; @@ -461,6 +574,7 @@ impl< R : RangeBounds< f64 > + Sync, F : Fn( &Point ) -> f64 + Sync > Optimizer< } // check if point left the domain, if so, perform projection let x_exp = self.check_bounds( Point::new_from_ordered( x_exp ) ); + stats.lock().unwrap().record_diff( &self.start_point, &x_exp ); let expansion_score = self.evaluate_point( &x_exp ); if expansion_score < reflection_score @@ -484,6 +598,7 @@ impl< R : RangeBounds< f64 > + Sync, F : Fn( &Point ) -> f64 + Sync > Optimizer< x_con[ i ] = x0_center[ i ] + OrderedFloat( self.rho ) * ( x0_center[ i ] - worst_dir.0.coords[ i ] ); } let x_con = self.check_bounds( Point::new_from_ordered( x_con ) ); + stats.lock().unwrap().record_diff( &self.start_point, &x_con ); let contraction_score = self.evaluate_point( &x_con ); if contraction_score < worst_dir.1 @@ -504,6 +619,7 @@ impl< R : RangeBounds< f64 > + Sync, F : Fn( &Point ) -> f64 + Sync > Optimizer< x_shrink[ i ] = x1.coords[ i ] + OrderedFloat( self.sigma ) * ( point.coords[ i ] - x1.coords[ i ] ); } let x_shrink = self.check_bounds( Point::new_from_ordered( x_shrink ) ); + stats.lock().unwrap().record_diff( &self.start_point, &x_shrink ); let score = self.evaluate_point( &x_shrink ); new_res.push( ( x_shrink, score ) ); } @@ -512,8 +628,12 @@ impl< R : RangeBounds< f64 > + Sync, F : Fn( &Point ) -> f64 + Sync > Optimizer< } } ).collect::< Vec<_> >(); + let stats = stats.lock().unwrap().clone(); + let results = results.into_iter().flatten().collect_vec(); - Ok( results.into_iter().min_by( | res1, res2 | res1.objective.total_cmp( &res2.objective ) ).unwrap() ) + let mut res = results.into_iter().min_by( | res1, res2 | res1.objective.total_cmp( &res2.objective ) ).unwrap(); + res.stats = Some( stats ); + Ok( res ) } /// Optimize provided objective function with using initialized configuration. @@ -561,6 +681,7 @@ impl< R : RangeBounds< f64 > + Sync, F : Fn( &Point ) -> f64 + Sync > Optimizer< point : res[ 0 ].0.clone(), objective : res[ 0 ].1, reason : TerminationReason::MaxIterations, + stats : None, } ) } @@ -583,6 +704,7 @@ impl< R : RangeBounds< f64 > + Sync, F : Fn( &Point ) -> f64 + Sync > Optimizer< point : res[ 0 ].0.clone(), objective : res[ 0 ].1, reason : TerminationReason::NoImprovement, + stats : None, } ) } @@ -687,6 +809,8 @@ pub struct Solution pub objective : f64, /// Reason for termination. pub reason : TerminationReason, + /// Staticstics. + pub stats : Option< Stats >, } /// Reasons for termination of optimization process. 
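The hunks above add three things to nelder_mead.rs: an optional per-run results file, a Constraints enum whose violated predicates make evaluate_point return f64::INFINITY, and a Stats record carried in Solution. Below is a minimal sketch of how the two new types behave, assuming the definitions added above; the function name and the numeric values are illustrative only.

use ordered_float::OrderedFloat;
use optimization_tools::optimal_params_search::nelder_mead::{ Constraints, Point, Stats };

fn constraints_and_stats_sketch()
{
  // Same form of constraint as the one registered in optimize_by_time :
  // mutation rate ( coords[ 2 ] ) plus crossover rate ( coords[ 3 ] ) must not exceed 1.
  let mut constraints = Constraints::NoConstraints;
  constraints.add_constraint( | p : &Point | p.coords[ 2 ] + p.coords[ 3 ] <= OrderedFloat( 1.0 ) );

  // A starting point and a candidate produced by some simplex transformation ( arbitrary values ).
  let start = Point::new( vec![ 0.999, 300.0, 0.25, 0.5 ] );
  let candidate = Point::new( vec![ 0.9, 280.0, 0.3, 0.6 ] );

  // Stats accumulates the absolute per-coordinate distance from the starting point,
  // which is what the "sum of differences" entries in the regenerated result files report.
  let mut stats = Stats::new( start.coords.len() );
  stats.record_diff( &start, &candidate );
  assert_eq!( stats.diff_sum.len(), 4 );
}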
diff --git a/module/move/optimization_tools/src/optimal_params_search/results_serialize.rs b/module/move/optimization_tools/src/optimal_params_search/results_serialize.rs index 09ba199589..432774d6cd 100644 --- a/module/move/optimization_tools/src/optimal_params_search/results_serialize.rs +++ b/module/move/optimization_tools/src/optimal_params_search/results_serialize.rs @@ -2,10 +2,12 @@ use std:: { - collections::HashMap, fs::OpenOptions, io::{ BufRead, BufReader, Write }, + collections::HashMap, + fs::{ File, OpenOptions }, + io::{ BufRead, BufReader, Write }, + sync::{ Arc, Mutex }, }; use rkyv::{ Archive, Deserialize, Serialize } ; - use crate::optimal_params_search::nelder_mead::Point; #[ derive( Archive, Deserialize, Serialize, Debug ) ] @@ -23,37 +25,22 @@ struct ObjectiveFunctionValue } /// Save results of optimal parameters search. -pub fn save_result( point : Vec< f64 >, value : f64 ) -> Result< (), Box< dyn std::error::Error > > +pub fn save_result( point : Vec< f64 >, value : f64, file : Arc< Mutex< File > > ) -> Result< (), Box< dyn std::error::Error > > { let obj_value = ObjectiveFunctionValue{ point, value }; - - let dir_path = format!( "{}/target", crate::simplex::drawing::workspace_dir().to_string_lossy() ); - _ = std::fs::create_dir( &dir_path ); - let path = format!( "{}/output", dir_path ); - let bytes = rkyv::to_bytes::< _, 256 >( &obj_value ).unwrap(); - let mut file = OpenOptions::new() - .write( true ) - .append( true ) - .create( true ) - .open( &path ) - .unwrap(); - - file.write( &bytes )?; + let mut file = file.lock().unwrap(); + file.write( &bytes )?; file.write( &vec![ 0x0A as u8 ] )?; + Ok( () ) } /// Read results from previous execution. -pub fn read_results() -> Result< HashMap< Point, f64 >, Box< dyn std::error::Error > > +pub fn read_results( file_path : &str ) -> Result< HashMap< Point, f64 >, Box< dyn std::error::Error > > { - - let dir_path = format!( "{}/target", crate::simplex::drawing::workspace_dir().to_string_lossy() ); - _ = std::fs::create_dir( &dir_path ); - let path = format!( "{}/output", dir_path ); - - let read_file = OpenOptions::new().read( true ).open( &path )?; + let read_file = OpenOptions::new().read( true ).open( file_path )?; let mut reader = BufReader::new( read_file ); let mut buffer: Vec< u8 > = Vec::new(); let mut data = HashMap::new(); diff --git a/module/move/optimization_tools/src/optimal_params_search/sim_annealing.rs b/module/move/optimization_tools/src/optimal_params_search/sim_annealing.rs index a89b536282..084cbaee51 100644 --- a/module/move/optimization_tools/src/optimal_params_search/sim_annealing.rs +++ b/module/move/optimization_tools/src/optimal_params_search/sim_annealing.rs @@ -203,6 +203,7 @@ impl< R : RangeBounds< f64 > + Sync, F : Fn( nelder_mead::Point ) -> f64 + Sync point : Point::new( best_found.0.clone() ), objective : best_found.1, reason : TerminationReason::MaxIterations, + stats : None, } ) } } diff --git a/module/move/optimization_tools/src/problems/sudoku/board.rs b/module/move/optimization_tools/src/problems/sudoku/board.rs index 6ada848c91..d73dc5290d 100644 --- a/module/move/optimization_tools/src/problems/sudoku/board.rs +++ b/module/move/optimization_tools/src/problems/sudoku/board.rs @@ -337,6 +337,41 @@ impl Board let coeff = possibilities_count.into_iter().fold( 0, | acc, val | acc + val.0 * val.1 ) as f64 / 81.0 ; coeff } + + pub fn calculate_level( &self ) -> Level + { + match self.calculate_difficulty() + { + n if n >= 0.0 && n<= 2.0 => Level::Easy, + n if n > 2.0 && n <= 2.5 => 
Level::Medium, + n if n > 2.5 && n < 3.0 => Level::Hard, + _ => Level::Expert, + } + } + +} + +/// Level of difficulty of sudoku board. +#[ derive( Debug, Clone, Copy, PartialEq, Eq, Hash ) ] +pub enum Level +{ + /// Easy level with difficulty <= 2. + Easy, + /// Medium, 2 < difficulty <= 2.5. + Medium, + /// Hard level, 2.5 < difficulty <= 3. + Hard, + /// Expert level with difficulty > 3. + Expert, +} + +impl Level { + /// Iterates over sudoku difficulty levels. + pub fn iterator() -> impl Iterator< Item = Level > + { + use Level::*; + [ Easy, Medium, Hard, Expert ].iter().copied() + } } /// Sets default value for board. diff --git a/module/move/optimization_tools/src/problems/sudoku/sudoku.rs b/module/move/optimization_tools/src/problems/sudoku/sudoku.rs index fa1f00d268..816c57f68a 100644 --- a/module/move/optimization_tools/src/problems/sudoku/sudoku.rs +++ b/module/move/optimization_tools/src/problems/sudoku/sudoku.rs @@ -432,4 +432,4 @@ impl CrossoverOperator for BestRowsColumnsCrossover SudokuPerson::with_board( min_board ) } -} \ No newline at end of file +} diff --git a/module/move/optimization_tools/src/problems/traveling_salesman.rs b/module/move/optimization_tools/src/problems/traveling_salesman.rs index 5151664007..2c3e5bb9a1 100644 --- a/module/move/optimization_tools/src/problems/traveling_salesman.rs +++ b/module/move/optimization_tools/src/problems/traveling_salesman.rs @@ -249,7 +249,7 @@ impl InitialProblem for TSProblem /// Randomly selects a subroute from the first parent and fills the remainder of the route with the nodes from the second parent in the order in which they appear, without duplicating any nodes in the selected subroute from the first parent. #[ derive( Debug, Clone ) ] -pub struct OrderedRouteCrossover {} +pub struct OrderedRouteCrossover; impl CrossoverOperator for OrderedRouteCrossover { @@ -290,7 +290,7 @@ impl CrossoverOperator for OrderedRouteCrossover /// Randomly mutates route in three different ways: by swapping two nodes, by reversing subroute, or by changing position of subroute. 
#[ derive( Debug, Clone ) ] -pub struct TSRouteMutation {} +pub struct TSRouteMutation; impl TSRouteMutation { diff --git a/module/move/optimization_tools/sudoku_results.md b/module/move/optimization_tools/sudoku_results.md index 71fc3fce6d..b4852cb6f8 100644 --- a/module/move/optimization_tools/sudoku_results.md +++ b/module/move/optimization_tools/sudoku_results.md @@ -1,47 +1,58 @@ Sudoku Problem -For parameters: - - temperature decrease coefficient : 0.9974; - - max mutations per dynasty : 277; - - mutation rate : 0.47; - - crossover rate : 0.41; +For hybrid parameters: + - temperature decrease coefficient : 1.0000, sum of differences: 5.84; + - max mutations per dynasty : 542, sum of differences: 32756.57; + - mutation rate : 0.27, sum of differences: 18.53; + - crossover rate : 0.57, sum of differences: 10.69; + - elitism rate : 0.15; + - max stale iterations : 1000, sum of differences: 30424.75; + - population size : 64, sum of differences: 12688.33; + - dynasties limit : 1662, sum of differences: 97853.79; + - level : Easy; + - execution time : 0.635s; + + + +For SA parameters: + - temperature decrease coefficient : 1.0000, sum of differences: 10.66; + - max mutations per dynasty : 621, sum of differences: 41110.52; + - mutation rate : 1.00, sum of differences: 0.00; + - crossover rate : 0.00, sum of differences: 0.00; + - elitism rate : 0.00; + - max stale iterations : 1000, sum of differences: 35882.85; + - population size : 1, sum of differences: 0.00; + - dynasties limit : 102, sum of differences: 283809.00; + - level : Easy; + - execution time : 0.052s; + + + +For GA parameters: + - temperature decrease coefficient : 0.8275, sum of differences: 7.72; + - max mutations per dynasty : 247, sum of differences: 37671.49; + - mutation rate : 0.29, sum of differences: 22.06; + - crossover rate : 0.59, sum of differences: 11.12; - elitism rate : 0.12; - - max stale iterations : 1000; - - -| Level | Population size | Dynasties limit | Execution time | -|----------------------|----------------------|----------------------|----------------------|- -| Easy | 2 | 500 | 0.265s | - - - -For parameters: - - temperature decrease coefficient : 0.9423; - - max mutations per dynasty : 340; - - mutation rate : 1.00; - - crossover rate : 0.00; - - elitism rate : 0.00; - - max stale iterations : 62; - - -| Level | Population size | Dynasties limit | Execution time | -|----------------------|----------------------|----------------------|----------------------|- -| Easy | 1 | 1357 | 0.026s | - - - -For parameters: - - temperature decrease coefficient : 0.9332; - - max mutations per dynasty : 240; - - mutation rate : 0.29; - - crossover rate : 0.50; - - elitism rate : 0.21; - - max stale iterations : 164; - - -| Level | Population size | Dynasties limit | Execution time | -|----------------------|----------------------|----------------------|----------------------|- -| Easy | 31 | 1757 | 0.294s | - - - + - max stale iterations : 206, sum of differences: 32779.01; + - population size : 112, sum of differences: 153695.91; + - dynasties limit : 1803, sum of differences: 112653.70; + - level : Easy; + - execution time : 0.547s; + + + +
+| mode   | temperature decrease coefficient | max mutations per dynasty | mutation rate | crossover rate | elitism rate | max stale iterations | population size | dynasties limit | level | execution time |
+|--------|----------------------------------|---------------------------|---------------|----------------|--------------|----------------------|-----------------|-----------------|-------|----------------|
+| hybrid | 1.0000                           | 542                       | 0.27          | 0.57           | 0.15         | 1000                 | 64              | 1662            | Easy  | 0.635s         |
+| SA     | 1.0000                           | 621                       | 1.00          | 0.00           | 0.00         | 1000                 | 1               | 102             | Easy  | 0.052s         |
+| GA     | 0.8275                           | 247                       | 0.29          | 0.59           | 0.12         | 206                  | 112             | 1803            | Easy  | 0.547s         |
\ No newline at end of file diff --git a/module/move/optimization_tools/tests/opt_params.rs b/module/move/optimization_tools/tests/opt_params.rs index 3989fe4775..0a4803ef83 100644 --- a/module/move/optimization_tools/tests/opt_params.rs +++ b/module/move/optimization_tools/tests/opt_params.rs @@ -1,5 +1,5 @@ use iter_tools::Itertools; -use optimization_tools::*; +use optimization_tools::{ optimal_params_search::nelder_mead::Stats, * }; use optimal_params_search::OptimalParamsConfig; use problems::{ sudoku::*, traveling_salesman::* }; use hybrid_optimizer::*; @@ -7,66 +7,119 @@ use hybrid_optimizer::*; mod tools; use tools::*; -fn write_results( filename : String, title : String, hybrid_res : Vec< f64 >, sa_res : Vec< f64 >, ga_res : Vec< f64 > ) -> Result< (), std::io::Error > +fn named_results_list( params : Vec< f64 >, stats : Stats ) -> Vec< ( String, Option< String >, String ) > +{ + let mut str_params = Vec::new(); + str_params.push( format!( "{:.4}", params[ 0 ] ) ); + str_params.push( format!( "{:?}", params[ 1 ] as usize ) ); + str_params.push( format!( "{:.2}", params[ 2 ] ) ); + str_params.push( format!( "{:.2}", params[ 3 ] ) ); + str_params.push( format!( "{:.2}", ( 1.0 - params[ 2 ] - params[ 3 ] ) ) ); + str_params.push( format!( "{:?}", params[ 4 ] as usize ) ); + str_params.push( format!( "{}", params[ 5 ] as usize ) ); + str_params.push( format!( "{}", params[ 6 ] as usize ) ); + + let params_name = + [ + "temperature decrease coefficient", + "max mutations per dynasty", + "mutation rate", + "crossover rate", + "elitism rate", + "max stale iterations", + "population size", + "dynasties limit", + ]; + + let mut stats_vec = stats.diff_sum.iter().cloned().map( | val | Some( format!( "{:.2}", val ) ) ).collect_vec(); + stats_vec.insert( 4, None ); + + let mut list = Vec::new(); + + for ( ( name, stats ), param ) in params_name.into_iter().zip( stats_vec ).zip( str_params ) + { + list.push( ( name.to_owned(), stats, param ) ); + } + + list +} + +type ResWithStats = Vec< ( String, Option< String >, String ) >; + +fn write_results( + filename : String, + title : String, + hybrid_res : ResWithStats, + sa_res : ResWithStats, + ga_res : ResWithStats, +) -> Result< (), std::io::Error > { let mut file = std::fs::File::create( format!( "{}.md", filename ) )?; - std::io::Write::write(&mut file, format!( "{}\n\n", title).as_bytes() )?; + std::io::Write::write( &mut file, format!( "{}\n\n", title ).as_bytes() )?; - for params in [ hybrid_res, sa_res, ga_res ] + for ( mode, params ) in [ ( "hybrid", &hybrid_res ), ( "SA", &sa_res ), ( "GA", &ga_res ) ] { - std::io::Write::write(&mut file, b"For parameters:\n")?; - std::io::Write::write( &mut file,format!( " - temperature decrease coefficient : {:.4};\n", params[ 0 ] ).as_bytes() )?; - std::io::Write::write( &mut file,format!( " - max mutations per dynasty : {:?};\n", params[ 1 ] as usize ).as_bytes() )?; - std::io::Write::write( &mut file,format!( " - mutation rate : {:.2};\n", params[ 2 ] ).as_bytes() )?; - std::io::Write::write( &mut file,format!( " - crossover rate : {:.2};\n", params[ 3 ] ).as_bytes() )?; - std::io::Write::write( &mut file,format!( " - elitism rate : {:.2};\n", ( 1.0 - params[ 2 ] - params[ 3 ] ) ).as_bytes() )?; - std::io::Write::write( &mut file,format!( " - max stale iterations : {:?};\n", params[ 4 ] as usize ).as_bytes() )?; - - let columns = [ "Level", "Population size", "Dynasties limit", "Execution time" ]; - let mut title = String::from( "| " ); - let mut line = String::from( "|-" ); - let mut result = 
String::from( "| " ); - let res_columns = - [ - String::from( "Easy" ), - ( params[ 5 ] as usize ).to_string(), - ( params[ 6 ] as usize ).to_string(), - format!( "{:.3}s", params[ 7 ] ) - ]; - for ( index, column ) in columns.iter().enumerate() + std::io::Write::write(&mut file, format!( "For {} parameters:\n", mode ).as_bytes() )?; + for i in 0..params.len() { - title.push_str( column ); - result.push_str( &res_columns[ index ] ); - for _ in 0..column.len() + let mut stats_str = String::new(); + if let Some( stats ) = ¶ms[ i ].1 { - line.push( '-' ); + stats_str = format!( ", sum of differences: {}", stats ); } - for _ in 0..( 20 - column.len() ) + if mode == "SA" + { + if [ 2, 3, 4, 6 ].contains( &i ) + { + std::io::Write::write( &mut file,format!( " - {} : {}{};\n", params[ i ].0, params[ i ].2, stats_str ).as_bytes() )?; + continue; + } + } + std::io::Write::write( &mut file,format!( " - {} : {}{};\n", params[ i ].0, params[ i ].2, stats_str ).as_bytes() )?; + } + + std::io::Write::write( &mut file, format!("\n\n\n" ).as_bytes() )?; + } + + //table + use markdown_table::MarkdownTable; + + let mut table_vec = Vec::new(); + let mut headers = vec![ String::from( "mode" ) ]; + for i in 0..hybrid_res.len() + { + headers.push( hybrid_res[ i ].0.clone().replace( " ", "\n") ); + } + + table_vec.push( headers ); + for ( mode, params ) in [ ( "hybrid", &hybrid_res ), ( "SA", &sa_res ), ( "GA", &ga_res ) ] + { + let mut row = Vec::new(); + for i in 0..params.len() + 1 + { + if i == 0 { - title.push( ' ' ); - line.push( '-' ); + row.push( mode.to_owned() ); } - for _ in 0..( 20 - res_columns[ index ].len() ) + else { - result.push( ' ' ); + row.push( params[ i - 1 ].2.clone() ); } - line.push_str( "-|-" ); - title.push_str( " | " ); - result.push_str( " | " ); } - - std::io::Write::write( &mut file, format!("\n\n{}\n", title ).as_bytes() )?; - std::io::Write::write( &mut file, format!("{}\n", line ).as_bytes() )?; - std::io::Write::write( &mut file, format!("{}\n\n\n\n", result ).as_bytes() )?; + + table_vec.push( row ); } + let table = MarkdownTable::new( table_vec ).as_markdown().unwrap(); + + std::io::Write::write( &mut file, format!( "{}", table ).as_bytes() )?; Ok( () ) } #[ ignore ] #[ test ] -fn find_opt_params_sudoku() -> Result< (), Box< dyn std::error::Error > > +fn find_opt_params_sudoku() -> Result< (), Box< dyn std::error::Error > > { let easy = r#" 080924060 @@ -83,43 +136,76 @@ fn find_opt_params_sudoku() -> Result< (), Box< dyn std::error::Error > > logger_init(); log::set_max_level( log::LevelFilter::Info ); + let dir_path = format!( "{}/target", crate::simplex::drawing::workspace_dir().to_string_lossy() ); + _ = std::fs::create_dir( &dir_path ); + let path = format!( "{}/output_sudoku", dir_path ); + let config = OptimalParamsConfig::default(); let initial = SudokuInitial::new( Board::from( easy ) ); - let hybrid_problem = Problem::new( initial.clone(), BestRowsColumnsCrossover, RandomPairInBlockMutation ); - let res = optimal_params_search::find_hybrid_optimal_params( config.clone(), hybrid_optimizer::starting_params_for_hybrid()?, hybrid_problem ); + let hybrid_problem = Problem::new( + initial.clone(), + BestRowsColumnsCrossover, + RandomPairInBlockMutation, + ); + let res = optimal_params_search::find_hybrid_optimal_params( + config.clone(), + hybrid_optimizer::starting_params_for_hybrid()?, + hybrid_problem, + Some( path.clone() ), + ); assert!( res.is_ok() ); - let mut hybrid_res = Vec::new(); if let Ok( solution ) = res { - hybrid_res = 
solution.point.coords.into_iter().map( | val | val.into_inner() ).collect_vec(); - hybrid_res.push( solution.objective ); + hybrid_res = named_results_list( solution.point.coords.into_iter().map( | val | val.into_inner() ).collect_vec(), solution.stats.unwrap() ); + hybrid_res.push( ( String::from( "level" ), None, format!( "{:?}", Board::from( easy ).calculate_level() ) ) ); + hybrid_res.push( ( String::from( "execution time" ), None, format!( "{:.3}s", solution.objective ) ) ); } // SA - let hybrid_problem = Problem::new( initial.clone(), BestRowsColumnsCrossover, RandomPairInBlockMutation ); - let res = optimal_params_search::find_hybrid_optimal_params( config.clone(), hybrid_optimizer::starting_params_for_sa()?, hybrid_problem ); + let hybrid_problem = Problem::new( + initial.clone(), + BestRowsColumnsCrossover, + RandomPairInBlockMutation, + ); + let res = optimal_params_search::find_hybrid_optimal_params( + config.clone(), + hybrid_optimizer::starting_params_for_sa()?, + hybrid_problem, + Some( path.clone() ), + ); assert!( res.is_ok() ); let mut sa_res = Vec::new(); if let Ok( solution ) = res { - sa_res = solution.point.coords.into_iter().map( | val | val.into_inner() ).collect_vec(); - sa_res.push( solution.objective ); + sa_res = named_results_list( solution.point.coords.into_iter().map( | val | val.into_inner() ).collect_vec(), solution.stats.unwrap() ); + sa_res.push( ( String::from( "level" ), None, format!( "{:?}", Board::from( easy ).calculate_level() ) ) ); + sa_res.push( ( String::from( "execution time" ), None, format!( "{:.3}s", solution.objective ) ) ); } // GA - let hybrid_problem = Problem::new( initial.clone(), BestRowsColumnsCrossover, RandomPairInBlockMutation ); - let res = optimal_params_search::find_hybrid_optimal_params( config, hybrid_optimizer::starting_params_for_ga()?, hybrid_problem ); + let hybrid_problem = Problem::new( + initial.clone(), + BestRowsColumnsCrossover, + RandomPairInBlockMutation, + ); + let res = optimal_params_search::find_hybrid_optimal_params( + config, + hybrid_optimizer::starting_params_for_ga()?, + hybrid_problem, + Some( path ), + ); assert!( res.is_ok() ); let mut ga_res = Vec::new(); if let Ok( solution ) = res { - ga_res = solution.point.coords.into_iter().map( | val | val.into_inner() ).collect_vec(); - ga_res.push( solution.objective ); + ga_res = named_results_list( solution.point.coords.into_iter().map( | val | val.into_inner() ).collect_vec(), solution.stats.unwrap() ); + ga_res.push( ( String::from( "level" ), None, format!( "{:?}", Board::from( easy ).calculate_level() ) ) ); + ga_res.push( ( String::from( "execution time" ), None, format!( "{:.3}s", solution.objective ) ) ); } write_results( String::from( "sudoku_results" ), String::from( "Sudoku Problem" ), hybrid_res, sa_res, ga_res )?; Ok( () ) @@ -127,44 +213,80 @@ fn find_opt_params_sudoku() -> Result< (), Box< dyn std::error::Error > > #[ ignore ] #[ test ] -fn find_opt_params_tsp() -> Result< (), Box< dyn std::error::Error > > +fn find_opt_params_tsp() -> Result< (), Box< dyn std::error::Error > > { logger_init(); - log::set_max_level( log::LevelFilter::Warn ); + log::set_max_level( log::LevelFilter::Info ); + + let dir_path = format!( "{}/target", crate::simplex::drawing::workspace_dir().to_string_lossy() ); + _ = std::fs::create_dir( &dir_path ); + let path = format!( "{}/output_tsp", dir_path ); let config = OptimalParamsConfig::default(); - let initial = TSProblem { graph : TSPGraph::default(), starting_node : NodeIndex( 1 ) }; + let graph = 
TSPGraph::default(); + let number_of_nodes = graph.nodes().len(); + let initial = TSProblem { graph, starting_node : NodeIndex( 1 ) }; - let hybrid_problem = Problem::new( initial.clone(), OrderedRouteCrossover{}, TSRouteMutation{} ); - let res = optimal_params_search::find_hybrid_optimal_params( config.clone(), hybrid_optimizer::starting_params_for_hybrid()?, hybrid_problem ); + let hybrid_problem = Problem::new( + initial.clone(), + OrderedRouteCrossover, + TSRouteMutation, + ); + let res = optimal_params_search::find_hybrid_optimal_params( + config.clone(), + hybrid_optimizer::starting_params_for_hybrid()?, + hybrid_problem, + Some( path.clone() ), + ); assert!( res.is_ok() ); let mut hybrid_res = Vec::new(); if let Ok( solution ) = res { - hybrid_res = solution.point.coords.into_iter().map( | val | val.into_inner() ).collect_vec(); - hybrid_res.push( solution.objective ); + hybrid_res = named_results_list( solution.point.coords.into_iter().map( | val | val.into_inner() ).collect_vec(), solution.stats.unwrap() ); + hybrid_res.push( ( String::from( "number of nodes" ), None, number_of_nodes.to_string() ) ); + hybrid_res.push( ( String::from( "execution time" ), None, format!( "{:.3}s", solution.objective ) ) ); } // SA - let hybrid_problem = Problem::new( initial.clone(), OrderedRouteCrossover{}, TSRouteMutation{} ); - let res = optimal_params_search::find_hybrid_optimal_params( config.clone(), hybrid_optimizer::starting_params_for_sa()?, hybrid_problem ); + let hybrid_problem = Problem::new( + initial.clone(), + OrderedRouteCrossover, + TSRouteMutation, + ); + let res = optimal_params_search::find_hybrid_optimal_params( + config.clone(), + hybrid_optimizer::starting_params_for_sa()?, + hybrid_problem, + Some( path.clone() ), + ); assert!( res.is_ok() ); let mut sa_res = Vec::new(); if let Ok( solution ) = res { - sa_res = solution.point.coords.into_iter().map( | val | val.into_inner() ).collect_vec(); - sa_res.push( solution.objective ); + sa_res = named_results_list( solution.point.coords.into_iter().map( | val | val.into_inner() ).collect_vec(), solution.stats.unwrap() ); + sa_res.push( ( String::from( "number of nodes" ), None, number_of_nodes.to_string() ) ); + sa_res.push( ( String::from( "execution time" ), None, format!( "{:.3}s", solution.objective ) ) ); } // GA - let hybrid_problem = Problem::new( initial, OrderedRouteCrossover{}, TSRouteMutation{} ); - let res = optimal_params_search::find_hybrid_optimal_params( config, hybrid_optimizer::starting_params_for_ga()?, hybrid_problem ); + let hybrid_problem = Problem::new( + initial, + OrderedRouteCrossover, + TSRouteMutation, + ); + let res = optimal_params_search::find_hybrid_optimal_params( + config, + hybrid_optimizer::starting_params_for_ga()?, + hybrid_problem, + Some( path ), + ); assert!( res.is_ok() ); let mut ga_res = Vec::new(); if let Ok( solution ) = res { - ga_res = solution.point.coords.into_iter().map( | val | val.into_inner() ).collect_vec(); - ga_res.push( solution.objective ); + ga_res = named_results_list( solution.point.coords.into_iter().map( | val | val.into_inner() ).collect_vec(), solution.stats.unwrap() ); + ga_res.push( ( String::from( "number of nodes" ), None, number_of_nodes.to_string() ) ); + ga_res.push( ( String::from( "execution time" ), None, format!( "{:.3}s", solution.objective ) ) ); } write_results( String::from( "tsp_results" ), String::from( "Traveling Salesman Problem" ), hybrid_res, sa_res, ga_res )?;
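Since find_hybrid_optimal_params now threads an intermediate_results_file path down to the optimizer, reruns can reuse earlier evaluations. The sketch below shows a round trip through the reworked results_serialize API; it assumes the module is publicly reachable under the path used here, and the file path and numbers are illustrative only.

use std::fs::OpenOptions;
use std::sync::{ Arc, Mutex };
use optimization_tools::optimal_params_search::results_serialize::{ read_results, save_result };

fn cache_roundtrip() -> Result< (), Box< dyn std::error::Error > >
{
  // Open ( or create ) the intermediate results file, mirroring set_save_results_file.
  let path = "target/output_example";
  let file = OpenOptions::new().write( true ).append( true ).create( true ).open( path )?;
  let file = Arc::new( Mutex::new( file ) );

  // Persist one evaluation : parameter vector and the objective value computed for it.
  save_result( vec![ 0.999, 300.0, 0.25, 0.5 ], 0.635, file.clone() )?;

  // A later run reads the cached points and can seed the optimizer via set_calculated_results.
  let cached = read_results( path )?;
  assert!( !cached.is_empty() );
  Ok( () )
}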