Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: removed ExtraIn and ExtraOut from LocalSearchOptimizer interface #34

Merged
merged 2 commits into from
Aug 25, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ Rust library for local search optimization
All of the algorithms are parallelized with Rayon.

1. Hill Climbing.
2. Tabu Search.
2. Tabu Search. To use this optimizer you also need to implement your problem-specific tabu list.
3. Simulated Annealing.
4. Epsilon Greedy Search, a variant of Hill Climbing which accepts the trial solution with a constant probability even if the score of the trial solution is worse than the previous one.
5. Relative Annealing, a variant of Simulated Annealing which uses relative score diff to calculate transition probability.
Expand Down Expand Up @@ -110,7 +110,7 @@ fn main() {
pb.set_position(op.iter as u64);
};

let res = opt.run(&model, None, n_iter, time_limit, Some(&callback), ());
let res = opt.run(&model, None, n_iter, time_limit, Some(&callback));
pb.finish();
dbg!(res.unwrap());
}
Expand Down
2 changes: 1 addition & 1 deletion examples/quadratic_model.rs
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ fn main() {
pb.set_position(op.iter as u64);
};

let res = opt.run(&model, None, n_iter, time_limit, Some(&callback), ());
let res = opt.run(&model, None, n_iter, time_limit, Some(&callback));
pb.finish();
dbg!(res.unwrap());
}
36 changes: 19 additions & 17 deletions examples/tsp_model.rs
Original file line number Diff line number Diff line change
Expand Up @@ -179,24 +179,32 @@ impl DequeTabuList {
}
}

impl TabuList for DequeTabuList {
type Item = (SolutionType, TransitionType);
impl Default for DequeTabuList {
fn default() -> Self {
Self::new(10)
}
}

fn contains(&self, item: &Self::Item) -> bool {
impl TabuList<TSPModel> for DequeTabuList {
fn contains(&self, item: &(SolutionType, TransitionType)) -> bool {
let (_, (_, inserted_edges)) = item;
inserted_edges
.iter()
.any(|edge| self.buff.iter().any(|e| *e == *edge))
}

fn append(&mut self, item: Self::Item) {
fn append(&mut self, item: (SolutionType, TransitionType)) {
let (_, (removed_edges, _)) = item;
for edge in removed_edges {
if self.buff.iter().all(|e| *e != edge) {
self.buff.append(edge);
}
}
}

fn set_size(&mut self, n: usize) {
self.buff = RingBuffer::new(n);
}
}

// The output is wrapped in a Result to allow matching on errors
Expand Down Expand Up @@ -261,14 +269,13 @@ fn main() {

println!("run hill climbing");
let optimizer = HillClimbingOptimizer::new(1000, 200);
let (final_solution, final_score, _) = optimizer
let (final_solution, final_score) = optimizer
.run(
&tsp_model,
initial_solution.clone(),
n_iter,
time_limit,
Some(&callback),
(),
)
.unwrap();
println!(
Expand All @@ -280,16 +287,14 @@ fn main() {
pb.reset();

println!("run tabu search");
let tabu_list = DequeTabuList::new(20);
let optimizer = TabuSearchOptimizer::new(patience, 200, 10);
let (final_solution, final_score, _) = optimizer
let optimizer = TabuSearchOptimizer::<TSPModel, DequeTabuList>::new(patience, 200, 10, 20);
let (final_solution, final_score) = optimizer
.run(
&tsp_model,
initial_solution.clone(),
n_iter,
time_limit,
Some(&callback),
tabu_list,
)
.unwrap();
println!(
Expand All @@ -301,15 +306,14 @@ fn main() {
pb.reset();

println!("run annealing");
let optimizer = SimulatedAnnealingOptimizer::new(patience, 200);
let (final_solution, final_score, _) = optimizer
let optimizer = SimulatedAnnealingOptimizer::new(patience, 200, 200.0, 50.0);
let (final_solution, final_score) = optimizer
.run(
&tsp_model,
initial_solution.clone(),
n_iter,
time_limit,
Some(&callback),
(200.0, 50.0),
)
.unwrap();
println!(
Expand All @@ -322,14 +326,13 @@ fn main() {

println!("run epsilon greedy");
let optimizer = EpsilonGreedyOptimizer::new(patience, 200, 10, 0.3);
let (final_solution, final_score, _) = optimizer
let (final_solution, final_score) = optimizer
.run(
&tsp_model,
initial_solution.clone(),
n_iter,
time_limit,
Some(&callback),
(),
)
.unwrap();
println!(
Expand All @@ -342,14 +345,13 @@ fn main() {

println!("run relative annealing");
let optimizer = RelativeAnnealingOptimizer::new(patience, 200, 10, 1e1);
let (final_solution, final_score, _) = optimizer
let (final_solution, final_score) = optimizer
.run(
&tsp_model,
initial_solution,
n_iter,
time_limit,
Some(&callback),
(),
)
.unwrap();
println!(
Expand Down
16 changes: 4 additions & 12 deletions src/optim/base.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,6 @@ use crate::{callback::OptCallbackFn, Duration, OptModel};
/// Optimizer that implements local search algorithm
#[auto_impl(&, Box, Rc, Arc)]
pub trait LocalSearchOptimizer<M: OptModel> {
/// Extra input type
type ExtraIn;
/// Extra output type
type ExtraOut;

/// Start optimization
fn optimize<F>(
&self,
Expand All @@ -20,8 +15,7 @@ pub trait LocalSearchOptimizer<M: OptModel> {
n_iter: usize,
time_limit: Duration,
callback: Option<&F>,
extra_in: Self::ExtraIn,
) -> (M::SolutionType, M::ScoreType, Self::ExtraOut)
) -> (M::SolutionType, M::ScoreType)
where
M: OptModel,
F: OptCallbackFn<M::SolutionType, M::ScoreType>;
Expand All @@ -34,8 +28,7 @@ pub trait LocalSearchOptimizer<M: OptModel> {
n_iter: usize,
time_limit: Duration,
callback: Option<&F>,
extra_in: Self::ExtraIn,
) -> AnyResult<(M::SolutionType, M::ScoreType, Self::ExtraOut)>
) -> AnyResult<(M::SolutionType, M::ScoreType)>
where
M: OptModel,
F: OptCallbackFn<M::SolutionType, M::ScoreType>,
Expand All @@ -51,18 +44,17 @@ pub trait LocalSearchOptimizer<M: OptModel> {
let (initial_solution, initial_score) =
model.preprocess_solution(initial_solution, initial_score)?;

let (solution, score, extra) = self.optimize(
let (solution, score) = self.optimize(
model,
initial_solution,
initial_score,
n_iter,
time_limit,
callback,
extra_in,
);

let (solution, score) = model.postprocess_solution(solution, score);
Ok((solution, score, extra))
Ok((solution, score))
}
}

Expand Down
7 changes: 1 addition & 6 deletions src/optim/epsilon_greedy.rs
Original file line number Diff line number Diff line change
Expand Up @@ -39,8 +39,6 @@ impl EpsilonGreedyOptimizer {
}

impl<M: OptModel> LocalSearchOptimizer<M> for EpsilonGreedyOptimizer {
type ExtraIn = ();
type ExtraOut = ();
/// Start optimization
///
/// - `model` : the model to optimize
Expand All @@ -49,7 +47,6 @@ impl<M: OptModel> LocalSearchOptimizer<M> for EpsilonGreedyOptimizer {
/// - `n_iter`: maximum iterations
/// - `time_limit`: maximum iteration time
/// - `callback` : callback function that will be invoked at the end of each iteration
/// - `_extra_in` : not used
fn optimize<F>(
&self,
model: &M,
Expand All @@ -58,8 +55,7 @@ impl<M: OptModel> LocalSearchOptimizer<M> for EpsilonGreedyOptimizer {
n_iter: usize,
time_limit: Duration,
callback: Option<&F>,
_extra_in: Self::ExtraIn,
) -> (M::SolutionType, M::ScoreType, Self::ExtraOut)
) -> (M::SolutionType, M::ScoreType)
where
M: OptModel + Sync + Send,
F: OptCallbackFn<M::SolutionType, M::ScoreType>,
Expand All @@ -77,7 +73,6 @@ impl<M: OptModel> LocalSearchOptimizer<M> for EpsilonGreedyOptimizer {
n_iter,
time_limit,
callback,
_extra_in,
)
}
}
8 changes: 2 additions & 6 deletions src/optim/generic.rs
Original file line number Diff line number Diff line change
Expand Up @@ -55,8 +55,6 @@ where
FT: TransitionProbabilityFn<ST>,
M: OptModel<ScoreType = ST>,
{
type ExtraIn = ();
type ExtraOut = ();
/// Start optimization
///
/// - `model` : the model to optimize
Expand All @@ -65,7 +63,6 @@ where
/// - `n_iter`: maximum iterations
/// - `time_limit`: maximum iteration time
/// - `callback` : callback function that will be invoked at the end of each iteration
/// - `_extra_in` : not used
fn optimize<F>(
&self,
model: &M,
Expand All @@ -74,8 +71,7 @@ where
n_iter: usize,
time_limit: Duration,
callback: Option<&F>,
_extra_in: Self::ExtraIn,
) -> (M::SolutionType, M::ScoreType, Self::ExtraOut)
) -> (M::SolutionType, M::ScoreType)
where
F: OptCallbackFn<M::SolutionType, M::ScoreType>,
{
Expand Down Expand Up @@ -141,6 +137,6 @@ where
}

let best_solution = (*best_solution.borrow()).clone();
(best_solution, best_score, ())
(best_solution, best_score)
}
}
7 changes: 1 addition & 6 deletions src/optim/hill_climbing.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,9 +19,6 @@ impl HillClimbingOptimizer {
}

impl<M: OptModel> LocalSearchOptimizer<M> for HillClimbingOptimizer {
type ExtraIn = ();
type ExtraOut = ();

/// Start optimization
///
/// - `model` : the model to optimize
Expand All @@ -39,8 +36,7 @@ impl<M: OptModel> LocalSearchOptimizer<M> for HillClimbingOptimizer {
n_iter: usize,
time_limit: Duration,
callback: Option<&F>,
_extra_in: Self::ExtraIn,
) -> (M::SolutionType, M::ScoreType, Self::ExtraOut)
) -> (M::SolutionType, M::ScoreType)
where
F: OptCallbackFn<M::SolutionType, M::ScoreType>,
{
Expand All @@ -52,7 +48,6 @@ impl<M: OptModel> LocalSearchOptimizer<M> for HillClimbingOptimizer {
n_iter,
time_limit,
callback,
_extra_in,
)
}
}
7 changes: 1 addition & 6 deletions src/optim/logistic_annealing.rs
Original file line number Diff line number Diff line change
Expand Up @@ -44,8 +44,6 @@ impl LogisticAnnealingOptimizer {
}

impl<M: OptModel<ScoreType = NotNan<f64>>> LocalSearchOptimizer<M> for LogisticAnnealingOptimizer {
type ExtraIn = ();
type ExtraOut = ();
/// Start optimization
///
/// - `model` : the model to optimize
Expand All @@ -54,7 +52,6 @@ impl<M: OptModel<ScoreType = NotNan<f64>>> LocalSearchOptimizer<M> for LogisticA
/// - `n_iter`: maximum iterations
/// - `time_limit`: maximum iteration time
/// - `callback` : callback function that will be invoked at the end of each iteration
/// - `_extra_in` : not used
fn optimize<F>(
&self,
model: &M,
Expand All @@ -63,8 +60,7 @@ impl<M: OptModel<ScoreType = NotNan<f64>>> LocalSearchOptimizer<M> for LogisticA
n_iter: usize,
time_limit: Duration,
callback: Option<&F>,
_extra_in: Self::ExtraIn,
) -> (M::SolutionType, M::ScoreType, Self::ExtraOut)
) -> (M::SolutionType, M::ScoreType)
where
F: OptCallbackFn<M::SolutionType, M::ScoreType>,
{
Expand All @@ -82,7 +78,6 @@ impl<M: OptModel<ScoreType = NotNan<f64>>> LocalSearchOptimizer<M> for LogisticA
n_iter,
time_limit,
callback,
_extra_in,
)
}
}
Expand Down
7 changes: 1 addition & 6 deletions src/optim/relative_annealing.rs
Original file line number Diff line number Diff line change
Expand Up @@ -44,9 +44,6 @@ impl RelativeAnnealingOptimizer {
}

impl<M: OptModel<ScoreType = NotNan<f64>>> LocalSearchOptimizer<M> for RelativeAnnealingOptimizer {
type ExtraIn = ();
type ExtraOut = ();

/// Start optimization
///
/// - `model` : the model to optimize
Expand All @@ -64,8 +61,7 @@ impl<M: OptModel<ScoreType = NotNan<f64>>> LocalSearchOptimizer<M> for RelativeA
n_iter: usize,
time_limit: Duration,
callback: Option<&F>,
_extra_in: Self::ExtraIn,
) -> (M::SolutionType, M::ScoreType, Self::ExtraOut)
) -> (M::SolutionType, M::ScoreType)
where
F: OptCallbackFn<M::SolutionType, M::ScoreType>,
{
Expand All @@ -83,7 +79,6 @@ impl<M: OptModel<ScoreType = NotNan<f64>>> LocalSearchOptimizer<M> for RelativeA
n_iter,
time_limit,
callback,
_extra_in,
)
}
}
Expand Down
Loading