Skip to content

Commit

Permalink
Merge in-progress Square-1 code (#61)
Browse files Browse the repository at this point in the history
I'm about to start porting `GenericPuzzle` and related code from
`phase2-symmcoords`, and coordinating two branches is likely to be very
messy. It's simpler to merge the Square-1 code, which is stable apart
from the Square-1 specific changes. Those changes have been put behind an
env var. To test Square-1:

```shell
env EXPERIMENTAL_SQUARE1=true cargo run --release -- scramble sq1
```

Co-authored-by: Jeremy Fleischman <[email protected]>
  • Loading branch information
lgarron and jfly authored Oct 18, 2024
2 parents db57c28 + 1342242 commit 34b3d1a
Show file tree
Hide file tree
Showing 29 changed files with 1,016 additions and 107 deletions.
2 changes: 1 addition & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ license = "MPL-2.0"
edition = "2021"

[workspace.dependencies]
cubing = "0.13.2"
cubing = "0.13.4"

[workspace.metadata.bin]
cbindgen = { version = "0.26.0" }
Expand Down
Binary file modified bun.lockb
Binary file not shown.
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
"bun": ">=1.1.5"
},
"dependencies": {
"cubing": "^0.49.0",
"cubing": "^0.52.3",
"getbuiltinmodule-ponyfill": "^1.0.1"
},
"devDependencies": {
Expand Down
2 changes: 1 addition & 1 deletion src/rs-wasm/wasm_api.rs
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ pub fn wasmTwsearch(
None => Generators::Default,
};

let idfs = IDFSearch::try_new(
let idfs = <IDFSearch>::try_new(
kpuzzle,
target_pattern,
generators,
Expand Down
2 changes: 1 addition & 1 deletion src/rs/_cli/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -146,7 +146,7 @@ fn search(search_command_args: SearchCommandArgs) -> Result<(), CommandError> {
}
};

let mut idf_search = IDFSearch::try_new(
let mut idf_search = <IDFSearch>::try_new(
kpuzzle,
target_pattern,
search_command_args.generator_args.parse(),
Expand Down
2 changes: 1 addition & 1 deletion src/rs/_cli/serve/serve.rs
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ fn solve_pattern(
Ok(search_pattern) => search_pattern,
Err(e) => return Response::text(e.to_string()).with_status_code(400),
};
let mut search = match IDFSearch::try_new(
let mut search = match <IDFSearch>::try_new(
kpuzzle,
target_pattern,
Generators::Custom(CustomGenerators {
Expand Down
6 changes: 3 additions & 3 deletions src/rs/_internal/canonical_fsm/canonical_fsm.rs
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@ pub struct CanonicalFSM {
impl CanonicalFSM {
// TODO: Return a more specific error.
pub fn try_new(generators: SearchGenerators) -> Result<CanonicalFSM, SearchError> {
let num_move_classes = generators.grouped.len();
let num_move_classes = generators.by_move_class.len();
if num_move_classes > MAX_NUM_MOVE_CLASSES {
return Err(SearchError {
description: "Too many move classes!".to_owned(),
Expand All @@ -113,8 +113,8 @@ impl CanonicalFSM {
for i in 0..num_move_classes {
for j in 0..num_move_classes {
if !do_transformations_commute(
&generators.grouped[i][0].transformation,
&generators.grouped[j][0].transformation,
&generators.by_move_class[i][0].transformation,
&generators.by_move_class[j][0].transformation,
) {
commutes[i] &= MoveClassMask(!(1 << j));
commutes[j] &= MoveClassMask(!(1 << i));
Expand Down
67 changes: 33 additions & 34 deletions src/rs/_internal/canonical_fsm/search_generators.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,13 +6,18 @@ use cubing::{
};
use rand::{seq::SliceRandom, thread_rng};

use crate::_internal::{
cli::options::{Generators, MetricEnum},
SearchError,
use crate::{
_internal::{
cli::options::{Generators, MetricEnum},
SearchError,
},
index_type,
};

use super::MoveClassIndex;

index_type!(FlatMoveIndex);

#[derive(Clone, Debug)]
pub struct MoveTransformationInfo {
#[allow(dead_code)] // TODO
Expand All @@ -22,14 +27,16 @@ pub struct MoveTransformationInfo {
pub transformation: KTransformation,
#[allow(dead_code)] // TODO
pub inverse_transformation: KTransformation,

pub flat_move_index: FlatMoveIndex,
}

pub type MoveTransformationMultiples = Vec<MoveTransformationInfo>;

#[derive(Clone, Debug)]
pub struct SearchGenerators {
// TODO: figure out the most reusable abstraction
pub grouped: Vec<MoveTransformationMultiples>,
pub by_move_class: Vec<MoveTransformationMultiples>,
pub flat: Vec<MoveTransformationInfo>, // TODO: avoid duplicate data
pub by_move: HashMap<Move, (MoveClassIndex, MoveTransformationInfo)>, // TODO: avoid duplicate data
}
Expand Down Expand Up @@ -120,47 +127,39 @@ impl SearchGenerators {
let mut move_multiple_transformation =
KTransformationBuffer::from(move_transformation.clone());

let mut populate_fields = |r#move: Move, transformation: &KTransformation| {
let info = MoveTransformationInfo {
r#move: r#move.clone(),
// metric_turns: 1, // TODO
transformation: transformation.clone(),
inverse_transformation: transformation.invert(),
flat_move_index: FlatMoveIndex(flat.len()),
};
multiples.push(info.clone());
flat.push(info.clone());
by_move.insert(r#move, (move_class_index, info));
};

match metric {
MetricEnum::Hand => {
let mut amount: i32 = r#move.amount;
while move_multiple_transformation.current() != &identity_transformation {
let mut move_multiple = r#move.clone();
move_multiple.amount = canonicalize_center_amount(order, amount);
let info = MoveTransformationInfo {
r#move: move_multiple.clone(),
// metric_turns: 1, // TODO
transformation: move_multiple_transformation.current().clone(),
inverse_transformation: move_multiple_transformation.current().invert(),
};
multiples.push(info.clone());
flat.push(info.clone());
by_move.insert(move_multiple, (move_class_index, info));
populate_fields(move_multiple, move_multiple_transformation.current());

amount += r#move.amount;
move_multiple_transformation.apply_transformation(&move_transformation);
}
}
MetricEnum::Quantum => {
let info = MoveTransformationInfo {
r#move: r#move.clone(),
// metric_turns: 1, // TODO
transformation: move_multiple_transformation.current().clone(),
inverse_transformation: move_multiple_transformation.current().invert(),
};
let is_self_inverse = info.transformation == info.inverse_transformation;
multiples.push(info.clone());
flat.push(info.clone());
by_move.insert(r#move.clone(), (move_class_index, info));
if !is_self_inverse {
let info = MoveTransformationInfo {
r#move: r#move.invert(),
// metric_turns: 1, // TODO
transformation: move_multiple_transformation.current().invert(),
inverse_transformation: move_multiple_transformation.current().clone(),
};
multiples.push(info.clone());
flat.push(info.clone());
by_move.insert(r#move.invert(), (move_class_index, info));
let transformation = move_multiple_transformation.current();
populate_fields(r#move.clone(), transformation);

let inverse_transformation = transformation.invert();
if transformation != &inverse_transformation {
// TODO: avoid redundant calculations?
populate_fields(r#move.invert(), &inverse_transformation);
}
}
}
Expand All @@ -173,7 +172,7 @@ impl SearchGenerators {
}

Ok(Self {
grouped,
by_move_class: grouped,
flat,
by_move,
})
Expand Down
1 change: 0 additions & 1 deletion src/rs/_internal/cli/io.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@ use crate::_internal::ArgumentError;
use serde::Deserialize;

pub fn read_to_json<T: for<'a> Deserialize<'a>>(input_file: &Path) -> Result<T, ArgumentError> {
format!("Rewriting: {:?}", input_file);
let input_str = read_to_string(input_file).or(Err("Could not read input file."))?;
let input_parsed: T =
serde_json::from_str(&input_str).or(Err("Input file is not valid JSON."))?;
Expand Down
2 changes: 1 addition & 1 deletion src/rs/_internal/gods_algorithm/gods_algorithm_table.rs
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,7 @@ impl GodsAlgorithmSearch {
let mut patterns_at_current_depth = BulkQueue::new(None);
for queue_item in last_depth_patterns.into_iter() {
for move_class_index in &self.canonical_fsm.move_class_indices {
let moves_in_class = &self.search_moves.grouped[move_class_index.0];
let moves_in_class = &self.search_moves.by_move_class[move_class_index.0];
let next_state = self
.canonical_fsm
.next_state(queue_item.canonical_fsm_state, *move_class_index);
Expand Down
13 changes: 13 additions & 0 deletions src/rs/_internal/search/check_pattern.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
use cubing::kpuzzle::KPattern;

/// Checks whether a pattern is acceptable during search.
///
/// Implementors can reject patterns (e.g. puzzle-specific invalid states,
/// presumably such as bandaged Square-1 shapes — confirm against callers)
/// so the search can prune them. The check is an associated function rather
/// than a method, so implementors are stateless type-level markers.
pub trait PatternValidityChecker {
    /// Returns `true` if `pattern` may be visited by the search.
    fn is_valid(pattern: &KPattern) -> bool;
}

pub struct AlwaysValid;

impl PatternValidityChecker for AlwaysValid {
    /// Always returns `true`: no pattern is ever rejected.
    fn is_valid(_pattern: &KPattern) -> bool {
        true
    }
}
30 changes: 20 additions & 10 deletions src/rs/_internal/search/idf_search.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,11 @@ use serde::{Deserialize, Serialize};

use crate::_internal::{
cli::options::{Generators, MetricEnum},
CanonicalFSM, CanonicalFSMState, MoveClassIndex, PruneTable, RecursiveWorkTracker, SearchError,
SearchGenerators, SearchLogger, CANONICAL_FSM_START_STATE,
CanonicalFSM, CanonicalFSMState, HashPruneTable, MoveClassIndex, RecursiveWorkTracker,
SearchError, SearchGenerators, SearchLogger, CANONICAL_FSM_START_STATE,
};

use super::KPatternStack;
use super::{AlwaysValid, KPatternStack, PatternValidityChecker};

const MAX_SUPPORTED_SEARCH_DEPTH: usize = 500; // TODO: increase

Expand Down Expand Up @@ -137,12 +137,12 @@ pub struct IDFSearchAPIData {
pub search_logger: Arc<SearchLogger>,
}

pub struct IDFSearch {
pub struct IDFSearch<ValidityChecker: PatternValidityChecker = AlwaysValid> {
api_data: Arc<IDFSearchAPIData>,
prune_table: PruneTable,
pub prune_table: HashPruneTable<ValidityChecker>,
}

impl IDFSearch {
impl<ValidityChecker: PatternValidityChecker> IDFSearch<ValidityChecker> {
pub fn try_new(
kpuzzle: KPuzzle,
target_pattern: KPattern,
Expand All @@ -163,7 +163,8 @@ impl IDFSearch {
search_logger: search_logger.clone(),
});

let prune_table = PruneTable::new(api_data.clone(), search_logger, min_prune_table_size); // TODO: make the prune table reusable across searches.
let prune_table =
HashPruneTable::new(api_data.clone(), search_logger, min_prune_table_size); // TODO: make the prune table reusable across searches.
Ok(Self {
api_data,
prune_table,
Expand Down Expand Up @@ -262,10 +263,15 @@ impl IDFSearch {
remaining_depth: usize,
solution_moves: SolutionMoves,
) -> SearchRecursionResult {
let current_pattern = kpattern_stack.current_pattern();
// TODO: apply invalid checks only to intermediate state (i.e. exclude remaining_depth == 0)?
if !ValidityChecker::is_valid(current_pattern) {
return SearchRecursionResult::ContinueSearchingDefault();
}

individual_search_data
.recursive_work_tracker
.record_recursive_call();
let current_pattern = kpattern_stack.current_pattern();
if remaining_depth == 0 {
return self.base_case(
individual_search_data,
Expand All @@ -281,8 +287,12 @@ impl IDFSearch {
if prune_table_depth > remaining_depth {
return SearchRecursionResult::ContinueSearchingDefault();
}
for (move_class_index, move_transformation_multiples) in
self.api_data.search_generators.grouped.iter().enumerate()
for (move_class_index, move_transformation_multiples) in self
.api_data
.search_generators
.by_move_class
.iter()
.enumerate()
{
let Some(next_state) = self
.api_data
Expand Down
3 changes: 3 additions & 0 deletions src/rs/_internal/search/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,5 +11,8 @@ pub(crate) use recursive_work_tracker::*;
mod search_logger;
pub use search_logger::*;

mod check_pattern;
pub use check_pattern::*;

mod kpattern_stack;
pub(crate) use kpattern_stack::*;
Loading

0 comments on commit 34b3d1a

Please sign in to comment.