feat: conformance tests pass for the first time

Change implementation to exponentially increase search space at each
level.
This commit is contained in:
nobody 2025-12-12 21:05:29 -08:00
commit 546d6deb69
Signed by: GrocerPublishAgent
GPG key ID: D460CD54A9E3AB86
13 changed files with 1852 additions and 102 deletions

View file

@ -0,0 +1,254 @@
mod model;
use model::{Insert, Scenario};
use peoplesgrocers_lseq::{LseqRng, SortKey, LSEQ};
use rand::{rngs::StdRng, Rng, SeedableRng};
use std::fs;
/// A recording RNG wrapper that records random values as floats
/// compatible with TypeScript's Math.random() consumption pattern.
///
/// Each `gen_bool`/`gen_range` call appends one float in [0, 1) to
/// `recorded`, chosen so a TypeScript replayer consuming that float via
/// Math.random()-style arithmetic reproduces the same decision.
struct RecordingRng {
    /// Deterministic randomness source (seeded, so scenarios are reproducible).
    inner: StdRng,
    /// The Math.random()-style float trace replayed by the conformance tests.
    recorded: Vec<f64>,
}
impl RecordingRng {
    /// Build a recorder around a deterministically seeded `StdRng`.
    fn new(seed: u64) -> Self {
        Self {
            inner: StdRng::seed_from_u64(seed),
            recorded: Vec::new(),
        }
    }

    /// Drain the recorded float trace, leaving the internal log empty.
    fn take_recorded(&mut self) -> Vec<f64> {
        std::mem::replace(&mut self.recorded, Vec::new())
    }
}
impl LseqRng for RecordingRng {
    /// Sample a bool that is `true` with probability `p`, recording a float
    /// `f` such that TypeScript's `f < p` check reproduces the same result.
    fn gen_bool(&mut self, p: f64) -> bool {
        // Use fully qualified syntax to call rand::Rng method
        let result = Rng::gen_bool(&mut self.inner, p);
        // TypeScript decides with `f < p`:
        //   true  -> need f <  p, so record the midpoint p / 2
        //   false -> need f >= p, so record the midpoint (1 + p) / 2
        let recorded_value = if result { p / 2.0 } else { (1.0 + p) / 2.0 };
        self.recorded.push(recorded_value);
        result
    }

    /// Sample a value from `range`, recording a float `f` such that
    /// TypeScript's `start + Math.floor(f * size)` reproduces the result.
    fn gen_range(&mut self, range: std::ops::Range<u64>) -> u64 {
        // Use fully qualified syntax to call rand::Rng method
        let result: u64 = Rng::gen_range(&mut self.inner, range.clone());
        // For relative result r in [0, size) we need f in [r/size, (r+1)/size);
        // the midpoint (r + 0.5) / size sits safely inside that interval —
        // but only while the arithmetic is exact in f64. Above 2^53 an f64
        // cannot represent every integer, so the midpoint could round onto a
        // neighboring bucket and decode to the WRONG index on the TypeScript
        // side. LSEQ intervals are tiny in practice; fail loudly in debug
        // builds if that assumption is ever violated.
        debug_assert!(
            range.end - range.start <= (1u64 << 53),
            "gen_range interval too large to round-trip exactly through an f64"
        );
        let range_size = (range.end - range.start) as f64;
        let relative_result = (result - range.start) as f64;
        let recorded_value = (relative_result + 0.5) / range_size;
        self.recorded.push(recorded_value);
        result
    }
}
/// Run one scenario: seed an LSEQ with a recording RNG, replay `ops`
/// against it, and package the results — including the captured RNG
/// float trace — into a serializable `Scenario`.
fn generate_scenario(
    name: &str,
    description: &str,
    seed: u64,
    init: Vec<&str>,
    ops: impl FnOnce(&mut LSEQ<RecordingRng>, &mut Vec<String>) -> Vec<Insert>,
) -> Scenario {
    let mut sequence = LSEQ::new(RecordingRng::new(seed));
    // Working copy of the sequence contents, mutated by `ops`.
    let mut keys: Vec<String> = init.iter().map(|s| (*s).to_owned()).collect();
    let operations = ops(&mut sequence, &mut keys);
    // Capture the RNG trace only after all operations have run.
    let rng_trace = sequence.take_rng().take_recorded();
    Scenario {
        name: name.to_owned(),
        description: description.to_owned(),
        seed,
        init: init.into_iter().map(String::from).collect(),
        rng: rng_trace,
        operations,
    }
}
fn main() {
let scenarios = vec![
// 01 - Sequential append
generate_scenario(
"sequential-append",
"20 inserts, each after the last",
42,
vec![],
|lseq, state| {
let mut ops = Vec::new();
for _ in 0..20 {
let before_key = state.last().map(|s| s.parse::<SortKey>().unwrap());
let result = lseq.alloc(before_key.as_ref(), None);
let result_str = result.to_string();
ops.push(Insert::After {
index: -1, // after last element (or beginning if empty)
outcome: result_str.clone(),
});
state.push(result_str);
}
ops
},
),
// 02 - Sequential prepend
generate_scenario(
"sequential-prepend",
"20 inserts, each before the first",
43,
vec![],
|lseq, state| {
let mut ops = Vec::new();
for _ in 0..20 {
let after_key = state.first().map(|s| s.parse::<SortKey>().unwrap());
let result = lseq.alloc(None, after_key.as_ref());
let result_str = result.to_string();
ops.push(Insert::Before {
index: 0, // before first element (prepend)
outcome: result_str.clone(),
});
state.insert(0, result_str);
}
ops
},
),
// 03 - Random insert 100 items
generate_scenario(
"random-insert-100-items",
"100 inserts at random positions",
44,
vec![],
|lseq, state| {
let mut ops = Vec::new();
let mut position_rng = StdRng::seed_from_u64(100); // Separate RNG for positions
for _ in 0..100 {
// Pick a random insertion point (0..=state.len())
let idx = if state.is_empty() {
0
} else {
Rng::gen_range(&mut position_rng, 0..=state.len())
};
// Derive beforeKey and afterKey from the insertion index
let before_key = if idx > 0 {
Some(state[idx - 1].parse::<SortKey>().unwrap())
} else {
None
};
let after_key = if idx < state.len() {
Some(state[idx].parse::<SortKey>().unwrap())
} else {
None
};
let result = lseq.alloc(before_key.as_ref(), after_key.as_ref());
let result_str = result.to_string();
ops.push(Insert::Before {
index: idx as i32,
outcome: result_str.clone(),
});
state.insert(idx, result_str);
}
ops
},
),
// 04 - Dense packing
generate_scenario(
"dense-packing",
"20 inserts between adjacent keys '0' and '1'",
45,
vec!["0", "1"],
|lseq, state| {
let mut ops = Vec::new();
// Always insert between first and second element
for _ in 0..20 {
let before_key = state[0].parse::<SortKey>().unwrap();
let after_key = state[1].parse::<SortKey>().unwrap();
let result = lseq.alloc(Some(&before_key), Some(&after_key));
let result_str = result.to_string();
ops.push(Insert::Before {
index: 1, // between state[0] and state[1]
outcome: result_str.clone(),
});
state.insert(1, result_str);
}
ops
},
),
// 05 - Deep nesting
generate_scenario(
"deep-nesting",
"Force 5+ level deep keys by inserting between adjacent keys",
46,
vec!["M", "N"],
|lseq, state| {
let mut ops = Vec::new();
// Keep inserting between first two to force depth
for _ in 0..30 {
let before_key = state[0].parse::<SortKey>().unwrap();
let after_key = state[1].parse::<SortKey>().unwrap();
let result = lseq.alloc(Some(&before_key), Some(&after_key));
let result_str = result.to_string();
ops.push(Insert::Before {
index: 1, // between state[0] and state[1]
outcome: result_str.clone(),
});
state.insert(1, result_str);
}
ops
},
),
// 06 - Edge min interval
generate_scenario(
"edge-min-interval",
"Insert between adjacent keys (A, B) which have interval=1",
47,
vec!["A", "B"],
|lseq, state| {
let mut ops = Vec::new();
for _ in 0..10 {
let before_key = state[0].parse::<SortKey>().unwrap();
let after_key = state[1].parse::<SortKey>().unwrap();
let result = lseq.alloc(Some(&before_key), Some(&after_key));
let result_str = result.to_string();
ops.push(Insert::Before {
index: 1, // between state[0] and state[1]
outcome: result_str.clone(),
});
state.insert(1, result_str);
}
ops
},
),
];
// Write each scenario to a file
let output_dir = "../genfiles";
fs::create_dir_all(output_dir).expect("Failed to create genfiles directory");
for (i, scenario) in scenarios.iter().enumerate() {
let filename = format!("{}/{:02}-{}.scenario.json", output_dir, i + 1, scenario.name);
let json = serde_json::to_string_pretty(scenario).expect("Failed to serialize scenario");
fs::write(&filename, json).expect("Failed to write scenario file");
println!("Generated: {}", filename);
}
}

View file

@ -0,0 +1,46 @@
use serde::ser::{SerializeMap, Serializer};
use serde::Serialize;
/// One self-contained conformance scenario, serialized to JSON and
/// replayed by the TypeScript test suite.
#[derive(Serialize)]
pub struct Scenario {
    /// Scenario identifier (also used to build the output filename).
    pub name: String,
    /// One-line description of what the scenario exercises.
    pub description: String,
    /// Seed for the key-allocation RNG, making runs reproducible.
    pub seed: u64,
    /// Initial sequence contents before any operations are applied.
    pub init: Vec<String>,
    /// Recorded Math.random()-style floats the replayer consumes in order.
    pub rng: Vec<f64>,
    /// Ordered inserts to replay, each carrying its expected allocated key.
    pub operations: Vec<Insert>,
}
/// Specifies an insertion point in the sequence.
///
/// - `Insert::Before { index: 3, .. }` → insert before state[3]
/// - `Insert::Before { index: 0, .. }` → insert at start
/// - `Insert::After { index: 2, .. }` → insert after state[2]
/// - `Insert::After { index: -1, .. }` → insert at end (Python-style negative index)
///
/// Serializes to `{ "before": n, "expected": "..." }` or `{ "after": n, "expected": "..." }`.
#[derive(Debug)]
pub enum Insert {
    /// Insert before `index`; `outcome` is the sort key expected to be allocated.
    Before { index: i32, outcome: String },
    /// Insert after `index` (negative counts from the end); `outcome` as above.
    After { index: i32, outcome: String },
}
impl Serialize for Insert {
    /// Serialize as a two-entry map: the position key ("before"/"after")
    /// first, then "expected" carrying the allocated sort key string.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // Destructure once so both variants share one serialization path.
        let (position_key, index, outcome) = match self {
            Insert::Before { index, outcome } => ("before", index, outcome),
            Insert::After { index, outcome } => ("after", index, outcome),
        };
        let mut map = serializer.serialize_map(Some(2))?;
        map.serialize_entry(position_key, index)?;
        map.serialize_entry("expected", outcome)?;
        map.end()
    }
}