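//! Generates deterministic LSEQ key-allocation scenarios and writes each one as
//! pretty-printed JSON into `../genfiles`. Every random value consumed while
//! allocating keys is recorded as a float compatible with TypeScript's
//! `Math.random()` consumption pattern, so another implementation can replay the
//! exact same allocation decisions.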
mod model;

use model::{Insert, Scenario};
use peoplesgrocers_lseq::{LseqRng, SortKey, LSEQ};
use rand::{rngs::StdRng, Rng, SeedableRng};
use std::fs;

/// A recording RNG wrapper that records random values as floats
/// compatible with TypeScript's Math.random() consumption pattern.
struct RecordingRng {
    /// Deterministic source of randomness, seeded once per scenario.
    inner: StdRng,
    /// Recorded floats in [0, 1), in the order they were consumed.
    recorded: Vec<f64>,
}

impl RecordingRng {
    fn new(seed: u64) -> Self {
        RecordingRng {
            inner: StdRng::seed_from_u64(seed),
            recorded: Vec::new(),
        }
    }

    /// Drains the recorded values, leaving the buffer empty.
    fn take_recorded(&mut self) -> Vec<f64> {
        std::mem::take(&mut self.recorded)
    }
}

impl LseqRng for RecordingRng {
    fn random_bool(&mut self, p: f64) -> bool {
        // Use fully qualified syntax to call the rand::Rng method
        let result = Rng::random_bool(&mut self.inner, p);
        // Record a float such that TypeScript's `f < p` check produces the same result:
        // if the result is true we need f < p, so use p / 2;
        // if the result is false we need f >= p, so use (1 + p) / 2.
        let recorded_value = if result { p / 2.0 } else { (1.0 + p) / 2.0 };
        self.recorded.push(recorded_value);
        result
    }

    fn random_range(&mut self, range: std::ops::Range<u64>) -> u64 {
        // Use fully qualified syntax to call the rand::Rng method
        let result: u64 = Rng::random_range(&mut self.inner, range.clone());
        // Record a float such that TypeScript's `Math.floor(f * range_size)` produces
        // the same result. For result r in [0, range_size) we need floor(f * range_size) = r,
        // so f must lie in [r/range_size, (r+1)/range_size); use the midpoint (r + 0.5) / range_size.
        let range_size = (range.end - range.start) as f64;
        let relative_result = (result - range.start) as f64;
        let recorded_value = (relative_result + 0.5) / range_size;
        self.recorded.push(recorded_value);
        result
    }
}
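
/// Runs `ops` against a freshly seeded LSEQ and a mirror `state` vector of key
/// strings, then bundles the resulting inserts with the RNG values recorded
/// while they were allocated.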
fn generate_scenario(
    name: &str,
    description: &str,
    seed: u64,
    init: Vec<&str>,
    ops: impl FnOnce(&mut LSEQ<RecordingRng>, &mut Vec<String>) -> Vec<Insert>,
) -> Scenario {
    let mut lseq = LSEQ::new(RecordingRng::new(seed));
    let mut state: Vec<String> = init.iter().map(|s| s.to_string()).collect();
    let operations = ops(&mut lseq, &mut state);

    Scenario {
        name: name.to_string(),
        description: description.to_string(),
        seed,
        init: init.iter().map(|s| s.to_string()).collect(),
        rng: lseq.take_rng().take_recorded(),
        operations,
    }
}

fn main() {
    let scenarios = vec![
        // 01 - Sequential append
        generate_scenario(
            "sequential-append",
            "20 inserts, each after the last",
            42,
            vec![],
            |lseq, state| {
                let mut ops = Vec::new();
                for _ in 0..20 {
                    let before_key = state.last().map(|s| s.parse::<SortKey>().unwrap());
                    let result = lseq.alloc(before_key.as_ref(), None);
                    let result_str = result.to_string();

                    ops.push(Insert::After {
                        index: -1, // after last element (or beginning if empty)
                        outcome: result_str.clone(),
                    });

                    state.push(result_str);
                }
                ops
            },
        ),
        // 02 - Sequential prepend
        generate_scenario(
            "sequential-prepend",
            "20 inserts, each before the first",
            43,
            vec![],
            |lseq, state| {
                let mut ops = Vec::new();
                for _ in 0..20 {
                    let after_key = state.first().map(|s| s.parse::<SortKey>().unwrap());
                    let result = lseq.alloc(None, after_key.as_ref());
                    let result_str = result.to_string();

                    ops.push(Insert::Before {
                        index: 0, // before first element (prepend)
                        outcome: result_str.clone(),
                    });

                    state.insert(0, result_str);
                }
                ops
            },
        ),
        // 03 - Random insert 100 items
        generate_scenario(
            "random-insert-100-items",
            "100 inserts at random positions",
            44,
            vec![],
            |lseq, state| {
                let mut ops = Vec::new();
                let mut position_rng = StdRng::seed_from_u64(100); // Separate RNG for positions

                for _ in 0..100 {
                    // Pick a random insertion point (0..=state.len())
                    let idx = if state.is_empty() {
                        0
                    } else {
                        Rng::random_range(&mut position_rng, 0..=state.len())
                    };

                    // Derive beforeKey and afterKey from the insertion index
                    let before_key = if idx > 0 {
                        Some(state[idx - 1].parse::<SortKey>().unwrap())
                    } else {
                        None
                    };
                    let after_key = if idx < state.len() {
                        Some(state[idx].parse::<SortKey>().unwrap())
                    } else {
                        None
                    };

                    let result = lseq.alloc(before_key.as_ref(), after_key.as_ref());
                    let result_str = result.to_string();

                    ops.push(Insert::Before {
                        index: idx as i32,
                        outcome: result_str.clone(),
                    });

                    state.insert(idx, result_str);
                }
                ops
            },
        ),
        // 04 - Dense packing
        generate_scenario(
            "dense-packing",
            "20 inserts between adjacent keys '0' and '1'",
            45,
            vec!["0", "1"],
            |lseq, state| {
                let mut ops = Vec::new();
                // Always insert between first and second element
                for _ in 0..20 {
                    let before_key = state[0].parse::<SortKey>().unwrap();
                    let after_key = state[1].parse::<SortKey>().unwrap();
                    let result = lseq.alloc(Some(&before_key), Some(&after_key));
                    let result_str = result.to_string();

                    ops.push(Insert::Before {
                        index: 1, // between state[0] and state[1]
                        outcome: result_str.clone(),
                    });

                    state.insert(1, result_str);
                }
                ops
            },
        ),
        // 05 - Deep nesting
        generate_scenario(
            "deep-nesting",
            "Force 5+ level deep keys by inserting between adjacent keys",
            46,
            vec!["M", "N"],
            |lseq, state| {
                let mut ops = Vec::new();
                // Keep inserting between the first two to force depth
                for _ in 0..30 {
                    let before_key = state[0].parse::<SortKey>().unwrap();
                    let after_key = state[1].parse::<SortKey>().unwrap();
                    let result = lseq.alloc(Some(&before_key), Some(&after_key));
                    let result_str = result.to_string();

                    ops.push(Insert::Before {
                        index: 1, // between state[0] and state[1]
                        outcome: result_str.clone(),
                    });

                    state.insert(1, result_str);
                }
                ops
            },
        ),
        // 06 - Edge min interval
        generate_scenario(
            "edge-min-interval",
            "Insert between adjacent keys (A, B) which have interval=1",
            47,
            vec!["A", "B"],
            |lseq, state| {
                let mut ops = Vec::new();
                for _ in 0..10 {
                    let before_key = state[0].parse::<SortKey>().unwrap();
                    let after_key = state[1].parse::<SortKey>().unwrap();
                    let result = lseq.alloc(Some(&before_key), Some(&after_key));
                    let result_str = result.to_string();

                    ops.push(Insert::Before {
                        index: 1, // between state[0] and state[1]
                        outcome: result_str.clone(),
                    });

                    state.insert(1, result_str);
                }
                ops
            },
        ),
    ];

    // Write each scenario to a file
    let output_dir = "../genfiles";
    fs::create_dir_all(output_dir).expect("Failed to create genfiles directory");

    for (i, scenario) in scenarios.iter().enumerate() {
        let filename = format!("{}/{:02}-{}.scenario.json", output_dir, i + 1, scenario.name);
        let json = serde_json::to_string_pretty(scenario).expect("Failed to serialize scenario");
        fs::write(&filename, json).expect("Failed to write scenario file");
        println!("Generated: {}", filename);
    }
}
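
// The tests below are an illustrative sketch and not part of the original generator:
// the module and test names are ours, and they assume this file is built as a normal
// binary crate so `cargo test` picks them up. They demonstrate the property the
// recording relies on: replaying the recorded floats with the TypeScript-style
// consumption (`f < p`, `Math.floor(f * size)`) reproduces the decisions made in Rust.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn recorded_floats_replay_bool_decisions() {
        let mut rng = RecordingRng::new(7);
        let p = 0.3;
        let decisions: Vec<bool> = (0..100).map(|_| rng.random_bool(p)).collect();
        for (decision, f) in decisions.iter().zip(rng.take_recorded()) {
            // TypeScript-style consumption: Math.random() < p
            assert_eq!(*decision, f < p);
        }
    }

    #[test]
    fn recorded_floats_replay_range_decisions() {
        let mut rng = RecordingRng::new(7);
        let range = 3u64..17;
        let size = (range.end - range.start) as f64;
        let picks: Vec<u64> = (0..100).map(|_| rng.random_range(range.clone())).collect();
        for (pick, f) in picks.iter().zip(rng.take_recorded()) {
            // TypeScript-style consumption: start + Math.floor(f * size)
            assert_eq!(*pick, range.start + (f * size).floor() as u64);
        }
    }
}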