Diffstat (limited to 'src')
130 files changed, 7711 insertions, 0 deletions
diff --git a/src/.gitignore b/src/.gitignore new file mode 100644 index 0000000..aeb024c --- /dev/null +++ b/src/.gitignore @@ -0,0 +1,21 @@ +# Generated by Cargo +# will have compiled files and executables +/target/ + +# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries +# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html +Cargo.lock + +# These are backup files generated by rustfmt +**/*.rs.bk + +/proptest-regressions/* + +# Racket +compiled/ + +# Rust parser code gen testing dir. +/gen_code/ +/racket_specs/gen_lib_spec/ +/racket_specs/gen_prop_spec/ +/racket_specs/gen_match/
\ No newline at end of file diff --git a/src/Cargo.toml b/src/Cargo.toml new file mode 100644 index 0000000..d3658ac --- /dev/null +++ b/src/Cargo.toml @@ -0,0 +1,17 @@ +[workspace] +resolver = "2" +members = [ + "crates/primrose", + "crates/library", + "crates/cli", + "crates/benchmarker" +] + +[workspace.dependencies] +log = { version = "0.4.20" } +env_logger = "0.10.0" +thiserror = "1.0.49" +anyhow = "1.0.75" +rand = "0.8.5" +serde = "1.0.190" +serde_json = "1.0.108"
\ No newline at end of file diff --git a/src/README.md b/src/README.md new file mode 100644 index 0000000..604468b --- /dev/null +++ b/src/README.md @@ -0,0 +1,143 @@ +# Getting Started + +This is the research artifact for the paper *"Primrose: Selecting Container Data Types by their Properties"*. + +For ease of evaluation, the artifact is provided as a VirtualBox virtual machine with all dependencies pre-installed. +Below, we provide a guide on how to check that the claims made in the paper are supported by the artifact. +We encourage the evaluators to experiment themselves with the provided examples. + + +## Logging into the VM and locating the artifact +- user name: `user` +- password: `helloworld` +- artifact location: `/home/user/Documents/PrimroseAE/` + +# Overview of the artifact +This artifact supports the claims made in the paper by containing: +- The property specifications introduced in section 4 of the paper. +- An implementation of a parser and type checker for the property specifications, accompanied by a type-inference algorithm. +- An implementation of the container library studied in section 5 of the paper. +- Library specifications for each container discussed in section 5. +- An implementation of the selection process described in section 6 of the paper. +- An implementation of the code generation described in section 6.4 of the paper. +- Property based testing as described in section 7.1 of the paper. +- A script for measuring the solver times reported in section 7.2 of the paper. We expect that runtimes measured inside the virtual machine may differ from the numbers reported in the paper, which were measured outside the virtual machine. + +# Step-by-Step Instructions +## Walkthrough of code related to features introduced in the paper +- The *syntactic properties* introduced in section 4.1 can be found in `./src/traits/mod.rs` +- The *semantic properties* introduced in section 4.2 can be found in: + - the `unique` property specification can be found in `./spec_code/example_unique.rs` + - the `ascending` property specification as well as the composition of `ascending` and `unique` can be found in `./spec_code/example_comp.rs`.
+- The property specification of the stack example introduced in section 4.3 can be found in `./spec_code/example_stack.rs` +- The combinators used in property specifications are provided in `./racket_specs/combinators.rkt` +- All library specifications introduced in section 5 can be found in `./src/library/` +- All property based tests introduced in section 7 can also be found in `./src/library/` + +## Selection of valid container implementations and Rust code generation via the `primrose` tool +- To run the *unique container example* from the paper: + - Make sure you are in the directory `PrimroseAE` + - Run command: `cargo run example_unique.rs unique 3` + - Generated code can be found in the directory `./gen_code/unique/` + - For this example we expect four files to be generated, each with a different container implementation: + - `./gen_code/unique/unique0.rs` + - `./gen_code/unique/unique1.rs` + - `./gen_code/unique/unique2.rs` + - `./gen_code/unique/unique3.rs` + - To compile the generated Rust code, open `Cargo.toml` and add an entry at the end for each file you want to compile: + ``` + [[bin]] + name = "unique0" + path = "gen_code/unique/unique0.rs" + + [[bin]] + name = "unique1" + path = "gen_code/unique/unique1.rs" + + [[bin]] + name = "unique2" + path = "gen_code/unique/unique2.rs" + + [[bin]] + name = "unique3" + path = "gen_code/unique/unique3.rs" + ``` + - Then you can compile and execute the generated files with: + - `cargo run --bin unique0` + - `cargo run --bin unique1` + - `cargo run --bin unique2` + - `cargo run --bin unique3` + +- To run the *unique and ascending (strictly ascending) container example* from the paper: + - Make sure you are in the directory `PrimroseAE` + - Run command: `cargo run example_comp.rs comp 3` + - Generated code can be found under the directory `./gen_code/comp/` + - To compile the generated Rust code, add it to `Cargo.toml` as above and then execute it via `cargo run` +- To run the *stack example* from the paper: + - Make sure you are in the directory `PrimroseAE` + - Run command: `cargo run example_stack.rs stack 3` + - Generated code can be found under the directory `./gen_code/stack/` + - To compile the generated Rust code, add it to `Cargo.toml` as above and then execute it via `cargo run` + +## Running property based testing from section 7.1 +- Make sure you are in the directory `PrimroseAE` +- Run command: `cargo test` +- If you want to measure how long it takes to execute all tests, run: `./timetests.sh` + +## Producing solver efficiency benchmarks from section 7.2 +- Make sure you are in the directory `PrimroseAE` +- Run command: `./timeall.sh` +- **Please note:** we do not expect that the times measured inside the virtual machine and on different hardware will be exactly the same as the times presented in the paper. + +# Technical Overview of the Artifact + +## Overview of pre-installed dependencies +- Rust 1.67.0-nightly +- Racket 8.1 or later +- Rosette + - We used the Z3 backend in this project +- These dependencies are all pre-installed; to check they are installed correctly: + - Type command: `rustc --version` in a terminal, you should get: + ``` + rustc 1.67.0-nightly (01fbc6ae7 2022-12-07) + ``` + - Type command: `racket --version` in a terminal, you should get: + ``` + Welcome to Racket v8.6 [cs]. + ```
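+ +As a quick end-to-end sanity check after verifying the dependencies, you can re-run the *unique container example* from above in one go. The following is a minimal sketch; it assumes the default VM layout and that the `[[bin]]` entries shown earlier have already been added to `Cargo.toml`: + ``` + cd /home/user/Documents/PrimroseAE + # select container implementations and generate code for the unique example + cargo run example_unique.rs unique 3 + # list the four generated candidates + ls gen_code/unique/ + # compile and run one of the generated files + cargo run --bin unique0 + ```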
+ +## Execution of the `primrose` tool with arbitrary property specification +- Make sure you are in the `PrimroseAE` directory +- Make sure the Rust program with embedded property specifications (`input.rs`) is provided in the directory `./spec_code/` +- Run the tool with command: + ``` + cargo run [input.rs] [output_dir_name] [model_size] + ``` +- For most properties, we recommend a model size of `3` +- The generated file will appear in the directory `[output_dir_name]` and can be compiled with `cargo` after an entry for it has been added at the end of `Cargo.toml` file: + ``` + [[bin]] + name = "name_you_like" + path = "path/to/file.rs" + ``` +- To execute the generated Rust code run: + ``` + cargo run --bin name_you_like + ``` + +## File structure of the `PrimroseAE` directory +- `./benches/`: containing the code producing the runtime performance benchmarks in section 2 +- `./gen_code/`: containing generated code with selected container implementations +- `./mem_profiling/`: containing scripts producing the memory consumption benchmarks in section 2 +- `./racket_specs/`: containing scripts for setting up and executing the selection process and generated code during the selection process +- `./scripts/`: containing the scripts for measuring the solver's selection time introduced in section 7.2 +- `./spec_code/`: containing source code with property specifications introduced in section 4. +- `./src/library/`: containing container implementations used in this paper, library specifications introduced in section 5 and property based tests introduced in section 7.1 +- `./src/proptest/`: containing code for setting up property based tests. +- `./src/tools/`: containing the code for generating dataset memory profiling. +- `./src/traits/`: container syntactic properties introduced in section 4. +- `./src/main.rs`: the entry point for executing the tool. +- All other files in the `./src/` directory are the detailed implementation of Primrose, including parsing, type checking and analyzing property specifications, extracting and processing library specifications, executing the selection and generating code. +- `./runall.sh` is the script for executing all examples +- `./timeall.sh` is the script running every script located in `./scripts/` measuring the solver's selection time introduced in section 7.2 +- `./timetests.sh` is the script measuring the time for running all property based tests reported in section 7.1 +- `./Cargo.toml`, `./Cargo.lock` and `rust-toolchain.toml` are Rust package and compiler configuration files. diff --git a/src/crates/benchmarker/Cargo.toml b/src/crates/benchmarker/Cargo.toml new file mode 100644 index 0000000..3f542fc --- /dev/null +++ b/src/crates/benchmarker/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "candelabra-benchmarker" +version = "0.1.0" +edition = "2021" + +[dependencies] +log = { workspace = true } + +serde = { workspace = true, features = ["derive"] } +serde_json = { workspace = true } +rand = { workspace = true } + +primrose-library = { path = "../library" } + +[dev-dependencies] +env_logger = { workspace = true }
\ No newline at end of file diff --git a/src/crates/benchmarker/examples/run_vec.rs b/src/crates/benchmarker/examples/run_vec.rs new file mode 100644 index 0000000..0c93bbb --- /dev/null +++ b/src/crates/benchmarker/examples/run_vec.rs @@ -0,0 +1,15 @@ +use candelabra_benchmarker::{to_writer, Benchmarker}; + +fn main() { + env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("debug")).init(); + + to_writer( + std::io::stdout(), + &Benchmarker::<Vec<usize>, usize>::with_ns(&[1, 100, 1000]) + .container() + .indexable() + .stack() + .finish(), + ) + .unwrap(); +} diff --git a/src/crates/benchmarker/src/bench.rs b/src/crates/benchmarker/src/bench.rs new file mode 100644 index 0000000..9c4cf74 --- /dev/null +++ b/src/crates/benchmarker/src/bench.rs @@ -0,0 +1,90 @@ +use std::{ + cmp, + hint::black_box, + time::{Duration, Instant}, +}; + +use crate::BenchmarkResult; + +/// Benchmark an operation for approx 5 seconds, returning the results. +/// +/// `setup` is used to create the thing `op` acts on, and `undo` is called between each run to undo `op`. +/// If `undo` is invalid, this will return garbage results. +/// +/// Warm-up for the setup is done beforehand. +pub fn benchmark_op<T>( + mut setup: impl FnMut() -> T, + mut op: impl FnMut(&mut T), + mut undo: impl FnMut(&mut T), +) -> BenchmarkResult { + // let loop_end = Instant::now() + Duration::from_secs(5); + let loop_end = Instant::now() + Duration::from_millis(100); + + let mut times = 0; + let mut min = Duration::from_secs(u64::MAX); + let mut max = Duration::from_secs(0); + let mut sum = Duration::from_secs(0); + + let mut target = setup(); + while Instant::now() + max < loop_end { + #[allow(clippy::unit_arg)] // pretty sure this is necessary to prevent optimisations + let duration = time_singular(|| black_box(op(&mut target))); + undo(&mut target); + + min = cmp::min(min, duration); + max = cmp::max(max, duration); + sum += duration; + times += 1; + } + + BenchmarkResult { + times, + min, + max, + avg: sum / times as u32, + } +} + +fn time_singular(f: impl FnOnce()) -> Duration { + let start = Instant::now(); + f(); + let end = Instant::now(); + end - start +} + +#[cfg(test)] +mod tests { + use super::benchmark_op; + use std::time::Duration; + + #[test] + fn benchmark_op_resets_properly() { + benchmark_op( + || false, + |b| { + assert!(!(*b)); + *b = true; + }, + |b| { + *b = false; + }, + ); + } + + #[test] + fn benchmark_op_times_properly() { + let results = benchmark_op( + || (), + |_| std::thread::sleep(Duration::from_millis(5)), + |_| {}, + ); + + let avg_millis = results.avg.as_nanos() as f32 / (10.0_f32).powi(6); + dbg!(avg_millis); + + assert!( + (avg_millis - 5.0).abs() < 0.1, + "sleeping for 5ms takes roughly 5ms" + ) + } +} diff --git a/src/crates/benchmarker/src/container.rs b/src/crates/benchmarker/src/container.rs new file mode 100644 index 0000000..39e2288 --- /dev/null +++ b/src/crates/benchmarker/src/container.rs @@ -0,0 +1,192 @@ +use std::{any::type_name, collections::HashMap}; + +use log::debug; +use primrose_library::traits::Container; +use rand::{distributions::Standard, prelude::Distribution, random, thread_rng, Rng}; + +use crate::{bench::benchmark_op, Observation, Results}; + +/// Benchmark [`primrose_library::traits::Container`] operations +pub trait ContainerExt<E> { + /// Benchmark at a single `n`. + fn benchmark_container_at(n: usize) -> Results; + + /// Benchmark `len` at a single `n`. + fn benchmark_container_len(n: usize) -> Observation; + + /// Benchmark `contains` at a single `n`. 
+ fn benchmark_container_contains(n: usize) -> Observation; + + /// Benchmark `insert` at a single `n`. + fn benchmark_container_insert(n: usize) -> Observation; + + /// Benchmark `clear` at a single `n`. + fn benchmark_container_clear(n: usize) -> Observation; + + /// Benchmark `remove` at a single `n`. + fn benchmark_container_remove(n: usize) -> Observation; +} + +impl<T, E> ContainerExt<E> for T +where + T: Container<E> + Default, + E: Clone, + Standard: Distribution<E>, +{ + fn benchmark_container_at(n: usize) -> Results { + let mut by_op = HashMap::new(); + + debug!("Benchmarking {} at n = {}", type_name::<T>(), n); + + debug!("...len"); + by_op.insert("len".to_string(), vec![Self::benchmark_container_len(n)]); + debug!("...contains"); + by_op.insert( + "contains".to_string(), + vec![Self::benchmark_container_contains(n)], + ); + debug!("...insert"); + by_op.insert( + "insert".to_string(), + vec![Self::benchmark_container_insert(n)], + ); + debug!("...clear"); + by_op.insert( + "clear".to_string(), + vec![Self::benchmark_container_clear(n)], + ); + debug!("...remove"); + by_op.insert( + "remove".to_string(), + vec![Self::benchmark_container_remove(n)], + ); + debug!("--- done!"); + + Results { by_op } + } + + fn benchmark_container_contains(n: usize) -> Observation { + ( + n, + benchmark_op( + || { + // TODO: maybe we should actually just test the worst case? (at the end) + // we also don't actually test misses yet. + + let mut rng = thread_rng(); + let mut c = T::default(); + + // decide where the element that we will search for will be + let pivot = rng.gen_range(0..n); + + // insert the element at pivot, and keep track of what it is + for _ in 0..pivot { + c.insert(random()); + } + let chosen = rng.gen(); + c.insert(chosen.clone()); + for _ in pivot..n { + c.insert(random()); + } + + (c, chosen) + }, + |(c, search)| { + c.contains(search); + }, + |_| (), + ), + ) + } + + fn benchmark_container_len(n: usize) -> Observation { + ( + n, + benchmark_op( + || { + let mut c = T::default(); + for _ in 0..n { + c.insert(random()); + } + c + }, + |c| { + c.len(); + }, + |_| (), + ), + ) + } + + fn benchmark_container_insert(n: usize) -> Observation { + let setup_closure = || { + let mut c = T::default(); + for _ in 0..n { + c.insert(random()); + } + c + }; + ( + n, + benchmark_op( + setup_closure, + |c| { + // TODO: rng generation could throw off benchmarks + c.insert(random()); + }, + |c| *c = setup_closure(), + ), + ) + } + + fn benchmark_container_clear(n: usize) -> Observation { + let setup_closure = || { + let mut c = T::default(); + for _ in 0..n { + c.insert(random()); + } + c + }; + ( + n, + benchmark_op( + setup_closure, + |c| { + c.clear(); + }, + |c| *c = setup_closure(), + ), + ) + } + + fn benchmark_container_remove(n: usize) -> Observation { + ( + n, + benchmark_op( + || { + let mut rng = thread_rng(); + let mut c = T::default(); + + // decide where the element that we will remove will be + let pivot = rng.gen_range(0..n); + + // insert the element at pivot, and keep track of what it is + for _ in 0..pivot { + c.insert(random()); + } + let chosen = rng.gen(); + c.insert(chosen.clone()); + for _ in pivot..n { + c.insert(random()); + } + + (c, chosen) + }, + |(c, chosen)| { + c.remove(chosen.clone()); + }, + |(c, chosen)| c.insert(chosen.clone()), + ), + ) + } +} diff --git a/src/crates/benchmarker/src/indexable.rs b/src/crates/benchmarker/src/indexable.rs new file mode 100644 index 0000000..a30a1f4 --- /dev/null +++ b/src/crates/benchmarker/src/indexable.rs @@ -0,0 +1,104 @@ 
+use std::{any::type_name, collections::HashMap}; + +use log::debug; +use primrose_library::traits::{Container, Indexable}; +use rand::{distributions::Standard, prelude::Distribution, random}; + +use crate::{benchmark_op, Observation, Results}; + +/// Benchmark [`primrose_library::traits::Indexable`] operations +pub trait IndexableExt<E> { + /// Benchmark at a single `n`. + fn benchmark_indexable_at(n: usize) -> Results; + + /// Benchmark `first` at a single `n`. + fn benchmark_indexable_first(n: usize) -> Observation; + + /// Benchmark `last` at a single `n`. + fn benchmark_indexable_last(n: usize) -> Observation; + + /// Benchmark `nth` at a single `n`. + fn benchmark_indexable_nth(n: usize) -> Observation; +} + +impl<T, E> IndexableExt<E> for T +where + T: Container<E> + Indexable<E> + Default, + Standard: Distribution<E>, +{ + fn benchmark_indexable_at(n: usize) -> Results { + let mut by_op = HashMap::new(); + + debug!("Benchmarking {} at n = {}", type_name::<T>(), n); + + debug!("...first"); + by_op.insert( + "first".to_string(), + vec![Self::benchmark_indexable_first(n)], + ); + debug!("...last"); + by_op.insert("last".to_string(), vec![Self::benchmark_indexable_last(n)]); + debug!("...nth"); + by_op.insert("nth".to_string(), vec![Self::benchmark_indexable_nth(n)]); + debug!("--- done!"); + + Results { by_op } + } + + fn benchmark_indexable_first(n: usize) -> Observation { + ( + n, + benchmark_op( + || { + let mut c = T::default(); + for _ in 0..n { + c.insert(random()); + } + c + }, + |c| { + c.first(); + }, + |_| (), + ), + ) + } + + fn benchmark_indexable_last(n: usize) -> Observation { + ( + n, + benchmark_op( + || { + let mut c = T::default(); + for _ in 0..n { + c.insert(random()); + } + c + }, + |c| { + c.last(); + }, + |_| (), + ), + ) + } + + fn benchmark_indexable_nth(n: usize) -> Observation { + ( + n, + benchmark_op( + || { + let mut c = T::default(); + for _ in 0..n { + c.insert(random()); + } + (c, random::<usize>()) + }, + |(c, fetch)| { + c.nth(*fetch); + }, + |_| (), + ), + ) + } +} diff --git a/src/crates/benchmarker/src/lib.rs b/src/crates/benchmarker/src/lib.rs new file mode 100644 index 0000000..e6fc69b --- /dev/null +++ b/src/crates/benchmarker/src/lib.rs @@ -0,0 +1,81 @@ +use std::{collections::HashMap, marker::PhantomData}; + +pub use serde_json::to_writer; + +mod bench; +pub use bench::benchmark_op; + +mod container; +pub use container::ContainerExt; + +mod indexable; +pub use indexable::IndexableExt; + +mod stack; +pub use stack::StackExt; + +mod results; +pub use results::*; + +/// Runs benchmarks at varying `n`s with a builder-style interface. +/// +/// This mostly just makes our code generation easier. +pub struct Benchmarker<T, E>(&'static [usize], Results, PhantomData<(T, E)>); +impl<T, E> Benchmarker<T, E> { + /// Create a benchmarker that will repeat all benchmarks with each of the given n values. + pub fn with_ns(ns: &'static [usize]) -> Self { + Self( + ns, + Results { + by_op: HashMap::new(), + }, + PhantomData, + ) + } + + /// Finish benchmarking and get the results + pub fn finish(self) -> Results { + self.1 + } +} + +impl<T, E> Benchmarker<T, E> +where + T: ContainerExt<E>, +{ + /// Run benchmarks for [`primrose_library::traits::Container`] operations. + pub fn container(mut self) -> Self { + for n in self.0 { + self.1.merge(T::benchmark_container_at(*n)); + } + + self + } +} +impl<T, E> Benchmarker<T, E> +where + T: IndexableExt<E>, +{ + /// Run benchmarks for [`primrose_library::traits::Indexable`] operations. 
+ pub fn indexable(mut self) -> Self { + for n in self.0 { + self.1.merge(T::benchmark_indexable_at(*n)); + } + + self + } +} + +impl<T, E> Benchmarker<T, E> +where + T: StackExt<E>, +{ + /// Run benchmarks for [`primrose_library::traits::Stack`] operations. + pub fn stack(mut self) -> Self { + for n in self.0 { + self.1.merge(T::benchmark_stack_at(*n)); + } + + self + } +} diff --git a/src/crates/benchmarker/src/results.rs b/src/crates/benchmarker/src/results.rs new file mode 100644 index 0000000..3c0783f --- /dev/null +++ b/src/crates/benchmarker/src/results.rs @@ -0,0 +1,47 @@ +use std::{collections::HashMap, time::Duration}; + +use serde::{Deserialize, Serialize}; + +/// Results for a whole suite of benchmarks +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct Results { + /// Results for each collection operation + pub by_op: HashMap<OpName, Vec<Observation>>, +} + +/// Name of an operation +pub type OpName = String; + +/// The first key in the tuple is the `n` of the container before the benchmark was taken, and the second the results of the benchmark. +pub type Observation = (usize, BenchmarkResult); + +/// Results for a single benchmark +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct BenchmarkResult { + /// Number of times the benchmark was run + pub times: usize, + + /// The minimum time taken + pub min: Duration, + + /// The maximum time taken + pub max: Duration, + + /// The average (mean) time taken + pub avg: Duration, +} + +impl Results { + /// Merge results from `b` into these results. + /// If `b` contains benchmarks for operations we have a result for, the observations are merged. + pub fn merge(&mut self, b: Self) -> &mut Self { + for (name, mut res_b) in b.by_op { + self.by_op + .entry(name) + .and_modify(|res| res.append(&mut res_b)) + .or_insert(res_b); + } + + self + } +} diff --git a/src/crates/benchmarker/src/stack.rs b/src/crates/benchmarker/src/stack.rs new file mode 100644 index 0000000..7680548 --- /dev/null +++ b/src/crates/benchmarker/src/stack.rs @@ -0,0 +1,79 @@ +use std::{any::type_name, collections::HashMap}; + +use log::debug; +use primrose_library::traits::Stack; +use rand::{distributions::Standard, prelude::Distribution, random}; + +use crate::{benchmark_op, Observation, Results}; + +/// Benchmark [`primrose_library::traits::Stack`] operations +pub trait StackExt<E> { + /// Benchmark at a single `n`. + fn benchmark_stack_at(n: usize) -> Results; + + /// Benchmark `push` at a single `n`. + fn benchmark_stack_push(n: usize) -> Observation; + + /// Benchmark `pop` at a single `n`. 
+ fn benchmark_stack_pop(n: usize) -> Observation; +} + +impl<T, E> StackExt<E> for T +where + T: Stack<E> + Default, + Standard: Distribution<E>, +{ + fn benchmark_stack_at(n: usize) -> Results { + let mut by_op = HashMap::new(); + + debug!("Benchmarking {} at n = {}", type_name::<T>(), n); + + debug!("...push"); + by_op.insert("push".to_string(), vec![Self::benchmark_stack_push(n)]); + debug!("...pop"); + by_op.insert("pop".to_string(), vec![Self::benchmark_stack_pop(n)]); + debug!("--- done!"); + + Results { by_op } + } + + fn benchmark_stack_push(n: usize) -> Observation { + ( + n, + benchmark_op( + || { + let mut c = T::default(); + for _ in 0..n { + c.push(random()); + } + c + }, + |s| s.push(random()), + |s| { + s.pop(); + }, + ), + ) + } + + fn benchmark_stack_pop(n: usize) -> Observation { + ( + n, + benchmark_op( + || { + let mut c = T::default(); + for _ in 0..n { + c.push(random()); + } + c + }, + |s| { + s.pop(); + }, + |s| { + s.push(random()); + }, + ), + ) + } +} diff --git a/src/crates/cli/Cargo.toml b/src/crates/cli/Cargo.toml new file mode 100644 index 0000000..c6dc852 --- /dev/null +++ b/src/crates/cli/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "candelabra-cli" +version = "0.1.0" +edition = "2021" +default-run = "candelabra-cli" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +log = { workspace = true } +env_logger = { workspace = true } +primrose = { path = "../primrose" } +candelabra-benchmarker = { path = "../benchmarker" } +anyhow = { workspace = true } +serde = { workspace = true } +serde_json = { workspace = true } +camino = "1.1.6" +cargo_metadata = "0.18.1" +argh = "0.1.12" +glob = "0.3.1" +tempfile = "3"
\ No newline at end of file diff --git a/src/crates/cli/src/cache.rs b/src/crates/cli/src/cache.rs new file mode 100644 index 0000000..598cad1 --- /dev/null +++ b/src/crates/cli/src/cache.rs @@ -0,0 +1,111 @@ +//! Common utilities for caching results +use std::{ + collections::hash_map::DefaultHasher, + fs::{create_dir_all, metadata, remove_file, File}, + hash::{Hash, Hasher}, + marker::PhantomData, +}; + +use anyhow::{Context, Result}; +use camino::{Utf8Path, Utf8PathBuf}; +use glob::glob; +use log::{debug, warn}; +use serde::{Deserialize, Serialize}; +use serde_json::{from_reader, to_writer}; + +/// A filesystem-based K/V cache +/// This doesn't deal with key invalidation or anything, just the filesystem/serialisation stuff +pub struct FileCache<K: 'static + ?Sized, V> { + base_dir: Utf8PathBuf, + _data: PhantomData<(&'static K, V)>, +} + +impl<K: ?Sized + ToString, V: Serialize + for<'a> Deserialize<'a>> FileCache<K, V> { + /// Create a new file store in the given directory. + pub fn new(base_dir: Utf8PathBuf) -> Result<Self> { + create_dir_all(base_dir.as_std_path()).context("Error creating cache directory")?; + Ok(Self { + base_dir, + _data: PhantomData, + }) + } + + /// Store the given value with the given `key` + pub fn put(&self, key: &K, value: &V) -> Result<()> { + let path = self.path_for(key); + let mut file = File::create(path)?; + to_writer(&mut file, value)?; + + Ok(()) + } + + /// Attempt to load cache entry with the given `key` + /// `Ok(None)` indicates no valid cache entry, while `Err(e)` indicates an IO error + /// Invalid cache entries will be deleted. + pub fn find(&self, key: &K) -> Result<Option<V>> { + let path = self.path_for(key); + if !path.exists() { + return Ok(None); + } + + let file = File::open(&path).context("Error opening cache entry")?; + let contents: V = match from_reader(file) { + Ok(x) => x, + Err(e) => { + debug!("Invalid cache entry: {}", e); + if let Err(e) = self.remove(key) { + warn!("Error deleting invalid cache entry: {}", e); + } + + return Ok(None); + } + }; + + Ok(Some(contents)) + } + + /// Remove value for the given key + pub fn remove(&self, key: &K) -> Result<()> { + Ok(remove_file(self.path_for(key))?) + } + + /// Get the path for a given key + fn path_for(&self, key: &K) -> Utf8PathBuf { + // Sanitise key name + let key = key.to_string(); + let mut san = String::with_capacity(key.len()); + for chr in key.chars() { + if chr == '/' + || chr == '\\' + || chr == ':' + || chr == '*' + || chr == '?' + || chr == '"' + || chr == '<' + || chr == '>' + || chr == '|' + { + san += "_"; + } else { + san.push(chr); + } + } + if san.is_empty() { + san += "_"; + } + self.base_dir.join(san) // TODO: santisation + } +} + +/// Generate a hash from the current state of the given directory +/// This is built from the modification time of all files in that directory and all children. +pub fn gen_tree_hash(dir: &Utf8Path) -> Result<u64> { + let mut hasher = DefaultHasher::new(); + + for f in glob(&format!("{}/**/*", dir)).unwrap() { + let modified = metadata(f?)?.modified()?; + modified.hash(&mut hasher); + } + + Ok(hasher.finish()) +} diff --git a/src/crates/cli/src/candidates.rs b/src/crates/cli/src/candidates.rs new file mode 100644 index 0000000..d134307 --- /dev/null +++ b/src/crates/cli/src/candidates.rs @@ -0,0 +1,113 @@ +//! 
Generating and caching primrose candidate results + +use std::{collections::HashMap, fs::metadata, time::SystemTime}; + +use anyhow::{Context, Result}; +use camino::Utf8Path; +use log::{debug, warn}; +use primrose::ContainerSelector; +use serde::{Deserialize, Serialize}; + +use crate::{ + cache::{gen_tree_hash, FileCache}, + paths::Paths, +}; + +// TODO: Make this adjustable +/// The size of the model used by primrose +const MODEL_SIZE: usize = 3; + +/// Names a container type we want to select. +pub type ConTypeName = String; + +/// Name of a container implementation we are considering +pub type ImplName = String; + +/// A list of candidate container types +pub type Candidates = HashMap<ConTypeName, Vec<ImplName>>; + +/// Entry in the benchmark cache +#[derive(Serialize, Deserialize, Debug)] +struct CacheEntry { + lib_hash: u64, + mod_time: SystemTime, + value: Candidates, +} + +/// Gets/retrieves candidate container types for primrose files. +/// This caches results, and invalidates them when the file changes. +pub struct CandidatesStore<'a> { + paths: &'a Paths, + store: FileCache<Utf8Path, CacheEntry>, + lib_hash: u64, +} + +impl<'a> CandidatesStore<'a> { + /// Create a new store, using the given paths. + /// Benchmarks are cached in `paths.target_dir / candelabra / primrose_results` + pub fn new(paths: &'a Paths) -> Result<Self> { + let base_dir = paths.target_dir.join("candelabra").join("primrose_results"); + + let lib_hash = + gen_tree_hash(&paths.library_crate).context("Error generating library hash")?; + + debug!("Initialised candidate cacher with hash {}", lib_hash); + + Ok(Self { + store: FileCache::new(base_dir)?, + paths, + lib_hash, + }) + } + + /// Get benchmark results for the given type, using cached results if possible and persisting the results for later. + /// Will panic if `name` is not in library specs. + pub fn get(&self, src: &Utf8Path) -> Result<Candidates> { + if let Some(results) = self.find(src)? { + debug!("Cache hit for {} candidates", src); + Ok(results) + } else { + debug!("Cache miss for {} candidates", src); + let selector = ContainerSelector::from_path( + src.as_std_path(), + self.paths.library_src.as_std_path(), + MODEL_SIZE, + ) + .with_context(|| format!("error getting container selector for {}", src))?; + + let candidates = selector + .find_all_candidates()? + .into_iter() + .map(|(k, v)| (k.to_string(), v)) + .collect(); + + if let Err(e) = self.put(src, &candidates) { + warn!("Error caching candidates for {}: {}", src, e); + } + Ok(candidates) + } + } + + /// Attempt to find an up-to-date set of results with the given key + fn find(&self, src: &Utf8Path) -> Result<Option<Candidates>> { + let mod_time = metadata(src)?.modified()?; + Ok(self + .store + .find(src)? + .filter(|e| e.lib_hash == self.lib_hash && e.mod_time == mod_time) + .map(|e| e.value)) + } + + /// Store a new set of results with the given key + fn put(&self, src: &Utf8Path, results: &Candidates) -> Result<()> { + let mod_time = metadata(src)?.modified()?; + self.store.put( + src, + &CacheEntry { + lib_hash: self.lib_hash, + value: results.clone(), + mod_time, + }, + ) + } +} diff --git a/src/crates/cli/src/cost/benchmark.rs b/src/crates/cli/src/cost/benchmark.rs new file mode 100644 index 0000000..a4824d9 --- /dev/null +++ b/src/crates/cli/src/cost/benchmark.rs @@ -0,0 +1,126 @@ +//! 
Benchmarking of container types + +use std::{ + fs::{copy, create_dir, File}, + io::Write, + process::Command, +}; + +use anyhow::{bail, Context, Result}; +use candelabra_benchmarker::Results; +use log::{debug, log_enabled, Level}; +use primrose::{LibSpec, LibSpecs}; +use tempfile::{tempdir, TempDir}; + +use crate::paths::Paths; + +/// The name of the element type we use for benchmarking +pub const ELEM_TYPE: &str = "usize"; + +/// String representation of the array of N values we use for benchmarking +pub const NS: &str = "[1, 10, 100]"; + +/// Run benchmarks for the given container type, returning the results. +/// Panics if the given name is not in the library specs. +pub fn run_benchmarks(name: &str, paths: &Paths, lib_specs: &LibSpecs) -> Result<Results> { + let lib_spec = lib_specs + .get(name) + .expect("name passed to benchmarkspec not in libspecs"); + + // Generate crate & source + let crate_ = prepare_crate(name, paths, lib_spec)?; + + // Build and run + debug!("Building and running benchmarks for {}", name); + let run_output = Command::new("cargo") + .args(["run", "--release"]) + .current_dir(crate_.path()) + .env("CARGO_TARGET_DIR", &paths.target_dir) // Share target directory + .output() + .context("Error running build command")?; + + if !run_output.status.success() { + bail!("Error result from benchmark. Output: {:?}", run_output); + } + + if log_enabled!(Level::Debug) { + if let Ok(stdout) = String::from_utf8(run_output.stdout.clone()) { + debug!("stdout: {:?}", stdout); + } + if let Ok(stderr) = String::from_utf8(run_output.stderr.clone()) { + debug!("stderr: {:?}", stderr); + } + } + + // Deserialise benchmark results + serde_json::from_slice(&run_output.stdout).context("Error deserialising benchmark JSON") +} + +fn prepare_crate(name: &str, paths: &Paths, lib_spec: &LibSpec) -> Result<TempDir> { + // Directory we will create the crate in + let crate_tempdir = tempdir()?; + let crate_dir = crate_tempdir.path(); + debug!("Preparing benchmark crate for {} in {:?}", name, crate_dir); + + // Write the manifest + let mut manifest = + File::create(crate_dir.join("Cargo.toml")).context("Error creating Cargo.toml")?; + manifest + .write_all( + format!( + " +[package] +name = \"bench\" +version = \"0.1.0\" +edition = \"2021\" + +[dependencies] +candelabra-benchmarker = {{ path = \"{}\" }} +primrose-library = {{ path = \"{}\" }} +", + paths.benchmarker_crate, paths.library_crate, + ) + .as_bytes(), + ) + .context("Error writing Cargo.toml")?; + + // Ensure we use the right toolchain + let orig_toolchain_file = paths.base.join("rust-toolchain.toml"); + copy(orig_toolchain_file, crate_dir.join("rust-toolchain.toml")) + .context("Error writing rust-toolchain.toml")?; + + // Generate the code for running our benchmarks + let mut benchmarker = format!( + "Benchmarker::<{}<{}>, {}>::with_ns(&{})", + name, ELEM_TYPE, ELEM_TYPE, NS + ); + + // Add benchmarks for implemented traits + let implemented_traits = lib_spec.interface_provide_map.keys(); + for tr in implemented_traits { + benchmarker += &format!(".{}()", tr.to_lowercase()); + } + benchmarker += ".finish()"; + + // Write the benchmarking source, using our generated benchmarker code. 
+ let src_dir = crate_dir.join("src"); + create_dir(&src_dir).context("Error creating src directory")?; + + let mut src_file = File::create(src_dir.join("main.rs")).context("Error creating main.rs")?; + src_file + .write_all( + format!( + " +use candelabra_benchmarker::{{to_writer, Benchmarker}}; +fn main() {{ + to_writer(std::io::stdout(), &{}).unwrap(); +}} +", + benchmarker + ) + .as_bytes(), + ) + .context("Error writing to main.rs")?; + + Ok(crate_tempdir) +} diff --git a/src/crates/cli/src/cost/mod.rs b/src/crates/cli/src/cost/mod.rs new file mode 100644 index 0000000..eda5957 --- /dev/null +++ b/src/crates/cli/src/cost/mod.rs @@ -0,0 +1,96 @@ +//! Generating, caching, and using cost models +pub mod benchmark; + +use anyhow::{anyhow, Context, Result}; + +use candelabra_benchmarker::Results; + +use log::{debug, warn}; +use primrose::{LibSpec, LibSpecs}; +use serde::{Deserialize, Serialize}; + +use crate::{ + cache::{gen_tree_hash, FileCache}, + cost::benchmark::run_benchmarks, + paths::Paths, +}; + +/// Entry in the benchmark cache +#[derive(Serialize, Deserialize, Debug)] +struct CacheEntry { + lib_hash: u64, + value: Results, +} + +/// Gets/retrieves benchmark results for container implementations. +/// This caches results, and invalidates them when the library or parameters change. +pub struct ResultsStore<'a> { + paths: &'a Paths, + store: FileCache<str, CacheEntry>, + lib_specs: LibSpecs, + lib_hash: u64, +} + +impl<'a> ResultsStore<'a> { + /// Create a new store, using the given paths. + /// Benchmarks are cached in `paths.target_dir / candelabra / benchmark_results` + pub fn new(paths: &'a Paths) -> Result<Self> { + let lib_specs = + LibSpec::read_all(paths.library_src.as_std_path()).map_err(|e| anyhow!("{}", e))?; + + // TODO: this should be home folder or smth + let base_dir = paths + .target_dir + .join("candelabra") + .join("benchmark_results"); + + // TODO: Doesn't take NS or ELEM_TYPE into account + let lib_hash = + gen_tree_hash(&paths.library_crate).context("Error generating library hash")?; + + debug!("Initialised benchmark cacher with hash {}", lib_hash); + + Ok(Self { + store: FileCache::new(base_dir)?, + paths, + lib_specs, + lib_hash, + }) + } + + /// Get benchmark results for the given type, using cached results if possible and persisting the results for later. + /// Will panic if `name` is not in library specs. + pub fn get(&self, name: &str) -> Result<Results> { + if let Some(results) = self.find(name)? { + debug!("Cache hit for {} benchmarks", name); + Ok(results) + } else { + debug!("Cache miss for {} benchmarks", name); + let results = run_benchmarks(name, self.paths, &self.lib_specs)?; + if let Err(e) = self.put(name, &results) { + warn!("Error caching benchmark outputs for {}: {}", name, e); + } + Ok(results) + } + } + + /// Attempt to find an up-to-date set of results with the given key + fn find(&self, name: &str) -> Result<Option<Results>> { + Ok(self + .store + .find(name)? 
+ .filter(|e| e.lib_hash == self.lib_hash) + .map(|e| e.value)) + } + + /// Store a new set of results with the given key + fn put(&self, name: &str, results: &Results) -> Result<()> { + self.store.put( + name, + &CacheEntry { + lib_hash: self.lib_hash, + value: results.clone(), + }, + ) + } +} diff --git a/src/crates/cli/src/main.rs b/src/crates/cli/src/main.rs new file mode 100644 index 0000000..00c85cc --- /dev/null +++ b/src/crates/cli/src/main.rs @@ -0,0 +1,89 @@ +use std::collections::HashSet; + +use anyhow::{anyhow, Context, Result}; +use argh::FromArgs; +use log::info; +use project::Project; + +use crate::{candidates::CandidatesStore, cost::ResultsStore, paths::Paths}; + +mod cache; +mod candidates; +mod cost; +mod paths; +mod project; + +#[derive(FromArgs)] +/// Find the best performing container type using primrose +struct Args { + /// path to Cargo.toml + #[argh(option)] + manifest_path: Option<String>, + + /// project to run on, if in a workspace + #[argh(option, short = 'p')] + project: Option<String>, +} + +fn main() -> Result<()> { + env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info")).init(); + + let args: Args = argh::from_env(); + + let paths = Paths::default(); + info!("Using source dir: {:?}", &paths.base); + + let projects = get_projects(&args).context("failed to find project paths")?; + + let candidates = CandidatesStore::new(&paths)?; + let benchmarks = ResultsStore::new(&paths)?; + + let mut seen_types = HashSet::new(); + for project in projects { + info!("Processing {}", &project.name); + + let all_candidates = project.get_all_candidates(&candidates)?; + info!("Found candidates: {:?}", all_candidates); + for (_, candidates) in all_candidates.iter() { + for candidate in candidates { + seen_types.insert(candidate.clone()); + } + } + } + + info!("Found all candidate types. 
Running benchmarks"); + for typ in seen_types.into_iter() { + dbg!(benchmarks.get(&typ).context("Error running benchmark")?); + } + + Ok(()) +} + +fn get_projects(args: &Args) -> Result<Vec<Project>> { + let mut cmd = cargo_metadata::MetadataCommand::new(); + if let Some(p) = &args.manifest_path { + cmd.manifest_path(p); + } + + let metadata = cmd.exec().context("failed to get manifest metadata")?; + + if let Some(p) = &args.project { + // Select a specific project + Ok(vec![metadata + .packages + .iter() + .find(|pkg| pkg.name == *p) + .map(|pkg| Project::new(pkg.clone())) + .ok_or_else(|| { + anyhow!("specified project does not exist") + })?]) + } else { + // Default to all workspace members + Ok(metadata + .workspace_members + .iter() + .flat_map(|member| metadata.packages.iter().find(|pkg| pkg.id == *member)) + .map(|pkg| Project::new(pkg.clone())) + .collect()) + } +} diff --git a/src/crates/cli/src/paths.rs b/src/crates/cli/src/paths.rs new file mode 100644 index 0000000..639e1c5 --- /dev/null +++ b/src/crates/cli/src/paths.rs @@ -0,0 +1,47 @@ +use std::{env, path::PathBuf}; + +use camino::Utf8PathBuf; + +#[derive(Debug, Clone)] +pub struct Paths { + pub base: Utf8PathBuf, + pub library_crate: Utf8PathBuf, + pub library_src: Utf8PathBuf, + pub benchmarker_crate: Utf8PathBuf, + pub target_dir: Utf8PathBuf, +} + +impl Paths { + fn from_base(base: Utf8PathBuf) -> Self { + Paths { + library_crate: base.join("crates").join("primrose-library"), + library_src: base.join("crates").join("primrose-library").join("src"), + benchmarker_crate: base.join("crates").join("candelabra-benchmarker"), + target_dir: base.join("target"), + base, + } + } +} + +impl Default for Paths { + fn default() -> Self { + let path = if let Ok(var) = env::var("CANDELABRA_SRC_DIR") { + var.into() + } else { + // Most the time this won't work, but it's worth a shot. + let mut path = PathBuf::from(file!()); + path.pop(); // main.rs + path.pop(); // src + path.pop(); // candelabra-cli + path.pop(); // crates + if path.components().count() == 0 { + path.push("."); + } + path + }; + + Paths::from_base(path.canonicalize().expect( + "candelabra source directory not found. please specify it with CANDELABRA_SRC_DIR", + ).try_into().expect("candelabra source directory has non-utf8 components in it (???)")) + } +} diff --git a/src/crates/cli/src/project.rs b/src/crates/cli/src/project.rs new file mode 100644 index 0000000..8d04c94 --- /dev/null +++ b/src/crates/cli/src/project.rs @@ -0,0 +1,49 @@ +use std::collections::HashMap; + +use anyhow::{Context, Result}; +use cargo_metadata::{camino::Utf8PathBuf, Package}; +use glob::glob; + +use crate::candidates::{CandidatesStore, ConTypeName, ImplName}; + +/// A single package or crate that we wish to process. +#[derive(Debug, Clone)] +pub struct Project { + pub name: String, + source_dir: Utf8PathBuf, +} + +impl Project { + pub fn new(package: Package) -> Self { + Project { + name: package.name.clone(), + source_dir: package.manifest_path.parent().unwrap().to_path_buf(), + } + } + + /// Find all primrose files (`.pr.rs`) in this project. + pub fn find_primrose_files(&self) -> Result<Vec<Utf8PathBuf>> { + glob(&format!("{}/**/*.pr.rs", self.source_dir)) + .unwrap() + .flat_map(|p| p.map(|p| p.try_into())) + .collect::<Result<Vec<_>, _>>() + .context("error finding primrose files in project") + } + + /// Run primrose on all files in this project. + /// Returns a list of all candidates for each container type in each file. 
+ pub fn get_all_candidates(&self, store: &CandidatesStore) -> Result<ProjectCandidateList> { + let mut all_candidates = HashMap::new(); + for file in self.find_primrose_files()? { + let result = store.get(&file)?; + + for (con_type_id, candidates) in result { + all_candidates.insert((file.clone(), con_type_id.clone()), candidates); + } + } + + Ok(all_candidates) + } +} + +pub type ProjectCandidateList = HashMap<(Utf8PathBuf, ConTypeName), Vec<ImplName>>; diff --git a/src/crates/library/Cargo.toml b/src/crates/library/Cargo.toml new file mode 100644 index 0000000..5231ed9 --- /dev/null +++ b/src/crates/library/Cargo.toml @@ -0,0 +1,10 @@ +[package] +name = "primrose-library" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +im = "10.2.0" +proptest = "1.0.0"
\ No newline at end of file diff --git a/src/crates/library/src/eager_sorted_vector.rs b/src/crates/library/src/eager_sorted_vector.rs new file mode 100644 index 0000000..9be45d0 --- /dev/null +++ b/src/crates/library/src/eager_sorted_vector.rs @@ -0,0 +1,335 @@ +/*LIBSPEC-NAME* +rust-eager-sorted-vec-spec primrose_library::EagerSortedVec +*ENDLIBSPEC-NAME*/ + +use crate::traits::{Container, Indexable}; + +use std::slice::Iter; +use std::vec::Vec; + +/// A Sorted Vector +#[derive(Debug, Clone)] +pub struct EagerSortedVec<T> { + v: Vec<T>, +} + +impl<T: Ord> EagerSortedVec<T> { + pub fn from_vec(mut v: Vec<T>) -> EagerSortedVec<T> { + v.sort(); + EagerSortedVec { v } + } + + pub fn new() -> EagerSortedVec<T> { + EagerSortedVec { v: Vec::new() } + } + + pub fn len(&mut self) -> usize { + self.v.len() + } + + pub fn contains(&mut self, x: &T) -> bool { + self.v.binary_search(x).is_ok() + } + + pub fn is_empty(&mut self) -> bool { + self.len() == 0 + } + + pub fn push(&mut self, value: T) { + let index = self.v.binary_search(&value).unwrap_or_else(|i| i); + self.v.insert(index, value); + } + + pub fn pop(&mut self) -> Option<T> { + self.v.pop() + } + + pub fn remove(&mut self, index: usize) -> T { + self.v.remove(index) + } + + pub fn clear(&mut self) { + self.v.clear() + } + + pub fn first(&mut self) -> Option<&T> { + self.v.first() + } + + pub fn last(&mut self) -> Option<&T> { + self.v.last() + } + + pub fn iter(&mut self) -> Iter<'_, T> { + self.v.iter() + } + + pub fn to_vec(self) -> Vec<T> { + self.v + } +} + +/*IMPL* +Container +*ENDIMPL*/ +impl<T: Ord> Container<T> for EagerSortedVec<T> { + /*LIBSPEC* + /*OPNAME* + len op-len pre-len post-len + *ENDOPNAME*/ + (define (op-len xs) (cons xs (length xs))) + (define (pre-len xs) (equal? xs (sort xs <))) + (define (post-len xs r) (equal? r (op-len xs))) + *ENDLIBSPEC*/ + fn len(&mut self) -> usize { + EagerSortedVec::len(self) + } + + /*LIBSPEC* + /*OPNAME* + contains op-contains pre-contains post-contains + *ENDOPNAME*/ + (define (op-contains xs x) + (cond + [(list? (member x xs)) (cons xs #t)] + [else (cons xs #f)])) + (define (pre-contains xs) (equal? xs (sort xs <))) + (define (post-contains xs x r) (equal? r (op-contains xs x))) + *ENDLIBSPEC*/ + fn contains(&mut self, x: &T) -> bool { + EagerSortedVec::contains(self, x) + } + + /*LIBSPEC* + /*OPNAME* + is-empty op-is-empty pre-is-empty post-is-empty + *ENDOPNAME*/ + (define (op-is-empty xs) (cons xs (null? xs))) + (define (pre-is-empty xs) (equal? xs (sort xs <))) + (define (post-is-empty xs r) (equal? r (op-is-empty xs))) + *ENDLIBSPEC*/ + fn is_empty(&mut self) -> bool { + EagerSortedVec::is_empty(self) + } + + /*LIBSPEC* + /*OPNAME* + clear op-clear pre-clear post-clear + *ENDOPNAME*/ + (define (op-clear xs) null) + (define (pre-clear xs) (equal? xs (sort xs <))) + (define (post-clear xs r) (equal? r (op-clear xs))) + *ENDLIBSPEC*/ + fn clear(&mut self) { + EagerSortedVec::clear(self); + } + + /*LIBSPEC* + /*OPNAME* + insert op-insert pre-insert post-insert + *ENDOPNAME*/ + (define (op-insert xs x) (sort (append xs (list x)) <)) + (define (pre-insert xs) (equal? xs (sort xs <))) + (define (post-insert xs x ys) (equal? ys (op-insert xs x))) + *ENDLIBSPEC*/ + fn insert(&mut self, elt: T) { + EagerSortedVec::push(self, elt); + } + + /*LIBSPEC* + /*OPNAME* + remove op-remove pre-remove post-remove + *ENDOPNAME*/ + (define (op-remove xs x) + (cond + [(list? (member x xs)) (cons (remove x xs) x)] + [else (cons xs null)])) + (define (pre-remove xs) (equal? 
xs (sort xs <))) + (define (post-remove xs r) (equal? r (op-remove xs))) + *ENDLIBSPEC*/ + fn remove(&mut self, elt: T) -> Option<T> { + self.iter() + .position(|x| *x == elt) + .map(|index| self.remove(index)) + } +} + +/*IMPL* +Indexable +*ENDIMPL*/ +impl<T: Ord> Indexable<T> for EagerSortedVec<T> { + /*LIBSPEC* + /*OPNAME* + first op-first pre-first post-first + *ENDOPNAME*/ + (define (op-first xs) + (cond + [(null? xs) (cons xs null)] + [else (cons xs (first xs))])) + (define (pre-first xs) (equal? xs (sort xs <))) + (define (post-first xs r) (equal? r (op-first xs))) + *ENDLIBSPEC*/ + fn first(&mut self) -> Option<&T> { + EagerSortedVec::first(self) + } + + /*LIBSPEC* + /*OPNAME* + last op-last pre-last post-last + *ENDOPNAME*/ + (define (op-last xs) + (cond + [(null? xs) (cons xs null)] + [else (cons xs (last xs))])) + (define (pre-last xs) (equal? xs (sort xs <))) + (define (post-last xs r) (equal? r (op-last xs))) + *ENDLIBSPEC*/ + fn last(&mut self) -> Option<&T> { + EagerSortedVec::last(self) + } + + /*LIBSPEC* + /*OPNAME* + nth op-nth pre-nth post-nth + *ENDOPNAME*/ + (define (op-nth xs n) + (cond + [(>= n (length xs)) (cons xs null)] + [(< n 0) (cons xs null)] + [else (cons xs (list-ref xs n))])) + (define (pre-nth xs) (equal? xs (sort xs <))) + (define (post-nth xs n r) (equal? r (op-nth xs n))) + *ENDLIBSPEC*/ + fn nth(&mut self, n: usize) -> Option<&T> { + EagerSortedVec::iter(self).nth(n) + } +} + +impl<T: Ord> Default for EagerSortedVec<T> { + fn default() -> Self { + Self::new() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::proptest::{strategies::eager_sorted_vec, *}; + + use im::conslist::ConsList; + use proptest::prelude::*; + + fn abstraction<T>(v: EagerSortedVec<T>) -> ConsList<T> + where + T: Ord, + { + let list: ConsList<T> = ConsList::from(v.to_vec()); + list + } + + proptest! { + #![proptest_config(ProptestConfig { + cases: 100, .. 
ProptestConfig::default() + })] + + #[test] + fn test_eager_sorted_vec_len(ref mut v in eager_sorted_vec(".*", 0..100)) { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, abs_list.sort()); + //post + assert_eq!(Container::<String>::len(v), abs_list.len()); + assert_eq!(abstraction(v.clone()), abs_list); + } + + #[test] + fn test_eager_sorted_vec_contains(ref mut v in eager_sorted_vec(".*", 0..100), a in ".*") { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, abs_list.sort()); + //post + assert_eq!(Container::<String>::contains(v, &a), contains(&abs_list, &a)); + assert_eq!(abstraction(v.clone()), abs_list); + } + + #[test] + fn test_eager_sorted_vec_is_empty(ref mut v in eager_sorted_vec(".*", 0..100)) { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, abs_list.sort()); + //post + assert_eq!(Container::<String>::is_empty(v), abs_list.is_empty()); + assert_eq!(abstraction(v.clone()), abs_list); + } + + #[test] + fn test_eager_sorted_vec_insert(ref mut v in eager_sorted_vec(".*", 0..100), a in ".*") { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, abs_list.sort()); + //post + let after_list = abs_list.append(conslist![a.clone()]).sort(); + Container::<String>::insert(v, a.clone()); + assert_eq!(abstraction(v.clone()), after_list); + } + + #[test] + fn test_eager_sorted_vec_clear(ref mut v in eager_sorted_vec(".*", 0..100)) { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, abs_list.sort()); + //post + let after_list = clear(&abs_list); + Container::<String>::clear(v); + assert_eq!(abstraction(v.clone()), after_list); + } + + #[test] + fn test_eager_sorted_vec_remove(ref mut v in eager_sorted_vec(".*", 0..100), a in ".*") { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, abs_list.sort()); + //post + let (after_list, abs_elem) = remove(&abs_list, a.clone()); + let elem = Container::<String>::remove(v, a.clone()); + assert_eq!(abstraction(v.clone()), after_list); + assert_eq!(elem, abs_elem); + } + + #[test] + fn test_eager_sorted_vec_first(ref mut v in eager_sorted_vec(".*", 0..100)) { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, abs_list.sort()); + //post + let elem = Indexable::<String>::first(v); + let abs_first = first(&abs_list); + assert_eq!(elem, abs_first); + assert_eq!(abstraction(v.clone()), abs_list); + } + + #[test] + fn test_eager_sorted_vec_last(ref mut v in eager_sorted_vec(".*", 0..100)) { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, abs_list.sort()); + //post + let elem = Indexable::<String>::last(v); + let abs_last = last(&abs_list); + assert_eq!(elem, abs_last); + assert_eq!(abstraction(v.clone()), abs_list); + } + + #[test] + fn test_eager_sorted_vec_nth(ref mut v in eager_sorted_vec(".*", 0..100), n in 0usize..100) { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, abs_list.sort()); + //post + let elem = Indexable::<String>::nth(v, n); + let abs_nth = nth(&abs_list, n); + assert_eq!(elem, abs_nth); + assert_eq!(abstraction(v.clone()), abs_list); + } + } +} diff --git a/src/crates/library/src/eager_unique_vector.rs b/src/crates/library/src/eager_unique_vector.rs new file mode 100644 index 0000000..68ba78f --- /dev/null +++ b/src/crates/library/src/eager_unique_vector.rs @@ -0,0 +1,341 @@ +/*LIBSPEC-NAME* +rust-eager-unique-vec-spec primrose_library::EagerUniqueVec +*ENDLIBSPEC-NAME*/ + +use crate::traits::{Container, Indexable}; + +use std::slice::Iter; +use 
std::vec::Vec; + +/// A Unique Vector +#[derive(Debug, Clone)] +pub struct EagerUniqueVec<T> { + v: Vec<T>, +} + +impl<T: PartialEq> EagerUniqueVec<T> { + pub fn from_vec(v: Vec<T>) -> EagerUniqueVec<T> { + let mut vec = Vec::<T>::new(); + for i in v { + if !vec.contains(&i) { + vec.push(i); + } + } + EagerUniqueVec { v: vec } + } + + pub fn new() -> EagerUniqueVec<T> { + EagerUniqueVec { v: Vec::new() } + } + + pub fn len(&mut self) -> usize { + self.v.len() + } + + pub fn contains(&mut self, x: &T) -> bool { + self.v.contains(x) + } + + pub fn is_empty(&mut self) -> bool { + self.len() == 0 + } + + // Duplicated elements will be discarded + pub fn push(&mut self, value: T) { + if !self.contains(&value) { + self.v.push(value); + } + } + + pub fn pop(&mut self) -> Option<T> { + self.v.pop() + } + + pub fn remove(&mut self, index: usize) -> T { + self.v.remove(index) + } + + pub fn clear(&mut self) { + self.v.clear() + } + + pub fn first(&mut self) -> Option<&T> { + self.v.first() + } + + pub fn last(&mut self) -> Option<&T> { + self.v.last() + } + + pub fn iter(&mut self) -> Iter<'_, T> { + self.v.iter() + } + + pub fn to_vec(self) -> Vec<T> { + self.v + } +} + +/*IMPL* +Container +*ENDIMPL*/ +impl<T: PartialEq> Container<T> for EagerUniqueVec<T> { + /*LIBSPEC* + /*OPNAME* + len op-len pre-len post-len + *ENDOPNAME*/ + (define (op-len xs) (cons xs (length xs))) + (define (pre-len xs) (equal? xs (remove-duplicates xs))) + (define (post-len xs r) (equal? r (op-len xs))) + *ENDLIBSPEC*/ + fn len(&mut self) -> usize { + EagerUniqueVec::len(self) + } + + /*LIBSPEC* + /*OPNAME* + contains op-contains pre-contains post-contains + *ENDOPNAME*/ + (define (op-contains xs x) + (cond + [(list? (member x xs)) (cons xs #t)] + [else (cons xs #f)])) + (define (pre-contains xs) (equal? xs (remove-duplicates xs))) + (define (post-contains xs x r) (equal? r (op-contains xs x))) + *ENDLIBSPEC*/ + fn contains(&mut self, x: &T) -> bool { + EagerUniqueVec::contains(self, x) // use fully qualified syntax to avoid function name collision + } + + /*LIBSPEC* + /*OPNAME* + is-empty op-is-empty pre-is-empty post-is-empty + *ENDOPNAME*/ + (define (op-is-empty xs) (cons xs (null? xs))) + (define (pre-is-empty xs) (equal? xs (remove-duplicates xs))) + (define (post-is-empty xs r) (equal? r (op-is-empty xs))) + *ENDLIBSPEC*/ + fn is_empty(&mut self) -> bool { + EagerUniqueVec::is_empty(self) + } + + /*LIBSPEC* + /*OPNAME* + clear op-clear pre-clear post-clear + *ENDOPNAME*/ + (define (op-clear xs) null) + (define (pre-clear xs) (equal? xs (remove-duplicates xs))) + (define (post-clear xs r) (equal? r (op-clear xs))) + *ENDLIBSPEC*/ + fn clear(&mut self) { + EagerUniqueVec::clear(self); + } + + /*LIBSPEC* + /*OPNAME* + insert op-insert pre-insert post-insert + *ENDOPNAME*/ + (define (op-insert xs x) (remove-duplicates (append xs (list x)))) + (define (pre-insert xs) (equal? xs (remove-duplicates xs))) + (define (post-insert xs x ys) (equal? ys (op-insert xs x))) + *ENDLIBSPEC*/ + fn insert(&mut self, elt: T) { + EagerUniqueVec::push(self, elt); + } + + /*LIBSPEC* + /*OPNAME* + remove op-remove pre-remove post-remove + *ENDOPNAME*/ + (define (op-remove xs x) + (cond + [(list? (member x xs)) (cons (remove x xs) x)] + [else (cons xs null)])) + (define (pre-remove xs) (equal? xs (remove-duplicates xs))) + (define (post-remove xs r) (equal? 
r (op-remove xs))) + *ENDLIBSPEC*/ + fn remove(&mut self, elt: T) -> Option<T> { + self.iter() + .position(|x| *x == elt) + .map(|index| self.remove(index)) + } +} + +/*IMPL* +Indexable +*ENDIMPL*/ +impl<T: PartialEq> Indexable<T> for EagerUniqueVec<T> { + /*LIBSPEC* + /*OPNAME* + first op-first pre-first post-first + *ENDOPNAME*/ + (define (op-first xs) + (cond + [(null? xs) (cons xs null)] + [else (cons xs (first xs))])) + (define (pre-first xs) #t) + (define (post-first xs r) (equal? r (op-first xs))) + *ENDLIBSPEC*/ + fn first(&mut self) -> Option<&T> { + EagerUniqueVec::first(self) + } + + /*LIBSPEC* + /*OPNAME* + last op-last pre-last post-last + *ENDOPNAME*/ + (define (op-last xs) + (cond + [(null? xs) (cons xs null)] + [else (cons xs (last xs))])) + (define (pre-last xs) #t) + (define (post-last xs r) (equal? r (op-last xs))) + *ENDLIBSPEC*/ + fn last(&mut self) -> Option<&T> { + EagerUniqueVec::last(self) + } + + /*LIBSPEC* + /*OPNAME* + nth op-nth pre-nth post-nth + *ENDOPNAME*/ + (define (op-nth xs n) + (cond + [(>= n (length xs)) (cons xs null)] + [(< n 0) (cons xs null)] + [else (cons xs (list-ref xs n))])) + (define (pre-nth xs) #t) + (define (post-nth xs n r) (equal? r (op-nth xs n))) + *ENDLIBSPEC*/ + fn nth(&mut self, n: usize) -> Option<&T> { + EagerUniqueVec::iter(self).nth(n) + } +} + +impl<T: Ord> Default for EagerUniqueVec<T> { + fn default() -> Self { + Self::new() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::proptest::{strategies::eager_unique_vec, *}; + use im::conslist::ConsList; + use proptest::prelude::*; + + fn abstraction<T>(v: EagerUniqueVec<T>) -> ConsList<T> + where + T: PartialEq, + { + let list: ConsList<T> = ConsList::from(v.to_vec()); + list + } + + proptest! { + #![proptest_config(ProptestConfig { + cases: 100, .. 
ProptestConfig::default() + })] + + #[test] + fn test_eager_unique_vec_len(ref mut v in eager_unique_vec(".*", 0..100)) { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list)); + //post + assert_eq!(Container::<String>::len(v), abs_list.len()); + assert_eq!(abstraction(v.clone()), abs_list); + } + + #[test] + fn test_eager_unique_vec_contains(ref mut v in eager_unique_vec(".*", 0..100), a in ".*") { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list)); + //post + assert_eq!(Container::<String>::contains(v, &a), contains(&abs_list, &a)); + assert_eq!(abstraction(v.clone()), abs_list); + } + + #[test] + fn test_eager_unique_vec_is_empty(ref mut v in eager_unique_vec(".*", 0..100)) { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list)); + //post + assert_eq!(Container::<String>::is_empty(v), abs_list.is_empty()); + assert_eq!(abstraction(v.clone()), abs_list); + } + + #[test] + fn test_eager_unique_vec_insert(ref mut v in eager_unique_vec(".*", 0..100), a in ".*") { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list)); + //post + let after_list = unique(&abs_list.append(conslist![a.clone()])); + Container::<String>::insert(v, a.clone()); + assert_eq!(abstraction(v.clone()), after_list); + } + + #[test] + fn test_eager_unique_vec_clear(ref mut v in eager_unique_vec(".*", 0..100)) { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list)); + //post + let after_list = clear(&abs_list); + Container::<String>::clear(v); + assert_eq!(abstraction(v.clone()), after_list); + } + + #[test] + fn test_eager_unique_vec_remove(ref mut v in eager_unique_vec(".*", 0..100), a in ".*") { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list)); + //post + let (after_list, abs_elem) = remove(&abs_list, a.clone()); + let elem = Container::<String>::remove(v, a.clone()); + assert_eq!(abstraction(v.clone()), after_list); + assert_eq!(elem, abs_elem); + } + + #[test] + fn test_eager_unique_vec_first(ref mut v in eager_unique_vec(".*", 0..100)) { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list)); + //post + let elem = Indexable::<String>::first(v); + let abs_first = first(&abs_list); + assert_eq!(elem, abs_first); + assert_eq!(abstraction(v.clone()), abs_list); + } + + #[test] + fn test_eager_unique_vec_last(ref mut v in eager_unique_vec(".*", 0..100)) { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list)); + //post + let elem = Indexable::<String>::last(v); + let abs_last = last(&abs_list); + assert_eq!(elem, abs_last); + assert_eq!(abstraction(v.clone()), abs_list); + } + + #[test] + fn test_eager_unique_vec_nth(ref mut v in eager_unique_vec(".*", 0..100), n in 0usize..100) { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list)); + //post + let elem = Indexable::<String>::nth(v, n); + let abs_nth = nth(&abs_list, n); + assert_eq!(elem, abs_nth); + assert_eq!(abstraction(v.clone()), abs_list); + } + } +} diff --git a/src/crates/library/src/hashset.rs b/src/crates/library/src/hashset.rs new file mode 100644 index 0000000..d7d8915 --- /dev/null +++ b/src/crates/library/src/hashset.rs @@ -0,0 +1,178 @@ +/*LIBSPEC-NAME* +rust-hashset-spec std::collections::HashSet +*ENDLIBSPEC-NAME*/ + +use crate::traits::Container; +pub use std::collections::HashSet; +use std::hash::Hash; + +/*IMPL* 
+Container +*ENDIMPL*/ +impl<T: Ord + Hash> Container<T> for HashSet<T> { + /*LIBSPEC* + /*OPNAME* + len op-len pre-len post-len + *ENDOPNAME*/ + (define (op-len xs) (cons xs (length xs))) + (define (pre-len xs) (equal? xs (remove-duplicates (sort xs <)))) + (define (post-len xs r) (equal? r (op-len xs))) + *ENDLIBSPEC*/ + fn len(&mut self) -> usize { + HashSet::len(self) + } + + /*LIBSPEC* + /*OPNAME* + contains op-contains pre-contains post-contains + *ENDOPNAME*/ + (define (op-contains xs x) + (cond + [(list? (member x xs)) (cons xs #t)] + [else (cons xs #f)])) + (define (pre-contains xs) (equal? xs (remove-duplicates (sort xs <)))) + (define (post-contains xs x r) (equal? r (op-contains xs x))) + *ENDLIBSPEC*/ + fn contains(&mut self, x: &T) -> bool { + HashSet::contains(self, x) + } + + /*LIBSPEC* + /*OPNAME* + is-empty op-is-empty pre-is-empty post-is-empty + *ENDOPNAME*/ + (define (op-is-empty xs) (cons xs (null? xs))) + (define (pre-is-empty xs) (equal? xs (remove-duplicates (sort xs <)))) + (define (post-is-empty xs r) (equal? r (op-is-empty xs))) + *ENDLIBSPEC*/ + fn is_empty(&mut self) -> bool { + HashSet::is_empty(self) + } + + /*LIBSPEC* + /*OPNAME* + clear op-clear pre-clear post-clear + *ENDOPNAME*/ + (define (op-clear xs) null) + (define (pre-clear xs) (equal? xs (remove-duplicates (sort xs <)))) + (define (post-clear xs r) (equal? r (op-clear xs))) + *ENDLIBSPEC*/ + fn clear(&mut self) { + HashSet::clear(self); + } + + /*LIBSPEC* + /*OPNAME* + insert op-insert pre-insert post-insert + *ENDOPNAME*/ + (define (op-insert xs x) (remove-duplicates (sort (append xs (list x)) <))) + (define (pre-insert xs) (equal? xs (remove-duplicates (sort xs <)))) + (define (post-insert xs x ys) (equal? ys (op-insert xs x))) + *ENDLIBSPEC*/ + fn insert(&mut self, elt: T) { + HashSet::insert(self, elt); + } + + /*LIBSPEC* + /*OPNAME* + remove op-remove pre-remove post-remove + *ENDOPNAME*/ + (define (op-remove xs x) + (cond + [(list? (member x xs)) (cons (remove x xs) x)] + [else (cons xs null)])) + (define (pre-remove xs) (equal? xs (remove-duplicates (sort xs <)))) + (define (post-remove xs r) (equal? r (op-remove xs))) + *ENDLIBSPEC*/ + fn remove(&mut self, elt: T) -> Option<T> { + match HashSet::remove(self, &elt) { + true => Some(elt), + false => None, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::proptest::*; + + use im::conslist::ConsList; + use proptest::{collection::hash_set, prelude::*}; + use std::iter::FromIterator; + + fn abstraction<T: Ord>(h: HashSet<T>) -> ConsList<T> { + let list: ConsList<T> = ConsList::from_iter(h); + list.sort() + } + + proptest! { + #![proptest_config(ProptestConfig { + cases: 100, .. 
ProptestConfig::default() + })] + + #[test] + fn test_hashset_len(ref mut h in hash_set(".*", 0..100)) { + let abs_list = abstraction(h.clone()); + // pre: our list model is a sorted and unique list + assert_eq!(abs_list, unique(&abs_list.sort())); + //post + assert_eq!(Container::<String>::len(h), abs_list.len()); + assert_eq!(abstraction(h.clone()), abs_list); + } + + #[test] + fn test_hashset_contains(ref mut h in hash_set(".*", 0..100), a in ".*") { + let abs_list = abstraction(h.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list.sort())); + //post + assert_eq!(Container::<String>::contains(h, &a), contains(&abs_list, &a)); + assert_eq!(abstraction(h.clone()), abs_list); + } + + #[test] + fn test_hashset_is_empty(ref mut h in hash_set(".*", 0..100)) { + let abs_list = abstraction(h.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list.sort())); + //post + assert_eq!(Container::<String>::is_empty(h), abs_list.is_empty()); + assert_eq!(abstraction(h.clone()), abs_list); + } + + #[test] + fn test_hashset_insert(ref mut h in hash_set(".*", 0..100), a in ".*") { + let abs_list = abstraction(h.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list.sort())); + //post + let after_list = unique(&abs_list.append(conslist![a.clone()]).sort()); + Container::<String>::insert(h, a.clone()); + assert_eq!(abstraction(h.clone()), after_list); + } + + #[test] + fn test_hash_clear(ref mut h in hash_set(".*", 0..100)) { + let abs_list = abstraction(h.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list.sort())); + //post + let after_list = clear(&abs_list); + Container::<String>::clear(h); + assert_eq!(abstraction(h.clone()), after_list); + } + + #[test] + fn test_hashset_remove(ref mut h in hash_set(".*", 0..100), a in ".*") { + let abs_list = abstraction(h.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list.sort())); + //post + let (after_list, abs_elem) = remove(&abs_list, a.clone()); + let elem = Container::<String>::remove(h, a.clone()); + assert_eq!(abstraction(h.clone()), after_list); + assert_eq!(elem, abs_elem); + } + } +} diff --git a/src/crates/library/src/lazy_sorted_vector.rs b/src/crates/library/src/lazy_sorted_vector.rs new file mode 100644 index 0000000..5ead065 --- /dev/null +++ b/src/crates/library/src/lazy_sorted_vector.rs @@ -0,0 +1,371 @@ +/*LIBSPEC-NAME* +rust-lazy-sorted-vec-spec primrose_library::LazySortedVec +*ENDLIBSPEC-NAME*/ + +use crate::traits::{Container, Indexable}; + +use std::slice::Iter; +pub use std::vec::Vec; + +/// A Sorted Vector +#[derive(Debug, Clone)] +pub struct LazySortedVec<T> { + v: Vec<T>, + modified: bool, +} + +impl<T: Ord> LazySortedVec<T> { + pub fn from_vec(mut v: Vec<T>) -> LazySortedVec<T> { + v.sort(); + LazySortedVec { v, modified: false } + } + + pub fn new() -> LazySortedVec<T> { + LazySortedVec { + v: Vec::new(), + modified: false, + } + } + + pub fn len(&mut self) -> usize { + if self.modified { + self.v.sort(); + self.modified = false; + } + self.v.len() + } + + pub fn contains(&mut self, x: &T) -> bool { + if self.modified { + self.v.sort(); + self.modified = false; + } + self.v.binary_search(x).is_ok() + } + + pub fn is_empty(&mut self) -> bool { + if self.modified { + self.v.sort(); + self.modified = false; + } + self.len() == 0 + } + + pub fn push(&mut self, value: T) { + self.v.push(value); + self.modified = true; + } + + pub fn pop(&mut self) -> Option<T> { + if self.modified { + self.v.sort(); + self.modified = false; + } + self.v.pop() + } + + pub fn remove(&mut self, index: usize) -> T { + if self.modified { + 
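+            // Lazily re-establish the sorted invariant: mutating operations such as
+            // `push` only set the `modified` flag, so read-style operations
+            // (len, contains, pop, remove, first, last) re-sort the backing Vec
+            // here, on demand, before relying on sorted order.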
self.v.sort(); + self.modified = false; + } + self.v.remove(index) + } + + pub fn clear(&mut self) { + self.v.clear() + } + + pub fn first(&mut self) -> Option<&T> { + if self.modified { + self.v.sort(); + self.modified = false; + } + self.v.first() + } + + pub fn last(&mut self) -> Option<&T> { + if self.modified { + self.v.sort(); + self.modified = false; + } + self.v.last() + } + + pub fn get(&mut self, index: usize) -> Option<&T> { + self.v.get(index) + } + + pub fn iter(&mut self) -> Iter<T> { + self.v.iter() + } + + pub fn to_vec(self) -> Vec<T> { + self.v + } +} + +/*IMPL* +Container +*ENDIMPL*/ +impl<T: Ord> Container<T> for LazySortedVec<T> { + /*LIBSPEC* + /*OPNAME* + len op-len pre-len post-len + *ENDOPNAME*/ + (define (op-len xs) (cons xs (length xs))) + (define (pre-len xs) (equal? xs (sort xs <))) + (define (post-len xs r) (equal? r (op-len xs))) + *ENDLIBSPEC*/ + fn len(&mut self) -> usize { + LazySortedVec::len(self) + } + + /*LIBSPEC* + /*OPNAME* + contains op-contains pre-contains post-contains + *ENDOPNAME*/ + (define (op-contains xs x) + (cond + [(list? (member x xs)) (cons xs #t)] + [else (cons xs #f)])) + (define (pre-contains xs) (equal? xs (sort xs <))) + (define (post-contains xs x r) (equal? r (op-contains xs x))) + *ENDLIBSPEC*/ + fn contains(&mut self, x: &T) -> bool { + LazySortedVec::contains(self, x) + } + + /*LIBSPEC* + /*OPNAME* + is-empty op-is-empty pre-is-empty post-is-empty + *ENDOPNAME*/ + (define (op-is-empty xs) (cons xs (null? xs))) + (define (pre-is-empty xs) (equal? xs (sort xs <))) + (define (post-is-empty xs r) (equal? r (op-is-empty xs))) + *ENDLIBSPEC*/ + fn is_empty(&mut self) -> bool { + LazySortedVec::is_empty(self) + } + + /*LIBSPEC* + /*OPNAME* + clear op-clear pre-clear post-clear + *ENDOPNAME*/ + (define (op-clear xs) null) + (define (pre-clear xs) (equal? xs (sort xs <))) + (define (post-clear xs r) (equal? r (op-clear xs))) + *ENDLIBSPEC*/ + fn clear(&mut self) { + LazySortedVec::clear(self); + } + + /*LIBSPEC* + /*OPNAME* + insert op-insert pre-insert post-insert + *ENDOPNAME*/ + (define (op-insert xs x) (sort (append xs (list x)) <)) + (define (pre-insert xs) (equal? xs (sort xs <))) + (define (post-insert xs x ys) (equal? ys (op-insert xs x))) + *ENDLIBSPEC*/ + fn insert(&mut self, elt: T) { + LazySortedVec::push(self, elt); + } + + /*LIBSPEC* + /*OPNAME* + remove op-remove pre-remove post-remove + *ENDOPNAME*/ + (define (op-remove xs x) + (cond + [(list? (member x xs)) (cons (remove x xs) x)] + [else (cons xs null)])) + (define (pre-remove xs) (equal? xs (sort xs <))) + (define (post-remove xs r) (equal? r (op-remove xs))) + *ENDLIBSPEC*/ + fn remove(&mut self, elt: T) -> Option<T> { + self.iter() + .position(|x| *x == elt) + .map(|index| self.remove(index)) + } +} + +/*IMPL* +Indexable +*ENDIMPL*/ +impl<T: Ord> Indexable<T> for LazySortedVec<T> { + /*LIBSPEC* + /*OPNAME* + first op-first pre-first post-first + *ENDOPNAME*/ + (define (op-first xs) + (cond + [(null? xs) (cons xs null)] + [else (cons xs (first xs))])) + (define (pre-first xs) (equal? xs (sort xs <))) + (define (post-first xs r) (equal? r (op-first xs))) + *ENDLIBSPEC*/ + fn first(&mut self) -> Option<&T> { + LazySortedVec::first(self) + } + + /*LIBSPEC* + /*OPNAME* + last op-last pre-last post-last + *ENDOPNAME*/ + (define (op-last xs) + (cond + [(null? xs) (cons xs null)] + [else (cons xs (last xs))])) + (define (pre-last xs) (equal? xs (sort xs <))) + (define (post-last xs r) (equal? 
r (op-last xs))) + *ENDLIBSPEC*/ + fn last(&mut self) -> Option<&T> { + LazySortedVec::last(self) + } + + /*LIBSPEC* + /*OPNAME* + nth op-nth pre-nth post-nth + *ENDOPNAME*/ + (define (op-nth xs n) + (cond + [(>= n (length xs)) (cons xs null)] + [(< n 0) (cons xs null)] + [else (cons xs (list-ref xs n))])) + (define (pre-nth xs) (equal? xs (sort xs <))) + (define (post-nth xs n r) (equal? r (op-nth xs n))) + *ENDLIBSPEC*/ + fn nth(&mut self, n: usize) -> Option<&T> { + LazySortedVec::iter(self).nth(n) + } +} + +impl<T: Ord> Default for LazySortedVec<T> { + fn default() -> Self { + Self::new() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::proptest::{strategies::lazy_sorted_vec, *}; + use proptest::prelude::*; + + use im::conslist::ConsList; + + fn abstraction<T>(v: LazySortedVec<T>) -> ConsList<T> + where + T: Ord, + { + let list: ConsList<T> = ConsList::from(v.to_vec()); + list.sort() + } + + proptest! { + #![proptest_config(ProptestConfig { + cases: 100, .. ProptestConfig::default() + })] + + #[test] + fn test_lazy_sorted_vec_len(ref mut v in lazy_sorted_vec(".*", 0..100)) { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, abs_list.sort()); + //post + assert_eq!(Container::<String>::len(v), abs_list.len()); + assert_eq!(abstraction(v.clone()), abs_list); + } + + #[test] + fn test_lazy_sorted_vec_contains(ref mut v in lazy_sorted_vec(".*", 0..100), a in ".*") { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, abs_list.sort()); + //post + assert_eq!(Container::<String>::contains(v, &a), contains(&abs_list, &a)); + assert_eq!(abstraction(v.clone()), abs_list); + } + + #[test] + fn test_lazy_sorted_vec_is_empty(ref mut v in lazy_sorted_vec(".*", 0..100)) { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, abs_list.sort()); + //post + assert_eq!(Container::<String>::is_empty(v), abs_list.is_empty()); + assert_eq!(abstraction(v.clone()), abs_list); + } + + #[test] + fn test_lazy_sorted_vec_insert(ref mut v in lazy_sorted_vec(".*", 0..100), a in ".*") { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, abs_list.sort()); + //post + let after_list = abs_list.append(conslist![a.clone()]).sort(); + Container::<String>::insert(v, a.clone()); + assert_eq!(abstraction(v.clone()), after_list); + } + + #[test] + fn test_lazy_sorted_vec_clear(ref mut v in lazy_sorted_vec(".*", 0..100)) { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, abs_list.sort()); + //post + let after_list = clear(&abs_list); + Container::<String>::clear(v); + assert_eq!(abstraction(v.clone()), after_list); + } + + #[test] + fn test_lazy_sorted_vec_remove(ref mut v in lazy_sorted_vec(".*", 0..100), a in ".*") { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, abs_list.sort()); + //post + let (after_list, abs_elem) = remove(&abs_list, a.clone()); + let elem = Container::<String>::remove(v, a.clone()); + assert_eq!(abstraction(v.clone()), after_list); + assert_eq!(elem, abs_elem); + } + + #[test] + fn test_lazy_sorted_vec_first(ref mut v in lazy_sorted_vec(".*", 0..100)) { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, abs_list.sort()); + //post + let elem = Indexable::<String>::first(v); + let abs_first = first(&abs_list); + assert_eq!(elem, abs_first); + assert_eq!(abstraction(v.clone()), abs_list); + } + + #[test] + fn test_lazy_sorted_vec_last(ref mut v in lazy_sorted_vec(".*", 0..100)) { + let abs_list = abstraction(v.clone()); + //pre + 
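+            // The `//pre` assertion mirrors the Racket `pre-last` condition in the
+            // LIBSPEC block above (the abstract model list must already be sorted);
+            // the `//post` assertions then compare the concrete result against the
+            // abstract `last` operation on the model.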
assert_eq!(abs_list, abs_list.sort()); + //post + let elem = Indexable::<String>::last(v); + let abs_last = last(&abs_list); + assert_eq!(elem, abs_last); + assert_eq!(abstraction(v.clone()), abs_list); + } + + #[test] + fn test_lazy_sorted_vec_nth(ref mut v in lazy_sorted_vec(".*", 0..100), n in 0usize..100) { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, abs_list.sort()); + //post + let elem = Indexable::<String>::nth(v, n); + let abs_nth = nth(&abs_list, n); + assert_eq!(elem, abs_nth); + assert_eq!(abstraction(v.clone()), abs_list); + } + } +} diff --git a/src/crates/library/src/lazy_unique_vector.rs b/src/crates/library/src/lazy_unique_vector.rs new file mode 100644 index 0000000..9b5e60c --- /dev/null +++ b/src/crates/library/src/lazy_unique_vector.rs @@ -0,0 +1,370 @@ +/*LIBSPEC-NAME* +rust-lazy-unique-vec-spec primrose_library::LazyUniqueVec +*ENDLIBSPEC-NAME*/ + +use crate::traits::{Container, Indexable}; + +use std::slice::Iter; +use std::vec::Vec; + +/// A Unique and Ascending Vector +#[derive(Debug, Clone)] +pub struct LazyUniqueVec<T> { + v: Vec<T>, + modified: bool, +} + +impl<T: Ord> LazyUniqueVec<T> { + pub fn new() -> LazyUniqueVec<T> { + LazyUniqueVec { + v: Vec::new(), + modified: false, + } + } + + pub fn from_vec(mut v: Vec<T>) -> LazyUniqueVec<T> { + v.sort(); + v.dedup(); + LazyUniqueVec { v, modified: false } + } + + pub fn len(&mut self) -> usize { + if self.modified { + self.v.sort(); + self.v.dedup(); + self.modified = false; + } + self.v.len() + } + + pub fn contains(&mut self, x: &T) -> bool { + if self.modified { + self.v.sort(); + self.v.dedup(); + self.modified = false; + } + self.v.binary_search(x).is_ok() + } + + pub fn is_empty(&mut self) -> bool { + if self.modified { + self.v.sort(); + self.v.dedup(); + self.modified = false; + } + self.len() == 0 + } + + // Duplicated elements will be discarded + pub fn push(&mut self, value: T) { + self.v.push(value); + self.modified = true; + } + + pub fn pop(&mut self) -> Option<T> { + if self.modified { + self.v.sort(); + self.v.dedup(); + self.modified = false; + } + self.v.pop() + } + + pub fn remove(&mut self, index: usize) -> T { + if self.modified { + self.v.sort(); + self.v.dedup(); + self.modified = false; + } + self.v.remove(index) + } + + pub fn clear(&mut self) { + self.v.clear() + } + + pub fn first(&mut self) -> Option<&T> { + if self.modified { + self.v.sort(); + self.v.dedup(); + self.modified = false; + } + self.v.first() + } + + pub fn last(&mut self) -> Option<&T> { + if self.modified { + self.v.sort(); + self.v.dedup(); + self.modified = false; + } + self.v.last() + } + + pub fn iter(&mut self) -> Iter<'_, T> { + if self.modified { + self.v.sort(); + self.v.dedup(); + self.modified = false; + } + self.v.iter() + } + + pub fn to_vec(self) -> Vec<T> { + self.v + } +} + +/*IMPL* +Container +*ENDIMPL*/ +impl<T: Ord> Container<T> for LazyUniqueVec<T> { + /*LIBSPEC* + /*OPNAME* + len op-len pre-len post-len + *ENDOPNAME*/ + (define (op-len xs) (cons xs (length xs))) + (define (pre-len xs) (equal? xs (remove-duplicates (sort xs <)))) + (define (post-len xs r) (equal? r (op-len xs))) + *ENDLIBSPEC*/ + fn len(&mut self) -> usize { + LazyUniqueVec::len(self) + } + + /*LIBSPEC* + /*OPNAME* + contains op-contains pre-contains post-contains + *ENDOPNAME*/ + (define (op-contains xs x) + (cond + [(list? (member x xs)) (cons xs #t)] + [else (cons xs #f)])) + (define (pre-contains xs) (equal? xs (remove-duplicates (sort xs <)))) + (define (post-contains xs x r) (equal? 
r (op-contains xs x))) + *ENDLIBSPEC*/ + fn contains(&mut self, x: &T) -> bool { + LazyUniqueVec::contains(self, x) // use fully qualified syntax to avoid function name collision + } + + /*LIBSPEC* + /*OPNAME* + is-empty op-is-empty pre-is-empty post-is-empty + *ENDOPNAME*/ + (define (op-is-empty xs) (cons xs (null? xs))) + (define (pre-is-empty xs) (equal? xs (remove-duplicates (sort xs <)))) + (define (post-is-empty xs r) (equal? r (op-is-empty xs))) + *ENDLIBSPEC*/ + fn is_empty(&mut self) -> bool { + LazyUniqueVec::is_empty(self) + } + + /*LIBSPEC* + /*OPNAME* + clear op-clear pre-clear post-clear + *ENDOPNAME*/ + (define (op-clear xs) null) + (define (pre-clear xs) (equal? xs (remove-duplicates (sort xs <)))) + (define (post-clear xs r) (equal? r (op-clear xs))) + *ENDLIBSPEC*/ + fn clear(&mut self) { + LazyUniqueVec::clear(self); + } + + /*LIBSPEC* + /*OPNAME* + insert op-insert pre-insert post-insert + *ENDOPNAME*/ + (define (op-insert xs x) (remove-duplicates (sort (append xs (list x)) <))) + (define (pre-insert xs) (equal? xs (remove-duplicates (sort xs <)))) + (define (post-insert xs x ys) (equal? ys (op-insert xs x))) + *ENDLIBSPEC*/ + fn insert(&mut self, elt: T) { + LazyUniqueVec::push(self, elt); + } + + /*LIBSPEC* + /*OPNAME* + remove op-remove pre-remove post-remove + *ENDOPNAME*/ + (define (op-remove xs x) + (cond + [(list? (member x xs)) (cons (remove x xs) x)] + [else (cons xs null)])) + (define (pre-remove xs) (equal? xs (remove-duplicates (sort xs <)))) + (define (post-remove xs r) (equal? r (op-remove xs))) + *ENDLIBSPEC*/ + fn remove(&mut self, elt: T) -> Option<T> { + self.iter() + .position(|x| *x == elt) + .map(|index| self.remove(index)) + } +} + +/*IMPL* +Indexable +*ENDIMPL*/ +impl<T: Ord> Indexable<T> for LazyUniqueVec<T> { + /*LIBSPEC* + /*OPNAME* + first op-first pre-first post-first + *ENDOPNAME*/ + (define (op-first xs) + (cond + [(null? xs) (cons xs null)] + [else (cons xs (first xs))])) + (define (pre-first xs) (equal? xs (remove-duplicates (sort xs <)))) + (define (post-first xs r) (equal? r (op-first xs))) + *ENDLIBSPEC*/ + fn first(&mut self) -> Option<&T> { + LazyUniqueVec::first(self) + } + + /*LIBSPEC* + /*OPNAME* + last op-last pre-last post-last + *ENDOPNAME*/ + (define (op-last xs) + (cond + [(null? xs) (cons xs null)] + [else (cons xs (last xs))])) + (define (pre-last xs) (equal? xs (remove-duplicates (sort xs <)))) + (define (post-last xs r) (equal? r (op-last xs))) + *ENDLIBSPEC*/ + fn last(&mut self) -> Option<&T> { + LazyUniqueVec::last(self) + } + + /*LIBSPEC* + /*OPNAME* + nth op-nth pre-nth post-nth + *ENDOPNAME*/ + (define (op-nth xs n) + (cond + [(>= n (length xs)) (cons xs null)] + [(< n 0) (cons xs null)] + [else (cons xs (list-ref xs n))])) + (define (pre-nth xs) (equal? xs (remove-duplicates (sort xs <)))) + (define (post-nth n xs r) (equal? r (op-nth xs n))) + *ENDLIBSPEC*/ + fn nth(&mut self, n: usize) -> Option<&T> { + LazyUniqueVec::iter(self).nth(n) + } +} + +impl<T: Ord> Default for LazyUniqueVec<T> { + fn default() -> Self { + Self::new() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::proptest::{strategies::lazy_unique_vec, *}; + use proptest::prelude::*; + + use im::conslist::ConsList; + + fn abstraction<T>(v: LazyUniqueVec<T>) -> ConsList<T> + where + T: Ord, + { + let list: ConsList<T> = ConsList::from(v.to_vec()); + unique(&list.sort()) + } + + proptest! { + #![proptest_config(ProptestConfig { + cases: 100, .. 
ProptestConfig::default() + })] + + #[test] + fn test_lazy_unique_vec_len(ref mut v in lazy_unique_vec(".*", 0..100)) { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list.sort())); + //post + assert_eq!(Container::<String>::len(v), abs_list.len()); + assert_eq!(abstraction(v.clone()), abs_list); + } + + #[test] + fn test_lazy_unique_vec_contains(ref mut v in lazy_unique_vec(".*", 0..100), a in ".*") { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list.sort())); + //post + assert_eq!(Container::<String>::contains(v, &a), contains(&abs_list, &a)); + assert_eq!(abstraction(v.clone()), abs_list); + } + + #[test] + fn test_lazy_unique_vec_insert(ref mut v in lazy_unique_vec(".*", 0..100), a in ".*") { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list.sort())); + //post + let after_list = unique(&abs_list.append(conslist![a.clone()]).sort()); + Container::<String>::insert(v, a.clone()); + assert_eq!(abstraction(v.clone()), after_list); + } + + #[test] + fn test_lazy_vec_is_empty(ref mut v in lazy_unique_vec(".*", 0..100)) { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list.sort())); + //post + assert_eq!(Container::<String>::is_empty(v), abs_list.is_empty()); + assert_eq!(abstraction(v.clone()), abs_list); + } + + #[test] + fn test_lazy_unique_vec_remove(ref mut v in lazy_unique_vec(".*", 0..100), a in ".*") { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list.sort())); + //post + let (after_list, abs_elem) = remove(&abs_list, a.clone()); + let elem = Container::<String>::remove(v, a.clone()); + assert_eq!(abstraction(v.clone()), after_list); + assert_eq!(elem, abs_elem); + } + + #[test] + fn test_lazy_unique_vec_first(ref mut v in lazy_unique_vec(".*", 0..100)) { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list.sort())); + //post + let elem = Indexable::<String>::first(v); + let abs_first = first(&abs_list); + assert_eq!(elem, abs_first); + assert_eq!(abstraction(v.clone()), abs_list); + } + + #[test] + fn test_lazy_unique_vec_last(ref mut v in lazy_unique_vec(".*", 0..100)) { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list.sort())); + //post + let elem = Indexable::<String>::last(v); + let abs_last = last(&abs_list); + assert_eq!(elem, abs_last); + assert_eq!(abstraction(v.clone()), abs_list); + } + + #[test] + fn test_lazy_unique_vec_nth(ref mut v in lazy_unique_vec(".*", 0..100), n in 0usize..100) { + let abs_list = abstraction(v.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list.sort())); + //post + let elem = Indexable::<String>::nth(v, n); + let abs_nth = nth(&abs_list, n); + assert_eq!(elem, abs_nth); + assert_eq!(abstraction(v.clone()), abs_list); + } + } +} diff --git a/src/crates/library/src/lib.rs b/src/crates/library/src/lib.rs new file mode 100644 index 0000000..19ff7af --- /dev/null +++ b/src/crates/library/src/lib.rs @@ -0,0 +1,23 @@ +//! Container implementations that primrose can select from, and traits. 
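+//!
+//! A minimal usage sketch (illustrative only): every implementation in this crate
+//! is driven through the [`traits::Container`] interface, for example:
+//!
+//! ```
+//! use primrose_library::LazyUniqueVec;
+//! use primrose_library::traits::Container;
+//!
+//! let mut c = LazyUniqueVec::new();
+//! c.insert(3);
+//! c.insert(3); // duplicates are dropped the next time the vector is normalised
+//! c.insert(1);
+//! assert_eq!(c.len(), 2);
+//! assert!(c.contains(&1));
+//! ```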
+ +#![feature(linked_list_cursors)] + +pub mod traits; + +mod eager_sorted_vector; +mod eager_unique_vector; +mod lazy_sorted_vector; +mod lazy_unique_vector; + +mod hashset; +mod list; +mod treeset; +mod vector; + +pub use eager_sorted_vector::EagerSortedVec; +pub use eager_unique_vector::EagerUniqueVec; +pub use lazy_sorted_vector::LazySortedVec; +pub use lazy_unique_vector::LazyUniqueVec; + +#[cfg(test)] +pub mod proptest; diff --git a/src/crates/library/src/list.rs b/src/crates/library/src/list.rs new file mode 100644 index 0000000..40ac771 --- /dev/null +++ b/src/crates/library/src/list.rs @@ -0,0 +1,317 @@ +/*LIBSPEC-NAME* +rust-linked-list-spec std::collections::LinkedList +*ENDLIBSPEC-NAME*/ + +use crate::traits::{Container, Indexable, Stack}; + +use std::cmp::Ordering; +use std::collections::LinkedList; +use std::marker::PhantomData; + +/*IMPL* +Container +*ENDIMPL*/ +impl<T: Ord> Container<T> for LinkedList<T> { + /*LIBSPEC* + /*OPNAME* + len op-len pre-len post-len + *ENDOPNAME*/ + (define (op-len xs) (cons xs (length xs))) + (define (pre-len xs) #t) + (define (post-len xs r) (equal? r (op-len xs))) + *ENDLIBSPEC*/ + fn len(&mut self) -> usize { + LinkedList::len(self) + } + + /*LIBSPEC* + /*OPNAME* + contains op-contains pre-contains post-contains + *ENDOPNAME*/ + (define (op-contains xs x) + (cond + [(list? (member x xs)) (cons xs #t)] + [else (cons xs #f)])) + (define (pre-contains xs) #t) + (define (post-contains xs x r) (equal? r (op-contains xs x))) + *ENDLIBSPEC*/ + fn contains(&mut self, x: &T) -> bool { + LinkedList::contains(self, x) + } + + /*LIBSPEC* + /*OPNAME* + is-empty op-is-empty pre-is-empty post-is-empty + *ENDOPNAME*/ + (define (op-is-empty xs) (cons xs (null? xs))) + (define (pre-is-empty xs) #t) + (define (post-is-empty xs r) (equal? r (op-is-empty xs))) + *ENDLIBSPEC*/ + fn is_empty(&mut self) -> bool { + LinkedList::is_empty(self) + } + + /*LIBSPEC* + /*OPNAME* + clear op-clear pre-clear post-clear + *ENDOPNAME*/ + (define (op-clear xs) null) + (define (pre-clear xs) #t) + (define (post-clear xs r) (equal? r (op-clear xs))) + *ENDLIBSPEC*/ + fn clear(&mut self) { + LinkedList::clear(self); + } + + /*LIBSPEC* + /*OPNAME* + insert op-insert pre-insert post-insert + *ENDOPNAME*/ + (define (op-insert xs x) (append xs (list x))) + (define (pre-insert xs) #t) + (define (post-insert xs x ys) (equal? ys (op-insert xs x))) + *ENDLIBSPEC*/ + fn insert(&mut self, elt: T) { + LinkedList::push_back(self, elt); + } + + /*LIBSPEC* + /*OPNAME* + remove op-remove pre-remove post-remove + *ENDOPNAME*/ + (define (op-remove xs x) + (cond + [(list? (member x xs)) (cons (remove x xs) x)] + [else (cons xs null)])) + (define (pre-remove xs) #t) + (define (post-remove xs r) (equal? r (op-remove xs))) + *ENDLIBSPEC*/ + fn remove(&mut self, elt: T) -> Option<T> { + let mut c = self.cursor_front_mut(); + loop { + match c.current() { + Some(x) => match &elt.cmp(x) { + Ordering::Equal => return c.remove_current(), + _ => c.move_next(), + }, + None => { + // empty list + return None; + } + } + } + } +} + +/*IMPL* +Stack +*ENDIMPL*/ +impl<T> Stack<T> for LinkedList<T> { + /*LIBSPEC* + /*OPNAME* + push push pre-push post-push + *ENDOPNAME*/ + (define (push xs x) (append xs (list x))) + (define (pre-push xs) #t) + (define (post-push xs x ys) (equal? ys (push xs x))) + *ENDLIBSPEC*/ + fn push(&mut self, elt: T) { + LinkedList::push_back(self, elt); + } + + /*LIBSPEC* + /*OPNAME* + pop pop pre-pop post-pop + *ENDOPNAME*/ + (define (pop xs) + (cond + [(null? 
xs) (cons xs null)] + [else (cons (take xs (- (length xs) 1)) (last xs))])) + (define (pre-pop xs) #t) + (define (post-pop xs r) (equal? r (pop xs))) + *ENDLIBSPEC*/ + fn pop(&mut self) -> Option<T> { + LinkedList::pop_back(self) + } +} + +/*IMPL* +Indexable +*ENDIMPL*/ +impl<T> Indexable<T> for LinkedList<T> { + /*LIBSPEC* + /*OPNAME* + first op-first pre-first post-first + *ENDOPNAME*/ + (define (op-first xs) + (cond + [(null? xs) (cons xs null)] + [else (cons xs (first xs))])) + (define (pre-first xs) #t) + (define (post-first xs r) (equal? r (op-first xs))) + *ENDLIBSPEC*/ + fn first(&mut self) -> Option<&T> { + LinkedList::front(self) + } + + /*LIBSPEC* + /*OPNAME* + last op-last pre-last post-last + *ENDOPNAME*/ + (define (op-last xs) + (cond + [(null? xs) (cons xs null)] + [else (cons xs (last xs))])) + (define (pre-last xs) #t) + (define (post-last xs r) (equal? r (op-last xs))) + *ENDLIBSPEC*/ + fn last(&mut self) -> Option<&T> { + LinkedList::back(self) + } + + /*LIBSPEC* + /*OPNAME* + nth op-nth pre-nth post-nth + *ENDOPNAME*/ + (define (op-nth xs n) + (cond + [(>= n (length xs)) (cons xs null)] + [(< n 0) (cons xs null)] + [else (cons xs (list-ref xs n))])) + (define (pre-nth xs) #t) + (define (post-nth xs n r) (equal? r (op-nth xs n))) + *ENDLIBSPEC*/ + fn nth(&mut self, n: usize) -> Option<&T> { + LinkedList::iter(self).nth(n) + } +} + +struct Con<T> { + elem_t: PhantomData<T>, +} + +pub trait Constructor { + type Impl: ?Sized; + type Interface: ?Sized; + fn new() -> Box<Self::Interface>; +} + +impl<T: 'static + Ord> Constructor for Con<T> { + type Impl = LinkedList<T>; + type Interface = dyn Container<T>; + fn new() -> Box<Self::Interface> { + Box::<LinkedList<T>>::default() + } +} + +#[cfg(test)] +mod tests { + + use std::sync::Arc; + + use super::*; + use crate::proptest::*; + use proptest::{collection::linked_list, prelude::*}; + + use im::conslist::ConsList; + fn abstraction<T>(l: LinkedList<T>) -> ConsList<T> { + let list: ConsList<T> = ConsList::from_iter(l); + list + } + + proptest! { + #![proptest_config(ProptestConfig { + cases: 100, .. 
ProptestConfig::default() + })] + + #[test] + fn test_list_len(ref mut l in linked_list(".*", 0..100)) { + let abs_list = abstraction(l.clone()); + assert_eq!(Container::<String>::len(l), abs_list.len()); + assert_eq!(abstraction(l.clone()), abs_list); + } + + #[test] + fn test_list_contains(ref mut l in linked_list(".*", 0..100), a in ".*") { + let abs_list = abstraction(l.clone()); + assert_eq!(Container::<String>::contains(l, &a), contains(&abs_list, &a)); + assert_eq!(abstraction(l.clone()), abs_list); + } + + #[test] + fn test_list_is_empty(ref mut l in linked_list(".*", 0..100)) { + let abs_list = abstraction(l.clone()); + assert_eq!(Container::<String>::is_empty(l), abs_list.is_empty()); + assert_eq!(abstraction(l.clone()), abs_list); + } + + #[test] + fn test_list_insert(ref mut l in linked_list(".*", 0..100), a in ".*") { + let abs_list = abstraction(l.clone()); + let after_list = abs_list.append(conslist![a.clone()]); + Container::<String>::insert(l, a.clone()); + assert_eq!(abstraction(l.clone()), after_list); + } + + #[test] + fn test_list_clear(ref mut l in linked_list(".*", 0..100)) { + let abs_list = abstraction(l.clone()); + let after_list = clear(&abs_list); + Container::<String>::clear(l); + assert_eq!(abstraction(l.clone()), after_list); + } + + #[test] + fn test_list_remove(ref mut l in linked_list(".*", 0..100), a in ".*") { + let abs_list = abstraction(l.clone()); + let (after_list, abs_elem) = remove(&abs_list, a.clone()); + let elem = Container::<String>::remove(l, a.clone()); + assert_eq!(abstraction(l.clone()), after_list); + assert_eq!(elem, abs_elem); + } + + #[test] + fn test_list_first(ref mut l in linked_list(".*", 0..100)) { + let abs_list = abstraction(l.clone()); + let elem = Indexable::<String>::first(l); + let abs_first = first(&abs_list); + assert_eq!(elem, abs_first); + assert_eq!(abstraction(l.clone()), abs_list); + } + + #[test] + fn test_list_last(ref mut l in linked_list(".*", 0..100)) { + let abs_list = abstraction(l.clone()); + let elem = Indexable::<String>::last(l); + let abs_last = last(&abs_list); + assert_eq!(elem, abs_last); + assert_eq!(abstraction(l.clone()), abs_list); + } + + #[test] + fn test_list_nth(ref mut l in linked_list(".*", 0..100), n in 0usize..100) { + let abs_list = abstraction(l.clone()); + let elem = Indexable::<String>::nth(l, n); + let abs_nth = nth(&abs_list, n); + assert_eq!(elem, abs_nth); + assert_eq!(abstraction(l.clone()), abs_list); + } + + #[test] + fn test_list_push(ref mut l in linked_list(".*", 0..100), a in ".*") { + let abs_list = abstraction(l.clone()); + let after_list = push(&abs_list, a.clone()); + Stack::<String>::push(l, a.clone()); + assert_eq!(abstraction(l.clone()), after_list); + } + + #[test] + fn test_list_pop(ref mut l in linked_list(".*", 0..100)) { + let abs_list = abstraction(l.clone()); + let (after_list, abs_elem) = pop(&abs_list); + let elem = Stack::<String>::pop(l); + assert_eq!(abstraction(l.clone()), after_list); + assert_eq!(elem.map(Arc::new), abs_elem); + } + } +} diff --git a/src/crates/library/src/proptest/mod.rs b/src/crates/library/src/proptest/mod.rs new file mode 100644 index 0000000..7807854 --- /dev/null +++ b/src/crates/library/src/proptest/mod.rs @@ -0,0 +1,68 @@ +pub mod strategies; + +pub use im::conslist; +use im::conslist::ConsList; +use std::sync::Arc; + +pub fn contains<T: PartialEq>(list: &ConsList<T>, elem: &T) -> bool { + list.iter().any(|x| x.as_ref() == elem) +} + +pub fn clear<T>(_list: &ConsList<T>) -> ConsList<T> { + ConsList::<T>::new() +} + +pub fn 
remove<T: PartialEq + Clone>(list: &ConsList<T>, a: T) -> (ConsList<T>, Option<T>) { + if contains(list, &a) { + let mut result = ConsList::<T>::new(); + let mut found = false; + for i in list.iter() { + if i.as_ref() == &a && !found { + found = true; + continue; + } else { + result = result.append(conslist![i.clone()]); + } + } + (result, Some(a)) + } else { + (list.clone(), None) + } +} + +pub fn first<T>(list: &ConsList<T>) -> Option<&T> { + list.head().map(|x| unsafe { &*Arc::into_raw(x) }) +} + +pub fn last<T>(list: &ConsList<T>) -> Option<&T> { + list.reverse().head().map(|x| unsafe { &*Arc::into_raw(x) }) +} + +pub fn nth<T>(list: &ConsList<T>, n: usize) -> Option<&T> { + list.iter().nth(n).map(|x| unsafe { &*Arc::into_raw(x) }) +} + +pub fn push<T>(list: &ConsList<T>, a: T) -> ConsList<T> { + list.append(conslist![a]) +} + +pub fn pop<T>(list: &ConsList<T>) -> (ConsList<T>, Option<Arc<T>>) { + if list.is_empty() { + (ConsList::<T>::new(), None) + } else { + let (elem, result) = list.reverse().uncons().unwrap(); + (result.reverse(), Some(elem)) + } +} + +pub fn unique<T: PartialEq>(list: &ConsList<T>) -> ConsList<T> { + let mut result = ConsList::<T>::new(); + for i in list.iter() { + if contains(&result, &i) { + continue; + } else { + result = result.append(conslist![i.clone()]); + } + } + result +} diff --git a/src/crates/library/src/proptest/strategies.rs b/src/crates/library/src/proptest/strategies.rs new file mode 100644 index 0000000..d087fcc --- /dev/null +++ b/src/crates/library/src/proptest/strategies.rs @@ -0,0 +1,50 @@ +use proptest::prelude::*; + +use std::ops::Range; + +use crate::eager_sorted_vector::EagerSortedVec; +use crate::eager_unique_vector::EagerUniqueVec; +use crate::lazy_sorted_vector::LazySortedVec; +use crate::lazy_unique_vector::LazyUniqueVec; + +use proptest::collection::vec; + +pub fn eager_unique_vec<T: Strategy + 'static>( + element: T, + size: Range<usize>, +) -> impl Strategy<Value = EagerUniqueVec<T::Value>> +where + <T as Strategy>::Value: PartialEq, +{ + vec(element, size.clone()).prop_map(EagerUniqueVec::from_vec) +} + +pub fn lazy_unique_vec<T: Strategy + 'static>( + element: T, + size: Range<usize>, +) -> impl Strategy<Value = LazyUniqueVec<T::Value>> +where + <T as Strategy>::Value: Ord, +{ + vec(element, size.clone()).prop_map(LazyUniqueVec::from_vec) +} + +pub fn eager_sorted_vec<T: Strategy + 'static>( + element: T, + size: Range<usize>, +) -> impl Strategy<Value = EagerSortedVec<T::Value>> +where + <T as Strategy>::Value: Ord, +{ + vec(element, size.clone()).prop_map(EagerSortedVec::from_vec) +} + +pub fn lazy_sorted_vec<T: Strategy + 'static>( + element: T, + size: Range<usize>, +) -> impl Strategy<Value = LazySortedVec<T::Value>> +where + <T as Strategy>::Value: Ord, +{ + vec(element, size.clone()).prop_map(LazySortedVec::from_vec) +} diff --git a/src/crates/library/src/traits.rs b/src/crates/library/src/traits.rs new file mode 100644 index 0000000..a33f3d9 --- /dev/null +++ b/src/crates/library/src/traits.rs @@ -0,0 +1,27 @@ +//! 
Common traits for primrose container types + +pub trait Container<T> { + fn len(&mut self) -> usize; + fn contains(&mut self, x: &T) -> bool; + fn is_empty(&mut self) -> bool; + fn insert(&mut self, elt: T); + fn clear(&mut self); + fn remove(&mut self, elt: T) -> Option<T>; // remove first occurance +} + +pub trait Stack<T> { + fn push(&mut self, elt: T); + fn pop(&mut self) -> Option<T>; +} + +pub trait Indexable<T> { + fn first(&mut self) -> Option<&T>; + fn last(&mut self) -> Option<&T>; + fn nth(&mut self, n: usize) -> Option<&T>; +} + +pub trait ContainerConstructor { + type Impl: ?Sized; + type Bound: ?Sized; + fn new() -> Box<Self::Bound>; +} diff --git a/src/crates/library/src/treeset.rs b/src/crates/library/src/treeset.rs new file mode 100644 index 0000000..4e2b213 --- /dev/null +++ b/src/crates/library/src/treeset.rs @@ -0,0 +1,263 @@ +/*LIBSPEC-NAME* +rust-btreeset-spec std::collections::BTreeSet +*ENDLIBSPEC-NAME*/ + +use crate::traits::{Container, Indexable}; +pub use std::collections::BTreeSet; + +/*IMPL* +Container +*ENDIMPL*/ +impl<T: Ord> Container<T> for BTreeSet<T> { + /*LIBSPEC* + /*OPNAME* + len op-len pre-len post-len + *ENDOPNAME*/ + (define (op-len xs) (cons xs (length xs))) + (define (pre-len xs) (equal? xs (remove-duplicates (sort xs <)))) + (define (post-len xs r) (equal? r (op-len xs))) + *ENDLIBSPEC*/ + fn len(&mut self) -> usize { + BTreeSet::len(self) + } + + /*LIBSPEC* + /*OPNAME* + contains op-contains pre-contains post-contains + *ENDOPNAME*/ + (define (op-contains xs x) + (cond + [(list? (member x xs)) (cons xs #t)] + [else (cons xs #f)])) + (define (pre-contains xs) (equal? xs (remove-duplicates (sort xs <)))) + (define (post-contains xs x r) (equal? r (op-contains xs x))) + *ENDLIBSPEC*/ + fn contains(&mut self, x: &T) -> bool { + BTreeSet::contains(self, x) + } + + /*LIBSPEC* + /*OPNAME* + is-empty op-is-empty pre-is-empty post-is-empty + *ENDOPNAME*/ + (define (op-is-empty xs) (cons xs (null? xs))) + (define (pre-is-empty xs) (equal? xs (remove-duplicates (sort xs <)))) + (define (post-is-empty xs r) (equal? r (op-is-empty xs))) + *ENDLIBSPEC*/ + fn is_empty(&mut self) -> bool { + BTreeSet::is_empty(self) + } + + /*LIBSPEC* + /*OPNAME* + clear op-clear pre-clear post-clear + *ENDOPNAME*/ + (define (op-clear xs) null) + (define (pre-clear xs) (equal? xs (remove-duplicates (sort xs <)))) + (define (post-clear xs r) (equal? r (op-clear xs))) + *ENDLIBSPEC*/ + fn clear(&mut self) { + BTreeSet::clear(self); + } + + /*LIBSPEC* + /*OPNAME* + insert op-insert pre-insert post-insert + *ENDOPNAME*/ + (define (op-insert xs x) (remove-duplicates (sort (append xs (list x)) <))) + (define (pre-insert xs) (equal? xs (remove-duplicates (sort xs <)))) + (define (post-insert xs x ys) (equal? ys (op-insert xs x))) + *ENDLIBSPEC*/ + fn insert(&mut self, elt: T) { + BTreeSet::insert(self, elt); + } + + /*LIBSPEC* + /*OPNAME* + remove op-remove pre-remove post-remove + *ENDOPNAME*/ + (define (op-remove xs x) + (cond + [(list? (member x xs)) (cons (remove x xs) x)] + [else (cons xs null)])) + (define (pre-remove xs) (equal? xs (remove-duplicates (sort xs <)))) + (define (post-remove xs r) (equal? r (op-remove xs))) + *ENDLIBSPEC*/ + fn remove(&mut self, elt: T) -> Option<T> { + match BTreeSet::remove(self, &elt) { + true => Some(elt), + false => None, + } + } +} + +/*IMPL* +Indexable +*ENDIMPL*/ +impl<T: Ord> Indexable<T> for BTreeSet<T> { + /*LIBSPEC* + /*OPNAME* + first op-first pre-first post-first + *ENDOPNAME*/ + (define (op-first xs) + (cond + [(null? 
xs) (cons xs null)] + [else (cons xs (first xs))])) + (define (pre-first xs) (equal? xs (remove-duplicates (sort xs <)))) + (define (post-first xs r) (equal? r (op-first xs))) + *ENDLIBSPEC*/ + fn first(&mut self) -> Option<&T> { + BTreeSet::first(self) + } + + /*LIBSPEC* + /*OPNAME* + last op-last pre-last post-last + *ENDOPNAME*/ + (define (op-last xs) + (cond + [(null? xs) (cons xs null)] + [else (cons xs (last xs))])) + (define (pre-last xs) (equal? xs (remove-duplicates (sort xs <)))) + (define (post-last xs r) (equal? r (op-last xs))) + *ENDLIBSPEC*/ + fn last(&mut self) -> Option<&T> { + BTreeSet::last(self) + } + + /*LIBSPEC* + /*OPNAME* + nth op-nth pre-nth post-nth + *ENDOPNAME*/ + (define (op-nth xs n) + (cond + [(>= n (length xs)) (cons xs null)] + [(< n 0) (cons xs null)] + [else (cons xs (list-ref xs n))])) + (define (pre-nth xs) (equal? xs (remove-duplicates (sort xs <)))) + (define (post-nth n xs r) (equal? r (op-nth xs n))) + *ENDLIBSPEC*/ + fn nth(&mut self, n: usize) -> Option<&T> { + BTreeSet::iter(self).nth(n) + } +} + +#[cfg(test)] +mod test { + use super::*; + + use crate::proptest::*; + use im::conslist::ConsList; + use proptest::{collection::btree_set, prelude::*}; + use std::iter::FromIterator; + + fn abstraction<T>(t: BTreeSet<T>) -> ConsList<T> { + let list: ConsList<T> = ConsList::from_iter(t); + list + } + + proptest! { + #![proptest_config(ProptestConfig { + cases: 100, .. ProptestConfig::default() + })] + #[test] + fn test_btree_len(ref mut t in btree_set(".*", 0..100)) { + let abs_list = abstraction(t.clone()); + // pre: our list model is a sorted and unique list + assert_eq!(abs_list, unique(&abs_list.sort())); + //post + assert_eq!(Container::<String>::len(t), abs_list.len()); + assert_eq!(abstraction(t.clone()), abs_list); + } + + #[test] + fn test_btree_contains(ref mut t in btree_set(".*", 0..100), a in ".*") { + let abs_list = abstraction(t.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list.sort())); + //post + assert_eq!(Container::<String>::contains(t, &a), contains(&abs_list, &a)); + assert_eq!(abstraction(t.clone()), abs_list); + } + + #[test] + fn test_btree_is_empty(ref mut t in btree_set(".*", 0..100)) { + let abs_list = abstraction(t.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list.sort())); + //post + assert_eq!(Container::<String>::is_empty(t), abs_list.is_empty()); + assert_eq!(abstraction(t.clone()), abs_list); + } + + #[test] + fn test_btree_insert(ref mut t in btree_set(".*", 0..100), a in ".*") { + let abs_list = abstraction(t.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list.sort())); + //post + let after_list = unique(&abs_list.append(conslist![a.clone()]).sort()); + Container::<String>::insert(t, a.clone()); + assert_eq!(abstraction(t.clone()), after_list); + } + + #[test] + fn test_btree_clear(ref mut t in btree_set(".*", 0..100)) { + let abs_list = abstraction(t.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list.sort())); + //post + let after_list = clear(&abs_list); + Container::<String>::clear(t); + assert_eq!(abstraction(t.clone()), after_list); + } + + #[test] + fn test_btree_remove(ref mut t in btree_set(".*", 0..100), a in ".*") { + let abs_list = abstraction(t.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list.sort())); + //post + let (after_list, abs_elem) = remove(&abs_list, a.clone()); + let elem = Container::<String>::remove(t, a.clone()); + assert_eq!(abstraction(t.clone()), after_list); + assert_eq!(elem, abs_elem); + } + + #[test] + fn test_btree_first(ref mut t in btree_set(".*", 
0..100)) { + let abs_list = abstraction(t.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list.sort())); + //post + let elem = Indexable::<String>::first(t); + let abs_first = first(&abs_list); + assert_eq!(elem, abs_first); + assert_eq!(abstraction(t.clone()), abs_list); + } + + #[test] + fn test_btree_last(ref mut t in btree_set(".*", 0..100)) { + let abs_list = abstraction(t.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list.sort())); + //post + let elem = Indexable::<String>::last(t); + let abs_last = last(&abs_list); + assert_eq!(elem, abs_last); + assert_eq!(abstraction(t.clone()), abs_list); + } + + #[test] + fn test_btree_nth(ref mut t in btree_set(".*", 0..100), n in 0usize..100) { + let abs_list = abstraction(t.clone()); + //pre + assert_eq!(abs_list, unique(&abs_list.sort())); + //post + let elem = Indexable::<String>::nth(t, n); + let abs_nth = nth(&abs_list, n); + assert_eq!(elem, abs_nth); + assert_eq!(abstraction(t.clone()), abs_list); + } + } +} diff --git a/src/crates/library/src/vector.rs b/src/crates/library/src/vector.rs new file mode 100644 index 0000000..38293c8 --- /dev/null +++ b/src/crates/library/src/vector.rs @@ -0,0 +1,286 @@ +/*LIBSPEC-NAME* +rust-vec-spec std::vec::Vec +*ENDLIBSPEC-NAME*/ + +use crate::traits::{Container, Indexable, Stack}; +pub use std::vec::Vec; + +/*IMPL* +Container +*ENDIMPL*/ +impl<T: PartialEq> Container<T> for Vec<T> { + /*LIBSPEC* + /*OPNAME* + len op-len pre-len post-len + *ENDOPNAME*/ + (define (op-len xs) (cons xs (length xs))) + (define (pre-len xs) #t) + (define (post-len xs r) (equal? r (op-len xs))) + *ENDLIBSPEC*/ + fn len(&mut self) -> usize { + Vec::len(self) + } + + /*LIBSPEC* + /*OPNAME* + contains op-contains pre-contains post-contains + *ENDOPNAME*/ + (define (op-contains xs x) + (cond + [(list? (member x xs)) (cons xs #t)] + [else (cons xs #f)])) + (define (pre-contains xs) #t) + (define (post-contains xs x r) (equal? r (op-contains xs x))) + *ENDLIBSPEC*/ + fn contains(&mut self, x: &T) -> bool { + <[T]>::contains(self, x) // use fully qualified syntax to avoid function name collision + } + + /*LIBSPEC* + /*OPNAME* + is-empty op-is-empty pre-is-empty post-is-empty + *ENDOPNAME*/ + (define (op-is-empty xs) (cons xs (null? xs))) + (define (pre-is-empty xs) #t) + (define (post-is-empty xs r) (equal? r (op-is-empty xs))) + *ENDLIBSPEC*/ + fn is_empty(&mut self) -> bool { + Vec::is_empty(self) + } + + /*LIBSPEC* + /*OPNAME* + clear op-clear pre-clear post-clear + *ENDOPNAME*/ + (define (op-clear xs) null) + (define (pre-clear xs) #t) + (define (post-clear xs r) (equal? r (op-clear xs))) + *ENDLIBSPEC*/ + fn clear(&mut self) { + Vec::clear(self); + } + + /*LIBSPEC* + /*OPNAME* + insert op-insert pre-insert post-insert + *ENDOPNAME*/ + (define (op-insert xs x) (append xs (list x))) + (define (pre-insert xs) #t) + (define (post-insert xs x ys) (equal? ys (op-insert xs x))) + *ENDLIBSPEC*/ + fn insert(&mut self, elt: T) { + Vec::push(self, elt); + } + + /*LIBSPEC* + /*OPNAME* + remove op-remove pre-remove post-remove + *ENDOPNAME*/ + (define (op-remove xs x) + (cond + [(list? (member x xs)) (cons (remove x xs) x)] + [else (cons xs null)])) + (define (pre-remove xs) #t) + (define (post-remove xs r) (equal? 
r (op-remove xs))) + *ENDLIBSPEC*/ + fn remove(&mut self, elt: T) -> Option<T> { + self.iter() + .position(|x| *x == elt) + .map(|index| self.remove(index)) + } +} + +/*IMPL* +Stack +*ENDIMPL*/ +impl<T> Stack<T> for Vec<T> { + /*LIBSPEC* + /*OPNAME* + push push pre-push post-push + *ENDOPNAME*/ + (define (push xs x) (append xs (list x))) + (define (pre-push xs) #t) + (define (post-push xs x ys) (equal? ys (push xs x))) + *ENDLIBSPEC*/ + fn push(&mut self, elt: T) { + Vec::push(self, elt); + } + + /*LIBSPEC* + /*OPNAME* + pop pop pre-pop post-pop + *ENDOPNAME*/ + (define (pop xs) + (cond + [(null? xs) (cons xs null)] + [else (cons (take xs (- (length xs) 1)) (last xs))])) + (define (pre-pop xs) #t) + (define (post-pop xs r) (equal? r (pop xs))) + *ENDLIBSPEC*/ + fn pop(&mut self) -> Option<T> { + Vec::pop(self) + } +} + +/*IMPL* +Indexable +*ENDIMPL*/ +impl<T> Indexable<T> for Vec<T> { + /*LIBSPEC* + /*OPNAME* + first op-first pre-first post-first + *ENDOPNAME*/ + (define (op-first xs) + (cond + [(null? xs) (cons xs null)] + [else (cons xs (first xs))])) + (define (pre-first xs) #t) + (define (post-first xs r) (equal? r (op-first xs))) + *ENDLIBSPEC*/ + fn first(&mut self) -> Option<&T> { + <[T]>::first(self) + } + + /*LIBSPEC* + /*OPNAME* + last op-last pre-last post-last + *ENDOPNAME*/ + (define (op-last xs) + (cond + [(null? xs) (cons xs null)] + [else (cons xs (last xs))])) + (define (pre-last xs) #t) + (define (post-last xs r) (equal? r (op-last xs))) + *ENDLIBSPEC*/ + fn last(&mut self) -> Option<&T> { + <[T]>::last(self) + } + + /*LIBSPEC* + /*OPNAME* + nth op-nth pre-nth post-nth + *ENDOPNAME*/ + (define (op-nth xs n) + (cond + [(>= n (length xs)) (cons xs null)] + [(< n 0) (cons xs null)] + [else (cons xs (list-ref xs n))])) + (define (pre-nth xs) #t) + (define (post-nth xs n r) (equal? r (op-nth xs n))) + *ENDLIBSPEC*/ + fn nth(&mut self, n: usize) -> Option<&T> { + <[T]>::iter(self).nth(n) + } +} + +#[cfg(test)] +mod test { + use std::sync::Arc; + + use super::*; + use crate::proptest::*; + use im::conslist::ConsList; + use proptest::collection::vec; + use proptest::prelude::*; + + fn abstraction<T>(v: Vec<T>) -> ConsList<T> { + let list: ConsList<T> = ConsList::from(v); + list + } + + proptest! { + #![proptest_config(ProptestConfig { + cases: 100, .. 
ProptestConfig::default() + })] + + #[test] + fn test_vec_len(ref mut v in vec(".*", 0..100)) { + let abs_list = abstraction(v.clone()); + assert_eq!(Container::<String>::len(v), abs_list.len()); + assert_eq!(abstraction(v.clone()), abs_list); + } + + #[test] + fn test_vec_contains(ref mut v in vec(".*", 0..100), a in ".*") { + let abs_list = abstraction(v.clone()); + assert_eq!(Container::<String>::contains(v, &a), contains(&abs_list, &a)); + assert_eq!(abstraction(v.clone()), abs_list); + } + + #[test] + fn test_vec_is_empty(ref mut v in vec(".*", 0..100)) { + let abs_list = abstraction(v.clone()); + assert_eq!(Container::<String>::is_empty(v), abs_list.is_empty()); + assert_eq!(abstraction(v.clone()), abs_list); + } + + #[test] + fn test_vec_insert(ref mut v in vec(".*", 0..100), a in ".*") { + let abs_list = abstraction(v.clone()); + let after_list = abs_list.append(conslist![a.clone()]); + Container::<String>::insert(v, a.clone()); + assert_eq!(abstraction(v.clone()), after_list); + } + + #[test] + fn test_vec_clear(ref mut v in vec(".*", 0..100)) { + let abs_list = abstraction(v.clone()); + let after_list = clear(&abs_list); + Container::<String>::clear(v); + assert_eq!(abstraction(v.clone()), after_list); + } + + #[test] + fn test_vec_remove(ref mut v in vec(".*", 0..100), a in ".*") { + let abs_list = abstraction(v.clone()); + let (after_list, abs_elem) = remove(&abs_list, a.clone()); + let elem = Container::<String>::remove(v, a.clone()); + assert_eq!(abstraction(v.clone()), after_list); + assert_eq!(elem, abs_elem); + } + + #[test] + fn test_vec_first(ref mut v in vec(".*", 0..100)) { + let abs_list = abstraction(v.clone()); + let elem = Indexable::<String>::first(v); + let abs_first = first(&abs_list); + assert_eq!(elem, abs_first); + assert_eq!(abstraction(v.clone()), abs_list); + } + + #[test] + fn test_vec_last(ref mut v in vec(".*", 0..100)) { + let abs_list = abstraction(v.clone()); + let elem = Indexable::<String>::last(v); + let abs_last = last(&abs_list); + assert_eq!(elem, abs_last); + assert_eq!(abstraction(v.clone()), abs_list); + } + + #[test] + fn test_vec_nth(ref mut v in vec(".*", 0..100), n in 0usize..100) { + let abs_list = abstraction(v.clone()); + let elem = Indexable::<String>::nth(v, n); + let abs_nth = nth(&abs_list, n); + assert_eq!(elem, abs_nth); + assert_eq!(abstraction(v.clone()), abs_list); + } + + #[test] + fn test_vec_push(ref mut v in vec(".*", 0..100), a in ".*") { + let abs_list = abstraction(v.clone()); + let after_list = push(&abs_list, a.clone()); + Stack::<String>::push(v, a.clone()); + assert_eq!(abstraction(v.clone()), after_list); + } + + #[test] + fn test_vec_pop(ref mut v in vec(".*", 0..100)) { + let abs_list = abstraction(v.clone()); + let (after_list, abs_elem) = pop(&abs_list); + let elem = Stack::<String>::pop(v); + assert_eq!(abstraction(v.clone()), after_list); + assert_eq!(elem.map(Arc::new), abs_elem); + } + } +} diff --git a/src/crates/primrose/Cargo.toml b/src/crates/primrose/Cargo.toml new file mode 100644 index 0000000..80dfdab --- /dev/null +++ b/src/crates/primrose/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "primrose" +version = "0.1.0" +authors = ["Xueying Qin <qinxy1995@gmail.com>"] +edition = "2021" + +[dependencies] +log = { workspace = true } +env_logger = { workspace = true } + +rand = { workspace = true } +thiserror = { workspace = true } +peg = "0.8.0" + +primrose-library = { path = "../library" } + +[dev-dependencies] +criterion = "0.3.5" + +[[bench]] +name = "criterion_benchmark" +harness = false diff --git 
a/src/crates/primrose/benches/criterion_benchmark.rs b/src/crates/primrose/benches/criterion_benchmark.rs new file mode 100644 index 0000000..6bdc214 --- /dev/null +++ b/src/crates/primrose/benches/criterion_benchmark.rs @@ -0,0 +1,235 @@ +use criterion::{criterion_group, criterion_main, Criterion}; + +use primrose_library::traits::Container; +use primrose_library::LazyUniqueVec; +use rand::rngs::StdRng; +use rand::seq::SliceRandom; + +use rand::SeedableRng; +use std::collections::{BTreeSet, HashSet}; +use std::mem::size_of; + +use std::vec::Vec; + +// one search at the +fn gen_dataset_1() -> Vec<u32> { + // avoid duplication + let size = 1024 * 1024; // 1 MB + let amount = size / size_of::<u32>(); + let mut data: Vec<u32> = (1..amount as u32).collect(); //ensure no duplication + let mut rng = StdRng::seed_from_u64(222); + data.shuffle(&mut rng); + data +} + +fn gen_dataset_128() -> Vec<u32> { + // avoid duplication + let size = 128 * 1024 * 1024; // 128 MB + let amount = size / size_of::<u32>(); + let mut data: Vec<u32> = (1..amount as u32).collect(); //ensure no duplication + let mut rng = StdRng::seed_from_u64(222); + data.shuffle(&mut rng); + data +} + +fn gen_dataset_256() -> Vec<u32> { + // avoid duplication + let size = 256 * 1024 * 1024; // 256 MB + let amount = size / size_of::<u32>(); + let mut data: Vec<u32> = (1..amount as u32).collect(); //ensure no duplication + let mut rng = StdRng::seed_from_u64(222); + data.shuffle(&mut rng); + data +} + +fn gen_dataset_512() -> Vec<u32> { + // avoid duplication + let size = 512 * 1024 * 1024; // 512 MB + let amount = size / size_of::<u32>(); + let mut data: Vec<u32> = (1..amount as u32).collect(); //ensure no duplication + let mut rng = StdRng::seed_from_u64(222); + data.shuffle(&mut rng); + data +} + +fn btreeset_insertion_1m(c: &mut Criterion) { + let s: &mut dyn Container<u32> = &mut BTreeSet::new(); + let data = gen_dataset_1(); + c.bench_function("btreeset insertion 1MB", |b| { + b.iter(|| { + for val in data.iter() { + s.insert(*val); + } + s.contains(&1024); + }) + }); +} + +fn btreeset_insertion_128m(c: &mut Criterion) { + let s: &mut dyn Container<u32> = &mut BTreeSet::new(); + let data = gen_dataset_128(); + c.bench_function("btreeset insertion 128MB", |b| { + b.iter(|| { + for val in data.iter() { + s.insert(*val); + } + s.contains(&1024); + }) + }); +} + +fn btreeset_insertion_256m(c: &mut Criterion) { + let s: &mut dyn Container<u32> = &mut BTreeSet::new(); + let data = gen_dataset_256(); + c.bench_function("btreeset insertion 256MB", |b| { + b.iter(|| { + for val in data.iter() { + s.insert(*val); + } + s.contains(&1024); + }) + }); +} + +fn btreeset_insertion_512m(c: &mut Criterion) { + let s: &mut dyn Container<u32> = &mut BTreeSet::new(); + let data = gen_dataset_512(); + c.bench_function("btreeset insertion 512MB", |b| { + b.iter(|| { + for val in data.iter() { + s.insert(*val); + } + s.contains(&1024); + }) + }); +} + +fn hashset_insertion_1m(c: &mut Criterion) { + let s: &mut dyn Container<u32> = &mut HashSet::new(); + let data = gen_dataset_1(); + c.bench_function("hashset insertion 1MB", |b| { + b.iter(|| { + for val in data.iter() { + s.insert(*val); + } + s.contains(&1024); + }) + }); +} + +fn hashset_insertion_128m(c: &mut Criterion) { + let s: &mut dyn Container<u32> = &mut HashSet::new(); + let data = gen_dataset_128(); + c.bench_function("hashset insertion 128MB", |b| { + b.iter(|| { + for val in data.iter() { + s.insert(*val); + } + s.contains(&1024); + }) + }); +} + +fn hashset_insertion_256m(c: &mut 
Criterion) { + let s: &mut dyn Container<u32> = &mut HashSet::new(); + let data = gen_dataset_256(); + c.bench_function("hashset insertion 256MB", |b| { + b.iter(|| { + for val in data.iter() { + s.insert(*val); + } + s.contains(&1024); + }) + }); +} + +fn hashset_insertion_512m(c: &mut Criterion) { + let s: &mut dyn Container<u32> = &mut HashSet::new(); + let data = gen_dataset_512(); + c.bench_function("hashset insertion 512MB", |b| { + b.iter(|| { + for val in data.iter() { + s.insert(*val); + } + s.contains(&1024); + }) + }); +} + +fn lazy_uniuqe_vec_insertion_1m(c: &mut Criterion) { + let s: &mut dyn Container<u32> = &mut LazyUniqueVec::new(); + let data = gen_dataset_1(); + c.bench_function("lazy unique vector insertion 1MB", |b| { + b.iter(|| { + for val in data.iter() { + s.insert(*val); + } + s.contains(&1024); + }) + }); +} + +fn lazy_uniuqe_vec_insertion_128m(c: &mut Criterion) { + let s: &mut dyn Container<u32> = &mut LazyUniqueVec::new(); + let data = gen_dataset_128(); + c.bench_function("lazy unique vector insertion 128MB", |b| { + b.iter(|| { + for val in data.iter() { + s.insert(*val); + } + s.contains(&1024); + }) + }); +} + +fn lazy_uniuqe_vec_insertion_256m(c: &mut Criterion) { + let s: &mut dyn Container<u32> = &mut LazyUniqueVec::new(); + let data = gen_dataset_256(); + c.bench_function("lazy unique vector insertion 256MB", |b| { + b.iter(|| { + for val in data.iter() { + s.insert(*val); + } + s.contains(&1024); + }) + }); +} + +fn lazy_uniuqe_vec_insertion_512m(c: &mut Criterion) { + let s: &mut dyn Container<u32> = &mut LazyUniqueVec::new(); + let data = gen_dataset_512(); + c.bench_function("lazy unique vector insertion 512MB", |b| { + b.iter(|| { + for val in data.iter() { + s.insert(*val); + } + s.contains(&1024); + }) + }); +} + +criterion_group! { + name = insertion_1m; + config = Criterion::default().sample_size(10); + targets = btreeset_insertion_1m, hashset_insertion_1m, lazy_uniuqe_vec_insertion_1m +} + +criterion_group! { + name = insertion_128m; + config = Criterion::default().sample_size(10); + targets = btreeset_insertion_128m, hashset_insertion_128m, lazy_uniuqe_vec_insertion_128m +} + +criterion_group! { + name = insertion_256m; + config = Criterion::default().sample_size(10); + targets = btreeset_insertion_256m, hashset_insertion_256m, lazy_uniuqe_vec_insertion_256m +} + +criterion_group! { + name = insertion_512m; + config = Criterion::default().sample_size(10); + targets = btreeset_insertion_512m, hashset_insertion_512m, lazy_uniuqe_vec_insertion_512m +} + +criterion_main!(insertion_1m, insertion_128m, insertion_256m, insertion_512m); diff --git a/src/crates/primrose/src/analysis.rs b/src/crates/primrose/src/analysis.rs new file mode 100644 index 0000000..ec2f803 --- /dev/null +++ b/src/crates/primrose/src/analysis.rs @@ -0,0 +1,372 @@ +use crate::description::{InforMap, Tag}; +use crate::parser::{Decl, Prog, Refinement, Spec, Term}; +use crate::spec_map::PropSpecs; + +use std::fs; +use std::io::{Error, Write}; + +pub type AnalyserError = String; + +const LANGDECL: &str = "#lang rosette\n"; +const REQUIRE: &str = "(require \"../combinators.rkt\")\n"; +const EXTRAREQUIRE: &str = "(require \"../gen_lib_spec/ops.rkt\")\n"; +const GENPATH: &str = "./racket_specs/gen_prop_spec/"; + +fn gen_list_model(size: usize) -> String { + format!( + r#" +(define (generate-list n) + (define-symbolic* y integer? 
#:length n) + y) +(define-symbolic len (bitvector 32)) +(define ls (take-bv (generate-list {size}) len)) +"# + ) +} + +fn gen_symbolic(n: &str) -> String { + format!( + r#" +(define-symbolic {n} integer?) +"# + ) +} + +fn gen_symbolics(symbolics: &[String]) -> String { + let provide = symbolics.join(" "); + let mut code = String::new(); + for s in symbolics.iter() { + code = code + &gen_symbolic(s); + } + let provide = format!( + r#" +(provide {provide} ls) +"# + ); + code = code + &provide; + code +} + +#[derive(Debug)] +pub struct Analyser { + ctx: InforMap, + prop_specs: PropSpecs, +} + +impl Analyser { + pub fn new() -> Analyser { + Analyser { + ctx: InforMap::new(), + prop_specs: PropSpecs::new(), + } + } + + pub fn ctx(&self) -> &InforMap { + &self.ctx + } + + pub fn prop_specs(&self) -> &PropSpecs { + &self.prop_specs + } + + pub fn analyse_prog(&mut self, prog: Prog, model_size: usize) -> Result<(), AnalyserError> { + let specs: Vec<Spec> = prog + .iter() + .filter(|block| block.is_spec_block()) + .map(|block| block.extract_spec()) + .collect(); + self.analyse_specs(specs, model_size) + } + + fn analyse_specs(&mut self, specs: Vec<Spec>, model_size: usize) -> Result<(), AnalyserError> { + let concat_specs = specs.concat(); + let prop_decls: Vec<&Decl> = concat_specs + .iter() + .filter(|decl| decl.is_prop_decl()) + .collect(); + let contype_decls: Vec<&Decl> = concat_specs + .iter() + .filter(|decl| decl.is_contype_decl()) + .collect(); + match self.analyse_prop_decls(prop_decls, model_size) { + Ok(_) => match self.analyse_contype_decls(contype_decls.clone()) { + Ok(_) => self.analyse_bound_decls(contype_decls), + Err(e) => Err(e), + }, + Err(e) => Err(e), + } + } + + fn analyse_prop_decls( + &mut self, + decls: Vec<&Decl>, + model_size: usize, + ) -> Result<(), AnalyserError> { + let mut result = Ok(()); + for decl in decls.into_iter() { + match self.analyse_prop_decl(decl, model_size) { + Ok(_) => continue, + Err(e) => result = Err(e), + } + } + result + } + + fn analyse_prop_decl(&mut self, decl: &Decl, model_size: usize) -> Result<(), AnalyserError> { + match decl { + Decl::PropertyDecl((id, _), term) => { + let mut mterm = term.clone(); + let mut cdr_added = Vec::<String>::new(); + let mut symbolics = Vec::<String>::new(); + let code = "(define ".to_string() + + id + + " " + + &Self::analyse_term(&mut mterm, true, false, &mut cdr_added, &mut symbolics) + + ")\n" + + "(provide " + + id + + ")"; + let filename = id.to_string() + ".rkt"; + let mut symbolics_provided = gen_symbolics(&["n".to_string()]); + if !symbolics.is_empty() { + symbolics_provided = gen_symbolics(&symbolics); + } + self.write_prop_spec_file(filename.clone(), code, symbolics_provided, model_size) + .map_err(|e| format!("{}", e))?; + let prop_tag = Tag::Prop(id.to_string()); + self.ctx.entry(id.to_string()).or_insert(prop_tag); + if symbolics.is_empty() { + self.prop_specs + .insert(id.to_string(), (filename, vec!["n".to_string()])); + } else { + self.prop_specs + .insert(id.to_string(), (filename, symbolics)); + } + Ok(()) + } + _ => Err("Not a valid property declaration".to_string()), + } + } + + fn analyse_bound_decls(&mut self, decls: Vec<&Decl>) -> Result<(), AnalyserError> { + let mut result = Ok(()); + for decl in decls.into_iter() { + match self.analyse_bound_decl(decl) { + Ok(_) => continue, + Err(e) => result = Err(e), + } + } + result + } + + fn analyse_bound_decl(&mut self, decl: &Decl) -> Result<(), AnalyserError> { + match decl { + Decl::ConTypeDecl(con_ty, (_, ins, _)) => { + let (c, t) = 
con_ty.get_con_elem().unwrap(); + let mut name = c.clone() + "Trait"; + let bound_tag = Tag::Bound( + (c.clone(), t), + ins.clone().into_iter().collect::<Vec<String>>(), + ); + let immut_ctx = self.ctx.clone(); + // prevent generating existing name + let mut i: usize = 0; + while immut_ctx.contains_key(&name) { + name = name + &i.to_string(); + i += 1; + } + let con_tag = immut_ctx.get(&c).unwrap(); + match con_tag { + Tag::Con(elem_ty, _, tags) => { + self.ctx.insert( + c.clone(), + Tag::Con(elem_ty.to_string(), name.clone(), tags.to_vec()), + ); + } + _ => { + return Err("Not a valid container declaration.".to_string()); + } + } + self.ctx.entry(name).or_insert(bound_tag); + Ok(()) + } + _ => Err("Not a valid bound declaration".to_string()), + } + } + + fn analyse_contype_decls(&mut self, decls: Vec<&Decl>) -> Result<(), AnalyserError> { + let mut result = Ok(()); + for decl in decls.into_iter() { + match self.analyse_contype_decl(decl) { + Ok(_) => continue, + Err(e) => result = Err(e), + } + } + result + } + + fn analyse_contype_decl(&mut self, decl: &Decl) -> Result<(), AnalyserError> { + let mut tags = Vec::<Tag>::new(); + match decl { + Decl::ConTypeDecl(con_ty, (_, ins, r)) => { + let (c, t) = con_ty.get_con_elem().unwrap(); + let i_tag = Tag::Bound( + (c.clone(), t.clone()), + ins.clone().into_iter().collect::<Vec<String>>(), + ); + tags.push(i_tag); + match self.analyse_ref(r) { + Ok(prop_tags) => { + let mut prop_tags_mut = prop_tags.clone(); + tags.append(&mut prop_tags_mut); + let con_tag = Tag::Con(t, String::new(), tags); + self.ctx.entry(c).or_insert(con_tag); + Ok(()) + } + Err(e) => Err(e), + } + } + _ => Err("Not a valid container type declaration".to_string()), + } + } + + fn analyse_ref(&self, r: &Refinement) -> Result<Vec<Tag>, AnalyserError> { + match r { + Refinement::Prop(term) => match term { + Term::App(term1, _term2) => match self.retrive_ref_term(term1) { + Ok(t) => { + let tags = vec![t.clone()]; + Ok(tags) + } + Err(e) => Err(e), + }, + _ => Err("Not a valid term for refining the type Con<T>".to_string()), + }, + Refinement::AndProps(r1, r2) => match self.analyse_ref(r1) { + Ok(tags1) => match self.analyse_ref(r2) { + Ok(tags2) => Ok([tags1, tags2].concat()), + Err(e) => Err(e), + }, + Err(e) => Err(e), + }, + } + } + + fn retrive_ref_term(&self, term: &Term) -> Result<&Tag, AnalyserError> { + match term { + Term::Var(id) => match self.ctx.get(&id.to_string()) { + Some(t) => match t { + Tag::Prop(_) => Ok(t), + _ => Err(id.to_string() + " does not have a valid property"), + }, + _ => Err("Undefined variable: ".to_string() + id), + }, + _ => Err("Should be a varible term".to_string()), + } + } + + fn analyse_term( + term: &mut Term, + is_outter_app: bool, + is_quantifier: bool, + cdr_added: &mut Vec<String>, + symbolics: &mut Vec<String>, + ) -> String { + match term { + Term::Lit(lit) => { + if lit == "true" { + "#t".to_string() + } else { + "#f".to_string() + } + } + Term::Var(id) => id.to_string(), + Term::Lambda((id, _), t) => { + if is_quantifier { + symbolics.push(id.to_string()); + "(list ".to_string() + + id + + ") " + + &Self::analyse_term(t, true, false, cdr_added, symbolics) + } else { + "(lambda (".to_string() + + id + + ") " + + &Self::analyse_term(t, true, false, cdr_added, symbolics) + + ")" + } + } + Term::App(t1, t2) => { + // Temporary solution of cdr required to adjust model ops + if (*t1.clone()).require_cdr() && !cdr_added.contains(&t1.to_string()) { + cdr_added.push(t1.to_string()); + *term = Term::App( + 
Box::new(Term::Var("cdr".to_string())), + Box::new(term.clone()), + ); + Self::analyse_term(term, is_outter_app, is_quantifier, cdr_added, symbolics) + } else { + match ((*t1.clone()).is_quantifier(), *t2.clone()) { + (_, Term::App(_, _)) => { + if is_outter_app { + "(".to_string() + + &Self::analyse_term(t1, false, false, cdr_added, symbolics) + + " " + + &Self::analyse_term(t2, true, false, cdr_added, symbolics) + + ")" + } else { + Self::analyse_term(t1, false, false, cdr_added, symbolics) + + " " + + &Self::analyse_term(t2, true, false, cdr_added, symbolics) + } + } + (false, _) => { + if is_outter_app { + "(".to_string() + + &Self::analyse_term(t1, false, false, cdr_added, symbolics) + + " " + + &Self::analyse_term(t2, false, false, cdr_added, symbolics) + + ")" + } else { + Self::analyse_term(t1, false, false, cdr_added, symbolics) + + " " + + &Self::analyse_term(t2, false, false, cdr_added, symbolics) + } + } + (true, _) => { + if is_outter_app { + "(".to_string() + + &Self::analyse_term(t1, false, false, cdr_added, symbolics) + + " " + + &Self::analyse_term(t2, false, true, cdr_added, symbolics) + + ")" + } else { + Self::analyse_term(t1, false, false, cdr_added, symbolics) + + " " + + &Self::analyse_term(t2, false, true, cdr_added, symbolics) + } + } + } + } + } + } + } + + fn write_prop_spec_file( + &self, + filename: String, + contents: String, + symbolics: String, + model_size: usize, + ) -> Result<(), Error> { + let mut output = fs::File::create(GENPATH.to_owned() + &filename)?; + write!(output, "{}", LANGDECL)?; + write!(output, "{}", REQUIRE)?; + write!(output, "{}", EXTRAREQUIRE)?; + let list_model = gen_list_model(model_size); + write!(output, "{}", list_model)?; + write!(output, "{}", contents)?; + write!(output, "{}", symbolics)?; + Ok(()) + } +} diff --git a/src/crates/primrose/src/bounded_ops.rs b/src/crates/primrose/src/bounded_ops.rs new file mode 100644 index 0000000..0ca598f --- /dev/null +++ b/src/crates/primrose/src/bounded_ops.rs @@ -0,0 +1,43 @@ +use crate::types::{Bounds, Type, TypeVar}; + +use std::collections::HashMap; + +type BoundName = String; +type OpName = String; +type OpInfo = (OpName, Type); +pub type BoundedOps = HashMap<BoundName, Vec<OpInfo>>; + +pub fn generate_bounded_ops() -> BoundedOps { + let mut ops = BoundedOps::new(); + let push = ( + "push".to_string(), + Type::Fun( + Box::new(Type::Con( + "Con".to_string(), + Box::new(Type::Var(TypeVar::new("T".to_string()))), + Bounds::from(["Stack".to_string()]), + )), + Box::new(Type::Fun( + Box::new(Type::Var(TypeVar::new("T".to_string()))), + Box::new(Type::Con( + "Con".to_string(), + Box::new(Type::Var(TypeVar::new("T".to_string()))), + Bounds::from(["Stack".to_string()]), + )), + )), + ), + ); + let pop = ( + "pop".to_string(), + Type::Fun( + Box::new(Type::Con( + "Con".to_string(), + Box::new(Type::Var(TypeVar::new("T".to_string()))), + Bounds::from(["Stack".to_string()]), + )), + Box::new(Type::Var(TypeVar::new("T".to_string()))), + ), + ); + ops.insert("Stack".to_string(), vec![push, pop]); + ops +} diff --git a/src/crates/primrose/src/codegen.rs b/src/crates/primrose/src/codegen.rs new file mode 100644 index 0000000..d56c2b3 --- /dev/null +++ b/src/crates/primrose/src/codegen.rs @@ -0,0 +1,108 @@ +//! 
Generating rust code from results of + +use crate::{ + description::{InforMap, Tag}, + parser::Block, + selector::ContainerSelector, +}; + +const CODEGEN: &str = "/*CODEGEN*/\n"; +const CODEGENEND: &str = "/*ENDCODEGEN*/\n"; +const TRAITCRATE: &str = "primrose_library::traits::"; +const IMPORT: &str = "use primrose_library::traits::ContainerConstructor;\n"; + +impl ContainerSelector { + /// Generate replacement code for the whole file, with the given `(tag_id, selection)` pairs. + /// This will generate invalid code if any selection is invalid, or panic if any `tag_id` is invalid. + /// Returns the original file with the generated code in place of the original specification. + pub fn gen_replacement_file<'a, T: Iterator<Item = (&'a String, &'a str)>>( + &self, + selections: T, + ) -> String { + let mut result = String::new(); + for (tag_id, selection) in selections { + result += &self.gen_replacement_code(tag_id, selection); + } + + // rest of the rust code, minus the spec stuff + for block in self.blocks.iter().filter(|block| block.is_code_block()) { + match block { + Block::CodeBlock(code, _) => result += code, + _ => unreachable!(), + }; + } + + result + } + + /// Generate replacement code for the given tag, choosing the given library spec. + /// This will generate invalid code if selection is invalid, or panic if `tag_id` is invalid. + /// Returns only the required code, and not the rest of the code originally in the file. + pub fn gen_replacement_code(&self, tag_id: &String, selection: &str) -> String { + let tag = self.analyser.ctx().get(tag_id).expect("invalid tag_id"); + + let Tag::Con(elem_ty, i_name, _tags) = tag else { + panic!("tag_id was not Tag::Con"); + }; + + // generated code at top + let mut result = String::new(); + result += CODEGEN; + result += IMPORT; + result += &self.bounds_decl; + result += &gen_output_code(tag_id, elem_ty, selection, i_name); + result += CODEGENEND; + + result + } +} + +// TODO: Constructing a box like this is inefficient, and might affect performance of some programs +fn gen_output_code(s: &str, elem_type: &str, chosen: &str, trait_name: &str) -> String { + format!( + r#"struct {s}<{elem_type}> {{ + elem_t: core::marker::PhantomData<{elem_type}>, +}} + +impl<{elem_type}: 'static + Ord + std::hash::Hash> ContainerConstructor for {s}<{elem_type}> {{ + type Impl = {chosen}<{elem_type}>; + type Bound = dyn {trait_name}<{elem_type}>; + fn new() -> Box<Self::Bound> {{ + Box::new(Self::Impl::new()) + }} +}} +"# + ) +} + +pub fn process_bound_decl(ctx: &InforMap) -> String { + let mut code = String::new(); + for (id, tag) in ctx.iter() { + match tag { + Tag::Bound((c, t), decs) => { + let traits = decs + .iter() + .map(|name| process_bound_elem_ty(name, t)) + .collect::<Vec<String>>() + .join(" + "); + code = code + &gen_trait_code(id, c, t, &traits); + } + _ => continue, + } + } + + code +} + +fn gen_trait_code(trait_name: &str, s: &str, elem_type: &str, traits: &str) -> String { + format!( + r#" +trait {trait_name}<{elem_type}> : {traits} {{}} +impl<{elem_type}: 'static + Ord + std::hash::Hash> {trait_name}<{elem_type}> for <{s}<{elem_type}> as ContainerConstructor>::Impl {{}} +"# + ) +} + +fn process_bound_elem_ty(t: &str, elem_ty: &str) -> String { + TRAITCRATE.to_string() + t + "<" + elem_ty + ">" +} diff --git a/src/crates/primrose/src/description.rs b/src/crates/primrose/src/description.rs new file mode 100644 index 0000000..29eeb44 --- /dev/null +++ b/src/crates/primrose/src/description.rs @@ -0,0 +1,54 @@ +use std::collections::HashMap; + +pub 
type InforMap = HashMap<TagId, Tag>; +pub type TagId = String; + +pub type Description = String; +type ElemTypeName = String; +type ConName = String; +type BoundName = String; + +#[derive(Eq, PartialEq, Clone, Debug)] +pub enum Tag { + /// Links to a property by name + Prop(Description), + /// TODO + Bound((ConName, ElemTypeName), Vec<Description>), + /// Places bounds on a container type defined in the type context + Con( + /// The name of the type variable used for the element type + ElemTypeName, + /// The name of the bound + BoundName, + /// Bounds placed on the type + Vec<Tag>, + ), +} + +impl Tag { + pub fn is_prop_tag(&self) -> bool { + matches!(self, Tag::Prop(..)) + } + + pub fn is_bound_tag(&self) -> bool { + matches!(self, Tag::Bound(..)) + } + + pub fn is_con_tag(&self) -> bool { + matches!(self, Tag::Con(..)) + } + + pub fn extract_prop_desc(&self) -> Description { + match self { + Tag::Prop(desc) => desc.to_string(), + _ => String::new(), + } + } + + pub fn extract_bound_descs(&self) -> Vec<Description> { + match self { + Tag::Bound(_, descs) => descs.to_vec(), + _ => Vec::new(), + } + } +} diff --git a/src/crates/primrose/src/error.rs b/src/crates/primrose/src/error.rs new file mode 100644 index 0000000..990f80e --- /dev/null +++ b/src/crates/primrose/src/error.rs @@ -0,0 +1,36 @@ +use std::io; + +use thiserror::Error; + +use crate::type_check::TypeError; + +/// The main error type for primrose +#[derive(Error, Debug)] +pub enum Error { + #[error("error reading input: {0}")] + InputRead(io::Error), + + #[error("error parsing input: {0}")] + ParseError(#[from] peg::error::ParseError<peg::str::LineCol>), + + #[error("type error: {0}")] + TypeError(#[from] TypeError), + + #[error("library specification error: {0}")] + LibraryError(String), + + #[error("analyser error: {0}")] + AnalyserError(String), + + #[error("error generating match script: {0}")] + GenMatchScript(io::Error), + + #[error("error executing solver: {0}")] + ExecutionError(String), + + #[error("Unable to find a struct which matches the specification in the library")] + NoMatchingStructInLibrary, + + #[error("Error, cannot obtain provided operations from the library specification")] + ProvidedOperationsNotInLibrarySpec, +} diff --git a/src/crates/primrose/src/inference.rs b/src/crates/primrose/src/inference.rs new file mode 100644 index 0000000..9cf4783 --- /dev/null +++ b/src/crates/primrose/src/inference.rs @@ -0,0 +1,111 @@ +use std::collections::{HashMap, HashSet}; + +use std::ops::{Deref, DerefMut}; + +use crate::bounded_ops::generate_bounded_ops; +use crate::parser::{Id, Term}; +use crate::types::{Subst, Type, TypeScheme, TypeVar, TypeVarGen, Types}; + +/// A type environment +#[derive(Clone, Debug)] +pub struct TypeEnv(HashMap<Id, TypeScheme>); + +impl Deref for TypeEnv { + type Target = HashMap<Id, TypeScheme>; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for TypeEnv { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +impl Types for TypeEnv { + fn ftv(&self) -> HashSet<TypeVar> { + self.values().cloned().collect::<Vec<TypeScheme>>().ftv() + } + + fn apply(&self, s: &Subst) -> TypeEnv { + TypeEnv(self.iter().map(|(k, v)| (k.clone(), v.apply(s))).collect()) + } +} + +impl TypeEnv { + pub fn new() -> TypeEnv { + TypeEnv(HashMap::new()) + } + + // Main type inference algorithm + fn ti(&self, term: &Term, tvg: &mut TypeVarGen) -> Result<(Subst, Type), InferenceError> { + // Get types of operations defined in traits + let bounded_ops = generate_bounded_ops(); + let 
(s, t) = (match term { + // Infer literal: currently only boolean + Term::Lit(_) => Ok((Subst::new(), Type::Bool())), + // Infer variable + Term::Var(v) => match self.get(&v.to_string()) { + Some(s) => Ok((Subst::new(), s.instantiate(tvg))), + None => Err("unbound variable".to_string() + " " + &v.to_string()), + }, + // Infer abstraction + Term::Lambda((n, bounds), ref e) => { + let mut tv = Type::Var(tvg.gen()); + let mut env = self.clone(); + if !bounds.is_empty() { + tv = Type::Con("Con".to_string(), Box::new(tv), bounds.clone()); + for b in bounds.iter() { + if bounded_ops.contains_key(b) { + let ops_info = bounded_ops.get(b).unwrap(); + for (op_name, op_ty) in ops_info { + env.insert( + op_name.to_string(), + TypeScheme { + vars: Vec::new(), + ty: op_ty.clone(), + }, + ); + } + } + } + } + env.remove(&n.to_string()); + + env.insert( + n.to_string(), + TypeScheme { + vars: Vec::new(), + ty: tv.clone(), + }, + ); + let (s1, t1) = env.ti(e, tvg)?; + let result_ty = Type::Fun(Box::new(tv.apply(&s1)), Box::new(t1)); + Ok((s1.clone(), result_ty)) + } + // Infer application + Term::App(ref e1, ref e2) => { + let (s1, t1) = self.ti(e1, tvg)?; + let (s2, t2) = self.apply(&s1).ti(e2, tvg)?; + let tv = Type::Var(tvg.gen()); + let s3 = t1 + .apply(&s2) + .mgu(&Type::Fun(Box::new(t2), Box::new(tv.clone())))?; + Ok((s3.compose(&s2.compose(&s1)), tv.apply(&s3))) + } + })?; + Ok((s, t)) + } + + // perform type inference on term + pub fn type_inference( + &self, + term: &Term, + tvg: &mut TypeVarGen, + ) -> Result<Type, InferenceError> { + let (s, t) = self.ti(term, tvg)?; + Ok(t.apply(&s)) + } +} + +pub type InferenceError = String; diff --git a/src/crates/primrose/src/lib.rs b/src/crates/primrose/src/lib.rs new file mode 100644 index 0000000..a283f28 --- /dev/null +++ b/src/crates/primrose/src/lib.rs @@ -0,0 +1,23 @@ +mod analysis; +mod bounded_ops; +mod description; +mod inference; +mod parser; +mod run_matching; +mod source_file; +mod spec_map; +mod type_check; +mod types; + +pub mod tools; + +mod codegen; +mod selector; +pub use selector::ContainerSelector; + +mod library_specs; +pub use library_specs::LibSpec; +pub use spec_map::LibSpecs; + +mod error; +pub use error::Error; diff --git a/src/crates/primrose/src/library_specs.rs b/src/crates/primrose/src/library_specs.rs new file mode 100644 index 0000000..6b30ae6 --- /dev/null +++ b/src/crates/primrose/src/library_specs.rs @@ -0,0 +1,340 @@ +//! 
Process library files and extracts library specifications + +use std::collections::BTreeMap; +use std::collections::HashMap; + +use std::fs; + +use std::io::{Error, Write}; +use std::path::Path; + +use log::debug; + +use crate::spec_map::{Bounds, LibSpecs, ProvidedOps}; + +const LIBSPECNAME: &str = "/*LIBSPEC-NAME*"; +const LIBSPECNAMEEND: &str = "*ENDLIBSPEC-NAME*/"; +const LIBSPEC: &str = "/*LIBSPEC*"; +const LIBSPECEND: &str = "*ENDLIBSPEC*/"; +const LANGDECL: &str = "#lang rosette\n"; +const GENPATH: &str = "./racket_specs/gen_lib_spec/"; +const OPNAME: &str = "/*OPNAME*"; +const OPNAMEEND: &str = "*ENDOPNAME*/"; +const IMPL: &str = "/*IMPL*"; +const IMPLEND: &str = "*ENDIMPL*/"; + +type ErrorMessage = String; + +/// Specifications extracted from a library file +#[derive(Debug, Clone)] +pub struct LibSpec { + /// Name of the specification + pub spec_name: String, + + /// Name of the specified structs + pub struct_name: String, + + /// All specification code defined + pub specs: Vec<String>, + + /// The provided rosette module name + pub provide: String, + + /// The bounds of each operation + pub interface_provide_map: Bounds, + + /// The provided operations + pub provided_ops: ProvidedOps, +} + +impl LibSpec { + /// Process all library specifications in the given directory. + /// This will also write the required racket file for that spec (see [`Self::process`]). + pub fn process_all(dirname: &Path) -> Result<LibSpecs, ErrorMessage> { + let paths = fs::read_dir(dirname).unwrap(); + let files: Vec<String> = paths + .into_iter() + .map(|p| p.unwrap()) + .filter(|p| p.file_type().unwrap().is_file()) + .map(|path| path.path().into_os_string().into_string().unwrap()) + .filter(|path| !path.contains("/mod.rs") && !path.contains("/lib.rs")) + .collect(); + let mut lib_specs = LibSpecs::new(); + for path in files { + match Self::process(&path) { + Ok(spec) => { + lib_specs.insert(spec.struct_name.clone(), spec); + } + Err(e) => { + debug!( + "Failed to process library module {}: {}. Continuing anyway.", + &path, e + ); + } + } + } + Ok(lib_specs) + } + + /// Process a single library specification file. + /// This will also write the required racket file for that spec. + pub fn process(filename: &str) -> Result<Self, ErrorMessage> { + let result = Self::read(filename); + match result { + Ok(spec) => { + let _spec_name = format!("{}.rkt", &spec.spec_name); + let state = write_lib_file(&spec.spec_name, &spec.specs, &spec.provide); + if state.is_err() { + return Err("Unable to create lib specification file".to_string()); + } + Ok(spec) + } + Err(e) => Err(e), + } + } + + /// Read all library specification files in the given directory. + pub fn read_all(dirname: &Path) -> Result<LibSpecs, ErrorMessage> { + let paths = fs::read_dir(dirname).map_err(|_e| "Library spec directory does not exist")?; + + let _lib_specs = LibSpecs::new(); + Ok(paths + .into_iter() + .flatten() + .filter(|p| p.file_type().unwrap().is_file()) + .flat_map(|path| path.path().into_os_string().into_string()) + .filter(|path| !path.contains("/mod.rs") && !path.contains("/lib.rs")) + .flat_map(|path| match Self::read(&path) { + Ok(spec) => Some((spec.struct_name.clone(), spec)), + Err(e) => { + debug!( + "Failed to process library module {}: {}. 
Continuing anyway.", + &path, e + ); + None + } + }) + .collect()) + } + + /// Read and parse a library specification file + pub fn read(filename: &str) -> Result<Self, ErrorMessage> { + let contents = fs::read_to_string(filename).expect("Something went wrong reading the file"); + let trimed_contents = contents.trim().to_string(); + + let name_pragmas: Vec<&str> = trimed_contents.matches(LIBSPECNAME).collect(); + let name_end_pragmas: Vec<&str> = trimed_contents.matches(LIBSPECNAMEEND).collect(); + let spec_pragmas: Vec<&str> = trimed_contents.matches(LIBSPEC).collect(); + let spec_end_pragmas: Vec<&str> = trimed_contents.matches(LIBSPECEND).collect(); + if (name_pragmas.len() != 1) || (name_end_pragmas.len() != 1) { + Err("Error, invalid declaration of library specification name**.".to_string()) + } else if spec_pragmas.len() != spec_end_pragmas.len() { + return Err("Error, invalid declaration of library specification.".to_string()); + } else { + let _specs = String::new(); + let v1: Vec<&str> = trimed_contents.split(LIBSPECNAME).collect(); + let s = v1 + .get(1) + .expect("Error, invalid declaration of library specification name."); + let v2: Vec<&str> = s.split(LIBSPECNAMEEND).collect(); + let s3 = v2.first().unwrap().trim().to_string(); + let v3: Vec<&str> = s3.split(' ').collect(); + let spec_name = v3.first().unwrap().trim().to_string(); + let struct_name = v3.get(1).unwrap().trim().to_string(); + let s1 = v1.first().expect("Unexpected error."); + let s2 = v2.get(1).expect("Unexpected error."); + // process interface blocks + let mut trimed_contents = String::new(); + trimed_contents.push_str(s1); + trimed_contents.push_str(s2); + if !is_next_pragma_impl(&trimed_contents) && has_pragma_spec(&trimed_contents) { + return Err("Specification without declared interface is not allowed".to_string()); + } else { + let mut interfaces = Vec::<String>::new(); + let mut interface_info = + HashMap::<String, BTreeMap<String, (String, String, String)>>::new(); + let mut code = Vec::<String>::new(); + let mut provided_ops = Vec::<String>::new(); + while has_pragma_impl(&trimed_contents) { + let v4: Vec<&str> = trimed_contents.splitn(2, IMPL).collect(); + let s4 = v4.get(1).expect("Error, invalid interface declaration."); + let v5: Vec<&str> = s4.splitn(2, IMPLEND).collect(); + let interface_name = v5.first().unwrap().trim().to_string(); + trimed_contents = v5.get(1).unwrap().trim().to_string(); + let lib_specs = extract_lib_specs(trimed_contents); + match lib_specs { + Ok(RawLibSpec { + contents, + code: mut result, + op_infos, + provided_ops: mut ops, + }) => { + code.append(&mut result); + interface_info.insert(interface_name.clone(), op_infos); + interfaces.push(interface_name.clone()); + provided_ops.append(&mut ops); + trimed_contents = contents; + } + Err(e) => { + return Err(e); + } + } + } + let (provide, interface_provide_map) = generate_provide(interface_info); + Ok(LibSpec { + spec_name, + struct_name, + specs: code.clone(), + provide, + interface_provide_map, + provided_ops: (code, provided_ops), + }) + } + } + } +} + +/// Extracted, but not yet parsed directives from a library spec +struct RawLibSpec { + /// The contents of the LIBSPEC block + contents: String, + /// The code define by the block + code: Vec<String>, + /// Hoare triples for each operation + op_infos: BTreeMap<String, (String, String, String)>, + /// Provided operation names + provided_ops: Vec<String>, +} + +fn is_next_pragma_impl(src: &str) -> bool { + match src.find(IMPL) { + Some(impl_pos) => match src.find(LIBSPEC) { 
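+ // both pragma kinds occur below this point: the next block is an interface (IMPL) only if it appears before the next LIBSPEC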
+ Some(spec_pos) => impl_pos < spec_pos, + None => true, + }, + None => false, + } +} + +fn has_pragma_impl(src: &str) -> bool { + src.contains(IMPL) +} + +fn has_pragma_spec(src: &str) -> bool { + src.contains(LIBSPEC) +} + +fn generate_provide( + interface_info: HashMap<String, BTreeMap<String, (String, String, String)>>, +) -> (String, Bounds) { + let mut interfaces = Vec::<String>::new(); + let mut provide = String::new(); + let mut interface_provide_map = Bounds::new(); + for (interface, infos) in interface_info.iter() { + let mut specs = Vec::<String>::new(); + let mut pres = Vec::<String>::new(); + for (_key, value) in infos.iter() { + specs.push(value.0.clone()); + pres.push(value.1.clone()); + } + let specs_name = interface.to_lowercase() + "-specs"; + let pres_name = interface.to_lowercase() + "-pres"; + let interface_name = interface.to_lowercase(); + let specs_str = + "\n(define ".to_string() + &specs_name + " (list " + &specs.join(" ") + "))\n"; + let pres_str = "(define ".to_string() + &pres_name + " (list " + &pres.join(" ") + "))\n"; + let interface_str = "(define ".to_string() + + &interface_name + + " (cons " + + &specs_name + + " " + + &pres_name + + "))\n"; + provide = provide + &specs_str + &pres_str + &interface_str; + interfaces.push(interface.to_lowercase()); + interface_provide_map.insert(interface.to_string(), interface_name); + } + let provide_str = "(provide ".to_string() + &interfaces.join(" ") + ")"; + + provide = provide + &provide_str; + (provide, interface_provide_map) +} + +/// Extract the relevant LIBSPEC blocks +fn extract_lib_specs(src: String) -> Result<RawLibSpec, ErrorMessage> { + let mut result = Vec::<String>::new(); + let mut contents = src.trim(); + let mut op_infos = BTreeMap::<String, (String, String, String)>::new(); + let mut provided_ops = Vec::<String>::new(); + while !contents.is_empty() && !is_next_pragma_impl(contents) { + if contents.contains(LIBSPEC) && contents.contains(LIBSPECEND) { + let v1: Vec<&str> = contents.splitn(2, LIBSPEC).collect(); + let s = v1.get(1).expect("Error, invalid specification."); + let v2: Vec<&str> = s.splitn(2, LIBSPECEND).collect(); + let spec = v2.first().unwrap().trim().to_string(); + let info = extract_op_info(spec.clone()).unwrap(); + op_infos.insert(info.0, (info.1.clone(), info.2, info.3)); + provided_ops.push(info.1); + let v3: Vec<&str> = spec.splitn(2, OPNAMEEND).collect(); + let code = v3 + .get(1) + .unwrap() + .trim_matches(|c| c == '\t' || c == ' ') + .to_string(); + result.push(code); + contents = v2.get(1).unwrap().trim(); + } else { + break; + } + } + Ok(RawLibSpec { + contents: contents.to_string(), + code: result, + op_infos, + provided_ops, + }) +} + +fn extract_op_info(spec: String) -> Result<(String, String, String, String), ErrorMessage> { + let op_name_pragmas: Vec<&str> = spec.matches(OPNAME).collect(); + let op_name_end_pragmas: Vec<&str> = spec.matches(OPNAMEEND).collect(); + if spec.starts_with('/') && op_name_pragmas.len() == 1 && op_name_end_pragmas.len() == 1 { + let v1: Vec<&str> = spec.split(OPNAME).collect(); + let s = v1 + .get(1) + .expect("Error, invaild operation information declaration."); + let v2: Vec<&str> = s.split(OPNAMEEND).collect(); + let info_string = v2 + .first() + .expect("Error, invaild operation information declaration."); + let mut infos: Vec<&str> = info_string.trim().split(' ').collect(); + if infos.len() == 4 { + let post = infos.pop().unwrap(); + let pre = infos.pop().unwrap(); + let op_spec = infos.pop().unwrap(); + let name = infos.pop().unwrap(); + 
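// the fields were popped back-to-front, so this returns them as (name, op_spec, pre, post) + 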
Ok(( + name.to_string(), + op_spec.to_string(), + pre.to_string(), + post.to_string(), + )) + } else { + Err("Error, invalid operation information declaration.".to_string()) + } + } else { + Err("Error, invalid operation information declaration.".to_string()) + } +} + +fn write_lib_file(filename: &str, contents: &[String], provide: &str) -> Result<(), Error> { + let path = GENPATH; + + let mut output = fs::File::create(path.to_owned() + filename)?; + write!(output, "{}", LANGDECL)?; + for item in contents.iter() { + write!(output, "{}", item)?; + } + write!(output, "{}", provide)?; + Ok(()) +} diff --git a/src/crates/primrose/src/main.rs b/src/crates/primrose/src/main.rs new file mode 100644 index 0000000..d307942 --- /dev/null +++ b/src/crates/primrose/src/main.rs @@ -0,0 +1,91 @@ +use log::info; +use primrose::tools::nary_cartesian_product; +use primrose::{ContainerSelector, Error}; +use std::collections::HashMap; +use std::error::Error as StdError; +use std::path::Path; +use std::{env, fs, io::Write}; + +const LIB: &str = "./crates/primrose-library/src/"; + +fn main() -> Result<(), Box<dyn StdError>> { + env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info")).init(); + + let (file_name, output_name, model_size) = + parse_args().map_err(Into::<Box<dyn StdError>>::into)?; + + info!( + "Running on {}, outputting to {}, with model size {}", + file_name, output_name, model_size + ); + run(file_name, output_name, model_size)?; + + info!("Yippeeeeee!!"); + Ok(()) +} + +fn parse_args() -> Result<(String, String, usize), &'static str> { + let args: Vec<String> = env::args().collect(); + if args.len() == 1 { + Ok(( + "./spec_code/example_unique.rs".to_string(), + "default".to_string(), + 5, + )) + } else if args.len() == 4 { + let model_size_input = match args.get(3).unwrap().parse::<u64>() { + Ok(val) => val, + Err(_) => { + return Err("Invalid model size"); + } + }; + let model_size = model_size_input as usize; + + Ok(( + "./spec_code/".to_string() + args.get(1).unwrap(), + args.get(2).unwrap().to_string(), + model_size, + )) + } else { + return Err("Usage: <file_name> [output_path] [model_size]"); + } +} + +/// Read input from the filename, calculate all valid implementations, and output them to separate files in output_path +fn run(input: String, output_name: String, model_size: usize) -> Result<(), Error> { + info!("Generating candidate code outputs"); + let gen_code = gen_outputs(input, model_size)?; + + let output_path = format!("./gen_code/{}/", output_name); + fs::create_dir_all(&output_path).expect("error creating output directory"); + + info!("Writing {} different outputs", gen_code.len()); + for (i, code) in gen_code.iter().enumerate() { + let mut output = fs::File::create(format!("{}/{}{}.rs", output_path, output_name, i)) + .expect("error creating output file"); + write!(output, "{}", &code).expect("error writing output file"); + } + + Ok(()) +} + +/// Process the given file, returning code for all possible types +fn gen_outputs(filename: String, model_size: usize) -> Result<Vec<String>, Error> { + let selector = ContainerSelector::from_path(Path::new(&filename), Path::new(LIB), model_size)?; + + let mut candidates = HashMap::new(); + for tag in selector.container_tags() { + let found = selector.find_candidates(tag)?; + candidates.insert(tag, found); + } + + Ok(nary_cartesian_product(&candidates) + .into_iter() + .map(|selections| { + selections + .into_iter() + .map(|(tag_name, typ_name)| selector.gen_replacement_code(tag_name, typ_name)) 
.collect::<String>() + }) + .collect()) +} diff --git a/src/crates/primrose/src/parser.rs b/src/crates/primrose/src/parser.rs new file mode 100644 index 0000000..47ae8e3 --- /dev/null +++ b/src/crates/primrose/src/parser.rs @@ -0,0 +1,203 @@ +extern crate peg; +use peg::parser; + +use std::iter::FromIterator; +use std::vec::Vec; + +use crate::types::{Bounds, Name, Type, TypeVar}; + +pub type Id = String; + +pub type Literal = String; + +#[derive(Clone, Debug)] +pub enum Refinement { + Prop(Term), + AndProps(Box<Refinement>, Box<Refinement>), +} + +#[derive(Clone, Debug)] +pub enum Term { + Lit(Literal), + Var(Id), + Lambda((Id, Bounds), Box<Term>), + App(Box<Term>, Box<Term>), +} + +impl Term { + pub fn is_quantifier(&self) -> bool { + match self { + Term::Var(id) => id.to_string().eq("forall"), + _ => false, + } + } + + pub fn require_cdr(&self) -> bool { + match self { + Term::Var(id) => id.to_string().eq("pop"), + _ => false, + } + } +} + +impl ToString for Term { + fn to_string(&self) -> String { + match self { + Term::Lit(l) => l.to_string(), + Term::Var(id) => id.to_string(), + Term::Lambda((id, _bounds), _t) => id.to_string(), + Term::App(t1, t2) => t1.to_string() + &t2.to_string(), + } + } +} + +#[derive(Clone, Debug)] +pub enum Decl { + PropertyDecl((Id, Type), Box<Term>), + ConTypeDecl(Type, (Id, Bounds, Refinement)), +} + +impl Decl { + pub fn is_prop_decl(&self) -> bool { + matches!(self, Decl::PropertyDecl(_, _)) + } + + pub fn is_contype_decl(&self) -> bool { + matches!(self, Decl::ConTypeDecl(_, _)) + } + + pub fn get_name(&self) -> String { + match self { + Decl::ConTypeDecl(con_ty, _) => { + let (con, _) = con_ty.get_con_elem().unwrap(); + con + } + Decl::PropertyDecl((id, _), _) => id.to_string(), + } + } +} + +pub type Spec = Vec<Decl>; +pub type Code = String; + +#[derive(Clone, Debug)] +pub enum Block { + SpecBlock(Box<Spec>, usize), + CodeBlock(Box<Code>, usize), +} + +impl Block { + pub fn is_spec_block(&self) -> bool { + matches!(self, Block::SpecBlock(_, _)) + } + + pub fn is_code_block(&self) -> bool { + matches!(self, Block::CodeBlock(_, _)) + } + + pub fn extract_spec(&self) -> Spec { + match self { + Block::SpecBlock(spec, _) => spec.to_vec(), + _ => Vec::new(), + } + } +} + +pub type Prog = Vec<Block>; + +parser! { +pub grammar spec() for str { + pub rule id() -> Id + = s:$(!keyword() ([ 'a'..='z' | 'A'..='Z' | '_' ]['a'..='z' | 'A'..='Z' | '0'..='9' | '_' | '-' | '?' ]*)) + { s.into() } + + pub rule name() -> Name + = s:$(!keyword() ([ 'a'..='z' | 'A'..='Z' | '_' ]['a'..='z' | 'A'..='Z' | '0'..='9' ]*)) + { s.into() } + + pub rule keyword() -> () + = ("crate" / "super" / "self" / "Self" / "const" / "mut" / "true" / "false" / "pub" / "in" / "from" / "with" + / "f32"/ "i32" / "u32" / "bool" / "let" / "if" / "else" / "for" / "while" / "fn" / "do") + ![ 'a'..='z' | 'A'..='Z' | '0'..='9' | '_' | '-' | '?' ] + + pub rule literal() -> Literal + = s:$("true" / "false") + { s.into() } + + pub rule ty() -> Type + = precedence! 
{ + n:name() "<" _ t:ty() _ ">" + { Type::Con(n, Box::new(t), Bounds::from(["Container".to_string()])) } + -- + n:name() + { Type::Var(TypeVar::new(n)) } + } + + pub rule term() -> Term + = precedence!{ + lit: literal() { Term::Lit(lit) } + -- + v:id() { Term::Var(v) } + -- + "\\" v:id() _ "->" _ t:term() { Term::Lambda((v, std::collections::HashSet::default()), Box::new(t)) } + -- + "\\" v:id() _ "<:" _ "(" _ b:bounds() _ ")" _ "->" _ t:term() { Term::Lambda((v, b), Box::new(t)) } + -- + "(" _ t1:term() __ t2:term() _ ")" { Term::App(Box::new(t1), Box::new(t2)) } + } + + pub rule refinement() -> Refinement + = precedence!{ + t:term() { Refinement::Prop(t) } + -- + "(" _ p1:refinement() __ "and" __ p2:refinement() _ ")" { Refinement::AndProps(Box::new(p1), Box::new(p2)) } + } + + pub rule bounds() -> Bounds + = l: ((_ n:name() _ {n}) ++ "," ) { Bounds::from_iter(l.iter().cloned()) } + + pub rule decl() -> Decl + = precedence! { + _ "property" __ p:id() _ "<" _ ty:ty() _ ">" _ "{" _ t:term() _ "}" _ + { + Decl::PropertyDecl((p, ty), Box::new(t)) + } + -- + _ "type" __ ty:ty() _ "=" _ "{" _ c:id() _ "impl" __ "(" _ b:bounds() _ ")" _ "|" _ t:refinement() _ "}" _ + { + Decl::ConTypeDecl(ty, (c, b, t)) + } + } + + pub rule spec() -> Spec + = _ "/*SPEC*" _ decls: (d:decl() { d }) ** _ _ "*ENDSPEC*/" _ + { + decls + } + + pub rule code() -> Code + = _ "/*CODE*/" c:$((!"/*ENDCODE*/"!"/*SPEC*"!"*ENDSPEC*/"[_])*) "/*ENDCODE*/" _ { c.into() } + + pub rule block() -> Block + = precedence! { + _ p:position!() s:spec() _ + { + Block::SpecBlock(Box::new(s), p) + } + -- + _ p:position!() c:code() _ + { + Block::CodeBlock(Box::new(c), p) + } + } + + pub rule prog() -> Prog + = _ blocks: (b:block() { b }) ** _ _ + { + blocks + } + + rule _ = quiet!{[' ' | '\n' | '\t']*} + rule __ = quiet!{[' ' | '\n' | '\t']+} + +}} diff --git a/src/crates/primrose/src/run_matching.rs b/src/crates/primrose/src/run_matching.rs new file mode 100644 index 0000000..c494f50 --- /dev/null +++ b/src/crates/primrose/src/run_matching.rs @@ -0,0 +1,109 @@ +use std::fs; +use std::io::{Error, Write}; +use std::process::Command; + +use crate::spec_map::MatchSetup; + +type ExecutionError = String; + +pub const LANGDECL: &str = "#lang rosette\n"; +const GENNAME: &str = "./racket_specs/gen_match/match-script.rkt"; +const LIBSPECPATH: &str = "../gen_lib_spec/"; +const PROPSPECPATH: &str = "../gen_prop_spec/"; +//const SETUP: &str = "(require \"../match-setup.rkt\")\n"; +const LIBDIR: &str = "./racket_specs/gen_lib_spec/"; +const PROPDIR: &str = "./racket_specs/gen_prop_spec/"; +const MATCHDIR: &str = "./racket_specs/gen_match/"; + +pub fn initialise_match_setup() -> MatchSetup { + let mut match_setup = MatchSetup::new(); + match_setup.insert( + "Container".to_string(), + "../container-setup.rkt".to_string(), + ); + match_setup.insert( + "Indexable".to_string(), + "../indexable-setup.rkt".to_string(), + ); + match_setup.insert("Stack".to_string(), "../stack-setup.rkt".to_string()); + match_setup +} + +pub fn gen_match_script( + prop: &str, + match_setup: &str, + prop_spec_file: &str, + lib_spec_file: &str, + interface_spec: &str, + symbolics: &[String], +) -> Result<String, Error> { + let mut output = fs::File::create(GENNAME)?; + write!(output, "{}", LANGDECL)?; + let require_prop = "(require \"".to_string() + PROPSPECPATH + prop_spec_file + "\")\n"; + write!(output, "{}", require_prop)?; + let require_lib = "(require \"".to_string() + LIBSPECPATH + lib_spec_file + "\")\n"; + write!(output, "{}", require_lib)?; + write!( + output, + "{}", 
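+ // the require line pointing at the interface-specific match-setup module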
+ "(require \"".to_string() + match_setup + "\")\n" + )?; + let s = symbolics.join(" "); + let code = "(check ".to_string() + + prop + + " (cdr " + + interface_spec + + ") (car " + + interface_spec + + ") ls " + + &s + + ")\n"; + write!(output, "{}", code)?; + Ok(GENNAME.to_string()) +} + +pub fn run_matching(filename: String) -> Result<bool, ExecutionError> { + let output = Command::new("sh") + .arg("-c") + .arg("racket ".to_owned() + &filename) + .output() + .expect("failed to execute process"); + let raw = output.stdout; + let result_str = String::from_utf8_lossy(&raw).to_string(); + let result = result_str.trim(); + if result == "#t" { + Ok(true) + } else if result == "#f" { + Ok(false) + } else { + Err("Error: Not a valid output.".to_string()) + } +} + +pub fn cleanup_script() { + Command::new("sh") + .arg("-c") + .arg("rm -f ".to_owned() + GENNAME) + .output() + .expect("Fail to clean up"); +} + +pub fn setup_dirs() { + Command::new("sh") + .arg("-c") + .arg("mkdir -p ".to_owned() + PROPDIR) + .output() + .expect("Fail to create the property specification directory"); + + Command::new("sh") + .arg("-c") + .arg("mkdir -p ".to_owned() + LIBDIR) + .output() + .expect("Fail to create the library specification directory"); + + Command::new("sh") + .arg("-c") + .arg("mkdir -p ".to_owned() + MATCHDIR) + .output() + .expect("Fail to create the matching script directory"); +} diff --git a/src/crates/primrose/src/selector.rs b/src/crates/primrose/src/selector.rs new file mode 100644 index 0000000..fcfd0c6 --- /dev/null +++ b/src/crates/primrose/src/selector.rs @@ -0,0 +1,206 @@ +use std::{ + collections::HashMap, + fs, + io::{self, Write}, + path::Path, +}; + +use log::{debug, trace}; + +const MATCHSCRIPT: &str = "./racket_specs/gen_match/match-script.rkt"; +const OPS: &str = "./racket_specs/gen_lib_spec/ops.rkt"; + +use crate::{ + analysis::Analyser, + codegen::process_bound_decl, + description::{Description, Tag, TagId}, + error::Error, + library_specs::LibSpec, + parser::{spec, Block}, + run_matching::{ + cleanup_script, gen_match_script, initialise_match_setup, run_matching, setup_dirs, + LANGDECL, + }, + source_file::read_src, + spec_map::{LibSpecs, MatchSetup, ProvidedOps}, + type_check::TypeChecker, +}; + +/// Selects containers for a specific file. +/// Creating this requires doing some analysis on the file, so is relatively costly. +pub struct ContainerSelector { + /// Analysis of the source file + pub(crate) analyser: Analyser, + + pub(crate) blocks: Vec<Block>, + pub(crate) bounds_decl: String, + pub(crate) match_setup: MatchSetup, + pub(crate) lib_specs: LibSpecs, +} + +impl ContainerSelector { + /// Load the file at the given path, perform analysis, and return a selector. + pub fn from_path(path: &Path, lib_path: &Path, model_size: usize) -> Result<Self, Error> { + Self::from_src( + &read_src(path).map_err(Error::InputRead)?, + lib_path, + model_size, + ) + } + + /// Analyse the given source and return a selector for it. 
+ pub fn from_src(src: &str, lib_path: &Path, model_size: usize) -> Result<Self, Error> { + debug!("Setting up directories"); + setup_dirs(); + + debug!("Parsing into blocks"); + let blocks = spec::prog(src)?; + + debug!("Running type checker"); + let mut tc = TypeChecker::new(); + tc.check_prog(blocks.clone())?; + trace!("Results of type checking: {:#?}", &tc); + + debug!("Running analysis"); + let mut analyser = Analyser::new(); + analyser + .analyse_prog(blocks.clone(), model_size) + .map_err(Error::AnalyserError)?; + trace!("Results of analysis: {:#?}", &analyser); + + Self::new(analyser, blocks, lib_path) + } + + /// Create a new selector using the given analysis + fn new(analyser: Analyser, blocks: Vec<Block>, lib_path: &Path) -> Result<Self, Error> { + Ok(Self { + blocks, + bounds_decl: process_bound_decl(analyser.ctx()), + match_setup: initialise_match_setup(), + lib_specs: LibSpec::process_all(lib_path).map_err(Error::LibraryError)?, + analyser, + }) + } + + /// Get all container tags in this context + pub fn container_tags(&self) -> impl Iterator<Item = &String> { + self.analyser + .ctx() + .iter() + .filter(|(_k, v)| v.is_con_tag()) + .map(|(k, _)| k) + } + + /// Find candidates for all container tags in this context. + /// Returns a map from tag name to list of candidates. + pub fn find_all_candidates(&self) -> Result<HashMap<&TagId, Vec<String>>, Error> { + let mut candidates = HashMap::new(); + for tag in self.container_tags() { + debug!("Finding candidates for tag {}", tag); + let found = self.find_candidates(tag)?; + + debug!("Found {} candidates for tag {}", found.len(), tag); + candidates.insert(tag, found); + } + + Ok(candidates) + } + + /// Find candidate container types for a given Tag in this context. + /// Panics if tag_id is not a key, or is not the correct type of tag + pub fn find_candidates(&self, tag_id: &String) -> Result<Vec<String>, Error> { + let tag = self.analyser.ctx().get(tag_id).expect("invalid tag_id"); + + let Tag::Con(_elem_ty, _i_name, tags) = tag else { + panic!("tag_id was not Tag::Con"); + }; + + debug!("Finding container types for tag {}", tag_id); + let prop_descs: Vec<Description> = tags + .iter() + .filter(|t| t.is_prop_tag()) + .map(|t| t.extract_prop_desc()) + .collect(); + + let bounds: Vec<Description> = tags + .iter() + .filter(|t| t.is_bound_tag()) + .flat_map(|t| t.extract_bound_descs()) + .collect(); + + let mut structs = Vec::new(); + + // select library structs implement bounds decl in contype + let lib_spec_impls = self.lib_specs.iter().filter(|(_name, spec)| { + bounds.iter().all(|i| { + spec.interface_provide_map + .keys() + .cloned() + .collect::<String>() + .contains(i) + }) + }); + + for (name, spec) in lib_spec_impls { + debug!("{} - ...", name); + match write_provided_ops(&spec.provided_ops) { + Ok(_) => {} + Err(_) => { + return Err(Error::ProvidedOperationsNotInLibrarySpec); + } + } + let mut is_match = true; + for p in prop_descs.iter() { + for i in bounds.iter() { + let (prop_file, symbolics) = + self.analyser.prop_specs().get(p).expect( + &("Error: No property specification found for: ".to_string() + &p), + ); + gen_match_script( + p, + self.match_setup.get(i).unwrap(), + prop_file, + &spec.spec_name, + spec.interface_provide_map.get(i).unwrap(), + symbolics, + ) + .map_err(Error::GenMatchScript)?; + + // true - match; false - not match + is_match &= + run_matching(MATCHSCRIPT.to_string()).map_err(Error::ExecutionError)?; + if !is_match { + break; + } + } + if !is_match { + break; + } + } + if is_match { + debug!("{} - 
YAY", name); + structs.push(name.to_string()); + } else { + debug!("{} - NAY", name); + } + } + + cleanup_script(); + Ok(structs) + } +} + +/// Write the provided operation specifications from a library spec to the correct path ([`self::OPS`]). +fn write_provided_ops(provided_ops: &ProvidedOps) -> Result<(), io::Error> { + let ops_path = OPS; + let (code, ops) = provided_ops; + let mut output = fs::File::create(ops_path)?; + write!(output, "{}", LANGDECL)?; + for item in code.iter() { + write!(output, "{}", item)?; + } + let ops_string = ops.join(" "); + let provide = "\n(provide ".to_string() + &ops_string + ")"; + write!(output, "{}", provide)?; + Ok(()) +} diff --git a/src/crates/primrose/src/source_file.rs b/src/crates/primrose/src/source_file.rs new file mode 100644 index 0000000..0884015 --- /dev/null +++ b/src/crates/primrose/src/source_file.rs @@ -0,0 +1,47 @@ +use std::{ + fs, + io::{self}, + path::Path, +}; + +pub const CODE: &str = "/*CODE*/"; +pub const CODEEND: &str = "/*ENDCODE*/"; + +pub const SPEC: &str = "/*SPEC*"; +pub const SPECEND: &str = "*ENDSPEC*/"; + +pub fn read_src(filename: &Path) -> Result<String, io::Error> { + let contents = fs::read_to_string(filename)?; + Ok(preprocess_src(contents)) +} + +/// Mark all parts of the code so everything is between either CODE and CODEEND +/// or SPEC and SPECEND +pub fn preprocess_src(src: String) -> String { + let mut trimed_src = src.trim(); + let mut result = String::new(); + while !trimed_src.is_empty() { + match trimed_src.find(SPEC) { + Some(n) => match trimed_src.find(SPECEND) { + Some(m) => { + if n > 0 { + let code = &trimed_src[..n]; + result = result + CODE + code + CODEEND; + } + let spec = &trimed_src[n..(m + SPECEND.len())]; + trimed_src = &trimed_src[(m + SPECEND.len())..].trim(); + result += spec; + } + None => { + result = result + CODE + trimed_src + CODEEND; + break; + } + }, + None => { + result = result + CODE + trimed_src + CODEEND; + break; + } + } + } + result +} diff --git a/src/crates/primrose/src/spec_map.rs b/src/crates/primrose/src/spec_map.rs new file mode 100644 index 0000000..22b84c8 --- /dev/null +++ b/src/crates/primrose/src/spec_map.rs @@ -0,0 +1,18 @@ +use std::collections::HashMap; + +use crate::library_specs::LibSpec; + +type StructName = String; +type BoundName = String; +type BoundProvide = String; +type MatchSetupDir = String; +pub type Bounds = HashMap<BoundName, BoundProvide>; +pub type ProvidedOps = (Vec<String>, Vec<String>); + +type PropertyName = String; +type PropSpecDir = String; +type PropSymbolics = Vec<String>; + +pub type LibSpecs = HashMap<StructName, LibSpec>; +pub type PropSpecs = HashMap<PropertyName, (PropSpecDir, PropSymbolics)>; +pub type MatchSetup = HashMap<BoundName, MatchSetupDir>; diff --git a/src/crates/primrose/src/tools/mod.rs b/src/crates/primrose/src/tools/mod.rs new file mode 100644 index 0000000..ebee9b5 --- /dev/null +++ b/src/crates/primrose/src/tools/mod.rs @@ -0,0 +1,89 @@ +//! 
Useful tools for benchmarking & selection + +use rand::rngs::StdRng; +use rand::seq::SliceRandom; + +use rand::SeedableRng; + +use std::collections::HashMap; +use std::mem::size_of; +use std::{hash::Hash, vec::Vec}; + +pub fn gen_dataset_1() -> Vec<u32> { + let size = 1024 * 1024; // 1 MB + let amount = size / size_of::<u32>(); + let mut data: Vec<u32> = (1..amount as u32).collect(); + let mut rng = StdRng::seed_from_u64(222); + data.shuffle(&mut rng); + data +} + +pub fn gen_dataset_128() -> Vec<u32> { + let size = 128 * 1024 * 1024; // 128 MB + let amount = size / size_of::<u32>(); + let mut data: Vec<u32> = (1..amount as u32).collect(); + let mut rng = StdRng::seed_from_u64(222); + data.shuffle(&mut rng); + data +} + +pub fn gen_dataset_256() -> Vec<u32> { + let size = 256 * 1024 * 1024; // 256 MB + let amount = size / size_of::<u32>(); + let mut data: Vec<u32> = (1..amount as u32).collect(); + let mut rng = StdRng::seed_from_u64(222); + data.shuffle(&mut rng); + data +} + +pub fn gen_dataset_512() -> Vec<u32> { + let size = 512 * 1024 * 1024; // 512 MB + let amount = size / size_of::<u32>(); + let mut data: Vec<u32> = (1..amount as u32).collect(); + let mut rng = StdRng::seed_from_u64(222); + data.shuffle(&mut rng); + data +} + +/// Get the cartesian product of all of the values in set. +/// Returns a list of every possible combination created by picking a value from the matching key in `bins`. +pub fn nary_cartesian_product<K: Hash + Eq, V>(bins: &HashMap<K, Vec<V>>) -> Vec<HashMap<&K, &V>> { + let bins = bins.iter().collect::<Vec<_>>(); + let mut indices = vec![0; bins.len()]; + let lengths = bins.iter().map(|(_, v)| v.len()).collect::<Vec<_>>(); + let mut outputs = Vec::with_capacity(lengths.iter().product()); + loop { + let code = indices + .iter() + .enumerate() + // get candidate we're using for each type + .map(|(key_idx, val_idx)| (bins[key_idx].0, &bins[key_idx].1[*val_idx])) + .collect::<HashMap<_, _>>(); + + outputs.push(code); + + if inc_carrying(&mut indices, &lengths) { + break; + } + } + + outputs +} + +/// Increment the left hand side of indices, carrying as needed if any entries reach the +/// the value defined in `bounds`. +/// `max` and `indices` must be the same length. +/// Returns true if carried outside the list, otherwise false. +pub fn inc_carrying(indices: &mut [usize], bounds: &[usize]) -> bool { + for i in 0..indices.len() { + if indices[i] < bounds[i] - 1 { + indices[i] += 1; + return false; + } else { + indices[i] = 0; + // carry to next bit + } + } + + true +} diff --git a/src/crates/primrose/src/type_check.rs b/src/crates/primrose/src/type_check.rs new file mode 100644 index 0000000..29a8d2a --- /dev/null +++ b/src/crates/primrose/src/type_check.rs @@ -0,0 +1,388 @@ +//! Performs type checking for specifications +use thiserror::Error; + +use crate::inference::TypeEnv; +use crate::parser::{Decl, Prog, Refinement, Spec}; +use crate::types::{Bounds, Type, TypeScheme, TypeVar, TypeVarGen}; + +use std::ops::Deref; + +#[derive(Error, Debug)] +#[error("{0}")] +pub struct TypeError(String); + +#[derive(Debug)] +pub struct TypeChecker { + global_ctx: TypeEnv, + tvg: TypeVarGen, +} + +impl TypeChecker { + /// Create a new type checking context. 
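+ /// The returned context is pre-populated with type schemes for the built-in combinators and predicates (e.g. `for-all-elems`, `for-all-unique-pairs`, `equal?`, `leq?`, `forall`).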
+ pub fn new() -> TypeChecker { + let mut tc = TypeChecker { + global_ctx: TypeEnv::new(), + tvg: TypeVarGen::new(), + }; + tc.predefined(); + + tc + } + + /// Add types for pre-defined functions + fn predefined(&mut self) { + // put for_all_unique_pair into context + let binary_fn1 = Type::Fun( + Box::new(Type::Var(TypeVar::new("T".to_string()))), + Box::new(Type::Fun( + Box::new(Type::Var(TypeVar::new("T".to_string()))), + Box::new(Type::Bool()), + )), + ); + self.global_ctx.insert( + "for-all-unique-pairs".to_string(), + TypeScheme { + vars: Vec::new(), + ty: Type::Fun( + Box::new(Type::Con( + "Con".to_string(), + Box::new(Type::Var(TypeVar::new("T".to_string()))), + Bounds::from(["Container".to_string()]), + )), + Box::new(Type::Fun(Box::new(binary_fn1), Box::new(Type::Bool()))), + ), + }, + ); + + // put for_all_unique_pair into context + let binary_fn2 = Type::Fun( + Box::new(Type::Var(TypeVar::new("T".to_string()))), + Box::new(Type::Fun( + Box::new(Type::Var(TypeVar::new("T".to_string()))), + Box::new(Type::Bool()), + )), + ); + self.global_ctx.insert( + "for-all-consecutive-pairs".to_string(), + TypeScheme { + vars: Vec::new(), + ty: Type::Fun( + Box::new(Type::Con( + "Con".to_string(), + Box::new(Type::Var(TypeVar::new("T".to_string()))), + Bounds::from(["Container".to_string()]), + )), + Box::new(Type::Fun(Box::new(binary_fn2), Box::new(Type::Bool()))), + ), + }, + ); + + let unary_fn = Type::Fun( + Box::new(Type::Var(TypeVar::new("T".to_string()))), + Box::new(Type::Bool()), + ); + self.global_ctx.insert( + "for-all-elems".to_string(), + TypeScheme { + vars: Vec::new(), + ty: Type::Fun( + Box::new(Type::Con( + "Con".to_string(), + Box::new(Type::Var(TypeVar::new("T".to_string()))), + Bounds::from(["Container".to_string()]), + )), + Box::new(Type::Fun(Box::new(unary_fn), Box::new(Type::Bool()))), + ), + }, + ); + + // put neq into context + let neq_fn = Type::Fun( + Box::new(Type::Var(TypeVar::new("T".to_string()))), + Box::new(Type::Fun( + Box::new(Type::Var(TypeVar::new("T".to_string()))), + Box::new(Type::Bool()), + )), + ); + self.global_ctx.insert( + "neq".to_string(), + TypeScheme { + vars: Vec::new(), + ty: neq_fn, + }, + ); + + // put leq into context + let leq_fn = Type::Fun( + Box::new(Type::Var(TypeVar::new("T".to_string()))), + Box::new(Type::Fun( + Box::new(Type::Var(TypeVar::new("T".to_string()))), + Box::new(Type::Bool()), + )), + ); + self.global_ctx.insert( + "leq?".to_string(), + TypeScheme { + vars: Vec::new(), + ty: leq_fn, + }, + ); + + let geq_fn = Type::Fun( + Box::new(Type::Var(TypeVar::new("T".to_string()))), + Box::new(Type::Fun( + Box::new(Type::Var(TypeVar::new("T".to_string()))), + Box::new(Type::Bool()), + )), + ); + self.global_ctx.insert( + "geq?".to_string(), + TypeScheme { + vars: Vec::new(), + ty: geq_fn, + }, + ); + + let equal = Type::Fun( + Box::new(Type::Var(TypeVar::new("T".to_string()))), + Box::new(Type::Fun( + Box::new(Type::Var(TypeVar::new("T".to_string()))), + Box::new(Type::Bool()), + )), + ); + self.global_ctx.insert( + "equal?".to_string(), + TypeScheme { + vars: Vec::new(), + ty: equal, + }, + ); + + let unique_count_fn = Type::Fun( + Box::new(Type::Var(TypeVar::new("T".to_string()))), + Box::new(Type::Fun( + Box::new(Type::Con( + "Con".to_string(), + Box::new(Type::Var(TypeVar::new("T".to_string()))), + Bounds::from(["Container".to_string()]), + )), + Box::new(Type::Bool()), + )), + ); + self.global_ctx.insert( + "unique-count?".to_string(), + TypeScheme { + vars: Vec::new(), + ty: unique_count_fn, + }, + ); + + // the forall 
quantifier + let forall = Type::Fun( + Box::new(Type::Fun( + Box::new(Type::Var(TypeVar::new("T".to_string()))), + Box::new(Type::Bool()), + )), + Box::new(Type::Bool()), + ); + self.global_ctx.insert( + "forall".to_string(), + TypeScheme { + vars: Vec::new(), + ty: forall, + }, + ); + } + + /// Check an entire program + pub fn check_prog(&mut self, prog: Prog) -> Result<(), TypeError> { + let specs: Vec<Spec> = prog + .iter() + .filter(|block| block.is_spec_block()) + .map(|block| block.extract_spec()) + .collect(); + + self.check_specs(&specs) + } + + /// Check a list of specifications + fn check_specs(&mut self, specs: &[Spec]) -> Result<(), TypeError> { + let concat_specs = specs.concat(); + let prop_decls: Vec<&Decl> = concat_specs + .iter() + .filter(|decl| decl.is_prop_decl()) + .collect(); + let contype_decls: Vec<&Decl> = concat_specs + .iter() + .filter(|decl| decl.is_contype_decl()) + .collect(); + + self.check_prop_decls(&prop_decls)?; + self.check_contype_decls(&contype_decls)?; + self.check_bound_decls(&contype_decls) + } + + /// Check all bound declarations + fn check_bound_decls(&mut self, decls: &[&Decl]) -> Result<(), TypeError> { + for decl in decls.iter() { + self.check_bound_decl(decl)?; + } + + Ok(()) + } + + /// Check a single bound declaration + fn check_bound_decl(&mut self, decl: &Decl) -> Result<(), TypeError> { + let Decl::ConTypeDecl(_, (_, ins, _)) = decl else { + return Err(TypeError("Not a valid bound declaration".to_string())); + }; + + // Duplicate bound name checking + for i in ins.iter() { + if self.global_ctx.get(&i.to_string()).is_some() { + return Err(TypeError("Duplicate bound name declaration".to_string())); + } + + // TODO: check each bound is a valid rust trait + } + Ok(()) + } + + /// Check a list of property declarations + fn check_prop_decls(&mut self, decls: &[&Decl]) -> Result<(), TypeError> { + for decl in decls.iter() { + self.check_prop_decl(decl)?; + } + + Ok(()) + } + + /// Check a single property declaration + fn check_prop_decl(&mut self, decl: &Decl) -> Result<(), TypeError> { + let Decl::PropertyDecl((id, _ty), term) = decl else { + return Err(TypeError("Not a valid property declaration".to_string())); + }; + + // Duplicate property decl checking + if self.global_ctx.get(&id.to_string()).is_some() { + return Err(TypeError("Duplicate property declaration".to_string())); + } + + // check well formedness + let ty = self + .global_ctx + .type_inference(term, &mut self.tvg) + .map_err(TypeError)?; + + // it should have type Con<T> -> Bool + let Type::Fun(ref t1, ref t2) = ty else { + return Err(TypeError( + "Not a valid property decl: should have type Con<T> -> Bool".to_string(), + )); + }; + + match (t1.deref(), t2.deref()) { + (Type::Con(n, _t, _), Type::Bool()) => { + if n == "Con" { + self.global_ctx.insert( + id.to_string(), + TypeScheme { + vars: Vec::new(), + ty, + }, + ); + Ok(()) + } else { + Err(TypeError("Not a valid property decl: input does not have basic container type Con<T>".to_string())) + } + } + (_, Type::Bool()) => { + self.global_ctx.insert( + id.to_string(), + TypeScheme { + vars: Vec::new(), + ty, + }, + ); + Ok(()) + } + _ => Err(TypeError( + "Not a valid property decl: should have type Con<T> -> Bool".to_string(), + )), + } + } + + /// Check all container type declarations + fn check_contype_decls(&mut self, decls: &[&Decl]) -> Result<(), TypeError> { + for decl in decls.iter() { + self.check_contype_decl(decl)?; + } + + Ok(()) + } + + /// Check a single container type declaration + fn check_contype_decl(&mut 
self, decl: &Decl) -> Result<(), TypeError> { + let Decl::ConTypeDecl(con_ty, (vid, ins, r)) = decl else { + return Err(TypeError( + "Not a valid container type declaration".to_string(), + )); + }; + + // Duplicate container type decl checking + if self.global_ctx.get(&con_ty.to_string()).is_some() { + return Err(TypeError( + "Duplicate container type declaration".to_string(), + )); + } + + // Insert into local context + let con = Type::Con( + "Con".to_string(), + Box::new(Type::Var(TypeVar::new("T".to_string()))), + ins.clone(), + ); + let mut local_ctx = self.global_ctx.clone(); + local_ctx.insert( + vid.to_string(), + TypeScheme { + vars: Vec::new(), + ty: con, + }, + ); + + // Check that it makes sense in the local context + self.check_ref(&mut local_ctx, r)?; + + // If so, insert into global context + self.global_ctx.insert( + decl.get_name(), + TypeScheme { + vars: Vec::new(), + ty: con_ty.clone(), + }, + ); + Ok(()) + } + + fn check_ref(&mut self, ctx: &mut TypeEnv, r: &Refinement) -> Result<(), TypeError> { + match r { + Refinement::Prop(t) => { + let t = ctx.type_inference(t, &mut self.tvg).map_err(TypeError)?; + + // t has to be boolean + if t.is_bool() { + Ok(()) + } else { + Err(TypeError( + "The refinement has to be evaluated to a Bool type.".to_string(), + )) + } + } + Refinement::AndProps(r1, r2) => { + self.check_ref(ctx, r1)?; + self.check_ref(ctx, r2) + } + } + } +} diff --git a/src/crates/primrose/src/types.rs b/src/crates/primrose/src/types.rs new file mode 100644 index 0000000..6c459fc --- /dev/null +++ b/src/crates/primrose/src/types.rs @@ -0,0 +1,282 @@ +use std::collections::{HashMap, HashSet}; + +use std::hash::Hash; +use std::ops::{Deref, DerefMut}; + +pub type Name = String; + +// traits +pub type Bounds = HashSet<Name>; + +#[derive(Eq, PartialEq, Clone, Debug)] +pub enum Type { + Bool(), + Var(TypeVar), + Con(Name, Box<Type>, Bounds), + Fun(Box<Type>, Box<Type>), +} + +impl Type { + pub fn is_bool(&self) -> bool { + matches!(self, Type::Bool()) + } + + pub fn get_con_elem(&self) -> Option<(String, String)> { + match self { + Type::Con(n, t, _) => Some((n.to_string(), t.to_string())), + _ => None, + } + } +} + +impl ToString for Type { + fn to_string(&self) -> String { + match self { + Type::Bool() => "bool".to_string(), + Type::Var(tv) => tv.to_string(), + Type::Con(n, t, bounds) => { + n.to_string() + + "<" + + &t.to_string() + + ">" + + " <: (" + + &bounds + .clone() + .into_iter() + .collect::<Vec<String>>() + .join(", ") + + ")" + } + Type::Fun(t1, t2) => t1.to_string() + "->" + &t2.to_string(), + } + } +} + +pub type UnificationError = String; + +trait Union { + fn union(&self, other: &Self) -> Self; +} + +impl<K, V> Union for HashMap<K, V> +where + K: Clone + Eq + Hash, + V: Clone, +{ + fn union(&self, other: &Self) -> Self { + let mut res = self.clone(); + for (key, value) in other { + res.entry(key.clone()).or_insert(value.clone()); + } + res + } +} + +impl Type { + // Most general unifier + pub fn mgu(&self, other: &Type) -> Result<Subst, UnificationError> { + match (self, other) { + // Unify function type + (Type::Fun(in1, out1), Type::Fun(in2, out2)) => { + let sub1 = in1.mgu(in2)?; + let sub2 = out1.apply(&sub1).mgu(&out2.apply(&sub1))?; + Ok(sub1.compose(&sub2)) + } + + // Unify con type + (Type::Con(n1, t1, _), Type::Con(n2, t2, _)) => { + if n1 != n2 { + Err("Cannot unify two different container".to_string()) + } else { + t1.mgu(t2) + } + } + + // Type variable biding + (Type::Var(v), t) => v.bind(t), + (t, Type::Var(v)) => v.bind(t), + + // Unify 
primitives + (&Type::Bool(), &Type::Bool()) => Ok(Subst::new()), + + // Otherwise, the types cannot be unified. + (t1, t2) => { + println!("{:?}", t1); + println!("{:?}", t2); + Err("types do not unify".to_string()) + } + } + } +} + +// A substitution is a mapping from type variables to types. +#[derive(Clone, Debug)] +pub struct Subst(HashMap<TypeVar, Type>); + +impl Deref for Subst { + type Target = HashMap<TypeVar, Type>; + fn deref(&self) -> &HashMap<TypeVar, Type> { + &self.0 + } +} +impl DerefMut for Subst { + fn deref_mut(&mut self) -> &mut HashMap<TypeVar, Type> { + &mut self.0 + } +} + +impl Subst { + pub fn new() -> Subst { + Subst(HashMap::new()) + } + + // composing substitutions + pub fn compose(&self, other: &Subst) -> Subst { + Subst( + self.union( + &other + .iter() + .map(|(k, v)| (k.clone(), v.apply(self))) + .collect(), + ), + ) + } +} + +// Fresh variable generator +#[derive(Debug)] +pub struct TypeVarGen { + supply: usize, +} + +impl TypeVarGen { + pub fn new() -> TypeVarGen { + TypeVarGen { supply: 0 } + } + pub fn gen(&mut self) -> TypeVar { + let name = "#T".to_owned() + &self.supply.to_string(); + let v = TypeVar::new(name); + self.supply += 1; + v + } +} + +// Type variables/type names +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct TypeVar { + name: Name, +} + +impl TypeVar { + pub fn new(s: Name) -> TypeVar { + TypeVar { name: s } + } + /// Attempt to bind a type variable to a type, returning an appropriate substitution. + fn bind(&self, ty: &Type) -> Result<Subst, UnificationError> { + // Binding to itself + if let Type::Var(u) = ty { + if u == self { + return Ok(Subst::new()); + } + } + + // Occurance check + if ty.ftv().contains(self) { + return Err("occur check fails".to_string()); + } + + let mut s = Subst::new(); + s.insert(self.clone(), ty.clone()); + Ok(s) + } +} + +impl ToString for TypeVar { + fn to_string(&self) -> String { + self.name.to_string() + } +} + +pub trait Types { + fn ftv(&self) -> HashSet<TypeVar>; + fn apply(&self, s: &Subst) -> Self; +} + +impl<T> Types for Vec<T> +where + T: Types, +{ + // Free type variables + fn ftv(&self) -> HashSet<TypeVar> { + self.iter() + .map(|x| x.ftv()) + .fold(HashSet::new(), |set, x| set.union(&x).cloned().collect()) + } + + // Apply a substitution to a vector of types + fn apply(&self, s: &Subst) -> Vec<T> { + self.iter().map(|x| x.apply(s)).collect() + } +} + +impl Types for Type { + fn ftv(&self) -> HashSet<TypeVar> { + match self { + Type::Var(s) => [s.clone()].iter().cloned().collect(), + &Type::Bool() => HashSet::new(), + Type::Fun(i, o) => i.ftv().union(&o.ftv()).cloned().collect(), + Type::Con(_, s, _) => s.ftv().union(&HashSet::new()).cloned().collect(), + } + } + + // apply substitution + fn apply(&self, s: &Subst) -> Type { + match self { + Type::Var(n) => s.get(n).cloned().unwrap_or(self.clone()), + Type::Fun(t1, t2) => Type::Fun(Box::new(t1.apply(s)), Box::new(t2.apply(s))), + Type::Con(n, t, bounds) => { + Type::Con(n.to_string(), Box::new(t.apply(s)), bounds.clone()) + } + _ => self.clone(), + } + } +} + +// A type scheme is a type with an extra piece of information attached, to constraint the inference +#[derive(Clone, Debug)] +pub struct TypeScheme { + pub vars: Vec<TypeVar>, + pub ty: Type, +} + +impl Types for TypeScheme { + fn ftv(&self) -> HashSet<TypeVar> { + self.ty + .ftv() + .difference(&self.vars.iter().cloned().collect()) + .cloned() + .collect() + } + + fn apply(&self, s: &Subst) -> TypeScheme { + TypeScheme { + vars: self.vars.clone(), + ty: { + let mut sub = 
s.clone(); + for var in &self.vars { + sub.remove(var); + } + self.ty.apply(&sub) + }, + } + } +} + +impl TypeScheme { + /// Instantiates a typescheme into a type. + pub fn instantiate(&self, tvg: &mut TypeVarGen) -> Type { + let newvars = self.vars.iter().map(|_| Type::Var(tvg.gen())); + self.ty + .apply(&Subst(self.vars.iter().cloned().zip(newvars).collect())) + } +} diff --git a/src/racket_specs/combinators.rkt b/src/racket_specs/combinators.rkt new file mode 100644 index 0000000..a74296b --- /dev/null +++ b/src/racket_specs/combinators.rkt @@ -0,0 +1,78 @@ +#lang rosette +; Combinators +; list -> boolean +; we choose the list as our model for specifications +; The binary combinators +(define (for-all-unique-pairs l fn) + (foldl elem-and #t + (flatten + (map (lambda (a) + (map (lambda (b) (fn a b)) (remove a l))) l)))) + +(define (for-all-consecutive-pairs l fn) + (foldl elem-and #t + (map (lambda (p) (fn (first p) (second p))) (consecutive-pairs l)))) + +; The unary combinator +(define (for-all-elems l fn) + (foldl elem-and #t + (map (lambda (a) (fn a)) l))) + +; Helpers +; (elem-and a b) -> boolean +; Since the and operator in Racket is a syntax instead of a procedure, +; we need to create an and procedure which can be used as a parameter +; of a procedure +(define (elem-and a b) (and a b)) + +; (not-equal? a b) -> boolean? +(define (not-equal? a b) (not (equal? a b))) + +; (leq? a b) -> boolean? +(define (leq? . args) + (cond [(andmap string? args) (apply string<=? args)] + [(andmap char? args) (apply char<=? args)] + [else (apply <= args)])) + +; (geq? a b) -> boolean? +(define (geq? . args) + (cond [(andmap string? args) (apply string>=? args)] + [(andmap char? args) (apply char>=? args)] + [else (apply >= args)])) + +; (contains? elem lst) -> boolean? +(define (contains x l) + (cond + [(list? (member x l)) #t] + [else #f])) + +; (unique-count? elem lst) -> boolean +; Checking if the occurance of the elem in the lst is exactly once +(define (unique-count? x l) + (= 1 (count (lambda (y) (= x y)) l))) + +; (once? elem lst) -> boolean? +; The equalvent version of unique-count?, +; created to compare the performance of the solver +(define (once? x l) + (cond + [(empty? l) #f] + [else (or (and (= x (first l)) (not (contains x (rest l)))) + (and (not (= x (first l))) (once? x (rest l))))])) + +; (consecutive-pairs lst) -> list? +; Obtaining all consecutive pairs of elements of a given list +; Examples: +; > (consecutive-pairs '(1 2 3)) +; '((1 2) (2 3)) +; > (consecutive-pairs '(1)) +; '() +; > (consecutive-pairs null) +; '() +(define (consecutive-pairs l) + (cond + [(< (length l) 2) null] + [else (append (list (take l 2)) (consecutive-pairs (drop l 1)))])) + +; Export procedures +(provide for-all-unique-pairs for-all-consecutive-pairs for-all-elems elem-and not-equal? leq? geq? unique-count?)
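
The combinators above form the list model against which all semantic properties are checked: `for-all-elems`, `for-all-consecutive-pairs`, and `for-all-unique-pairs` lift element-level predicates such as `leq?`, `neq?`, and `unique-count?` to whole-list checks. For orientation only, the following is a minimal Rust sketch of the same three checks over a slice; it is not part of the artifact, and the function names are simply chosen to mirror the Racket ones.

```rust
// Illustrative Rust analogues of the Racket combinators (not artifact code).
fn for_all_elems<T>(l: &[T], f: impl Fn(&T) -> bool) -> bool {
    l.iter().all(|a| f(a))
}

fn for_all_consecutive_pairs<T>(l: &[T], f: impl Fn(&T, &T) -> bool) -> bool {
    l.windows(2).all(|w| f(&w[0], &w[1]))
}

fn for_all_unique_pairs<T>(l: &[T], f: impl Fn(&T, &T) -> bool) -> bool {
    // every pair of elements at distinct positions
    (0..l.len()).all(|i| (0..l.len()).filter(|&j| j != i).all(|j| f(&l[i], &l[j])))
}

fn main() {
    let xs = vec![1, 2, 2, 3];
    // `ascending` from the specs: leq? over all consecutive pairs -- holds for xs
    assert!(for_all_consecutive_pairs(&xs, |a, b| a <= b));
    // `unique` from the specs: unique-count? for every element -- fails, 2 occurs twice
    assert!(!for_all_elems(&xs, |a| xs.iter().filter(|x| *x == a).count() == 1));
    // neq? over all unique pairs is another way to state uniqueness -- also fails
    assert!(!for_all_unique_pairs(&xs, |a, b| a != b));
}
```
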
\ No newline at end of file diff --git a/src/racket_specs/container-setup.rkt b/src/racket_specs/container-setup.rkt new file mode 100644 index 0000000..0d5cd79 --- /dev/null +++ b/src/racket_specs/container-setup.rkt @@ -0,0 +1,55 @@ +#lang rosette + +(define (check-spec-len prop pre spec xs) + (assume (and (prop xs) (pre xs))) + (assert (prop (car (spec xs))))) + +(define (check-spec-is-empty prop pre spec xs) + (assume (and (prop xs) (pre xs))) + (assert (prop (car (spec xs))))) + +(define (check-spec-first prop pre spec xs) + (assume (and (prop xs) (pre xs))) + (assert (prop (car (spec xs))))) + +(define (check-spec-last prop pre spec xs) + (assume (and (prop xs) (pre xs))) + (assert (prop (car (spec xs))))) + +(define (check-spec-contains prop pre spec xs x) + (assume (and (prop xs) (pre xs))) + (assert (prop (car (spec xs x))))) + +(define (check-spec-insert prop pre spec xs x) + (assume (and (prop xs) (pre xs))) + (assert (prop (spec xs x)))) + +(define (check-spec-remove prop pre spec xs x) + (assume (and (prop xs) (pre xs))) + (assert (prop (car (spec xs x))))) + +(define (check-spec-clear prop pre spec xs) + (assume (and (prop xs) (pre xs))) + (assert (prop (spec xs)))) + +(define (check-not-contradict prop pre xs) + (assert (and (prop xs) (pre xs) (> (length xs) 1)))) + +(define (check prop pres specs xs x) + (cond + [(or (unsat? (solve (check-not-contradict prop (first pres) xs))) + (unsat? (solve (check-not-contradict prop (second pres) xs))) + (unsat? (solve (check-not-contradict prop (third pres) xs))) + (unsat? (solve (check-not-contradict prop (fourth pres) xs))) + (unsat? (solve (check-not-contradict prop (fifth pres) xs))) + (unsat? (solve (check-not-contradict prop (sixth pres) xs))) + ) #f] + [else (and (unsat? (verify (check-spec-clear prop (first pres) (first specs) xs))) + (unsat? (verify (check-spec-contains prop (second pres) (second specs) xs x))) + (unsat? (verify (check-spec-insert prop (third pres) (third specs) xs x))) + (unsat? (verify (check-spec-is-empty prop (fourth pres) (fourth specs) xs))) + (unsat? (verify (check-spec-len prop (fifth pres) (fifth specs) xs))) + (unsat? (verify (check-spec-remove prop (sixth pres) (sixth specs) xs x))) + )])) + +(provide check)
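
The `check` entry point above is a two-phase harness: it first calls `solve` on `check-not-contradict` for each operation's precondition, rejecting candidates for which the property and precondition cannot hold at the same time (which would make verification vacuous), and only then calls `verify` to show that `clear`, `contains`, `insert`, `is-empty`, `len`, and `remove` each preserve the property. As a rough, testing-flavoured analogue of that shape (a hypothetical `preserves` helper checked on finite example inputs rather than solver queries; not the artifact's verification code):

```rust
/// Hypothetical, testing-flavoured analogue of the Rosette harness: an
/// operation `op` "preserves" `prop` if, on every example input satisfying
/// both `prop` and the precondition `pre`, the property still holds afterwards.
fn preserves<T: Clone>(
    inputs: &[Vec<T>],
    prop: impl Fn(&[T]) -> bool,
    pre: impl Fn(&[T]) -> bool,
    op: impl Fn(&mut Vec<T>),
) -> bool {
    // Phase 1: like check-not-contradict, require at least one input on which
    // the property and precondition hold together, so the check is not vacuous.
    if !inputs.iter().any(|xs| prop(xs) && pre(xs)) {
        return false;
    }
    // Phase 2: like the verify calls, the property must survive the operation.
    inputs
        .iter()
        .filter(|xs| prop(xs.as_slice()) && pre(xs.as_slice()))
        .all(|xs| {
            let mut after = xs.to_vec();
            op(&mut after);
            prop(&after)
        })
}

fn ascending(xs: &[u32]) -> bool {
    xs.windows(2).all(|w| w[0] <= w[1])
}

fn main() {
    let inputs = vec![vec![], vec![1, 2, 4], vec![3, 1, 2]];
    // Inserting at the sorted position preserves `ascending`.
    let ok = preserves(&inputs, ascending, |_| true, |xs| {
        let i = xs.partition_point(|&x| x <= 3);
        xs.insert(i, 3);
    });
    assert!(ok);
}
```
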
\ No newline at end of file diff --git a/src/racket_specs/indexable-setup.rkt b/src/racket_specs/indexable-setup.rkt new file mode 100644 index 0000000..ddd6239 --- /dev/null +++ b/src/racket_specs/indexable-setup.rkt @@ -0,0 +1,31 @@ +#lang rosette + +(define (check-not-contradict prop pre xs) + (assert (and (prop xs) (pre xs) (> (length xs) 1)))) + +(define (check-spec-first prop pre spec xs) + (assume (and (prop xs) (pre xs))) + (assert (prop (car (spec xs))))) + +(define (check-spec-last prop pre spec xs) + (assume (and (prop xs) (pre xs))) + (assert (prop (car (spec xs))))) + + +(define (check-spec-nth prop pre spec xs n) + (assume (and (prop xs) (pre xs))) + (assert (prop (car (spec xs n))))) + + +(define (check prop pres specs xs n) + (cond + [(or (unsat? (solve (check-not-contradict prop (first pres) xs))) + (unsat? (solve (check-not-contradict prop (second pres) xs))) + (unsat? (solve (check-not-contradict prop (third pres) xs))) + ) #f] + [else (and (unsat? (verify (check-spec-first prop (first pres) (first specs) xs))) + (unsat? (verify (check-spec-last prop (second pres) (second specs) xs))) + (unsat? (verify (check-spec-nth prop (third pres) (third specs) xs n))) + )])) + +(provide check)
\ No newline at end of file diff --git a/src/racket_specs/stack-setup.rkt b/src/racket_specs/stack-setup.rkt new file mode 100644 index 0000000..336cf8d --- /dev/null +++ b/src/racket_specs/stack-setup.rkt @@ -0,0 +1,10 @@ +#lang rosette + +(define (check-push-pop prop pres xs) + (assume (and ((first pres) xs) ((second pres) xs))) + (assert (prop xs))) + +(define (check prop pres specs xs x) + (unsat? (verify (check-push-pop prop pres xs)))) + +(provide check)
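
The stack harness verifies the `lifo` property used by the stack examples: popping immediately after pushing `x` must return `x`. The same round-trip, written here as a plain Rust check against the standard `Vec` purely to illustrate what is being verified (the artifact verifies this with Rosette, not with this code):

```rust
/// The lifo round-trip property as an ordinary Rust check over Vec
/// (illustrative only).
fn lifo_round_trip(mut stack: Vec<u32>, x: u32) -> bool {
    stack.push(x);
    stack.pop() == Some(x)
}

fn main() {
    for x in 0..100 {
        assert!(lifo_round_trip(vec![1, 2, 3], x));
        assert!(lifo_round_trip(Vec::new(), x));
    }
}
```
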
\ No newline at end of file diff --git a/src/rust-toolchain.toml b/src/rust-toolchain.toml new file mode 100644 index 0000000..271800c --- /dev/null +++ b/src/rust-toolchain.toml @@ -0,0 +1,2 @@ +[toolchain] +channel = "nightly"
\ No newline at end of file diff --git a/src/scripts/b_asc_con_3.sh b/src/scripts/b_asc_con_3.sh new file mode 100755 index 0000000..3befd9d --- /dev/null +++ b/src/scripts/b_asc_con_3.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: ascending +syntactic property: Container +search time: %R seconds +' +time { +cargo run b_asc_con.rs gen_asc_con 3 +} diff --git a/src/scripts/b_asc_con_5.sh b/src/scripts/b_asc_con_5.sh new file mode 100755 index 0000000..32b3f61 --- /dev/null +++ b/src/scripts/b_asc_con_5.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: ascending +syntactic property: Container +search time: %R seconds +' +time { +cargo run b_asc_con.rs gen_asc_con 5 +} diff --git a/src/scripts/b_asc_con_7.sh b/src/scripts/b_asc_con_7.sh new file mode 100755 index 0000000..843a7d1 --- /dev/null +++ b/src/scripts/b_asc_con_7.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: ascending +syntactic property: Container +search time: %R seconds +' +time { +cargo run b_asc_con.rs gen_asc_con 7 +} diff --git a/src/scripts/b_asc_con_9.sh b/src/scripts/b_asc_con_9.sh new file mode 100755 index 0000000..78134e5 --- /dev/null +++ b/src/scripts/b_asc_con_9.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: ascending +syntactic property: Container +search time: %R seconds +' +time { +cargo run b_asc_con.rs gen_asc_con 9 +} diff --git a/src/scripts/b_asc_con_ra_3.sh b/src/scripts/b_asc_con_ra_3.sh new file mode 100755 index 0000000..8346f4c --- /dev/null +++ b/src/scripts/b_asc_con_ra_3.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: ascending +syntactic property: Container, RandomAccess +search time: %R seconds +' +time { +cargo run b_asc_con_ra.rs gen_asc_con_ra 3 +} diff --git a/src/scripts/b_asc_con_ra_5.sh b/src/scripts/b_asc_con_ra_5.sh new file mode 100755 index 0000000..9cb1cda --- /dev/null +++ b/src/scripts/b_asc_con_ra_5.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: ascending +syntactic property: Container, RandomAccess +search time: %R seconds +' +time { +cargo run b_asc_con_ra.rs gen_asc_con_ra 5 +} diff --git a/src/scripts/b_asc_con_ra_7.sh b/src/scripts/b_asc_con_ra_7.sh new file mode 100755 index 0000000..c797868 --- /dev/null +++ b/src/scripts/b_asc_con_ra_7.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: ascending +syntactic property: Container, RandomAccess +search time: %R seconds +' +time { +cargo run b_asc_con_ra.rs gen_asc_con_ra 7 +} diff --git a/src/scripts/b_asc_con_ra_9.sh b/src/scripts/b_asc_con_ra_9.sh new file mode 100755 index 0000000..f60be36 --- /dev/null +++ b/src/scripts/b_asc_con_ra_9.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: ascending +syntactic property: Container, RandomAccess +search time: %R seconds +' +time { +cargo run b_asc_con_ra.rs gen_asc_con_ra 9 +} diff --git a/src/scripts/b_des_con_3.sh b/src/scripts/b_des_con_3.sh new file mode 100755 index 0000000..c00c125 --- /dev/null +++ b/src/scripts/b_des_con_3.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: descending +syntactic property: Container +search time: %R seconds +' +time { +cargo run b_des_con.rs gen_des_con 3 +} diff --git a/src/scripts/b_des_con_5.sh b/src/scripts/b_des_con_5.sh new file mode 100755 index 0000000..b870f59 --- /dev/null +++ b/src/scripts/b_des_con_5.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: descending +syntactic property: Container +search time: %R seconds +' +time { +cargo run b_des_con.rs
gen_des_con 5 +} diff --git a/src/scripts/b_des_con_7.sh b/src/scripts/b_des_con_7.sh new file mode 100755 index 0000000..ca11765 --- /dev/null +++ b/src/scripts/b_des_con_7.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: descending +syntactic property: Container +search time: %R seconds +' +time { +cargo run b_des_con.rs gen_des_con 7 +} diff --git a/src/scripts/b_des_con_9.sh b/src/scripts/b_des_con_9.sh new file mode 100755 index 0000000..1d34895 --- /dev/null +++ b/src/scripts/b_des_con_9.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: descending +syntactic property: Container +search time: %R seconds +' +time { +cargo run b_des_con.rs gen_des_con 9 +} diff --git a/src/scripts/b_des_con_ra_3.sh b/src/scripts/b_des_con_ra_3.sh new file mode 100755 index 0000000..fe7c0a0 --- /dev/null +++ b/src/scripts/b_des_con_ra_3.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: descending +syntactic property: Container, RandomAccess +search time: %R seconds +' +time { +cargo run b_des_con_ra.rs gen_des_con_ra 3 +} diff --git a/src/scripts/b_des_con_ra_5.sh b/src/scripts/b_des_con_ra_5.sh new file mode 100755 index 0000000..3769ff5 --- /dev/null +++ b/src/scripts/b_des_con_ra_5.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: descending +syntactic property: Container, RandomAccess +search time: %R seconds +' +time { +cargo run b_des_con_ra.rs gen_des_con_ra 5 +} diff --git a/src/scripts/b_des_con_ra_7.sh b/src/scripts/b_des_con_ra_7.sh new file mode 100755 index 0000000..9c3a7c9 --- /dev/null +++ b/src/scripts/b_des_con_ra_7.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: descending +syntactic property: Container, RandomAccess +search time: %R seconds +' +time { +cargo run b_des_con_ra.rs gen_des_con_ra 7 +} diff --git a/src/scripts/b_des_con_ra_9.sh b/src/scripts/b_des_con_ra_9.sh new file mode 100755 index 0000000..5d70bf4 --- /dev/null +++ b/src/scripts/b_des_con_ra_9.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: descending +syntactic property: Container, RandomAccess +search time: %R seconds +' +time { +cargo run b_des_con_ra.rs gen_des_con_ra 9 +} diff --git a/src/scripts/b_stack_con_3.sh b/src/scripts/b_stack_con_3.sh new file mode 100755 index 0000000..3317bd7 --- /dev/null +++ b/src/scripts/b_stack_con_3.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: lifo +syntactic property: Container, Stack +search time: %R seconds +' +time { +cargo run b_stack_con.rs gen_stack_con 3 +} diff --git a/src/scripts/b_stack_con_5.sh b/src/scripts/b_stack_con_5.sh new file mode 100755 index 0000000..ff7c2ea --- /dev/null +++ b/src/scripts/b_stack_con_5.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: lifo +syntactic property: Container, Stack +search time: %R seconds +' +time { +cargo run b_stack_con.rs gen_stack_con 5 +} diff --git a/src/scripts/b_stack_con_7.sh b/src/scripts/b_stack_con_7.sh new file mode 100755 index 0000000..f04d5f5 --- /dev/null +++ b/src/scripts/b_stack_con_7.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: lifo +syntactic property: Container, Stack +search time: %R seconds +' +time { +cargo run b_stack_con.rs gen_stack_con 7 +} diff --git a/src/scripts/b_stack_con_9.sh b/src/scripts/b_stack_con_9.sh new file mode 100755 index 0000000..9347fb6 --- /dev/null +++ b/src/scripts/b_stack_con_9.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: lifo +syntactic property: Container, Stack +search time: %R seconds +' +time { 
+cargo run b_stack_con.rs gen_stack_con 9 +} diff --git a/src/scripts/b_unique_asc_con_3.sh b/src/scripts/b_unique_asc_con_3.sh new file mode 100755 index 0000000..13fa6c3 --- /dev/null +++ b/src/scripts/b_unique_asc_con_3.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: unique, ascending +syntactic property: Container +search time: %R seconds +' +time { +cargo run b_unique_asc_con.rs gen_unique_asc_con 3 +} diff --git a/src/scripts/b_unique_asc_con_5.sh b/src/scripts/b_unique_asc_con_5.sh new file mode 100755 index 0000000..cd8fd68 --- /dev/null +++ b/src/scripts/b_unique_asc_con_5.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: unique, ascending +syntactic property: Container +search time: %R seconds +' +time { +cargo run b_unique_asc_con.rs gen_unique_asc_con 5 +} diff --git a/src/scripts/b_unique_asc_con_7.sh b/src/scripts/b_unique_asc_con_7.sh new file mode 100755 index 0000000..1a76def --- /dev/null +++ b/src/scripts/b_unique_asc_con_7.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: unique, ascending +syntactic property: Container +search time: %R seconds +' +time { +cargo run b_unique_asc_con.rs gen_unique_asc_con 7 +} diff --git a/src/scripts/b_unique_asc_con_9.sh b/src/scripts/b_unique_asc_con_9.sh new file mode 100755 index 0000000..fba4841 --- /dev/null +++ b/src/scripts/b_unique_asc_con_9.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: unique, ascending +syntactic property: Container +search time: %R seconds +' +time { +cargo run b_unique_asc_con.rs gen_unique_asc_con 9 +} diff --git a/src/scripts/b_unique_asc_con_ra_3.sh b/src/scripts/b_unique_asc_con_ra_3.sh new file mode 100755 index 0000000..7ef5b22 --- /dev/null +++ b/src/scripts/b_unique_asc_con_ra_3.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: unique, ascending +syntactic property: Container, RandomAccess +search time: %R seconds +' +time { +cargo run b_unique_asc_con_ra.rs gen_unique_asc_con_ra 3 +} diff --git a/src/scripts/b_unique_asc_con_ra_5.sh b/src/scripts/b_unique_asc_con_ra_5.sh new file mode 100755 index 0000000..a32ee2f --- /dev/null +++ b/src/scripts/b_unique_asc_con_ra_5.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: unique, ascending +syntactic property: Container, RandomAccess +search time: %R seconds +' +time { +cargo run b_unique_asc_con_ra.rs gen_unique_asc_con_ra 5 +} diff --git a/src/scripts/b_unique_asc_con_ra_7.sh b/src/scripts/b_unique_asc_con_ra_7.sh new file mode 100755 index 0000000..ea16baf --- /dev/null +++ b/src/scripts/b_unique_asc_con_ra_7.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: unique, ascending +syntactic property: Container, RandomAccess +search time: %R seconds +' +time { +cargo run b_unique_asc_con_ra.rs gen_unique_asc_con_ra 7 +} diff --git a/src/scripts/b_unique_asc_con_ra_9.sh b/src/scripts/b_unique_asc_con_ra_9.sh new file mode 100755 index 0000000..8dff946 --- /dev/null +++ b/src/scripts/b_unique_asc_con_ra_9.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: unique, ascending +syntactic property: Container, RandomAccess +search time: %R seconds +' +time { +cargo run b_unique_asc_con_ra.rs gen_unique_asc_con_ra 9 +} diff --git a/src/scripts/b_unique_con_3.sh b/src/scripts/b_unique_con_3.sh new file mode 100755 index 0000000..10cd4c4 --- /dev/null +++ b/src/scripts/b_unique_con_3.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: unique +syntactic property: Container +search time: %R seconds +' +time { +cargo run 
b_unique_con.rs gen_unique_con 3 +} diff --git a/src/scripts/b_unique_con_5.sh b/src/scripts/b_unique_con_5.sh new file mode 100755 index 0000000..fbf8073 --- /dev/null +++ b/src/scripts/b_unique_con_5.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: unique +syntactic property: Container +search time: %R seconds +' +time { +cargo run b_unique_con.rs gen_unique_con 5 +} diff --git a/src/scripts/b_unique_con_7.sh b/src/scripts/b_unique_con_7.sh new file mode 100755 index 0000000..c2bbf29 --- /dev/null +++ b/src/scripts/b_unique_con_7.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: unique +syntactic property: Container +search time: %R seconds +' +time { +cargo run b_unique_con.rs gen_unique_con 7 +} diff --git a/src/scripts/b_unique_con_9.sh b/src/scripts/b_unique_con_9.sh new file mode 100755 index 0000000..dd16dfc --- /dev/null +++ b/src/scripts/b_unique_con_9.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: unique +syntactic property: Container +search time: %R seconds +' +time { +cargo run b_unique_con.rs gen_unique_con 9 +} diff --git a/src/scripts/b_unique_con_ra_3.sh b/src/scripts/b_unique_con_ra_3.sh new file mode 100755 index 0000000..fbcac8d --- /dev/null +++ b/src/scripts/b_unique_con_ra_3.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: unique +syntactic properties: Container, RandomAccess +search time: %R seconds +' +time { +cargo run b_unique_con_ra.rs gen_unique_con_ra 3 +} diff --git a/src/scripts/b_unique_con_ra_5.sh b/src/scripts/b_unique_con_ra_5.sh new file mode 100755 index 0000000..6845931 --- /dev/null +++ b/src/scripts/b_unique_con_ra_5.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: unique +syntactic properties: Container, RandomAccess +search time: %R seconds +' +time { +cargo run b_unique_con_ra.rs gen_unique_con_ra 5 +} diff --git a/src/scripts/b_unique_con_ra_7.sh b/src/scripts/b_unique_con_ra_7.sh new file mode 100755 index 0000000..8b62c27 --- /dev/null +++ b/src/scripts/b_unique_con_ra_7.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: unique +syntactic properties: Container, RandomAccess +search time: %R seconds +' +time { +cargo run b_unique_con_ra.rs gen_unique_con_ra 7 +} diff --git a/src/scripts/b_unique_con_ra_9.sh b/src/scripts/b_unique_con_ra_9.sh new file mode 100755 index 0000000..f756117 --- /dev/null +++ b/src/scripts/b_unique_con_ra_9.sh @@ -0,0 +1,9 @@ +#!/bin/bash +TIMEFORMAT=' +semantic property: unique +syntactic properties: Container, RandomAccess +search time: %R seconds +' +time { +cargo run b_unique_con_ra.rs gen_unique_con_ra 9 +} diff --git a/src/tests/.gitignore b/src/tests/.gitignore new file mode 100644 index 0000000..e9a847e --- /dev/null +++ b/src/tests/.gitignore @@ -0,0 +1,2 @@ +*/target +target
\ No newline at end of file diff --git a/src/tests/Cargo.toml b/src/tests/Cargo.toml new file mode 100644 index 0000000..e7372e9 --- /dev/null +++ b/src/tests/Cargo.toml @@ -0,0 +1,16 @@ +[workspace] +resolver = "2" +members = [ + "b_asc_con", + "b_asc_con_ra", + "b_des_con", + "b_des_con_ra", + "b_stack_con", + "b_unique_asc_con", + "b_unique_asc_con_ra", + "b_unique_con", + "b_unique_con_ra", + "example_comp", + "example_stack", + "example_unique" +]
\ No newline at end of file diff --git a/src/tests/b_asc_con/Cargo.toml b/src/tests/b_asc_con/Cargo.toml new file mode 100644 index 0000000..654d8ab --- /dev/null +++ b/src/tests/b_asc_con/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "b_asc_con" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] diff --git a/src/tests/b_asc_con/src/main.rs b/src/tests/b_asc_con/src/main.rs new file mode 100644 index 0000000..0d18aee --- /dev/null +++ b/src/tests/b_asc_con/src/main.rs @@ -0,0 +1,11 @@ +mod types; +use types::*; + +fn main() { + let mut c = AscendingCon::<u32>::new(); + for x in 0..10 { + c.insert(x); + c.insert(x); + } + assert_eq!(c.len(), 20); +} diff --git a/src/tests/b_asc_con/src/types.pr.rs b/src/tests/b_asc_con/src/types.pr.rs new file mode 100644 index 0000000..12d3d02 --- /dev/null +++ b/src/tests/b_asc_con/src/types.pr.rs @@ -0,0 +1,6 @@ +/*SPEC* +property ascending<T> { + \c -> ((for-all-consecutive-pairs c) leq?) +} +type AscendingCon<T> = {c impl (Container) | (ascending c)} +*ENDSPEC*/ diff --git a/src/tests/b_asc_con_ra/Cargo.toml b/src/tests/b_asc_con_ra/Cargo.toml new file mode 100644 index 0000000..9bbe454 --- /dev/null +++ b/src/tests/b_asc_con_ra/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "b_asc_con_ra" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] diff --git a/src/tests/b_asc_con_ra/src/main.rs b/src/tests/b_asc_con_ra/src/main.rs new file mode 100644 index 0000000..0d18aee --- /dev/null +++ b/src/tests/b_asc_con_ra/src/main.rs @@ -0,0 +1,11 @@ +mod types; +use types::*; + +fn main() { + let mut c = AscendingCon::<u32>::new(); + for x in 0..10 { + c.insert(x); + c.insert(x); + } + assert_eq!(c.len(), 20); +} diff --git a/src/tests/b_asc_con_ra/src/types.pr.rs b/src/tests/b_asc_con_ra/src/types.pr.rs new file mode 100644 index 0000000..4d33cb9 --- /dev/null +++ b/src/tests/b_asc_con_ra/src/types.pr.rs @@ -0,0 +1,6 @@ +/*SPEC* +property ascending<T> { + \c -> ((for-all-consecutive-pairs c) leq?) +} +type AscendingCon<T> = {c impl (Container, Indexable) | (ascending c)} +*ENDSPEC*/ diff --git a/src/tests/b_des_con/Cargo.toml b/src/tests/b_des_con/Cargo.toml new file mode 100644 index 0000000..b1d0d61 --- /dev/null +++ b/src/tests/b_des_con/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "b_des_con" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] diff --git a/src/tests/b_des_con/src/main.rs b/src/tests/b_des_con/src/main.rs new file mode 100644 index 0000000..b64347b --- /dev/null +++ b/src/tests/b_des_con/src/main.rs @@ -0,0 +1,11 @@ +mod types; +use types::*; + +fn main() { + let mut c = DescendingCon::<u32>::new(); + for x in 0..10 { + c.insert(x); + c.insert(x); + } + assert_eq!(c.len(), 20); +} diff --git a/src/tests/b_des_con/src/types.pr.rs b/src/tests/b_des_con/src/types.pr.rs new file mode 100644 index 0000000..63f15ff --- /dev/null +++ b/src/tests/b_des_con/src/types.pr.rs @@ -0,0 +1,6 @@ +/*SPEC* +property descending<T> { + \c -> ((for-all-consecutive-pairs c) geq?) 
+} +type DescendingCon<T> = {c impl (Container) | (descending c)} +*ENDSPEC*/ diff --git a/src/tests/b_des_con_ra/Cargo.toml b/src/tests/b_des_con_ra/Cargo.toml new file mode 100644 index 0000000..5ea4fd4 --- /dev/null +++ b/src/tests/b_des_con_ra/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "b_des_con_ra" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] diff --git a/src/tests/b_des_con_ra/src/main.rs b/src/tests/b_des_con_ra/src/main.rs new file mode 100644 index 0000000..b64347b --- /dev/null +++ b/src/tests/b_des_con_ra/src/main.rs @@ -0,0 +1,11 @@ +mod types; +use types::*; + +fn main() { + let mut c = DescendingCon::<u32>::new(); + for x in 0..10 { + c.insert(x); + c.insert(x); + } + assert_eq!(c.len(), 20); +} diff --git a/src/tests/b_des_con_ra/src/types.pr.rs b/src/tests/b_des_con_ra/src/types.pr.rs new file mode 100644 index 0000000..db5adb0 --- /dev/null +++ b/src/tests/b_des_con_ra/src/types.pr.rs @@ -0,0 +1,6 @@ +/*SPEC* +property descending<T> { + \c -> ((for-all-consecutive-pairs c) geq?) +} +type DescendingCon<T> = {c impl (Container, Indexable) | (descending c)} +*ENDSPEC*/ diff --git a/src/tests/b_stack_con/Cargo.toml b/src/tests/b_stack_con/Cargo.toml new file mode 100644 index 0000000..6668e3d --- /dev/null +++ b/src/tests/b_stack_con/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "b_stack_con" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] diff --git a/src/tests/b_stack_con/src/main.rs b/src/tests/b_stack_con/src/main.rs new file mode 100644 index 0000000..7c83f6b --- /dev/null +++ b/src/tests/b_stack_con/src/main.rs @@ -0,0 +1,11 @@ +mod types; +use types::*; + +fn main() { + let mut c = StackCon::<u32>::new(); + for x in 0..10 { + c.insert(x); + c.insert(x); + } + assert_eq!(c.len(), 20); +} diff --git a/src/tests/b_stack_con/src/types.pr.rs b/src/tests/b_stack_con/src/types.pr.rs new file mode 100644 index 0000000..525fdee --- /dev/null +++ b/src/tests/b_stack_con/src/types.pr.rs @@ -0,0 +1,7 @@ +/*SPEC* +property lifo<T> { + \c <: (Stack) -> (forall \x -> ((equal? (pop ((push c) x))) x)) +} + +type StackCon<S> = {c impl (Container, Stack) | (lifo c)} +*ENDSPEC*/ diff --git a/src/tests/b_unique_asc_con/Cargo.toml b/src/tests/b_unique_asc_con/Cargo.toml new file mode 100644 index 0000000..8d4c00f --- /dev/null +++ b/src/tests/b_unique_asc_con/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "b_unique_asc_con" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] diff --git a/src/tests/b_unique_asc_con/src/main.rs b/src/tests/b_unique_asc_con/src/main.rs new file mode 100644 index 0000000..2eac169 --- /dev/null +++ b/src/tests/b_unique_asc_con/src/main.rs @@ -0,0 +1,11 @@ +mod types; +use types::*; + +fn main() { + let mut c = StrictlyAscendingCon::<u32>::new(); + for x in 0..10 { + c.insert(x); + c.insert(x); + } + assert_eq!(c.len(), 10); +} diff --git a/src/tests/b_unique_asc_con/src/types.pr.rs b/src/tests/b_unique_asc_con/src/types.pr.rs new file mode 100644 index 0000000..e84858e --- /dev/null +++ b/src/tests/b_unique_asc_con/src/types.pr.rs @@ -0,0 +1,10 @@ +/*SPEC* +property unique<T> { + \c <: (Container) -> ((for-all-elems c) \a -> ((unique-count? a) c)) +} +property ascending<T> { + \c -> ((for-all-consecutive-pairs c) leq?) 
+} + +type StrictlyAscendingCon<S> = {c impl (Container) | ((unique c) and (ascending c))} +*ENDSPEC*/ diff --git a/src/tests/b_unique_asc_con_ra/Cargo.toml b/src/tests/b_unique_asc_con_ra/Cargo.toml new file mode 100644 index 0000000..bb3fb42 --- /dev/null +++ b/src/tests/b_unique_asc_con_ra/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "b_unique_asc_con_ra" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] diff --git a/src/tests/b_unique_asc_con_ra/src/main.rs b/src/tests/b_unique_asc_con_ra/src/main.rs new file mode 100644 index 0000000..2eac169 --- /dev/null +++ b/src/tests/b_unique_asc_con_ra/src/main.rs @@ -0,0 +1,11 @@ +mod types; +use types::*; + +fn main() { + let mut c = StrictlyAscendingCon::<u32>::new(); + for x in 0..10 { + c.insert(x); + c.insert(x); + } + assert_eq!(c.len(), 10); +} diff --git a/src/tests/b_unique_asc_con_ra/src/types.pr.rs b/src/tests/b_unique_asc_con_ra/src/types.pr.rs new file mode 100644 index 0000000..ed64180 --- /dev/null +++ b/src/tests/b_unique_asc_con_ra/src/types.pr.rs @@ -0,0 +1,10 @@ +/*SPEC* +property unique<T> { + \c <: (Container) -> ((for-all-elems c) \a -> ((unique-count? a) c)) +} +property ascending<T> { + \c -> ((for-all-consecutive-pairs c) leq?) +} + +type StrictlyAscendingCon<S> = {c impl (Container, Indexable) | ((unique c) and (ascending c))} +*ENDSPEC*/ diff --git a/src/tests/b_unique_con/Cargo.toml b/src/tests/b_unique_con/Cargo.toml new file mode 100644 index 0000000..3ad6e18 --- /dev/null +++ b/src/tests/b_unique_con/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "b_unique_con" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] diff --git a/src/tests/b_unique_con/src/main.rs b/src/tests/b_unique_con/src/main.rs new file mode 100644 index 0000000..a2b4246 --- /dev/null +++ b/src/tests/b_unique_con/src/main.rs @@ -0,0 +1,11 @@ +mod types; +use types::*; + +fn main() { + let mut c = UniqueCon::<u32>::new(); + for x in 0..10 { + c.insert(x); + c.insert(x); + } + assert_eq!(c.len(), 10); +} diff --git a/src/tests/b_unique_con/src/types.pr.rs b/src/tests/b_unique_con/src/types.pr.rs new file mode 100644 index 0000000..7393cce --- /dev/null +++ b/src/tests/b_unique_con/src/types.pr.rs @@ -0,0 +1,7 @@ +/*SPEC* +property unique<T> { + \c <: (Container) -> ((for-all-elems c) \a -> ((unique-count? 
a) c)) +} + +type UniqueCon<S> = {c impl (Container) | (unique c)} +*ENDSPEC*/ diff --git a/src/tests/b_unique_con_ra/Cargo.toml b/src/tests/b_unique_con_ra/Cargo.toml new file mode 100644 index 0000000..8bc19d5 --- /dev/null +++ b/src/tests/b_unique_con_ra/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "b_unique_con_ra" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] diff --git a/src/tests/b_unique_con_ra/src/main.rs b/src/tests/b_unique_con_ra/src/main.rs new file mode 100644 index 0000000..a2b4246 --- /dev/null +++ b/src/tests/b_unique_con_ra/src/main.rs @@ -0,0 +1,11 @@ +mod types; +use types::*; + +fn main() { + let mut c = UniqueCon::<u32>::new(); + for x in 0..10 { + c.insert(x); + c.insert(x); + } + assert_eq!(c.len(), 10); +} diff --git a/src/tests/b_unique_con_ra/src/types.pr.rs b/src/tests/b_unique_con_ra/src/types.pr.rs new file mode 100644 index 0000000..7562dd7 --- /dev/null +++ b/src/tests/b_unique_con_ra/src/types.pr.rs @@ -0,0 +1,7 @@ +/*SPEC* +property unique<T> { + \c <: (Container) -> ((for-all-elems c) \a -> ((unique-count? a) c)) +} + +type UniqueCon<S> = {c impl (Container, Indexable) | (unique c)} +*ENDSPEC*/ diff --git a/src/tests/example_comp/Cargo.toml b/src/tests/example_comp/Cargo.toml new file mode 100644 index 0000000..67639f2 --- /dev/null +++ b/src/tests/example_comp/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "example_comp" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] diff --git a/src/tests/example_comp/src/main.rs b/src/tests/example_comp/src/main.rs new file mode 100644 index 0000000..d87425b --- /dev/null +++ b/src/tests/example_comp/src/main.rs @@ -0,0 +1,12 @@ +mod types; +use types::*; + +fn main() { + let mut c = StrictlyAscendingCon::<u32>::new(); + for x in 0..10 { + c.insert(x); + c.insert(x); + //c.first(); + } + assert_eq!(c.len(), 10); +} diff --git a/src/tests/example_comp/src/types.pr.rs b/src/tests/example_comp/src/types.pr.rs new file mode 100644 index 0000000..ed64180 --- /dev/null +++ b/src/tests/example_comp/src/types.pr.rs @@ -0,0 +1,10 @@ +/*SPEC* +property unique<T> { + \c <: (Container) -> ((for-all-elems c) \a -> ((unique-count? a) c)) +} +property ascending<T> { + \c -> ((for-all-consecutive-pairs c) leq?) 
+} + +type StrictlyAscendingCon<S> = {c impl (Container, Indexable) | ((unique c) and (ascending c))} +*ENDSPEC*/ diff --git a/src/tests/example_stack/Cargo.toml b/src/tests/example_stack/Cargo.toml new file mode 100644 index 0000000..3010313 --- /dev/null +++ b/src/tests/example_stack/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "example_stack" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] diff --git a/src/tests/example_stack/src/main.rs b/src/tests/example_stack/src/main.rs new file mode 100644 index 0000000..daa44d0 --- /dev/null +++ b/src/tests/example_stack/src/main.rs @@ -0,0 +1,12 @@ +mod types; +use types::*; + +fn main() { + let mut c = StackCon::<u32>::new(); + for x in 0..10 { + c.insert(x); + c.insert(x); + //c.first(); + } + assert_eq!(c.len(), 20); +} diff --git a/src/tests/example_stack/src/types.pr.rs b/src/tests/example_stack/src/types.pr.rs new file mode 100644 index 0000000..525fdee --- /dev/null +++ b/src/tests/example_stack/src/types.pr.rs @@ -0,0 +1,7 @@ +/*SPEC* +property lifo<T> { + \c <: (Stack) -> (forall \x -> ((equal? (pop ((push c) x))) x)) +} + +type StackCon<S> = {c impl (Container, Stack) | (lifo c)} +*ENDSPEC*/ diff --git a/src/tests/example_unique/Cargo.toml b/src/tests/example_unique/Cargo.toml new file mode 100644 index 0000000..732c903 --- /dev/null +++ b/src/tests/example_unique/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "example_unique" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] diff --git a/src/tests/example_unique/src/main.rs b/src/tests/example_unique/src/main.rs new file mode 100644 index 0000000..0f6c143 --- /dev/null +++ b/src/tests/example_unique/src/main.rs @@ -0,0 +1,12 @@ +mod types; +use types::*; + +fn main() { + let mut c = UniqueCon::<u32>::new(); + for x in 0..10 { + c.insert(x); + c.insert(x); + //c.first(); + } + assert_eq!(c.len(), 10); +} diff --git a/src/tests/example_unique/src/types.pr.rs b/src/tests/example_unique/src/types.pr.rs new file mode 100644 index 0000000..7393cce --- /dev/null +++ b/src/tests/example_unique/src/types.pr.rs @@ -0,0 +1,7 @@ +/*SPEC* +property unique<T> { + \c <: (Container) -> ((for-all-elems c) \a -> ((unique-count? a) c)) +} + +type UniqueCon<S> = {c impl (Container) | (unique c)} +*ENDSPEC*/ |
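
Each test crate above pairs a `types.pr.rs` property specification with a `main.rs` driver that relies only on `new`, `insert`, and `len` of the selected container type, asserting a final length of 10 when the `unique` property deduplicates the repeated inserts and 20 otherwise. The `types.rs` module referenced by `mod types;` is produced by primrose from the spec; as a purely hypothetical stand-in (not the generated code, and the `HashSet` backing is only an assumption), the interface the drivers depend on can be sketched as:

```rust
// Hypothetical hand-written stand-in for the generated `types.rs` of the
// b_unique_con crate, showing only the interface the drivers use
// (`new`, `insert`, `len`). The code primrose actually generates will differ.
use std::collections::HashSet;

pub struct UniqueCon<T> {
    elems: HashSet<T>,
}

impl<T: std::hash::Hash + Eq> UniqueCon<T> {
    pub fn new() -> Self {
        UniqueCon { elems: HashSet::new() }
    }

    /// The `unique` semantic property in action: repeated values are dropped.
    pub fn insert(&mut self, value: T) {
        self.elems.insert(value);
    }

    pub fn len(&self) -> usize {
        self.elems.len()
    }
}

fn main() {
    // Mirrors b_unique_con/src/main.rs: duplicate inserts do not increase len.
    let mut c = UniqueCon::<u32>::new();
    for x in 0..10 {
        c.insert(x);
        c.insert(x);
    }
    assert_eq!(c.len(), 10);
}
```
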