author    Aria Shrimpton <me@aria.rip>    2024-01-31 17:43:05 +0000
committer Aria Shrimpton <me@aria.rip>    2024-01-31 17:43:05 +0000
commit    6c8407043120f3855dd0229c0f838041c7f0eb38 (patch)
tree      854cf43116cb49fb6a14fe53dc037f7c0fdfb61a /src/crates
parent    eafe2080e9e825649bd84edba9647df0e811af99 (diff)
lints & refactors
Diffstat (limited to 'src/crates')
-rw-r--r--  src/crates/candelabra/src/profiler/info.rs (renamed from src/crates/candelabra/src/profiler.rs)  223
-rw-r--r--  src/crates/candelabra/src/profiler/mod.rs  200
-rw-r--r--  src/crates/cli/src/display.rs  2
3 files changed, 221 insertions, 204 deletions
diff --git a/src/crates/candelabra/src/profiler.rs b/src/crates/candelabra/src/profiler/info.rs
index 4677bbc..dc9a03c 100644
--- a/src/crates/candelabra/src/profiler.rs
+++ b/src/crates/candelabra/src/profiler/info.rs
@@ -1,33 +1,10 @@
-//! Profiling applications for info about container usage
-
-use anyhow::{anyhow, Context, Result};
-use camino::{Utf8Path, Utf8PathBuf};
-use log::{debug, log_enabled, trace, warn, Level};
-use primrose::ContainerSelector;
-use serde::{Deserialize, Serialize};
use std::collections::HashMap;
-use std::io::Write;
use std::str::FromStr;
-use std::{
- fs::{read_dir, File},
- io::Read,
- process::{Command, Stdio},
-};
-use tempfile::tempdir;
-
-use crate::cache::{gen_tree_hash, FileCache};
-use crate::candidates::ConTypeName;
-use crate::cost::benchmark::{tee_output, OpName};
-use crate::cost::{Cost, CostModel, Estimator};
-use crate::project::Project;
-use crate::{Paths, State};
-
-#[derive(Debug, Serialize, Deserialize)]
-pub(crate) struct CacheEntry {
- proj_hash: u64,
- proj_location: Utf8PathBuf,
- info: HashMap<ConTypeName, ProfilerInfo>,
-}
+
+use anyhow::{anyhow, Result};
+use serde::{Deserialize, Serialize};
+
+use crate::cost::{benchmark::OpName, Cost, CostModel, Estimator};
/// The information we get from profiling.
/// Rather than keeping all results, we split them into 'similar enough' partitions,
@@ -35,7 +12,7 @@ pub(crate) struct CacheEntry {
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
pub struct ProfilerInfo(pub Vec<ProfilerPartition>);
-/// A vector of container lifetimes which have similar characteristics
+/// A vector of container lifetimes which have similar usage characteristics
#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)]
pub struct ProfilerPartition {
pub occurences: f64,
@@ -43,10 +20,21 @@ pub struct ProfilerPartition {
pub avg_op_counts: HashMap<OpName, f64>,
}
-/// Breakdown of a cost value
+/// Lifetime of a single allocated collection.
+type CollectionLifetime = (f64, HashMap<OpName, usize>);
+
+/// Breakdown of a cost value by operation
pub type CostBreakdown<'a> = HashMap<&'a OpName, Cost>;
impl ProfilerInfo {
+ pub fn from(iter: impl Iterator<Item = Result<String>>) -> Result<Self> {
+ Ok(Self(
+ iter.map(|contents| parse_output(&contents?))
+ .fold(Ok(vec![]), partition_costs)?,
+ ))
+ }
+
+ /// Estimate the cost of using the implementation with the given cost model
pub fn estimate_cost(&self, cost_model: &CostModel) -> f64 {
self.0
.iter()
@@ -54,6 +42,7 @@ impl ProfilerInfo {
.sum::<f64>()
}
+ /// Get a breakdown of the cost by operation
pub fn cost_breakdown<'a>(&self, cost_model: &'a CostModel) -> CostBreakdown<'a> {
cost_model
.by_op
@@ -104,178 +93,6 @@ impl ProfilerPartition {
}
}
-impl State {
- pub(crate) fn profiler_info_cache(paths: &Paths) -> Result<FileCache<String, CacheEntry>> {
- FileCache::new(
- paths.target_dir.join("candelabra").join("profiler_info"),
- |_, v: &CacheEntry| {
- let proj_hash = gen_tree_hash(&v.proj_location).unwrap_or(0);
- v.proj_hash == proj_hash
- },
- )
- }
-
- /// Get or calculate profiler info for the given project.
- /// Results are cached by the modification time of the project's source tree
- pub fn profiler_info(&self, project: &Project) -> Result<HashMap<ConTypeName, ProfilerInfo>> {
- match self.profiler_info_cache.find(&project.name)? {
- Some(x) => Ok(x.info),
- None => {
- let info = self.calc_profiler_info(project)?;
-
- let proj_hash = gen_tree_hash(&project.source_dir)
- .context("Error generating project directory hash")?;
- if let Err(e) = self.profiler_info_cache.put(
- &project.name,
- &CacheEntry {
- proj_hash,
- proj_location: project.source_dir.clone(),
- info: info.clone(),
- },
- ) {
- warn!("Error caching profiler info for {}: {}", &project.name, e);
- }
-
- Ok(info)
- }
- }
- }
-
- /// Calculate profiler info for the given project.
- fn calc_profiler_info(&self, project: &Project) -> Result<HashMap<ConTypeName, ProfilerInfo>> {
- let candidate_list = self.project_candidate_list(project)?;
- let con_types = candidate_list
- .iter()
- .flat_map(|(_, con_types)| con_types.iter())
- .map(|(id, _)| id)
- .collect::<Vec<_>>();
-
- self.project_profiling_prep(project, &con_types)?;
- let mut acc = HashMap::new();
- for name in project.benchmarks.iter() {
- for (con_type, new_results) in self
- .profile_benchmark(project, name, &con_types)
- .with_context(|| format!("Error profiling benchmark {}", name))?
- {
- acc.entry(con_type)
- .and_modify(|pi: &mut ProfilerInfo| pi.0.extend(new_results.0.iter().cloned()))
- .or_insert(new_results);
- }
- }
-
- Ok(acc)
- }
-
- /// Prepare the given project to be profiled, by replacing all candidate types with the profiler wrapper.
- fn project_profiling_prep(&self, project: &Project, con_types: &[&String]) -> Result<()> {
- for (file, candidates) in self.project_candidate_list(project)? {
- self.file_profiling_prep(&file, &candidates, con_types)
- .with_context(|| format!("error preparing {} for profiling", file))?;
- }
-
- Ok(())
- }
-
- /// Prepare the given file to be profiled, by replacing all candidate types with the profiler wrapper.
- fn file_profiling_prep(
- &self,
- file: &Utf8Path,
- candidates: &[(String, Vec<String>)],
- con_types: &[&String],
- ) -> Result<()> {
- debug!("Setting up {} for profiling", file);
-
- let selector = ContainerSelector::from_path(
- file.as_std_path(),
- self.paths.library_src.as_std_path(),
- self.model_size,
- )
- .context("error creating container selector")?;
-
- let chosen = candidates
- .iter()
- .map(|(dest_name, impls)| (dest_name, &impls[0]))
- .collect::<Vec<_>>();
-
- let new_code = selector.gen_profiling_file(chosen.iter().map(|(d, c)| {
- (
- *d,
- con_types.iter().position(|id| id == d).unwrap(),
- c.as_str(),
- )
- }));
-
- let new_path = file.to_string().replace(".pr", "");
-
- trace!("New code: {}", new_code);
- trace!("New path: {}", new_path);
-
- let mut f = File::create(new_path).context("error creating new source file")?;
- f.write_all(new_code.as_bytes())
- .context("error writing new code")?;
-
- Ok(())
- }
-
- /// Run the given benchmark on the project, and parse the resulting profiling information.
- fn profile_benchmark(
- &self,
- project: &Project,
- name: &str,
- con_types: &[&String],
- ) -> Result<HashMap<String, ProfilerInfo>> {
- let profiler_out_dir = tempdir()?;
- debug!(
- "Running benchmark {} with out dir {:?}",
- name, profiler_out_dir
- );
-
- let child = Command::new("cargo")
- .current_dir(&project.source_dir)
- .args(["bench", "--bench", name])
- .env("PROFILER_OUT_DIR", profiler_out_dir.as_ref()) // Where profiler info gets outputted
- .stdout(Stdio::piped())
- .stderr(if log_enabled!(Level::Debug) {
- Stdio::inherit()
- } else {
- Stdio::null()
- })
- .spawn()
- .context("Error running bench command")?;
-
- tee_output(child)?;
-
- let mut con_type_results = HashMap::new();
- for dir in read_dir(&profiler_out_dir)? {
- // each directory has an index, corresponding to the container type name
- let dir = dir?;
- let con_type: String = con_types[dir
- .file_name()
- .into_string()
- .unwrap()
- .parse::<usize>()
- .unwrap()]
- .to_string();
-
- let partitions = read_dir(dir.path())?
- .map(|f| -> Result<String> {
- // read file contents
- let mut contents = String::new();
- File::open(f?.path())?.read_to_string(&mut contents)?;
- Ok(contents)
- })
- .map(|contents| parse_output(&contents?))
- .fold(Ok(vec![]), partition_costs)?;
-
- con_type_results.insert(con_type, ProfilerInfo(partitions));
- }
-
- Ok(con_type_results)
- }
-}
-
-type CollectionLifetime = (f64, HashMap<OpName, usize>);
-
/// Attempt to compress an iterator of collection lifetimes into as few partitions as possible
fn partition_costs(
acc: Result<Vec<ProfilerPartition>>,
@@ -367,7 +184,7 @@ mod tests {
use crate::{
cost::{CostModel, Estimator},
- profiler::partition_costs,
+ profiler::info::partition_costs,
};
use super::{ProfilerInfo, ProfilerPartition};
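
Aside from the file split, the main functional change above is the new ProfilerInfo::from constructor, which takes an iterator of raw profiler output strings and applies the same parse_output / partition_costs fold that previously sat inline in profile_benchmark. A minimal usage sketch, assuming the crate is pulled in as a dependency named candelabra and that ProfilerInfo is re-exported from the profiler module as in this diff:

use anyhow::Result;
use candelabra::profiler::ProfilerInfo;

// Sketch only: candelabra builds this iterator from the files written to
// PROFILER_OUT_DIR; here an in-memory Vec of raw profiler outputs stands in.
fn build_info(raw_outputs: Vec<String>) -> Result<ProfilerInfo> {
    ProfilerInfo::from(raw_outputs.into_iter().map(anyhow::Ok))
}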
diff --git a/src/crates/candelabra/src/profiler/mod.rs b/src/crates/candelabra/src/profiler/mod.rs
new file mode 100644
index 0000000..568929b
--- /dev/null
+++ b/src/crates/candelabra/src/profiler/mod.rs
@@ -0,0 +1,200 @@
+//! Profiling applications for info about container usage
+
+mod info;
+
+use anyhow::{Context, Result};
+use camino::{Utf8Path, Utf8PathBuf};
+use log::{debug, log_enabled, trace, warn, Level};
+use primrose::ContainerSelector;
+use serde::{Deserialize, Serialize};
+use std::collections::HashMap;
+use std::io::Write;
+use std::{
+ fs::{read_dir, File},
+ io::Read,
+ process::{Command, Stdio},
+};
+use tempfile::tempdir;
+
+use crate::cache::{gen_tree_hash, FileCache};
+use crate::candidates::ConTypeName;
+use crate::cost::benchmark::tee_output;
+use crate::project::Project;
+use crate::{Paths, State};
+
+pub use self::info::{ProfilerInfo, ProfilerPartition};
+
+#[derive(Debug, Serialize, Deserialize)]
+pub(crate) struct CacheEntry {
+ proj_hash: u64,
+ proj_location: Utf8PathBuf,
+ info: HashMap<ConTypeName, ProfilerInfo>,
+}
+
+impl State {
+ pub(crate) fn profiler_info_cache(paths: &Paths) -> Result<FileCache<String, CacheEntry>> {
+ FileCache::new(
+ paths.target_dir.join("candelabra").join("profiler_info"),
+ |_, v: &CacheEntry| {
+ let proj_hash = gen_tree_hash(&v.proj_location).unwrap_or(0);
+ v.proj_hash == proj_hash
+ },
+ )
+ }
+
+ /// Get or calculate profiler info for the given project.
+ /// Results are cached by the modification time of the project's source tree
+ pub fn profiler_info(&self, project: &Project) -> Result<HashMap<ConTypeName, ProfilerInfo>> {
+ match self.profiler_info_cache.find(&project.name)? {
+ Some(x) => Ok(x.info),
+ None => {
+ let info = self.calc_profiler_info(project)?;
+
+ let proj_hash = gen_tree_hash(&project.source_dir)
+ .context("Error generating project directory hash")?;
+ if let Err(e) = self.profiler_info_cache.put(
+ &project.name,
+ &CacheEntry {
+ proj_hash,
+ proj_location: project.source_dir.clone(),
+ info: info.clone(),
+ },
+ ) {
+ warn!("Error caching profiler info for {}: {}", &project.name, e);
+ }
+
+ Ok(info)
+ }
+ }
+ }
+
+ /// Calculate profiler info for the given project.
+ fn calc_profiler_info(&self, project: &Project) -> Result<HashMap<ConTypeName, ProfilerInfo>> {
+ let candidate_list = self.project_candidate_list(project)?;
+ let con_types = candidate_list
+ .iter()
+ .flat_map(|(_, con_types)| con_types.iter())
+ .map(|(id, _)| id)
+ .collect::<Vec<_>>();
+
+ self.project_profiling_prep(project, &con_types)?;
+ let mut acc = HashMap::new();
+ for name in project.benchmarks.iter() {
+ for (con_type, new_results) in self
+ .profile_benchmark(project, name, &con_types)
+ .with_context(|| format!("Error profiling benchmark {}", name))?
+ {
+ acc.entry(con_type)
+ .and_modify(|pi: &mut ProfilerInfo| pi.0.extend(new_results.0.iter().cloned()))
+ .or_insert(new_results);
+ }
+ }
+
+ Ok(acc)
+ }
+
+ /// Prepare the given project to be profiled, by replacing all candidate types with the profiler wrapper.
+ fn project_profiling_prep(&self, project: &Project, con_types: &[&String]) -> Result<()> {
+ for (file, candidates) in self.project_candidate_list(project)? {
+ self.file_profiling_prep(&file, &candidates, con_types)
+ .with_context(|| format!("error preparing {} for profiling", file))?;
+ }
+
+ Ok(())
+ }
+
+ /// Prepare the given file to be profiled, by replacing all candidate types with the profiler wrapper.
+ fn file_profiling_prep(
+ &self,
+ file: &Utf8Path,
+ candidates: &[(String, Vec<String>)],
+ con_types: &[&String],
+ ) -> Result<()> {
+ debug!("Setting up {} for profiling", file);
+
+ let selector = ContainerSelector::from_path(
+ file.as_std_path(),
+ self.paths.library_src.as_std_path(),
+ self.model_size,
+ )
+ .context("error creating container selector")?;
+
+ let chosen = candidates
+ .iter()
+ .map(|(dest_name, impls)| (dest_name, &impls[0]))
+ .collect::<Vec<_>>();
+
+ let new_code = selector.gen_profiling_file(chosen.iter().map(|(d, c)| {
+ (
+ *d,
+ con_types.iter().position(|id| id == d).unwrap(),
+ c.as_str(),
+ )
+ }));
+
+ let new_path = file.to_string().replace(".pr", "");
+
+ trace!("New code: {}", new_code);
+ trace!("New path: {}", new_path);
+
+ let mut f = File::create(new_path).context("error creating new source file")?;
+ f.write_all(new_code.as_bytes())
+ .context("error writing new code")?;
+
+ Ok(())
+ }
+
+ /// Run the given benchmark on the project, and parse the resulting profiling information.
+ fn profile_benchmark(
+ &self,
+ project: &Project,
+ name: &str,
+ con_types: &[&String],
+ ) -> Result<HashMap<String, ProfilerInfo>> {
+ let profiler_out_dir = tempdir()?;
+ debug!(
+ "Running benchmark {} with out dir {:?}",
+ name, profiler_out_dir
+ );
+
+ let child = Command::new("cargo")
+ .current_dir(&project.source_dir)
+ .args(["bench", "--bench", name])
+ .env("PROFILER_OUT_DIR", profiler_out_dir.as_ref()) // Where profiler info gets outputted
+ .stdout(Stdio::piped())
+ .stderr(if log_enabled!(Level::Debug) {
+ Stdio::inherit()
+ } else {
+ Stdio::null()
+ })
+ .spawn()
+ .context("Error running bench command")?;
+
+ tee_output(child)?;
+
+ let mut con_type_results = HashMap::new();
+ for dir in read_dir(&profiler_out_dir)? {
+ // each directory has an index, corresponding to the container type name
+ let dir = dir?;
+ let con_type: String = con_types[dir
+ .file_name()
+ .into_string()
+ .unwrap()
+ .parse::<usize>()
+ .unwrap()]
+ .to_string();
+
+ con_type_results.insert(
+ con_type,
+ ProfilerInfo::from(read_dir(dir.path())?.map(|f| -> Result<String> {
+ // read file contents
+ let mut contents = String::new();
+ File::open(f?.path())?.read_to_string(&mut contents)?;
+ Ok(contents)
+ }))?,
+ );
+ }
+
+ Ok(con_type_results)
+ }
+}
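
The orchestration code moved into mod.rs is otherwise unchanged: cached profiler info is keyed by project name and validated by re-hashing the project's source tree, so a stale entry is discarded when the tree changes. A generic sketch of that validate-on-read idea follows; the ValidatedCache type is illustrative only and not candelabra's FileCache API:

use std::collections::HashMap;

// Illustrative validate-on-read cache: an entry is only returned if the
// hash stored alongside it still matches the caller's current tree hash,
// mirroring the FileCache validator closure in profiler_info_cache.
struct ValidatedCache<V> {
    entries: HashMap<String, (u64, V)>,
}

impl<V: Clone> ValidatedCache<V> {
    fn find(&self, key: &str, current_hash: u64) -> Option<V> {
        self.entries
            .get(key)
            .filter(|(stored, _)| *stored == current_hash)
            .map(|(_, v)| v.clone())
    }

    fn put(&mut self, key: &str, hash: u64, value: V) {
        self.entries.insert(key.to_string(), (hash, value));
    }
}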
diff --git a/src/crates/cli/src/display.rs b/src/crates/cli/src/display.rs
index 2ce9039..2debede 100644
--- a/src/crates/cli/src/display.rs
+++ b/src/crates/cli/src/display.rs
@@ -51,7 +51,7 @@ pub fn display_profiler_info(profile_info: ProfilerInfo) {
("occurences".to_string(), p.occurences),
]
.into_iter()
- .chain(p.avg_op_counts.into_iter()),
+ .chain(p.avg_op_counts),
)
}))
}
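
The display.rs change is a routine lint fix: Iterator::chain accepts any IntoIterator, so the explicit .into_iter() on the HashMap argument was redundant. A standalone illustration (the values here are made up):

use std::collections::HashMap;

fn main() {
    let avg_op_counts: HashMap<String, f64> = HashMap::from([("insert".to_string(), 4.0)]);
    let rows: Vec<(String, f64)> = [("occurences".to_string(), 2.0)]
        .into_iter()
        .chain(avg_op_counts) // equivalent to .chain(avg_op_counts.into_iter())
        .collect();
    println!("{rows:?}");
}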