run rust benchmarks and compare on prs (#5734)
* run benchmarks and compare on prs Signed-off-by: Jess Frazelle <github@jessfraz.com> * run benchmarks and compare on prs Signed-off-by: Jess Frazelle <github@jessfraz.com> * fixes Signed-off-by: Jess Frazelle <github@jessfraz.com> * fixes Signed-off-by: Jess Frazelle <github@jessfraz.com> * benchmark kcl-samples Signed-off-by: Jess Frazelle <github@jessfraz.com> * updates Signed-off-by: Jess Frazelle <github@jessfraz.com> * updates Signed-off-by: Jess Frazelle <github@jessfraz.com> * updates Signed-off-by: Jess Frazelle <github@jessfraz.com> * updates Signed-off-by: Jess Frazelle <github@jessfraz.com> * updates Signed-off-by: Jess Frazelle <github@jessfraz.com> * updates Signed-off-by: Jess Frazelle <github@jessfraz.com> * updates Signed-off-by: Jess Frazelle <github@jessfraz.com> * updates Signed-off-by: Jess Frazelle <github@jessfraz.com> * updates Signed-off-by: Jess Frazelle <github@jessfraz.com> * updates Signed-off-by: Jess Frazelle <github@jessfraz.com> * updates Signed-off-by: Jess Frazelle <github@jessfraz.com> * updates Signed-off-by: Jess Frazelle <github@jessfraz.com> --------- Signed-off-by: Jess Frazelle <github@jessfraz.com>
This commit is contained in:
95
rust/kcl-lib/benches/benchmark_kcl_samples.rs
Normal file
95
rust/kcl-lib/benches/benchmark_kcl_samples.rs
Normal file
@@ -0,0 +1,95 @@
|
||||
use std::{
|
||||
fs,
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use criterion::{black_box, criterion_group, criterion_main, Criterion};
|
||||
use tokio::runtime::Runtime;
|
||||
|
||||
// Directory names under the samples root that `discover_benchmark_dirs` skips:
// they hold generated artifacts ("step" exports, "screenshots"), not runnable
// KCL sample projects.
const IGNORE_DIRS: [&str; 2] = ["step", "screenshots"];
|
||||
|
||||
fn discover_benchmark_dirs(base_path: &Path) -> Vec<PathBuf> {
|
||||
let mut benchmark_dirs = Vec::new();
|
||||
|
||||
if let Ok(entries) = fs::read_dir(base_path) {
|
||||
for entry in entries.filter_map(Result::ok) {
|
||||
let path = entry.path();
|
||||
if path.is_dir() {
|
||||
let dir_name = path.file_name().unwrap().to_string_lossy();
|
||||
if !IGNORE_DIRS.iter().any(|&x| x == dir_name) {
|
||||
benchmark_dirs.push(path);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
benchmark_dirs
|
||||
}
|
||||
|
||||
/// Resolves the `main.kcl` entry file inside a sample directory.
///
/// # Panics
///
/// Panics when `dir_path` does not contain a regular `main.kcl` file, since a
/// sample directory without one cannot be benchmarked.
fn find_main_kcl_file(dir_path: &Path) -> PathBuf {
    let candidate = dir_path.join("main.kcl");
    if candidate.exists() && candidate.is_file() {
        candidate
    } else {
        panic!("Required main.kcl file not found in directory: {}", dir_path.display());
    }
}
|
||||
|
||||
/// Criterion driver: discovers every KCL sample project and registers a
/// `parse` and an `execute` benchmark for each one.
///
/// Panics (rather than returning an error) on any setup failure — missing
/// samples directory, missing/unreadable `main.kcl`, parse failure — because
/// a broken fixture should fail the benchmark run loudly.
fn run_benchmarks(c: &mut Criterion) {
    // Specify the base directory containing benchmark subdirectories.
    // Resolved relative to this crate's manifest, so it works regardless of
    // the invocation directory.
    let base_dir = std::path::Path::new(env!("CARGO_MANIFEST_DIR")).join("../../public/kcl-samples");

    if !base_dir.exists() || !base_dir.is_dir() {
        panic!("Invalid base directory: {}", base_dir.display());
    }

    let benchmark_dirs = discover_benchmark_dirs(&base_dir);

    // One shared Tokio runtime; each `execute` iteration blocks on it.
    let rt = Runtime::new().unwrap();

    for dir in benchmark_dirs {
        let dir_name = dir.file_name().unwrap().to_string_lossy().to_string();

        // Change the current directory to the benchmark directory.
        // This is necessary for the kcl-lib to correctly resolve relative paths.
        // NOTE(review): `set_current_dir` is process-global; benchmarks must not
        // run concurrently within this process for this to be safe.
        std::env::set_current_dir(&dir).unwrap();

        // Find main.kcl file (will panic if not found).
        let input_file = find_main_kcl_file(&dir);

        // Read the file content (panic on failure).
        let input_content = fs::read_to_string(&input_file)
            .unwrap_or_else(|e| panic!("Failed to read main.kcl in directory {}: {}", dir_name, e));

        // Create a benchmark group for this directory; each sample gets its own
        // group named after the directory.
        let mut group = c.benchmark_group(&dir_name);
        group
            .sample_size(10)
            .measurement_time(std::time::Duration::from_secs(1)); // Short measurement time to keep it from running in parallel

        // Parse once up front so the `execute` benchmark measures execution
        // only, and so a bad sample fails fast before timing starts.
        let program = kcl_lib::Program::parse_no_errs(&input_content).unwrap();

        group.bench_function("parse", |b| {
            // `black_box` prevents the optimizer from hoisting/folding the parse.
            b.iter(|| kcl_lib::Program::parse_no_errs(black_box(&input_content)).unwrap())
        });

        group.bench_function("execute", |b| {
            b.iter(|| {
                rt.block_on(async {
                    // NOTE(review): the executor context is created (and closed)
                    // inside each iteration, so client/engine setup time is
                    // included in the measured `execute` time — confirm that is
                    // intended rather than hoisting it outside `b.iter`.
                    let ctx = kcl_lib::ExecutorContext::new_with_default_client(Default::default())
                        .await
                        .unwrap();
                    let mut exec_state = kcl_lib::ExecState::new(&ctx.settings);
                    ctx.run(black_box(&program), &mut exec_state).await.unwrap();
                    ctx.close().await;
                })
            })
        });

        group.finish();
    }
}
|
||||
|
||||
// Wire the sample benchmarks into Criterion's generated `main` entry point.
criterion_group!(benches, run_benchmarks);
criterion_main!(benches);
|
||||
@@ -13,7 +13,14 @@ pub fn bench_execute(c: &mut Criterion) {
|
||||
let mut group = c.benchmark_group("executor");
|
||||
// Configure Criterion.rs to detect smaller differences and increase sample size to improve
|
||||
// precision and counteract the resulting noise.
|
||||
group.sample_size(10);
|
||||
group
|
||||
.sample_size(10)
|
||||
.measurement_time(std::time::Duration::from_secs(1)); // Short
|
||||
// measurement
|
||||
// time to keep
|
||||
// it from
|
||||
// running in
|
||||
// parallel
|
||||
group.bench_with_input(BenchmarkId::new("execute", name), &code, |b, &s| {
|
||||
let rt = Runtime::new().unwrap();
|
||||
// Spawn a future onto the runtime
|
||||
|
||||
Reference in New Issue
Block a user