fix cache and imports (#6647)
* updates
Signed-off-by: Jess Frazelle <github@jessfraz.com>
* fix clippy
Signed-off-by: Jess Frazelle <github@jessfraz.com>
---------
Signed-off-by: Jess Frazelle <github@jessfraz.com>
@@ -80,6 +80,15 @@ pub(super) enum CacheResult {
        /// The program that needs to be executed.
        program: Node<Program>,
    },
    /// Check only the imports, and not the main program.
    /// Before sending this we already checked the main program and it is the same.
    /// And we made sure the import statements > 0.
    CheckImportsOnly {
        /// Argument is whether we need to reapply settings.
        reapply_settings: bool,
        /// The ast of the main file, which did not change.
        ast: Node<Program>,
    },
    /// Argument is whether we need to reapply settings.
    NoAction(bool),
}
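For orientation, here is a hedged sketch of how a caller can branch on the new CheckImportsOnly variant. The enum below is a simplified stand-in (Ast replaces Node<Program>), not the crate's real definition.

// Hypothetical, simplified mirror of the enum in the hunk above.
type Ast = String; // placeholder for illustration only

#[derive(Debug)]
enum CacheResult {
    ReExecute { clear_scene: bool, reapply_settings: bool, program: Ast },
    CheckImportsOnly { reapply_settings: bool, ast: Ast },
    NoAction(bool),
}

fn describe(result: &CacheResult) -> &'static str {
    match result {
        CacheResult::ReExecute { .. } => "main program changed: re-execute it",
        // New in this change: the main AST is identical, but imported files still need checking.
        CacheResult::CheckImportsOnly { .. } => "main program unchanged: diff only the imports",
        CacheResult::NoAction(true) => "nothing changed, but settings must be reapplied",
        CacheResult::NoAction(false) => "nothing changed at all",
    }
}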
@@ -105,7 +114,19 @@ pub(super) async fn get_changed_program(old: CacheInformation<'_>, new: CacheInf
    // If the ASTs are the EXACT same we return None.
    // We don't even need to waste time computing the digests.
    if old.ast == new.ast {
        return CacheResult::NoAction(reapply_settings);
        // First we need to make sure an imported file didn't change its ast.
        // We know they have the same imports because the ast is the same.
        // If we have no imports, we can skip this.
        if !old.ast.has_import_statements() {
            println!("No imports, no need to check.");
            return CacheResult::NoAction(reapply_settings);
        }

        // Tell the CacheResult we need to check all the imports, but the main ast is the same.
        return CacheResult::CheckImportsOnly {
            reapply_settings,
            ast: old.ast.clone(),
        };
    }

    // We have to clone just because the digests are stored inline :-(
@@ -119,7 +140,19 @@ pub(super) async fn get_changed_program(old: CacheInformation<'_>, new: CacheInf

    // Check if the digest is the same.
    if old_ast.digest == new_ast.digest {
        return CacheResult::NoAction(reapply_settings);
        // First we need to make sure an imported file didn't change its ast.
        // We know they have the same imports because the ast is the same.
        // If we have no imports, we can skip this.
        if !old.ast.has_import_statements() {
            println!("No imports, no need to check.");
            return CacheResult::NoAction(reapply_settings);
        }

        // Tell the CacheResult we need to check all the imports, but the main ast is the same.
        return CacheResult::CheckImportsOnly {
            reapply_settings,
            ast: old.ast.clone(),
        };
    }

    // Check if the annotations are different.
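Taken together, the two hunks above add the same early-return flow to both the AST-equality and the digest-equality branches. A hedged, self-contained sketch of that flow, with plain booleans standing in for the real AST comparison:

// Simplified model of the cache decision added above (illustrative names only).
#[derive(Debug, PartialEq)]
enum Decision {
    NoAction,
    CheckImportsOnly,
    KeepChecking, // fall through to the digest/annotation/body comparisons
}

fn decide(main_unchanged: bool, has_imports: bool) -> Decision {
    if main_unchanged {
        if !has_imports {
            // An unchanged main file with no imports means nothing can have changed.
            return Decision::NoAction;
        }
        // The main file is identical, but an imported file may have changed on disk.
        return Decision::CheckImportsOnly;
    }
    Decision::KeepChecking
}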
@@ -242,6 +275,8 @@ fn generate_changed_program(old_ast: Node<Program>, mut new_ast: Node<Program>,

#[cfg(test)]
mod tests {
    use pretty_assertions::assert_eq;

    use super::*;
    use crate::execution::{parse_execute, parse_execute_with_project_dir, ExecTestResults};
@@ -658,6 +693,92 @@ extrude(profile001, length = 100)"#
        )
        .await;

        assert_eq!(result, CacheResult::NoAction(false));
        let CacheResult::CheckImportsOnly { reapply_settings, .. } = result else {
            panic!("Expected CheckImportsOnly, got {:?}", result);
        };

        assert_eq!(reapply_settings, false);
    }

    #[tokio::test(flavor = "multi_thread")]
    async fn test_cache_multi_file_only_other_file_changes_should_reexecute() {
        let code = r#"import "toBeImported.kcl" as importedCube

importedCube

sketch001 = startSketchOn(XZ)
profile001 = startProfile(sketch001, at = [-134.53, -56.17])
|> angledLine(angle = 0, length = 79.05, tag = $rectangleSegmentA001)
|> angledLine(angle = segAng(rectangleSegmentA001) - 90, length = 76.28)
|> angledLine(angle = segAng(rectangleSegmentA001), length = -segLen(rectangleSegmentA001), tag = $seg01)
|> line(endAbsolute = [profileStartX(%), profileStartY(%)], tag = $seg02)
|> close()
extrude001 = extrude(profile001, length = 100)
sketch003 = startSketchOn(extrude001, face = seg02)
sketch002 = startSketchOn(extrude001, face = seg01)
"#;

        let other_file = (
            std::path::PathBuf::from("toBeImported.kcl"),
            r#"sketch001 = startSketchOn(XZ)
profile001 = startProfile(sketch001, at = [281.54, 305.81])
|> angledLine(angle = 0, length = 123.43, tag = $rectangleSegmentA001)
|> angledLine(angle = segAng(rectangleSegmentA001) - 90, length = 85.99)
|> angledLine(angle = segAng(rectangleSegmentA001), length = -segLen(rectangleSegmentA001))
|> line(endAbsolute = [profileStartX(%), profileStartY(%)])
|> close()
extrude(profile001, length = 100)"#
                .to_string(),
        );

        let other_file2 = (
            std::path::PathBuf::from("toBeImported.kcl"),
            r#"sketch001 = startSketchOn(XZ)
profile001 = startProfile(sketch001, at = [281.54, 305.81])
|> angledLine(angle = 0, length = 123.43, tag = $rectangleSegmentA001)
|> angledLine(angle = segAng(rectangleSegmentA001) - 90, length = 85.99)
|> angledLine(angle = segAng(rectangleSegmentA001), length = -segLen(rectangleSegmentA001))
|> line(endAbsolute = [profileStartX(%), profileStartY(%)])
|> close()
extrude(profile001, length = 100)
|> translate(z=100)
"#
                .to_string(),
        );

        let tmp_dir = std::env::temp_dir();
        let tmp_dir = tmp_dir.join(uuid::Uuid::new_v4().to_string());

        // Create a temporary file for each of the other files.
        let tmp_file = tmp_dir.join(other_file.0);
        std::fs::create_dir_all(tmp_file.parent().unwrap()).unwrap();
        std::fs::write(&tmp_file, other_file.1).unwrap();

        let ExecTestResults { program, exec_ctxt, .. } =
            parse_execute_with_project_dir(code, Some(tmp_dir)).await.unwrap();

        // Change the other file.
        std::fs::write(tmp_file, other_file2.1).unwrap();

        let mut new_program = crate::Program::parse_no_errs(code).unwrap();
        new_program.compute_digest();

        let result = get_changed_program(
            CacheInformation {
                ast: &program.ast,
                settings: &exec_ctxt.settings,
            },
            CacheInformation {
                ast: &new_program.ast,
                settings: &exec_ctxt.settings,
            },
        )
        .await;

        let CacheResult::CheckImportsOnly { reapply_settings, .. } = result else {
            panic!("Expected CheckImportsOnly, got {:?}", result);
        };

        assert_eq!(reapply_settings, false);
    }
}
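The new test relies on a plain std temp-directory fixture: write the imported file, execute, rewrite it, then re-run the cache check. A hedged, dependency-free sketch of that fixture pattern; the helper name and the timestamp-based unique directory are illustrative, not part of the crate:

use std::{env, fs, io, path::PathBuf};

// Write `contents` to `name` inside a fresh, uniquely named temp directory and
// return the full path, mirroring the setup the test above performs.
fn write_import_fixture(name: &str, contents: &str) -> io::Result<PathBuf> {
    let dir = env::temp_dir().join(unique_dir_name());
    let file = dir.join(name);
    fs::create_dir_all(file.parent().unwrap())?;
    fs::write(&file, contents)?;
    Ok(file)
}

// Stand-in for uuid::Uuid::new_v4() so this sketch has no external dependency.
fn unique_dir_name() -> String {
    use std::time::{SystemTime, UNIX_EPOCH};
    let nanos = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_nanos();
    format!("kcl-cache-test-{nanos}")
}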
@@ -2791,13 +2791,12 @@ d = b + c
        let mut exec_state = ExecState::new(&exec_ctxt);

        exec_ctxt
            .run_concurrent(
            .run(
                &crate::Program {
                    ast: main.clone(),
                    original_file_contents: "".to_owned(),
                },
                &mut exec_state,
                false,
            )
            .await
            .unwrap();
@@ -42,6 +42,7 @@ use crate::{
    parsing::ast::types::{Expr, ImportPath, NodeRef},
    source_range::SourceRange,
    std::StdLib,
    walk::{Universe, UniverseMap},
    CompilationError, ExecError, KclErrorWithOutputs,
};
@@ -586,7 +587,7 @@ impl ExecutorContext {
    pub async fn run_with_caching(&self, program: crate::Program) -> Result<ExecOutcome, KclErrorWithOutputs> {
        assert!(!self.is_mock());

        let (program, mut exec_state, preserve_mem) = if let Some(OldAstState {
        let (program, mut exec_state, preserve_mem, imports_info) = if let Some(OldAstState {
            ast: old_ast,
            exec_state: mut old_state,
            settings: old_settings,
@@ -603,7 +604,7 @@ impl ExecutorContext {
        };

        // Get the program that actually changed from the old and new information.
        let (clear_scene, program) = match cache::get_changed_program(old, new).await {
        let (clear_scene, program, import_check_info) = match cache::get_changed_program(old, new).await {
            CacheResult::ReExecute {
                clear_scene,
                reapply_settings,
@@ -616,7 +617,7 @@ impl ExecutorContext {
                    .await
                    .is_err()
                {
                    (true, program)
                    (true, program, None)
                } else {
                    (
                        clear_scene,
@@ -624,6 +625,52 @@ impl ExecutorContext {
                            ast: changed_program,
                            original_file_contents: program.original_file_contents,
                        },
                        None,
                    )
                }
            }
            CacheResult::CheckImportsOnly {
                reapply_settings,
                ast: changed_program,
            } => {
                if reapply_settings
                    && self
                        .engine
                        .reapply_settings(&self.settings, Default::default(), old_state.id_generator())
                        .await
                        .is_err()
                {
                    (true, program, None)
                } else {
                    // We need to check our imports to see if they changed.
                    let mut new_exec_state = ExecState::new(self);
                    let (new_universe, new_universe_map) = self.get_universe(&program, &mut new_exec_state).await?;
                    let mut clear_scene = false;

                    let mut keys = new_universe.keys().clone().collect::<Vec<_>>();
                    keys.sort();
                    for key in keys {
                        let (_, id, _, _) = &new_universe[key];
                        let old_source = old_state.get_source(*id);
                        let new_source = new_exec_state.get_source(*id);
                        if old_source != new_source {
                            clear_scene = true;
                            break;
                        }
                    }

                    (
                        clear_scene,
                        crate::Program {
                            ast: changed_program,
                            original_file_contents: program.original_file_contents,
                        },
                        // We only care about this if we are clearing the scene.
                        if clear_scene {
                            Some((new_universe, new_universe_map, new_exec_state))
                        } else {
                            None
                        },
                    )
                }
            }
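The core of the new CheckImportsOnly branch is the per-module source comparison: build the new universe, then clear the scene only if some imported module's source text differs from what was executed last time. A hedged, self-contained sketch of that comparison, with a plain HashMap<u32, String> standing in for the ModuleId-to-source lookups:

use std::collections::HashMap;

// Returns true if any imported module's source changed between the old and new runs.
// Keys are sorted only to make the scan order deterministic, as in the hunk above.
fn any_import_changed(old: &HashMap<u32, String>, new: &HashMap<u32, String>) -> bool {
    let mut ids: Vec<_> = new.keys().copied().collect();
    ids.sort();
    ids.into_iter().any(|id| old.get(&id) != new.get(&id))
}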
@@ -646,7 +693,7 @@ impl ExecutorContext {
                    let outcome = old_state.to_exec_outcome(result_env).await;
                    return Ok(outcome);
                }
                (true, program)
                (true, program, None)
            }
            CacheResult::NoAction(false) => {
                let outcome = old_state.to_exec_outcome(result_env).await;
@@ -654,34 +701,42 @@ impl ExecutorContext {
            }
        };

        let (exec_state, preserve_mem) = if clear_scene {
            // Pop the execution state, since we are starting fresh.
            let mut exec_state = old_state;
            exec_state.reset(self);
        let (exec_state, preserve_mem, universe_info) =
            if let Some((new_universe, new_universe_map, mut new_exec_state)) = import_check_info {
                // Clear the scene if the imports changed.
                self.send_clear_scene(&mut new_exec_state, Default::default())
                    .await
                    .map_err(KclErrorWithOutputs::no_outputs)?;

            // We don't do this in mock mode since there is no engine connection
            // anyways and from the TS side we override memory and don't want to clear it.
            self.send_clear_scene(&mut exec_state, Default::default())
                .await
                .map_err(KclErrorWithOutputs::no_outputs)?;
                (new_exec_state, false, Some((new_universe, new_universe_map)))
            } else if clear_scene {
                // Pop the execution state, since we are starting fresh.
                let mut exec_state = old_state;
                exec_state.reset(self);

            (exec_state, false)
        } else {
            old_state.mut_stack().restore_env(result_env);
                self.send_clear_scene(&mut exec_state, Default::default())
                    .await
                    .map_err(KclErrorWithOutputs::no_outputs)?;

            (old_state, true)
        };
                (exec_state, false, None)
            } else {
                old_state.mut_stack().restore_env(result_env);

        (program, exec_state, preserve_mem)
                (old_state, true, None)
            };

        (program, exec_state, preserve_mem, universe_info)
    } else {
        let mut exec_state = ExecState::new(self);
        self.send_clear_scene(&mut exec_state, Default::default())
            .await
            .map_err(KclErrorWithOutputs::no_outputs)?;
        (program, exec_state, false)
        (program, exec_state, false, None)
    };

    let result = self.run_concurrent(&program, &mut exec_state, preserve_mem).await;
    let result = self
        .run_concurrent(&program, &mut exec_state, imports_info, preserve_mem)
        .await;

    if result.is_err() {
        cache::bust_cache().await;
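After the match, the rewritten hunk picks which execution state to carry forward in three cases: imports changed (use the fresh state built during the import check), main program changed (reset the old state), or nothing changed (keep the old state and its memory). A hedged sketch of just that selection, with strings standing in for the real state types:

// Simplified three-way choice mirroring the hunk above (illustrative types only).
fn pick_state(import_check_info: Option<&str>, clear_scene: bool) -> (&'static str, bool) {
    if import_check_info.is_some() {
        // Imports changed: use the exec state built while diffing the imports.
        ("new_exec_state", false)
    } else if clear_scene {
        // Main program changed: reset the old state and start fresh.
        ("old_state (reset)", false)
    } else {
        // Nothing changed: keep the old state and preserve its memory.
        ("old_state", true)
    }
}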
@@ -714,7 +769,7 @@ impl ExecutorContext {
        program: &crate::Program,
        exec_state: &mut ExecState,
    ) -> Result<(EnvironmentRef, Option<ModelingSessionData>), KclErrorWithOutputs> {
        self.run_concurrent(program, exec_state, false).await
        self.run_concurrent(program, exec_state, None, false).await
    }

    /// Perform the execution of a program using a concurrent
@@ -728,47 +783,24 @@ impl ExecutorContext {
        &self,
        program: &crate::Program,
        exec_state: &mut ExecState,
        universe_info: Option<(Universe, UniverseMap)>,
        preserve_mem: bool,
    ) -> Result<(EnvironmentRef, Option<ModelingSessionData>), KclErrorWithOutputs> {
        exec_state.add_root_module_contents(program);
        // Reuse our cached universe if we have one.
        #[allow(unused_variables)]
        let (universe, universe_map) = if let Some((universe, universe_map)) = universe_info {
            (universe, universe_map)
        } else {
            self.get_universe(program, exec_state).await?
        };

        let default_planes = self.engine.get_default_planes().read().await.clone();

        // Run the prelude to set up the engine.
        self.eval_prelude(exec_state, SourceRange::synthetic())
            .await
            .map_err(KclErrorWithOutputs::no_outputs)?;

        let mut universe = std::collections::HashMap::new();

        let default_planes = self.engine.get_default_planes().read().await.clone();
        #[cfg_attr(not(feature = "artifact-graph"), expect(unused_variables))]
        let root_imports = crate::walk::import_universe(
            self,
            &ModuleRepr::Kcl(program.ast.clone(), None),
            &mut universe,
            exec_state,
        )
        .await
        .map_err(|err| {
            println!("Error: {err:?}");
            let module_id_to_module_path: IndexMap<ModuleId, ModulePath> = exec_state
                .global
                .path_to_source_id
                .iter()
                .map(|(k, v)| ((*v), k.clone()))
                .collect();

            KclErrorWithOutputs::new(
                err,
                #[cfg(feature = "artifact-graph")]
                exec_state.global.operations.clone(),
                #[cfg(feature = "artifact-graph")]
                exec_state.global.artifact_commands.clone(),
                #[cfg(feature = "artifact-graph")]
                exec_state.global.artifact_graph.clone(),
                module_id_to_module_path,
                exec_state.global.id_to_source.clone(),
                default_planes.clone(),
            )
        })?;

        for modules in crate::walk::import_graph(&universe, self)
            .map_err(|err| {
                let module_id_to_module_path: IndexMap<ModuleId, ModulePath> = exec_state
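The signature change above lets run_concurrent skip rebuilding the import universe when run_with_caching already computed it while diffing imports. A hedged, generic sketch of that reuse pattern, not the crate's API:

// Use the cached value when the caller supplies one, otherwise compute it.
fn universe_or_compute<T>(cached: Option<T>, compute: impl FnOnce() -> T) -> T {
    match cached {
        Some(universe) => universe, // warm path: run_with_caching already walked the imports
        None => compute(),          // cold path: walk the import graph now
    }
}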
@@ -821,7 +853,7 @@ impl ExecutorContext {
                ModulePath::Local { value, .. } => {
                    // We only want to display the top-level module imports in
                    // the Feature Tree, not transitive imports.
                    if root_imports.contains_key(value) {
                    if universe_map.contains_key(value) {
                        exec_state.global.operations.push(Operation::GroupBegin {
                            group: Group::ModuleInstance {
                                name: value.file_name().unwrap_or_default().to_string_lossy().into_owned(),
@@ -974,6 +1006,52 @@ impl ExecutorContext {
        self.inner_run(program, exec_state, preserve_mem).await
    }

    /// Get the universe & universe map of the program.
    /// And see if any of the imports changed.
    async fn get_universe(
        &self,
        program: &crate::Program,
        exec_state: &mut ExecState,
    ) -> Result<(Universe, UniverseMap), KclErrorWithOutputs> {
        exec_state.add_root_module_contents(program);

        let mut universe = std::collections::HashMap::new();

        let default_planes = self.engine.get_default_planes().read().await.clone();

        let root_imports = crate::walk::import_universe(
            self,
            &ModuleRepr::Kcl(program.ast.clone(), None),
            &mut universe,
            exec_state,
        )
        .await
        .map_err(|err| {
            println!("Error: {err:?}");
            let module_id_to_module_path: IndexMap<ModuleId, ModulePath> = exec_state
                .global
                .path_to_source_id
                .iter()
                .map(|(k, v)| ((*v), k.clone()))
                .collect();

            KclErrorWithOutputs::new(
                err,
                #[cfg(feature = "artifact-graph")]
                exec_state.global.operations.clone(),
                #[cfg(feature = "artifact-graph")]
                exec_state.global.artifact_commands.clone(),
                #[cfg(feature = "artifact-graph")]
                exec_state.global.artifact_graph.clone(),
                module_id_to_module_path,
                exec_state.global.id_to_source.clone(),
                default_planes,
            )
        })?;

        Ok((universe, root_imports))
    }

    /// Perform the execution of a program. Accept all possible parameters and
    /// output everything.
    async fn inner_run(
@@ -238,6 +238,10 @@ impl ExecState {
        self.global.id_to_source.insert(id, source.clone());
    }

    pub(super) fn get_source(&self, id: ModuleId) -> Option<&ModuleSource> {
        self.global.id_to_source.get(&id)
    }

    pub(super) fn add_module(&mut self, id: ModuleId, path: ModulePath, repr: ModuleRepr) {
        debug_assert!(self.global.path_to_source_id.contains_key(&path));
        let module_info = ModuleInfo { id, repr, path };
@@ -542,6 +542,16 @@ impl Program {
        }
    }

    /// Checks if the ast has any import statements.
    pub fn has_import_statements(&self) -> bool {
        for item in &self.body {
            if let BodyItem::ImportStatement(_) = item {
                return true;
            }
        }
        false
    }

    pub fn in_comment(&self, pos: usize) -> bool {
        // Check if its in the body.
        if self.non_code_meta.in_comment(pos) {
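The new helper above is a straightforward linear scan over the body. An equivalent, hedged sketch over a simplified body type (not the crate's real BodyItem) shows the same check written with an iterator:

// Simplified stand-in for the AST body items (illustrative only).
enum BodyItem {
    ImportStatement(String),
    Other(String),
}

// Same check as the loop above, expressed with `any` + `matches!`.
fn has_import_statements(body: &[BodyItem]) -> bool {
    body.iter().any(|item| matches!(item, BodyItem::ImportStatement(_)))
}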
@@ -21,8 +21,9 @@ type Dependency = (String, String);

type Graph = Vec<Dependency>;

type DependencyInfo = (AstNode<ImportStatement>, ModuleId, ModulePath, ModuleRepr);
type Universe = HashMap<String, DependencyInfo>;
pub(crate) type DependencyInfo = (AstNode<ImportStatement>, ModuleId, ModulePath, ModuleRepr);
pub(crate) type UniverseMap = HashMap<PathBuf, AstNode<ImportStatement>>;
pub(crate) type Universe = HashMap<String, DependencyInfo>;

/// Process a number of programs, returning the graph of dependencies.
///
|
||||
repr: &ModuleRepr,
|
||||
out: &mut Universe,
|
||||
exec_state: &mut ExecState,
|
||||
) -> Result<HashMap<PathBuf, crate::parsing::ast::types::Node<ImportStatement>>, KclError> {
|
||||
) -> Result<UniverseMap, KclError> {
|
||||
let modules = import_dependencies(repr, ctx)?;
|
||||
let mut module_imports = HashMap::new();
|
||||
for (filename, import_stmt, module_path) in modules {
|
||||
|
@ -9,4 +9,4 @@ pub use ast_node::Node;
|
||||
pub use ast_visitor::Visitable;
|
||||
pub use ast_walk::walk;
|
||||
pub use import_graph::import_graph;
|
||||
pub(crate) use import_graph::import_universe;
|
||||
pub(crate) use import_graph::{import_universe, Universe, UniverseMap};
|
||||