merge main
@@ -70,7 +70,7 @@ pub(super) fn expect_properties<'a>(
 ) -> Result<&'a [Node<ObjectProperty>], KclError> {
     assert_eq!(annotation.name().unwrap(), for_key);
     Ok(&**annotation.properties.as_ref().ok_or_else(|| {
-        KclError::Semantic(KclErrorDetails::new(
+        KclError::new_semantic(KclErrorDetails::new(
             format!("Empty `{for_key}` annotation"),
             vec![annotation.as_source_range()],
         ))
@@ -84,7 +84,7 @@ pub(super) fn expect_ident(expr: &Expr) -> Result<&str, KclError> {
         }
     }
 
-    Err(KclError::Semantic(KclErrorDetails::new(
+    Err(KclError::new_semantic(KclErrorDetails::new(
         "Unexpected settings value, expected a simple name, e.g., `mm`".to_owned(),
         vec![expr.into()],
     )))
@@ -98,7 +98,7 @@ pub(super) fn expect_number(expr: &Expr) -> Result<String, KclError> {
         }
     }
 
-    Err(KclError::Semantic(KclErrorDetails::new(
+    Err(KclError::new_semantic(KclErrorDetails::new(
         "Unexpected settings value, expected a number, e.g., `1.0`".to_owned(),
         vec![expr.into()],
     )))
@@ -113,7 +113,7 @@ pub(super) fn get_impl(annotations: &[Node<Annotation>], source_range: SourceRan
         if &*p.key.name == IMPL {
             if let Some(s) = p.value.ident_name() {
                 return Impl::from_str(s).map(Some).map_err(|_| {
-                    KclError::Semantic(KclErrorDetails::new(
+                    KclError::new_semantic(KclErrorDetails::new(
                         format!(
                             "Invalid value for {} attribute, expected one of: {}",
                             IMPL,
@@ -139,7 +139,7 @@ impl UnitLen {
             "inch" | "in" => Ok(UnitLen::Inches),
             "ft" => Ok(UnitLen::Feet),
             "yd" => Ok(UnitLen::Yards),
-            value => Err(KclError::Semantic(KclErrorDetails::new(
+            value => Err(KclError::new_semantic(KclErrorDetails::new(
                 format!(
                     "Unexpected value for length units: `{value}`; expected one of `mm`, `cm`, `m`, `in`, `ft`, `yd`"
                 ),
@@ -154,7 +154,7 @@ impl UnitAngle {
         match s {
             "deg" => Ok(UnitAngle::Degrees),
             "rad" => Ok(UnitAngle::Radians),
-            value => Err(KclError::Semantic(KclErrorDetails::new(
+            value => Err(KclError::new_semantic(KclErrorDetails::new(
                 format!("Unexpected value for angle units: `{value}`; expected one of `deg`, `rad`"),
                 vec![source_range],
             ))),
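Most hunks in this commit make the same mechanical change: error values are built through constructor functions (KclError::new_semantic, KclError::new_internal, and so on) instead of naming enum variants directly (KclError::Semantic, KclError::Internal). A minimal, self-contained sketch of that pattern follows; the struct-variant layout, the (usize, usize) source ranges, and the helper empty_annotation_error are simplifications for illustration, not kcl-lib's real definitions.

// Simplified stand-ins for kcl-lib's error types, for illustration only.
#[derive(Debug, Clone)]
pub struct KclErrorDetails {
    pub message: String,
    pub source_ranges: Vec<(usize, usize)>,
}

impl KclErrorDetails {
    pub fn new(message: String, source_ranges: Vec<(usize, usize)>) -> Self {
        Self { message, source_ranges }
    }
}

#[derive(Debug, Clone)]
pub enum KclError {
    Semantic { details: KclErrorDetails },
    Internal { details: KclErrorDetails },
}

impl KclError {
    // Constructors hide the variant shape, so a variant can later grow extra
    // fields (as UndefinedValue does elsewhere in this diff) without editing
    // every construction site.
    pub fn new_semantic(details: KclErrorDetails) -> Self {
        KclError::Semantic { details }
    }

    pub fn new_internal(details: KclErrorDetails) -> Self {
        KclError::Internal { details }
    }
}

fn empty_annotation_error(for_key: &str) -> KclError {
    KclError::new_semantic(KclErrorDetails::new(
        format!("Empty `{for_key}` annotation"),
        vec![(0, 0)],
    ))
}

With constructors, every call site reads the same, which is why the sweep through this commit is almost purely textual.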
@@ -24,7 +24,7 @@ macro_rules! internal_error {
     ($range:expr, $($rest:tt)*) => {{
         let message = format!($($rest)*);
         debug_assert!(false, "{}", &message);
-        return Err(KclError::Internal(KclErrorDetails::new(message, vec![$range])));
+        return Err(KclError::new_internal(KclErrorDetails::new(message, vec![$range])));
     }};
 }
@@ -676,6 +676,7 @@ impl EdgeCut {
 #[serde(rename_all = "camelCase")]
 pub struct ArtifactGraph {
     map: IndexMap<ArtifactId, Artifact>,
+    item_count: usize,
 }
 
 impl ArtifactGraph {
@@ -711,10 +712,10 @@ pub(super) fn build_artifact_graph(
     artifact_commands: &[ArtifactCommand],
     responses: &IndexMap<Uuid, WebSocketResponse>,
     ast: &Node<Program>,
-    cached_body_items: usize,
     exec_artifacts: &mut IndexMap<ArtifactId, Artifact>,
     initial_graph: ArtifactGraph,
 ) -> Result<ArtifactGraph, KclError> {
+    let item_count = initial_graph.item_count;
     let mut map = initial_graph.into_map();
 
     let mut path_to_plane_id_map = FnvHashMap::default();
@@ -725,7 +726,7 @@ pub(super) fn build_artifact_graph(
     for exec_artifact in exec_artifacts.values_mut() {
         // Note: We only have access to the new AST. So if these artifacts
         // somehow came from cached AST, this won't fill in anything.
-        fill_in_node_paths(exec_artifact, ast, cached_body_items);
+        fill_in_node_paths(exec_artifact, ast, item_count);
     }
 
     for artifact_command in artifact_commands {
@@ -752,7 +753,7 @@ pub(super) fn build_artifact_graph(
             &flattened_responses,
            &path_to_plane_id_map,
            ast,
-           cached_body_items,
+           item_count,
            exec_artifacts,
        )?;
        for artifact in artifact_updates {
@@ -765,7 +766,10 @@ pub(super) fn build_artifact_graph(
         merge_artifact_into_map(&mut map, exec_artifact.clone());
     }
 
-    Ok(ArtifactGraph { map })
+    Ok(ArtifactGraph {
+        map,
+        item_count: item_count + ast.body.len(),
+    })
 }
 
 /// These may have been created with placeholder `CodeRef`s because we didn't
@@ -949,7 +953,7 @@ fn artifacts_to_update(
         ModelingCmd::StartPath(_) => {
             let mut return_arr = Vec::new();
             let current_plane_id = path_to_plane_id_map.get(&artifact_command.cmd_id).ok_or_else(|| {
-                KclError::Internal(KclErrorDetails::new(
+                KclError::new_internal(KclErrorDetails::new(
                     format!("Expected a current plane ID when processing StartPath command, but we have none: {id:?}"),
                     vec![range],
                 ))
@@ -1137,7 +1141,7 @@ fn artifacts_to_update(
             // TODO: Using the first one. Make sure to revisit this
             // choice, don't think it matters for now.
             path_id: ArtifactId::new(*loft_cmd.section_ids.first().ok_or_else(|| {
-                KclError::Internal(KclErrorDetails::new(
+                KclError::new_internal(KclErrorDetails::new(
                     format!("Expected at least one section ID in Loft command: {id:?}; cmd={cmd:?}"),
                     vec![range],
                 ))
@@ -1180,7 +1184,7 @@ fn artifacts_to_update(
             };
             last_path = Some(path);
             let path_sweep_id = path.sweep_id.ok_or_else(|| {
-                KclError::Internal(KclErrorDetails::new(
+                KclError::new_internal(KclErrorDetails::new(
                     format!(
                         "Expected a sweep ID on the path when processing Solid3dGetExtrusionFaceInfo command, but we have none: {id:?}, {path:?}"
                     ),
@@ -1234,7 +1238,7 @@ fn artifacts_to_update(
                 continue;
             };
             let path_sweep_id = path.sweep_id.ok_or_else(|| {
-                KclError::Internal(KclErrorDetails::new(
+                KclError::new_internal(KclErrorDetails::new(
                     format!(
                         "Expected a sweep ID on the path when processing last path's Solid3dGetExtrusionFaceInfo command, but we have none: {id:?}, {path:?}"
                     ),
@@ -6,25 +6,31 @@ use itertools::{EitherOrBoth, Itertools};
 use tokio::sync::RwLock;
 
 use crate::{
-    execution::{annotations, memory::Stack, state::ModuleInfoMap, EnvironmentRef, ExecState, ExecutorSettings},
+    execution::{
+        annotations,
+        memory::Stack,
+        state::{self as exec_state, ModuleInfoMap},
+        EnvironmentRef, ExecutorSettings,
+    },
     parsing::ast::types::{Annotation, Node, Program},
     walk::Node as WalkNode,
     ExecOutcome, ExecutorContext,
 };
 
 lazy_static::lazy_static! {
     /// A static mutable lock for updating the last successful execution state for the cache.
-    static ref OLD_AST: Arc<RwLock<Option<OldAstState>>> = Default::default();
+    static ref OLD_AST: Arc<RwLock<Option<GlobalState>>> = Default::default();
     // The last successful run's memory. Not cleared after an unssuccessful run.
     static ref PREV_MEMORY: Arc<RwLock<Option<(Stack, ModuleInfoMap)>>> = Default::default();
 }
 
 /// Read the old ast memory from the lock.
-pub(crate) async fn read_old_ast() -> Option<OldAstState> {
+pub(super) async fn read_old_ast() -> Option<GlobalState> {
     let old_ast = OLD_AST.read().await;
     old_ast.clone()
 }
 
-pub(super) async fn write_old_ast(old_state: OldAstState) {
+pub(super) async fn write_old_ast(old_state: GlobalState) {
     let mut old_ast = OLD_AST.write().await;
     *old_ast = Some(old_state);
 }
@@ -34,7 +40,7 @@ pub(crate) async fn read_old_memory() -> Option<(Stack, ModuleInfoMap)> {
     old_mem.clone()
 }
 
-pub(super) async fn write_old_memory(mem: (Stack, ModuleInfoMap)) {
+pub(crate) async fn write_old_memory(mem: (Stack, ModuleInfoMap)) {
     let mut old_mem = PREV_MEMORY.write().await;
     *old_mem = Some(mem);
 }
@@ -56,16 +62,73 @@ pub struct CacheInformation<'a> {
     pub settings: &'a ExecutorSettings,
 }
 
-/// The old ast and program memory.
+/// The cached state of the whole program.
 #[derive(Debug, Clone)]
-pub struct OldAstState {
-    /// The ast.
-    pub ast: Node<Program>,
+pub(super) struct GlobalState {
+    pub(super) main: ModuleState,
     /// The exec state.
-    pub exec_state: ExecState,
+    pub(super) exec_state: exec_state::GlobalState,
     /// The last settings used for execution.
-    pub settings: crate::execution::ExecutorSettings,
-    pub result_env: EnvironmentRef,
+    pub(super) settings: ExecutorSettings,
 }
 
+impl GlobalState {
+    pub fn new(
+        state: exec_state::ExecState,
+        settings: ExecutorSettings,
+        ast: Node<Program>,
+        result_env: EnvironmentRef,
+    ) -> Self {
+        Self {
+            main: ModuleState {
+                ast,
+                exec_state: state.mod_local,
+                result_env,
+            },
+            exec_state: state.global,
+            settings,
+        }
+    }
+
+    pub fn with_settings(mut self, settings: ExecutorSettings) -> GlobalState {
+        self.settings = settings;
+        self
+    }
+
+    pub fn reconstitute_exec_state(&self) -> exec_state::ExecState {
+        exec_state::ExecState {
+            global: self.exec_state.clone(),
+            mod_local: self.main.exec_state.clone(),
+        }
+    }
+
+    pub async fn into_exec_outcome(self, ctx: &ExecutorContext) -> ExecOutcome {
+        // Fields are opt-in so that we don't accidentally leak private internal
+        // state when we add more to ExecState.
+        ExecOutcome {
+            variables: self.main.exec_state.variables(self.main.result_env),
+            filenames: self.exec_state.filenames(),
+            #[cfg(feature = "artifact-graph")]
+            operations: self.exec_state.artifacts.operations,
+            #[cfg(feature = "artifact-graph")]
+            artifact_commands: self.exec_state.artifacts.commands,
+            #[cfg(feature = "artifact-graph")]
+            artifact_graph: self.exec_state.artifacts.graph,
+            errors: self.exec_state.errors,
+            default_planes: ctx.engine.get_default_planes().read().await.clone(),
+        }
+    }
+}
+
+/// Per-module cached state
+#[derive(Debug, Clone)]
+pub(super) struct ModuleState {
+    /// The AST of the module.
+    pub(super) ast: Node<Program>,
+    /// The ExecState of the module.
+    pub(super) exec_state: exec_state::ModuleState,
+    /// The memory env for the module.
+    pub(super) result_env: EnvironmentRef,
+}
+
 /// The result of a cache check.
@@ -79,9 +142,6 @@ pub(super) enum CacheResult {
         reapply_settings: bool,
         /// The program that needs to be executed.
         program: Node<Program>,
-        /// The number of body items that were cached and omitted from the
-        /// program that needs to be executed. Used to compute [`crate::NodePath`].
-        cached_body_items: usize,
     },
     /// Check only the imports, and not the main program.
     /// Before sending this we already checked the main program and it is the same.
@@ -146,7 +206,6 @@ pub(super) async fn get_changed_program(old: CacheInformation<'_>, new: CacheInf
     // We know they have the same imports because the ast is the same.
     // If we have no imports, we can skip this.
     if !old.ast.has_import_statements() {
-        println!("No imports, no need to check.");
         return CacheResult::NoAction(reapply_settings);
     }
 
@@ -194,7 +253,6 @@ pub(super) async fn get_changed_program(old: CacheInformation<'_>, new: CacheInf
             clear_scene: true,
             reapply_settings: true,
             program: new.ast.clone(),
-            cached_body_items: 0,
         };
     }
 
@@ -223,7 +281,6 @@ fn generate_changed_program(old_ast: Node<Program>, mut new_ast: Node<Program>,
             clear_scene: true,
             reapply_settings,
             program: new_ast,
-            cached_body_items: 0,
         };
     }
 
@@ -244,7 +301,6 @@ fn generate_changed_program(old_ast: Node<Program>, mut new_ast: Node<Program>,
                 clear_scene: true,
                 reapply_settings,
                 program: new_ast,
-                cached_body_items: 0,
             }
         }
         std::cmp::Ordering::Greater => {
@@ -261,7 +317,6 @@ fn generate_changed_program(old_ast: Node<Program>, mut new_ast: Node<Program>,
                 clear_scene: false,
                 reapply_settings,
                 program: new_ast,
-                cached_body_items: old_ast.body.len(),
             }
         }
         std::cmp::Ordering::Equal => {
@@ -600,7 +655,6 @@ startSketchOn(XY)
                 clear_scene: true,
                 reapply_settings: true,
                 program: new_program.ast,
-                cached_body_items: 0,
             }
         );
     }
@@ -639,7 +693,6 @@ startSketchOn(XY)
                 clear_scene: true,
                 reapply_settings: true,
                 program: new_program.ast,
-                cached_body_items: 0,
             }
         );
    }
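The cache hunks above replace the old OldAstState with a GlobalState/ModuleState pair: program-wide execution state and settings are cached once, while per-module state (AST, module-local state, result environment) is cached per module, and an ExecState is re-assembled from the two halves on a cache hit. A rough, self-contained sketch of that split-and-reassemble shape follows; GlobalExecState, ModuleExecState, and the String/usize fields are placeholders standing in for kcl-lib's Node<Program>, EnvironmentRef, and ExecutorSettings.

// Placeholder types; the real fields are Node<Program>, EnvironmentRef, etc.
#[derive(Debug, Clone, Default)]
struct GlobalExecState; // engine-wide data: artifacts, module infos, errors
#[derive(Debug, Clone, Default)]
struct ModuleExecState; // module-local data: stack, pipe value, path

#[derive(Debug, Clone, Default)]
struct ExecState {
    global: GlobalExecState,
    mod_local: ModuleExecState,
}

#[derive(Debug, Clone)]
struct ModuleState {
    ast: String,       // stand-in for Node<Program>
    exec_state: ModuleExecState,
    result_env: usize, // stand-in for EnvironmentRef
}

#[derive(Debug, Clone)]
struct GlobalState {
    main: ModuleState,
    exec_state: GlobalExecState,
    settings: String, // stand-in for ExecutorSettings
}

impl GlobalState {
    // Split a finished run into the two cacheable halves.
    fn new(state: ExecState, settings: String, ast: String, result_env: usize) -> Self {
        Self {
            main: ModuleState { ast, exec_state: state.mod_local, result_env },
            exec_state: state.global,
            settings,
        }
    }

    // Re-pair the cached global and main-module halves into a runnable ExecState.
    fn reconstitute_exec_state(&self) -> ExecState {
        ExecState {
            global: self.exec_state.clone(),
            mod_local: self.main.exec_state.clone(),
        }
    }
}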
@ -131,7 +131,7 @@ impl ExecutorContext {
|
||||
match statement {
|
||||
BodyItem::ImportStatement(import_stmt) => {
|
||||
if !matches!(body_type, BodyType::Root) {
|
||||
return Err(KclError::Semantic(KclErrorDetails::new(
|
||||
return Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
"Imports are only supported at the top-level of a file.".to_owned(),
|
||||
vec![import_stmt.into()],
|
||||
)));
|
||||
@ -164,15 +164,18 @@ impl ExecutorContext {
|
||||
let mut mod_value = mem.get_from(&mod_name, env_ref, import_item.into(), 0).cloned();
|
||||
|
||||
if value.is_err() && ty.is_err() && mod_value.is_err() {
|
||||
return Err(KclError::UndefinedValue(KclErrorDetails::new(
|
||||
format!("{} is not defined in module", import_item.name.name),
|
||||
vec![SourceRange::from(&import_item.name)],
|
||||
)));
|
||||
return Err(KclError::new_undefined_value(
|
||||
KclErrorDetails::new(
|
||||
format!("{} is not defined in module", import_item.name.name),
|
||||
vec![SourceRange::from(&import_item.name)],
|
||||
),
|
||||
None,
|
||||
));
|
||||
}
|
||||
|
||||
// Check that the item is allowed to be imported (in at least one namespace).
|
||||
if value.is_ok() && !module_exports.contains(&import_item.name.name) {
|
||||
value = Err(KclError::Semantic(KclErrorDetails::new(
|
||||
value = Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
format!(
|
||||
"Cannot import \"{}\" from module because it is not exported. Add \"export\" before the definition to export it.",
|
||||
import_item.name.name
|
||||
@ -182,7 +185,7 @@ impl ExecutorContext {
|
||||
}
|
||||
|
||||
if ty.is_ok() && !module_exports.contains(&ty_name) {
|
||||
ty = Err(KclError::Semantic(KclErrorDetails::new(format!(
|
||||
ty = Err(KclError::new_semantic(KclErrorDetails::new(format!(
|
||||
"Cannot import \"{}\" from module because it is not exported. Add \"export\" before the definition to export it.",
|
||||
import_item.name.name
|
||||
),
|
||||
@ -190,7 +193,7 @@ impl ExecutorContext {
|
||||
}
|
||||
|
||||
if mod_value.is_ok() && !module_exports.contains(&mod_name) {
|
||||
mod_value = Err(KclError::Semantic(KclErrorDetails::new(format!(
|
||||
mod_value = Err(KclError::new_semantic(KclErrorDetails::new(format!(
|
||||
"Cannot import \"{}\" from module because it is not exported. Add \"export\" before the definition to export it.",
|
||||
import_item.name.name
|
||||
),
|
||||
@ -253,7 +256,7 @@ impl ExecutorContext {
|
||||
.memory
|
||||
.get_from(name, env_ref, source_range, 0)
|
||||
.map_err(|_err| {
|
||||
KclError::Internal(KclErrorDetails::new(
|
||||
KclError::new_internal(KclErrorDetails::new(
|
||||
format!("{} is not defined in module (but was exported?)", name),
|
||||
vec![source_range],
|
||||
))
|
||||
@ -301,7 +304,12 @@ impl ExecutorContext {
|
||||
|
||||
let annotations = &variable_declaration.outer_attrs;
|
||||
|
||||
let value = self
|
||||
// During the evaluation of the variable's RHS, set context that this is all happening inside a variable
|
||||
// declaration, for the given name. This helps improve user-facing error messages.
|
||||
let lhs = variable_declaration.inner.name().to_owned();
|
||||
let prev_being_declared = exec_state.mod_local.being_declared.clone();
|
||||
exec_state.mod_local.being_declared = Some(lhs);
|
||||
let rhs_result = self
|
||||
.execute_expr(
|
||||
&variable_declaration.declaration.init,
|
||||
exec_state,
|
||||
@ -309,10 +317,14 @@ impl ExecutorContext {
|
||||
annotations,
|
||||
StatementKind::Declaration { name: &var_name },
|
||||
)
|
||||
.await?;
|
||||
.await;
|
||||
// Declaration over, so unset this context.
|
||||
exec_state.mod_local.being_declared = prev_being_declared;
|
||||
let rhs = rhs_result?;
|
||||
|
||||
exec_state
|
||||
.mut_stack()
|
||||
.add(var_name.clone(), value.clone(), source_range)?;
|
||||
.add(var_name.clone(), rhs.clone(), source_range)?;
|
||||
|
||||
// Track exports.
|
||||
if let ItemVisibility::Export = variable_declaration.visibility {
|
||||
@ -326,7 +338,7 @@ impl ExecutorContext {
|
||||
}
|
||||
}
|
||||
// Variable declaration can be the return value of a module.
|
||||
last_expr = matches!(body_type, BodyType::Root).then_some(value);
|
||||
last_expr = matches!(body_type, BodyType::Root).then_some(rhs);
|
||||
}
|
||||
BodyItem::TypeDeclaration(ty) => {
|
||||
let metadata = Metadata::from(&**ty);
|
||||
@ -336,7 +348,7 @@ impl ExecutorContext {
|
||||
let std_path = match &exec_state.mod_local.path {
|
||||
ModulePath::Std { value } => value,
|
||||
ModulePath::Local { .. } | ModulePath::Main => {
|
||||
return Err(KclError::Semantic(KclErrorDetails::new(
|
||||
return Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
"User-defined types are not yet supported.".to_owned(),
|
||||
vec![metadata.source_range],
|
||||
)));
|
||||
@ -352,7 +364,7 @@ impl ExecutorContext {
|
||||
.mut_stack()
|
||||
.add(name_in_mem.clone(), value, metadata.source_range)
|
||||
.map_err(|_| {
|
||||
KclError::Semantic(KclErrorDetails::new(
|
||||
KclError::new_semantic(KclErrorDetails::new(
|
||||
format!("Redefinition of type {}.", ty.name.name),
|
||||
vec![metadata.source_range],
|
||||
))
|
||||
@ -373,7 +385,7 @@ impl ExecutorContext {
|
||||
exec_state,
|
||||
metadata.source_range,
|
||||
)
|
||||
.map_err(|e| KclError::Semantic(e.into()))?,
|
||||
.map_err(|e| KclError::new_semantic(e.into()))?,
|
||||
),
|
||||
meta: vec![metadata],
|
||||
};
|
||||
@ -382,7 +394,7 @@ impl ExecutorContext {
|
||||
.mut_stack()
|
||||
.add(name_in_mem.clone(), value, metadata.source_range)
|
||||
.map_err(|_| {
|
||||
KclError::Semantic(KclErrorDetails::new(
|
||||
KclError::new_semantic(KclErrorDetails::new(
|
||||
format!("Redefinition of type {}.", ty.name.name),
|
||||
vec![metadata.source_range],
|
||||
))
|
||||
@ -393,7 +405,7 @@ impl ExecutorContext {
|
||||
}
|
||||
}
|
||||
None => {
|
||||
return Err(KclError::Semantic(KclErrorDetails::new(
|
||||
return Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
"User-defined types are not yet supported.".to_owned(),
|
||||
vec![metadata.source_range],
|
||||
)))
|
||||
@ -407,7 +419,7 @@ impl ExecutorContext {
|
||||
let metadata = Metadata::from(return_statement);
|
||||
|
||||
if matches!(body_type, BodyType::Root) {
|
||||
return Err(KclError::Semantic(KclErrorDetails::new(
|
||||
return Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
"Cannot return from outside a function.".to_owned(),
|
||||
vec![metadata.source_range],
|
||||
)));
|
||||
@ -426,7 +438,7 @@ impl ExecutorContext {
|
||||
.mut_stack()
|
||||
.add(memory::RETURN_NAME.to_owned(), value, metadata.source_range)
|
||||
.map_err(|_| {
|
||||
KclError::Semantic(KclErrorDetails::new(
|
||||
KclError::new_semantic(KclErrorDetails::new(
|
||||
"Multiple returns from a single function.".to_owned(),
|
||||
vec![metadata.source_range],
|
||||
))
|
||||
@ -531,7 +543,7 @@ impl ExecutorContext {
|
||||
*cache = Some((val, er, items.clone()));
|
||||
(er, items)
|
||||
}),
|
||||
ModuleRepr::Foreign(geom, _) => Err(KclError::Semantic(KclErrorDetails::new(
|
||||
ModuleRepr::Foreign(geom, _) => Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
"Cannot import items from foreign modules".to_owned(),
|
||||
vec![geom.source_range],
|
||||
))),
|
||||
@ -605,12 +617,12 @@ impl ExecutorContext {
|
||||
exec_state.global.mod_loader.leave_module(path);
|
||||
|
||||
result.map_err(|err| {
|
||||
if let KclError::ImportCycle(_) = err {
|
||||
if let KclError::ImportCycle { .. } = err {
|
||||
// It was an import cycle. Keep the original message.
|
||||
err.override_source_ranges(vec![source_range])
|
||||
} else {
|
||||
// TODO would be great to have line/column for the underlying error here
|
||||
KclError::Semantic(KclErrorDetails::new(
|
||||
KclError::new_semantic(KclErrorDetails::new(
|
||||
format!(
|
||||
"Error loading imported file ({path}). Open it to view more details.\n {}",
|
||||
err.message()
|
||||
@ -635,7 +647,12 @@ impl ExecutorContext {
|
||||
Expr::Literal(literal) => KclValue::from_literal((**literal).clone(), exec_state),
|
||||
Expr::TagDeclarator(tag) => tag.execute(exec_state).await?,
|
||||
Expr::Name(name) => {
|
||||
let value = name.get_result(exec_state, self).await?.clone();
|
||||
let being_declared = exec_state.mod_local.being_declared.clone();
|
||||
let value = name
|
||||
.get_result(exec_state, self)
|
||||
.await
|
||||
.map_err(|e| var_in_own_ref_err(e, &being_declared))?
|
||||
.clone();
|
||||
if let KclValue::Module { value: module_id, meta } = value {
|
||||
self.exec_module_for_result(
|
||||
module_id,
|
||||
@ -677,7 +694,7 @@ impl ExecutorContext {
|
||||
meta: vec![metadata.to_owned()],
|
||||
}
|
||||
} else {
|
||||
return Err(KclError::Semantic(KclErrorDetails::new(
|
||||
return Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
"Rust implementation of functions is restricted to the standard library".to_owned(),
|
||||
vec![metadata.source_range],
|
||||
)));
|
||||
@ -704,7 +721,7 @@ impl ExecutorContext {
|
||||
"you cannot declare variable {name} as %, because % can only be used in function calls"
|
||||
);
|
||||
|
||||
return Err(KclError::Semantic(KclErrorDetails::new(
|
||||
return Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
message,
|
||||
vec![pipe_substitution.into()],
|
||||
)));
|
||||
@ -712,7 +729,7 @@ impl ExecutorContext {
|
||||
StatementKind::Expression => match exec_state.mod_local.pipe_value.clone() {
|
||||
Some(x) => x,
|
||||
None => {
|
||||
return Err(KclError::Semantic(KclErrorDetails::new(
|
||||
return Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
"cannot use % outside a pipe expression".to_owned(),
|
||||
vec![pipe_substitution.into()],
|
||||
)));
|
||||
@ -741,6 +758,24 @@ impl ExecutorContext {
|
||||
}
|
||||
}
|
||||
|
||||
/// If the error is about an undefined name, and that name matches the name being defined,
|
||||
/// make the error message more specific.
|
||||
fn var_in_own_ref_err(e: KclError, being_declared: &Option<String>) -> KclError {
|
||||
let KclError::UndefinedValue { name, mut details } = e else {
|
||||
return e;
|
||||
};
|
||||
// TODO after June 26th: replace this with a let-chain,
|
||||
// which will be available in Rust 1.88
|
||||
// https://rust-lang.github.io/rfcs/2497-if-let-chains.html
|
||||
match (&being_declared, &name) {
|
||||
(Some(name0), Some(name1)) if name0 == name1 => {
|
||||
details.message = format!("You can't use `{name0}` because you're currently trying to define it. Use a different variable here instead.");
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
KclError::UndefinedValue { details, name }
|
||||
}
|
||||
|
||||
impl Node<AscribedExpression> {
|
||||
#[async_recursion]
|
||||
pub async fn get_result(&self, exec_state: &mut ExecState, ctx: &ExecutorContext) -> Result<KclValue, KclError> {
|
||||
@ -761,7 +796,7 @@ fn apply_ascription(
|
||||
source_range: SourceRange,
|
||||
) -> Result<KclValue, KclError> {
|
||||
let ty = RuntimeType::from_parsed(ty.inner.clone(), exec_state, value.into())
|
||||
.map_err(|e| KclError::Semantic(e.into()))?;
|
||||
.map_err(|e| KclError::new_semantic(e.into()))?;
|
||||
|
||||
value.coerce(&ty, false, exec_state).map_err(|_| {
|
||||
let suggestion = if ty == RuntimeType::length() {
|
||||
@ -771,7 +806,7 @@ fn apply_ascription(
|
||||
} else {
|
||||
""
|
||||
};
|
||||
KclError::Semantic(KclErrorDetails::new(
|
||||
KclError::new_semantic(KclErrorDetails::new(
|
||||
format!(
|
||||
"could not coerce value of type {} to type {ty}{suggestion}",
|
||||
value.human_friendly_type()
|
||||
@ -802,9 +837,20 @@ impl Node<Name> {
|
||||
&self,
|
||||
exec_state: &'a mut ExecState,
|
||||
ctx: &ExecutorContext,
|
||||
) -> Result<&'a KclValue, KclError> {
|
||||
let being_declared = exec_state.mod_local.being_declared.clone();
|
||||
self.get_result_inner(exec_state, ctx)
|
||||
.await
|
||||
.map_err(|e| var_in_own_ref_err(e, &being_declared))
|
||||
}
|
||||
|
||||
async fn get_result_inner<'a>(
|
||||
&self,
|
||||
exec_state: &'a mut ExecState,
|
||||
ctx: &ExecutorContext,
|
||||
) -> Result<&'a KclValue, KclError> {
|
||||
if self.abs_path {
|
||||
return Err(KclError::Semantic(KclErrorDetails::new(
|
||||
return Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
"Absolute paths (names beginning with `::` are not yet supported)".to_owned(),
|
||||
self.as_source_ranges(),
|
||||
)));
|
||||
@ -825,7 +871,7 @@ impl Node<Name> {
|
||||
let value = match mem_spec {
|
||||
Some((env, exports)) => {
|
||||
if !exports.contains(&p.name) {
|
||||
return Err(KclError::Semantic(KclErrorDetails::new(
|
||||
return Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
format!("Item {} not found in module's exported items", p.name),
|
||||
p.as_source_ranges(),
|
||||
)));
|
||||
@ -842,7 +888,7 @@ impl Node<Name> {
|
||||
};
|
||||
|
||||
let KclValue::Module { value: module_id, .. } = value else {
|
||||
return Err(KclError::Semantic(KclErrorDetails::new(
|
||||
return Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
format!(
|
||||
"Identifier in path must refer to a module, found {}",
|
||||
value.human_friendly_type()
|
||||
@ -888,7 +934,7 @@ impl Node<Name> {
|
||||
|
||||
// Either item or module is defined, but not exported.
|
||||
debug_assert!((item_value.is_ok() && !item_exported) || (mod_value.is_ok() && !mod_exported));
|
||||
Err(KclError::Semantic(KclErrorDetails::new(
|
||||
Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
format!("Item {} not found in module's exported items", self.name.name),
|
||||
self.name.as_source_ranges(),
|
||||
)))
|
||||
@ -913,14 +959,17 @@ impl Node<MemberExpression> {
|
||||
if let Some(value) = map.get(&property) {
|
||||
Ok(value.to_owned())
|
||||
} else {
|
||||
Err(KclError::UndefinedValue(KclErrorDetails::new(
|
||||
format!("Property '{property}' not found in object"),
|
||||
vec![self.clone().into()],
|
||||
)))
|
||||
Err(KclError::new_undefined_value(
|
||||
KclErrorDetails::new(
|
||||
format!("Property '{property}' not found in object"),
|
||||
vec![self.clone().into()],
|
||||
),
|
||||
None,
|
||||
))
|
||||
}
|
||||
}
|
||||
(KclValue::Object { .. }, Property::String(property), true) => {
|
||||
Err(KclError::Semantic(KclErrorDetails::new(
|
||||
Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
format!("Cannot index object with string; use dot notation instead, e.g. `obj.{property}`"),
|
||||
vec![self.clone().into()],
|
||||
)))
|
||||
@ -928,7 +977,7 @@ impl Node<MemberExpression> {
|
||||
(KclValue::Object { .. }, p, _) => {
|
||||
let t = p.type_name();
|
||||
let article = article_for(t);
|
||||
Err(KclError::Semantic(KclErrorDetails::new(
|
||||
Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
format!("Only strings can be used as the property of an object, but you're using {article} {t}",),
|
||||
vec![self.clone().into()],
|
||||
)))
|
||||
@ -938,10 +987,13 @@ impl Node<MemberExpression> {
|
||||
if let Some(value) = value_of_arr {
|
||||
Ok(value.to_owned())
|
||||
} else {
|
||||
Err(KclError::UndefinedValue(KclErrorDetails::new(
|
||||
format!("The array doesn't have any item at index {index}"),
|
||||
vec![self.clone().into()],
|
||||
)))
|
||||
Err(KclError::new_undefined_value(
|
||||
KclErrorDetails::new(
|
||||
format!("The array doesn't have any item at index {index}"),
|
||||
vec![self.clone().into()],
|
||||
),
|
||||
None,
|
||||
))
|
||||
}
|
||||
}
|
||||
// Singletons and single-element arrays should be interchangeable, but only indexing by 0 should work.
|
||||
@ -950,7 +1002,7 @@ impl Node<MemberExpression> {
|
||||
(KclValue::HomArray { .. }, p, _) => {
|
||||
let t = p.type_name();
|
||||
let article = article_for(t);
|
||||
Err(KclError::Semantic(KclErrorDetails::new(
|
||||
Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
format!("Only integers >= 0 can be used as the index of an array, but you're using {article} {t}",),
|
||||
vec![self.clone().into()],
|
||||
)))
|
||||
@ -971,7 +1023,7 @@ impl Node<MemberExpression> {
|
||||
(being_indexed, _, _) => {
|
||||
let t = being_indexed.human_friendly_type();
|
||||
let article = article_for(&t);
|
||||
Err(KclError::Semantic(KclErrorDetails::new(
|
||||
Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
format!("Only arrays can be indexed, but you're trying to index {article} {t}"),
|
||||
vec![self.clone().into()],
|
||||
)))
|
||||
@ -1049,7 +1101,7 @@ impl Node<BinaryExpression> {
|
||||
meta: _,
|
||||
} = left_value
|
||||
else {
|
||||
return Err(KclError::Semantic(KclErrorDetails::new(
|
||||
return Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
format!(
|
||||
"Cannot apply logical operator to non-boolean value: {}",
|
||||
left_value.human_friendly_type()
|
||||
@ -1062,7 +1114,7 @@ impl Node<BinaryExpression> {
|
||||
meta: _,
|
||||
} = right_value
|
||||
else {
|
||||
return Err(KclError::Semantic(KclErrorDetails::new(
|
||||
return Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
format!(
|
||||
"Cannot apply logical operator to non-boolean value: {}",
|
||||
right_value.human_friendly_type()
|
||||
@ -1168,7 +1220,7 @@ impl Node<UnaryExpression> {
|
||||
meta: _,
|
||||
} = value
|
||||
else {
|
||||
return Err(KclError::Semantic(KclErrorDetails::new(
|
||||
return Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
format!(
|
||||
"Cannot apply unary operator ! to non-boolean value: {}",
|
||||
value.human_friendly_type()
|
||||
@ -1189,7 +1241,7 @@ impl Node<UnaryExpression> {
|
||||
|
||||
let value = &self.argument.get_result(exec_state, ctx).await?;
|
||||
let err = || {
|
||||
KclError::Semantic(KclErrorDetails::new(
|
||||
KclError::new_semantic(KclErrorDetails::new(
|
||||
format!(
|
||||
"You can only negate numbers, planes, or lines, but this is a {}",
|
||||
value.human_friendly_type()
|
||||
@ -1292,7 +1344,7 @@ pub(crate) async fn execute_pipe_body(
|
||||
ctx: &ExecutorContext,
|
||||
) -> Result<KclValue, KclError> {
|
||||
let Some((first, body)) = body.split_first() else {
|
||||
return Err(KclError::Semantic(KclErrorDetails::new(
|
||||
return Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
"Pipe expressions cannot be empty".to_owned(),
|
||||
vec![source_range],
|
||||
)));
|
||||
@ -1311,7 +1363,7 @@ pub(crate) async fn execute_pipe_body(
|
||||
// Now that we've evaluated the first child expression in the pipeline, following child expressions
|
||||
// should use the previous child expression for %.
|
||||
// This means there's no more need for the previous pipe_value from the parent AST node above this one.
|
||||
let previous_pipe_value = std::mem::replace(&mut exec_state.mod_local.pipe_value, Some(output));
|
||||
let previous_pipe_value = exec_state.mod_local.pipe_value.replace(output);
|
||||
// Evaluate remaining elements.
|
||||
let result = inner_execute_pipe_body(exec_state, body, ctx).await;
|
||||
// Restore the previous pipe value.
|
||||
@ -1330,7 +1382,7 @@ async fn inner_execute_pipe_body(
|
||||
) -> Result<KclValue, KclError> {
|
||||
for expression in body {
|
||||
if let Expr::TagDeclarator(_) = expression {
|
||||
return Err(KclError::Semantic(KclErrorDetails::new(
|
||||
return Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
format!("This cannot be in a PipeExpression: {:?}", expression),
|
||||
vec![expression.into()],
|
||||
)));
|
||||
@ -1404,7 +1456,7 @@ impl Node<ArrayRangeExpression> {
|
||||
.await?;
|
||||
let (start, start_ty) = start_val
|
||||
.as_int_with_ty()
|
||||
.ok_or(KclError::Semantic(KclErrorDetails::new(
|
||||
.ok_or(KclError::new_semantic(KclErrorDetails::new(
|
||||
format!("Expected int but found {}", start_val.human_friendly_type()),
|
||||
vec![self.into()],
|
||||
)))?;
|
||||
@ -1412,24 +1464,26 @@ impl Node<ArrayRangeExpression> {
|
||||
let end_val = ctx
|
||||
.execute_expr(&self.end_element, exec_state, &metadata, &[], StatementKind::Expression)
|
||||
.await?;
|
||||
let (end, end_ty) = end_val.as_int_with_ty().ok_or(KclError::Semantic(KclErrorDetails::new(
|
||||
format!("Expected int but found {}", end_val.human_friendly_type()),
|
||||
vec![self.into()],
|
||||
)))?;
|
||||
let (end, end_ty) = end_val
|
||||
.as_int_with_ty()
|
||||
.ok_or(KclError::new_semantic(KclErrorDetails::new(
|
||||
format!("Expected int but found {}", end_val.human_friendly_type()),
|
||||
vec![self.into()],
|
||||
)))?;
|
||||
|
||||
if start_ty != end_ty {
|
||||
let start = start_val.as_ty_f64().unwrap_or(TyF64 { n: 0.0, ty: start_ty });
|
||||
let start = fmt::human_display_number(start.n, start.ty);
|
||||
let end = end_val.as_ty_f64().unwrap_or(TyF64 { n: 0.0, ty: end_ty });
|
||||
let end = fmt::human_display_number(end.n, end.ty);
|
||||
return Err(KclError::Semantic(KclErrorDetails::new(
|
||||
return Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
format!("Range start and end must be of the same type, but found {start} and {end}"),
|
||||
vec![self.into()],
|
||||
)));
|
||||
}
|
||||
|
||||
if end < start {
|
||||
return Err(KclError::Semantic(KclErrorDetails::new(
|
||||
return Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
format!("Range start is greater than range end: {start} .. {end}"),
|
||||
vec![self.into()],
|
||||
)));
|
||||
@ -1493,7 +1547,7 @@ fn article_for<S: AsRef<str>>(s: S) -> &'static str {
|
||||
fn number_as_f64(v: &KclValue, source_range: SourceRange) -> Result<TyF64, KclError> {
|
||||
v.as_ty_f64().ok_or_else(|| {
|
||||
let actual_type = v.human_friendly_type();
|
||||
KclError::Semantic(KclErrorDetails::new(
|
||||
KclError::new_semantic(KclErrorDetails::new(
|
||||
format!("Expected a number, but found {actual_type}",),
|
||||
vec![source_range],
|
||||
))
|
||||
@ -1585,13 +1639,13 @@ impl Property {
|
||||
if let Some(x) = crate::try_f64_to_usize(value) {
|
||||
Ok(Property::UInt(x))
|
||||
} else {
|
||||
Err(KclError::Semantic(KclErrorDetails::new(
|
||||
Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
format!("{value} is not a valid index, indices must be whole numbers >= 0"),
|
||||
property_sr,
|
||||
)))
|
||||
}
|
||||
}
|
||||
_ => Err(KclError::Semantic(KclErrorDetails::new(
|
||||
_ => Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
"Only numbers (>= 0) can be indexes".to_owned(),
|
||||
vec![sr],
|
||||
))),
|
||||
@ -1602,7 +1656,8 @@ impl Property {
|
||||
}
|
||||
|
||||
fn jvalue_to_prop(value: &KclValue, property_sr: Vec<SourceRange>, name: &str) -> Result<Property, KclError> {
|
||||
let make_err = |message: String| Err::<Property, _>(KclError::Semantic(KclErrorDetails::new(message, property_sr)));
|
||||
let make_err =
|
||||
|message: String| Err::<Property, _>(KclError::new_semantic(KclErrorDetails::new(message, property_sr)));
|
||||
match value {
|
||||
KclValue::Number{value: num, .. } => {
|
||||
let num = *num;
|
||||
@ -1846,7 +1901,7 @@ d = b + c
|
||||
crate::engine::conn_mock::EngineConnection::new()
|
||||
.await
|
||||
.map_err(|err| {
|
||||
KclError::Internal(KclErrorDetails::new(
|
||||
KclError::new_internal(KclErrorDetails::new(
|
||||
format!("Failed to create mock engine connection: {}", err),
|
||||
vec![SourceRange::default()],
|
||||
))
|
||||
@ -1858,7 +1913,6 @@ d = b + c
|
||||
project_directory: Some(crate::TypedPath(tmpdir.path().into())),
|
||||
..Default::default()
|
||||
},
|
||||
stdlib: Arc::new(crate::std::StdLib::new()),
|
||||
context_type: ContextType::Mock,
|
||||
};
|
||||
let mut exec_state = ExecState::new(&exec_ctxt);
|
||||
|
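One behavioral change buried in the executor hunks above: while a variable declaration's right-hand side is evaluated, the executor records the name being declared, and an undefined-value error for that same name is rewritten into a "you can't use `x` while you're defining it" message. The save/evaluate/restore shape is sketched below with hypothetical stand-in types (ModuleCtx, EvalError, declare); kcl-lib threads the real version through ExecState::mod_local and KclError::UndefinedValue.

// Simplified stand-ins; not kcl-lib's actual ExecState or error types.
#[derive(Default)]
struct ModuleCtx {
    // Name currently being declared, if any.
    being_declared: Option<String>,
}

#[derive(Debug)]
enum EvalError {
    Undefined { name: String, message: String },
}

// If the undefined name is exactly the one being declared, specialize the message.
fn var_in_own_ref_err(e: EvalError, being_declared: &Option<String>) -> EvalError {
    let EvalError::Undefined { name, message } = e;
    let message = if being_declared.as_deref() == Some(name.as_str()) {
        format!("You can't use `{name}` because you're currently trying to define it. Use a different variable here instead.")
    } else {
        message
    };
    EvalError::Undefined { name, message }
}

fn declare(
    ctx: &mut ModuleCtx,
    lhs: &str,
    eval_rhs: impl FnOnce(&mut ModuleCtx) -> Result<i64, EvalError>,
) -> Result<i64, EvalError> {
    // Save the outer declaration context, mark this one, and restore it even if
    // evaluation fails, mirroring the prev_being_declared handling above.
    let prev = ctx.being_declared.replace(lhs.to_owned());
    let result = eval_rhs(ctx);
    let being_declared = std::mem::replace(&mut ctx.being_declared, prev);
    result.map_err(|e| var_in_own_ref_err(e, &being_declared))
}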
@ -2,7 +2,6 @@ use async_recursion::async_recursion;
|
||||
use indexmap::IndexMap;
|
||||
|
||||
use crate::{
|
||||
docs::StdLibFn,
|
||||
errors::{KclError, KclErrorDetails},
|
||||
execution::{
|
||||
cad_op::{Group, OpArg, OpKclValue, Operation},
|
||||
@ -184,40 +183,6 @@ impl<'a> From<&'a FunctionSource> for FunctionDefinition<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&dyn StdLibFn> for FunctionDefinition<'static> {
|
||||
fn from(value: &dyn StdLibFn) -> Self {
|
||||
let mut input_arg = None;
|
||||
let mut named_args = IndexMap::new();
|
||||
for a in value.args(false) {
|
||||
if !a.label_required {
|
||||
input_arg = Some((a.name.clone(), None));
|
||||
continue;
|
||||
}
|
||||
|
||||
named_args.insert(
|
||||
a.name.clone(),
|
||||
(
|
||||
if a.required {
|
||||
None
|
||||
} else {
|
||||
Some(DefaultParamVal::none())
|
||||
},
|
||||
None,
|
||||
),
|
||||
);
|
||||
}
|
||||
FunctionDefinition {
|
||||
input_arg,
|
||||
named_args,
|
||||
return_type: None,
|
||||
deprecated: value.deprecated(),
|
||||
include_in_feature_tree: value.feature_tree_operation(),
|
||||
is_std: true,
|
||||
body: FunctionBody::Rust(value.std_lib_fn()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Node<CallExpressionKw> {
|
||||
#[async_recursion]
|
||||
pub async fn execute(&self, exec_state: &mut ExecState, ctx: &ExecutorContext) -> Result<KclValue, KclError> {
|
||||
@ -274,59 +239,44 @@ impl Node<CallExpressionKw> {
|
||||
exec_state.pipe_value().map(|v| Arg::new(v.clone(), callsite)),
|
||||
);
|
||||
|
||||
match ctx.stdlib.get_rust_function(fn_name) {
|
||||
Some(func) => {
|
||||
let def: FunctionDefinition = (&*func).into();
|
||||
// All std lib functions return a value, so the unwrap is safe.
|
||||
def.call_kw(Some(func.name()), exec_state, ctx, args, callsite)
|
||||
.await
|
||||
.map(Option::unwrap)
|
||||
.map_err(|e| {
|
||||
// This is used for the backtrace display. We don't add
|
||||
// another location the way we do for user-defined
|
||||
// functions because the error uses the Args, which
|
||||
// already points here.
|
||||
e.set_last_backtrace_fn_name(Some(func.name()))
|
||||
})
|
||||
}
|
||||
None => {
|
||||
// Clone the function so that we can use a mutable reference to
|
||||
// exec_state.
|
||||
let func = fn_name.get_result(exec_state, ctx).await?.clone();
|
||||
// Clone the function so that we can use a mutable reference to
|
||||
// exec_state.
|
||||
let func = fn_name.get_result(exec_state, ctx).await?.clone();
|
||||
|
||||
let Some(fn_src) = func.as_function() else {
|
||||
return Err(KclError::Semantic(KclErrorDetails::new(
|
||||
"cannot call this because it isn't a function".to_string(),
|
||||
vec![callsite],
|
||||
)));
|
||||
};
|
||||
let Some(fn_src) = func.as_function() else {
|
||||
return Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
"cannot call this because it isn't a function".to_string(),
|
||||
vec![callsite],
|
||||
)));
|
||||
};
|
||||
|
||||
let return_value = fn_src
|
||||
.call_kw(Some(fn_name.to_string()), exec_state, ctx, args, callsite)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
// Add the call expression to the source ranges.
|
||||
//
|
||||
// TODO: Use the name that the function was defined
|
||||
// with, not the identifier it was used with.
|
||||
e.add_unwind_location(Some(fn_name.name.name.clone()), callsite)
|
||||
})?;
|
||||
let return_value = fn_src
|
||||
.call_kw(Some(fn_name.to_string()), exec_state, ctx, args, callsite)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
// Add the call expression to the source ranges.
|
||||
//
|
||||
// TODO: Use the name that the function was defined
|
||||
// with, not the identifier it was used with.
|
||||
e.add_unwind_location(Some(fn_name.name.name.clone()), callsite)
|
||||
})?;
|
||||
|
||||
let result = return_value.ok_or_else(move || {
|
||||
let mut source_ranges: Vec<SourceRange> = vec![callsite];
|
||||
// We want to send the source range of the original function.
|
||||
if let KclValue::Function { meta, .. } = func {
|
||||
source_ranges = meta.iter().map(|m| m.source_range).collect();
|
||||
};
|
||||
KclError::UndefinedValue(KclErrorDetails::new(
|
||||
format!("Result of user-defined function {} is undefined", fn_name),
|
||||
source_ranges,
|
||||
))
|
||||
})?;
|
||||
let result = return_value.ok_or_else(move || {
|
||||
let mut source_ranges: Vec<SourceRange> = vec![callsite];
|
||||
// We want to send the source range of the original function.
|
||||
if let KclValue::Function { meta, .. } = func {
|
||||
source_ranges = meta.iter().map(|m| m.source_range).collect();
|
||||
};
|
||||
KclError::new_undefined_value(
|
||||
KclErrorDetails::new(
|
||||
format!("Result of user-defined function {} is undefined", fn_name),
|
||||
source_ranges,
|
||||
),
|
||||
None,
|
||||
)
|
||||
})?;
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
}
|
||||
Ok(result)
|
||||
}
|
||||
}
|
||||
|
||||
@ -500,7 +450,7 @@ fn update_memory_for_tags_of_geometry(result: &mut KclValue, exec_state: &mut Ex
|
||||
let tag_id = if let Some(t) = value.sketch.tags.get(&tag.name) {
|
||||
let mut t = t.clone();
|
||||
let Some(info) = t.get_cur_info() else {
|
||||
return Err(KclError::Internal(KclErrorDetails::new(
|
||||
return Err(KclError::new_internal(KclErrorDetails::new(
|
||||
format!("Tag {} does not have path info", tag.name),
|
||||
vec![tag.into()],
|
||||
)));
|
||||
@ -600,30 +550,33 @@ fn type_check_params_kw(
|
||||
|
||||
for (label, arg) in &mut args.labeled {
|
||||
match fn_def.named_args.get(label) {
|
||||
Some((_, ty)) => {
|
||||
if let Some(ty) = ty {
|
||||
arg.value = arg
|
||||
.value
|
||||
.coerce(
|
||||
&RuntimeType::from_parsed(ty.clone(), exec_state, arg.source_range).map_err(|e| KclError::Semantic(e.into()))?,
|
||||
true,
|
||||
exec_state,
|
||||
)
|
||||
.map_err(|e| {
|
||||
let mut message = format!(
|
||||
"{label} requires a value with type `{}`, but found {}",
|
||||
ty,
|
||||
arg.value.human_friendly_type(),
|
||||
);
|
||||
if let Some(ty) = e.explicit_coercion {
|
||||
// TODO if we have access to the AST for the argument we could choose which example to suggest.
|
||||
message = format!("{message}\n\nYou may need to add information about the type of the argument, for example:\n using a numeric suffix: `42{ty}`\n or using type ascription: `foo(): number({ty})`");
|
||||
}
|
||||
KclError::Semantic(KclErrorDetails::new(
|
||||
message,
|
||||
vec![arg.source_range],
|
||||
))
|
||||
})?;
|
||||
Some((def, ty)) => {
|
||||
// For optional args, passing None should be the same as not passing an arg.
|
||||
if !(def.is_some() && matches!(arg.value, KclValue::KclNone { .. })) {
|
||||
if let Some(ty) = ty {
|
||||
arg.value = arg
|
||||
.value
|
||||
.coerce(
|
||||
&RuntimeType::from_parsed(ty.clone(), exec_state, arg.source_range).map_err(|e| KclError::new_semantic(e.into()))?,
|
||||
true,
|
||||
exec_state,
|
||||
)
|
||||
.map_err(|e| {
|
||||
let mut message = format!(
|
||||
"{label} requires a value with type `{}`, but found {}",
|
||||
ty,
|
||||
arg.value.human_friendly_type(),
|
||||
);
|
||||
if let Some(ty) = e.explicit_coercion {
|
||||
// TODO if we have access to the AST for the argument we could choose which example to suggest.
|
||||
message = format!("{message}\n\nYou may need to add information about the type of the argument, for example:\n using a numeric suffix: `42{ty}`\n or using type ascription: `foo(): number({ty})`");
|
||||
}
|
||||
KclError::new_semantic(KclErrorDetails::new(
|
||||
message,
|
||||
vec![arg.source_range],
|
||||
))
|
||||
})?;
|
||||
}
|
||||
}
|
||||
}
|
||||
None => {
|
||||
@ -670,7 +623,7 @@ fn type_check_params_kw(
|
||||
let first = errors.next().unwrap();
|
||||
errors.for_each(|e| exec_state.err(e));
|
||||
|
||||
return Err(KclError::Semantic(first.into()));
|
||||
return Err(KclError::new_semantic(first.into()));
|
||||
}
|
||||
|
||||
if let Some(arg) = &mut args.unlabeled {
|
||||
@ -680,12 +633,12 @@ fn type_check_params_kw(
|
||||
.value
|
||||
.coerce(
|
||||
&RuntimeType::from_parsed(ty.clone(), exec_state, arg.1.source_range)
|
||||
.map_err(|e| KclError::Semantic(e.into()))?,
|
||||
.map_err(|e| KclError::new_semantic(e.into()))?,
|
||||
true,
|
||||
exec_state,
|
||||
)
|
||||
.map_err(|_| {
|
||||
KclError::Semantic(KclErrorDetails::new(
|
||||
KclError::new_semantic(KclErrorDetails::new(
|
||||
format!(
|
||||
"The input argument of {} requires a value with type `{}`, but found {}",
|
||||
fn_name
|
||||
@ -703,7 +656,7 @@ fn type_check_params_kw(
|
||||
exec_state.err(CompilationError::err(
|
||||
arg.source_range,
|
||||
format!(
|
||||
"{} expects an unlabeled first parameter (`@{name}`), but it is labelled in the call",
|
||||
"{} expects an unlabeled first argument (`@{name}`), but it is labelled in the call",
|
||||
fn_name
|
||||
.map(|n| format!("The function `{}`", n))
|
||||
.unwrap_or_else(|| "This function".to_owned()),
|
||||
@ -742,7 +695,7 @@ fn assign_args_to_params_kw(
|
||||
.add(name.clone(), value, default_val.source_range())?;
|
||||
}
|
||||
None => {
|
||||
return Err(KclError::Semantic(KclErrorDetails::new(
|
||||
return Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
format!(
|
||||
"This function requires a parameter {}, but you haven't passed it one.",
|
||||
name
|
||||
@ -759,12 +712,12 @@ fn assign_args_to_params_kw(
|
||||
|
||||
let Some(unlabeled) = unlabelled else {
|
||||
return Err(if args.kw_args.labeled.contains_key(param_name) {
|
||||
KclError::Semantic(KclErrorDetails::new(
|
||||
KclError::new_semantic(KclErrorDetails::new(
|
||||
format!("The function does declare a parameter named '{param_name}', but this parameter doesn't use a label. Try removing the `{param_name}:`"),
|
||||
source_ranges,
|
||||
))
|
||||
} else {
|
||||
KclError::Semantic(KclErrorDetails::new(
|
||||
KclError::new_semantic(KclErrorDetails::new(
|
||||
"This function expects an unlabeled first parameter, but you haven't passed it one.".to_owned(),
|
||||
source_ranges,
|
||||
))
|
||||
@ -788,9 +741,9 @@ fn coerce_result_type(
|
||||
if let Ok(Some(val)) = result {
|
||||
if let Some(ret_ty) = &fn_def.return_type {
|
||||
let ty = RuntimeType::from_parsed(ret_ty.inner.clone(), exec_state, ret_ty.as_source_range())
|
||||
.map_err(|e| KclError::Semantic(e.into()))?;
|
||||
.map_err(|e| KclError::new_semantic(e.into()))?;
|
||||
let val = val.coerce(&ty, true, exec_state).map_err(|_| {
|
||||
KclError::Semantic(KclErrorDetails::new(
|
||||
KclError::new_semantic(KclErrorDetails::new(
|
||||
format!(
|
||||
"This function requires its result to be of type `{}`, but found {}",
|
||||
ty.human_friendly_type(),
|
||||
@ -874,7 +827,7 @@ mod test {
|
||||
"all params required, none given, should error",
|
||||
vec![req_param("x")],
|
||||
vec![],
|
||||
Err(KclError::Semantic(KclErrorDetails::new(
|
||||
Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
"This function requires a parameter x, but you haven't passed it one.".to_owned(),
|
||||
vec![SourceRange::default()],
|
||||
))),
|
||||
@ -889,7 +842,7 @@ mod test {
|
||||
"mixed params, too few given",
|
||||
vec![req_param("x"), opt_param("y")],
|
||||
vec![],
|
||||
Err(KclError::Semantic(KclErrorDetails::new(
|
||||
Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
"This function requires a parameter x, but you haven't passed it one.".to_owned(),
|
||||
vec![SourceRange::default()],
|
||||
))),
|
||||
@ -937,7 +890,6 @@ mod test {
|
||||
crate::engine::conn_mock::EngineConnection::new().await.unwrap(),
|
||||
)),
|
||||
fs: Arc::new(crate::fs::FileManager::new()),
|
||||
stdlib: Arc::new(crate::std::StdLib::new()),
|
||||
settings: Default::default(),
|
||||
context_type: ContextType::Mock,
|
||||
};
|
||||
|
@@ -24,6 +24,7 @@ type Point3D = kcmc::shared::Point3d<f64>;
 #[derive(Debug, Clone, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
 #[ts(export)]
 #[serde(tag = "type")]
+#[allow(clippy::large_enum_variant)]
 pub enum Geometry {
     Sketch(Sketch),
     Solid(Solid),
@@ -52,6 +53,7 @@ impl Geometry {
 #[derive(Debug, Clone, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
 #[ts(export)]
 #[serde(tag = "type")]
+#[allow(clippy::large_enum_variant)]
 pub enum GeometryWithImportedGeometry {
     Sketch(Sketch),
     Solid(Solid),
@@ -469,7 +471,7 @@ impl TryFrom<PlaneData> for PlaneInfo {
             PlaneData::NegYZ => PlaneName::NegYz,
             PlaneData::Plane(_) => {
                 // We will never get here since we already checked for PlaneData::Plane.
-                return Err(KclError::Internal(KclErrorDetails::new(
+                return Err(KclError::new_internal(KclErrorDetails::new(
                     format!("PlaneData {:?} not found", value),
                     Default::default(),
                 )));
@@ -477,7 +479,7 @@ impl TryFrom<PlaneData> for PlaneInfo {
         };
 
         let info = DEFAULT_PLANE_INFO.get(&name).ok_or_else(|| {
-            KclError::Internal(KclErrorDetails::new(
+            KclError::new_internal(KclErrorDetails::new(
                 format!("Plane {} not found", name),
                 Default::default(),
             ))
@@ -37,25 +37,25 @@ pub async fn import_foreign(
 ) -> Result<PreImportedGeometry, KclError> {
     // Make sure the file exists.
     if !ctxt.fs.exists(file_path, source_range).await? {
-        return Err(KclError::Semantic(KclErrorDetails::new(
+        return Err(KclError::new_semantic(KclErrorDetails::new(
             format!("File `{}` does not exist.", file_path.display()),
             vec![source_range],
         )));
     }
 
     let ext_format = get_import_format_from_extension(file_path.extension().ok_or_else(|| {
-        KclError::Semantic(KclErrorDetails::new(
+        KclError::new_semantic(KclErrorDetails::new(
             format!("No file extension found for `{}`", file_path.display()),
             vec![source_range],
         ))
     })?)
-    .map_err(|e| KclError::Semantic(KclErrorDetails::new(e.to_string(), vec![source_range])))?;
+    .map_err(|e| KclError::new_semantic(KclErrorDetails::new(e.to_string(), vec![source_range])))?;
 
     // Get the format type from the extension of the file.
     let format = if let Some(format) = format {
         // Validate the given format with the extension format.
         validate_extension_format(ext_format, format.clone())
-            .map_err(|e| KclError::Semantic(KclErrorDetails::new(e.to_string(), vec![source_range])))?;
+            .map_err(|e| KclError::new_semantic(KclErrorDetails::new(e.to_string(), vec![source_range])))?;
         format
     } else {
         ext_format
@@ -66,11 +66,11 @@ pub async fn import_foreign(
         .fs
         .read(file_path, source_range)
         .await
-        .map_err(|e| KclError::Semantic(KclErrorDetails::new(e.to_string(), vec![source_range])))?;
+        .map_err(|e| KclError::new_semantic(KclErrorDetails::new(e.to_string(), vec![source_range])))?;
 
     // We want the file_path to be without the parent.
     let file_name = file_path.file_name().map(|p| p.to_string()).ok_or_else(|| {
-        KclError::Semantic(KclErrorDetails::new(
+        KclError::new_semantic(KclErrorDetails::new(
             format!("Could not get the file name from the path `{}`", file_path.display()),
             vec![source_range],
         ))
@@ -87,7 +87,7 @@ pub async fn import_foreign(
     // file.
     if !file_contents.starts_with(b"glTF") {
         let json = gltf_json::Root::from_slice(&file_contents)
-            .map_err(|e| KclError::Semantic(KclErrorDetails::new(e.to_string(), vec![source_range])))?;
+            .map_err(|e| KclError::new_semantic(KclErrorDetails::new(e.to_string(), vec![source_range])))?;
 
         // Read the gltf file and check if there is a bin file.
         for buffer in json.buffers.iter() {
@@ -95,16 +95,15 @@ pub async fn import_foreign(
             if !uri.starts_with("data:") {
                 // We want this path relative to the file_path given.
                 let bin_path = file_path.parent().map(|p| p.join(uri)).ok_or_else(|| {
-                    KclError::Semantic(KclErrorDetails::new(
+                    KclError::new_semantic(KclErrorDetails::new(
                         format!("Could not get the parent path of the file `{}`", file_path.display()),
                         vec![source_range],
                     ))
                 })?;
 
-                let bin_contents =
-                    ctxt.fs.read(&bin_path, source_range).await.map_err(|e| {
-                        KclError::Semantic(KclErrorDetails::new(e.to_string(), vec![source_range]))
-                    })?;
+                let bin_contents = ctxt.fs.read(&bin_path, source_range).await.map_err(|e| {
+                    KclError::new_semantic(KclErrorDetails::new(e.to_string(), vec![source_range]))
+                })?;
 
                 import_files.push(ImportFile {
                     path: uri.to_string(),
@@ -141,7 +140,7 @@ pub(super) fn format_from_annotations(
             if p.key.name == annotations::IMPORT_FORMAT {
                 result = Some(
                     get_import_format_from_extension(annotations::expect_ident(&p.value)?).map_err(|_| {
-                        KclError::Semantic(KclErrorDetails::new(
+                        KclError::new_semantic(KclErrorDetails::new(
                             format!(
                                 "Unknown format for import, expected one of: {}",
                                 crate::IMPORT_FILE_EXTENSIONS.join(", ")
@@ -159,7 +158,7 @@ pub(super) fn format_from_annotations(
             path.extension()
                 .and_then(|ext| get_import_format_from_extension(ext).ok())
         })
-        .ok_or(KclError::Semantic(KclErrorDetails::new(
+        .ok_or(KclError::new_semantic(KclErrorDetails::new(
             "Unknown or missing extension, and no specified format for imported file".to_owned(),
             vec![import_source_range],
         )))?;
@@ -174,7 +173,7 @@ pub(super) fn format_from_annotations(
             }
             annotations::IMPORT_FORMAT => {}
             _ => {
-                return Err(KclError::Semantic(KclErrorDetails::new(
+                return Err(KclError::new_semantic(KclErrorDetails::new(
                     format!(
                         "Unexpected annotation for import, expected one of: {}, {}, {}",
                         annotations::IMPORT_FORMAT,
@@ -199,7 +198,7 @@ fn set_coords(fmt: &mut InputFormat3d, coords_str: &str, source_range: SourceRan
     }
 
     let Some(coords) = coords else {
-        return Err(KclError::Semantic(KclErrorDetails::new(
+        return Err(KclError::new_semantic(KclErrorDetails::new(
            format!(
                "Unknown coordinate system: {coords_str}, expected one of: {}",
                annotations::IMPORT_COORDS_VALUES
@@ -217,7 +216,7 @@ fn set_coords(fmt: &mut InputFormat3d, coords_str: &str, source_range: SourceRan
         InputFormat3d::Ply(opts) => opts.coords = coords,
         InputFormat3d::Stl(opts) => opts.coords = coords,
         _ => {
-            return Err(KclError::Semantic(KclErrorDetails::new(
+            return Err(KclError::new_semantic(KclErrorDetails::new(
                 format!(
                     "`{}` option cannot be applied to the specified format",
                     annotations::IMPORT_COORDS
@@ -238,7 +237,7 @@ fn set_length_unit(fmt: &mut InputFormat3d, units_str: &str, source_range: Sourc
         InputFormat3d::Ply(opts) => opts.units = units.into(),
         InputFormat3d::Stl(opts) => opts.units = units.into(),
         _ => {
-            return Err(KclError::Semantic(KclErrorDetails::new(
+            return Err(KclError::new_semantic(KclErrorDetails::new(
                 format!(
                     "`{}` option cannot be applied to the specified format",
                     annotations::IMPORT_LENGTH_UNIT
358
rust/kcl-lib/src/execution/import_graph.rs
Normal file
358
rust/kcl-lib/src/execution/import_graph.rs
Normal file
@ -0,0 +1,358 @@
use std::{
collections::HashMap,
sync::{Arc, Mutex},
};

use anyhow::Result;

use crate::{
errors::KclErrorDetails,
execution::typed_path::TypedPath,
modules::{ModulePath, ModuleRepr},
parsing::ast::types::{ImportPath, ImportStatement, Node as AstNode},
walk::{Node, Visitable},
ExecState, ExecutorContext, KclError, ModuleId, SourceRange,
};

/// Specific dependency between two modules. The 0th element of this info
/// is the "importing" module, the 1st is the "imported" module. The 0th
/// module *depends on* the 1st module.
type Dependency = (String, String);

type Graph = Vec<Dependency>;

pub(crate) type DependencyInfo = (AstNode<ImportStatement>, ModuleId, ModulePath, ModuleRepr);
pub(crate) type UniverseMap = HashMap<TypedPath, AstNode<ImportStatement>>;
pub(crate) type Universe = HashMap<String, DependencyInfo>;

/// Process a number of programs, returning the graph of dependencies.
///
/// This will (currently) return a list of lists of IDs that can be safely
/// run concurrently. Each "stage" is blocking in this model, which will
/// change in the future. Don't use this function widely, yet.
#[allow(clippy::iter_over_hash_type)]
pub(crate) fn import_graph(progs: &Universe, ctx: &ExecutorContext) -> Result<Vec<Vec<String>>, KclError> {
let mut graph = Graph::new();

for (name, (_, _, path, repr)) in progs.iter() {
graph.extend(
import_dependencies(path, repr, ctx)?
.into_iter()
.map(|(dependency, _, _)| (name.clone(), dependency))
.collect::<Vec<_>>(),
);
}

let all_modules: Vec<&str> = progs.keys().map(|v| v.as_str()).collect();
topsort(&all_modules, graph)
}

#[allow(clippy::iter_over_hash_type)]
fn topsort(all_modules: &[&str], graph: Graph) -> Result<Vec<Vec<String>>, KclError> {
if all_modules.is_empty() {
return Ok(vec![]);
}
let mut dep_map = HashMap::<String, Vec<String>>::new();

for (dependent, dependency) in graph.iter() {
let mut dependencies = dep_map.remove(dependent).unwrap_or_default();
dependencies.push(dependency.to_owned());
dep_map.insert(dependent.to_owned(), dependencies);
}

// dep_map now contains reverse dependencies. For each module, it's a
// list of what things are "waiting on it". A non-empty value for a key
// means it's currently blocked.

let mut waiting_modules = all_modules.to_owned();
let mut order = vec![];

loop {
// Each pass through we need to find any modules which have nothing
// "pointing at it" -- so-called reverse dependencies. This is an entry
// that is either not in the dep_map OR an empty list.

let mut stage_modules: Vec<String> = vec![];

for module in &waiting_modules {
let module = module.to_string();
if dep_map.get(&module).map(|v| v.len()).unwrap_or(0) == 0 {
// if it's None or empty, this is a node that we can process,
// and remove from the graph.
stage_modules.push(module.to_string());
}
}

for stage_module in &stage_modules {
// remove the ready-to-run module from the waiting list
waiting_modules.retain(|v| *v != stage_module.as_str());

// remove any dependencies for the next run
for (_, waiting_for) in dep_map.iter_mut() {
waiting_for.retain(|v| v != stage_module);
}
}

if stage_modules.is_empty() {
waiting_modules.sort();

return Err(KclError::new_import_cycle(KclErrorDetails::new(
format!("circular import of modules not allowed: {}", waiting_modules.join(", ")),
// TODO: we can get the right import lines from the AST, but we don't
vec![SourceRange::default()],
)));
}

// not strictly needed here, but perhaps helpful to avoid thinking
// there's any implied ordering as well as helping to make tests
// easier.
stage_modules.sort();

order.push(stage_modules);

if waiting_modules.is_empty() {
break;
}
}

Ok(order)
}

type ImportDependencies = Vec<(String, AstNode<ImportStatement>, ModulePath)>;

fn import_dependencies(
path: &ModulePath,
repr: &ModuleRepr,
ctx: &ExecutorContext,
) -> Result<ImportDependencies, KclError> {
let ModuleRepr::Kcl(prog, _) = repr else {
// It has no dependencies, so return an empty list.
return Ok(vec![]);
};

let ret = Arc::new(Mutex::new(vec![]));
fn walk(
ret: Arc<Mutex<ImportDependencies>>,
node: Node<'_>,
import_from: &ModulePath,
ctx: &ExecutorContext,
) -> Result<(), KclError> {
if let Node::ImportStatement(is) = node {
// We only care about Kcl and Foreign imports for now.
let resolved_path = ModulePath::from_import_path(&is.path, &ctx.settings.project_directory, import_from)?;
match &is.path {
ImportPath::Kcl { filename } => {
// We need to lock the mutex to push the dependency.
// This is a bit of a hack, but it works for now.
ret.lock()
.map_err(|err| {
KclError::new_internal(KclErrorDetails::new(
format!("Failed to lock mutex: {}", err),
Default::default(),
))
})?
.push((filename.to_string(), is.clone(), resolved_path));
}
ImportPath::Foreign { path } => {
ret.lock()
.map_err(|err| {
KclError::new_internal(KclErrorDetails::new(
format!("Failed to lock mutex: {}", err),
Default::default(),
))
})?
.push((path.to_string(), is.clone(), resolved_path));
}
ImportPath::Std { .. } => { // do nothing
}
}
}

for child in node.children().iter() {
walk(ret.clone(), *child, import_from, ctx)?;
}

Ok(())
}

walk(ret.clone(), prog.into(), path, ctx)?;

let ret = ret.lock().map_err(|err| {
KclError::new_internal(KclErrorDetails::new(
format!("Failed to lock mutex: {}", err),
Default::default(),
))
})?;

Ok(ret.clone())
}

/// Mutates the `out` universe with the imported modules. Returns the imports of
/// only `repr`'s non-transitive imports.
pub(crate) async fn import_universe(
ctx: &ExecutorContext,
path: &ModulePath,
repr: &ModuleRepr,
out: &mut Universe,
exec_state: &mut ExecState,
) -> Result<UniverseMap, KclError> {
let modules = import_dependencies(path, repr, ctx)?;
let mut module_imports = HashMap::new();
for (filename, import_stmt, module_path) in modules {
match &module_path {
ModulePath::Main => {
// We only care about what the root module imports.
}
ModulePath::Local { value, .. } => {
module_imports.insert(value.clone(), import_stmt.clone());
}
ModulePath::Std { .. } => {
// We don't care about std imports.
}
}

if out.contains_key(&filename) {
continue;
}

let source_range = SourceRange::from(&import_stmt);
let attrs = &import_stmt.outer_attrs;
let module_id = ctx
.open_module(&import_stmt.path, attrs, &module_path, exec_state, source_range)
.await?;

let repr = {
let Some(module_info) = exec_state.get_module(module_id) else {
return Err(KclError::new_internal(KclErrorDetails::new(
format!("Module {} not found", module_id),
vec![import_stmt.into()],
)));
};
module_info.repr.clone()
};

out.insert(filename, (import_stmt, module_id, module_path.clone(), repr.clone()));
Box::pin(import_universe(ctx, &module_path, &repr, out, exec_state)).await?;
}

Ok(module_imports)
}

#[cfg(test)]
mod tests {
use super::*;
use crate::parsing::ast::types::{ImportSelector, Program};

macro_rules! kcl {
( $kcl:expr ) => {{
$crate::parsing::top_level_parse($kcl).unwrap()
}};
}

fn into_module_info(program: AstNode<Program>) -> DependencyInfo {
(
AstNode::no_src(ImportStatement {
selector: ImportSelector::None { alias: None },
path: ImportPath::Kcl { filename: "".into() },
visibility: Default::default(),
digest: None,
}),
ModuleId::default(),
ModulePath::Local { value: "".into() },
ModuleRepr::Kcl(program, None),
)
}

#[tokio::test]
async fn order_imports() {
let mut modules = HashMap::new();

let a = kcl!("");
modules.insert("a.kcl".to_owned(), into_module_info(a));

let b = kcl!(
"
import \"a.kcl\"
"
);
modules.insert("b.kcl".to_owned(), into_module_info(b));

let ctx = ExecutorContext::new_mock(None).await;
let order = import_graph(&modules, &ctx).unwrap();
assert_eq!(vec![vec!["a.kcl".to_owned()], vec!["b.kcl".to_owned()]], order);
}

#[tokio::test]
async fn order_imports_none() {
let mut modules = HashMap::new();

let a = kcl!(
"
y = 2
"
);
modules.insert("a.kcl".to_owned(), into_module_info(a));

let b = kcl!(
"
x = 1
"
);
modules.insert("b.kcl".to_owned(), into_module_info(b));

let ctx = ExecutorContext::new_mock(None).await;
let order = import_graph(&modules, &ctx).unwrap();
assert_eq!(vec![vec!["a.kcl".to_owned(), "b.kcl".to_owned()]], order);
}

#[tokio::test]
async fn order_imports_2() {
let mut modules = HashMap::new();

let a = kcl!("");
modules.insert("a.kcl".to_owned(), into_module_info(a));

let b = kcl!(
"
import \"a.kcl\"
"
);
modules.insert("b.kcl".to_owned(), into_module_info(b));

let c = kcl!(
"
import \"a.kcl\"
"
);
modules.insert("c.kcl".to_owned(), into_module_info(c));

let ctx = ExecutorContext::new_mock(None).await;
let order = import_graph(&modules, &ctx).unwrap();
assert_eq!(
vec![vec!["a.kcl".to_owned()], vec!["b.kcl".to_owned(), "c.kcl".to_owned()]],
order
);
}

#[tokio::test]
async fn order_imports_cycle() {
let mut modules = HashMap::new();

let a = kcl!(
"
import \"b.kcl\"
"
);
modules.insert("a.kcl".to_owned(), into_module_info(a));

let b = kcl!(
"
import \"a.kcl\"
"
);
modules.insert("b.kcl".to_owned(), into_module_info(b));

let ctx = ExecutorContext::new_mock(None).await;
import_graph(&modules, &ctx).unwrap_err();
}
}
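The new `topsort` above batches Kahn's algorithm into stages: every module whose imports are already satisfied lands in the same stage, and a pass that frees nothing signals a circular import. A minimal, self-contained sketch of that staging idea follows (illustrative only, not part of this commit; the `stages` helper and the module names are made up):

// Illustrative only — a dependency-free sketch of level-by-level topological
// ordering, mirroring what `topsort` does for KCL modules.
use std::collections::HashMap;

fn stages(modules: &[&str], deps: &[(&str, &str)]) -> Option<Vec<Vec<String>>> {
    // Count how many unresolved dependencies each module still has.
    let mut pending: HashMap<&str, usize> = modules.iter().map(|m| (*m, 0)).collect();
    for (importer, _) in deps {
        *pending.get_mut(importer)? += 1;
    }
    let mut done: Vec<Vec<String>> = Vec::new();
    let mut remaining: Vec<&str> = modules.to_vec();
    while !remaining.is_empty() {
        // Everything with no unresolved dependencies can run in this stage.
        let stage: Vec<&str> = remaining.iter().copied().filter(|m| pending[m] == 0).collect();
        if stage.is_empty() {
            return None; // a pass that frees nothing means a cycle
        }
        // Anything that imported a module in this stage now has one fewer blocker.
        for (importer, imported) in deps {
            if stage.contains(imported) {
                *pending.get_mut(importer)? -= 1;
            }
        }
        remaining.retain(|m| !stage.contains(m));
        done.push(stage.into_iter().map(String::from).collect());
    }
    Some(done)
}

fn main() {
    // b.kcl and c.kcl both import a.kcl, so a.kcl runs alone in stage one.
    let order = stages(&["a.kcl", "b.kcl", "c.kcl"], &[("b.kcl", "a.kcl"), ("c.kcl", "a.kcl")]);
    assert_eq!(
        order,
        Some(vec![vec!["a.kcl".to_string()], vec!["b.kcl".to_string(), "c.kcl".to_string()]])
    );
}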
@ -574,7 +574,7 @@ impl KclValue {
pub fn get_tag_identifier(&self) -> Result<TagIdentifier, KclError> {
match self {
KclValue::TagIdentifier(t) => Ok(*t.clone()),
_ => Err(KclError::Semantic(KclErrorDetails::new(
_ => Err(KclError::new_semantic(KclErrorDetails::new(
format!("Not a tag identifier: {:?}", self),
self.clone().into(),
))),
@ -585,7 +585,7 @@ impl KclValue {
pub fn get_tag_declarator(&self) -> Result<TagNode, KclError> {
match self {
KclValue::TagDeclarator(t) => Ok((**t).clone()),
_ => Err(KclError::Semantic(KclErrorDetails::new(
_ => Err(KclError::new_semantic(KclErrorDetails::new(
format!("Not a tag declarator: {:?}", self),
self.clone().into(),
))),
@ -595,7 +595,7 @@ impl KclValue {
/// If this KCL value is a bool, retrieve it.
pub fn get_bool(&self) -> Result<bool, KclError> {
self.as_bool().ok_or_else(|| {
KclError::Type(KclErrorDetails::new(
KclError::new_type(KclErrorDetails::new(
format!("Expected bool, found {}", self.human_friendly_type()),
self.into(),
))

@ -367,10 +367,10 @@ impl ProgramMemory {

let name = var.trim_start_matches(TYPE_PREFIX).trim_start_matches(MODULE_PREFIX);

Err(KclError::UndefinedValue(KclErrorDetails::new(
format!("`{name}` is not defined"),
vec![source_range],
)))
Err(KclError::new_undefined_value(
KclErrorDetails::new(format!("`{name}` is not defined"), vec![source_range]),
Some(name.to_owned()),
))
}

/// Iterate over all key/value pairs in the specified environment which satisfy the provided
@ -488,10 +488,10 @@ impl ProgramMemory {
};
}

Err(KclError::UndefinedValue(KclErrorDetails::new(
format!("`{}` is not defined", var),
vec![],
)))
Err(KclError::new_undefined_value(
KclErrorDetails::new(format!("`{}` is not defined", var), vec![]),
Some(var.to_owned()),
))
}
}

@ -646,7 +646,7 @@ impl Stack {
pub fn add(&mut self, key: String, value: KclValue, source_range: SourceRange) -> Result<(), KclError> {
let env = self.memory.get_env(self.current_env.index());
if env.contains_key(&key) {
return Err(KclError::ValueAlreadyDefined(KclErrorDetails::new(
return Err(KclError::new_value_already_defined(KclErrorDetails::new(
format!("Cannot redefine `{}`", key),
vec![source_range],
)));
@ -5,9 +5,8 @@ use std::sync::Arc;
use anyhow::Result;
#[cfg(feature = "artifact-graph")]
pub use artifact::{Artifact, ArtifactCommand, ArtifactGraph, CodeRef, StartSketchOnFace, StartSketchOnPlane};
use cache::OldAstState;
use cache::GlobalState;
pub use cache::{bust_cache, clear_mem_cache};
#[cfg(feature = "artifact-graph")]
pub use cad_op::{Group, Operation};
pub use geometry::*;
pub use id_generator::IdGenerator;
@ -27,13 +26,12 @@ use serde::{Deserialize, Serialize};
pub use state::{ExecState, MetaSettings};
use uuid::Uuid;

#[cfg(feature = "artifact-graph")]
use crate::execution::artifact::build_artifact_graph;
use crate::{
engine::EngineManager,
errors::{KclError, KclErrorDetails},
execution::{
cache::{CacheInformation, CacheResult},
import_graph::{Universe, UniverseMap},
typed_path::TypedPath,
types::{UnitAngle, UnitLen},
},
@ -41,8 +39,6 @@ use crate::{
modules::{ModuleId, ModulePath, ModuleRepr},
parsing::ast::types::{Expr, ImportPath, NodeRef},
source_range::SourceRange,
std::StdLib,
walk::{Universe, UniverseMap},
CompilationError, ExecError, KclErrorWithOutputs,
};

@ -56,6 +52,7 @@ pub mod fn_call;
mod geometry;
mod id_generator;
mod import;
mod import_graph;
pub(crate) mod kcl_value;
mod memory;
mod state;
@ -273,7 +270,6 @@ pub enum ContextType {
pub struct ExecutorContext {
pub engine: Arc<Box<dyn EngineManager>>,
pub fs: Arc<FileManager>,
pub stdlib: Arc<StdLib>,
pub settings: ExecutorSettings,
pub context_type: ContextType,
}
@ -412,7 +408,6 @@ impl ExecutorContext {
Ok(Self {
engine,
fs: Arc::new(FileManager::new()),
stdlib: Arc::new(StdLib::new()),
settings,
context_type: ContextType::Live,
})
@ -423,7 +418,6 @@ impl ExecutorContext {
ExecutorContext {
engine,
fs,
stdlib: Arc::new(StdLib::new()),
settings,
context_type: ContextType::Live,
}
@ -436,7 +430,6 @@ impl ExecutorContext {
crate::engine::conn_mock::EngineConnection::new().await.unwrap(),
)),
fs: Arc::new(FileManager::new()),
stdlib: Arc::new(StdLib::new()),
settings: settings.unwrap_or_default(),
context_type: ContextType::Mock,
}
@ -447,7 +440,6 @@ impl ExecutorContext {
ExecutorContext {
engine,
fs,
stdlib: Arc::new(StdLib::new()),
settings,
context_type: ContextType::Mock,
}
@ -458,7 +450,6 @@ impl ExecutorContext {
ExecutorContext {
engine,
fs: Arc::new(FileManager::new()),
stdlib: Arc::new(StdLib::new()),
settings: Default::default(),
context_type: ContextType::MockCustomForwarded,
}
@ -575,7 +566,7 @@ impl ExecutorContext {
// part of the scene).
exec_state.mut_stack().push_new_env_for_scope();

let result = self.inner_run(&program, 0, &mut exec_state, true).await?;
let result = self.inner_run(&program, &mut exec_state, true).await?;

// Restore any temporary variables, then save any newly created variables back to
// memory in case another run wants to use them. Note this is just saved to the preserved
@ -583,7 +574,7 @@ impl ExecutorContext {

let mut mem = exec_state.stack().clone();
let module_infos = exec_state.global.module_infos.clone();
let outcome = exec_state.to_mock_exec_outcome(result.0).await;
let outcome = exec_state.to_mock_exec_outcome(result.0, self).await;

mem.squash_env(result.0);
cache::write_old_memory((mem, module_infos)).await;
@ -594,169 +585,176 @@ impl ExecutorContext {
pub async fn run_with_caching(&self, program: crate::Program) -> Result<ExecOutcome, KclErrorWithOutputs> {
assert!(!self.is_mock());

let (program, mut exec_state, preserve_mem, cached_body_items, imports_info) = if let Some(OldAstState {
ast: old_ast,
exec_state: mut old_state,
settings: old_settings,
result_env,
}) =
cache::read_old_ast().await
{
let old = CacheInformation {
ast: &old_ast,
settings: &old_settings,
};
let new = CacheInformation {
ast: &program.ast,
settings: &self.settings,
};

// Get the program that actually changed from the old and new information.
let (clear_scene, program, body_items, import_check_info) = match cache::get_changed_program(old, new).await
{
CacheResult::ReExecute {
clear_scene,
reapply_settings,
program: changed_program,
cached_body_items,
} => {
if reapply_settings
&& self
.engine
.reapply_settings(&self.settings, Default::default(), old_state.id_generator())
.await
.is_err()
{
(true, program, cached_body_items, None)
} else {
(
clear_scene,
crate::Program {
ast: changed_program,
original_file_contents: program.original_file_contents,
},
cached_body_items,
None,
)
}
}
CacheResult::CheckImportsOnly {
reapply_settings,
ast: changed_program,
} => {
if reapply_settings
&& self
.engine
.reapply_settings(&self.settings, Default::default(), old_state.id_generator())
.await
.is_err()
{
(true, program, old_ast.body.len(), None)
} else {
// We need to check our imports to see if they changed.
let mut new_exec_state = ExecState::new(self);
let (new_universe, new_universe_map) = self.get_universe(&program, &mut new_exec_state).await?;
let mut clear_scene = false;

let mut keys = new_universe.keys().clone().collect::<Vec<_>>();
keys.sort();
for key in keys {
let (_, id, _, _) = &new_universe[key];
if let (Some(source0), Some(source1)) =
(old_state.get_source(*id), new_exec_state.get_source(*id))
{
if source0.source != source1.source {
clear_scene = true;
break;
}
}
}

if !clear_scene {
// Return early we don't need to clear the scene.
let outcome = old_state.to_exec_outcome(result_env).await;
return Ok(outcome);
}

(
clear_scene,
crate::Program {
ast: changed_program,
original_file_contents: program.original_file_contents,
},
old_ast.body.len(),
// We only care about this if we are clearing the scene.
if clear_scene {
Some((new_universe, new_universe_map, new_exec_state))
} else {
None
},
)
}
}
CacheResult::NoAction(true) => {
if self
.engine
.reapply_settings(&self.settings, Default::default(), old_state.id_generator())
.await
.is_ok()
{
// We need to update the old ast state with the new settings!!
cache::write_old_ast(OldAstState {
ast: old_ast,
exec_state: old_state.clone(),
settings: self.settings.clone(),
result_env,
})
.await;

let outcome = old_state.to_exec_outcome(result_env).await;
return Ok(outcome);
}
(true, program, old_ast.body.len(), None)
}
CacheResult::NoAction(false) => {
let outcome = old_state.to_exec_outcome(result_env).await;
return Ok(outcome);
}
};

let (exec_state, preserve_mem, universe_info) =
if let Some((new_universe, new_universe_map, mut new_exec_state)) = import_check_info {
// Clear the scene if the imports changed.
self.send_clear_scene(&mut new_exec_state, Default::default())
.await
.map_err(KclErrorWithOutputs::no_outputs)?;

(new_exec_state, false, Some((new_universe, new_universe_map)))
} else if clear_scene {
// Pop the execution state, since we are starting fresh.
let mut exec_state = old_state;
exec_state.reset(self);

self.send_clear_scene(&mut exec_state, Default::default())
.await
.map_err(KclErrorWithOutputs::no_outputs)?;

(exec_state, false, None)
} else {
old_state.mut_stack().restore_env(result_env);

(old_state, true, None)
let (program, exec_state, result) = match cache::read_old_ast().await {
Some(mut cached_state) => {
let old = CacheInformation {
ast: &cached_state.main.ast,
settings: &cached_state.settings,
};
let new = CacheInformation {
ast: &program.ast,
settings: &self.settings,
};

(program, exec_state, preserve_mem, body_items, universe_info)
} else {
let mut exec_state = ExecState::new(self);
self.send_clear_scene(&mut exec_state, Default::default())
.await
.map_err(KclErrorWithOutputs::no_outputs)?;
(program, exec_state, false, 0, None)
};
// Get the program that actually changed from the old and new information.
let (clear_scene, program, import_check_info) = match cache::get_changed_program(old, new).await {
CacheResult::ReExecute {
clear_scene,
reapply_settings,
program: changed_program,
} => {
if reapply_settings
&& self
.engine
.reapply_settings(
&self.settings,
Default::default(),
&mut cached_state.main.exec_state.id_generator,
)
.await
.is_err()
{
(true, program, None)
} else {
(
clear_scene,
crate::Program {
ast: changed_program,
original_file_contents: program.original_file_contents,
},
None,
)
}
}
CacheResult::CheckImportsOnly {
reapply_settings,
ast: changed_program,
} => {
if reapply_settings
&& self
.engine
.reapply_settings(
&self.settings,
Default::default(),
&mut cached_state.main.exec_state.id_generator,
)
.await
.is_err()
{
(true, program, None)
} else {
// We need to check our imports to see if they changed.
let mut new_exec_state = ExecState::new(self);
let (new_universe, new_universe_map) =
self.get_universe(&program, &mut new_exec_state).await?;

let result = self
.run_concurrent(&program, cached_body_items, &mut exec_state, imports_info, preserve_mem)
.await;
let clear_scene = new_universe.keys().any(|key| {
let id = new_universe[key].1;
match (
cached_state.exec_state.get_source(id),
new_exec_state.global.get_source(id),
) {
(Some(s0), Some(s1)) => s0.source != s1.source,
_ => false,
}
});

if !clear_scene {
// Return early we don't need to clear the scene.
return Ok(cached_state.into_exec_outcome(self).await);
}

(
true,
crate::Program {
ast: changed_program,
original_file_contents: program.original_file_contents,
},
Some((new_universe, new_universe_map, new_exec_state)),
)
}
}
CacheResult::NoAction(true) => {
if self
.engine
.reapply_settings(
&self.settings,
Default::default(),
&mut cached_state.main.exec_state.id_generator,
)
.await
.is_ok()
{
// We need to update the old ast state with the new settings!!
cache::write_old_ast(GlobalState::with_settings(
cached_state.clone(),
self.settings.clone(),
))
.await;

return Ok(cached_state.into_exec_outcome(self).await);
}
(true, program, None)
}
CacheResult::NoAction(false) => {
return Ok(cached_state.into_exec_outcome(self).await);
}
};

let (exec_state, result) = match import_check_info {
Some((new_universe, new_universe_map, mut new_exec_state)) => {
// Clear the scene if the imports changed.
self.send_clear_scene(&mut new_exec_state, Default::default())
.await
.map_err(KclErrorWithOutputs::no_outputs)?;

let result = self
.run_concurrent(
&program,
&mut new_exec_state,
Some((new_universe, new_universe_map)),
false,
)
.await;

(new_exec_state, result)
}
None if clear_scene => {
// Pop the execution state, since we are starting fresh.
let mut exec_state = cached_state.reconstitute_exec_state();
exec_state.reset(self);

self.send_clear_scene(&mut exec_state, Default::default())
.await
.map_err(KclErrorWithOutputs::no_outputs)?;

let result = self.run_concurrent(&program, &mut exec_state, None, false).await;

(exec_state, result)
}
None => {
let mut exec_state = cached_state.reconstitute_exec_state();
exec_state.mut_stack().restore_env(cached_state.main.result_env);

let result = self.run_concurrent(&program, &mut exec_state, None, true).await;

(exec_state, result)
}
};

(program, exec_state, result)
}
None => {
let mut exec_state = ExecState::new(self);
self.send_clear_scene(&mut exec_state, Default::default())
.await
.map_err(KclErrorWithOutputs::no_outputs)?;

let result = self.run_concurrent(&program, &mut exec_state, None, false).await;

(program, exec_state, result)
}
};

if result.is_err() {
cache::bust_cache().await;
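For orientation, the rewritten `run_with_caching` above boils down to a three-way decision over the cached state. A simplified model of that decision follows (illustrative only; the enum, field names, and strings are invented stand-ins for the real `CacheResult` handling, which also threads settings, universes, and environments through):

// Illustrative only — a toy model of the cache decision made by run_with_caching.
#[allow(dead_code)]
enum CacheDecision {
    NoAction { settings_changed: bool },
    CheckImportsOnly,
    ReExecute { clear_scene: bool },
}

fn describe(decision: CacheDecision) -> &'static str {
    match decision {
        CacheDecision::NoAction { settings_changed: false } => "return the cached outcome as-is",
        CacheDecision::NoAction { settings_changed: true } => "reapply settings, then return the cached outcome",
        CacheDecision::CheckImportsOnly => "re-walk imports; clear the scene only if an imported file changed",
        CacheDecision::ReExecute { clear_scene: true } => "reset state, clear the scene, run everything",
        CacheDecision::ReExecute { clear_scene: false } => "restore the cached environment and run only the changed tail",
    }
}

fn main() {
    assert_eq!(
        describe(CacheDecision::CheckImportsOnly),
        "re-walk imports; clear the scene only if an imported file changed"
    );
}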
@ -766,15 +764,15 @@ impl ExecutorContext {
let result = result?;

// Save this as the last successful execution to the cache.
cache::write_old_ast(OldAstState {
ast: program.ast,
exec_state: exec_state.clone(),
settings: self.settings.clone(),
result_env: result.0,
})
cache::write_old_ast(GlobalState::new(
exec_state.clone(),
self.settings.clone(),
program.ast,
result.0,
))
.await;

let outcome = exec_state.to_exec_outcome(result.0).await;
let outcome = exec_state.to_exec_outcome(result.0, self).await;
Ok(outcome)
}

@ -789,11 +787,11 @@ impl ExecutorContext {
program: &crate::Program,
exec_state: &mut ExecState,
) -> Result<(EnvironmentRef, Option<ModelingSessionData>), KclErrorWithOutputs> {
self.run_concurrent(program, 0, exec_state, None, false).await
self.run_concurrent(program, exec_state, None, false).await
}

/// Perform the execution of a program using a concurrent
/// execution model. This has the same signature as [Self::run].
/// execution model.
///
/// You can optionally pass in some initialization memory for partial
/// execution.
@ -802,13 +800,12 @@ impl ExecutorContext {
pub async fn run_concurrent(
&self,
program: &crate::Program,
cached_body_items: usize,
exec_state: &mut ExecState,
universe_info: Option<(Universe, UniverseMap)>,
preserve_mem: bool,
) -> Result<(EnvironmentRef, Option<ModelingSessionData>), KclErrorWithOutputs> {
// Reuse our cached universe if we have one.
#[allow(unused_variables)]

let (universe, universe_map) = if let Some((universe, universe_map)) = universe_info {
(universe, universe_map)
} else {
@ -822,29 +819,8 @@ impl ExecutorContext {
.await
.map_err(KclErrorWithOutputs::no_outputs)?;

for modules in crate::walk::import_graph(&universe, self)
.map_err(|err| {
let module_id_to_module_path: IndexMap<ModuleId, ModulePath> = exec_state
.global
.path_to_source_id
.iter()
.map(|(k, v)| ((*v), k.clone()))
.collect();

KclErrorWithOutputs::new(
err,
exec_state.errors().to_vec(),
#[cfg(feature = "artifact-graph")]
exec_state.global.operations.clone(),
#[cfg(feature = "artifact-graph")]
exec_state.global.artifact_commands.clone(),
#[cfg(feature = "artifact-graph")]
exec_state.global.artifact_graph.clone(),
module_id_to_module_path,
exec_state.global.id_to_source.clone(),
default_planes.clone(),
)
})?
for modules in import_graph::import_graph(&universe, self)
.map_err(|err| exec_state.error_with_outputs(err, default_planes.clone()))?
.into_iter()
{
#[cfg(not(target_arch = "wasm32"))]
@ -858,7 +834,7 @@ impl ExecutorContext {

for module in modules {
let Some((import_stmt, module_id, module_path, repr)) = universe.get(&module) else {
return Err(KclErrorWithOutputs::no_outputs(KclError::Internal(
return Err(KclErrorWithOutputs::no_outputs(KclError::new_internal(
KclErrorDetails::new(format!("Module {module} not found in universe"), Default::default()),
)));
};
@ -866,7 +842,6 @@ impl ExecutorContext {
let module_path = module_path.clone();
let source_range = SourceRange::from(import_stmt);

#[cfg(feature = "artifact-graph")]
match &module_path {
ModulePath::Main => {
// This should never happen.
@ -875,7 +850,7 @@ impl ExecutorContext {
// We only want to display the top-level module imports in
// the Feature Tree, not transitive imports.
if universe_map.contains_key(value) {
exec_state.global.operations.push(Operation::GroupBegin {
exec_state.push_op(Operation::GroupBegin {
group: Group::ModuleInstance {
name: value.file_name().unwrap_or_default(),
module_id,
@ -885,7 +860,7 @@ impl ExecutorContext {
// Due to concurrent execution, we cannot easily
// group operations by module. So we leave the
// group empty and close it immediately.
exec_state.global.operations.push(Operation::GroupEnd);
exec_state.push_op(Operation::GroupEnd);
}
}
ModulePath::Std { .. } => {
@ -920,7 +895,7 @@ impl ExecutorContext {

result.map(|val| ModuleRepr::Foreign(geom.clone(), val))
}
ModuleRepr::Dummy | ModuleRepr::Root => Err(KclError::Internal(KclErrorDetails::new(
ModuleRepr::Dummy | ModuleRepr::Root => Err(KclError::new_internal(KclErrorDetails::new(
format!("Module {module_path} not found in universe"),
vec![source_range],
))),
@ -930,7 +905,6 @@ impl ExecutorContext {
#[cfg(target_arch = "wasm32")]
{
wasm_bindgen_futures::spawn_local(async move {
//set.spawn(async move {
let mut exec_state = exec_state;
let exec_ctxt = exec_ctxt;

@ -1000,33 +974,13 @@ impl ExecutorContext {
exec_state.global.module_infos[&module_id].restore_repr(repr);
}
Err(e) => {
let module_id_to_module_path: IndexMap<ModuleId, ModulePath> = exec_state
.global
.path_to_source_id
.iter()
.map(|(k, v)| ((*v), k.clone()))
.collect();

return Err(KclErrorWithOutputs::new(
e,
exec_state.errors().to_vec(),
#[cfg(feature = "artifact-graph")]
exec_state.global.operations.clone(),
#[cfg(feature = "artifact-graph")]
exec_state.global.artifact_commands.clone(),
#[cfg(feature = "artifact-graph")]
exec_state.global.artifact_graph.clone(),
module_id_to_module_path,
exec_state.global.id_to_source.clone(),
default_planes,
));
return Err(exec_state.error_with_outputs(e, default_planes));
}
}
}
}

self.inner_run(program, cached_body_items, exec_state, preserve_mem)
.await
self.inner_run(program, exec_state, preserve_mem).await
}

/// Get the universe & universe map of the program.
@ -1042,7 +996,7 @@ impl ExecutorContext {

let default_planes = self.engine.get_default_planes().read().await.clone();

let root_imports = crate::walk::import_universe(
let root_imports = import_graph::import_universe(
self,
&ModulePath::Main,
&ModuleRepr::Kcl(program.ast.clone(), None),
@ -1050,29 +1004,7 @@ impl ExecutorContext {
exec_state,
)
.await
.map_err(|err| {
println!("Error: {err:?}");
let module_id_to_module_path: IndexMap<ModuleId, ModulePath> = exec_state
.global
.path_to_source_id
.iter()
.map(|(k, v)| ((*v), k.clone()))
.collect();

KclErrorWithOutputs::new(
err,
exec_state.errors().to_vec(),
#[cfg(feature = "artifact-graph")]
exec_state.global.operations.clone(),
#[cfg(feature = "artifact-graph")]
exec_state.global.artifact_commands.clone(),
#[cfg(feature = "artifact-graph")]
exec_state.global.artifact_graph.clone(),
module_id_to_module_path,
exec_state.global.id_to_source.clone(),
default_planes,
)
})?;
.map_err(|err| exec_state.error_with_outputs(err, default_planes))?;

Ok((universe, root_imports))
}
@ -1082,7 +1014,6 @@ impl ExecutorContext {
async fn inner_run(
&self,
program: &crate::Program,
cached_body_items: usize,
exec_state: &mut ExecState,
preserve_mem: bool,
) -> Result<(EnvironmentRef, Option<ModelingSessionData>), KclErrorWithOutputs> {
@ -1096,7 +1027,7 @@ impl ExecutorContext {

let default_planes = self.engine.get_default_planes().read().await.clone();
let result = self
.execute_and_build_graph(&program.ast, cached_body_items, exec_state, preserve_mem)
.execute_and_build_graph(&program.ast, exec_state, preserve_mem)
.await;

crate::log::log(format!(
@ -1105,28 +1036,7 @@ impl ExecutorContext {
));
crate::log::log(format!("Engine stats: {:?}", self.engine.stats()));

let env_ref = result.map_err(|e| {
let module_id_to_module_path: IndexMap<ModuleId, ModulePath> = exec_state
.global
.path_to_source_id
.iter()
.map(|(k, v)| ((*v), k.clone()))
.collect();

KclErrorWithOutputs::new(
e,
exec_state.errors().to_vec(),
#[cfg(feature = "artifact-graph")]
exec_state.global.operations.clone(),
#[cfg(feature = "artifact-graph")]
exec_state.global.artifact_commands.clone(),
#[cfg(feature = "artifact-graph")]
exec_state.global.artifact_graph.clone(),
module_id_to_module_path,
exec_state.global.id_to_source.clone(),
default_planes.clone(),
)
})?;
let env_ref = result.map_err(|e| exec_state.error_with_outputs(e, default_planes))?;

if !self.is_mock() {
let mut mem = exec_state.stack().deep_clone();
@ -1143,7 +1053,6 @@ impl ExecutorContext {
async fn execute_and_build_graph(
&self,
program: NodeRef<'_, crate::parsing::ast::types::Program>,
#[cfg_attr(not(feature = "artifact-graph"), expect(unused))] cached_body_items: usize,
exec_state: &mut ExecState,
preserve_mem: bool,
) -> Result<EnvironmentRef, KclError> {
@ -1170,40 +1079,9 @@ impl ExecutorContext {
// and should be dropped.
self.engine.clear_queues().await;

#[cfg(feature = "artifact-graph")]
{
let new_commands = self.engine.take_artifact_commands().await;
let new_responses = self.engine.take_responses().await;
let initial_graph = exec_state.global.artifact_graph.clone();

// Build the artifact graph.
let graph_result = build_artifact_graph(
&new_commands,
&new_responses,
program,
cached_body_items,
&mut exec_state.global.artifacts,
initial_graph,
);
// Move the artifact commands and responses into ExecState to
// simplify cache management and error creation.
exec_state.global.artifact_commands.extend(new_commands);
exec_state.global.artifact_responses.extend(new_responses);

match graph_result {
Ok(artifact_graph) => {
exec_state.global.artifact_graph = artifact_graph;
exec_result.map(|(_, env_ref, _)| env_ref)
}
Err(err) => {
// Prefer the exec error.
exec_result.and(Err(err))
}
}
}
#[cfg(not(feature = "artifact-graph"))]
{
exec_result.map(|(_, env_ref, _)| env_ref)
match exec_state.build_artifact_graph(&self.engine, program).await {
Ok(_) => exec_result.map(|(_, env_ref, _)| env_ref),
Err(err) => exec_result.and(Err(err)),
}
}

@ -1283,7 +1161,7 @@ impl ExecutorContext {
.await?;

let kittycad_modeling_cmds::websocket::OkWebSocketResponseData::Export { files } = resp else {
return Err(KclError::Internal(crate::errors::KclErrorDetails::new(
return Err(KclError::new_internal(crate::errors::KclErrorDetails::new(
format!("Expected Export response, got {resp:?}",),
vec![SourceRange::default()],
)));
@ -1303,7 +1181,7 @@ impl ExecutorContext {
coords: *kittycad_modeling_cmds::coord::KITTYCAD,
created: if deterministic_time {
Some("2021-01-01T00:00:00Z".parse().map_err(|e| {
KclError::Internal(crate::errors::KclErrorDetails::new(
KclError::new_internal(crate::errors::KclErrorDetails::new(
format!("Failed to parse date: {}", e),
vec![SourceRange::default()],
))
@ -1383,14 +1261,13 @@ pub(crate) async fn parse_execute_with_project_dir(
let exec_ctxt = ExecutorContext {
engine: Arc::new(Box::new(
crate::engine::conn_mock::EngineConnection::new().await.map_err(|err| {
KclError::Internal(crate::errors::KclErrorDetails::new(
KclError::new_internal(crate::errors::KclErrorDetails::new(
format!("Failed to create mock engine connection: {}", err),
vec![SourceRange::default()],
))
})?,
)),
fs: Arc::new(crate::fs::FileManager::new()),
stdlib: Arc::new(crate::std::StdLib::new()),
settings: ExecutorSettings {
project_directory,
..Default::default()
@ -1799,7 +1676,7 @@ foo
let err = result.unwrap_err();
assert_eq!(
err,
KclError::Syntax(KclErrorDetails::new(
KclError::new_syntax(KclErrorDetails::new(
"Unexpected token: #".to_owned(),
vec![SourceRange::new(14, 15, ModuleId::default())],
)),
@ -2058,7 +1935,7 @@ notTagIdentifier = !myTag";
// TODO: We don't currently parse this, but we should. It should be
// a runtime error instead.
parse_execute(code10).await.unwrap_err(),
KclError::Syntax(KclErrorDetails::new(
KclError::new_syntax(KclErrorDetails::new(
"Unexpected token: !".to_owned(),
vec![SourceRange::new(10, 11, ModuleId::default())],
))
@ -2071,9 +1948,9 @@ notPipeSub = 1 |> identity(!%))";
// TODO: We don't currently parse this, but we should. It should be
// a runtime error instead.
parse_execute(code11).await.unwrap_err(),
KclError::Syntax(KclErrorDetails::new(
"Unexpected token: |>".to_owned(),
vec![SourceRange::new(44, 46, ModuleId::default())],
KclError::new_syntax(KclErrorDetails::new(
"There was an unexpected !. Try removing it.".to_owned(),
vec![SourceRange::new(56, 57, ModuleId::default())],
))
);

@ -2206,7 +2083,7 @@ w = f() + f()
}

// Get the id_generator from the first execution.
let id_generator = cache::read_old_ast().await.unwrap().exec_state.mod_local.id_generator;
let id_generator = cache::read_old_ast().await.unwrap().main.exec_state.id_generator;

let code = r#"sketch001 = startSketchOn(XZ)
|> startProfile(at = [62.74, 206.13])
@ -2227,7 +2104,7 @@ w = f() + f()
// Execute the program.
ctx.run_with_caching(program).await.unwrap();

let new_id_generator = cache::read_old_ast().await.unwrap().exec_state.mod_local.id_generator;
let new_id_generator = cache::read_old_ast().await.unwrap().main.exec_state.id_generator;

assert_eq!(id_generator, new_id_generator);
}
@ -12,19 +12,19 @@ use uuid::Uuid;
use crate::execution::{Artifact, ArtifactCommand, ArtifactGraph, ArtifactId};
use crate::{
errors::{KclError, KclErrorDetails, Severity},
exec::DefaultPlanes,
execution::{
annotations,
cad_op::Operation,
id_generator::IdGenerator,
memory::{ProgramMemory, Stack},
types,
types::NumericType,
types::{self, NumericType},
EnvironmentRef, ExecOutcome, ExecutorSettings, KclValue, UnitAngle, UnitLen,
},
modules::{ModuleId, ModuleInfo, ModuleLoader, ModulePath, ModuleRepr, ModuleSource},
parsing::ast::types::Annotation,
parsing::ast::types::{Annotation, NodeRef},
source_range::SourceRange,
CompilationError,
CompilationError, EngineManager, ExecutorContext, KclErrorWithOutputs,
};

/// State for executing a program.
@ -32,7 +32,6 @@ use crate::{
pub struct ExecState {
pub(super) global: GlobalState,
pub(super) mod_local: ModuleState,
pub(super) exec_context: Option<super::ExecutorContext>,
}

pub type ModuleInfoMap = IndexMap<ModuleId, ModuleInfo>;
@ -45,33 +44,39 @@ pub(super) struct GlobalState {
pub id_to_source: IndexMap<ModuleId, ModuleSource>,
/// Map from module ID to module info.
pub module_infos: ModuleInfoMap,
/// Output map of UUIDs to artifacts.
#[cfg(feature = "artifact-graph")]
pub artifacts: IndexMap<ArtifactId, Artifact>,
/// Output commands to allow building the artifact graph by the caller.
/// These are accumulated in the [`ExecutorContext`] but moved here for
/// convenience of the execution cache.
#[cfg(feature = "artifact-graph")]
pub artifact_commands: Vec<ArtifactCommand>,
/// Responses from the engine for `artifact_commands`. We need to cache
/// this so that we can build the artifact graph. These are accumulated in
/// the [`ExecutorContext`] but moved here for convenience of the execution
/// cache.
#[cfg(feature = "artifact-graph")]
pub artifact_responses: IndexMap<Uuid, WebSocketResponse>,
/// Output artifact graph.
#[cfg(feature = "artifact-graph")]
pub artifact_graph: ArtifactGraph,
/// Operations that have been performed in execution order, for display in
/// the Feature Tree.
#[cfg(feature = "artifact-graph")]
pub operations: Vec<Operation>,
/// Module loader.
pub mod_loader: ModuleLoader,
/// Errors and warnings.
pub errors: Vec<CompilationError>,
#[allow(dead_code)]
pub artifacts: ArtifactState,
}

#[cfg(feature = "artifact-graph")]
#[derive(Debug, Clone, Default)]
pub(super) struct ArtifactState {
/// Output map of UUIDs to artifacts.
pub artifacts: IndexMap<ArtifactId, Artifact>,
/// Output commands to allow building the artifact graph by the caller.
/// These are accumulated in the [`ExecutorContext`] but moved here for
/// convenience of the execution cache.
pub commands: Vec<ArtifactCommand>,
/// Responses from the engine for `artifact_commands`. We need to cache
/// this so that we can build the artifact graph. These are accumulated in
/// the [`ExecutorContext`] but moved here for convenience of the execution
/// cache.
pub responses: IndexMap<Uuid, WebSocketResponse>,
/// Output artifact graph.
pub graph: ArtifactGraph,
/// Operations that have been performed in execution order, for display in
/// the Feature Tree.
pub operations: Vec<Operation>,
}

#[cfg(not(feature = "artifact-graph"))]
#[derive(Debug, Clone, Default)]
pub(super) struct ArtifactState {}

#[derive(Debug, Clone)]
pub(super) struct ModuleState {
/// The id generator for this module.
@ -80,6 +85,11 @@ pub(super) struct ModuleState {
/// The current value of the pipe operator returned from the previous
/// expression. If we're not currently in a pipeline, this will be None.
pub pipe_value: Option<KclValue>,
/// The closest variable declaration being executed in any parent node in the AST.
/// This is used to provide better error messages, e.g. noticing when the user is trying
/// to use the variable `length` inside the RHS of its own definition, like `length = tan(length)`.
/// TODO: Make this a reference.
pub being_declared: Option<String>,
/// Identifiers that have been exported from the current module.
pub module_exports: Vec<String>,
/// Settings specified from annotations.
@ -93,7 +103,6 @@ impl ExecState {
ExecState {
global: GlobalState::new(&exec_context.settings),
mod_local: ModuleState::new(ModulePath::Main, ProgramMemory::new(), Default::default()),
exec_context: Some(exec_context.clone()),
}
}

@ -103,7 +112,6 @@ impl ExecState {
*self = ExecState {
global,
mod_local: ModuleState::new(self.mod_local.path.clone(), ProgramMemory::new(), Default::default()),
exec_context: Some(exec_context.clone()),
};
}

@ -125,45 +133,26 @@ impl ExecState {
/// Convert to execution outcome when running in WebAssembly. We want to
/// reduce the amount of data that crosses the WASM boundary as much as
/// possible.
pub async fn to_exec_outcome(self, main_ref: EnvironmentRef) -> ExecOutcome {
pub async fn to_exec_outcome(self, main_ref: EnvironmentRef, ctx: &ExecutorContext) -> ExecOutcome {
// Fields are opt-in so that we don't accidentally leak private internal
// state when we add more to ExecState.
ExecOutcome {
variables: self
.stack()
.find_all_in_env(main_ref)
.map(|(k, v)| (k.clone(), v.clone()))
.collect(),
variables: self.mod_local.variables(main_ref),
filenames: self.global.filenames(),
#[cfg(feature = "artifact-graph")]
operations: self.global.operations,
operations: self.global.artifacts.operations,
#[cfg(feature = "artifact-graph")]
artifact_commands: self.global.artifact_commands,
artifact_commands: self.global.artifacts.commands,
#[cfg(feature = "artifact-graph")]
artifact_graph: self.global.artifact_graph,
artifact_graph: self.global.artifacts.graph,
errors: self.global.errors,
filenames: self
.global
.path_to_source_id
.iter()
.map(|(k, v)| ((*v), k.clone()))
.collect(),
default_planes: if let Some(ctx) = &self.exec_context {
ctx.engine.get_default_planes().read().await.clone()
} else {
None
},
default_planes: ctx.engine.get_default_planes().read().await.clone(),
}
}

pub async fn to_mock_exec_outcome(self, main_ref: EnvironmentRef) -> ExecOutcome {
// Fields are opt-in so that we don't accidentally leak private internal
// state when we add more to ExecState.
pub async fn to_mock_exec_outcome(self, main_ref: EnvironmentRef, ctx: &ExecutorContext) -> ExecOutcome {
ExecOutcome {
variables: self
.stack()
.find_all_in_env(main_ref)
.map(|(k, v)| (k.clone(), v.clone()))
.collect(),
variables: self.mod_local.variables(main_ref),
#[cfg(feature = "artifact-graph")]
operations: Default::default(),
#[cfg(feature = "artifact-graph")]
@ -172,11 +161,7 @@ impl ExecState {
artifact_graph: Default::default(),
errors: self.global.errors,
filenames: Default::default(),
default_planes: if let Some(ctx) = &self.exec_context {
ctx.engine.get_default_planes().read().await.clone()
} else {
None
},
default_planes: ctx.engine.get_default_planes().read().await.clone(),
}
}

@ -199,12 +184,12 @@ impl ExecState {
#[cfg(feature = "artifact-graph")]
pub(crate) fn add_artifact(&mut self, artifact: Artifact) {
let id = artifact.id();
self.global.artifacts.insert(id, artifact);
self.global.artifacts.artifacts.insert(id, artifact);
}

pub(crate) fn push_op(&mut self, op: Operation) {
#[cfg(feature = "artifact-graph")]
self.global.operations.push(op);
self.global.artifacts.operations.push(op);
#[cfg(not(feature = "artifact-graph"))]
drop(op);
}
@ -246,10 +231,6 @@ impl ExecState {
self.global.id_to_source.insert(id, source.clone());
}

pub(super) fn get_source(&self, id: ModuleId) -> Option<&ModuleSource> {
self.global.id_to_source.get(&id)
}

pub(super) fn add_module(&mut self, id: ModuleId, path: ModulePath, repr: ModuleRepr) {
debug_assert!(self.global.path_to_source_id.contains_key(&path));
let module_info = ModuleInfo { id, repr, path };
@ -276,7 +257,7 @@ impl ExecState {
}

pub(super) fn circular_import_error(&self, path: &ModulePath, source_range: SourceRange) -> KclError {
KclError::ImportCycle(KclErrorDetails::new(
KclError::new_import_cycle(KclErrorDetails::new(
format!(
"circular import of modules is not allowed: {} -> {}",
self.global
@ -295,6 +276,71 @@ impl ExecState {
pub(crate) fn pipe_value(&self) -> Option<&KclValue> {
self.mod_local.pipe_value.as_ref()
}

pub(crate) fn error_with_outputs(
&self,
error: KclError,
default_planes: Option<DefaultPlanes>,
) -> KclErrorWithOutputs {
let module_id_to_module_path: IndexMap<ModuleId, ModulePath> = self
.global
.path_to_source_id
.iter()
.map(|(k, v)| ((*v), k.clone()))
.collect();

KclErrorWithOutputs::new(
error,
self.errors().to_vec(),
#[cfg(feature = "artifact-graph")]
self.global.artifacts.operations.clone(),
#[cfg(feature = "artifact-graph")]
self.global.artifacts.commands.clone(),
#[cfg(feature = "artifact-graph")]
self.global.artifacts.graph.clone(),
module_id_to_module_path,
self.global.id_to_source.clone(),
default_planes,
)
}

#[cfg(feature = "artifact-graph")]
pub(crate) async fn build_artifact_graph(
&mut self,
engine: &Arc<Box<dyn EngineManager>>,
program: NodeRef<'_, crate::parsing::ast::types::Program>,
) -> Result<(), KclError> {
let new_commands = engine.take_artifact_commands().await;
let new_responses = engine.take_responses().await;
let initial_graph = self.global.artifacts.graph.clone();

// Build the artifact graph.
let graph_result = crate::execution::artifact::build_artifact_graph(
&new_commands,
&new_responses,
program,
&mut self.global.artifacts.artifacts,
initial_graph,
);
// Move the artifact commands and responses into ExecState to
// simplify cache management and error creation.
self.global.artifacts.commands.extend(new_commands);
self.global.artifacts.responses.extend(new_responses);

let artifact_graph = graph_result?;
self.global.artifacts.graph = artifact_graph;

Ok(())
}

#[cfg(not(feature = "artifact-graph"))]
pub(crate) async fn build_artifact_graph(
&mut self,
_engine: &Arc<Box<dyn EngineManager>>,
_program: NodeRef<'_, crate::parsing::ast::types::Program>,
) -> Result<(), KclError> {
Ok(())
}
}

impl GlobalState {
@ -302,16 +348,7 @@ impl GlobalState {
let mut global = GlobalState {
path_to_source_id: Default::default(),
module_infos: Default::default(),
#[cfg(feature = "artifact-graph")]
artifacts: Default::default(),
#[cfg(feature = "artifact-graph")]
artifact_commands: Default::default(),
#[cfg(feature = "artifact-graph")]
artifact_responses: Default::default(),
#[cfg(feature = "artifact-graph")]
artifact_graph: Default::default(),
#[cfg(feature = "artifact-graph")]
operations: Default::default(),
mod_loader: Default::default(),
errors: Default::default(),
id_to_source: Default::default(),
@ -334,6 +371,14 @@ impl GlobalState {
.insert(ModulePath::Local { value: root_path }, root_id);
global
}

pub(super) fn filenames(&self) -> IndexMap<ModuleId, ModulePath> {
self.path_to_source_id.iter().map(|(k, v)| ((*v), k.clone())).collect()
}

pub(super) fn get_source(&self, id: ModuleId) -> Option<&ModuleSource> {
self.id_to_source.get(&id)
}
}

impl ModuleState {
@ -342,6 +387,7 @@ impl ModuleState {
id_generator: IdGenerator::new(module_id),
stack: memory.new_stack(),
pipe_value: Default::default(),
being_declared: Default::default(),
module_exports: Default::default(),
explicit_length_units: false,
path,
@ -352,6 +398,13 @@ impl ModuleState {
},
}
}

pub(super) fn variables(&self, main_ref: EnvironmentRef) -> IndexMap<String, KclValue> {
self.stack
.find_all_in_env(main_ref)
.map(|(k, v)| (k.clone(), v.clone()))
.collect()
}
}

#[derive(Debug, Default, Clone, Deserialize, Serialize, PartialEq, Eq, ts_rs::TS, JsonSchema)]
@ -389,7 +442,7 @@ impl MetaSettings {
self.kcl_version = value;
}
name => {
return Err(KclError::Semantic(KclErrorDetails::new(
return Err(KclError::new_semantic(KclErrorDetails::new(
format!(
"Unexpected settings key: `{name}`; expected one of `{}`, `{}`",
annotations::SETTINGS_UNIT_LENGTH,
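The repeated inline `KclErrorWithOutputs::new(...)` constructions in the old executor code are replaced by the single `error_with_outputs` helper above. A minimal sketch of that refactor shape follows (illustrative only; the types below are invented stand-ins, not the kcl-lib ones):

// Illustrative only — one helper on the state snapshots the diagnostic context
// onto an error, instead of rebuilding that context at every call site.
struct State {
    warnings: Vec<String>,
}

struct ErrorWithContext {
    error: String,
    warnings: Vec<String>,
}

impl State {
    // The single place that knows how to attach accumulated context.
    fn error_with_context(&self, error: impl Into<String>) -> ErrorWithContext {
        ErrorWithContext {
            error: error.into(),
            warnings: self.warnings.clone(),
        }
    }
}

fn main() {
    let state = State { warnings: vec!["unused variable `x`".to_string()] };
    let err = state.error_with_context("engine disconnected");
    assert_eq!(err.warnings.len(), 1);
    assert_eq!(err.error, "engine disconnected");
}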
@ -220,6 +220,7 @@ impl schemars::JsonSchema for TypedPath {
///
/// * Does **not** touch `..` or symlinks – call `canonicalize()` if you need that.
/// * Returns an owned `PathBuf` only when normalisation was required.
#[cfg(not(target_arch = "wasm32"))]
fn normalise_import<S: AsRef<str>>(raw: S) -> std::path::PathBuf {
let s = raw.as_ref();
// On Unix we need to swap `\` → `/`. On Windows we leave it alone.
@ -44,6 +44,10 @@ impl RuntimeType {
RuntimeType::Primitive(PrimitiveType::Sketch)
}

pub fn sketch_or_surface() -> Self {
RuntimeType::Union(vec![Self::sketch(), Self::plane(), Self::face()])
}

/// `[Sketch; 1+]`
pub fn sketches() -> Self {
RuntimeType::Array(
@ -183,7 +187,7 @@ impl RuntimeType {
};
RuntimeType::Primitive(PrimitiveType::Number(ty))
}
AstPrimitiveType::Named(name) => Self::from_alias(&name.name, exec_state, source_range)?,
AstPrimitiveType::Named { id } => Self::from_alias(&id.name, exec_state, source_range)?,
AstPrimitiveType::Tag => RuntimeType::Primitive(PrimitiveType::Tag),
AstPrimitiveType::ImportedGeometry => RuntimeType::Primitive(PrimitiveType::ImportedGeometry),
AstPrimitiveType::Function(_) => RuntimeType::Primitive(PrimitiveType::Function),