Move the wasm lib, and cleanup rust directory and all references (#5585)

* git mv src/wasm-lib rust

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* mv wasm-lib to workspace

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* mv kcl-lib

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* mv derive docs

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* resolve file paths

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* clippy

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* move more shit

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* fix more paths

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* make yarn build:wasm work

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* fix scripts

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* fixups

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* better references

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* fix cargo ci

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* fix reference

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* fix more ci

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* fix tests

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* cargo sort

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* fix script

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* fix

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* fmt

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* fix a dep

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* sort

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* remove unused deps

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* Revert "remove unused deps"

This reverts commit fbabdb062e275fd5cbc1476f8480a1afee15d972.

* updates

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* deps;

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* fixes

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* updates

Signed-off-by: Jess Frazelle <github@jessfraz.com>

---------

Signed-off-by: Jess Frazelle <github@jessfraz.com>
Jess Frazelle
2025-03-01 13:59:01 -08:00
committed by GitHub
parent 0a2bf4b55f
commit c3bdc6f106
1443 changed files with 509 additions and 4274 deletions


@@ -0,0 +1,87 @@
//! Data on available annotations.
use kittycad_modeling_cmds::coord::{System, KITTYCAD, OPENGL, VULKAN};
use crate::{
errors::KclErrorDetails,
execution::kcl_value::{UnitAngle, UnitLen},
parsing::ast::types::{Annotation, Expr, Node, ObjectProperty},
KclError, SourceRange,
};
/// Annotations which should cause re-execution if they change.
pub(super) const SIGNIFICANT_ATTRS: [&str; 2] = [SETTINGS, NO_PRELUDE];
pub(crate) const SETTINGS: &str = "settings";
pub(crate) const SETTINGS_UNIT_LENGTH: &str = "defaultLengthUnit";
pub(crate) const SETTINGS_UNIT_ANGLE: &str = "defaultAngleUnit";
pub(super) const NO_PRELUDE: &str = "no_std";
pub(super) const IMPORT_FORMAT: &str = "format";
pub(super) const IMPORT_FORMAT_VALUES: [&str; 9] = ["fbx", "gltf", "glb", "obj", "ply", "sldprt", "stp", "step", "stl"];
pub(super) const IMPORT_COORDS: &str = "coords";
pub(super) const IMPORT_COORDS_VALUES: [(&str, &System); 3] =
[("zoo", KITTYCAD), ("opengl", OPENGL), ("vulkan", VULKAN)];
pub(super) const IMPORT_LENGTH_UNIT: &str = "lengthUnit";
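// For example (illustrative only, mirroring the KCL used in the cache and import
// tests elsewhere in this change), these constants correspond to annotations such as:
//
//   @settings(defaultLengthUnit = mm, defaultAngleUnit = deg)
//   @(format = obj, coords = vulkan, lengthUnit = ft)
//   import 'part.obj' as part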
pub(super) fn is_significant(attr: &&Node<Annotation>) -> bool {
match attr.name() {
Some(name) => SIGNIFICANT_ATTRS.contains(&name),
None => true,
}
}
pub(super) fn expect_properties<'a>(
for_key: &'static str,
annotation: &'a Node<Annotation>,
) -> Result<&'a [Node<ObjectProperty>], KclError> {
assert_eq!(annotation.name().unwrap(), for_key);
Ok(&**annotation.properties.as_ref().ok_or_else(|| {
KclError::Semantic(KclErrorDetails {
message: format!("Empty `{for_key}` annotation"),
source_ranges: vec![annotation.as_source_range()],
})
})?)
}
pub(super) fn expect_ident(expr: &Expr) -> Result<&str, KclError> {
match expr {
Expr::Identifier(id) => Ok(&id.name),
e => Err(KclError::Semantic(KclErrorDetails {
message: "Unexpected settings value, expected a simple name, e.g., `mm`".to_owned(),
source_ranges: vec![e.into()],
})),
}
}
impl UnitLen {
pub(super) fn from_str(s: &str, source_range: SourceRange) -> Result<Self, KclError> {
match s {
"mm" => Ok(UnitLen::Mm),
"cm" => Ok(UnitLen::Cm),
"m" => Ok(UnitLen::M),
"inch" | "in" => Ok(UnitLen::Inches),
"ft" => Ok(UnitLen::Feet),
"yd" => Ok(UnitLen::Yards),
value => Err(KclError::Semantic(KclErrorDetails {
message: format!(
"Unexpected value for length units: `{value}`; expected one of `mm`, `cm`, `m`, `in`, `ft`, `yd`"
),
source_ranges: vec![source_range],
})),
}
}
}
impl UnitAngle {
pub(super) fn from_str(s: &str, source_range: SourceRange) -> Result<Self, KclError> {
match s {
"deg" => Ok(UnitAngle::Degrees),
"rad" => Ok(UnitAngle::Radians),
value => Err(KclError::Semantic(KclErrorDetails {
message: format!("Unexpected value for angle units: `{value}`; expected one of `deg`, `rad`"),
source_ranges: vec![source_range],
})),
}
}
}
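// Illustrative only (not part of the original change): a minimal test sketch for
// the unit parsers above. Assumes `SourceRange` implements `Default`, as the
// import tests elsewhere in this crate rely on.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn parse_unit_strings() {
        assert!(matches!(
            UnitLen::from_str("in", SourceRange::default()),
            Ok(UnitLen::Inches)
        ));
        assert!(matches!(
            UnitAngle::from_str("deg", SourceRange::default()),
            Ok(UnitAngle::Degrees)
        ));
        // Unknown values are semantic errors pointing at the annotation's source range.
        assert!(UnitLen::from_str("furlong", SourceRange::default()).is_err());
    }
}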

File diff suppressed because it is too large


@@ -0,0 +1,469 @@
//! Tests for the artifact graph that convert it to Mermaid diagrams.
use std::fmt::Write;
use super::*;
type NodeId = u32;
type Edges = IndexMap<(NodeId, NodeId), EdgeInfo>;
#[derive(Debug, Clone, PartialEq, Eq)]
struct EdgeInfo {
direction: EdgeDirection,
flow: EdgeFlow,
kind: EdgeKind,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum EdgeDirection {
Forward,
Backward,
Bidirectional,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum EdgeFlow {
SourceToTarget,
TargetToSource,
}
impl EdgeFlow {
#[must_use]
fn reverse(&self) -> EdgeFlow {
match self {
EdgeFlow::SourceToTarget => EdgeFlow::TargetToSource,
EdgeFlow::TargetToSource => EdgeFlow::SourceToTarget,
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum EdgeKind {
PathToSweep,
Other,
}
impl EdgeDirection {
#[must_use]
fn merge(&self, other: EdgeDirection) -> EdgeDirection {
match self {
EdgeDirection::Forward => match other {
EdgeDirection::Forward => EdgeDirection::Forward,
EdgeDirection::Backward => EdgeDirection::Bidirectional,
EdgeDirection::Bidirectional => EdgeDirection::Bidirectional,
},
EdgeDirection::Backward => match other {
EdgeDirection::Forward => EdgeDirection::Bidirectional,
EdgeDirection::Backward => EdgeDirection::Backward,
EdgeDirection::Bidirectional => EdgeDirection::Bidirectional,
},
EdgeDirection::Bidirectional => EdgeDirection::Bidirectional,
}
}
}
impl Artifact {
/// The IDs pointing back to prior nodes in a depth-first traversal of
/// the graph. This should be disjoint with `child_ids`.
pub(crate) fn back_edges(&self) -> Vec<ArtifactId> {
match self {
Artifact::Plane(_) => Vec::new(),
Artifact::Path(a) => vec![a.plane_id],
Artifact::Segment(a) => vec![a.path_id],
Artifact::Solid2d(a) => vec![a.path_id],
Artifact::StartSketchOnFace(a) => vec![a.face_id],
Artifact::StartSketchOnPlane(a) => vec![a.plane_id],
Artifact::Sweep(a) => vec![a.path_id],
Artifact::Wall(a) => vec![a.seg_id, a.sweep_id],
Artifact::Cap(a) => vec![a.sweep_id],
Artifact::SweepEdge(a) => vec![a.seg_id, a.sweep_id],
Artifact::EdgeCut(a) => vec![a.consumed_edge_id],
Artifact::EdgeCutEdge(a) => vec![a.edge_cut_id],
Artifact::Helix(a) => a.axis_id.map(|id| vec![id]).unwrap_or_default(),
}
}
/// The child IDs of this artifact, used to do a depth-first traversal of
/// the graph.
pub(crate) fn child_ids(&self) -> Vec<ArtifactId> {
match self {
Artifact::Plane(a) => a.path_ids.clone(),
Artifact::Path(a) => {
// Note: Don't include these since they're parents: plane_id.
let mut ids = a.seg_ids.clone();
if let Some(sweep_id) = a.sweep_id {
ids.push(sweep_id);
}
if let Some(solid2d_id) = a.solid2d_id {
ids.push(solid2d_id);
}
ids
}
Artifact::Segment(a) => {
// Note: Don't include these since they're parents: path_id.
let mut ids = Vec::new();
if let Some(surface_id) = a.surface_id {
ids.push(surface_id);
}
ids.extend(&a.edge_ids);
if let Some(edge_cut_id) = a.edge_cut_id {
ids.push(edge_cut_id);
}
ids
}
Artifact::Solid2d(_) => {
// Note: Don't include these since they're parents: path_id.
Vec::new()
}
Artifact::StartSketchOnFace { .. } => {
// Note: Don't include these since they're parents: face_id.
Vec::new()
}
Artifact::StartSketchOnPlane { .. } => {
// Note: Don't include these since they're parents: plane_id.
Vec::new()
}
Artifact::Sweep(a) => {
// Note: Don't include these since they're parents: path_id.
let mut ids = Vec::new();
ids.extend(&a.surface_ids);
ids.extend(&a.edge_ids);
ids
}
Artifact::Wall(a) => {
// Note: Don't include these since they're parents: seg_id,
// sweep_id.
let mut ids = Vec::new();
ids.extend(&a.edge_cut_edge_ids);
ids.extend(&a.path_ids);
ids
}
Artifact::Cap(a) => {
// Note: Don't include these since they're parents: sweep_id.
let mut ids = Vec::new();
ids.extend(&a.edge_cut_edge_ids);
ids.extend(&a.path_ids);
ids
}
Artifact::SweepEdge(_) => {
// Note: Don't include these since they're parents: seg_id,
// sweep_id.
Vec::new()
}
Artifact::EdgeCut(a) => {
// Note: Don't include these since they're parents:
// consumed_edge_id.
let mut ids = Vec::new();
ids.extend(&a.edge_ids);
if let Some(surface_id) = a.surface_id {
ids.push(surface_id);
}
ids
}
Artifact::EdgeCutEdge(a) => {
// Note: Don't include these since they're parents: edge_cut_id.
vec![a.surface_id]
}
Artifact::Helix(_) => {
// Note: Don't include these since they're parents: axis_id.
Vec::new()
}
}
}
}
impl ArtifactGraph {
/// Output the Mermaid flowchart for the artifact graph.
pub(crate) fn to_mermaid_flowchart(&self) -> Result<String, std::fmt::Error> {
let mut output = String::new();
output.push_str("```mermaid\n");
output.push_str("flowchart LR\n");
let mut next_id = 1_u32;
let mut stable_id_map = FnvHashMap::default();
for id in self.map.keys() {
stable_id_map.insert(*id, next_id);
next_id = next_id.checked_add(1).unwrap();
}
// Output all nodes first since edge order can change how Mermaid
// lays out nodes. This is also where we output more details about
// the nodes, like their labels.
self.flowchart_nodes(&mut output, &stable_id_map, " ")?;
self.flowchart_edges(&mut output, &stable_id_map, " ")?;
output.push_str("```\n");
Ok(output)
}
/// Output the Mermaid flowchart nodes, one for each artifact.
fn flowchart_nodes<W: Write>(
&self,
output: &mut W,
stable_id_map: &FnvHashMap<ArtifactId, NodeId>,
prefix: &str,
) -> std::fmt::Result {
// Artifact ID of the path is the key. The value is a list of
// artifact IDs in that group.
let mut groups = IndexMap::new();
let mut ungrouped = Vec::new();
for artifact in self.map.values() {
let id = artifact.id();
let grouped = match artifact {
Artifact::Plane(_) => false,
Artifact::Path(_) => {
groups.entry(id).or_insert_with(Vec::new).push(id);
true
}
Artifact::Segment(segment) => {
let path_id = segment.path_id;
groups.entry(path_id).or_insert_with(Vec::new).push(id);
true
}
Artifact::Solid2d(solid2d) => {
let path_id = solid2d.path_id;
groups.entry(path_id).or_insert_with(Vec::new).push(id);
true
}
Artifact::StartSketchOnFace { .. }
| Artifact::StartSketchOnPlane { .. }
| Artifact::Sweep(_)
| Artifact::Wall(_)
| Artifact::Cap(_)
| Artifact::SweepEdge(_)
| Artifact::EdgeCut(_)
| Artifact::EdgeCutEdge(_)
| Artifact::Helix(_) => false,
};
if !grouped {
ungrouped.push(id);
}
}
for (group_id, artifact_ids) in groups {
let group_id = *stable_id_map.get(&group_id).unwrap();
writeln!(output, "{prefix}subgraph path{group_id} [Path]")?;
let indented = format!("{} ", prefix);
for artifact_id in artifact_ids {
let artifact = self.map.get(&artifact_id).unwrap();
let id = *stable_id_map.get(&artifact_id).unwrap();
self.flowchart_node(output, artifact, id, &indented)?;
}
writeln!(output, "{prefix}end")?;
}
for artifact_id in ungrouped {
let artifact = self.map.get(&artifact_id).unwrap();
let id = *stable_id_map.get(&artifact_id).unwrap();
self.flowchart_node(output, artifact, id, prefix)?;
}
Ok(())
}
fn flowchart_node<W: Write>(
&self,
output: &mut W,
artifact: &Artifact,
id: NodeId,
prefix: &str,
) -> std::fmt::Result {
// For now, only showing the source range.
fn code_ref_display(code_ref: &CodeRef) -> [usize; 3] {
let range = code_ref.range;
[range.start(), range.end(), range.module_id().as_usize()]
}
match artifact {
Artifact::Plane(plane) => {
writeln!(
output,
"{prefix}{}[\"Plane<br>{:?}\"]",
id,
code_ref_display(&plane.code_ref)
)?;
}
Artifact::Path(path) => {
writeln!(
output,
"{prefix}{}[\"Path<br>{:?}\"]",
id,
code_ref_display(&path.code_ref)
)?;
}
Artifact::Segment(segment) => {
writeln!(
output,
"{prefix}{}[\"Segment<br>{:?}\"]",
id,
code_ref_display(&segment.code_ref)
)?;
}
Artifact::Solid2d(_solid2d) => {
writeln!(output, "{prefix}{}[Solid2d]", id)?;
}
Artifact::StartSketchOnFace(StartSketchOnFace { code_ref, .. }) => {
writeln!(
output,
"{prefix}{}[\"StartSketchOnFace<br>{:?}\"]",
id,
code_ref_display(code_ref)
)?;
}
Artifact::StartSketchOnPlane(StartSketchOnPlane { code_ref, .. }) => {
writeln!(
output,
"{prefix}{}[\"StartSketchOnPlane<br>{:?}\"]",
id,
code_ref_display(code_ref)
)?;
}
Artifact::Sweep(sweep) => {
writeln!(
output,
"{prefix}{}[\"Sweep {:?}<br>{:?}\"]",
id,
sweep.sub_type,
code_ref_display(&sweep.code_ref)
)?;
}
Artifact::Wall(_wall) => {
writeln!(output, "{prefix}{}[Wall]", id)?;
}
Artifact::Cap(cap) => {
writeln!(output, "{prefix}{}[\"Cap {:?}\"]", id, cap.sub_type)?;
}
Artifact::SweepEdge(sweep_edge) => {
writeln!(output, "{prefix}{}[\"SweepEdge {:?}\"]", id, sweep_edge.sub_type)?;
}
Artifact::EdgeCut(edge_cut) => {
writeln!(
output,
"{prefix}{}[\"EdgeCut {:?}<br>{:?}\"]",
id,
edge_cut.sub_type,
code_ref_display(&edge_cut.code_ref)
)?;
}
Artifact::EdgeCutEdge(_edge_cut_edge) => {
writeln!(output, "{prefix}{}[EdgeCutEdge]", id)?;
}
Artifact::Helix(helix) => {
writeln!(
output,
"{prefix}{}[\"Helix<br>{:?}\"]",
id,
code_ref_display(&helix.code_ref)
)?;
}
}
Ok(())
}
fn flowchart_edges<W: Write>(
&self,
output: &mut W,
stable_id_map: &FnvHashMap<ArtifactId, NodeId>,
prefix: &str,
) -> Result<(), std::fmt::Error> {
// Mermaid will display two separate edges if we emit one in each direction,
// even using the `---` edge type. So we need to deduplicate them.
fn add_unique_edge(edges: &mut Edges, source_id: NodeId, target_id: NodeId, flow: EdgeFlow, kind: EdgeKind) {
if source_id == target_id {
// Self edge. Skip it.
return;
}
// The key is the node IDs in canonical order.
let a = source_id.min(target_id);
let b = source_id.max(target_id);
let new_direction = if a == source_id {
EdgeDirection::Forward
} else {
EdgeDirection::Backward
};
let initial_flow = if a == source_id { flow } else { flow.reverse() };
let edge = edges.entry((a, b)).or_insert(EdgeInfo {
direction: new_direction,
flow: initial_flow,
kind,
});
// Merge with existing edge.
edge.direction = edge.direction.merge(new_direction);
}
// Collect all edges to deduplicate them.
let mut edges = IndexMap::default();
for artifact in self.map.values() {
let source_id = *stable_id_map.get(&artifact.id()).unwrap();
// In Mermaid, the textual order defines the rank, even though the
// edge arrow can go in either direction.
//
// Back edges: parent <- self
// Child edges: self -> child
for (target_id, flow) in artifact
.back_edges()
.into_iter()
.zip(std::iter::repeat(EdgeFlow::TargetToSource))
.chain(
artifact
.child_ids()
.into_iter()
.zip(std::iter::repeat(EdgeFlow::SourceToTarget)),
)
{
let Some(target) = self.map.get(&target_id) else {
continue;
};
let edge_kind = match (artifact, target) {
(Artifact::Path(_), Artifact::Sweep(_)) | (Artifact::Sweep(_), Artifact::Path(_)) => {
EdgeKind::PathToSweep
}
_ => EdgeKind::Other,
};
let target_id = *stable_id_map.get(&target_id).unwrap();
add_unique_edge(&mut edges, source_id, target_id, flow, edge_kind);
}
}
// Output the edges.
for ((source_id, target_id), edge) in edges {
let extra = match edge.kind {
// Extra length. This is needed to make the graph layout more
// legible. Without it, the sweep will be at the same rank as
// the path's segments, and the sweep's edges overlap with the
// segment edges a lot.
EdgeKind::PathToSweep => "-",
EdgeKind::Other => "",
};
match edge.flow {
EdgeFlow::SourceToTarget => match edge.direction {
EdgeDirection::Forward => {
writeln!(output, "{prefix}{source_id} x{}--> {}", extra, target_id)?;
}
EdgeDirection::Backward => {
writeln!(output, "{prefix}{source_id} <{}--x {}", extra, target_id)?;
}
EdgeDirection::Bidirectional => {
writeln!(output, "{prefix}{source_id} {}--- {}", extra, target_id)?;
}
},
EdgeFlow::TargetToSource => match edge.direction {
EdgeDirection::Forward => {
writeln!(output, "{prefix}{target_id} x{}--> {}", extra, source_id)?;
}
EdgeDirection::Backward => {
writeln!(output, "{prefix}{target_id} <{}--x {}", extra, source_id)?;
}
EdgeDirection::Bidirectional => {
writeln!(output, "{prefix}{target_id} {}--- {}", extra, source_id)?;
}
},
}
}
Ok(())
}
}
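// Illustrative only (not part of the original change): the rough shape of the
// output produced by `to_mermaid_flowchart`, with made-up node IDs and source
// ranges. Node labels are the `[start, end, module_id]` triples emitted by
// `code_ref_display`.
//
//   ```mermaid
//   flowchart LR
//     subgraph path2 [Path]
//       2["Path<br>[30, 64, 0]"]
//       3["Segment<br>[70, 105, 0]"]
//     end
//     1["Plane<br>[12, 22, 0]"]
//     1 --- 2
//     2 --- 3
//   ```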


@@ -0,0 +1,612 @@
//! Functions for helping with caching an ast and finding the parts that changed.
use std::sync::Arc;
use itertools::{EitherOrBoth, Itertools};
use tokio::sync::RwLock;
use crate::{
execution::{annotations, memory::ProgramMemory, EnvironmentRef, ExecState, ExecutorSettings},
parsing::ast::types::{Annotation, Node, Program},
walk::Node as WalkNode,
};
lazy_static::lazy_static! {
/// A static mutable lock for updating the last successful execution state for the cache.
static ref OLD_AST: Arc<RwLock<Option<OldAstState>>> = Default::default();
// The last successful run's memory. Not cleared after an unsuccessful run.
static ref PREV_MEMORY: Arc<RwLock<Option<ProgramMemory>>> = Default::default();
}
/// Read the old ast memory from the lock.
pub(super) async fn read_old_ast() -> Option<OldAstState> {
let old_ast = OLD_AST.read().await;
old_ast.clone()
}
pub(super) async fn write_old_ast(old_state: OldAstState) {
let mut old_ast = OLD_AST.write().await;
*old_ast = Some(old_state);
}
pub(super) async fn read_old_memory() -> Option<ProgramMemory> {
let old_mem = PREV_MEMORY.read().await;
old_mem.clone()
}
pub(super) async fn write_old_memory(mem: ProgramMemory) {
let mut old_mem = PREV_MEMORY.write().await;
*old_mem = Some(mem);
}
pub async fn bust_cache() {
let mut old_ast = OLD_AST.write().await;
*old_ast = None;
}
pub async fn clear_mem_cache() {
let mut old_mem = PREV_MEMORY.write().await;
*old_mem = None;
}
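// Illustrative only (not part of the original change): the intended call order
// for this module-level cache, as used from the parent execution module.
//
//   1. Before a run: `read_old_ast()` / `read_old_memory()` to fetch the last
//      successful state, then `get_changed_program(old, new)` to decide what
//      actually needs to be re-executed.
//   2. After a successful run: `write_old_ast(..)` and `write_old_memory(..)`.
//   3. On a forced refresh (e.g. a scene reset): `bust_cache()` and
//      `clear_mem_cache()`.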
/// Information for caching an AST and smartly re-executing it if we can.
#[derive(Debug, Clone)]
pub struct CacheInformation<'a> {
pub ast: &'a Node<Program>,
pub settings: &'a ExecutorSettings,
}
/// The old ast and program memory.
#[derive(Debug, Clone)]
pub struct OldAstState {
/// The ast.
pub ast: Node<Program>,
/// The exec state.
pub exec_state: ExecState,
/// The last settings used for execution.
pub settings: crate::execution::ExecutorSettings,
pub result_env: EnvironmentRef,
}
/// The result of a cache check.
#[derive(Debug, Clone, PartialEq)]
#[allow(clippy::large_enum_variant)]
pub(super) enum CacheResult {
ReExecute {
/// Should we clear the scene and start over?
clear_scene: bool,
/// Do we need to reapply settings?
reapply_settings: bool,
/// The program that needs to be executed.
program: Node<Program>,
},
/// Argument is whether we need to reapply settings.
NoAction(bool),
}
/// Given an old ast, old program memory and new ast, find the parts of the code that need to be
/// re-executed.
/// This function should never error, because in the case of any internal error, we should just pop
/// the cache.
///
/// Returns [`CacheResult::NoAction`] when there are no changes to the program,
/// i.e. it is fully cached.
pub(super) async fn get_changed_program(old: CacheInformation<'_>, new: CacheInformation<'_>) -> CacheResult {
let mut reapply_settings = false;
// If the settings are different we might need to bust the cache.
// We specifically do this before checking if they are the exact same.
if old.settings != new.settings {
// If the units are different we need to re-execute the whole thing.
if old.settings.units != new.settings.units {
return CacheResult::ReExecute {
clear_scene: true,
reapply_settings: true,
program: new.ast.clone(),
};
}
// If anything else is different we may not need to re-execute, but rather just
// run the settings again.
reapply_settings = true;
}
// If the ASTs are the EXACT same we return None.
// We don't even need to waste time computing the digests.
if old.ast == new.ast {
return CacheResult::NoAction(reapply_settings);
}
// We have to clone just because the digests are stored inline :-(
let mut old_ast = old.ast.clone();
let mut new_ast = new.ast.clone();
// The digests should already be computed, but recompute them just in case so
// we don't end up comparing against None.
old_ast.compute_digest();
new_ast.compute_digest();
// Check if the digest is the same.
if old_ast.digest == new_ast.digest {
return CacheResult::NoAction(reapply_settings);
}
// Check if the annotations are different.
if !old_ast
.inner_attrs
.iter()
.filter(annotations::is_significant)
.zip_longest(new_ast.inner_attrs.iter().filter(annotations::is_significant))
.all(|pair| {
match pair {
EitherOrBoth::Both(old, new) => {
// Compare annotations, ignoring source ranges. Digests must
// have been computed before this.
let Annotation { name, properties, .. } = &old.inner;
let Annotation {
name: new_name,
properties: new_properties,
..
} = &new.inner;
name.as_ref().map(|n| n.digest) == new_name.as_ref().map(|n| n.digest)
&& properties
.as_ref()
.map(|props| props.iter().map(|p| p.digest).collect::<Vec<_>>())
== new_properties
.as_ref()
.map(|props| props.iter().map(|p| p.digest).collect::<Vec<_>>())
}
_ => false,
}
})
{
// If any of the annotations are different at the beginning of the
// program, it's likely the settings, and we have to bust the cache and
// re-execute the whole thing.
return CacheResult::ReExecute {
clear_scene: true,
reapply_settings: true,
program: new.ast.clone(),
};
}
// Check if the changes were only to non-code areas, like comments or whitespace.
generate_changed_program(old_ast, new_ast, reapply_settings)
}
/// Force-generate a new CacheResult, even if one shouldn't be made. The
/// way in which this gets invoked should always be through
/// [get_changed_program]. This is purely to contain the logic on
/// how we construct a new [CacheResult].
///
/// Digests *must* be computed before calling this.
fn generate_changed_program(old_ast: Node<Program>, mut new_ast: Node<Program>, reapply_settings: bool) -> CacheResult {
if !old_ast.body.iter().zip(new_ast.body.iter()).all(|(old, new)| {
let old_node: WalkNode = old.into();
let new_node: WalkNode = new.into();
old_node.digest() == new_node.digest()
}) {
// If any of the nodes are different in the stretch of body that
// overlaps, we have to bust cache and rebuild the scene. This
// means a single insertion or deletion will result in a cache
// bust.
return CacheResult::ReExecute {
clear_scene: true,
reapply_settings,
program: new_ast,
};
}
// otherwise the overlapping section of the ast bodies matches.
// Let's see what the rest of the slice looks like.
match new_ast.body.len().cmp(&old_ast.body.len()) {
std::cmp::Ordering::Less => {
// the new AST is shorter than the old AST -- statements
// were removed from the "current" code in the "new" code.
//
// Statements up until now match which means this is a
// "pure delete" of the remaining slice, when we get to
// supporting that.
// Cache bust time.
CacheResult::ReExecute {
clear_scene: true,
reapply_settings,
program: new_ast,
}
}
std::cmp::Ordering::Greater => {
// the new AST is longer than the old AST, which means
// statements were added to the new code we haven't previously
// seen.
//
// Statements up until now are the same, which means this
// is a "pure addition" of the remaining slice.
new_ast.body = new_ast.body[old_ast.body.len()..].to_owned();
CacheResult::ReExecute {
clear_scene: false,
reapply_settings,
program: new_ast,
}
}
std::cmp::Ordering::Equal => {
// currently unreachable, but let's pretend like the code
// above can do something meaningful here for when we get
// to diffing and yanking chunks of the program apart.
// We don't actually want to do anything here, so we don't clear the
// scene and take no action. Is this wrong? I don't think so, but I
// think many things. This definitely needs to change when the code
// above changes.
CacheResult::NoAction(reapply_settings)
}
}
}
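// Illustrative only (not part of the original change): given an old body
// [A, B, C] and a new body [A, B, C, D] with matching digests for A..C, the
// Greater arm above trims the program to just [D] and re-executes it without
// clearing the scene. A deletion ([A, B]) or an in-place edit ([A, X, C]) both
// bust the cache and clear the scene.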
#[cfg(test)]
mod tests {
use super::*;
use crate::execution::parse_execute;
#[tokio::test(flavor = "multi_thread")]
async fn test_get_changed_program_same_code() {
let new = r#"// Remove the end face for the extrusion.
firstSketch = startSketchOn('XY')
|> startProfileAt([-12, 12], %)
|> line(end = [24, 0])
|> line(end = [0, -24])
|> line(end = [-24, 0])
|> close()
|> extrude(length = 6)
// Remove the end face for the extrusion.
shell(firstSketch, faces = ['end'], thickness = 0.25)"#;
let (program, _, ctx, _) = parse_execute(new).await.unwrap();
let result = get_changed_program(
CacheInformation {
ast: &program.ast,
settings: &ctx.settings,
},
CacheInformation {
ast: &program.ast,
settings: &ctx.settings,
},
)
.await;
assert_eq!(result, CacheResult::NoAction(false));
}
#[tokio::test(flavor = "multi_thread")]
async fn test_get_changed_program_same_code_changed_whitespace() {
let old = r#" // Remove the end face for the extrusion.
firstSketch = startSketchOn('XY')
|> startProfileAt([-12, 12], %)
|> line(end = [24, 0])
|> line(end = [0, -24])
|> line(end = [-24, 0])
|> close()
|> extrude(length = 6)
// Remove the end face for the extrusion.
shell(firstSketch, faces = ['end'], thickness = 0.25) "#;
let new = r#"// Remove the end face for the extrusion.
firstSketch = startSketchOn('XY')
|> startProfileAt([-12, 12], %)
|> line(end = [24, 0])
|> line(end = [0, -24])
|> line(end = [-24, 0])
|> close()
|> extrude(length = 6)
// Remove the end face for the extrusion.
shell(firstSketch, faces = ['end'], thickness = 0.25)"#;
let (program_old, _, ctx, _) = parse_execute(old).await.unwrap();
let program_new = crate::Program::parse_no_errs(new).unwrap();
let result = get_changed_program(
CacheInformation {
ast: &program_old.ast,
settings: &ctx.settings,
},
CacheInformation {
ast: &program_new.ast,
settings: &ctx.settings,
},
)
.await;
assert_eq!(result, CacheResult::NoAction(false));
}
#[tokio::test(flavor = "multi_thread")]
async fn test_get_changed_program_same_code_changed_code_comment_start_of_program() {
let old = r#" // Removed the end face for the extrusion.
firstSketch = startSketchOn('XY')
|> startProfileAt([-12, 12], %)
|> line(end = [24, 0])
|> line(end = [0, -24])
|> line(end = [-24, 0])
|> close()
|> extrude(length = 6)
// Remove the end face for the extrusion.
shell(firstSketch, faces = ['end'], thickness = 0.25) "#;
let new = r#"// Remove the end face for the extrusion.
firstSketch = startSketchOn('XY')
|> startProfileAt([-12, 12], %)
|> line(end = [24, 0])
|> line(end = [0, -24])
|> line(end = [-24, 0])
|> close()
|> extrude(length = 6)
// Remove the end face for the extrusion.
shell(firstSketch, faces = ['end'], thickness = 0.25)"#;
let (program, _, ctx, _) = parse_execute(old).await.unwrap();
let program_new = crate::Program::parse_no_errs(new).unwrap();
let result = get_changed_program(
CacheInformation {
ast: &program.ast,
settings: &ctx.settings,
},
CacheInformation {
ast: &program_new.ast,
settings: &ctx.settings,
},
)
.await;
assert_eq!(result, CacheResult::NoAction(false));
}
#[tokio::test(flavor = "multi_thread")]
async fn test_get_changed_program_same_code_changed_code_comments_attrs() {
let old = r#"@foo(whatever = whatever)
@bar
// Removed the end face for the extrusion.
firstSketch = startSketchOn('XY')
|> startProfileAt([-12, 12], %)
|> line(end = [24, 0])
|> line(end = [0, -24])
|> line(end = [-24, 0]) // my thing
|> close()
|> extrude(length = 6)
// Remove the end face for the extrusion.
shell(firstSketch, faces = ['end'], thickness = 0.25) "#;
let new = r#"@foo(whatever = 42)
@baz
// Remove the end face for the extrusion.
firstSketch = startSketchOn('XY')
|> startProfileAt([-12, 12], %)
|> line(end = [24, 0])
|> line(end = [0, -24])
|> line(end = [-24, 0])
|> close()
|> extrude(length = 6)
// Remove the end face for the extrusion.
shell(firstSketch, faces = ['end'], thickness = 0.25)"#;
let (program, _, ctx, _) = parse_execute(old).await.unwrap();
let program_new = crate::Program::parse_no_errs(new).unwrap();
let result = get_changed_program(
CacheInformation {
ast: &program.ast,
settings: &ctx.settings,
},
CacheInformation {
ast: &program_new.ast,
settings: &ctx.settings,
},
)
.await;
assert_eq!(result, CacheResult::NoAction(false));
}
// Changing the units with the exact same file should bust the cache.
#[tokio::test(flavor = "multi_thread")]
async fn test_get_changed_program_same_code_but_different_units() {
let new = r#"// Remove the end face for the extrusion.
firstSketch = startSketchOn('XY')
|> startProfileAt([-12, 12], %)
|> line(end = [24, 0])
|> line(end = [0, -24])
|> line(end = [-24, 0])
|> close()
|> extrude(length = 6)
// Remove the end face for the extrusion.
shell(firstSketch, faces = ['end'], thickness = 0.25)"#;
let (program, _, mut ctx, _) = parse_execute(new).await.unwrap();
// Change the settings to cm.
ctx.settings.units = crate::UnitLength::Cm;
let result = get_changed_program(
CacheInformation {
ast: &program.ast,
settings: &Default::default(),
},
CacheInformation {
ast: &program.ast,
settings: &ctx.settings,
},
)
.await;
assert_eq!(
result,
CacheResult::ReExecute {
clear_scene: true,
reapply_settings: true,
program: program.ast
}
);
}
// Changing the grid settings with the exact same file should NOT bust the cache.
#[tokio::test(flavor = "multi_thread")]
async fn test_get_changed_program_same_code_but_different_grid_setting() {
let new = r#"// Remove the end face for the extrusion.
firstSketch = startSketchOn('XY')
|> startProfileAt([-12, 12], %)
|> line(end = [24, 0])
|> line(end = [0, -24])
|> line(end = [-24, 0])
|> close()
|> extrude(length = 6)
// Remove the end face for the extrusion.
shell(firstSketch, faces = ['end'], thickness = 0.25)"#;
let (program, _, mut ctx, _) = parse_execute(new).await.unwrap();
// Change the settings.
ctx.settings.show_grid = !ctx.settings.show_grid;
let result = get_changed_program(
CacheInformation {
ast: &program.ast,
settings: &Default::default(),
},
CacheInformation {
ast: &program.ast,
settings: &ctx.settings,
},
)
.await;
assert_eq!(result, CacheResult::NoAction(true));
}
// Changing the edge visibility settings with the exact same file should NOT bust the cache.
#[tokio::test(flavor = "multi_thread")]
async fn test_get_changed_program_same_code_but_different_edge_visibility_setting() {
let new = r#"// Remove the end face for the extrusion.
firstSketch = startSketchOn('XY')
|> startProfileAt([-12, 12], %)
|> line(end = [24, 0])
|> line(end = [0, -24])
|> line(end = [-24, 0])
|> close()
|> extrude(length = 6)
// Remove the end face for the extrusion.
shell(firstSketch, faces = ['end'], thickness = 0.25)"#;
let (program, _, mut ctx, _) = parse_execute(new).await.unwrap();
// Change the settings.
ctx.settings.highlight_edges = !ctx.settings.highlight_edges;
let result = get_changed_program(
CacheInformation {
ast: &program.ast,
settings: &Default::default(),
},
CacheInformation {
ast: &program.ast,
settings: &ctx.settings,
},
)
.await;
assert_eq!(result, CacheResult::NoAction(true));
// Change the settings back.
let old_settings = ctx.settings.clone();
ctx.settings.highlight_edges = !ctx.settings.highlight_edges;
let result = get_changed_program(
CacheInformation {
ast: &program.ast,
settings: &old_settings,
},
CacheInformation {
ast: &program.ast,
settings: &ctx.settings,
},
)
.await;
assert_eq!(result, CacheResult::NoAction(true));
// Change the settings back.
let old_settings = ctx.settings.clone();
ctx.settings.highlight_edges = !ctx.settings.highlight_edges;
let result = get_changed_program(
CacheInformation {
ast: &program.ast,
settings: &old_settings,
},
CacheInformation {
ast: &program.ast,
settings: &ctx.settings,
},
)
.await;
assert_eq!(result, CacheResult::NoAction(true));
}
// Changing the units settings using an annotation with the exact same file
// should bust the cache.
#[tokio::test(flavor = "multi_thread")]
async fn test_get_changed_program_same_code_but_different_unit_setting_using_annotation() {
let old_code = r#"@settings(defaultLengthUnit = in)
startSketchOn('XY')
"#;
let new_code = r#"@settings(defaultLengthUnit = mm)
startSketchOn('XY')
"#;
let (program, _, ctx, _) = parse_execute(old_code).await.unwrap();
let mut new_program = crate::Program::parse_no_errs(new_code).unwrap();
new_program.compute_digest();
let result = get_changed_program(
CacheInformation {
ast: &program.ast,
settings: &ctx.settings,
},
CacheInformation {
ast: &new_program.ast,
settings: &ctx.settings,
},
)
.await;
assert_eq!(
result,
CacheResult::ReExecute {
clear_scene: true,
reapply_settings: true,
program: new_program.ast
}
);
}
}


@@ -0,0 +1,299 @@
use indexmap::IndexMap;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use super::{kcl_value::NumericType, ArtifactId, KclValue};
use crate::{docs::StdLibFn, std::get_stdlib_fn, SourceRange};
/// A CAD modeling operation for display in the feature tree, AKA operations
/// timeline.
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
#[ts(export)]
#[serde(tag = "type")]
pub enum Operation {
#[serde(rename_all = "camelCase")]
StdLibCall {
/// The standard library function being called.
#[serde(flatten)]
std_lib_fn: StdLibFnRef,
/// The unlabeled argument to the function.
unlabeled_arg: Option<OpArg>,
/// The labeled keyword arguments to the function.
labeled_args: IndexMap<String, OpArg>,
/// The source range of the operation in the source code.
source_range: SourceRange,
/// True if the operation resulted in an error.
#[serde(default, skip_serializing_if = "is_false")]
is_error: bool,
},
#[serde(rename_all = "camelCase")]
UserDefinedFunctionCall {
/// The name of the user-defined function being called. Anonymous
/// functions have no name.
name: Option<String>,
/// The location of the function being called so that there's enough
/// info to go to its definition.
function_source_range: SourceRange,
/// The unlabeled argument to the function.
unlabeled_arg: Option<OpArg>,
/// The labeled keyword arguments to the function.
labeled_args: IndexMap<String, OpArg>,
/// The source range of the operation in the source code.
source_range: SourceRange,
},
UserDefinedFunctionReturn,
}
impl Operation {
/// If the variant is `StdLibCall`, set the `is_error` field.
pub(crate) fn set_std_lib_call_is_error(&mut self, is_err: bool) {
match self {
Self::StdLibCall { ref mut is_error, .. } => *is_error = is_err,
Self::UserDefinedFunctionCall { .. } | Self::UserDefinedFunctionReturn => {}
}
}
}
/// An argument to a CAD modeling operation.
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct OpArg {
/// The runtime value of the argument. Instead of using [`KclValue`], we
/// refer to scene objects using their [`ArtifactId`]s.
value: OpKclValue,
/// The KCL code expression for the argument. This is used in the UI so
/// that the user can edit the expression.
source_range: SourceRange,
}
impl OpArg {
pub(crate) fn new(value: OpKclValue, source_range: SourceRange) -> Self {
Self { value, source_range }
}
}
/// A reference to a standard library function. This exists to implement
/// `PartialEq` and `Eq` for `Operation`.
#[derive(Debug, Clone, Deserialize, Serialize, ts_rs::TS, JsonSchema)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct StdLibFnRef {
// The following doc comment gets inlined into Operation in the generated TS,
// overriding what's there. We serialize the function to its name. Renaming the
// field to "name" allows it to match the other variant.
/// The standard library function being called.
#[serde(
rename = "name",
serialize_with = "std_lib_fn_name",
deserialize_with = "std_lib_fn_from_name"
)]
#[ts(type = "string", rename = "name")]
pub std_lib_fn: Box<dyn StdLibFn>,
}
impl StdLibFnRef {
pub(crate) fn new(std_lib_fn: Box<dyn StdLibFn>) -> Self {
Self { std_lib_fn }
}
}
impl From<&Box<dyn StdLibFn>> for StdLibFnRef {
fn from(std_lib_fn: &Box<dyn StdLibFn>) -> Self {
Self::new(std_lib_fn.clone())
}
}
impl PartialEq for StdLibFnRef {
fn eq(&self, other: &Self) -> bool {
self.std_lib_fn.name() == other.std_lib_fn.name()
}
}
impl Eq for StdLibFnRef {}
#[expect(clippy::borrowed_box, reason = "Explicit Box is needed for serde")]
fn std_lib_fn_name<S>(std_lib_fn: &Box<dyn StdLibFn>, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
let name = std_lib_fn.name();
serializer.serialize_str(&name)
}
fn std_lib_fn_from_name<'de, D>(deserializer: D) -> Result<Box<dyn StdLibFn>, D::Error>
where
D: serde::Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
if let Some(std_lib_fn) = get_stdlib_fn(&s) {
Ok(std_lib_fn)
} else {
Err(serde::de::Error::custom(format!("not a KCL stdlib function: {}", s)))
}
}
fn is_false(b: &bool) -> bool {
!*b
}
/// A KCL value used in Operations. `ArtifactId`s are used to refer to the
/// actual scene objects. Any data not needed in the UI may be omitted.
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
#[ts(export)]
#[serde(tag = "type")]
pub enum OpKclValue {
Uuid {
value: ::uuid::Uuid,
},
Bool {
value: bool,
},
Number {
value: f64,
ty: NumericType,
},
String {
value: String,
},
Array {
value: Vec<OpKclValue>,
},
Object {
value: OpKclObjectFields,
},
TagIdentifier {
/// The name of the tag identifier.
value: String,
/// The artifact ID of the object it refers to.
artifact_id: Option<ArtifactId>,
},
TagDeclarator {
name: String,
},
Plane {
artifact_id: ArtifactId,
},
Face {
artifact_id: ArtifactId,
},
Sketch {
value: Box<OpSketch>,
},
Sketches {
value: Vec<OpSketch>,
},
Solid {
value: Box<OpSolid>,
},
Solids {
value: Vec<OpSolid>,
},
Helix {
value: Box<OpHelix>,
},
ImportedGeometry {
artifact_id: ArtifactId,
},
Function {},
Module {},
KclNone {},
}
pub type OpKclObjectFields = IndexMap<String, OpKclValue>;
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct OpSketch {
artifact_id: ArtifactId,
}
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct OpSolid {
artifact_id: ArtifactId,
}
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct OpHelix {
artifact_id: ArtifactId,
}
impl From<&KclValue> for OpKclValue {
fn from(value: &KclValue) -> Self {
match value {
KclValue::Uuid { value, .. } => Self::Uuid { value: *value },
KclValue::Bool { value, .. } => Self::Bool { value: *value },
KclValue::Number { value, ty, .. } => Self::Number {
value: *value,
ty: ty.clone(),
},
KclValue::String { value, .. } => Self::String { value: value.clone() },
KclValue::Array { value, .. } => {
let value = value.iter().map(Self::from).collect();
Self::Array { value }
}
KclValue::Object { value, .. } => {
let value = value.iter().map(|(k, v)| (k.clone(), Self::from(v))).collect();
Self::Object { value }
}
KclValue::TagIdentifier(tag_identifier) => Self::TagIdentifier {
value: tag_identifier.value.clone(),
artifact_id: tag_identifier.info.as_ref().map(|info| ArtifactId::new(info.id)),
},
KclValue::TagDeclarator(node) => Self::TagDeclarator {
name: node.name.clone(),
},
KclValue::Plane { value } => Self::Plane {
artifact_id: value.artifact_id,
},
KclValue::Face { value } => Self::Face {
artifact_id: value.artifact_id,
},
KclValue::Sketch { value } => Self::Sketch {
value: Box::new(OpSketch {
artifact_id: value.artifact_id,
}),
},
KclValue::Sketches { value } => {
let value = value
.iter()
.map(|sketch| OpSketch {
artifact_id: sketch.artifact_id,
})
.collect();
Self::Sketches { value }
}
KclValue::Solid { value } => Self::Solid {
value: Box::new(OpSolid {
artifact_id: value.artifact_id,
}),
},
KclValue::Solids { value } => {
let value = value
.iter()
.map(|solid| OpSolid {
artifact_id: solid.artifact_id,
})
.collect();
Self::Solids { value }
}
KclValue::Helix { value } => Self::Helix {
value: Box::new(OpHelix {
artifact_id: value.artifact_id,
}),
},
KclValue::ImportedGeometry(imported_geometry) => Self::ImportedGeometry {
artifact_id: ArtifactId::new(imported_geometry.id),
},
KclValue::Function { .. } => Self::Function {},
KclValue::Module { .. } => Self::Module {},
KclValue::KclNone { .. } => Self::KclNone {},
KclValue::Tombstone { .. } => unreachable!("Tombstone OpKclValue"),
}
}
}
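// Illustrative only (not part of the original change): a minimal sketch of how an
// `Operation` serializes for the frontend, assuming `serde_json` is available as a
// dev-dependency. Field casing follows the `rename_all = "camelCase"` attributes
// above, and the enum tag is emitted under "type".
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn operation_serializes_with_type_tag() {
        let op = Operation::UserDefinedFunctionCall {
            name: Some("myFn".to_owned()),
            function_source_range: SourceRange::default(),
            unlabeled_arg: None,
            labeled_args: IndexMap::new(),
            source_range: SourceRange::default(),
        };
        let json = serde_json::to_string(&op).unwrap();
        assert!(json.contains("\"type\":\"UserDefinedFunctionCall\""));
        assert!(json.contains("\"functionSourceRange\""));
    }
}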

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -0,0 +1,536 @@
use std::{ffi::OsStr, path::Path, str::FromStr};
use anyhow::Result;
use kcmc::{
coord::{System, KITTYCAD},
each_cmd as mcmd,
format::InputFormat3d,
ok_response::OkModelingCmdResponse,
shared::FileImportFormat,
units::UnitLength,
websocket::OkWebSocketResponseData,
ImportFile, ModelingCmd,
};
use kittycad_modeling_cmds as kcmc;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::{
errors::{KclError, KclErrorDetails},
execution::{annotations, kcl_value::UnitLen, ExecState, ExecutorContext, ImportedGeometry},
fs::FileSystem,
parsing::ast::types::{Annotation, Node},
source_range::SourceRange,
};
// Zoo co-ordinate system.
//
// * Forward: -Y
// * Up: +Z
// * Handedness: Right
pub const ZOO_COORD_SYSTEM: System = *KITTYCAD;
pub async fn import_foreign(
file_path: &Path,
format: Option<InputFormat3d>,
exec_state: &mut ExecState,
ctxt: &ExecutorContext,
source_range: SourceRange,
) -> Result<PreImportedGeometry, KclError> {
// Make sure the file exists.
if !ctxt.fs.exists(file_path, source_range).await? {
return Err(KclError::Semantic(KclErrorDetails {
message: format!("File `{}` does not exist.", file_path.display()),
source_ranges: vec![source_range],
}));
}
let ext_format =
get_import_format_from_extension(file_path.extension().and_then(OsStr::to_str).ok_or_else(|| {
KclError::Semantic(KclErrorDetails {
message: format!("No file extension found for `{}`", file_path.display()),
source_ranges: vec![source_range],
})
})?)
.map_err(|e| {
KclError::Semantic(KclErrorDetails {
message: e.to_string(),
source_ranges: vec![source_range],
})
})?;
// Get the format type from the extension of the file.
let format = if let Some(format) = format {
// Validate the given format with the extension format.
validate_extension_format(ext_format, format.clone()).map_err(|e| {
KclError::Semantic(KclErrorDetails {
message: e.to_string(),
source_ranges: vec![source_range],
})
})?;
format
} else {
ext_format
};
// Get the file contents for each file path.
let file_contents = ctxt.fs.read(file_path, source_range).await.map_err(|e| {
KclError::Semantic(KclErrorDetails {
message: e.to_string(),
source_ranges: vec![source_range],
})
})?;
// We want the file_path to be without the parent.
let file_name = std::path::Path::new(&file_path)
.file_name()
.map(|p| p.to_string_lossy().to_string())
.ok_or_else(|| {
KclError::Semantic(KclErrorDetails {
message: format!("Could not get the file name from the path `{}`", file_path.display()),
source_ranges: vec![source_range],
})
})?;
let mut import_files = vec![kcmc::ImportFile {
path: file_name.to_string(),
data: file_contents.clone(),
}];
// In the case of a glTF file that references a separate bin file, we need to handle that and
// figure out where the bin file is relative to our current file.
if let InputFormat3d::Gltf(..) = format {
// Check if the file is a binary glTF file; in that case we don't need to import a separate
// bin file.
if !file_contents.starts_with(b"glTF") {
let json = gltf_json::Root::from_slice(&file_contents).map_err(|e| {
KclError::Semantic(KclErrorDetails {
message: e.to_string(),
source_ranges: vec![source_range],
})
})?;
// Read the gltf file and check if there is a bin file.
for buffer in json.buffers.iter() {
if let Some(uri) = &buffer.uri {
if !uri.starts_with("data:") {
// We want this path relative to the file_path given.
let bin_path = std::path::Path::new(&file_path)
.parent()
.map(|p| p.join(uri))
.map(|p| p.to_string_lossy().to_string())
.ok_or_else(|| {
KclError::Semantic(KclErrorDetails {
message: format!(
"Could not get the parent path of the file `{}`",
file_path.display()
),
source_ranges: vec![source_range],
})
})?;
let bin_contents = ctxt.fs.read(&bin_path, source_range).await.map_err(|e| {
KclError::Semantic(KclErrorDetails {
message: e.to_string(),
source_ranges: vec![source_range],
})
})?;
import_files.push(ImportFile {
path: uri.to_string(),
data: bin_contents,
});
}
}
}
}
}
Ok(PreImportedGeometry {
id: exec_state.next_uuid(),
source_range,
command: mcmd::ImportFiles {
files: import_files.clone(),
format,
},
})
}
pub(super) fn format_from_annotations(
annotations: &[Node<Annotation>],
path: &Path,
import_source_range: SourceRange,
) -> Result<Option<InputFormat3d>, KclError> {
if annotations.is_empty() {
return Ok(None);
}
let props = annotations.iter().flat_map(|a| a.properties.as_deref().unwrap_or(&[]));
let mut result = None;
for p in props.clone() {
if p.key.name == annotations::IMPORT_FORMAT {
result = Some(
get_import_format_from_extension(annotations::expect_ident(&p.value)?).map_err(|_| {
KclError::Semantic(KclErrorDetails {
message: format!(
"Unknown format for import, expected one of: {}",
annotations::IMPORT_FORMAT_VALUES.join(", ")
),
source_ranges: vec![p.as_source_range()],
})
})?,
);
break;
}
}
let mut result = result
.or_else(|| {
path.extension()
.and_then(OsStr::to_str)
.and_then(|ext| get_import_format_from_extension(ext).ok())
})
.ok_or(KclError::Semantic(KclErrorDetails {
message: "Unknown or missing extension, and no specified format for imported file".to_owned(),
source_ranges: vec![import_source_range],
}))?;
for p in props {
match p.key.name.as_str() {
annotations::IMPORT_COORDS => {
set_coords(&mut result, annotations::expect_ident(&p.value)?, p.as_source_range())?;
}
annotations::IMPORT_LENGTH_UNIT => {
set_length_unit(&mut result, annotations::expect_ident(&p.value)?, p.as_source_range())?;
}
annotations::IMPORT_FORMAT => {}
_ => {
return Err(KclError::Semantic(KclErrorDetails {
message: format!(
"Unexpected annotation for import, expected one of: {}, {}, {}",
annotations::IMPORT_FORMAT,
annotations::IMPORT_COORDS,
annotations::IMPORT_LENGTH_UNIT
),
source_ranges: vec![p.as_source_range()],
}))
}
}
}
Ok(Some(result))
}
fn set_coords(fmt: &mut InputFormat3d, coords_str: &str, source_range: SourceRange) -> Result<(), KclError> {
let mut coords = None;
for (name, val) in annotations::IMPORT_COORDS_VALUES {
if coords_str == name {
coords = Some(*val);
}
}
let Some(coords) = coords else {
return Err(KclError::Semantic(KclErrorDetails {
message: format!(
"Unknown coordinate system: {coords_str}, expected one of: {}",
annotations::IMPORT_COORDS_VALUES
.iter()
.map(|(n, _)| *n)
.collect::<Vec<_>>()
.join(", ")
),
source_ranges: vec![source_range],
}));
};
match fmt {
InputFormat3d::Obj(opts) => opts.coords = coords,
InputFormat3d::Ply(opts) => opts.coords = coords,
InputFormat3d::Stl(opts) => opts.coords = coords,
_ => {
return Err(KclError::Semantic(KclErrorDetails {
message: format!(
"`{}` option cannot be applied to the specified format",
annotations::IMPORT_COORDS
),
source_ranges: vec![source_range],
}))
}
}
Ok(())
}
fn set_length_unit(fmt: &mut InputFormat3d, units_str: &str, source_range: SourceRange) -> Result<(), KclError> {
let units = UnitLen::from_str(units_str, source_range)?;
match fmt {
InputFormat3d::Obj(opts) => opts.units = units.into(),
InputFormat3d::Ply(opts) => opts.units = units.into(),
InputFormat3d::Stl(opts) => opts.units = units.into(),
_ => {
return Err(KclError::Semantic(KclErrorDetails {
message: format!(
"`{}` option cannot be applied to the specified format",
annotations::IMPORT_LENGTH_UNIT
),
source_ranges: vec![source_range],
}))
}
}
Ok(())
}
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
pub struct PreImportedGeometry {
id: Uuid,
command: mcmd::ImportFiles,
pub source_range: SourceRange,
}
pub async fn send_to_engine(pre: PreImportedGeometry, ctxt: &ExecutorContext) -> Result<ImportedGeometry, KclError> {
if ctxt.no_engine_commands().await {
return Ok(ImportedGeometry {
id: pre.id,
value: pre.command.files.iter().map(|f| f.path.to_string()).collect(),
meta: vec![pre.source_range.into()],
});
}
let resp = ctxt
.engine
.send_modeling_cmd(pre.id, pre.source_range, &ModelingCmd::from(pre.command.clone()))
.await?;
let OkWebSocketResponseData::Modeling {
modeling_response: OkModelingCmdResponse::ImportFiles(imported_files),
} = &resp
else {
return Err(KclError::Engine(KclErrorDetails {
message: format!("ImportFiles response was not as expected: {:?}", resp),
source_ranges: vec![pre.source_range],
}));
};
Ok(ImportedGeometry {
id: imported_files.object_id,
value: pre.command.files.iter().map(|f| f.path.to_string()).collect(),
meta: vec![pre.source_range.into()],
})
}
/// Get the source format from the extension.
fn get_import_format_from_extension(ext: &str) -> Result<InputFormat3d> {
let format = match FileImportFormat::from_str(ext) {
Ok(format) => format,
Err(_) => {
if ext == "stp" {
FileImportFormat::Step
} else if ext == "glb" {
FileImportFormat::Gltf
} else {
anyhow::bail!("unknown source format for file extension: {ext}. Try setting the `--src-format` flag explicitly or use a valid format.")
}
}
};
// Make the default units millimeters.
let ul = UnitLength::Millimeters;
// Zoo co-ordinate system.
//
// * Forward: -Y
// * Up: +Z
// * Handedness: Right
match format {
FileImportFormat::Step => Ok(InputFormat3d::Step(kcmc::format::step::import::Options {
split_closed_faces: false,
})),
FileImportFormat::Stl => Ok(InputFormat3d::Stl(kcmc::format::stl::import::Options {
coords: ZOO_COORD_SYSTEM,
units: ul,
})),
FileImportFormat::Obj => Ok(InputFormat3d::Obj(kcmc::format::obj::import::Options {
coords: ZOO_COORD_SYSTEM,
units: ul,
})),
FileImportFormat::Gltf => Ok(InputFormat3d::Gltf(kcmc::format::gltf::import::Options {})),
FileImportFormat::Ply => Ok(InputFormat3d::Ply(kcmc::format::ply::import::Options {
coords: ZOO_COORD_SYSTEM,
units: ul,
})),
FileImportFormat::Fbx => Ok(InputFormat3d::Fbx(kcmc::format::fbx::import::Options {})),
FileImportFormat::Sldprt => Ok(InputFormat3d::Sldprt(kcmc::format::sldprt::import::Options {
split_closed_faces: false,
})),
}
}
fn validate_extension_format(ext: InputFormat3d, given: InputFormat3d) -> Result<()> {
if let InputFormat3d::Stl(_) = ext {
if let InputFormat3d::Stl(_) = given {
return Ok(());
}
}
if let InputFormat3d::Obj(_) = ext {
if let InputFormat3d::Obj(_) = given {
return Ok(());
}
}
if let InputFormat3d::Ply(_) = ext {
if let InputFormat3d::Ply(_) = given {
return Ok(());
}
}
if ext == given {
return Ok(());
}
anyhow::bail!(
"The given format does not match the file extension. Expected: `{}`, Given: `{}`",
get_name_of_format(ext),
get_name_of_format(given)
)
}
fn get_name_of_format(type_: InputFormat3d) -> &'static str {
match type_ {
InputFormat3d::Fbx(_) => "fbx",
InputFormat3d::Gltf(_) => "gltf",
InputFormat3d::Obj(_) => "obj",
InputFormat3d::Ply(_) => "ply",
InputFormat3d::Sldprt(_) => "sldprt",
InputFormat3d::Step(_) => "step",
InputFormat3d::Stl(_) => "stl",
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn annotations() {
// no annotations
assert!(
format_from_annotations(&[], Path::new("../foo.txt"), SourceRange::default(),)
.unwrap()
.is_none()
);
// no format, no options
let text = "@()\nimport '../foo.gltf' as foo";
let parsed = crate::Program::parse_no_errs(text).unwrap().ast;
let attrs = parsed.body[0].get_attrs();
let fmt = format_from_annotations(attrs, Path::new("../foo.gltf"), SourceRange::default())
.unwrap()
.unwrap();
assert_eq!(
fmt,
InputFormat3d::Gltf(kittycad_modeling_cmds::format::gltf::import::Options {})
);
// format, no options
let text = "@(format = gltf)\nimport '../foo.txt' as foo";
let parsed = crate::Program::parse_no_errs(text).unwrap().ast;
let attrs = parsed.body[0].get_attrs();
let fmt = format_from_annotations(attrs, Path::new("../foo.txt"), SourceRange::default())
.unwrap()
.unwrap();
assert_eq!(
fmt,
InputFormat3d::Gltf(kittycad_modeling_cmds::format::gltf::import::Options {})
);
// format, no extension (wouldn't parse but might some day)
let fmt = format_from_annotations(attrs, Path::new("../foo"), SourceRange::default())
.unwrap()
.unwrap();
assert_eq!(
fmt,
InputFormat3d::Gltf(kittycad_modeling_cmds::format::gltf::import::Options {})
);
// format, options
let text = "@(format = obj, coords = vulkan, lengthUnit = ft)\nimport '../foo.txt' as foo";
let parsed = crate::Program::parse_no_errs(text).unwrap().ast;
let attrs = parsed.body[0].get_attrs();
let fmt = format_from_annotations(attrs, Path::new("../foo.txt"), SourceRange::default())
.unwrap()
.unwrap();
assert_eq!(
fmt,
InputFormat3d::Obj(kittycad_modeling_cmds::format::obj::import::Options {
coords: *kittycad_modeling_cmds::coord::VULKAN,
units: kittycad_modeling_cmds::units::UnitLength::Feet,
})
);
// no format, options
let text = "@(coords = vulkan, lengthUnit = ft)\nimport '../foo.obj' as foo";
let parsed = crate::Program::parse_no_errs(text).unwrap().ast;
let attrs = parsed.body[0].get_attrs();
let fmt = format_from_annotations(attrs, Path::new("../foo.obj"), SourceRange::default())
.unwrap()
.unwrap();
assert_eq!(
fmt,
InputFormat3d::Obj(kittycad_modeling_cmds::format::obj::import::Options {
coords: *kittycad_modeling_cmds::coord::VULKAN,
units: kittycad_modeling_cmds::units::UnitLength::Feet,
})
);
// err - format, options, but no options for specified format
assert_annotation_error(
"@(format = gltf, lengthUnit = ft)\nimport '../foo.txt' as foo",
"../foo.txt",
"`lengthUnit` option cannot be applied",
);
// err - no format, options, but no options for specified format
assert_annotation_error(
"@(lengthUnit = ft)\nimport '../foo.gltf' as foo",
"../foo.gltf",
"lengthUnit` option cannot be applied",
);
// err - bad option
assert_annotation_error(
"@(format = obj, coords = vulkan, lengthUni = ft)\nimport '../foo.txt' as foo",
"../foo.txt",
"Unexpected annotation",
);
// err - bad format
assert_annotation_error(
"@(format = foo)\nimport '../foo.txt' as foo",
"../foo.txt",
"Unknown format for import",
);
// err - bad coord value
assert_annotation_error(
"@(format = gltf, coords = north)\nimport '../foo.txt' as foo",
"../foo.txt",
"Unknown coordinate system",
);
// err - bad unit value
assert_annotation_error(
"@(format = gltf, lengthUnit = gallons)\nimport '../foo.txt' as foo",
"../foo.txt",
"Unexpected value for length units",
);
}
#[track_caller]
fn assert_annotation_error(src: &str, path: &str, expected: &str) {
let parsed = crate::Program::parse_no_errs(src).unwrap().ast;
let attrs = parsed.body[0].get_attrs();
let err = format_from_annotations(attrs, Path::new(path), SourceRange::default()).unwrap_err();
assert!(
err.message().contains(expected),
"Expected: `{expected}`, found `{}`",
err.message()
);
}
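// Illustrative only (not part of the original change): the "stp" and "glb"
// aliases handled in `get_import_format_from_extension` map to STEP and glTF.
#[test]
fn extension_aliases() {
    assert!(matches!(
        get_import_format_from_extension("stp").unwrap(),
        InputFormat3d::Step(_)
    ));
    assert!(matches!(
        get_import_format_from_extension("glb").unwrap(),
        InputFormat3d::Gltf(_)
    ));
}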
}


@@ -0,0 +1,989 @@
use std::collections::HashMap;
use anyhow::Result;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use super::{memory::EnvironmentRef, MetaSettings};
use crate::{
errors::KclErrorDetails,
execution::{
ExecState, ExecutorContext, Face, Helix, ImportedGeometry, Metadata, Plane, Sketch, SketchSet, Solid, SolidSet,
TagIdentifier,
},
parsing::{
ast::types::{
DefaultParamVal, FunctionExpression, KclNone, Literal, LiteralValue, Node,
PrimitiveType as AstPrimitiveType, TagDeclarator, TagNode, Type,
},
token::NumericSuffix,
},
std::{args::Arg, StdFnProps},
CompilationError, KclError, ModuleId, SourceRange,
};
pub type KclObjectFields = HashMap<String, KclValue>;
/// Any KCL value.
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
#[ts(export)]
#[serde(tag = "type")]
pub enum KclValue {
Uuid {
value: ::uuid::Uuid,
#[serde(rename = "__meta")]
meta: Vec<Metadata>,
},
Bool {
value: bool,
#[serde(rename = "__meta")]
meta: Vec<Metadata>,
},
Number {
value: f64,
ty: NumericType,
#[serde(rename = "__meta")]
meta: Vec<Metadata>,
},
String {
value: String,
#[serde(rename = "__meta")]
meta: Vec<Metadata>,
},
Array {
value: Vec<KclValue>,
#[serde(rename = "__meta")]
meta: Vec<Metadata>,
},
Object {
value: KclObjectFields,
#[serde(rename = "__meta")]
meta: Vec<Metadata>,
},
TagIdentifier(Box<TagIdentifier>),
TagDeclarator(crate::parsing::ast::types::BoxNode<TagDeclarator>),
Plane {
value: Box<Plane>,
},
Face {
value: Box<Face>,
},
Sketch {
value: Box<Sketch>,
},
Sketches {
value: Vec<Box<Sketch>>,
},
Solid {
value: Box<Solid>,
},
Solids {
value: Vec<Box<Solid>>,
},
Helix {
value: Box<Helix>,
},
ImportedGeometry(ImportedGeometry),
#[ts(skip)]
Function {
#[serde(skip)]
value: FunctionSource,
#[serde(rename = "__meta")]
meta: Vec<Metadata>,
},
Module {
value: ModuleId,
#[serde(rename = "__meta")]
meta: Vec<Metadata>,
},
KclNone {
value: KclNone,
#[serde(rename = "__meta")]
meta: Vec<Metadata>,
},
// Only used for memory management. Should never be visible outside of the memory module.
Tombstone {
value: (),
#[serde(rename = "__meta")]
meta: Vec<Metadata>,
},
}
#[derive(Debug, Clone, PartialEq, Default)]
pub enum FunctionSource {
#[default]
None,
Std {
func: crate::std::StdFn,
props: StdFnProps,
},
User {
ast: crate::parsing::ast::types::BoxNode<FunctionExpression>,
memory: EnvironmentRef,
},
}
impl JsonSchema for FunctionSource {
fn schema_name() -> String {
"FunctionSource".to_owned()
}
fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
// TODO: Actually generate a reasonable schema.
gen.subschema_for::<()>()
}
}
impl From<SketchSet> for KclValue {
fn from(sg: SketchSet) -> Self {
match sg {
SketchSet::Sketch(value) => KclValue::Sketch { value },
SketchSet::Sketches(value) => KclValue::Sketches { value },
}
}
}
impl From<Vec<Box<Sketch>>> for KclValue {
fn from(sg: Vec<Box<Sketch>>) -> Self {
KclValue::Sketches { value: sg }
}
}
impl From<SolidSet> for KclValue {
fn from(eg: SolidSet) -> Self {
match eg {
SolidSet::Solid(eg) => KclValue::Solid { value: eg },
SolidSet::Solids(egs) => KclValue::Solids { value: egs },
}
}
}
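// Unlike the sketch conversion above, a one-element vector of solids collapses
// to a single `Solid` value rather than a one-element `Solids` collection.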
impl From<Vec<Box<Solid>>> for KclValue {
fn from(eg: Vec<Box<Solid>>) -> Self {
if eg.len() == 1 {
KclValue::Solid { value: eg[0].clone() }
} else {
KclValue::Solids { value: eg }
}
}
}
impl From<KclValue> for Vec<SourceRange> {
fn from(item: KclValue) -> Self {
match item {
KclValue::TagDeclarator(t) => vec![SourceRange::new(t.start, t.end, t.module_id)],
KclValue::TagIdentifier(t) => to_vec_sr(&t.meta),
KclValue::Solid { value } => to_vec_sr(&value.meta),
KclValue::Solids { value } => value.iter().flat_map(|eg| to_vec_sr(&eg.meta)).collect(),
KclValue::Sketch { value } => to_vec_sr(&value.meta),
KclValue::Sketches { value } => value.iter().flat_map(|eg| to_vec_sr(&eg.meta)).collect(),
KclValue::Helix { value } => to_vec_sr(&value.meta),
KclValue::ImportedGeometry(i) => to_vec_sr(&i.meta),
KclValue::Function { meta, .. } => to_vec_sr(&meta),
KclValue::Plane { value } => to_vec_sr(&value.meta),
KclValue::Face { value } => to_vec_sr(&value.meta),
KclValue::Bool { meta, .. } => to_vec_sr(&meta),
KclValue::Number { meta, .. } => to_vec_sr(&meta),
KclValue::String { meta, .. } => to_vec_sr(&meta),
KclValue::Array { meta, .. } => to_vec_sr(&meta),
KclValue::Object { meta, .. } => to_vec_sr(&meta),
KclValue::Module { meta, .. } => to_vec_sr(&meta),
KclValue::Uuid { meta, .. } => to_vec_sr(&meta),
KclValue::KclNone { meta, .. } => to_vec_sr(&meta),
KclValue::Tombstone { .. } => unreachable!("Tombstone SourceRange"),
}
}
}
fn to_vec_sr(meta: &[Metadata]) -> Vec<SourceRange> {
meta.iter().map(|m| m.source_range).collect()
}
impl From<&KclValue> for Vec<SourceRange> {
fn from(item: &KclValue) -> Self {
match item {
KclValue::TagDeclarator(t) => vec![SourceRange::new(t.start, t.end, t.module_id)],
KclValue::TagIdentifier(t) => to_vec_sr(&t.meta),
KclValue::Solid { value } => to_vec_sr(&value.meta),
KclValue::Solids { value } => value.iter().flat_map(|eg| to_vec_sr(&eg.meta)).collect(),
KclValue::Sketch { value } => to_vec_sr(&value.meta),
KclValue::Sketches { value } => value.iter().flat_map(|eg| to_vec_sr(&eg.meta)).collect(),
KclValue::Helix { value } => to_vec_sr(&value.meta),
KclValue::ImportedGeometry(i) => to_vec_sr(&i.meta),
KclValue::Function { meta, .. } => to_vec_sr(meta),
KclValue::Plane { value } => to_vec_sr(&value.meta),
KclValue::Face { value } => to_vec_sr(&value.meta),
KclValue::Bool { meta, .. } => to_vec_sr(meta),
KclValue::Number { meta, .. } => to_vec_sr(meta),
KclValue::String { meta, .. } => to_vec_sr(meta),
KclValue::Uuid { meta, .. } => to_vec_sr(meta),
KclValue::Array { meta, .. } => to_vec_sr(meta),
KclValue::Object { meta, .. } => to_vec_sr(meta),
KclValue::Module { meta, .. } => to_vec_sr(meta),
KclValue::KclNone { meta, .. } => to_vec_sr(meta),
KclValue::Tombstone { .. } => unreachable!("Tombstone &SourceRange"),
}
}
}
impl KclValue {
pub(crate) fn metadata(&self) -> Vec<Metadata> {
match self {
KclValue::Uuid { value: _, meta } => meta.clone(),
KclValue::Bool { value: _, meta } => meta.clone(),
KclValue::Number { meta, .. } => meta.clone(),
KclValue::String { value: _, meta } => meta.clone(),
KclValue::Array { value: _, meta } => meta.clone(),
KclValue::Object { value: _, meta } => meta.clone(),
KclValue::TagIdentifier(x) => x.meta.clone(),
KclValue::TagDeclarator(x) => vec![x.metadata()],
KclValue::Plane { value } => value.meta.clone(),
KclValue::Face { value } => value.meta.clone(),
KclValue::Sketch { value } => value.meta.clone(),
KclValue::Sketches { value } => value.iter().flat_map(|sketch| &sketch.meta).copied().collect(),
KclValue::Solid { value } => value.meta.clone(),
KclValue::Solids { value } => value.iter().flat_map(|solid| &solid.meta).copied().collect(),
KclValue::Helix { value } => value.meta.clone(),
KclValue::ImportedGeometry(x) => x.meta.clone(),
KclValue::Function { meta, .. } => meta.clone(),
KclValue::Module { meta, .. } => meta.clone(),
KclValue::KclNone { meta, .. } => meta.clone(),
KclValue::Tombstone { .. } => unreachable!("Tombstone Metadata"),
}
}
pub(crate) fn function_def_source_range(&self) -> Option<SourceRange> {
let KclValue::Function {
value: FunctionSource::User { ast, .. },
..
} = self
else {
return None;
};
// TODO: It would be nice if we could extract the source range starting
// at the fn, but that's the variable declaration.
Some(ast.as_source_range())
}
pub(crate) fn get_solid_set(&self) -> Result<SolidSet> {
match self {
KclValue::Solid { value } => Ok(SolidSet::Solid(value.clone())),
KclValue::Solids { value } => Ok(SolidSet::Solids(value.clone())),
KclValue::Array { value, .. } => {
let solids: Vec<_> = value
.iter()
.enumerate()
.map(|(i, v)| {
v.as_solid().map(|v| v.to_owned()).map(Box::new).ok_or_else(|| {
anyhow::anyhow!(
"expected this array to only contain solids, but element {i} was actually {}",
v.human_friendly_type()
)
})
})
.collect::<Result<_, _>>()?;
Ok(SolidSet::Solids(solids))
}
_ => anyhow::bail!("Not a solid or solids: {:?}", self),
}
}
#[allow(unused)]
pub(crate) fn none() -> Self {
Self::KclNone {
value: Default::default(),
meta: Default::default(),
}
}
/// Human readable type name used in error messages. Should not be relied
/// on for program logic.
pub(crate) fn human_friendly_type(&self) -> &'static str {
match self {
KclValue::Uuid { .. } => "Unique ID (uuid)",
KclValue::TagDeclarator(_) => "TagDeclarator",
KclValue::TagIdentifier(_) => "TagIdentifier",
KclValue::Solid { .. } => "Solid",
KclValue::Solids { .. } => "Solids",
KclValue::Sketch { .. } => "Sketch",
KclValue::Sketches { .. } => "Sketches",
KclValue::Helix { .. } => "Helix",
KclValue::ImportedGeometry(_) => "ImportedGeometry",
KclValue::Function { .. } => "Function",
KclValue::Plane { .. } => "Plane",
KclValue::Face { .. } => "Face",
KclValue::Bool { .. } => "boolean (true/false value)",
KclValue::Number { .. } => "number",
KclValue::String { .. } => "string (text)",
KclValue::Array { .. } => "array (list)",
KclValue::Object { .. } => "object",
KclValue::Module { .. } => "module",
KclValue::KclNone { .. } => "None",
KclValue::Tombstone { .. } => "TOMBSTONE",
}
}
pub(crate) fn from_literal(literal: Node<Literal>, settings: &MetaSettings) -> Self {
let meta = vec![literal.metadata()];
match literal.inner.value {
LiteralValue::Number { value, suffix } => KclValue::Number {
value,
meta,
ty: NumericType::from_parsed(suffix, settings),
},
LiteralValue::String(value) => KclValue::String { value, meta },
LiteralValue::Bool(value) => KclValue::Bool { value, meta },
}
}
pub(crate) fn from_default_param(param: DefaultParamVal, settings: &MetaSettings) -> Self {
match param {
DefaultParamVal::Literal(lit) => Self::from_literal(lit, settings),
DefaultParamVal::KclNone(none) => KclValue::KclNone {
value: none,
meta: Default::default(),
},
}
}
pub(crate) fn map_env_ref(&self, env_map: &HashMap<EnvironmentRef, EnvironmentRef>) -> Self {
let mut result = self.clone();
if let KclValue::Function {
value: FunctionSource::User { ref mut memory, .. },
..
} = result
{
if let Some(new) = env_map.get(memory) {
*memory = *new;
}
}
result
}
/// Put the number into a KCL value.
pub const fn from_number(f: f64, meta: Vec<Metadata>) -> Self {
Self::Number {
value: f,
meta,
ty: NumericType::Unknown,
}
}
pub const fn from_number_with_type(f: f64, ty: NumericType, meta: Vec<Metadata>) -> Self {
Self::Number { value: f, meta, ty }
}
/// Put the point into a KCL value.
pub fn from_point2d(p: [f64; 2], ty: NumericType, meta: Vec<Metadata>) -> Self {
Self::Array {
value: vec![
Self::Number {
value: p[0],
meta: meta.clone(),
ty: ty.clone(),
},
Self::Number {
value: p[1],
meta: meta.clone(),
ty,
},
],
meta,
}
}
pub(crate) fn as_usize(&self) -> Option<usize> {
match self {
KclValue::Number { value, .. } => crate::try_f64_to_usize(*value),
_ => None,
}
}
pub fn as_int(&self) -> Option<i64> {
match self {
KclValue::Number { value, .. } => crate::try_f64_to_i64(*value),
_ => None,
}
}
pub fn as_object(&self) -> Option<&KclObjectFields> {
if let KclValue::Object { value, meta: _ } = &self {
Some(value)
} else {
None
}
}
pub fn into_object(self) -> Option<KclObjectFields> {
if let KclValue::Object { value, meta: _ } = self {
Some(value)
} else {
None
}
}
pub fn as_str(&self) -> Option<&str> {
if let KclValue::String { value, meta: _ } = &self {
Some(value)
} else {
None
}
}
pub fn as_array(&self) -> Option<&[KclValue]> {
if let KclValue::Array { value, meta: _ } = &self {
Some(value)
} else {
None
}
}
pub fn as_point2d(&self) -> Option<[f64; 2]> {
let arr = self.as_array()?;
if arr.len() != 2 {
return None;
}
let x = arr[0].as_f64()?;
let y = arr[1].as_f64()?;
Some([x, y])
}
pub fn as_uuid(&self) -> Option<uuid::Uuid> {
if let KclValue::Uuid { value, meta: _ } = &self {
Some(*value)
} else {
None
}
}
pub fn as_plane(&self) -> Option<&Plane> {
if let KclValue::Plane { value } = &self {
Some(value)
} else {
None
}
}
pub fn as_solid(&self) -> Option<&Solid> {
if let KclValue::Solid { value } = &self {
Some(value)
} else {
None
}
}
pub fn as_sketch(&self) -> Option<&Sketch> {
if let KclValue::Sketch { value } = self {
Some(value)
} else {
None
}
}
pub fn as_f64(&self) -> Option<f64> {
if let KclValue::Number { value, .. } = &self {
Some(*value)
} else {
None
}
}
pub fn as_bool(&self) -> Option<bool> {
if let KclValue::Bool { value, meta: _ } = &self {
Some(*value)
} else {
None
}
}
/// If this value fits in a u32, return it.
pub fn get_u32(&self, source_ranges: Vec<SourceRange>) -> Result<u32, KclError> {
let u = self.as_int().and_then(|n| u64::try_from(n).ok()).ok_or_else(|| {
KclError::Semantic(KclErrorDetails {
message: "Expected an integer >= 0".to_owned(),
source_ranges: source_ranges.clone(),
})
})?;
u32::try_from(u).map_err(|_| {
KclError::Semantic(KclErrorDetails {
message: "Number was too big".to_owned(),
source_ranges,
})
})
}
/// If this value is of type function, return it.
pub fn get_function(&self) -> Option<&FunctionSource> {
match self {
KclValue::Function { value, .. } => Some(value),
_ => None,
}
}
/// Get a tag identifier from a memory item.
pub fn get_tag_identifier(&self) -> Result<TagIdentifier, KclError> {
match self {
KclValue::TagIdentifier(t) => Ok(*t.clone()),
_ => Err(KclError::Semantic(KclErrorDetails {
message: format!("Not a tag identifier: {:?}", self),
source_ranges: self.clone().into(),
})),
}
}
/// Get a tag declarator from a memory item.
pub fn get_tag_declarator(&self) -> Result<TagNode, KclError> {
match self {
KclValue::TagDeclarator(t) => Ok((**t).clone()),
_ => Err(KclError::Semantic(KclErrorDetails {
message: format!("Not a tag declarator: {:?}", self),
source_ranges: self.clone().into(),
})),
}
}
/// Get an optional tag from a memory item.
pub fn get_tag_declarator_opt(&self) -> Result<Option<TagNode>, KclError> {
match self {
KclValue::TagDeclarator(t) => Ok(Some((**t).clone())),
_ => Err(KclError::Semantic(KclErrorDetails {
message: format!("Not a tag declarator: {:?}", self),
source_ranges: self.clone().into(),
})),
}
}
/// If this KCL value is a bool, retrieve it.
pub fn get_bool(&self) -> Result<bool, KclError> {
let Self::Bool { value: b, .. } = self else {
return Err(KclError::Type(KclErrorDetails {
source_ranges: self.into(),
message: format!("Expected bool, found {}", self.human_friendly_type()),
}));
};
Ok(*b)
}
/// True if `self` has a type which is a subtype of `ty` without coercion.
pub fn has_type(&self, ty: &RuntimeType) -> bool {
let Some(self_ty) = self.ty() else {
return false;
};
self_ty.subtype(ty)
}
fn ty(&self) -> Option<RuntimeType> {
match self {
KclValue::Bool { .. } => Some(RuntimeType::Primitive(PrimitiveType::Boolean)),
KclValue::Number { ty, .. } => Some(RuntimeType::Primitive(PrimitiveType::Number(ty.clone()))),
KclValue::String { .. } => Some(RuntimeType::Primitive(PrimitiveType::String)),
KclValue::Object { value, .. } => {
let properties = value
.iter()
.map(|(k, v)| v.ty().map(|t| (k.clone(), t)))
.collect::<Option<Vec<_>>>()?;
Some(RuntimeType::Object(properties))
}
KclValue::Plane { .. } => Some(RuntimeType::Primitive(PrimitiveType::Plane)),
KclValue::Sketch { .. } => Some(RuntimeType::Primitive(PrimitiveType::Sketch)),
KclValue::Sketches { .. } => Some(RuntimeType::Array(PrimitiveType::Sketch)),
KclValue::Solid { .. } => Some(RuntimeType::Primitive(PrimitiveType::Solid)),
KclValue::Solids { .. } => Some(RuntimeType::Array(PrimitiveType::Solid)),
KclValue::Array { value, .. } => Some(RuntimeType::Tuple(
value
.iter()
.map(|v| v.ty().and_then(RuntimeType::primitive))
.collect::<Option<Vec<_>>>()?,
)),
KclValue::Face { .. } => None,
KclValue::Helix { .. } => None,
KclValue::ImportedGeometry(..) => None,
KclValue::Function { .. } => None,
KclValue::Module { .. } => None,
KclValue::TagIdentifier(_) => None,
KclValue::TagDeclarator(_) => None,
KclValue::KclNone { .. } => None,
KclValue::Uuid { .. } => None,
KclValue::Tombstone { .. } => None,
}
}
/// If this memory item is a function, call it with the given arguments, return its val as Ok.
/// If it's not a function, return Err.
pub async fn call_fn(
&self,
args: Vec<Arg>,
exec_state: &mut ExecState,
ctx: ExecutorContext,
source_range: SourceRange,
) -> Result<Option<KclValue>, KclError> {
match self {
KclValue::Function {
value: FunctionSource::Std { func, props },
..
} => {
if props.deprecated {
exec_state.warn(CompilationError::err(
source_range,
format!(
"`{}` is deprecated, see the docs for a recommended replacement",
props.name
),
));
}
exec_state.mut_memory().push_new_env_for_rust_call();
let args = crate::std::Args::new(
args,
source_range,
ctx.clone(),
exec_state.mod_local.pipe_value.clone().map(Arg::synthetic),
);
let result = func(exec_state, args).await.map(Some);
exec_state.mut_memory().pop_env();
result
}
KclValue::Function {
value: FunctionSource::User { ast, memory },
..
} => crate::execution::exec_ast::call_user_defined_function(args, *memory, ast, exec_state, &ctx).await,
_ => Err(KclError::Semantic(KclErrorDetails {
message: "cannot call this because it isn't a function".to_string(),
source_ranges: vec![source_range],
})),
}
}
/// If this is a function, call it by applying keyword arguments.
/// If it's not a function, returns an error.
pub async fn call_fn_kw(
&self,
args: crate::std::Args,
exec_state: &mut ExecState,
ctx: ExecutorContext,
callsite: SourceRange,
) -> Result<Option<KclValue>, KclError> {
match self {
KclValue::Function {
value: FunctionSource::Std { func: _, props },
..
} => {
if props.deprecated {
exec_state.warn(CompilationError::err(
callsite,
format!(
"`{}` is deprecated, see the docs for a recommended replacement",
props.name
),
));
}
todo!("Implement KCL stdlib fns with keyword args");
}
KclValue::Function {
value: FunctionSource::User { ast, memory },
..
} => {
crate::execution::exec_ast::call_user_defined_function_kw(args.kw_args, *memory, ast, exec_state, &ctx)
.await
}
_ => Err(KclError::Semantic(KclErrorDetails {
message: "cannot call this because it isn't a function".to_string(),
source_ranges: vec![callsite],
})),
}
}
}
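// A minimal test-only sketch of two of the accessors above (module and test
// names are chosen here for illustration): `from_point2d`/`as_point2d`
// round-trip a pair of numbers, and `get_u32` only accepts small non-negative
// integers.
#[cfg(test)]
mod kcl_value_accessor_examples {
    use super::*;

    #[test]
    fn point2d_round_trips() {
        let ty = NumericType::Known(UnitType::Length(UnitLen::Mm));
        let value = KclValue::from_point2d([3.0, 4.0], ty, Vec::new());
        assert_eq!(value.as_point2d(), Some([3.0, 4.0]));
        // Anything other than a two-element array of numbers yields None.
        assert_eq!(KclValue::from_number(3.0, Vec::new()).as_point2d(), None);
    }

    #[test]
    fn get_u32_requires_a_small_non_negative_integer() {
        let ranges = vec![SourceRange::default()];
        assert!(matches!(
            KclValue::from_number(7.0, Vec::new()).get_u32(ranges.clone()),
            Ok(7)
        ));
        assert!(KclValue::from_number(-1.0, Vec::new()).get_u32(ranges).is_err());
    }
}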
#[derive(Debug, Clone, PartialEq)]
pub enum RuntimeType {
Primitive(PrimitiveType),
Array(PrimitiveType),
Tuple(Vec<PrimitiveType>),
Object(Vec<(String, RuntimeType)>),
}
impl RuntimeType {
pub fn from_parsed(value: Type, settings: &super::MetaSettings) -> Option<Self> {
match value {
Type::Primitive(pt) => Some(RuntimeType::Primitive(PrimitiveType::from_parsed(pt, settings)?)),
Type::Array(pt) => Some(RuntimeType::Array(PrimitiveType::from_parsed(pt, settings)?)),
Type::Object { properties } => Some(RuntimeType::Object(
properties
.into_iter()
.map(|p| {
p.type_.and_then(|t| {
RuntimeType::from_parsed(t.inner, settings).map(|ty| (p.identifier.inner.name, ty))
})
})
.collect::<Option<Vec<_>>>()?,
)),
}
}
// Subtype with no coercion, including refining numeric types.
fn subtype(&self, sup: &RuntimeType) -> bool {
use RuntimeType::*;
match (self, sup) {
// TODO arrays could be covariant
(Primitive(t1), Primitive(t2)) | (Array(t1), Array(t2)) => t1 == t2,
(Tuple(t1), Tuple(t2)) => t1 == t2,
(Tuple(t1), Array(t2)) => t1.iter().all(|t| t == t2),
// TODO record subtyping - subtype can be larger, fields can be covariant.
(Object(t1), Object(t2)) => t1 == t2,
_ => false,
}
}
fn primitive(self) -> Option<PrimitiveType> {
match self {
RuntimeType::Primitive(t) => Some(t),
_ => None,
}
}
}
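// A small sketch of the subtyping rule above: a tuple whose elements all share
// one primitive type is accepted where an array of that type is expected,
// while the reverse direction is not.
#[cfg(test)]
mod runtime_type_examples {
    use super::*;

    #[test]
    fn uniform_tuple_is_a_subtype_of_array() {
        let pair = RuntimeType::Tuple(vec![PrimitiveType::Boolean, PrimitiveType::Boolean]);
        let bools = RuntimeType::Array(PrimitiveType::Boolean);
        assert!(pair.subtype(&bools));
        assert!(!bools.subtype(&pair));
    }
}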
#[derive(Debug, Clone, PartialEq)]
pub enum PrimitiveType {
Number(NumericType),
String,
Boolean,
Sketch,
Solid,
Plane,
}
impl PrimitiveType {
fn from_parsed(value: AstPrimitiveType, settings: &super::MetaSettings) -> Option<Self> {
match value {
AstPrimitiveType::String => Some(PrimitiveType::String),
AstPrimitiveType::Boolean => Some(PrimitiveType::Boolean),
AstPrimitiveType::Number(suffix) => Some(PrimitiveType::Number(NumericType::from_parsed(suffix, settings))),
AstPrimitiveType::Sketch => Some(PrimitiveType::Sketch),
AstPrimitiveType::Solid => Some(PrimitiveType::Solid),
AstPrimitiveType::Plane => Some(PrimitiveType::Plane),
_ => None,
}
}
}
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
#[ts(export)]
#[serde(tag = "type")]
pub enum NumericType {
// Specified by the user (directly or indirectly)
Known(UnitType),
// Unspecified, using defaults
Default { len: UnitLen, angle: UnitAngle },
// Exceeded the ability of the type system to track.
Unknown,
// Type info has been explicitly cast away.
Any,
}
impl NumericType {
pub fn count() -> Self {
NumericType::Known(UnitType::Count)
}
/// Combine two types when we expect them to be equal.
pub fn combine_eq(self, other: &NumericType) -> NumericType {
if &self == other {
self
} else {
NumericType::Unknown
}
}
/// Combine n types when we expect them to be equal.
///
/// Precondition: tys.len() > 0
pub fn combine_n_eq(tys: &[NumericType]) -> NumericType {
let ty0 = tys[0].clone();
for t in &tys[1..] {
if t != &ty0 {
return NumericType::Unknown;
}
}
ty0
}
/// Combine two types in addition-like operations.
pub fn combine_add(a: NumericType, b: NumericType) -> NumericType {
if a == b {
return a;
}
NumericType::Unknown
}
/// Combine two types in multiplication-like operations.
pub fn combine_mul(a: NumericType, b: NumericType) -> NumericType {
if a == NumericType::count() {
return b;
}
if b == NumericType::count() {
return a;
}
NumericType::Unknown
}
/// Combine two types in division-like operations.
pub fn combine_div(a: NumericType, b: NumericType) -> NumericType {
if b == NumericType::count() {
return a;
}
NumericType::Unknown
}
pub fn from_parsed(suffix: NumericSuffix, settings: &super::MetaSettings) -> Self {
match suffix {
NumericSuffix::None => NumericType::Default {
len: settings.default_length_units,
angle: settings.default_angle_units,
},
NumericSuffix::Count => NumericType::Known(UnitType::Count),
NumericSuffix::Mm => NumericType::Known(UnitType::Length(UnitLen::Mm)),
NumericSuffix::Cm => NumericType::Known(UnitType::Length(UnitLen::Cm)),
NumericSuffix::M => NumericType::Known(UnitType::Length(UnitLen::M)),
NumericSuffix::Inch => NumericType::Known(UnitType::Length(UnitLen::Inches)),
NumericSuffix::Ft => NumericType::Known(UnitType::Length(UnitLen::Feet)),
NumericSuffix::Yd => NumericType::Known(UnitType::Length(UnitLen::Yards)),
NumericSuffix::Deg => NumericType::Known(UnitType::Angle(UnitAngle::Degrees)),
NumericSuffix::Rad => NumericType::Known(UnitType::Angle(UnitAngle::Radians)),
}
}
}
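// A minimal sketch of how the combine_* rules above compose, using the `mm`
// and `count` types produced by `from_parsed`: a count is neutral for
// multiplication and division, while multiplying two lengths exceeds what the
// type system tracks and yields `Unknown`.
#[cfg(test)]
mod numeric_type_combination_examples {
    use super::*;

    #[test]
    fn count_is_neutral_for_mul_and_div() {
        let mm = NumericType::Known(UnitType::Length(UnitLen::Mm));
        let count = NumericType::count();
        assert_eq!(NumericType::combine_mul(count.clone(), mm.clone()), mm);
        assert_eq!(NumericType::combine_div(mm.clone(), count), mm);
        assert_eq!(NumericType::combine_mul(mm.clone(), mm), NumericType::Unknown);
    }
}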
impl From<UnitLen> for NumericType {
fn from(value: UnitLen) -> Self {
NumericType::Known(UnitType::Length(value))
}
}
impl From<UnitAngle> for NumericType {
fn from(value: UnitAngle) -> Self {
NumericType::Known(UnitType::Angle(value))
}
}
#[derive(Debug, Clone, Copy, Deserialize, Serialize, PartialEq, Eq, ts_rs::TS, JsonSchema)]
#[ts(export)]
#[serde(tag = "type")]
pub enum UnitType {
Count,
Length(UnitLen),
Angle(UnitAngle),
}
// TODO: called UnitLen so as not to clash with UnitLength in settings.
/// A unit of length.
#[derive(Debug, Default, Clone, Copy, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema, Eq)]
#[ts(export)]
#[serde(tag = "type")]
pub enum UnitLen {
#[default]
Mm,
Cm,
M,
Inches,
Feet,
Yards,
}
impl std::fmt::Display for UnitLen {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
UnitLen::Mm => write!(f, "mm"),
UnitLen::Cm => write!(f, "cm"),
UnitLen::M => write!(f, "m"),
UnitLen::Inches => write!(f, "in"),
UnitLen::Feet => write!(f, "ft"),
UnitLen::Yards => write!(f, "yd"),
}
}
}
impl TryFrom<NumericSuffix> for UnitLen {
type Error = ();
fn try_from(suffix: NumericSuffix) -> std::result::Result<Self, Self::Error> {
match suffix {
NumericSuffix::Mm => Ok(Self::Mm),
NumericSuffix::Cm => Ok(Self::Cm),
NumericSuffix::M => Ok(Self::M),
NumericSuffix::Inch => Ok(Self::Inches),
NumericSuffix::Ft => Ok(Self::Feet),
NumericSuffix::Yd => Ok(Self::Yards),
_ => Err(()),
}
}
}
impl From<crate::UnitLength> for UnitLen {
fn from(unit: crate::UnitLength) -> Self {
match unit {
crate::UnitLength::Cm => UnitLen::Cm,
crate::UnitLength::Ft => UnitLen::Feet,
crate::UnitLength::In => UnitLen::Inches,
crate::UnitLength::M => UnitLen::M,
crate::UnitLength::Mm => UnitLen::Mm,
crate::UnitLength::Yd => UnitLen::Yards,
}
}
}
impl From<UnitLen> for crate::UnitLength {
fn from(unit: UnitLen) -> Self {
match unit {
UnitLen::Cm => crate::UnitLength::Cm,
UnitLen::Feet => crate::UnitLength::Ft,
UnitLen::Inches => crate::UnitLength::In,
UnitLen::M => crate::UnitLength::M,
UnitLen::Mm => crate::UnitLength::Mm,
UnitLen::Yards => crate::UnitLength::Yd,
}
}
}
impl From<UnitLen> for kittycad_modeling_cmds::units::UnitLength {
fn from(unit: UnitLen) -> Self {
match unit {
UnitLen::Cm => kittycad_modeling_cmds::units::UnitLength::Centimeters,
UnitLen::Feet => kittycad_modeling_cmds::units::UnitLength::Feet,
UnitLen::Inches => kittycad_modeling_cmds::units::UnitLength::Inches,
UnitLen::M => kittycad_modeling_cmds::units::UnitLength::Meters,
UnitLen::Mm => kittycad_modeling_cmds::units::UnitLength::Millimeters,
UnitLen::Yards => kittycad_modeling_cmds::units::UnitLength::Yards,
}
}
}
/// A unit of angle.
#[derive(Debug, Default, Clone, Copy, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema, Eq)]
#[ts(export)]
#[serde(tag = "type")]
pub enum UnitAngle {
#[default]
Degrees,
Radians,
}
impl std::fmt::Display for UnitAngle {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
UnitAngle::Degrees => write!(f, "deg"),
UnitAngle::Radians => write!(f, "rad"),
}
}
}
impl TryFrom<NumericSuffix> for UnitAngle {
type Error = ();
fn try_from(suffix: NumericSuffix) -> std::result::Result<Self, Self::Error> {
match suffix {
NumericSuffix::Deg => Ok(Self::Degrees),
NumericSuffix::Rad => Ok(Self::Radians),
_ => Err(()),
}
}
}

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -0,0 +1,360 @@
use anyhow::Result;
use indexmap::IndexMap;
use kittycad_modeling_cmds::websocket::WebSocketResponse;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use super::EnvironmentRef;
use crate::{
errors::{KclError, KclErrorDetails, Severity},
execution::{
annotations, kcl_value, memory::ProgramMemory, Artifact, ArtifactCommand, ArtifactGraph, ArtifactId,
ExecOutcome, ExecutorSettings, KclValue, Operation, UnitAngle, UnitLen,
},
modules::{ModuleId, ModuleInfo, ModuleLoader, ModulePath, ModuleRepr, ModuleSource},
parsing::ast::types::Annotation,
source_range::SourceRange,
CompilationError,
};
/// State for executing a program.
#[derive(Debug, Clone)]
pub struct ExecState {
pub(super) global: GlobalState,
pub(super) mod_local: ModuleState,
}
#[derive(Debug, Clone)]
pub(super) struct GlobalState {
/// Program variable bindings.
pub memory: ProgramMemory,
/// The stable artifact ID generator.
pub id_generator: IdGenerator,
/// Map from source file absolute path to module ID.
pub path_to_source_id: IndexMap<ModulePath, ModuleId>,
/// Map from module ID to source file.
pub id_to_source: IndexMap<ModuleId, ModuleSource>,
/// Map from module ID to module info.
pub module_infos: IndexMap<ModuleId, ModuleInfo>,
/// Output map of UUIDs to artifacts.
pub artifacts: IndexMap<ArtifactId, Artifact>,
/// Output commands to allow building the artifact graph by the caller.
/// These are accumulated in the [`ExecutorContext`] but moved here for
/// convenience of the execution cache.
pub artifact_commands: Vec<ArtifactCommand>,
/// Responses from the engine for `artifact_commands`. We need to cache
/// this so that we can build the artifact graph. These are accumulated in
/// the [`ExecutorContext`] but moved here for convenience of the execution
/// cache.
pub artifact_responses: IndexMap<Uuid, WebSocketResponse>,
/// Output artifact graph.
pub artifact_graph: ArtifactGraph,
/// Operations that have been performed in execution order, for display in
/// the Feature Tree.
pub operations: Vec<Operation>,
/// Module loader.
pub mod_loader: ModuleLoader,
/// Errors and warnings.
pub errors: Vec<CompilationError>,
}
#[derive(Debug, Clone)]
pub(super) struct ModuleState {
/// The current value of the pipe operator returned from the previous
/// expression. If we're not currently in a pipeline, this will be None.
pub pipe_value: Option<KclValue>,
/// Identifiers that have been exported from the current module.
pub module_exports: Vec<String>,
/// Settings specified from annotations.
pub settings: MetaSettings,
}
impl ExecState {
pub fn new(exec_settings: &ExecutorSettings) -> Self {
ExecState {
global: GlobalState::new(exec_settings),
mod_local: ModuleState::new(exec_settings, None),
}
}
pub(super) fn reset(&mut self, exec_settings: &ExecutorSettings) {
let mut id_generator = self.global.id_generator.clone();
// We do not clear the cached ids; we keep the same id generator and only
// rewind its counter so the frontend can track stable ids across re-executions.
id_generator.next_id = 0;
let mut global = GlobalState::new(exec_settings);
global.id_generator = id_generator;
*self = ExecState {
global,
mod_local: ModuleState::new(exec_settings, None),
};
}
/// Log a non-fatal error.
pub fn err(&mut self, e: CompilationError) {
self.global.errors.push(e);
}
/// Log a warning.
pub fn warn(&mut self, mut e: CompilationError) {
e.severity = Severity::Warning;
self.global.errors.push(e);
}
pub fn errors(&self) -> &[CompilationError] {
&self.global.errors
}
/// Convert to execution outcome when running in WebAssembly. We want to
/// reduce the amount of data that crosses the WASM boundary as much as
/// possible.
pub fn to_wasm_outcome(self, main_ref: EnvironmentRef) -> ExecOutcome {
// Fields are opt-in so that we don't accidentally leak private internal
// state when we add more to ExecState.
ExecOutcome {
variables: self
.memory()
.find_all_in_env(main_ref, |_| true)
.map(|(k, v)| (k.clone(), v.clone()))
.collect(),
operations: self.global.operations,
artifacts: self.global.artifacts,
artifact_commands: self.global.artifact_commands,
artifact_graph: self.global.artifact_graph,
errors: self.global.errors,
filenames: self
.global
.path_to_source_id
.iter()
.map(|(k, v)| ((*v), k.clone()))
.collect(),
}
}
pub fn to_mock_wasm_outcome(self, main_ref: EnvironmentRef) -> ExecOutcome {
// Fields are opt-in so that we don't accidentally leak private internal
// state when we add more to ExecState.
ExecOutcome {
variables: self
.memory()
.find_all_in_env(main_ref, |_| true)
.map(|(k, v)| (k.clone(), v.clone()))
.collect(),
operations: Default::default(),
artifacts: Default::default(),
artifact_commands: Default::default(),
artifact_graph: Default::default(),
errors: self.global.errors,
filenames: Default::default(),
}
}
pub(crate) fn memory(&self) -> &ProgramMemory {
&self.global.memory
}
pub(crate) fn mut_memory(&mut self) -> &mut ProgramMemory {
&mut self.global.memory
}
pub(crate) fn next_uuid(&mut self) -> Uuid {
self.global.id_generator.next_uuid()
}
pub(crate) fn add_artifact(&mut self, artifact: Artifact) {
let id = artifact.id();
self.global.artifacts.insert(id, artifact);
}
pub(super) fn next_module_id(&self) -> ModuleId {
ModuleId::from_usize(self.global.path_to_source_id.len())
}
pub(super) fn id_for_module(&self, path: &ModulePath) -> Option<ModuleId> {
self.global.path_to_source_id.get(path).cloned()
}
pub(super) fn add_path_to_source_id(&mut self, path: ModulePath, id: ModuleId) {
debug_assert!(!self.global.path_to_source_id.contains_key(&path));
self.global.path_to_source_id.insert(path.clone(), id);
}
pub(crate) fn add_root_module_contents(&mut self, program: &crate::Program) {
let root_id = ModuleId::default();
// Get the path for the root module.
let path = self
.global
.path_to_source_id
.iter()
.find(|(_, v)| **v == root_id)
.unwrap()
.0
.clone();
self.add_id_to_source(
root_id,
ModuleSource {
path,
source: program.original_file_contents.to_string(),
},
);
}
pub(super) fn add_id_to_source(&mut self, id: ModuleId, source: ModuleSource) {
self.global.id_to_source.insert(id, source.clone());
}
pub(super) fn add_module(&mut self, id: ModuleId, path: ModulePath, repr: ModuleRepr) {
debug_assert!(self.global.path_to_source_id.contains_key(&path));
let module_info = ModuleInfo { id, repr, path };
self.global.module_infos.insert(id, module_info);
}
pub fn length_unit(&self) -> UnitLen {
self.mod_local.settings.default_length_units
}
pub fn angle_unit(&self) -> UnitAngle {
self.mod_local.settings.default_angle_units
}
pub(super) fn circular_import_error(&self, path: &ModulePath, source_range: SourceRange) -> KclError {
KclError::ImportCycle(KclErrorDetails {
message: format!(
"circular import of modules is not allowed: {} -> {}",
self.global
.mod_loader
.import_stack
.iter()
.map(|p| p.as_path().to_string_lossy())
.collect::<Vec<_>>()
.join(" -> "),
path,
),
source_ranges: vec![source_range],
})
}
}
impl GlobalState {
fn new(settings: &ExecutorSettings) -> Self {
let mut global = GlobalState {
memory: ProgramMemory::new(),
id_generator: Default::default(),
path_to_source_id: Default::default(),
module_infos: Default::default(),
artifacts: Default::default(),
artifact_commands: Default::default(),
artifact_responses: Default::default(),
artifact_graph: Default::default(),
operations: Default::default(),
mod_loader: Default::default(),
errors: Default::default(),
id_to_source: Default::default(),
};
let root_id = ModuleId::default();
let root_path = settings.current_file.clone().unwrap_or_default();
global.module_infos.insert(
root_id,
ModuleInfo {
id: root_id,
path: ModulePath::Local {
value: root_path.clone(),
},
repr: ModuleRepr::Root,
},
);
global
.path_to_source_id
.insert(ModulePath::Local { value: root_path }, root_id);
global
}
}
impl ModuleState {
pub(super) fn new(exec_settings: &ExecutorSettings, std_path: Option<String>) -> Self {
ModuleState {
pipe_value: Default::default(),
module_exports: Default::default(),
settings: MetaSettings {
default_length_units: exec_settings.units.into(),
default_angle_units: Default::default(),
std_path,
},
}
}
}
#[derive(Debug, Default, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct MetaSettings {
pub default_length_units: kcl_value::UnitLen,
pub default_angle_units: kcl_value::UnitAngle,
pub std_path: Option<String>,
}
impl MetaSettings {
pub(crate) fn update_from_annotation(
&mut self,
annotation: &crate::parsing::ast::types::Node<Annotation>,
) -> Result<(), KclError> {
let properties = annotations::expect_properties(annotations::SETTINGS, annotation)?;
for p in properties {
match &*p.inner.key.name {
annotations::SETTINGS_UNIT_LENGTH => {
let value = annotations::expect_ident(&p.inner.value)?;
let value = kcl_value::UnitLen::from_str(value, annotation.as_source_range())?;
self.default_length_units = value;
}
annotations::SETTINGS_UNIT_ANGLE => {
let value = annotations::expect_ident(&p.inner.value)?;
let value = kcl_value::UnitAngle::from_str(value, annotation.as_source_range())?;
self.default_angle_units = value;
}
name => {
return Err(KclError::Semantic(KclErrorDetails {
message: format!(
"Unexpected settings key: `{name}`; expected one of `{}`, `{}`",
annotations::SETTINGS_UNIT_LENGTH,
annotations::SETTINGS_UNIT_ANGLE
),
source_ranges: vec![annotation.as_source_range()],
}))
}
}
}
Ok(())
}
}
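// For reference, a module opts into different defaults with a settings
// annotation along these lines (surface syntax shown as an assumed example;
// the handled keys are exactly those matched above):
//
//     @settings(defaultLengthUnit = in, defaultAngleUnit = deg)
//
// Any other key in the annotation produces the "Unexpected settings key"
// semantic error.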
/// A generator for ArtifactIds that can be stable across executions.
#[derive(Debug, Clone, Default, Deserialize, Serialize, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct IdGenerator {
pub(super) next_id: usize,
ids: Vec<uuid::Uuid>,
}
impl IdGenerator {
pub fn new() -> Self {
Self::default()
}
pub fn next_uuid(&mut self) -> uuid::Uuid {
if let Some(id) = self.ids.get(self.next_id) {
self.next_id += 1;
*id
} else {
let id = uuid::Uuid::new_v4();
self.ids.push(id);
self.next_id += 1;
id
}
}
}
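// A brief sketch of why this generator is "stable": generated ids are cached,
// so after rewinding `next_id` (as `ExecState::reset` does between runs) the
// same UUIDs come back in the same order instead of fresh random ones.
#[cfg(test)]
mod id_generator_examples {
    use super::*;

    #[test]
    fn rewinding_replays_the_same_ids() {
        let mut id_gen = IdGenerator::new();
        let first = id_gen.next_uuid();
        let second = id_gen.next_uuid();
        id_gen.next_id = 0;
        assert_eq!(id_gen.next_uuid(), first);
        assert_eq!(id_gen.next_uuid(), second);
    }
}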