Merge remote-tracking branch 'origin/main' into paultag/import
@@ -53,7 +53,7 @@ pub trait CoreDump: Clone {
.meta()
.create_debug_uploads(vec![kittycad::types::multipart::Attachment {
name: "".to_string(),
filename: Some(format!(r#"modeling-app/coredump-{coredump_id}-screenshot.png"#)),
filepath: Some(format!(r#"modeling-app/coredump-{coredump_id}-screenshot.png"#).into()),
content_type: Some("image/png".to_string()),
data,
}])
@@ -101,7 +101,7 @@ pub trait CoreDump: Clone {
.meta()
.create_debug_uploads(vec![kittycad::types::multipart::Attachment {
name: "".to_string(),
filename: Some(format!(r#"modeling-app/coredump-{}.json"#, coredump_id)),
filepath: Some(format!(r#"modeling-app/coredump-{}.json"#, coredump_id).into()),
content_type: Some("application/json".to_string()),
data,
}])

@@ -339,9 +339,9 @@ fn generate_index(combined: &IndexMap<String, Box<dyn StdLibFn>>, kcl_lib: &[Doc
}

functions.entry(d.mod_name()).or_default().push(match d {
DocData::Fn(f) => (f.name.clone(), d.file_name()),
DocData::Const(c) => (c.name.clone(), d.file_name()),
DocData::Ty(t) => (t.name.clone(), d.file_name()),
DocData::Fn(f) => (f.preferred_name.clone(), d.file_name()),
DocData::Const(c) => (c.preferred_name.clone(), d.file_name()),
DocData::Ty(t) => (t.preferred_name.clone(), d.file_name()),
});

if let DocData::Const(c) = d {
@@ -1172,7 +1172,7 @@ fn find_examples(text: &str, filename: &str) -> Vec<(String, String)> {

async fn run_example(text: &str) -> Result<()> {
let program = crate::Program::parse_no_errs(text)?;
let ctx = ExecutorContext::new_with_default_client(crate::UnitLength::Mm).await?;
let ctx = ExecutorContext::new_with_default_client().await?;
let mut exec_state = crate::execution::ExecState::new(&ctx);
ctx.run(&program, &mut exec_state).await?;
Ok(())

@@ -9,7 +9,7 @@ use tower_lsp::lsp_types::{
use crate::{
execution::annotations,
parsing::{
ast::types::{Annotation, Node, PrimitiveType, Type, VariableKind},
ast::types::{Annotation, ImportSelector, Node, PrimitiveType, Type, VariableKind},
token::NumericSuffix,
},
ModuleId,
@@ -17,7 +17,7 @@ use crate::{

pub fn walk_prelude() -> Vec<DocData> {
let mut visitor = CollectionVisitor::default();
visitor.visit_module("prelude").unwrap();
visitor.visit_module("prelude", "").unwrap();
visitor.result
}

@@ -29,7 +29,7 @@ struct CollectionVisitor {
}

impl CollectionVisitor {
fn visit_module(&mut self, name: &str) -> Result<(), String> {
fn visit_module(&mut self, name: &str, preferred_prefix: &str) -> Result<(), String> {
let old_name = std::mem::replace(&mut self.name, name.to_owned());
let source = crate::modules::read_std(name).unwrap();
let parsed = crate::parsing::parse_str(source, ModuleId::from_usize(self.id))
@@ -40,14 +40,16 @@ impl CollectionVisitor {
for n in &parsed.body {
match n {
crate::parsing::ast::types::BodyItem::ImportStatement(import) if !import.visibility.is_default() => {
// Only supports glob imports for now.
assert!(matches!(
import.selector,
crate::parsing::ast::types::ImportSelector::Glob(..)
));
match &import.path {
crate::parsing::ast::types::ImportPath::Std { path } => {
self.visit_module(&path[1])?;
match import.selector {
ImportSelector::Glob(..) => self.visit_module(&path[1], "")?,
ImportSelector::None { .. } => {
self.visit_module(&path[1], &format!("{}::", import.module_name().unwrap()))?
}
// Only supports glob or whole-module imports for now.
_ => unimplemented!(),
}
}
p => return Err(format!("Unexpected import: `{p}`")),
}
@@ -59,8 +61,8 @@ impl CollectionVisitor {
format!("std::{}::", self.name)
};
let mut dd = match var.kind {
VariableKind::Fn => DocData::Fn(FnData::from_ast(var, qual_name)),
VariableKind::Const => DocData::Const(ConstData::from_ast(var, qual_name)),
VariableKind::Fn => DocData::Fn(FnData::from_ast(var, qual_name, preferred_prefix)),
VariableKind::Const => DocData::Const(ConstData::from_ast(var, qual_name, preferred_prefix)),
};

dd.with_meta(&var.outer_attrs);
@@ -77,7 +79,7 @@ impl CollectionVisitor {
} else {
format!("std::{}::", self.name)
};
let mut dd = DocData::Ty(TyData::from_ast(ty, qual_name));
let mut dd = DocData::Ty(TyData::from_ast(ty, qual_name, preferred_prefix));

dd.with_meta(&ty.outer_attrs);
for a in &ty.outer_attrs {
@@ -200,6 +202,8 @@ impl DocData {
#[derive(Debug, Clone)]
pub struct ConstData {
pub name: String,
/// How the const is indexed, etc.
pub preferred_name: String,
/// The fully qualified name.
pub qual_name: String,
pub value: Option<String>,
@@ -216,7 +220,11 @@ pub struct ConstData {
}

impl ConstData {
fn from_ast(var: &crate::parsing::ast::types::VariableDeclaration, mut qual_name: String) -> Self {
fn from_ast(
var: &crate::parsing::ast::types::VariableDeclaration,
mut qual_name: String,
preferred_prefix: &str,
) -> Self {
assert_eq!(var.kind, crate::parsing::ast::types::VariableKind::Const);

let (value, ty) = match &var.declaration.init {
@@ -240,6 +248,7 @@ impl ConstData {
let name = var.declaration.id.name.clone();
qual_name.push_str(&name);
ConstData {
preferred_name: format!("{preferred_prefix}{name}"),
name,
qual_name,
value,
@@ -272,7 +281,7 @@ impl ConstData {
detail.push_str(ty);
}
CompletionItem {
label: self.name.clone(),
label: self.preferred_name.clone(),
label_details: Some(CompletionItemLabelDetails {
detail: self.value.clone(),
description: None,
@@ -306,6 +315,8 @@ impl ConstData {
pub struct FnData {
/// The name of the function.
pub name: String,
/// How the function is indexed, etc.
pub preferred_name: String,
/// The fully qualified name.
pub qual_name: String,
/// The args of the function.
@@ -326,7 +337,11 @@ pub struct FnData {
}

impl FnData {
fn from_ast(var: &crate::parsing::ast::types::VariableDeclaration, mut qual_name: String) -> Self {
fn from_ast(
var: &crate::parsing::ast::types::VariableDeclaration,
mut qual_name: String,
preferred_prefix: &str,
) -> Self {
assert_eq!(var.kind, crate::parsing::ast::types::VariableKind::Fn);
let crate::parsing::ast::types::Expr::FunctionExpression(expr) = &var.declaration.init else {
unreachable!();
@@ -345,6 +360,7 @@ impl FnData {
}

FnData {
preferred_name: format!("{preferred_prefix}{name}"),
name,
qual_name,
args: expr.params.iter().map(ArgData::from_ast).collect(),
@@ -443,7 +459,7 @@ impl FnData {
}
// We end with ${} so you can jump to the end of the snippet.
// After the last argument.
format!("{}({})${{}}", self.name, args.join(", "))
format!("{}({})${{}}", self.preferred_name, args.join(", "))
}

fn to_signature_help(&self) -> SignatureHelp {
@@ -452,7 +468,7 @@ impl FnData {

SignatureHelp {
signatures: vec![SignatureInformation {
label: self.name.clone(),
label: self.preferred_name.clone(),
documentation: self.short_docs().map(|s| {
Documentation::MarkupContent(MarkupContent {
kind: MarkupKind::Markdown,
@@ -580,6 +596,8 @@ impl ArgKind {
pub struct TyData {
/// The name of the function.
pub name: String,
/// How the type is indexed, etc.
pub preferred_name: String,
/// The fully qualified name.
pub qual_name: String,
pub properties: Properties,
@@ -597,7 +615,11 @@ pub struct TyData {
}

impl TyData {
fn from_ast(ty: &crate::parsing::ast::types::TypeDeclaration, mut qual_name: String) -> Self {
fn from_ast(
ty: &crate::parsing::ast::types::TypeDeclaration,
mut qual_name: String,
preferred_prefix: &str,
) -> Self {
let name = ty.name.name.clone();
qual_name.push_str(&name);
let mut referenced_types = HashSet::new();
@@ -606,6 +628,7 @@ impl TyData {
}

TyData {
preferred_name: format!("{preferred_prefix}{name}"),
name,
qual_name,
properties: Properties {
@@ -641,7 +664,7 @@ impl TyData {

fn to_completion_item(&self) -> CompletionItem {
CompletionItem {
label: self.name.clone(),
label: self.preferred_name.clone(),
label_details: self.alias.as_ref().map(|t| CompletionItemLabelDetails {
detail: Some(format!("type {} = {t}", self.name)),
description: None,
@@ -658,7 +681,7 @@ impl TyData {
preselect: None,
sort_text: None,
filter_text: None,
insert_text: Some(self.name.clone()),
insert_text: Some(self.preferred_name.clone()),
insert_text_format: Some(InsertTextFormat::SNIPPET),
insert_text_mode: None,
text_edit: None,
@@ -987,20 +1010,17 @@ mod test {
let std = walk_prelude();
for d in std {
for (i, eg) in d.examples().enumerate() {
let result =
match crate::test_server::execute_and_snapshot(eg, crate::settings::types::UnitLength::Mm, None)
.await
{
Err(crate::errors::ExecError::Kcl(e)) => {
return Err(miette::Report::new(crate::errors::Report {
error: e.error,
filename: format!("{}{i}", d.name()),
kcl_source: eg.to_string(),
}));
}
Err(other_err) => panic!("{}", other_err),
Ok(img) => img,
};
let result = match crate::test_server::execute_and_snapshot(eg, None).await {
Err(crate::errors::ExecError::Kcl(e)) => {
return Err(miette::Report::new(crate::errors::Report {
error: e.error,
filename: format!("{}{i}", d.name()),
kcl_source: eg.to_string(),
}));
}
Err(other_err) => panic!("{}", other_err),
Ok(img) => img,
};
twenty_twenty::assert_image(
format!("tests/outputs/serial_test_example_{}{i}.png", d.example_name()),
&result,

@@ -1141,7 +1141,7 @@ mod tests {
let snippet = scale_fn.to_autocomplete_snippet().unwrap();
assert_eq!(
snippet,
r#"scale(${0:%}, scale = [${1:3.14}, ${2:3.14}, ${3:3.14}])${}"#
r#"scale(${0:%}, x = ${1:3.14}, y = ${2:3.14}, z = ${3:3.14})${}"#
);
}

@@ -1152,7 +1152,7 @@ mod tests {
let snippet = translate_fn.to_autocomplete_snippet().unwrap();
assert_eq!(
snippet,
r#"translate(${0:%}, translate = [${1:3.14}, ${2:3.14}, ${3:3.14}])${}"#
r#"translate(${0:%}, x = ${1:3.14}, y = ${2:3.14}, z = ${3:3.14})${}"#
);
}

@@ -121,6 +121,13 @@ impl EngineConnection {
}
})?;

if value.is_null() || value.is_undefined() {
return Err(KclError::Engine(KclErrorDetails {
message: "Received null or undefined response from engine".into(),
source_ranges: vec![source_range],
}));
}

// Convert JsValue to a Uint8Array
let data = js_sys::Uint8Array::from(value);

@@ -170,12 +170,16 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
self.clear_queues().await;

self.batch_modeling_cmd(
uuid::Uuid::new_v4(),
id_generator.next_uuid(),
source_range,
&ModelingCmd::SceneClearAll(mcmd::SceneClearAll::default()),
)
.await?;

// Reset to the default units. Modules assume the engine starts in the
// default state.
self.set_units(Default::default(), source_range, id_generator).await?;

// Flush the batch queue, so clear is run right away.
// Otherwise the hooks below won't work.
self.flush_batch(false, source_range).await?;
@@ -195,9 +199,10 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
&self,
visible: bool,
source_range: SourceRange,
id_generator: &mut IdGenerator,
) -> Result<(), crate::errors::KclError> {
self.batch_modeling_cmd(
uuid::Uuid::new_v4(),
id_generator.next_uuid(),
source_range,
&ModelingCmd::from(mcmd::EdgeLinesVisible { hidden: !visible }),
)
@@ -231,10 +236,11 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
&self,
units: crate::UnitLength,
source_range: SourceRange,
id_generator: &mut IdGenerator,
) -> Result<(), crate::errors::KclError> {
// Before we even start executing the program, set the units.
self.batch_modeling_cmd(
uuid::Uuid::new_v4(),
id_generator.next_uuid(),
source_range,
&ModelingCmd::from(mcmd::SetSceneUnits { unit: units.into() }),
)
@@ -248,15 +254,15 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
&self,
settings: &crate::ExecutorSettings,
source_range: SourceRange,
id_generator: &mut IdGenerator,
) -> Result<(), crate::errors::KclError> {
// Set the edge visibility.
self.set_edge_visibility(settings.highlight_edges, source_range).await?;

// Change the units.
self.set_units(settings.units, source_range).await?;
self.set_edge_visibility(settings.highlight_edges, source_range, id_generator)
.await?;

// Send the command to show the grid.
self.modify_grid(!settings.show_grid, source_range).await?;
self.modify_grid(!settings.show_grid, source_range, id_generator)
.await?;

// We do not have commands for changing ssao on the fly.

@@ -502,6 +508,7 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
y_axis: Point3d,
color: Option<Color>,
source_range: SourceRange,
id_generator: &mut IdGenerator,
) -> Result<uuid::Uuid, KclError> {
// Create new default planes.
let default_size = 100.0;
@@ -524,7 +531,7 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
if let Some(color) = color {
// Set the color.
self.batch_modeling_cmd(
uuid::Uuid::new_v4(),
id_generator.next_uuid(),
source_range,
&ModelingCmd::from(mcmd::PlaneSetColor { color, plane_id }),
)
@@ -615,7 +622,7 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
for (name, plane_id, x_axis, y_axis, color) in plane_settings {
planes.insert(
name,
self.make_default_plane(plane_id, x_axis, y_axis, color, source_range)
self.make_default_plane(plane_id, x_axis, y_axis, color, source_range, id_generator)
.await?,
);
}
@@ -701,10 +708,15 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
}))
}

async fn modify_grid(&self, hidden: bool, source_range: SourceRange) -> Result<(), KclError> {
async fn modify_grid(
&self,
hidden: bool,
source_range: SourceRange,
id_generator: &mut IdGenerator,
) -> Result<(), KclError> {
// Hide/show the grid.
self.batch_modeling_cmd(
uuid::Uuid::new_v4(),
id_generator.next_uuid(),
source_range,
&ModelingCmd::from(mcmd::ObjectVisible {
hidden,
@@ -715,7 +727,7 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {

// Hide/show the grid scale text.
self.batch_modeling_cmd(
uuid::Uuid::new_v4(),
id_generator.next_uuid(),
source_range,
&ModelingCmd::from(mcmd::ObjectVisible {
hidden,

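// --- Illustrative sketch (not part of the commit above) ---
// The hunks above thread an `id_generator: &mut IdGenerator` through the
// EngineManager methods and replace ad-hoc `uuid::Uuid::new_v4()` calls with
// `id_generator.next_uuid()`, so command IDs become reproducible across runs.
// Everything below (`DemoIdGenerator`, `DemoEngine`, `send`) is a hypothetical
// stand-in, assuming only the `uuid` crate, not the real kcl-lib API.
use uuid::Uuid;

#[derive(Default)]
struct DemoIdGenerator {
    next_index: u64,
}

impl DemoIdGenerator {
    // Stand-in for `id_generator.next_uuid()`: the ID depends only on a
    // counter, so replaying the same program yields the same ID sequence.
    fn next_uuid(&mut self) -> Uuid {
        let id = Uuid::from_u64_pair(0, self.next_index);
        self.next_index += 1;
        id
    }
}

struct DemoEngine;

impl DemoEngine {
    // Mirrors the new signatures: the caller supplies the command ID instead
    // of the engine minting a random v4 UUID internally.
    fn send(&self, cmd_id: Uuid, cmd: &str) {
        println!("{cmd_id}: {cmd}");
    }
}

fn main() {
    let engine = DemoEngine;
    let mut ids = DemoIdGenerator::default();
    engine.send(ids.next_uuid(), "SceneClearAll");
    engine.send(ids.next_uuid(), "SetSceneUnits");
}
// --- End of sketch ---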
@@ -6,7 +6,7 @@ use itertools::{EitherOrBoth, Itertools};
use tokio::sync::RwLock;

use crate::{
execution::{annotations, memory::Stack, EnvironmentRef, ExecState, ExecutorSettings},
execution::{annotations, memory::Stack, state::ModuleInfoMap, EnvironmentRef, ExecState, ExecutorSettings},
parsing::ast::types::{Annotation, Node, Program},
walk::Node as WalkNode,
};
@@ -15,7 +15,7 @@ lazy_static::lazy_static! {
/// A static mutable lock for updating the last successful execution state for the cache.
static ref OLD_AST: Arc<RwLock<Option<OldAstState>>> = Default::default();
// The last successful run's memory. Not cleared after an unssuccessful run.
static ref PREV_MEMORY: Arc<RwLock<Option<Stack>>> = Default::default();
static ref PREV_MEMORY: Arc<RwLock<Option<(Stack, ModuleInfoMap)>>> = Default::default();
}

/// Read the old ast memory from the lock.
@@ -29,12 +29,12 @@ pub(super) async fn write_old_ast(old_state: OldAstState) {
*old_ast = Some(old_state);
}

pub(crate) async fn read_old_memory() -> Option<Stack> {
pub(crate) async fn read_old_memory() -> Option<(Stack, ModuleInfoMap)> {
let old_mem = PREV_MEMORY.read().await;
old_mem.clone()
}

pub(super) async fn write_old_memory(mem: Stack) {
pub(super) async fn write_old_memory(mem: (Stack, ModuleInfoMap)) {
let mut old_mem = PREV_MEMORY.write().await;
*old_mem = Some(mem);
}
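// --- Illustrative sketch (not part of the commit above) ---
// The cache now stores the last run's memory together with its module info
// map, so a later mock run can restore both consistently. This stand-alone
// sketch only mimics that shape: `DemoStack` and `DemoModuleInfoMap` are
// hypothetical aliases, and it assumes the `tokio` and `lazy_static` crates.
use std::sync::Arc;
use tokio::sync::RwLock;

type DemoStack = Vec<String>;
type DemoModuleInfoMap = std::collections::BTreeMap<u32, String>;

lazy_static::lazy_static! {
    static ref DEMO_PREV_MEMORY: Arc<RwLock<Option<(DemoStack, DemoModuleInfoMap)>>> =
        Default::default();
}

async fn write_demo_memory(mem: (DemoStack, DemoModuleInfoMap)) {
    *DEMO_PREV_MEMORY.write().await = Some(mem);
}

async fn read_demo_memory() -> Option<(DemoStack, DemoModuleInfoMap)> {
    DEMO_PREV_MEMORY.read().await.clone()
}

#[tokio::main]
async fn main() {
    write_demo_memory((vec!["env0".to_string()], DemoModuleInfoMap::new())).await;
    // Both halves come back together, mirroring how the executor restores the
    // stack and `module_infos` in one step.
    let (stack, infos) = read_demo_memory().await.unwrap();
    assert_eq!(stack.len(), 1);
    assert!(infos.is_empty());
}
// --- End of sketch ---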
@@ -97,15 +97,6 @@ pub(super) async fn get_changed_program(old: CacheInformation<'_>, new: CacheInf
// If the settings are different we might need to bust the cache.
// We specifically do this before checking if they are the exact same.
if old.settings != new.settings {
// If the units are different we need to re-execute the whole thing.
if old.settings.units != new.settings.units {
return CacheResult::ReExecute {
clear_scene: true,
reapply_settings: true,
program: new.ast.clone(),
};
}

// If anything else is different we may not need to re-execute, but rather just
// run the settings again.
reapply_settings = true;
@@ -424,50 +415,6 @@ shell(firstSketch, faces = ['end'], thickness = 0.25)"#;
assert_eq!(result, CacheResult::NoAction(false));
}

// Changing the units with the exact same file should bust the cache.
#[tokio::test(flavor = "multi_thread")]
async fn test_get_changed_program_same_code_but_different_units() {
let new = r#"// Remove the end face for the extrusion.
firstSketch = startSketchOn('XY')
|> startProfileAt([-12, 12], %)
|> line(end = [24, 0])
|> line(end = [0, -24])
|> line(end = [-24, 0])
|> close()
|> extrude(length = 6)

// Remove the end face for the extrusion.
shell(firstSketch, faces = ['end'], thickness = 0.25)"#;

let ExecTestResults {
program, mut exec_ctxt, ..
} = parse_execute(new).await.unwrap();

// Change the settings to cm.
exec_ctxt.settings.units = crate::UnitLength::Cm;

let result = get_changed_program(
CacheInformation {
ast: &program.ast,
settings: &Default::default(),
},
CacheInformation {
ast: &program.ast,
settings: &exec_ctxt.settings,
},
)
.await;

assert_eq!(
result,
CacheResult::ReExecute {
clear_scene: true,
reapply_settings: true,
program: program.ast
}
);
}

// Changing the grid settings with the exact same file should NOT bust the cache.
#[tokio::test(flavor = "multi_thread")]
async fn test_get_changed_program_same_code_but_different_grid_setting() {
@@ -615,4 +562,42 @@ startSketchOn('XY')
}
);
}

// Removing the units settings using an annotation, when it was non-default
// units, with the exact same file should bust the cache.
#[tokio::test(flavor = "multi_thread")]
async fn test_get_changed_program_same_code_but_removed_unit_setting_using_annotation() {
let old_code = r#"@settings(defaultLengthUnit = in)
startSketchOn('XY')
"#;
let new_code = r#"
startSketchOn('XY')
"#;

let ExecTestResults { program, exec_ctxt, .. } = parse_execute(old_code).await.unwrap();

let mut new_program = crate::Program::parse_no_errs(new_code).unwrap();
new_program.compute_digest();

let result = get_changed_program(
CacheInformation {
ast: &program.ast,
settings: &exec_ctxt.settings,
},
CacheInformation {
ast: &new_program.ast,
settings: &exec_ctxt.settings,
},
)
.await;

assert_eq!(
result,
CacheResult::ReExecute {
clear_scene: true,
reapply_settings: true,
program: new_program.ast
}
);
}
}

@@ -64,7 +64,11 @@ impl ExecutorContext {
let new_units = exec_state.length_unit();
if !self.engine.execution_kind().await.is_isolated() {
self.engine
.set_units(new_units.into(), annotation.as_source_range())
.set_units(
new_units.into(),
annotation.as_source_range(),
exec_state.id_generator(),
)
.await?;
}
} else {
@@ -106,12 +110,7 @@ impl ExecutorContext {
let old_units = exec_state.length_unit();
let original_execution = self.engine.replace_execution_kind(exec_kind).await;

let mut local_state = ModuleState::new(
&self.settings,
path.std_path(),
exec_state.stack().memory.clone(),
Some(module_id),
);
let mut local_state = ModuleState::new(path.std_path(), exec_state.stack().memory.clone(), Some(module_id));
if !preserve_mem {
std::mem::swap(&mut exec_state.mod_local, &mut local_state);
}
@@ -143,7 +142,9 @@ impl ExecutorContext {
// command and we'd need to flush the batch again.
// This avoids that.
if !exec_kind.is_isolated() && new_units != old_units && *path != ModulePath::Main {
self.engine.set_units(old_units.into(), Default::default()).await?;
self.engine
.set_units(old_units.into(), Default::default(), exec_state.id_generator())
.await?;
}
self.engine.replace_execution_kind(original_execution).await;

@@ -2,7 +2,7 @@

use crate::execution::ModuleId;

const NAMESPACE_KCL: uuid::Uuid = uuid::uuid!("efcd6508-4ce6-4a09-8317-e6a6994a3cd7");
const NAMESPACE_KCL: uuid::Uuid = uuid::uuid!("8bda3118-75eb-58c7-a866-bef1dcb495e7");

/// A generator for ArtifactIds that can be stable across executions.
#[derive(Debug, Clone, Default, PartialEq)]

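// --- Illustrative sketch (not part of the commit above) ---
// Why a namespace constant matters: a v5 UUID is a hash of (namespace, name),
// so the same inputs always produce the same ID, unlike `Uuid::new_v4()`.
// This sketch assumes the `uuid` crate with the `v5` feature; the namespace
// value is copied from the hunk above purely for illustration, and
// `artifact_id` is a hypothetical helper, not the crate's real API.
use uuid::Uuid;

const DEMO_NAMESPACE: Uuid = uuid::uuid!("8bda3118-75eb-58c7-a866-bef1dcb495e7");

fn artifact_id(module: u32, index: u64) -> Uuid {
    // Deriving from (module, index) keeps IDs stable across executions.
    Uuid::new_v5(&DEMO_NAMESPACE, format!("{module}:{index}").as_bytes())
}

fn main() {
    // Re-running the same derivation reproduces the same UUID.
    assert_eq!(artifact_id(0, 0), artifact_id(0, 0));
    assert_ne!(artifact_id(0, 0), artifact_id(0, 1));
}
// --- End of sketch ---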
@@ -40,7 +40,6 @@ use crate::{
fs::FileManager,
modules::{ModuleId, ModulePath},
parsing::ast::types::{Expr, ImportPath, NodeRef},
settings::types::UnitLength,
source_range::SourceRange,
std::StdLib,
CompilationError, ExecError, ExecutionKind, KclErrorWithOutputs,
@@ -266,8 +265,6 @@ pub struct ExecutorContext {
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
#[ts(export)]
pub struct ExecutorSettings {
/// The project-default unit to use in modeling dimensions.
pub units: UnitLength,
/// Highlight edges of 3D objects?
pub highlight_edges: bool,
/// Whether or not Screen Space Ambient Occlusion (SSAO) is enabled.
@@ -288,7 +285,6 @@ pub struct ExecutorSettings {
impl Default for ExecutorSettings {
fn default() -> Self {
Self {
units: Default::default(),
highlight_edges: true,
enable_ssao: false,
show_grid: false,
@@ -302,7 +298,6 @@ impl Default for ExecutorSettings {
impl From<crate::settings::types::Configuration> for ExecutorSettings {
fn from(config: crate::settings::types::Configuration) -> Self {
Self {
units: config.settings.modeling.base_unit,
highlight_edges: config.settings.modeling.highlight_edges.into(),
enable_ssao: config.settings.modeling.enable_ssao.into(),
show_grid: config.settings.modeling.show_scale_grid,
@@ -316,7 +311,6 @@ impl From<crate::settings::types::Configuration> for ExecutorSettings {
impl From<crate::settings::types::project::ProjectConfiguration> for ExecutorSettings {
fn from(config: crate::settings::types::project::ProjectConfiguration) -> Self {
Self {
units: config.settings.modeling.base_unit,
highlight_edges: config.settings.modeling.highlight_edges.into(),
enable_ssao: config.settings.modeling.enable_ssao.into(),
show_grid: Default::default(),
@@ -330,7 +324,6 @@ impl From<crate::settings::types::project::ProjectConfiguration> for ExecutorSet
impl From<crate::settings::types::ModelingSettings> for ExecutorSettings {
fn from(modeling: crate::settings::types::ModelingSettings) -> Self {
Self {
units: modeling.base_unit,
highlight_edges: modeling.highlight_edges.into(),
enable_ssao: modeling.enable_ssao.into(),
show_grid: modeling.show_scale_grid,
@@ -344,7 +337,6 @@ impl From<crate::settings::types::ModelingSettings> for ExecutorSettings {
impl From<crate::settings::types::project::ProjectModelingSettings> for ExecutorSettings {
fn from(modeling: crate::settings::types::project::ProjectModelingSettings) -> Self {
Self {
units: modeling.base_unit,
highlight_edges: modeling.highlight_edges.into(),
enable_ssao: modeling.enable_ssao.into(),
show_grid: Default::default(),
@@ -492,26 +484,17 @@ impl ExecutorContext {
/// This allows for passing in `ZOO_API_TOKEN` and `ZOO_HOST` as environment
/// variables.
#[cfg(not(target_arch = "wasm32"))]
pub async fn new_with_default_client(units: UnitLength) -> Result<Self> {
pub async fn new_with_default_client() -> Result<Self> {
// Create the client.
let ctx = Self::new_with_client(
ExecutorSettings {
units,
..Default::default()
},
None,
None,
)
.await?;
let ctx = Self::new_with_client(Default::default(), None, None).await?;
Ok(ctx)
}

/// For executing unit tests.
#[cfg(not(target_arch = "wasm32"))]
pub async fn new_for_unit_test(units: UnitLength, engine_addr: Option<String>) -> Result<Self> {
pub async fn new_for_unit_test(engine_addr: Option<String>) -> Result<Self> {
let ctx = ExecutorContext::new_with_client(
ExecutorSettings {
units,
highlight_edges: true,
enable_ssao: false,
show_grid: false,
@@ -545,6 +528,18 @@ impl ExecutorContext {
.await
}

pub async fn bust_cache_and_reset_scene(&self) -> Result<ExecOutcome, KclErrorWithOutputs> {
cache::bust_cache().await;

// Execute an empty program to clear and reset the scene.
// We specifically want to be returned the objects after the scene is reset.
// Like the default planes so it is easier to just execute an empty program
// after the cache is busted.
let outcome = self.run_with_caching(crate::Program::empty()).await?;

Ok(outcome)
}

async fn prepare_mem(&self, exec_state: &mut ExecState) -> Result<(), KclErrorWithOutputs> {
self.eval_prelude(exec_state, SourceRange::synthetic())
.await
@@ -563,7 +558,10 @@ impl ExecutorContext {
let mut exec_state = ExecState::new(self);
if use_prev_memory {
match cache::read_old_memory().await {
Some(mem) => *exec_state.mut_stack() = mem,
Some(mem) => {
*exec_state.mut_stack() = mem.0;
exec_state.global.module_infos = mem.1;
}
None => self.prepare_mem(&mut exec_state).await?,
}
} else {
@@ -581,10 +579,11 @@ impl ExecutorContext {
// memory, not to the exec_state which is not cached for mock execution.

let mut mem = exec_state.stack().clone();
let module_infos = exec_state.global.module_infos.clone();
let outcome = exec_state.to_mock_wasm_outcome(result.0).await;

mem.squash_env(result.0);
cache::write_old_memory(mem).await;
cache::write_old_memory((mem, module_infos)).await;

Ok(outcome)
}
@@ -618,7 +617,7 @@ impl ExecutorContext {
if reapply_settings
&& self
.engine
.reapply_settings(&self.settings, Default::default())
.reapply_settings(&self.settings, Default::default(), old_state.id_generator())
.await
.is_err()
{
@@ -636,7 +635,7 @@ impl ExecutorContext {
CacheResult::NoAction(true) => {
if self
.engine
.reapply_settings(&self.settings, Default::default())
.reapply_settings(&self.settings, Default::default(), old_state.id_generator())
.await
.is_ok()
{
@@ -737,7 +736,7 @@ impl ExecutorContext {

// Re-apply the settings, in case the cache was busted.
self.engine
.reapply_settings(&self.settings, Default::default())
.reapply_settings(&self.settings, Default::default(), exec_state.id_generator())
.await
.map_err(KclErrorWithOutputs::no_outputs)?;

@@ -774,7 +773,7 @@ impl ExecutorContext {
if !self.is_mock() {
let mut mem = exec_state.stack().deep_clone();
mem.restore_env(env_ref);
cache::write_old_memory(mem).await;
cache::write_old_memory((mem, exec_state.global.module_infos.clone())).await;
}
let session_data = self.engine.get_session_data().await;
Ok((env_ref, session_data))
@@ -862,11 +861,6 @@ impl ExecutorContext {
Ok(())
}

/// Update the units for the executor.
pub(crate) fn update_units(&mut self, units: UnitLength) {
self.settings.units = units;
}

/// Get a snapshot of the current scene.
pub async fn prepare_snapshot(&self) -> std::result::Result<TakeSnapshot, ExecError> {
// Zoom to fit.
@@ -1008,11 +1002,7 @@ mod tests {
use pretty_assertions::assert_eq;

use super::*;
use crate::{
errors::{KclErrorDetails, Severity},
execution::memory::Stack,
ModuleId,
};
use crate::{errors::KclErrorDetails, execution::memory::Stack, ModuleId};

/// Convenience function to get a JSON value from memory and unwrap.
#[track_caller]
@@ -1615,34 +1605,6 @@ const inInches = 2.0 * inch()"#;
);
}

#[tokio::test(flavor = "multi_thread")]
async fn test_unit_suggest() {
let src = "foo = 42";
let program = crate::Program::parse_no_errs(src).unwrap();
let ctx = ExecutorContext {
engine: Arc::new(Box::new(
crate::engine::conn_mock::EngineConnection::new().await.unwrap(),
)),
fs: Arc::new(crate::fs::FileManager::new()),
stdlib: Arc::new(crate::std::StdLib::new()),
settings: ExecutorSettings {
units: UnitLength::Ft,
..Default::default()
},
context_type: ContextType::Mock,
};
let mut exec_state = ExecState::new(&ctx);
ctx.run(&program, &mut exec_state).await.unwrap();
let errs = exec_state.errors();
assert_eq!(errs.len(), 1, "{errs:?}");
let warn = &errs[0];
assert_eq!(warn.severity, Severity::Warning);
assert_eq!(
warn.apply_suggestion(src).unwrap(),
"@settings(defaultLengthUnit = ft)\nfoo = 42"
)
}

#[tokio::test(flavor = "multi_thread")]
async fn test_zero_param_fn() {
let ast = r#"const sigmaAllow = 35000 // psi
@@ -1971,9 +1933,7 @@ let w = f() + f()
)
"#;

let ctx = crate::test_server::new_context(UnitLength::Mm, true, None)
.await
.unwrap();
let ctx = crate::test_server::new_context(true, None).await.unwrap();
let old_program = crate::Program::parse_no_errs(code).unwrap();

// Execute the program.
@@ -2026,9 +1986,7 @@ let w = f() + f()
)
"#;

let mut ctx = crate::test_server::new_context(UnitLength::Mm, true, None)
.await
.unwrap();
let mut ctx = crate::test_server::new_context(true, None).await.unwrap();
let old_program = crate::Program::parse_no_errs(code).unwrap();

// Execute the program.
@@ -2060,11 +2018,13 @@ let w = f() + f()

// Ensure the settings are as expected.
assert_eq!(settings_state, ctx.settings);

ctx.close().await;
}

#[tokio::test(flavor = "multi_thread")]
async fn mock_after_not_mock() {
let ctx = ExecutorContext::new_with_default_client(UnitLength::Mm).await.unwrap();
let ctx = ExecutorContext::new_with_default_client().await.unwrap();
let program = crate::Program::parse_no_errs("x = 2").unwrap();
let result = ctx.run_with_caching(program).await.unwrap();
assert_eq!(result.variables.get("x").unwrap().as_f64().unwrap(), 2.0);
@@ -2073,6 +2033,9 @@ let w = f() + f()
let program2 = crate::Program::parse_no_errs("z = x + 1").unwrap();
let result = ctx2.run_mock(program2, true).await.unwrap();
assert_eq!(result.variables.get("z").unwrap().as_f64().unwrap(), 3.0);

ctx.close().await;
ctx2.close().await;
}

#[tokio::test(flavor = "multi_thread")]

@@ -30,6 +30,8 @@ pub struct ExecState {
pub(super) exec_context: Option<super::ExecutorContext>,
}

pub type ModuleInfoMap = IndexMap<ModuleId, ModuleInfo>;

#[derive(Debug, Clone)]
pub(super) struct GlobalState {
/// Map from source file absolute path to module ID.
@@ -37,7 +39,7 @@ pub(super) struct GlobalState {
/// Map from module ID to source file.
pub id_to_source: IndexMap<ModuleId, ModuleSource>,
/// Map from module ID to module info.
pub module_infos: IndexMap<ModuleId, ModuleInfo>,
pub module_infos: ModuleInfoMap,
/// Output map of UUIDs to artifacts.
pub artifacts: IndexMap<ArtifactId, Artifact>,
/// Output commands to allow building the artifact graph by the caller.
@@ -80,7 +82,7 @@ impl ExecState {
pub fn new(exec_context: &super::ExecutorContext) -> Self {
ExecState {
global: GlobalState::new(&exec_context.settings),
mod_local: ModuleState::new(&exec_context.settings, None, ProgramMemory::new(), Default::default()),
mod_local: ModuleState::new(None, ProgramMemory::new(), Default::default()),
exec_context: Some(exec_context.clone()),
}
}
@@ -90,7 +92,7 @@ impl ExecState {

*self = ExecState {
global,
mod_local: ModuleState::new(&exec_context.settings, None, ProgramMemory::new(), Default::default()),
mod_local: ModuleState::new(None, ProgramMemory::new(), Default::default()),
exec_context: Some(exec_context.clone()),
};
}
@@ -291,12 +293,7 @@ impl GlobalState {
}

impl ModuleState {
pub(super) fn new(
exec_settings: &ExecutorSettings,
std_path: Option<String>,
memory: Arc<ProgramMemory>,
module_id: Option<ModuleId>,
) -> Self {
pub(super) fn new(std_path: Option<String>, memory: Arc<ProgramMemory>, module_id: Option<ModuleId>) -> Self {
ModuleState {
id_generator: IdGenerator::new(module_id),
stack: memory.new_stack(),
@@ -305,14 +302,14 @@ impl ModuleState {
explicit_length_units: false,
std_path,
settings: MetaSettings {
default_length_units: exec_settings.units.into(),
default_length_units: Default::default(),
default_angle_units: Default::default(),
},
}
}
}

#[derive(Debug, Default, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
#[derive(Debug, Default, Clone, Deserialize, Serialize, PartialEq, Eq, ts_rs::TS, JsonSchema)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct MetaSettings {

@@ -93,11 +93,11 @@ pub use lsp::{
kcl::{Backend as KclLspBackend, Server as KclLspServerSubCommand},
};
pub use modules::ModuleId;
pub use parsing::ast::{modify::modify_ast_for_sketch, types::FormatOptions};
pub use parsing::ast::types::FormatOptions;
pub use settings::types::{project::ProjectConfiguration, Configuration, UnitLength};
pub use source_range::SourceRange;
#[cfg(not(target_arch = "wasm32"))]
pub use unparser::recast_dir;
pub use unparser::{recast_dir, walk_dir};

// Rather than make executor public and make lots of it pub(crate), just re-export into a new module.
// Ideally we wouldn't export these things at all, they should only be used for testing.
@@ -195,6 +195,10 @@ impl Program {
})
}

pub fn is_empty_or_only_settings(&self) -> bool {
self.ast.is_empty_or_only_settings()
}

pub fn lint_all(&self) -> Result<Vec<lint::Discovered>, anyhow::Error> {
self.ast.lint_all()
}
@@ -211,6 +215,14 @@ impl Program {
pub fn recast_with_options(&self, options: &FormatOptions) -> String {
self.ast.recast(options, 0)
}

/// Create an empty program.
pub fn empty() -> Self {
Self {
ast: parsing::ast::types::Node::no_src(parsing::ast::types::Program::default()),
original_file_contents: String::new(),
}
}
}

#[inline]

@@ -786,7 +786,7 @@ impl Backend {
vec![kittycad::types::multipart::Attachment {
// Clean the URI part.
name: "attachment".to_string(),
filename: Some("attachment.zip".to_string()),
filepath: Some("attachment.zip".into()),
content_type: Some("application/x-zip".to_string()),
data: self.create_zip().await?,
}],
@@ -812,56 +812,6 @@ impl Backend {
Ok(())
}

pub async fn update_units(
&self,
params: custom_notifications::UpdateUnitsParams,
) -> RpcResult<Option<custom_notifications::UpdateUnitsResponse>> {
{
let mut ctx = self.executor_ctx.write().await;
// Borrow the executor context mutably.
let Some(ref mut executor_ctx) = *ctx else {
self.client
.log_message(MessageType::ERROR, "no executor context set to update units for")
.await;
return Ok(None);
};

self.client
.log_message(MessageType::INFO, format!("update units: {:?}", params))
.await;

if executor_ctx.settings.units == params.units
&& !self.has_diagnostics(params.text_document.uri.as_ref()).await
{
// Return early the units are the same.
return Ok(None);
}

// Set the engine units.
executor_ctx.update_units(params.units);
}
// Lock is dropped here since nested.
// This is IMPORTANT.

let new_params = TextDocumentItem {
uri: params.text_document.uri.clone(),
text: std::mem::take(&mut params.text.to_string()),
version: Default::default(),
language_id: Default::default(),
};

// Force re-execution.
self.inner_on_change(new_params, true).await;

// Check if we have diagnostics.
// If we do we return early, since we failed in some way.
if self.has_diagnostics(params.text_document.uri.as_ref()).await {
return Ok(None);
}

Ok(Some(custom_notifications::UpdateUnitsResponse {}))
}

pub async fn update_can_execute(
&self,
params: custom_notifications::UpdateCanExecuteParams,
@@ -1635,7 +1585,7 @@ fn position_to_char_index(position: Position, code: &str) -> usize {

async fn with_cached_var<T>(name: &str, f: impl Fn(&KclValue) -> T) -> Option<T> {
let mem = cache::read_old_memory().await?;
let value = mem.get(name, SourceRange::default()).ok()?;
let value = mem.0.get(name, SourceRange::default()).ok()?;

Some(f(value))
}

@@ -42,7 +42,6 @@ pub async fn kcl_lsp_server(execute: bool) -> Result<crate::lsp::kcl::Backend> {
can_execute: Arc::new(tokio::sync::RwLock::new(can_execute)),
is_initialized: Default::default(),
})
.custom_method("kcl/updateUnits", crate::lsp::kcl::Backend::update_units)
.custom_method("kcl/updateCanExecute", crate::lsp::kcl::Backend::update_can_execute)
.finish();

@@ -1016,6 +1016,8 @@ startSketchOn(XY)
}
_ => unreachable!(),
}

server.executor_ctx().await.clone().unwrap().close().await;
}

#[tokio::test(flavor = "multi_thread")]
@@ -2318,78 +2320,8 @@ async fn kcl_test_kcl_lsp_on_change_update_memory() {
}],
})
.await;
}

#[tokio::test(flavor = "multi_thread", worker_threads = 10)]
async fn kcl_test_kcl_lsp_update_units() {
let server = kcl_lsp_server(true).await.unwrap();

let same_text = r#"fn cube = (pos, scale) => {
sg = startSketchOn(XY)
|> startProfileAt(pos, %)
|> line(end = [0, scale])
|> line(end = [scale, 0])
|> line(end = [0, -scale])

return sg
}
part001 = cube([0,0], 20)
|> close()
|> extrude(length = 20)"#
.to_string();

// Send open file.
server
.did_open(tower_lsp::lsp_types::DidOpenTextDocumentParams {
text_document: tower_lsp::lsp_types::TextDocumentItem {
uri: "file:///test.kcl".try_into().unwrap(),
language_id: "kcl".to_string(),
version: 1,
text: same_text.clone(),
},
})
.await;

// Get the tokens.
let tokens = server.token_map.get("file:///test.kcl").unwrap().clone();
assert_eq!(tokens.as_slice().len(), 123);

// Get the ast.
let ast = server.ast_map.get("file:///test.kcl").unwrap().clone();
assert_eq!(ast.ast.body.len(), 2);

// Send change file.
server
.did_change(tower_lsp::lsp_types::DidChangeTextDocumentParams {
text_document: tower_lsp::lsp_types::VersionedTextDocumentIdentifier {
uri: "file:///test.kcl".try_into().unwrap(),
version: 1,
},
content_changes: vec![tower_lsp::lsp_types::TextDocumentContentChangeEvent {
range: None,
range_length: None,
text: same_text.clone(),
}],
})
.await;

let units = server.executor_ctx.read().await.clone().unwrap().settings.units;
assert_eq!(units, crate::settings::types::UnitLength::Mm);

// Update the units.
server
.update_units(crate::lsp::kcl::custom_notifications::UpdateUnitsParams {
text_document: crate::lsp::kcl::custom_notifications::TextDocumentIdentifier {
uri: "file:///test.kcl".try_into().unwrap(),
},
units: crate::settings::types::UnitLength::M,
text: same_text.clone(),
})
.await
.unwrap();

let units = server.executor_ctx().await.clone().unwrap().settings.units;
assert_eq!(units, crate::settings::types::UnitLength::M);
server.executor_ctx().await.clone().unwrap().close().await;
}

#[tokio::test(flavor = "multi_thread")]
@@ -2506,6 +2438,8 @@ async fn kcl_test_kcl_lsp_diagnostics_on_execution_error() {

// Get the diagnostics.
assert_diagnostic_count(server.diagnostics_map.get("file:///test.kcl").as_deref(), 0);

server.executor_ctx().await.clone().unwrap().close().await;
}

#[tokio::test(flavor = "multi_thread")]
@@ -2556,6 +2490,8 @@ async fn kcl_test_kcl_lsp_full_to_empty_file_updates_ast_and_memory() {
// Get the ast.
let ast = server.ast_map.get("file:///test.kcl").unwrap().clone();
assert_eq!(ast.ast, default_hashed);

server.executor_ctx().await.clone().unwrap().close().await;
}

#[tokio::test(flavor = "multi_thread")]
@@ -2642,6 +2578,8 @@ async fn kcl_test_kcl_lsp_code_unchanged_but_has_diagnostics_reexecute() {

// Assure we have no diagnostics.
assert_diagnostic_count(server.diagnostics_map.get("file:///test.kcl").as_deref(), 0);

server.executor_ctx().await.clone().unwrap().close().await;
}

#[tokio::test(flavor = "multi_thread")]
@@ -2717,141 +2655,8 @@ async fn kcl_test_kcl_lsp_code_and_ast_unchanged_but_has_diagnostics_reexecute()

// Assure we have no diagnostics.
assert_diagnostic_count(server.diagnostics_map.get("file:///test.kcl").as_deref(), 0);
}

#[tokio::test(flavor = "multi_thread")]
async fn kcl_test_kcl_lsp_code_and_ast_units_unchanged_but_has_diagnostics_reexecute_on_unit_change() {
let server = kcl_lsp_server(true).await.unwrap();

let code = r#"part001 = startSketchOn(XY)
|> startProfileAt([-10, -10], %)
|> line(end = [20, 0])
|> line(end = [0, 20])
|> line(end = [-20, 0])
|> close()
|> extrude(length = 3.14)"#;

// Send open file.
server
.did_open(tower_lsp::lsp_types::DidOpenTextDocumentParams {
text_document: tower_lsp::lsp_types::TextDocumentItem {
uri: "file:///test.kcl".try_into().unwrap(),
language_id: "kcl".to_string(),
version: 1,
text: code.to_string(),
},
})
.await;

// Get the ast.
let ast = server.ast_map.get("file:///test.kcl").unwrap().clone();
assert!(ast.ast != Node::<Program>::default());

// Assure we have no diagnostics.
assert_diagnostic_count(server.diagnostics_map.get("file:///test.kcl").as_deref(), 0);

// Add some fake diagnostics.
server.diagnostics_map.insert(
"file:///test.kcl".to_string(),
vec![tower_lsp::lsp_types::Diagnostic {
range: tower_lsp::lsp_types::Range {
start: tower_lsp::lsp_types::Position { line: 0, character: 0 },
end: tower_lsp::lsp_types::Position { line: 0, character: 0 },
},
message: "fake diagnostic".to_string(),
severity: Some(tower_lsp::lsp_types::DiagnosticSeverity::ERROR),
code: None,
source: None,
related_information: None,
tags: None,
data: None,
code_description: None,
}],
);
// Assure we have one diagnostics.
assert_diagnostic_count(server.diagnostics_map.get("file:///test.kcl").as_deref(), 1);

let units = server.executor_ctx().await.clone().unwrap().settings.units;
assert_eq!(units, crate::settings::types::UnitLength::Mm);

// Update the units to the _same_ units.
server
.update_units(crate::lsp::kcl::custom_notifications::UpdateUnitsParams {
text_document: crate::lsp::kcl::custom_notifications::TextDocumentIdentifier {
uri: "file:///test.kcl".try_into().unwrap(),
},
units: crate::settings::types::UnitLength::Mm,
text: code.to_string(),
})
.await
.unwrap();

let units = server.executor_ctx().await.clone().unwrap().settings.units;
assert_eq!(units, crate::settings::types::UnitLength::Mm);

// Get the ast.
let ast = server.ast_map.get("file:///test.kcl").unwrap().clone();
assert!(ast.ast != Node::<Program>::default());

// Assure we have no diagnostics.
assert_diagnostic_count(server.diagnostics_map.get("file:///test.kcl").as_deref(), 0);
}

#[tokio::test(flavor = "multi_thread")]
async fn kcl_test_kcl_lsp_code_and_ast_units_unchanged_but_has_memory_reexecute_on_unit_change() {
let server = kcl_lsp_server(true).await.unwrap();

let code = r#"part001 = startSketchOn(XY)
|> startProfileAt([-10, -10], %)
|> line(end = [20, 0])
|> line(end = [0, 20])
|> line(end = [-20, 0])
|> close()
|> extrude(length = 3.14)"#;

// Send open file.
server
.did_open(tower_lsp::lsp_types::DidOpenTextDocumentParams {
text_document: tower_lsp::lsp_types::TextDocumentItem {
uri: "file:///test.kcl".try_into().unwrap(),
language_id: "kcl".to_string(),
version: 1,
text: code.to_string(),
},
})
.await;

// Get the ast.
let ast = server.ast_map.get("file:///test.kcl").unwrap().clone();
assert!(ast.ast != Node::<Program>::default());

// Assure we have no diagnostics.
assert_diagnostic_count(server.diagnostics_map.get("file:///test.kcl").as_deref(), 0);

let units = server.executor_ctx().await.clone().unwrap().settings.units;
assert_eq!(units, crate::settings::types::UnitLength::Mm);

// Update the units to the _same_ units.
server
.update_units(crate::lsp::kcl::custom_notifications::UpdateUnitsParams {
text_document: crate::lsp::kcl::custom_notifications::TextDocumentIdentifier {
uri: "file:///test.kcl".try_into().unwrap(),
},
units: crate::settings::types::UnitLength::Mm,
text: code.to_string(),
})
.await
.unwrap();

let units = server.executor_ctx().await.clone().unwrap().settings.units;
assert_eq!(units, crate::settings::types::UnitLength::Mm);

// Get the ast.
let ast = server.ast_map.get("file:///test.kcl").unwrap().clone();
assert!(ast.ast != Node::<Program>::default());

// Assure we have no diagnostics.
assert_diagnostic_count(server.diagnostics_map.get("file:///test.kcl").as_deref(), 0);
server.executor_ctx().await.clone().unwrap().close().await;
}

#[tokio::test(flavor = "multi_thread")]
@@ -2885,23 +2690,6 @@ async fn kcl_test_kcl_lsp_cant_execute_set() {
// Assure we have no diagnostics.
assert_diagnostic_count(server.diagnostics_map.get("file:///test.kcl").as_deref(), 0);

// Update the units to the _same_ units.
let units = server.executor_ctx().await.clone().unwrap().settings.units;
assert_eq!(units, crate::settings::types::UnitLength::Mm);
server
.update_units(crate::lsp::kcl::custom_notifications::UpdateUnitsParams {
text_document: crate::lsp::kcl::custom_notifications::TextDocumentIdentifier {
uri: "file:///test.kcl".try_into().unwrap(),
},
units: crate::settings::types::UnitLength::Mm,
text: code.to_string(),
})
.await
.unwrap();

let units = server.executor_ctx().await.clone().unwrap().settings.units;
assert_eq!(units, crate::settings::types::UnitLength::Mm);

// Get the ast.
let ast = server.ast_map.get("file:///test.kcl").unwrap().clone();
assert!(ast.ast != Node::<Program>::default());
@@ -2918,23 +2706,6 @@ async fn kcl_test_kcl_lsp_cant_execute_set() {
.unwrap();
assert_eq!(server.can_execute().await, false);

// Update the units to the _same_ units.
let units = server.executor_ctx().await.clone().unwrap().settings.units;
assert_eq!(units, crate::settings::types::UnitLength::Mm);
server
.update_units(crate::lsp::kcl::custom_notifications::UpdateUnitsParams {
text_document: crate::lsp::kcl::custom_notifications::TextDocumentIdentifier {
uri: "file:///test.kcl".try_into().unwrap(),
},
units: crate::settings::types::UnitLength::Mm,
text: code.to_string(),
})
.await
.unwrap();

let units = server.executor_ctx().await.clone().unwrap().settings.units;
assert_eq!(units, crate::settings::types::UnitLength::Mm);

let mut default_hashed = Node::<Program>::default();
default_hashed.compute_digest();

@@ -2952,29 +2723,14 @@ async fn kcl_test_kcl_lsp_cant_execute_set() {
.unwrap();
assert_eq!(server.can_execute().await, true);

// Update the units to the _same_ units.
let units = server.executor_ctx.read().await.clone().unwrap().settings.units;
assert_eq!(units, crate::settings::types::UnitLength::Mm);
server
.update_units(crate::lsp::kcl::custom_notifications::UpdateUnitsParams {
text_document: crate::lsp::kcl::custom_notifications::TextDocumentIdentifier {
uri: "file:///test.kcl".try_into().unwrap(),
},
units: crate::settings::types::UnitLength::Mm,
text: code.to_string(),
})
.await
.unwrap();

let units = server.executor_ctx.read().await.clone().unwrap().settings.units;
assert_eq!(units, crate::settings::types::UnitLength::Mm);

// Get the ast.
let ast = server.ast_map.get("file:///test.kcl").unwrap().clone();
assert!(ast.ast != Node::<Program>::default());

// Assure we have no diagnostics.
assert_diagnostic_count(server.diagnostics_map.get("file:///test.kcl").as_deref(), 0);

server.executor_ctx().await.clone().unwrap().close().await;
}

#[tokio::test(flavor = "multi_thread")]
@@ -3243,6 +2999,8 @@ part001 = startSketchOn(XY)

// Check the diagnostics.
assert_diagnostic_count(server.diagnostics_map.get("file:///test.kcl").as_deref(), 2);

server.executor_ctx().await.clone().unwrap().close().await;
}

#[tokio::test(flavor = "multi_thread")]
@@ -3310,6 +3068,8 @@ NEW_LINT = 1"#

// Check the diagnostics.
assert_diagnostic_count(server.diagnostics_map.get("file:///test.kcl").as_deref(), 2);

server.executor_ctx().await.clone().unwrap().close().await;
}

#[tokio::test(flavor = "multi_thread")]
@@ -3377,6 +3137,8 @@ NEW_LINT = 1"#

// Check the diagnostics.
assert_diagnostic_count(server.diagnostics_map.get("file:///test.kcl").as_deref(), 1);

server.executor_ctx().await.clone().unwrap().close().await;
}

#[tokio::test(flavor = "multi_thread")]
@@ -3460,6 +3222,8 @@ NEW_LINT = 1"#

// Check the diagnostics.
assert_diagnostic_count(server.diagnostics_map.get("file:///test.kcl").as_deref(), 1);

server.executor_ctx().await.clone().unwrap().close().await;
}

#[tokio::test(flavor = "multi_thread")]
@@ -3551,6 +3315,8 @@ part001 = startSketchOn(XY)

// Check the diagnostics.
assert_diagnostic_count(server.diagnostics_map.get("file:///test.kcl").as_deref(), 2);

server.executor_ctx().await.clone().unwrap().close().await;
}

#[tokio::test(flavor = "multi_thread")]
@@ -3649,4 +3415,6 @@ async fn kcl_test_kcl_lsp_multi_file_error() {
} else {
panic!("Expected diagnostics");
}

server.executor_ctx().await.clone().unwrap().close().await;
}

@@ -88,6 +88,7 @@ pub(crate) fn read_std(mod_name: &str) -> Option<&'static str> {
"prelude" => Some(include_str!("../std/prelude.kcl")),
"math" => Some(include_str!("../std/math.kcl")),
"sketch" => Some(include_str!("../std/sketch.kcl")),
"turns" => Some(include_str!("../std/turns.kcl")),
_ => None,
}
}

@ -1,5 +1,4 @@
|
||||
pub(crate) mod digest;
|
||||
pub mod modify;
|
||||
pub mod types;
|
||||
|
||||
use crate::{
|
||||
|
@ -1,285 +0,0 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use kcmc::{
|
||||
each_cmd as mcmd, ok_response::OkModelingCmdResponse, shared::PathCommand, websocket::OkWebSocketResponseData,
|
||||
ModelingCmd,
|
||||
};
|
||||
use kittycad_modeling_cmds as kcmc;
|
||||
|
||||
use super::types::{CallExpressionKw, Identifier, LabeledArg, LiteralValue};
|
||||
use crate::{
|
||||
engine::EngineManager,
|
||||
errors::{KclError, KclErrorDetails},
|
||||
execution::Point2d,
|
||||
parsing::ast::types::{
|
||||
ArrayExpression, CallExpression, ConstraintLevel, FormatOptions, Literal, Node, PipeExpression,
|
||||
PipeSubstitution, VariableDeclarator,
|
||||
},
|
||||
source_range::SourceRange,
|
||||
ModuleId, Program,
|
||||
};
|
||||
|
||||
type Point3d = kcmc::shared::Point3d<f64>;
|
||||
|
||||
#[derive(Debug)]
|
||||
/// The control point data for a curve or line.
|
||||
struct ControlPointData {
|
||||
/// The control points for the curve or line.
|
||||
points: Vec<Point3d>,
|
||||
/// The command that created this curve or line.
|
||||
_command: PathCommand,
|
||||
/// The id of the curve or line.
|
||||
_id: uuid::Uuid,
|
||||
}
|
||||
|
||||
const EPSILON: f64 = 0.015625; // or 2^-6
|
||||
|
||||
/// Update the AST to reflect the new state of the program after something like
|
||||
/// a move or a new line.
|
||||
pub async fn modify_ast_for_sketch(
|
||||
engine: &Arc<Box<dyn EngineManager>>,
|
||||
program: &mut Program,
|
||||
module_id: ModuleId,
|
||||
// The name of the sketch.
|
||||
sketch_name: &str,
|
||||
// The type of plane the sketch is on. `XY` or `XZ`, etc
|
||||
plane: crate::execution::PlaneType,
|
||||
// The ID of the parent sketch.
|
||||
sketch_id: uuid::Uuid,
|
||||
) -> Result<String, KclError> {
|
||||
// First we need to check if this sketch is constrained (even partially).
|
||||
// If it is, we cannot modify it.
|
||||
|
||||
// Get the information about the sketch.
|
||||
if let Some(ast_sketch) = program.ast.get_variable(sketch_name) {
|
||||
let constraint_level = match ast_sketch {
|
||||
super::types::Definition::Variable(var) => var.get_constraint_level(),
|
||||
super::types::Definition::Import(import) => import.get_constraint_level(),
|
||||
super::types::Definition::Type(_) => ConstraintLevel::Ignore {
|
||||
source_ranges: Vec::new(),
|
||||
},
|
||||
};
|
||||
match &constraint_level {
|
||||
ConstraintLevel::None { source_ranges: _ } => {}
|
||||
ConstraintLevel::Ignore { source_ranges: _ } => {}
|
||||
ConstraintLevel::Partial {
|
||||
source_ranges: _,
|
||||
levels,
|
||||
} => {
|
||||
return Err(KclError::Engine(KclErrorDetails {
|
||||
message: format!(
|
||||
"Sketch {} is constrained `{}` and cannot be modified",
|
||||
sketch_name, constraint_level
|
||||
),
|
||||
source_ranges: levels.get_all_partial_or_full_source_ranges(),
|
||||
}));
|
||||
}
|
||||
ConstraintLevel::Full { source_ranges } => {
|
||||
return Err(KclError::Engine(KclErrorDetails {
|
||||
message: format!(
|
||||
"Sketch {} is constrained `{}` and cannot be modified",
|
||||
sketch_name, constraint_level
|
||||
),
|
||||
source_ranges: source_ranges.clone(),
|
||||
}));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Let's start by getting the path info.
|
||||
|
||||
// Let's get the path info.
|
||||
let resp = engine
|
||||
.send_modeling_cmd(
|
||||
uuid::Uuid::new_v4(),
|
||||
SourceRange::default(),
|
||||
&ModelingCmd::PathGetInfo(mcmd::PathGetInfo { path_id: sketch_id }),
|
||||
)
|
||||
.await?;
|
||||
|
||||
let OkWebSocketResponseData::Modeling {
|
||||
modeling_response: OkModelingCmdResponse::PathGetInfo(path_info),
|
||||
} = &resp
|
||||
else {
|
||||
return Err(KclError::Engine(KclErrorDetails {
|
||||
message: format!("Get path info response was not as expected: {:?}", resp),
|
||||
source_ranges: vec![SourceRange::default()],
|
||||
}));
|
||||
};
|
||||
|
||||
// Now let's get the control points for all the segments.
|
||||
// TODO: We should probably await all these at once so we aren't going one by one.
|
||||
// But I guess this is fine for now.
|
||||
// We absolutely have to preserve the order of the control points.
|
||||
let mut control_points = Vec::new();
|
||||
for segment in &path_info.segments {
|
||||
if let Some(command_id) = &segment.command_id {
|
||||
let cmd = ModelingCmd::from(mcmd::CurveGetControlPoints {
|
||||
curve_id: (*command_id).into(),
|
||||
});
|
||||
let h = engine.send_modeling_cmd(uuid::Uuid::new_v4(), SourceRange::default(), &cmd);
|
||||
|
||||
let OkWebSocketResponseData::Modeling {
|
||||
modeling_response: OkModelingCmdResponse::CurveGetControlPoints(data),
|
||||
} = h.await?
|
||||
else {
|
||||
return Err(KclError::Engine(KclErrorDetails {
|
||||
message: format!("Curve get control points response was not as expected: {:?}", resp),
|
||||
source_ranges: vec![SourceRange::default()],
|
||||
}));
|
||||
};
|
||||
|
||||
control_points.push(ControlPointData {
|
||||
points: data.control_points.clone(),
|
||||
_command: segment.command,
|
||||
_id: (*command_id).into(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if control_points.is_empty() {
|
||||
return Err(KclError::Engine(KclErrorDetails {
|
||||
message: format!("No control points found for sketch {}", sketch_name),
|
||||
source_ranges: vec![SourceRange::default()],
|
||||
}));
|
||||
}
|
||||
|
||||
let first_control_points = control_points.first().ok_or_else(|| {
|
||||
KclError::Engine(KclErrorDetails {
|
||||
message: format!("No control points found for sketch {}", sketch_name),
|
||||
source_ranges: vec![SourceRange::default()],
|
||||
})
|
||||
})?;
|
||||
|
||||
let mut additional_lines = Vec::new();
|
||||
let mut last_point = first_control_points.points[1];
|
||||
for control_point in control_points[1..].iter() {
|
||||
additional_lines.push([
|
||||
(control_point.points[1].x - last_point.x),
|
||||
(control_point.points[1].y - last_point.y),
|
||||
]);
|
||||
last_point = Point3d {
|
||||
x: control_point.points[1].x,
|
||||
y: control_point.points[1].y,
|
||||
z: control_point.points[1].z,
|
||||
};
|
||||
}
|
||||
|
||||
// Okay now let's recalculate the sketch from the control points.
|
||||
let start_sketch_at_end = Point3d {
|
||||
x: (first_control_points.points[1].x - first_control_points.points[0].x),
|
||||
y: (first_control_points.points[1].y - first_control_points.points[0].y),
|
||||
z: (first_control_points.points[1].z - first_control_points.points[0].z),
|
||||
};
|
||||
let sketch = create_start_sketch_on(
|
||||
sketch_name,
|
||||
[first_control_points.points[0].x, first_control_points.points[0].y],
|
||||
[start_sketch_at_end.x, start_sketch_at_end.y],
|
||||
plane,
|
||||
additional_lines,
|
||||
)?;
|
||||
|
||||
// Add the sketch back to the program.
|
||||
program.ast.replace_variable(sketch_name, sketch);
|
||||
|
||||
let recasted = program.ast.recast(&FormatOptions::default(), 0);
|
||||
|
||||
// Re-parse the ast so we get the correct source ranges.
|
||||
program.ast = crate::parsing::parse_str(&recasted, module_id).parse_errs_as_err()?;
|
||||
|
||||
Ok(recasted)
|
||||
}
|
||||
|
||||
/// Create a pipe expression that starts a sketch at the given point and draws a line to the given point.
|
||||
fn create_start_sketch_on(
|
||||
name: &str,
|
||||
start: [f64; 2],
|
||||
end: [f64; 2],
|
||||
plane: crate::execution::PlaneType,
|
||||
additional_lines: Vec<[f64; 2]>,
|
||||
) -> Result<Node<VariableDeclarator>, KclError> {
|
||||
let start_sketch_on = CallExpression::new("startSketchOn", vec![Literal::new(plane.to_string().into()).into()])?;
|
||||
let start_profile_at = CallExpression::new(
|
||||
"startProfileAt",
|
||||
vec![
|
||||
ArrayExpression::new(vec![
|
||||
Literal::new(LiteralValue::from_f64_no_uom(round_before_recast(start[0]))).into(),
|
||||
Literal::new(LiteralValue::from_f64_no_uom(round_before_recast(start[1]))).into(),
|
||||
])
|
||||
.into(),
|
||||
PipeSubstitution::new().into(),
|
||||
],
|
||||
)?;
|
||||
|
||||
// Keep track of where we are so we can close the sketch if we need to.
|
||||
let mut current_position = Point2d {
|
||||
x: start[0],
|
||||
y: start[1],
|
||||
};
|
||||
current_position.x += end[0];
|
||||
current_position.y += end[1];
|
||||
|
||||
let expr = ArrayExpression::new(vec![
|
||||
Literal::new(LiteralValue::from_f64_no_uom(round_before_recast(end[0]))).into(),
|
||||
Literal::new(LiteralValue::from_f64_no_uom(round_before_recast(end[1]))).into(),
|
||||
])
|
||||
.into();
|
||||
let initial_line = CallExpressionKw::new(
|
||||
"line",
|
||||
None,
|
||||
vec![LabeledArg {
|
||||
label: Node::no_src(super::types::Identifier {
|
||||
name: "end".to_owned(),
|
||||
digest: None,
|
||||
}),
|
||||
arg: expr,
|
||||
}],
|
||||
)?;
|
||||
|
||||
let mut pipe_body = vec![start_sketch_on.into(), start_profile_at.into(), initial_line.into()];
|
||||
|
||||
for (index, line) in additional_lines.iter().enumerate() {
|
||||
current_position.x += line[0];
|
||||
current_position.y += line[1];
|
||||
|
||||
// If we are on the last line, check if we have to close the sketch.
|
||||
if index == additional_lines.len() - 1 {
|
||||
let diff_x = (current_position.x - start[0]).abs();
|
||||
let diff_y = (current_position.y - start[1]).abs();
|
||||
// Compare the end of the last line to the start of the first line.
|
||||
// This is a bit more lenient if you look at the value of epsilon.
|
||||
if diff_x <= EPSILON && diff_y <= EPSILON {
|
||||
// We have to close the sketch.
|
||||
let close = CallExpressionKw::new("close", None, vec![])?;
|
||||
pipe_body.push(close.into());
|
||||
break;
|
||||
}
|
||||
}
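// Example of the closure tolerance above: with EPSILON = 0.015625 (2^-6), the
// final segment becomes a `close()` call instead of a `line()` call whenever
// the accumulated end point lands within EPSILON of the sketch start in both
// x and y.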
|
||||
|
||||
// TODO: we should check if we should close the sketch.
|
||||
let expr = ArrayExpression::new(vec![
|
||||
Literal::new(LiteralValue::from_f64_no_uom(round_before_recast(line[0]))).into(),
|
||||
Literal::new(LiteralValue::from_f64_no_uom(round_before_recast(line[1]))).into(),
|
||||
])
|
||||
.into();
|
||||
let line = CallExpressionKw::new(
|
||||
"line",
|
||||
None,
|
||||
vec![LabeledArg {
|
||||
arg: expr,
|
||||
label: Node::no_src(Identifier {
|
||||
name: "end".to_owned(),
|
||||
digest: None,
|
||||
}),
|
||||
}],
|
||||
)?;
|
||||
pipe_body.push(line.into());
|
||||
}
|
||||
|
||||
Ok(VariableDeclarator::new(name, PipeExpression::new(pipe_body).into()))
|
||||
}
|
||||
|
||||
fn round_before_recast(num: f64) -> f64 {
|
||||
// We use 2 decimal places.
|
||||
(num * 100.0).round() / 100.0
|
||||
}
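// Minimal illustrative sketch (a hypothetical test module, not part of the
// original file): `round_before_recast` keeps two decimal places so that the
// recast output stays stable across tiny floating-point differences.
#[cfg(test)]
mod round_before_recast_sketch {
    use super::round_before_recast;

    #[test]
    fn rounds_to_two_decimal_places() {
        assert_eq!(round_before_recast(1.2345), 1.23);
        assert_eq!(round_before_recast(2.678), 2.68);
    }
}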
@ -133,6 +133,13 @@ impl<T> Node<T> {
})
}

fn reset_source(&mut self) {
self.start = 0;
self.end = 0;
self.module_id = ModuleId::default();
self.comment_start = 0;
}

pub fn as_source_range(&self) -> SourceRange {
SourceRange::new(self.start, self.end, self.module_id)
}
@ -345,7 +352,10 @@ impl Node<Program> {
let mut found = false;
for node in &mut new_program.inner_attrs {
if node.name() == Some(annotations::SETTINGS) {
*node = Node::no_src(Annotation::new_from_meta_settings(&settings));
node.inner = Annotation::new_from_meta_settings(&settings);
// Previous source range no longer makes sense, but we want to
// preserve other things like comments.
node.reset_source();
found = true;
break;
}
@ -359,6 +369,26 @@ impl Node<Program> {

Ok(new_program)
}

/// Returns true if the given KCL is empty or only contains settings that
/// would be auto-generated.
pub fn is_empty_or_only_settings(&self) -> bool {
if !self.body.is_empty() {
return false;
}

if self.non_code_meta.start_nodes.iter().any(|node| node.is_comment()) {
return false;
}

for item in &self.inner_attrs {
if item.name() != Some(annotations::SETTINGS) {
return false;
}
}

true
}
}

impl Program {
@ -1609,19 +1639,21 @@ impl ImportStatement {
|
||||
return Some(alias.name.clone());
|
||||
}
|
||||
|
||||
let mut parts = match &self.path {
|
||||
ImportPath::Kcl { filename: s } | ImportPath::Foreign { path: s } => s.split('.'),
|
||||
_ => return None,
|
||||
};
|
||||
let path = parts.next()?;
|
||||
let _ext = parts.next()?;
|
||||
let rest = parts.next();
|
||||
match &self.path {
|
||||
ImportPath::Kcl { filename: s } | ImportPath::Foreign { path: s } => {
|
||||
let mut parts = s.split('.');
|
||||
let path = parts.next()?;
|
||||
let _ext = parts.next()?;
|
||||
let rest = parts.next();
|
||||
|
||||
if rest.is_some() {
|
||||
return None;
|
||||
if rest.is_some() {
|
||||
return None;
|
||||
}
|
||||
|
||||
path.rsplit(&['/', '\\']).next().map(str::to_owned)
|
||||
}
|
||||
ImportPath::Std { path } => path.last().cloned(),
|
||||
}
|
||||
|
||||
path.rsplit(&['/', '\\']).next().map(str::to_owned)
|
||||
}
|
||||
}
|
||||
|
||||
@ -3552,6 +3584,37 @@ mod tests {
|
||||
|
||||
use super::*;
|
||||
|
||||
#[track_caller]
|
||||
fn parse(code: &str) -> Node<Program> {
|
||||
crate::parsing::top_level_parse(code).unwrap()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_empty_or_only_settings() {
|
||||
// Empty is empty.
|
||||
assert!(parse("").is_empty_or_only_settings());
|
||||
|
||||
// Whitespace is empty.
|
||||
assert!(parse(" ").is_empty_or_only_settings());
|
||||
|
||||
// Settings are empty.
|
||||
assert!(parse(r#"@settings(defaultLengthUnit = mm)"#).is_empty_or_only_settings());
|
||||
|
||||
// Only comments is not empty.
|
||||
assert!(!parse("// comment").is_empty_or_only_settings());
|
||||
|
||||
// Any statement is not empty.
|
||||
assert!(!parse("5").is_empty_or_only_settings());
|
||||
|
||||
// Any statement is not empty, even with settings.
|
||||
let code = r#"@settings(defaultLengthUnit = mm)
|
||||
5"#;
|
||||
assert!(!parse(code).is_empty_or_only_settings());
|
||||
|
||||
// Non-settings attributes are not empty.
|
||||
assert!(!parse("@foo").is_empty_or_only_settings());
|
||||
}
|
||||
|
||||
// We have this as a test so we can ensure it never panics with an unwrap in the server.
|
||||
#[test]
|
||||
fn test_variable_kind_to_completion() {
|
||||
@ -4140,6 +4203,50 @@ startSketchOn(XY)
|
||||
r#"@settings(defaultLengthUnit = mm)
|
||||
|
||||
startSketchOn(XY)
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn test_change_meta_settings_preserves_comments() {
|
||||
let code = r#"// Title
|
||||
|
||||
// Set Units
|
||||
@settings(defaultLengthUnit = in)
|
||||
|
||||
// Between
|
||||
|
||||
// Above Code
|
||||
5
|
||||
"#;
|
||||
let program = crate::parsing::top_level_parse(code).unwrap();
|
||||
|
||||
let new_program = program
|
||||
.change_meta_settings(crate::execution::MetaSettings {
|
||||
default_length_units: crate::execution::types::UnitLen::Cm,
|
||||
..Default::default()
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
let result = new_program.meta_settings().unwrap();
|
||||
assert!(result.is_some());
|
||||
let meta_settings = result.unwrap();
|
||||
|
||||
assert_eq!(meta_settings.default_length_units, crate::execution::types::UnitLen::Cm);
|
||||
|
||||
let formatted = new_program.recast(&Default::default(), 0);
|
||||
|
||||
assert_eq!(
|
||||
formatted,
|
||||
r#"// Title
|
||||
|
||||
// Set Units
|
||||
@settings(defaultLengthUnit = cm)
|
||||
|
||||
// Between
|
||||
|
||||
// Above Code
|
||||
5
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
@ -152,7 +152,12 @@ const STR_DEPRECATIONS: [(&str, &str); 6] = [
("-YZ", "-YZ"),
];
const FN_DEPRECATIONS: [(&str, &str); 3] = [("pi", "PI"), ("e", "E"), ("tau", "TAU")];
const CONST_DEPRECATIONS: [(&str, &str); 0] = [];
const CONST_DEPRECATIONS: [(&str, &str); 4] = [
("ZERO", "turns::ZERO"),
("QUARTER_TURN", "turns::QUARTER_TURN"),
("HALF_TURN", "turns::HALF_TURN"),
("THREE_QUARTER_TURN", "turns::THREE_QUARTER_TURN"),
];
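// Hedged sketch: `lookup_const_deprecation` is a hypothetical helper, not the
// crate's actual `deprecation` implementation, showing that resolving a
// deprecated constant against the table above can be a simple linear scan.
fn lookup_const_deprecation(name: &str) -> Option<&'static str> {
    CONST_DEPRECATIONS
        .iter()
        .find(|(old, _)| *old == name)
        .map(|(_, new)| *new)
}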
#[derive(Clone, Copy)]
pub enum DeprecationKind {
@ -1823,14 +1823,6 @@ fn import_stmt(i: &mut TokenSlice) -> PResult<BoxNode<ImportStatement>> {
|
||||
)
|
||||
.into(),
|
||||
));
|
||||
} else if matches!(path, ImportPath::Std { .. }) && matches!(selector, ImportSelector::None { .. }) {
|
||||
return Err(ErrMode::Cut(
|
||||
CompilationError::fatal(
|
||||
SourceRange::new(start, end, module_id),
|
||||
"the standard library cannot be imported as a part",
|
||||
)
|
||||
.into(),
|
||||
));
|
||||
}
|
||||
|
||||
Ok(Node::boxed(
|
||||
@ -2341,21 +2333,6 @@ fn nameable_identifier(i: &mut TokenSlice) -> PResult<Node<Identifier>> {
|
||||
));
|
||||
}
|
||||
|
||||
if let Some(suggestion) = super::deprecation(&result.name, DeprecationKind::Const) {
|
||||
ParseContext::warn(
|
||||
CompilationError::err(
|
||||
result.as_source_range(),
|
||||
format!("Using `{}` is deprecated, prefer using `{}`.", result.name, suggestion),
|
||||
)
|
||||
.with_suggestion(
|
||||
format!("Replace `{}` with `{}`", result.name, suggestion),
|
||||
suggestion,
|
||||
None,
|
||||
Tag::Deprecated,
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
@ -2374,8 +2351,7 @@ fn name(i: &mut TokenSlice) -> PResult<Node<Name>> {
|
||||
let name = idents.pop().unwrap();
|
||||
let end = name.end;
|
||||
let module_id = name.module_id;
|
||||
|
||||
Ok(Node::new(
|
||||
let result = Node::new(
|
||||
Name {
|
||||
name,
|
||||
path: idents,
|
||||
@ -2385,7 +2361,24 @@ fn name(i: &mut TokenSlice) -> PResult<Node<Name>> {
|
||||
start,
|
||||
end,
|
||||
module_id,
|
||||
))
|
||||
);
|
||||
|
||||
if let Some(suggestion) = super::deprecation(&result.to_string(), DeprecationKind::Const) {
|
||||
ParseContext::warn(
|
||||
CompilationError::err(
|
||||
result.as_source_range(),
|
||||
format!("Using `{result}` is deprecated, prefer using `{suggestion}`."),
|
||||
)
|
||||
.with_suggestion(
|
||||
format!("Replace `{result}` with `{suggestion}`"),
|
||||
suggestion,
|
||||
None,
|
||||
Tag::Deprecated,
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
impl TryFrom<Token> for Node<TagDeclarator> {
|
||||
|
@ -114,6 +114,8 @@ async fn unparse_test(test: &Test) {
|
||||
let kcl_files = kcl_files.into_iter().filter(|f| f != &entry_point);
|
||||
let futures = kcl_files
|
||||
.into_iter()
|
||||
.filter(|file| file.extension().is_some_and(|ext| ext == "kcl")) // We only care about kcl
|
||||
// files here.
|
||||
.map(|file| {
|
||||
let snap_path = Path::new("..").join(&test.output_dir);
|
||||
tokio::spawn(async move {
|
||||
@ -153,13 +155,9 @@ async fn execute_test(test: &Test, render_to_png: bool, export_step: bool) {
|
||||
let ast = crate::Program::parse_no_errs(&input).unwrap();
|
||||
|
||||
// Run the program.
|
||||
let exec_res = crate::test_server::execute_and_snapshot_ast(
|
||||
ast,
|
||||
crate::settings::types::UnitLength::Mm,
|
||||
Some(test.input_dir.join(&test.entry_point)),
|
||||
export_step,
|
||||
)
|
||||
.await;
|
||||
let exec_res =
|
||||
crate::test_server::execute_and_snapshot_ast(ast, Some(test.input_dir.join(&test.entry_point)), export_step)
|
||||
.await;
|
||||
match exec_res {
|
||||
Ok((exec_state, env_ref, png, step)) => {
|
||||
let fail_path = test.output_dir.join("execution_error.snap");
|
||||
|
@ -1,6 +1,7 @@
|
||||
//! Run all the KCL samples in the `kcl_samples` directory.
|
||||
use std::{
|
||||
fs,
|
||||
panic::{catch_unwind, AssertUnwindSafe},
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
@ -35,9 +36,35 @@ fn parse(dir_name: &str, dir_path: &Path) {
|
||||
|
||||
#[kcl_directory_test_macro::test_all_dirs("../public/kcl-samples")]
|
||||
async fn unparse(dir_name: &str, dir_path: &Path) {
|
||||
// TODO: turn this on when we fix the comments recasting.
|
||||
// let t = test(dir_name, dir_path.join("main.kcl").to_str().unwrap().to_owned());
|
||||
// super::unparse_test(&t).await;
|
||||
let t = test(dir_name, dir_path.join("main.kcl").to_str().unwrap().to_owned());
|
||||
unparse_test(&t).await;
|
||||
}
|
||||
|
||||
/// This is different from the rest of the simulation tests because we want to write
|
||||
/// back out to the original file.
|
||||
async fn unparse_test(test: &Test) {
|
||||
let kcl_files = crate::unparser::walk_dir(&test.input_dir).await.unwrap();
|
||||
let futures = kcl_files
|
||||
.into_iter()
|
||||
.filter(|file| file.extension().is_some_and(|ext| ext == "kcl")) // We only care about kcl
|
||||
// files here.
|
||||
.map(|file| {
|
||||
tokio::spawn(async move {
|
||||
let contents = tokio::fs::read_to_string(&file).await.unwrap();
|
||||
let program = crate::Program::parse_no_errs(&contents).unwrap();
|
||||
let recast = program.recast_with_options(&Default::default());
|
||||
|
||||
catch_unwind(AssertUnwindSafe(|| {
|
||||
expectorate::assert_contents(&file, &recast.to_string());
|
||||
}))
|
||||
})
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
// Join all futures and await their completion.
|
||||
for future in futures {
|
||||
future.await.unwrap().unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
#[kcl_directory_test_macro::test_all_dirs("../public/kcl-samples")]
|
||||
|
@ -66,7 +66,7 @@ pub async fn appearance(exec_state: &mut ExecState, args: Args) -> Result<KclVal
|
||||
}));
|
||||
}
|
||||
|
||||
let result = inner_appearance(solids, data.color, data.metalness, data.roughness, args).await?;
|
||||
let result = inner_appearance(solids, data.color, data.metalness, data.roughness, exec_state, args).await?;
|
||||
Ok(result.into())
|
||||
}
|
||||
|
||||
@ -287,6 +287,7 @@ async fn inner_appearance(
|
||||
color: String,
|
||||
metalness: Option<f64>,
|
||||
roughness: Option<f64>,
|
||||
exec_state: &mut ExecState,
|
||||
args: Args,
|
||||
) -> Result<Vec<Solid>, KclError> {
|
||||
for solid in &solids {
|
||||
@ -306,7 +307,7 @@ async fn inner_appearance(
|
||||
};
|
||||
|
||||
args.batch_modeling_cmd(
|
||||
uuid::Uuid::new_v4(),
|
||||
exec_state.next_uuid(),
|
||||
ModelingCmd::from(mcmd::ObjectSetMaterialParamsPbr {
|
||||
object_id: solid.id,
|
||||
color,
|
||||
|
@ -8,6 +8,7 @@ use kittycad_modeling_cmds as kcmc;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use super::DEFAULT_TOLERANCE;
|
||||
use crate::{
|
||||
errors::{KclError, KclErrorDetails},
|
||||
execution::{
|
||||
@ -15,7 +16,6 @@ use crate::{
|
||||
EdgeCut, ExecState, ExtrudeSurface, FilletSurface, GeoMeta, KclValue, Solid, TagIdentifier,
|
||||
},
|
||||
parsing::ast::types::TagNode,
|
||||
settings::types::UnitLength,
|
||||
std::Args,
|
||||
SourceRange,
|
||||
};
|
||||
@ -165,7 +165,7 @@ async fn inner_fillet(
|
||||
edge_id,
|
||||
object_id: solid.id,
|
||||
radius: LengthUnit(radius),
|
||||
tolerance: LengthUnit(tolerance.unwrap_or(default_tolerance(&args.ctx.settings.units))),
|
||||
tolerance: LengthUnit(tolerance.unwrap_or(DEFAULT_TOLERANCE)),
|
||||
cut_type: CutType::Fillet,
|
||||
// We make this a none so that we can remove it in the future.
|
||||
face_id: None,
|
||||
@ -195,17 +195,6 @@ async fn inner_fillet(
|
||||
Ok(solid)
|
||||
}
|
||||
|
||||
pub(crate) fn default_tolerance(units: &UnitLength) -> f64 {
|
||||
match units {
|
||||
UnitLength::Mm => 0.0000001,
|
||||
UnitLength::Cm => 0.0000001,
|
||||
UnitLength::In => 0.0000001,
|
||||
UnitLength::Ft => 0.0001,
|
||||
UnitLength::Yd => 0.001,
|
||||
UnitLength::M => 0.001,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
@ -7,11 +7,12 @@ use kcl_derive_docs::stdlib;
|
||||
use kcmc::{each_cmd as mcmd, length_unit::LengthUnit, ModelingCmd};
|
||||
use kittycad_modeling_cmds as kcmc;
|
||||
|
||||
use super::DEFAULT_TOLERANCE;
|
||||
use crate::{
|
||||
errors::{KclError, KclErrorDetails},
|
||||
execution::{types::RuntimeType, ExecState, KclValue, Sketch, Solid},
|
||||
parsing::ast::types::TagNode,
|
||||
std::{extrude::do_post_extrude, fillet::default_tolerance, Args},
|
||||
std::{extrude::do_post_extrude, Args},
|
||||
};
|
||||
|
||||
const DEFAULT_V_DEGREE: u32 = 2;
|
||||
@ -159,7 +160,7 @@ async fn inner_loft(
|
||||
section_ids: sketches.iter().map(|group| group.id).collect(),
|
||||
base_curve_index,
|
||||
bez_approximate_rational,
|
||||
tolerance: LengthUnit(tolerance.unwrap_or(default_tolerance(&args.ctx.settings.units))),
|
||||
tolerance: LengthUnit(tolerance.unwrap_or(DEFAULT_TOLERANCE)),
|
||||
v_degree,
|
||||
}),
|
||||
)
|
||||
|
@ -278,6 +278,9 @@ pub enum FunctionKind {
UserDefined,
}

/// The default tolerance for modeling commands in [`kittycad_modeling_cmds::length_unit::LengthUnit`].
const DEFAULT_TOLERANCE: f64 = 0.0000001;

/// Compute the length of the given leg.
pub async fn leg_length(_exec_state: &mut ExecState, args: Args) -> Result<KclValue, KclError> {
let (hypotenuse, leg, ty) = args.get_hypotenuse_leg()?;
@ -5,11 +5,12 @@ use kcl_derive_docs::stdlib;
|
||||
use kcmc::{each_cmd as mcmd, length_unit::LengthUnit, shared::Angle, ModelingCmd};
|
||||
use kittycad_modeling_cmds::{self as kcmc};
|
||||
|
||||
use super::DEFAULT_TOLERANCE;
|
||||
use crate::{
|
||||
errors::{KclError, KclErrorDetails},
|
||||
execution::{types::RuntimeType, ExecState, KclValue, Sketch, Solid},
|
||||
parsing::ast::types::TagNode,
|
||||
std::{axis_or_reference::Axis2dOrEdgeReference, extrude::do_post_extrude, fillet::default_tolerance, Args},
|
||||
std::{axis_or_reference::Axis2dOrEdgeReference, extrude::do_post_extrude, Args},
|
||||
};
|
||||
|
||||
/// Revolve a sketch or set of sketches around an axis.
|
||||
@ -273,7 +274,7 @@ async fn inner_revolve(
|
||||
target: sketch.id.into(),
|
||||
axis,
|
||||
origin,
|
||||
tolerance: LengthUnit(tolerance.unwrap_or(default_tolerance(&args.ctx.settings.units))),
|
||||
tolerance: LengthUnit(tolerance.unwrap_or(DEFAULT_TOLERANCE)),
|
||||
axis_is_2d: true,
|
||||
}),
|
||||
)
|
||||
@ -287,7 +288,7 @@ async fn inner_revolve(
|
||||
angle,
|
||||
target: sketch.id.into(),
|
||||
edge_id,
|
||||
tolerance: LengthUnit(tolerance.unwrap_or(default_tolerance(&args.ctx.settings.units))),
|
||||
tolerance: LengthUnit(tolerance.unwrap_or(DEFAULT_TOLERANCE)),
|
||||
}),
|
||||
)
|
||||
.await?;
|
||||
|
@ -7,11 +7,12 @@ use kittycad_modeling_cmds::{self as kcmc};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use super::DEFAULT_TOLERANCE;
|
||||
use crate::{
|
||||
errors::KclError,
|
||||
execution::{types::RuntimeType, ExecState, Helix, KclValue, Sketch, Solid},
|
||||
parsing::ast::types::TagNode,
|
||||
std::{extrude::do_post_extrude, fillet::default_tolerance, Args},
|
||||
std::{extrude::do_post_extrude, Args},
|
||||
};
|
||||
|
||||
/// A path to sweep along.
|
||||
@ -191,7 +192,7 @@ async fn inner_sweep(
|
||||
target: sketch.id.into(),
|
||||
trajectory,
|
||||
sectional: sectional.unwrap_or(false),
|
||||
tolerance: LengthUnit(tolerance.unwrap_or(default_tolerance(&args.ctx.settings.units))),
|
||||
tolerance: LengthUnit(tolerance.unwrap_or(DEFAULT_TOLERANCE)),
|
||||
}),
|
||||
)
|
||||
.await?;
|
||||
|
@ -28,15 +28,22 @@ pub async fn scale(exec_state: &mut ExecState, args: Args) -> Result<KclValue, K
|
||||
]),
|
||||
exec_state,
|
||||
)?;
|
||||
let scale = args.get_kw_arg("scale")?;
|
||||
let scale_x = args.get_kw_arg("x")?;
|
||||
let scale_y = args.get_kw_arg("y")?;
|
||||
let scale_z = args.get_kw_arg("z")?;
|
||||
let global = args.get_kw_arg_opt("global")?;
|
||||
|
||||
let objects = inner_scale(objects, scale, global, exec_state, args).await?;
|
||||
let objects = inner_scale(objects, scale_x, scale_y, scale_z, global, exec_state, args).await?;
|
||||
Ok(objects.into())
|
||||
}
|
||||
|
||||
/// Scale a solid or a sketch.
|
||||
///
|
||||
/// This is really useful for resizing parts. You can create a part and then scale it to the
|
||||
/// correct size.
|
||||
///
|
||||
/// For sketches, you can use this to scale a sketch and then loft it with another sketch.
|
||||
///
|
||||
/// By default the transform is applied in local sketch axis, therefore the origin will not move.
|
||||
///
|
||||
/// If you want to apply the transform in global space, set `global` to `true`. The origin of the
|
||||
@ -78,7 +85,9 @@ pub async fn scale(exec_state: &mut ExecState, args: Args) -> Result<KclValue, K
|
||||
/// |> hole(pipeHole, %)
|
||||
/// |> sweep(path = sweepPath)
|
||||
/// |> scale(
|
||||
/// scale = [1.0, 1.0, 2.5],
|
||||
/// x = 1.0,
|
||||
/// y = 1.0,
|
||||
/// z = 2.5,
|
||||
/// )
|
||||
/// ```
|
||||
///
|
||||
@ -89,7 +98,9 @@ pub async fn scale(exec_state: &mut ExecState, args: Args) -> Result<KclValue, K
|
||||
///
|
||||
/// cube
|
||||
/// |> scale(
|
||||
/// scale = [1.0, 1.0, 2.5],
|
||||
/// x = 1.0,
|
||||
/// y = 1.0,
|
||||
/// z = 2.5,
|
||||
/// )
|
||||
/// ```
|
||||
///
|
||||
@ -124,7 +135,7 @@ pub async fn scale(exec_state: &mut ExecState, args: Args) -> Result<KclValue, K
|
||||
/// parts = sweep([rectangleSketch, circleSketch], path = sweepPath)
|
||||
///
|
||||
/// // Scale the sweep.
|
||||
/// scale(parts, scale = [1.0, 1.0, 0.5])
|
||||
/// scale(parts, x = 1.0, y = 1.0, z = 0.5)
|
||||
/// ```
|
||||
#[stdlib {
|
||||
name = "scale",
|
||||
@ -133,13 +144,17 @@ pub async fn scale(exec_state: &mut ExecState, args: Args) -> Result<KclValue, K
|
||||
unlabeled_first = true,
|
||||
args = {
|
||||
objects = {docs = "The solid, sketch, or set of solids or sketches to scale."},
|
||||
scale = {docs = "The scale factor for the x, y, and z axes."},
|
||||
x = {docs = "The scale factor for the x axis."},
|
||||
y = {docs = "The scale factor for the y axis."},
|
||||
z = {docs = "The scale factor for the z axis."},
|
||||
global = {docs = "If true, the transform is applied in global space. The origin of the model will move. By default, the transform is applied in local sketch axis, therefore the origin will not move."}
|
||||
}
|
||||
}]
|
||||
async fn inner_scale(
|
||||
objects: SolidOrSketchOrImportedGeometry,
|
||||
scale: [f64; 3],
|
||||
x: f64,
|
||||
y: f64,
|
||||
z: f64,
|
||||
global: Option<bool>,
|
||||
exec_state: &mut ExecState,
|
||||
args: Args,
|
||||
@ -159,11 +174,7 @@ async fn inner_scale(
|
||||
object_id,
|
||||
transforms: vec![shared::ComponentTransform {
|
||||
scale: Some(shared::TransformBy::<Point3d<f64>> {
|
||||
property: Point3d {
|
||||
x: scale[0],
|
||||
y: scale[1],
|
||||
z: scale[2],
|
||||
},
|
||||
property: Point3d { x, y, z },
|
||||
set: false,
|
||||
is_local: !global.unwrap_or(false),
|
||||
}),
|
||||
@ -190,15 +201,23 @@ pub async fn translate(exec_state: &mut ExecState, args: Args) -> Result<KclValu
|
||||
]),
|
||||
exec_state,
|
||||
)?;
|
||||
let translate = args.get_kw_arg("translate")?;
|
||||
let translate_x = args.get_kw_arg("x")?;
|
||||
let translate_y = args.get_kw_arg("y")?;
|
||||
let translate_z = args.get_kw_arg("z")?;
|
||||
let global = args.get_kw_arg_opt("global")?;
|
||||
|
||||
let objects = inner_translate(objects, translate, global, exec_state, args).await?;
|
||||
let objects = inner_translate(objects, translate_x, translate_y, translate_z, global, exec_state, args).await?;
|
||||
Ok(objects.into())
|
||||
}
|
||||
|
||||
/// Move a solid or a sketch.
|
||||
///
|
||||
/// This is really useful for assembling parts together. You can create a part
|
||||
/// and then move it to the correct location.
|
||||
///
|
||||
/// Translate is really useful for sketches if you want to move a sketch
|
||||
/// and then rotate it using the `rotate` function to create a loft.
|
||||
///
|
||||
/// ```no_run
|
||||
/// // Move a pipe.
|
||||
///
|
||||
@ -232,7 +251,9 @@ pub async fn translate(exec_state: &mut ExecState, args: Args) -> Result<KclValu
|
||||
/// |> hole(pipeHole, %)
|
||||
/// |> sweep(path = sweepPath)
|
||||
/// |> translate(
|
||||
/// translate = [1.0, 1.0, 2.5],
|
||||
/// x = 1.0,
|
||||
/// y = 1.0,
|
||||
/// z = 2.5,
|
||||
/// )
|
||||
/// ```
|
||||
///
|
||||
@ -243,7 +264,9 @@ pub async fn translate(exec_state: &mut ExecState, args: Args) -> Result<KclValu
|
||||
///
|
||||
/// cube
|
||||
/// |> translate(
|
||||
/// translate = [1.0, 1.0, 2.5],
|
||||
/// x = 1.0,
|
||||
/// y = 1.0,
|
||||
/// z = 2.5,
|
||||
/// )
|
||||
/// ```
|
||||
///
|
||||
@ -278,7 +301,7 @@ pub async fn translate(exec_state: &mut ExecState, args: Args) -> Result<KclValu
|
||||
/// parts = sweep([rectangleSketch, circleSketch], path = sweepPath)
|
||||
///
|
||||
/// // Move the sweeps.
|
||||
/// translate(parts, translate = [1.0, 1.0, 2.5])
|
||||
/// translate(parts, x = 1.0, y = 1.0, z = 2.5)
|
||||
/// ```
|
||||
///
|
||||
/// ```no_run
|
||||
@ -301,7 +324,9 @@ pub async fn translate(exec_state: &mut ExecState, args: Args) -> Result<KclValu
|
||||
///
|
||||
/// square(10)
|
||||
/// |> translate(
|
||||
/// translate = [5, 5, 0],
|
||||
/// x = 5,
|
||||
/// y = 5,
|
||||
/// z = 0,
|
||||
/// )
|
||||
/// |> extrude(
|
||||
/// length = 10,
|
||||
@ -324,7 +349,7 @@ pub async fn translate(exec_state: &mut ExecState, args: Args) -> Result<KclValu
|
||||
/// profile001 = square()
|
||||
///
|
||||
/// profile002 = square()
|
||||
/// |> translate(translate = [0, 0, 20])
|
||||
/// |> translate(x = 0, y = 0, z = 20)
|
||||
/// |> rotate(axis = [0, 0, 1.0], angle = 45)
|
||||
///
|
||||
/// loft([profile001, profile002])
|
||||
@ -336,13 +361,17 @@ pub async fn translate(exec_state: &mut ExecState, args: Args) -> Result<KclValu
|
||||
unlabeled_first = true,
|
||||
args = {
|
||||
objects = {docs = "The solid, sketch, or set of solids or sketches to move."},
|
||||
translate = {docs = "The amount to move the solid or sketch in all three axes."},
|
||||
x = {docs = "The amount to move the solid or sketch along the x axis."},
|
||||
y = {docs = "The amount to move the solid or sketch along the y axis."},
|
||||
z = {docs = "The amount to move the solid or sketch along the z axis."},
|
||||
global = {docs = "If true, the transform is applied in global space. The origin of the model will move. By default, the transform is applied in local sketch axis, therefore the origin will not move."}
|
||||
}
|
||||
}]
|
||||
async fn inner_translate(
|
||||
objects: SolidOrSketchOrImportedGeometry,
|
||||
translate: [f64; 3],
|
||||
x: f64,
|
||||
y: f64,
|
||||
z: f64,
|
||||
global: Option<bool>,
|
||||
exec_state: &mut ExecState,
|
||||
args: Args,
|
||||
@ -363,9 +392,9 @@ async fn inner_translate(
|
||||
transforms: vec![shared::ComponentTransform {
|
||||
translate: Some(shared::TransformBy::<Point3d<LengthUnit>> {
|
||||
property: shared::Point3d {
|
||||
x: LengthUnit(translate[0]),
|
||||
y: LengthUnit(translate[1]),
|
||||
z: LengthUnit(translate[2]),
|
||||
x: LengthUnit(x),
|
||||
y: LengthUnit(y),
|
||||
z: LengthUnit(z),
|
||||
},
|
||||
set: false,
|
||||
is_local: !global.unwrap_or(false),
|
||||
@ -506,6 +535,11 @@ pub async fn rotate(exec_state: &mut ExecState, args: Args) -> Result<KclValue,
|
||||
|
||||
/// Rotate a solid or a sketch.
|
||||
///
|
||||
/// This is really useful for assembling parts together. You can create a part
|
||||
/// and then rotate it to the correct orientation.
|
||||
///
|
||||
/// For sketches, you can use this to rotate a sketch and then loft it with another sketch.
|
||||
///
|
||||
/// ### Using Roll, Pitch, and Yaw
|
||||
///
|
||||
/// When rotating a part in 3D space, "roll," "pitch," and "yaw" refer to the
|
||||
@ -667,7 +701,7 @@ pub async fn rotate(exec_state: &mut ExecState, args: Args) -> Result<KclValue,
|
||||
/// profile001 = square()
|
||||
///
|
||||
/// profile002 = square()
|
||||
/// |> translate(translate = [0, 0, 20])
|
||||
/// |> translate(x = 0, y = 0, z = 20)
|
||||
/// |> rotate(axis = [0, 0, 1.0], angle = 45)
|
||||
///
|
||||
/// loft([profile001, profile002])
|
||||
|
@ -6,7 +6,6 @@ use crate::{
|
||||
engine::new_zoo_client,
|
||||
errors::ExecErrorWithState,
|
||||
execution::{EnvironmentRef, ExecState, ExecutorContext, ExecutorSettings},
|
||||
settings::types::UnitLength,
|
||||
ConnectionError, ExecError, KclError, KclErrorWithOutputs, Program,
|
||||
};
|
||||
|
||||
@ -19,12 +18,8 @@ pub struct RequestBody {
|
||||
|
||||
/// Executes a kcl program and takes a snapshot of the result.
|
||||
/// This returns the bytes of the snapshot.
|
||||
pub async fn execute_and_snapshot(
|
||||
code: &str,
|
||||
units: UnitLength,
|
||||
current_file: Option<PathBuf>,
|
||||
) -> Result<image::DynamicImage, ExecError> {
|
||||
let ctx = new_context(units, true, current_file).await?;
|
||||
pub async fn execute_and_snapshot(code: &str, current_file: Option<PathBuf>) -> Result<image::DynamicImage, ExecError> {
|
||||
let ctx = new_context(true, current_file).await?;
|
||||
let program = Program::parse_no_errs(code).map_err(KclErrorWithOutputs::no_outputs)?;
|
||||
let res = do_execute_and_snapshot(&ctx, program)
|
||||
.await
|
||||
@ -38,17 +33,26 @@ pub async fn execute_and_snapshot(
|
||||
/// This returns the bytes of the snapshot.
|
||||
pub async fn execute_and_snapshot_ast(
|
||||
ast: Program,
|
||||
units: UnitLength,
|
||||
current_file: Option<PathBuf>,
|
||||
with_export_step: bool,
|
||||
) -> Result<(ExecState, EnvironmentRef, image::DynamicImage, Option<Vec<u8>>), ExecErrorWithState> {
|
||||
let ctx = new_context(units, true, current_file).await?;
|
||||
let (exec_state, env, img) = do_execute_and_snapshot(&ctx, ast).await?;
|
||||
let ctx = new_context(true, current_file).await?;
|
||||
let (exec_state, env, img) = match do_execute_and_snapshot(&ctx, ast).await {
|
||||
Ok((exec_state, env_ref, img)) => (exec_state, env_ref, img),
|
||||
Err(err) => {
|
||||
// If there was an error executing the program, return it.
|
||||
// Close the context to avoid any resource leaks.
|
||||
ctx.close().await;
|
||||
return Err(err);
|
||||
}
|
||||
};
|
||||
let mut step = None;
|
||||
if with_export_step {
|
||||
let files = match ctx.export_step(true).await {
|
||||
Ok(f) => f,
|
||||
Err(err) => {
|
||||
// Close the context to avoid any resource leaks.
|
||||
ctx.close().await;
|
||||
return Err(ExecErrorWithState::new(
|
||||
ExecError::BadExport(format!("Export failed: {:?}", err)),
|
||||
exec_state.clone(),
|
||||
@ -64,10 +68,9 @@ pub async fn execute_and_snapshot_ast(
|
||||
|
||||
pub async fn execute_and_snapshot_no_auth(
|
||||
code: &str,
|
||||
units: UnitLength,
|
||||
current_file: Option<PathBuf>,
|
||||
) -> Result<(image::DynamicImage, EnvironmentRef), ExecError> {
|
||||
let ctx = new_context(units, false, current_file).await?;
|
||||
let ctx = new_context(false, current_file).await?;
|
||||
let program = Program::parse_no_errs(code).map_err(KclErrorWithOutputs::no_outputs)?;
|
||||
let res = do_execute_and_snapshot(&ctx, program)
|
||||
.await
|
||||
@ -111,11 +114,7 @@ async fn do_execute_and_snapshot(
|
||||
Ok((exec_state, result.0, img))
|
||||
}
|
||||
|
||||
pub async fn new_context(
|
||||
units: UnitLength,
|
||||
with_auth: bool,
|
||||
current_file: Option<PathBuf>,
|
||||
) -> Result<ExecutorContext, ConnectionError> {
|
||||
pub async fn new_context(with_auth: bool, current_file: Option<PathBuf>) -> Result<ExecutorContext, ConnectionError> {
|
||||
let mut client = new_zoo_client(if with_auth { None } else { Some("bad_token".to_string()) }, None)
|
||||
.map_err(ConnectionError::CouldNotMakeClient)?;
|
||||
if !with_auth {
|
||||
@ -126,7 +125,6 @@ pub async fn new_context(
|
||||
}
|
||||
|
||||
let mut settings = ExecutorSettings {
|
||||
units,
|
||||
highlight_edges: true,
|
||||
enable_ssao: false,
|
||||
show_grid: false,
|
||||
@ -145,7 +143,6 @@ pub async fn new_context(
|
||||
|
||||
pub async fn execute_and_export_step(
|
||||
code: &str,
|
||||
units: UnitLength,
|
||||
current_file: Option<PathBuf>,
|
||||
) -> Result<
|
||||
(
|
||||
@ -155,7 +152,7 @@ pub async fn execute_and_export_step(
|
||||
),
|
||||
ExecErrorWithState,
|
||||
> {
|
||||
let ctx = new_context(units, true, current_file).await?;
|
||||
let ctx = new_context(true, current_file).await?;
|
||||
let mut exec_state = ExecState::new(&ctx);
|
||||
let program = Program::parse_no_errs(code)
|
||||
.map_err(|err| ExecErrorWithState::new(KclErrorWithOutputs::no_outputs(err).into(), exec_state.clone()))?;
|
||||
|
@ -1,5 +1,8 @@
|
||||
use std::fmt::Write;
|
||||
|
||||
#[cfg(feature = "cli")]
|
||||
use clap::ValueEnum;
|
||||
|
||||
use crate::parsing::{
|
||||
ast::types::{
|
||||
Annotation, ArrayExpression, ArrayRangeExpression, BinaryExpression, BinaryOperator, BinaryPart, BodyItem,
|
||||
@ -864,10 +867,33 @@ impl Parameter {
}
}

/// Collect all the kcl files in a directory, recursively.
lazy_static::lazy_static! {

pub static ref IMPORT_FILE_EXTENSIONS: Vec<String> = {
let mut import_file_extensions = vec!["stp".to_string(), "glb".to_string(), "fbxb".to_string()];
#[cfg(feature = "cli")]
let named_extensions = kittycad::types::FileImportFormat::value_variants()
.iter()
.map(|x| format!("{}", x))
.collect::<Vec<String>>();
#[cfg(not(feature = "cli"))]
let named_extensions = vec![]; // We don't really need this outside of the CLI.
// Add all the default import formats.
import_file_extensions.extend_from_slice(&named_extensions);
import_file_extensions
};

pub static ref RELEVANT_EXTENSIONS: Vec<String> = {
let mut relevant_extensions = IMPORT_FILE_EXTENSIONS.clone();
relevant_extensions.push("kcl".to_string());
relevant_extensions
};
}
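// Hedged usage sketch: `is_relevant` is a hypothetical helper (not part of
// this diff) showing how the extension lists above are meant to be consulted
// when filtering candidate paths.
fn is_relevant(path: &std::path::Path) -> bool {
    path.extension()
        .is_some_and(|ext| RELEVANT_EXTENSIONS.contains(&ext.to_string_lossy().to_string()))
}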
/// Collect all the kcl (and other relevant) files in a directory, recursively.
#[cfg(not(target_arch = "wasm32"))]
#[async_recursion::async_recursion]
pub(crate) async fn walk_dir(dir: &std::path::PathBuf) -> Result<Vec<std::path::PathBuf>, anyhow::Error> {
pub async fn walk_dir(dir: &std::path::PathBuf) -> Result<Vec<std::path::PathBuf>, anyhow::Error> {
// Make sure we actually have a directory.
if !dir.is_dir() {
anyhow::bail!("`{}` is not a directory", dir.display());
@ -881,7 +907,10 @@ pub(crate) async fn walk_dir(dir: &std::path::PathBuf) -> Result<Vec<std::path::

if path.is_dir() {
files.extend(walk_dir(&path).await?);
} else if path.extension().is_some_and(|ext| ext == "kcl") {
} else if path
.extension()
.is_some_and(|ext| RELEVANT_EXTENSIONS.contains(&ext.to_string_lossy().to_string()))
{
files.push(path);
}
}
@ -901,6 +930,8 @@ pub async fn recast_dir(dir: &std::path::Path, options: &crate::FormatOptions) -

let futures = files
.into_iter()
.filter(|file| file.extension().is_some_and(|ext| ext == "kcl")) // We only care about kcl
// files here.
.map(|file| {
let options = options.clone();
tokio::spawn(async move {