move back to using dashmap and cleanup heaps of code (#2834)
Squashed commit messages (each signed off by Jess Frazelle <github@jessfraz.com>):

* more
* fixups
* updates
* everything pre mutex locks
* remove clones
* another clone
* iupdates
* fixes
* updates
* progress
* more fixes
* cleanup
* updates
* updates
* updates
* test-utils
* fixes
* updates
* updates
* all features
* better naming
* upates

Signed-off-by: Jess Frazelle <github@jessfraz.com>
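For context on the headline change: every `SafeMap` call site touched in this diff awaited `get`/`insert`/`clear`, which suggests it wrapped a `HashMap` behind an async lock, while `DashMap` shards its locking internally and exposes a synchronous API. A minimal sketch of the before/after shape (the `SafeMapSketch` type below is an assumption for illustration, not the real `lsp::safemap::SafeMap`):

    use std::collections::HashMap;

    use dashmap::DashMap;
    use tokio::sync::RwLock;

    // Before this PR (assumed shape): an async-locked map, so every read and write is awaited.
    struct SafeMapSketch<K: std::hash::Hash + Eq, V>(RwLock<HashMap<K, V>>);

    impl<K: std::hash::Hash + Eq, V: Clone> SafeMapSketch<K, V> {
        async fn insert(&self, k: K, v: V) {
            self.0.write().await.insert(k, v);
        }

        async fn get(&self, k: &K) -> Option<V> {
            self.0.read().await.get(k).cloned()
        }
    }

    #[tokio::main]
    async fn main() {
        // After: DashMap is synchronous and internally sharded, so the `.await`s on map
        // operations disappear and values are borrowed through cheap guards.
        let code_map: DashMap<String, Vec<u8>> = DashMap::new();
        code_map.insert("file:///test.kcl".to_string(), b"const x = 1".to_vec());
        if let Some(entry) = code_map.get("file:///test.kcl") {
            // `entry` derefs to &Vec<u8>; this is why comparisons in the diff gain a `*`.
            assert_eq!(*entry, b"const x = 1".to_vec());
        }

        // The old wrapper, for contrast.
        let old_map: SafeMapSketch<String, Vec<u8>> = SafeMapSketch(RwLock::new(HashMap::new()));
        old_map.insert("file:///test.kcl".to_string(), b"const x = 1".to_vec()).await;
        assert!(old_map.get(&"file:///test.kcl".to_string()).await.is_some());
    }

This is why so many trait methods below stay `async` for compatibility while their bodies become plain synchronous map calls.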
.github/workflows/cargo-bench.yml (vendored, 4 lines changed)

@@ -38,5 +38,7 @@ jobs:
       - name: Benchmark kcl library
         shell: bash
         run: |-
-          cd src/wasm-lib/kcl; cargo bench -- iai
+          cd src/wasm-lib/kcl; cargo bench --all-features -- iai
+        env:
+          KITTYCAD_API_TOKEN: ${{secrets.KITTYCAD_API_TOKEN}}
src/wasm-lib/Cargo.lock (generated, 2 lines changed)

@@ -533,6 +533,7 @@ dependencies = [
  "ciborium",
  "clap",
  "criterion-plot",
+ "futures",
  "is-terminal",
  "itertools 0.10.5",
  "num-traits",
@@ -545,6 +546,7 @@ dependencies = [
  "serde_derive",
  "serde_json",
  "tinytemplate",
+ "tokio",
  "walkdir",
 ]
src/wasm-lib/kcl/Cargo.toml

@@ -19,7 +19,7 @@ chrono = "0.4.38"
 clap = { version = "4.5.7", default-features = false, optional = true }
 dashmap = "6.0.1"
 databake = { version = "0.1.8", features = ["derive"] }
 derive-docs = { version = "0.1.19", path = "../derive-docs" }
 form_urlencoded = "1.2.1"
 futures = { version = "0.3.30" }
 git_rev = "0.1.0"
@@ -28,7 +28,7 @@ kittycad = { workspace = true, features = ["clap"] }
 lazy_static = "1.5.0"
 mime_guess = "2.0.4"
 parse-display = "0.9.1"
-pyo3 = {version = "0.22.0", optional = true}
+pyo3 = { version = "0.22.0", optional = true }
 reqwest = { version = "0.11.26", default-features = false, features = ["stream", "rustls-tls"] }
 ropey = "1.6.1"
 schemars = { version = "0.8.17", features = ["impl_json_schema", "url", "uuid1"] }
@@ -67,6 +67,8 @@ cli = ["dep:clap"]
 disable-println = []
 engine = []
 pyo3 = ["dep:pyo3"]
+# Helper functions also used in benchmarks.
+lsp-test-util = []

 [profile.release]
 panic = "abort"
@@ -78,10 +80,10 @@ debug = true # Flamegraphs of benchmarks require accurate debug symbols
 [dev-dependencies]
 base64 = "0.22.1"
 convert_case = "0.6.0"
-criterion = "0.5.1"
+criterion = { version = "0.5.1", features = ["async_tokio"] }
 expectorate = "1.1.0"
 iai = "0.1"
-image = {version = "0.25.1", default-features = false, features = ["png"] }
+image = { version = "0.25.1", default-features = false, features = ["png"] }
 insta = { version = "1.38.0", features = ["json"] }
 itertools = "0.13.0"
 pretty_assertions = "1.4.0"
@@ -95,3 +97,13 @@ harness = false
 [[bench]]
 name = "compiler_benchmark_iai"
 harness = false
+
+[[bench]]
+name = "lsp_semantic_tokens_benchmark_criterion"
+harness = false
+required-features = ["lsp-test-util"]
+
+[[bench]]
+name = "lsp_semantic_tokens_benchmark_iai"
+harness = false
+required-features = ["lsp-test-util"]
src/wasm-lib/kcl/benches/lsp_semantic_tokens_benchmark_criterion.rs (new file)

@@ -0,0 +1,65 @@
+use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion};
+use kcl_lib::lsp::test_util::kcl_lsp_server;
+use tokio::runtime::Runtime;
+use tower_lsp::LanguageServer;
+
+async fn kcl_lsp_semantic_tokens(code: &str) {
+    let server = kcl_lsp_server(false).await.unwrap();
+
+    // Send open file.
+    server
+        .did_open(tower_lsp::lsp_types::DidOpenTextDocumentParams {
+            text_document: tower_lsp::lsp_types::TextDocumentItem {
+                uri: "file:///test.kcl".try_into().unwrap(),
+                language_id: "kcl".to_string(),
+                version: 1,
+                text: code.to_string(),
+            },
+        })
+        .await;
+
+    // Send semantic tokens request.
+    black_box(
+        server
+            .semantic_tokens_full(tower_lsp::lsp_types::SemanticTokensParams {
+                text_document: tower_lsp::lsp_types::TextDocumentIdentifier {
+                    uri: "file:///test.kcl".try_into().unwrap(),
+                },
+                partial_result_params: Default::default(),
+                work_done_progress_params: Default::default(),
+            })
+            .await
+            .unwrap()
+            .unwrap(),
+    );
+}
+
+fn bench_kcl_lsp_semantic_tokens(c: &mut Criterion) {
+    for (name, code) in [
+        ("pipes_on_pipes", PIPES_PROGRAM),
+        ("big_kitt", KITT_PROGRAM),
+        ("cube", CUBE_PROGRAM),
+        ("math", MATH_PROGRAM),
+        ("mike_stress_test", MIKE_STRESS_TEST_PROGRAM),
+        ("global_tags", GLOBAL_TAGS_FILE),
+    ] {
+        c.bench_with_input(BenchmarkId::new("semantic_tokens_", name), &code, |b, &s| {
+            let rt = Runtime::new().unwrap();
+
+            // Spawn a future onto the runtime
+            b.iter(|| {
+                rt.block_on(kcl_lsp_semantic_tokens(s));
+            });
+        });
+    }
+}
+
+criterion_group!(benches, bench_kcl_lsp_semantic_tokens);
+criterion_main!(benches);
+
+const KITT_PROGRAM: &str = include_str!("../../tests/executor/inputs/kittycad_svg.kcl");
+const PIPES_PROGRAM: &str = include_str!("../../tests/executor/inputs/pipes_on_pipes.kcl");
+const CUBE_PROGRAM: &str = include_str!("../../tests/executor/inputs/cube.kcl");
+const MATH_PROGRAM: &str = include_str!("../../tests/executor/inputs/math.kcl");
+const MIKE_STRESS_TEST_PROGRAM: &str = include_str!("../../tests/executor/inputs/mike_stress_test.kcl");
+const GLOBAL_TAGS_FILE: &str = include_str!("../../tests/executor/inputs/global-tags.kcl");
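The dev-dependency bump above enables criterion's `async_tokio` feature, although this new benchmark drives a Tokio `Runtime` manually with `block_on`. A hedged alternative sketch using criterion's async bencher (the LSP helper is stubbed here; this is not what the PR's file does, just the feature the Cargo.toml change unlocks):

    use criterion::{criterion_group, criterion_main, Criterion};
    use tokio::runtime::Runtime;

    // Stand-in for the async helper defined in the real benchmark above.
    async fn kcl_lsp_semantic_tokens(_code: &str) {}

    fn bench_async(c: &mut Criterion) {
        let rt = Runtime::new().unwrap();
        c.bench_function("semantic_tokens_cube_async", |b| {
            // With `async_tokio`, criterion awaits the future itself instead of the
            // closure calling `rt.block_on` by hand.
            b.to_async(&rt).iter(|| kcl_lsp_semantic_tokens("const x = 1"));
        });
    }

    criterion_group!(async_benches, bench_async);
    criterion_main!(async_benches);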
src/wasm-lib/kcl/benches/lsp_semantic_tokens_benchmark_iai.rs (new file)

@@ -0,0 +1,45 @@
+use iai::black_box;
+use kcl_lib::lsp::test_util::kcl_lsp_server;
+use tower_lsp::LanguageServer;
+
+async fn kcl_lsp_semantic_tokens(code: &str) {
+    let server = kcl_lsp_server(false).await.unwrap();
+
+    // Send open file.
+    server
+        .did_open(tower_lsp::lsp_types::DidOpenTextDocumentParams {
+            text_document: tower_lsp::lsp_types::TextDocumentItem {
+                uri: "file:///test.kcl".try_into().unwrap(),
+                language_id: "kcl".to_string(),
+                version: 1,
+                text: code.to_string(),
+            },
+        })
+        .await;
+
+    // Send semantic tokens request.
+    black_box(
+        server
+            .semantic_tokens_full(tower_lsp::lsp_types::SemanticTokensParams {
+                text_document: tower_lsp::lsp_types::TextDocumentIdentifier {
+                    uri: "file:///test.kcl".try_into().unwrap(),
+                },
+                partial_result_params: Default::default(),
+                work_done_progress_params: Default::default(),
+            })
+            .await
+            .unwrap()
+            .unwrap(),
+    );
+}
+
+async fn semantic_tokens_global_tags() {
+    let code = GLOBAL_TAGS_FILE;
+    kcl_lsp_semantic_tokens(code).await;
+}
+
+iai::main! {
+    semantic_tokens_global_tags,
+}
+
+const GLOBAL_TAGS_FILE: &str = include_str!("../../tests/executor/inputs/global-tags.kcl");
@@ -828,7 +828,7 @@ mod tests {
         assert_eq!(
             some_function,
             crate::ast::types::Function::StdLib {
-                func: Box::new(crate::std::sketch::Line),
+                func: Box::new(crate::std::sketch::Line)
             }
         );
     }
@@ -142,7 +142,7 @@ impl IntoDiagnostic for KclError {

         Diagnostic {
             range: source_ranges.first().map(|r| r.to_lsp_range(code)).unwrap_or_default(),
-            severity: Some(DiagnosticSeverity::ERROR),
+            severity: Some(self.severity()),
             code: None,
             // TODO: this is neat we can pass a URL to a help page here for this specific error.
             code_description: None,
@@ -153,6 +153,10 @@ impl IntoDiagnostic for KclError {
             data: None,
         }
     }
+
+    fn severity(&self) -> DiagnosticSeverity {
+        DiagnosticSeverity::ERROR
+    }
 }

 /// This is different than to_string() in that it will serialize the Error
@@ -65,7 +65,11 @@ mod tests {
         assert_finding!(lint_variables, Z0001, "const thicc_nes = 0.5");
     }

-    test_finding!(z0001_full_bad, lint_variables, Z0001, "\
+    test_finding!(
+        z0001_full_bad,
+        lint_variables,
+        Z0001,
+        "\
 // Define constants
 const pipeLength = 40
 const pipeSmallDia = 10
@@ -94,9 +98,14 @@ const Part001 = startSketchOn('XY')
   |> angledLineToX({ angle: 60, to: pipeLargeDia }, %)
   |> close(%)
   |> revolve({ axis: 'y' }, %)
-");
+"
+    );

-    test_no_finding!(z0001_full_good, lint_variables, Z0001, "\
+    test_no_finding!(
+        z0001_full_good,
+        lint_variables,
+        Z0001,
+        "\
 // Define constants
 const pipeLength = 40
 const pipeSmallDia = 10
@@ -125,5 +134,6 @@ const part001 = startSketchOn('XY')
   |> angledLineToX({ angle: 60, to: pipeLargeDia }, %)
   |> close(%)
   |> revolve({ axis: 'y' }, %)
-");
+"
+    );
 }
@@ -70,6 +70,10 @@ impl IntoDiagnostic for Discovered {
     fn to_lsp_diagnostic(&self, code: &str) -> Diagnostic {
         (&self).to_lsp_diagnostic(code)
     }
+
+    fn severity(&self) -> DiagnosticSeverity {
+        (&self).severity()
+    }
 }

 impl IntoDiagnostic for &Discovered {
@@ -79,7 +83,7 @@ impl IntoDiagnostic for &Discovered {

         Diagnostic {
             range: source_range.to_lsp_range(code),
-            severity: Some(DiagnosticSeverity::INFORMATION),
+            severity: Some(self.severity()),
             code: None,
             // TODO: this is neat we can pass a URL to a help page here for this specific error.
             code_description: None,
@@ -90,6 +94,10 @@ impl IntoDiagnostic for &Discovered {
             data: None,
         }
     }
+
+    fn severity(&self) -> DiagnosticSeverity {
+        DiagnosticSeverity::INFORMATION
+    }
 }

 /// Abstract lint problem type.
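These hunks and the `KclError` ones above push the hard-coded severities behind a `severity()` method on `IntoDiagnostic`, which the batched diagnostics handling later in this diff relies on to tell errors apart from lints. A minimal sketch of the pattern (the trait body and the `ExecutionError`/`LintFinding` types here are assumptions for illustration; only the two method names come from the diff):

    use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity};

    // Sketch of the trait as it appears at the call sites in this diff.
    trait IntoDiagnostic {
        fn to_lsp_diagnostic(&self, code: &str) -> Diagnostic;
        fn severity(&self) -> DiagnosticSeverity;
    }

    struct ExecutionError(String);
    struct LintFinding(String);

    impl IntoDiagnostic for ExecutionError {
        fn to_lsp_diagnostic(&self, _code: &str) -> Diagnostic {
            Diagnostic {
                message: self.0.clone(),
                severity: Some(self.severity()),
                ..Default::default()
            }
        }

        // Errors halt execution, so they always publish as ERROR.
        fn severity(&self) -> DiagnosticSeverity {
            DiagnosticSeverity::ERROR
        }
    }

    impl IntoDiagnostic for LintFinding {
        fn to_lsp_diagnostic(&self, _code: &str) -> Diagnostic {
            Diagnostic {
                message: self.0.clone(),
                severity: Some(self.severity()),
                ..Default::default()
            }
        }

        // Lints are informational and can be cleared independently of errors.
        fn severity(&self) -> DiagnosticSeverity {
            DiagnosticSeverity::INFORMATION
        }
    }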
@@ -3,59 +3,15 @@
 use std::sync::Arc;

 use anyhow::Result;
-use tokio::sync::RwLock;
+use dashmap::DashMap;
 use tower_lsp::lsp_types::{
-    CreateFilesParams, DeleteFilesParams, DidChangeConfigurationParams, DidChangeTextDocumentParams,
+    CreateFilesParams, DeleteFilesParams, Diagnostic, DidChangeConfigurationParams, DidChangeTextDocumentParams,
     DidChangeWatchedFilesParams, DidChangeWorkspaceFoldersParams, DidCloseTextDocumentParams,
-    DidOpenTextDocumentParams, DidSaveTextDocumentParams, DocumentDiagnosticReport, InitializedParams, MessageType,
-    RenameFilesParams, TextDocumentItem, WorkspaceFolder,
+    DidOpenTextDocumentParams, DidSaveTextDocumentParams, InitializedParams, MessageType, RenameFilesParams,
+    TextDocumentItem, WorkspaceFolder,
 };

-use crate::{
-    fs::FileSystem,
-    lsp::safemap::SafeMap,
-    thread::{JoinHandle, Thread},
-};
-
-#[derive(Clone)]
-pub struct InnerHandle(Arc<JoinHandle>);
-
-impl InnerHandle {
-    pub fn new(handle: JoinHandle) -> Self {
-        Self(Arc::new(handle))
-    }
-
-    pub fn is_finished(&self) -> bool {
-        self.0.is_finished()
-    }
-
-    pub fn cancel(&self) {
-        self.0.abort();
-    }
-}
-
-#[derive(Clone)]
-pub struct UpdateHandle(Arc<RwLock<Option<InnerHandle>>>);
-
-impl UpdateHandle {
-    pub fn new(handle: InnerHandle) -> Self {
-        Self(Arc::new(RwLock::new(Some(handle))))
-    }
-
-    pub async fn read(&self) -> Option<InnerHandle> {
-        self.0.read().await.clone()
-    }
-
-    pub async fn write(&self, handle: Option<InnerHandle>) {
-        *self.0.write().await = handle;
-    }
-}
-
-impl Default for UpdateHandle {
-    fn default() -> Self {
-        Self(Arc::new(RwLock::new(None)))
-    }
-}
+use crate::fs::FileSystem;

 /// A trait for the backend of the language server.
 #[async_trait::async_trait]
@@ -71,10 +27,6 @@ where

     async fn set_is_initialized(&self, is_initialized: bool);

-    async fn current_handle(&self) -> Option<InnerHandle>;
-
-    async fn set_current_handle(&self, handle: Option<InnerHandle>);
-
     async fn workspace_folders(&self) -> Vec<WorkspaceFolder>;

     async fn add_workspace_folders(&self, folders: Vec<WorkspaceFolder>);
@@ -82,7 +34,7 @@ where
     async fn remove_workspace_folders(&self, folders: Vec<WorkspaceFolder>);

     /// Get the current code map.
-    fn code_map(&self) -> &SafeMap<String, Vec<u8>>;
+    fn code_map(&self) -> &DashMap<String, Vec<u8>>;

     /// Insert a new code map.
     async fn insert_code_map(&self, uri: String, text: Vec<u8>);
@@ -94,62 +46,36 @@ where
     async fn clear_code_state(&self);

     /// Get the current diagnostics map.
-    fn current_diagnostics_map(&self) -> &SafeMap<String, DocumentDiagnosticReport>;
+    fn current_diagnostics_map(&self) -> &DashMap<String, Vec<Diagnostic>>;

     /// On change event.
     async fn inner_on_change(&self, params: TextDocumentItem, force: bool);

     /// Check if the file has diagnostics.
     async fn has_diagnostics(&self, uri: &str) -> bool {
-        if let Some(tower_lsp::lsp_types::DocumentDiagnosticReport::Full(diagnostics)) =
-            self.current_diagnostics_map().get(uri).await
-        {
-            !diagnostics.full_document_diagnostic_report.items.is_empty()
-        } else {
-            false
-        }
+        let Some(diagnostics) = self.current_diagnostics_map().get(uri) else {
+            return false;
+        };
+
+        !diagnostics.is_empty()
     }

     async fn on_change(&self, params: TextDocumentItem) {
         // Check if the document is in the current code map and if it is the same as what we have
         // stored.
         let filename = params.uri.to_string();
-        if let Some(current_code) = self.code_map().get(&filename).await {
-            if current_code == params.text.as_bytes() && !self.has_diagnostics(&filename).await {
+        if let Some(current_code) = self.code_map().get(&filename) {
+            if *current_code == params.text.as_bytes() && !self.has_diagnostics(&filename).await {
                 return;
             }
         }

-        // Check if we already have a handle running.
-        if let Some(current_handle) = self.current_handle().await {
-            self.set_current_handle(None).await;
-            // Drop that handle to cancel it.
-            current_handle.cancel();
-        }
-
-        let cloned = self.clone();
-        let task = JoinHandle::new(async move {
-            cloned
-                .insert_code_map(params.uri.to_string(), params.text.as_bytes().to_vec())
-                .await;
-            cloned.inner_on_change(params, false).await;
-            cloned.set_current_handle(None).await;
-        });
-        let update_handle = InnerHandle::new(task);
-
-        // Set our new handle.
-        self.set_current_handle(Some(update_handle.clone())).await;
-    }
-
-    async fn wait_on_handle(&self) {
-        while let Some(handle) = self.current_handle().await {
-            if !handle.is_finished() {
-                tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
-            } else {
-                break;
-            }
-        }
-        self.set_current_handle(None).await;
+        println!("on_change after check: {:?}", params);
+        self.insert_code_map(params.uri.to_string(), params.text.as_bytes().to_vec())
+            .await;
+        println!("on_change after insert: {:?}", params);
+        self.inner_on_change(params, false).await;
     }

     async fn update_from_disk<P: AsRef<std::path::Path> + std::marker::Send>(&self, path: P) -> Result<()> {
@@ -211,7 +137,7 @@ where
         self.remove_workspace_folders(params.event.removed).await;
         // Remove the code from the current code map.
         // We do this since it means the user is changing projects so let's refresh the state.
-        if !self.code_map().is_empty().await && should_clear {
+        if !self.code_map().is_empty() && should_clear {
             self.clear_code_state().await;
         }
         for added in params.event.added {
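One detail worth noting in the `on_change` hunk above: `DashMap::get` returns a guard (`Ref<K, V>`) that holds a shard read lock rather than a bare reference, which is why the comparison gains a deref (`*current_code == params.text.as_bytes()`). A small standalone sketch of that behaviour:

    use dashmap::DashMap;

    fn main() {
        let code_map: DashMap<String, Vec<u8>> = DashMap::new();
        code_map.insert("file:///test.kcl".to_string(), b"const x = 1".to_vec());

        if let Some(current_code) = code_map.get("file:///test.kcl") {
            // `current_code` is a dashmap::mapref::one::Ref holding a shard read lock;
            // deref it to compare the underlying Vec<u8>.
            assert!(*current_code == b"const x = 1".to_vec());
        }
        // The guard is dropped at the end of the `if let` block, releasing the shard
        // lock before this write takes place.
        code_map.insert("file:///test.kcl".to_string(), b"const x = 2".to_vec());
    }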
@@ -9,28 +9,27 @@ use std::{
     sync::{Arc, RwLock},
 };

+use dashmap::DashMap;
 use serde::{Deserialize, Serialize};
 use tower_lsp::{
     jsonrpc::{Error, Result},
     lsp_types::{
-        CreateFilesParams, DeleteFilesParams, DidChangeConfigurationParams, DidChangeTextDocumentParams,
+        CreateFilesParams, DeleteFilesParams, Diagnostic, DidChangeConfigurationParams, DidChangeTextDocumentParams,
         DidChangeWatchedFilesParams, DidChangeWorkspaceFoldersParams, DidCloseTextDocumentParams,
-        DidOpenTextDocumentParams, DidSaveTextDocumentParams, DocumentDiagnosticReport, InitializeParams,
-        InitializeResult, InitializedParams, MessageType, OneOf, RenameFilesParams, ServerCapabilities,
-        TextDocumentItem, TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions, WorkspaceFolder,
-        WorkspaceFoldersServerCapabilities, WorkspaceServerCapabilities,
+        DidOpenTextDocumentParams, DidSaveTextDocumentParams, InitializeParams, InitializeResult, InitializedParams,
+        MessageType, OneOf, RenameFilesParams, ServerCapabilities, TextDocumentItem, TextDocumentSyncCapability,
+        TextDocumentSyncKind, TextDocumentSyncOptions, WorkspaceFolder, WorkspaceFoldersServerCapabilities,
+        WorkspaceServerCapabilities,
     },
     LanguageServer,
 };

-use super::backend::{InnerHandle, UpdateHandle};
 use crate::lsp::{
     backend::Backend as _,
     copilot::types::{
         CopilotAcceptCompletionParams, CopilotCompletionResponse, CopilotCompletionTelemetry, CopilotEditorInfo,
         CopilotLspCompletionParams, CopilotRejectCompletionParams, DocParams,
     },
-    safemap::SafeMap,
 };

 #[derive(Deserialize, Serialize, Debug)]
@@ -50,9 +49,9 @@ pub struct Backend {
     /// The file system client to use.
     pub fs: Arc<crate::fs::FileManager>,
     /// The workspace folders.
-    pub workspace_folders: SafeMap<String, WorkspaceFolder>,
+    pub workspace_folders: DashMap<String, WorkspaceFolder>,
     /// Current code.
-    pub code_map: SafeMap<String, Vec<u8>>,
+    pub code_map: DashMap<String, Vec<u8>>,
     /// The Zoo API client.
     pub zoo_client: kittycad::Client,
     /// The editor info is used to store information about the editor.
@@ -60,12 +59,11 @@ pub struct Backend {
     /// The cache is used to store the results of previous requests.
     pub cache: Arc<cache::CopilotCache>,
     /// Storage so we can send telemetry data back out.
-    pub telemetry: SafeMap<uuid::Uuid, CopilotCompletionTelemetry>,
+    pub telemetry: DashMap<uuid::Uuid, CopilotCompletionTelemetry>,
     /// Diagnostics.
-    pub diagnostics_map: SafeMap<String, DocumentDiagnosticReport>,
+    pub diagnostics_map: DashMap<String, Vec<Diagnostic>>,

     pub is_initialized: Arc<tokio::sync::RwLock<bool>>,
-    pub current_handle: UpdateHandle,
 }

 // Implement the shared backend trait for the language server.
@@ -87,47 +85,40 @@ impl crate::lsp::backend::Backend for Backend {
         *self.is_initialized.write().await = is_initialized;
     }

-    async fn current_handle(&self) -> Option<InnerHandle> {
-        self.current_handle.read().await
-    }
-
-    async fn set_current_handle(&self, handle: Option<InnerHandle>) {
-        self.current_handle.write(handle).await;
-    }
-
     async fn workspace_folders(&self) -> Vec<WorkspaceFolder> {
-        self.workspace_folders.inner().await.values().cloned().collect()
+        // TODO: fix clone
+        self.workspace_folders.iter().map(|i| i.clone()).collect()
     }

     async fn add_workspace_folders(&self, folders: Vec<WorkspaceFolder>) {
         for folder in folders {
-            self.workspace_folders.insert(folder.name.to_string(), folder).await;
+            self.workspace_folders.insert(folder.name.to_string(), folder);
         }
     }

     async fn remove_workspace_folders(&self, folders: Vec<WorkspaceFolder>) {
         for folder in folders {
-            self.workspace_folders.remove(&folder.name).await;
+            self.workspace_folders.remove(&folder.name);
         }
     }

-    fn code_map(&self) -> &SafeMap<String, Vec<u8>> {
+    fn code_map(&self) -> &DashMap<String, Vec<u8>> {
         &self.code_map
     }

     async fn insert_code_map(&self, uri: String, text: Vec<u8>) {
-        self.code_map.insert(uri, text).await;
+        self.code_map.insert(uri, text);
     }

     async fn remove_from_code_map(&self, uri: String) -> Option<Vec<u8>> {
-        self.code_map.remove(&uri).await
+        self.code_map.remove(&uri).map(|(_, v)| v)
     }

     async fn clear_code_state(&self) {
-        self.code_map.clear().await;
+        self.code_map.clear();
     }

-    fn current_diagnostics_map(&self) -> &SafeMap<String, DocumentDiagnosticReport> {
+    fn current_diagnostics_map(&self) -> &DashMap<String, Vec<Diagnostic>> {
         &self.diagnostics_map
     }

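The `// TODO: fix clone` comment and the `.map(|(_, v)| v)` adapters above follow directly from DashMap's API: iteration yields guard items that deref to the stored value, and `remove` returns the owned `(key, value)` pair. A brief standalone sketch with a stubbed telemetry type (the `Telemetry` name is just for illustration):

    use dashmap::DashMap;

    #[derive(Clone, Debug, PartialEq)]
    struct Telemetry(u32);

    fn main() {
        let telemetry: DashMap<u32, Telemetry> = DashMap::new();
        telemetry.insert(1, Telemetry(10));
        telemetry.insert(2, Telemetry(20));

        // Iteration hands out guards; cloning the value detaches it from the map's
        // shard locks, which is what the `// TODO: fix clone` in the diff is about.
        // The diff's `|i| i.clone()` does the same thing via deref to the value.
        let snapshot: Vec<Telemetry> = telemetry.iter().map(|entry| entry.value().clone()).collect();
        assert_eq!(snapshot.len(), 2);

        // `remove` returns Option<(K, V)>, hence `.map(|(_, v)| v)` to keep only the value.
        let removed: Option<Telemetry> = telemetry.remove(&1).map(|(_, v)| v);
        assert_eq!(removed, Some(Telemetry(10)));
    }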
@@ -140,8 +131,15 @@ impl Backend {
     /// Get completions from the kittycad api.
     pub async fn get_completions(&self, language: String, prompt: String, suffix: String) -> Result<Vec<String>> {
         let body = kittycad::types::KclCodeCompletionRequest {
-            prompt: Some(prompt.clone()),
-            suffix: Some(suffix.clone()),
+            extra: Some(kittycad::types::KclCodeCompletionParams {
+                language: Some(language.to_string()),
+                next_indent: None,
+                trim_by_indentation: true,
+                prompt_tokens: Some(prompt.len() as u32),
+                suffix_tokens: Some(suffix.len() as u32),
+            }),
+            prompt: Some(prompt),
+            suffix: Some(suffix),
             max_tokens: Some(500),
             temperature: Some(1.0),
             top_p: Some(1.0),
@@ -151,13 +149,6 @@ impl Backend {
             nwo: None,
             // We haven't implemented streaming yet.
             stream: false,
-            extra: Some(kittycad::types::KclCodeCompletionParams {
-                language: Some(language.to_string()),
-                next_indent: None,
-                trim_by_indentation: true,
-                prompt_tokens: Some(prompt.len() as u32),
-                suffix_tokens: Some(suffix.len() as u32),
-            }),
         };

         let resp = self
@@ -236,7 +227,7 @@ impl Backend {
                 completion: completion.clone(),
                 params: params.clone(),
             };
-            self.telemetry.insert(completion.uuid, telemetry).await;
+            self.telemetry.insert(completion.uuid, telemetry);
         }
         self.cache
             .set_cached_result(&doc_params.uri, &doc_params.pos.line, &response);
@@ -250,7 +241,7 @@ impl Backend {
             .await;

         // Get the original telemetry data.
-        let Some(original) = self.telemetry.remove(&params.uuid).await else {
+        let Some(original) = self.telemetry.remove(&params.uuid) else {
            return;
        };

@@ -269,7 +260,7 @@ impl Backend {
         // Get the original telemetry data.
         let mut originals: Vec<CopilotCompletionTelemetry> = Default::default();
         for uuid in params.uuids {
-            if let Some(original) = self.telemetry.remove(&uuid).await {
+            if let Some(original) = self.telemetry.remove(&uuid).map(|(_, v)| v) {
                 originals.push(original);
             }
         }
@@ -342,7 +333,7 @@ impl LanguageServer for Backend {
     }

     async fn did_change(&self, params: DidChangeTextDocumentParams) {
-        self.do_did_change(params.clone()).await;
+        self.do_did_change(params).await;
     }

     async fn did_save(&self, params: DidSaveTextDocumentParams) {
@@ -14,12 +14,13 @@ pub mod custom_notifications;
 use anyhow::Result;
 #[cfg(feature = "cli")]
 use clap::Parser;
+use dashmap::DashMap;
 use sha2::Digest;
 use tower_lsp::{
     jsonrpc::Result as RpcResult,
     lsp_types::{
         CompletionItem, CompletionItemKind, CompletionOptions, CompletionParams, CompletionResponse, CreateFilesParams,
-        DeleteFilesParams, DiagnosticOptions, DiagnosticServerCapabilities, DiagnosticSeverity,
+        DeleteFilesParams, Diagnostic, DiagnosticOptions, DiagnosticServerCapabilities, DiagnosticSeverity,
         DidChangeConfigurationParams, DidChangeTextDocumentParams, DidChangeWatchedFilesParams,
         DidChangeWorkspaceFoldersParams, DidCloseTextDocumentParams, DidOpenTextDocumentParams,
         DidSaveTextDocumentParams, DocumentDiagnosticParams, DocumentDiagnosticReport, DocumentDiagnosticReportResult,
@@ -43,11 +44,7 @@ use crate::lint::checks;
 use crate::{
     ast::types::{Value, VariableKind},
     executor::SourceRange,
-    lsp::{
-        backend::{Backend as _, InnerHandle, UpdateHandle},
-        safemap::SafeMap,
-        util::IntoDiagnostic,
-    },
+    lsp::{backend::Backend as _, util::IntoDiagnostic},
     parser::PIPE_OPERATOR,
     token::TokenType,
 };
@@ -96,25 +93,25 @@ pub struct Backend {
     /// The file system client to use.
     pub fs: Arc<crate::fs::FileManager>,
     /// The workspace folders.
-    pub workspace_folders: SafeMap<String, WorkspaceFolder>,
+    pub workspace_folders: DashMap<String, WorkspaceFolder>,
     /// The stdlib completions for the language.
     pub stdlib_completions: HashMap<String, CompletionItem>,
     /// The stdlib signatures for the language.
     pub stdlib_signatures: HashMap<String, SignatureHelp>,
     /// Token maps.
-    pub token_map: SafeMap<String, Vec<crate::token::Token>>,
+    pub token_map: DashMap<String, Vec<crate::token::Token>>,
     /// AST maps.
-    pub ast_map: SafeMap<String, crate::ast::types::Program>,
+    pub ast_map: DashMap<String, crate::ast::types::Program>,
     /// Memory maps.
-    pub memory_map: SafeMap<String, crate::executor::ProgramMemory>,
+    pub memory_map: DashMap<String, crate::executor::ProgramMemory>,
     /// Current code.
-    pub code_map: SafeMap<String, Vec<u8>>,
+    pub code_map: DashMap<String, Vec<u8>>,
     /// Diagnostics.
-    pub diagnostics_map: SafeMap<String, DocumentDiagnosticReport>,
+    pub diagnostics_map: DashMap<String, Vec<Diagnostic>>,
     /// Symbols map.
-    pub symbols_map: SafeMap<String, Vec<DocumentSymbol>>,
+    pub symbols_map: DashMap<String, Vec<DocumentSymbol>>,
     /// Semantic tokens map.
-    pub semantic_tokens_map: SafeMap<String, Vec<SemanticToken>>,
+    pub semantic_tokens_map: DashMap<String, Vec<SemanticToken>>,
     /// The Zoo API client.
     pub zoo_client: kittycad::Client,
     /// If we can send telemetry for this user.
@@ -125,7 +122,6 @@ pub struct Backend {
     pub can_execute: Arc<RwLock<bool>>,

     pub is_initialized: Arc<RwLock<bool>>,
-    pub current_handle: UpdateHandle,
 }

 // Implement the shared backend trait for the language server.
@@ -147,83 +143,75 @@ impl crate::lsp::backend::Backend for Backend {
         *self.is_initialized.write().await = is_initialized;
     }

-    async fn current_handle(&self) -> Option<InnerHandle> {
-        self.current_handle.read().await
-    }
-
-    async fn set_current_handle(&self, handle: Option<InnerHandle>) {
-        self.current_handle.write(handle).await;
-    }
-
     async fn workspace_folders(&self) -> Vec<WorkspaceFolder> {
-        self.workspace_folders.inner().await.values().cloned().collect()
+        // TODO: fix clone
+        self.workspace_folders.iter().map(|i| i.clone()).collect()
     }

     async fn add_workspace_folders(&self, folders: Vec<WorkspaceFolder>) {
         for folder in folders {
-            self.workspace_folders.insert(folder.name.to_string(), folder).await;
+            self.workspace_folders.insert(folder.name.to_string(), folder);
         }
     }

     async fn remove_workspace_folders(&self, folders: Vec<WorkspaceFolder>) {
         for folder in folders {
-            self.workspace_folders.remove(&folder.name).await;
+            self.workspace_folders.remove(&folder.name);
         }
     }

-    fn code_map(&self) -> &SafeMap<String, Vec<u8>> {
+    fn code_map(&self) -> &DashMap<String, Vec<u8>> {
         &self.code_map
     }

     async fn insert_code_map(&self, uri: String, text: Vec<u8>) {
-        self.code_map.insert(uri, text).await;
+        self.code_map.insert(uri, text);
     }

     async fn remove_from_code_map(&self, uri: String) -> Option<Vec<u8>> {
-        self.code_map.remove(&uri).await
+        self.code_map.remove(&uri).map(|x| x.1)
     }

     async fn clear_code_state(&self) {
-        self.code_map.clear().await;
-        self.token_map.clear().await;
-        self.ast_map.clear().await;
-        self.diagnostics_map.clear().await;
-        self.symbols_map.clear().await;
-        self.semantic_tokens_map.clear().await;
+        self.code_map.clear();
+        self.token_map.clear();
+        self.ast_map.clear();
+        self.diagnostics_map.clear();
+        self.symbols_map.clear();
+        self.semantic_tokens_map.clear();
     }

-    fn current_diagnostics_map(&self) -> &SafeMap<String, DocumentDiagnosticReport> {
+    fn current_diagnostics_map(&self) -> &DashMap<String, Vec<Diagnostic>> {
         &self.diagnostics_map
     }

     async fn inner_on_change(&self, params: TextDocumentItem, force: bool) {
+        let filename = params.uri.to_string();
         // We already updated the code map in the shared backend.

         // Lets update the tokens.
         let tokens = match crate::token::lexer(&params.text) {
             Ok(tokens) => tokens,
             Err(err) => {
-                self.add_to_diagnostics(&params, err, true).await;
-                self.token_map.remove(&params.uri.to_string()).await;
-                self.ast_map.remove(&params.uri.to_string()).await;
-                self.symbols_map.remove(&params.uri.to_string()).await;
-                self.semantic_tokens_map.remove(&params.uri.to_string()).await;
-                self.memory_map.remove(&params.uri.to_string()).await;
+                self.add_to_diagnostics(&params, &[err], true).await;
+                self.token_map.remove(&filename);
+                self.ast_map.remove(&filename);
+                self.symbols_map.remove(&filename);
+                self.semantic_tokens_map.remove(&filename);
+                self.memory_map.remove(&filename);
                 return;
             }
         };

-        // Get the previous tokens.
-        let previous_tokens = self.token_map.get(&params.uri.to_string()).await;
-
         // Try to get the memory for the current code.
-        let has_memory = if let Some(memory) = self.memory_map.get(&params.uri.to_string()).await {
-            memory != crate::executor::ProgramMemory::default()
+        let has_memory = if let Some(memory) = self.memory_map.get(&filename) {
+            *memory != crate::executor::ProgramMemory::default()
         } else {
             false
         };

-        let tokens_changed = if let Some(previous_tokens) = &previous_tokens {
+        // Get the previous tokens.
+        let tokens_changed = if let Some(previous_tokens) = self.token_map.get(&filename) {
             *previous_tokens != tokens
         } else {
             true
@@ -237,7 +225,7 @@ impl crate::lsp::backend::Backend for Backend {

         if tokens_changed {
             // Update our token map.
-            self.token_map.insert(params.uri.to_string(), tokens.clone()).await;
+            self.token_map.insert(params.uri.to_string(), tokens.clone());
             // Update our semantic tokens.
             self.update_semantic_tokens(&tokens, &params).await;
         }
@@ -248,19 +236,19 @@ impl crate::lsp::backend::Backend for Backend {
         let ast = match result {
             Ok(ast) => ast,
             Err(err) => {
-                self.add_to_diagnostics(&params, err, true).await;
-                self.ast_map.remove(&params.uri.to_string()).await;
-                self.symbols_map.remove(&params.uri.to_string()).await;
-                self.memory_map.remove(&params.uri.to_string()).await;
+                self.add_to_diagnostics(&params, &[err], true).await;
+                self.ast_map.remove(&filename);
+                self.symbols_map.remove(&filename);
+                self.memory_map.remove(&filename);
                 return;
             }
         };

         // Check if the ast changed.
-        let ast_changed = match self.ast_map.get(&params.uri.to_string()).await {
+        let ast_changed = match self.ast_map.get(&filename) {
             Some(old_ast) => {
                 // Check if the ast changed.
-                old_ast != ast
+                *old_ast != ast
             }
             None => true,
         };
@@ -271,14 +259,12 @@ impl crate::lsp::backend::Backend for Backend {
         }

         if ast_changed {
-            self.ast_map.insert(params.uri.to_string(), ast.clone()).await;
+            self.ast_map.insert(params.uri.to_string(), ast.clone());
             // Update the symbols map.
-            self.symbols_map
-                .insert(
-                    params.uri.to_string(),
-                    ast.get_lsp_symbols(&params.text).unwrap_or_default(),
-                )
-                .await;
+            self.symbols_map.insert(
+                params.uri.to_string(),
+                ast.get_lsp_symbols(&params.text).unwrap_or_default(),
+            );

             // Update our semantic tokens.
             self.update_semantic_tokens(&tokens, &params).await;
@@ -290,12 +276,7 @@ impl crate::lsp::backend::Backend for Backend {
                 .into_iter()
                 .flatten()
                 .collect::<Vec<_>>();
-            // Clear the lints before we lint.
-            self.clear_diagnostics_map(&params.uri, Some(DiagnosticSeverity::INFORMATION))
-                .await;
-            for discovered_finding in &discovered_findings {
-                self.add_to_diagnostics(&params, discovered_finding, false).await;
-            }
+            self.add_to_diagnostics(&params, &discovered_findings, false).await;
         }
     }

@@ -326,16 +307,8 @@ impl Backend {
         *self.can_execute.read().await
     }

-    async fn set_can_execute(&self, can_execute: bool) {
-        *self.can_execute.write().await = can_execute;
-    }
-
-    pub async fn executor_ctx(&self) -> Option<crate::executor::ExecutorContext> {
-        self.executor_ctx.read().await.clone()
-    }
-
-    async fn set_executor_ctx(&self, executor_ctx: crate::executor::ExecutorContext) {
-        *self.executor_ctx.write().await = Some(executor_ctx);
+    pub async fn executor_ctx(&self) -> tokio::sync::RwLockReadGuard<'_, Option<crate::executor::ExecutorContext>> {
+        self.executor_ctx.read().await
     }

     async fn update_semantic_tokens(&self, tokens: &[crate::token::Token], params: &TextDocumentItem) {
|
|||||||
|
|
||||||
// Calculate the token modifiers.
|
// Calculate the token modifiers.
|
||||||
// Get the value at the current position.
|
// Get the value at the current position.
|
||||||
let token_modifiers_bitset = if let Some(ast) = self.ast_map.get(¶ms.uri.to_string()).await {
|
let token_modifiers_bitset = if let Some(ast) = self.ast_map.get(params.uri.as_str()) {
|
||||||
let token_index = Arc::new(Mutex::new(token_type_index));
|
let token_index = Arc::new(Mutex::new(token_type_index));
|
||||||
let modifier_index: Arc<Mutex<u32>> = Arc::new(Mutex::new(0));
|
let modifier_index: Arc<Mutex<u32>> = Arc::new(Mutex::new(0));
|
||||||
crate::walk::walk(&ast, &|node: crate::walk::Node| {
|
crate::walk::walk(&ast, &|node: crate::walk::Node| {
|
||||||
@ -519,15 +492,12 @@ impl Backend {
|
|||||||
|
|
||||||
last_position = position;
|
last_position = position;
|
||||||
}
|
}
|
||||||
self.semantic_tokens_map
|
self.semantic_tokens_map.insert(params.uri.to_string(), semantic_tokens);
|
||||||
.insert(params.uri.to_string(), semantic_tokens)
|
|
||||||
.await;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn clear_diagnostics_map(&self, uri: &url::Url, severity: Option<DiagnosticSeverity>) {
|
async fn clear_diagnostics_map(&self, uri: &url::Url, severity: Option<DiagnosticSeverity>) {
|
||||||
let mut items = match self.diagnostics_map.get(uri.as_str()).await {
|
let Some(mut items) = self.diagnostics_map.get_mut(uri.as_str()) else {
|
||||||
Some(DocumentDiagnosticReport::Full(report)) => report.full_document_diagnostic_report.items,
|
return;
|
||||||
_ => vec![],
|
|
||||||
};
|
};
|
||||||
|
|
||||||
// If we only want to clear a specific severity, do that.
|
// If we only want to clear a specific severity, do that.
|
||||||
@ -537,84 +507,72 @@ impl Backend {
|
|||||||
items.clear();
|
items.clear();
|
||||||
}
|
}
|
||||||
|
|
||||||
self.diagnostics_map
|
if items.is_empty() {
|
||||||
.insert(
|
#[cfg(not(target_arch = "wasm32"))]
|
||||||
uri.to_string(),
|
{
|
||||||
DocumentDiagnosticReport::Full(RelatedFullDocumentDiagnosticReport {
|
self.client.publish_diagnostics(uri.clone(), items.clone(), None).await;
|
||||||
related_documents: None,
|
}
|
||||||
full_document_diagnostic_report: FullDocumentDiagnosticReport {
|
|
||||||
result_id: None,
|
|
||||||
items: items.clone(),
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
)
|
|
||||||
.await;
|
|
||||||
|
|
||||||
#[cfg(not(target_arch = "wasm32"))]
|
// We need to drop the items here.
|
||||||
{
|
drop(items);
|
||||||
self.client.publish_diagnostics(uri.clone(), items, None).await;
|
|
||||||
|
self.diagnostics_map.remove(uri.as_str());
|
||||||
|
} else {
|
||||||
|
// We don't need to update the map since we used get_mut.
|
||||||
|
|
||||||
|
#[cfg(not(target_arch = "wasm32"))]
|
||||||
|
{
|
||||||
|
self.client.publish_diagnostics(uri.clone(), items.clone(), None).await;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn add_to_diagnostics<DiagT: IntoDiagnostic + std::fmt::Debug>(
|
async fn add_to_diagnostics<DiagT: IntoDiagnostic + std::fmt::Debug>(
|
||||||
&self,
|
&self,
|
||||||
params: &TextDocumentItem,
|
params: &TextDocumentItem,
|
||||||
diagnostic: DiagT,
|
diagnostics: &[DiagT],
|
||||||
clear_all_before_add: bool,
|
clear_all_before_add: bool,
|
||||||
) {
|
) {
|
||||||
self.client
|
self.client
|
||||||
.log_message(MessageType::INFO, format!("adding {:?} to diag", diagnostic))
|
.log_message(MessageType::INFO, format!("adding {:?} to diag", diagnostics))
|
||||||
.await;
|
.await;
|
||||||
|
|
||||||
let diagnostic = diagnostic.to_lsp_diagnostic(¶ms.text);
|
|
||||||
|
|
||||||
if clear_all_before_add {
|
if clear_all_before_add {
|
||||||
self.clear_diagnostics_map(¶ms.uri, None).await;
|
self.clear_diagnostics_map(¶ms.uri, None).await;
|
||||||
} else if diagnostic.severity == Some(DiagnosticSeverity::ERROR) {
|
} else if diagnostics.iter().all(|x| x.severity() == DiagnosticSeverity::ERROR) {
|
||||||
// If the diagnostic is an error, it will be the only error we get since that halts
|
// If the diagnostic is an error, it will be the only error we get since that halts
|
||||||
// execution.
|
// execution.
|
||||||
// Clear the diagnostics before we add a new one.
|
// Clear the diagnostics before we add a new one.
|
||||||
self.clear_diagnostics_map(¶ms.uri, Some(DiagnosticSeverity::ERROR))
|
self.clear_diagnostics_map(¶ms.uri, Some(DiagnosticSeverity::ERROR))
|
||||||
.await;
|
.await;
|
||||||
|
} else if diagnostics
|
||||||
|
.iter()
|
||||||
|
.all(|x| x.severity() == DiagnosticSeverity::INFORMATION)
|
||||||
|
{
|
||||||
|
// If the diagnostic is a lint, we will pass them all to add at once so we need to
|
||||||
|
// clear the old ones.
|
||||||
|
self.clear_diagnostics_map(¶ms.uri, Some(DiagnosticSeverity::INFORMATION))
|
||||||
|
.await;
|
||||||
}
|
}
|
||||||
|
|
||||||
let DocumentDiagnosticReport::Full(mut report) =
|
let mut items = if let Some(items) = self.diagnostics_map.get(params.uri.as_str()) {
|
||||||
self.diagnostics_map
|
// TODO: Would be awesome to fix the clone here.
|
||||||
.get(params.uri.as_str())
|
items.clone()
|
||||||
.await
|
} else {
|
||||||
.unwrap_or(DocumentDiagnosticReport::Full(RelatedFullDocumentDiagnosticReport {
|
vec![]
|
||||||
related_documents: None,
|
|
||||||
full_document_diagnostic_report: FullDocumentDiagnosticReport {
|
|
||||||
result_id: None,
|
|
||||||
items: vec![],
|
|
||||||
},
|
|
||||||
}))
|
|
||||||
else {
|
|
||||||
unreachable!();
|
|
||||||
};
|
};
|
||||||
|
|
||||||
// Ensure we don't already have this diagnostic.
|
for diagnostic in diagnostics {
|
||||||
if report
|
let d = diagnostic.to_lsp_diagnostic(¶ms.text);
|
||||||
.full_document_diagnostic_report
|
// Make sure we don't duplicate diagnostics.
|
||||||
.items
|
if !items.iter().any(|x| x == &d) {
|
||||||
.iter()
|
items.push(d);
|
||||||
.any(|x| x == &diagnostic)
|
}
|
||||||
{
|
|
||||||
self.client
|
|
||||||
.publish_diagnostics(params.uri.clone(), report.full_document_diagnostic_report.items, None)
|
|
||||||
.await;
|
|
||||||
return;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
report.full_document_diagnostic_report.items.push(diagnostic);
|
self.diagnostics_map.insert(params.uri.to_string(), items.clone());
|
||||||
|
|
||||||
self.diagnostics_map
|
self.client.publish_diagnostics(params.uri.clone(), items, None).await;
|
||||||
.insert(params.uri.to_string(), DocumentDiagnosticReport::Full(report.clone()))
|
|
||||||
.await;
|
|
||||||
|
|
||||||
self.client
|
|
||||||
.publish_diagnostics(params.uri.clone(), report.full_document_diagnostic_report.items, None)
|
|
||||||
.await;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn execute(&self, params: &TextDocumentItem, ast: &crate::ast::types::Program) -> Result<()> {
|
async fn execute(&self, params: &TextDocumentItem, ast: &crate::ast::types::Program) -> Result<()> {
|
||||||
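With diagnostics now stored as a plain `Vec<Diagnostic>` per URI instead of a `DocumentDiagnosticReport`, adding, de-duplicating, and publishing becomes a simple read-modify-write, which is what the rewritten `add_to_diagnostics` above does. A condensed standalone sketch of that flow (publishing is stubbed with a print):

    use dashmap::DashMap;
    use tower_lsp::lsp_types::Diagnostic;

    fn add_to_diagnostics(map: &DashMap<String, Vec<Diagnostic>>, uri: &str, new: &[Diagnostic]) {
        // Clone the current list out (the diff keeps a TODO about avoiding this clone),
        // append anything not already present, then write the list back and publish it.
        let mut items = map.get(uri).map(|v| v.clone()).unwrap_or_default();
        for d in new {
            if !items.iter().any(|x| x == d) {
                items.push(d.clone());
            }
        }
        map.insert(uri.to_string(), items.clone());

        // In the real backend this is `client.publish_diagnostics(uri, items, None).await`.
        println!("publishing {} diagnostics for {}", items.len(), uri);
    }

    fn main() {
        let diagnostics_map: DashMap<String, Vec<Diagnostic>> = DashMap::new();
        let lint = Diagnostic {
            message: "consider snake_case".to_string(),
            ..Default::default()
        };
        add_to_diagnostics(&diagnostics_map, "file:///test.kcl", &[lint.clone()]);
        // Re-adding the same diagnostic is a no-op thanks to the dedupe check.
        add_to_diagnostics(&diagnostics_map, "file:///test.kcl", &[lint]);
        assert_eq!(diagnostics_map.get("file:///test.kcl").unwrap().len(), 1);
    }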
@ -624,7 +582,8 @@ impl Backend {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Execute the code if we have an executor context.
|
// Execute the code if we have an executor context.
|
||||||
let Some(executor_ctx) = self.executor_ctx().await else {
|
let ctx = self.executor_ctx().await;
|
||||||
|
let Some(ref executor_ctx) = *ctx else {
|
||||||
return Ok(());
|
return Ok(());
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -639,17 +598,16 @@ impl Backend {
|
|||||||
let memory = match executor_ctx.run(ast, None).await {
|
let memory = match executor_ctx.run(ast, None).await {
|
||||||
Ok(memory) => memory,
|
Ok(memory) => memory,
|
||||||
Err(err) => {
|
Err(err) => {
|
||||||
self.memory_map.remove(¶ms.uri.to_string()).await;
|
self.memory_map.remove(params.uri.as_str());
|
||||||
self.add_to_diagnostics(params, err, false).await;
|
self.add_to_diagnostics(params, &[err], false).await;
|
||||||
|
|
||||||
// Since we already published the diagnostics we don't really care about the error
|
// Since we already published the diagnostics we don't really care about the error
|
||||||
// string.
|
// string.
|
||||||
return Err(anyhow::anyhow!("failed to execute code"));
|
return Err(anyhow::anyhow!("failed to execute code"));
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
drop(executor_ctx);
|
|
||||||
|
|
||||||
self.memory_map.insert(params.uri.to_string(), memory.clone()).await;
|
self.memory_map.insert(params.uri.to_string(), memory.clone());
|
||||||
|
|
||||||
// Send the notification to the client that the memory was updated.
|
// Send the notification to the client that the memory was updated.
|
||||||
self.client
|
self.client
|
||||||
@ -688,7 +646,7 @@ impl Backend {
|
|||||||
}
|
}
|
||||||
|
|
||||||
async fn completions_get_variables_from_ast(&self, file_name: &str) -> Vec<CompletionItem> {
|
async fn completions_get_variables_from_ast(&self, file_name: &str) -> Vec<CompletionItem> {
|
||||||
let ast = match self.ast_map.get(file_name).await {
|
let ast = match self.ast_map.get(file_name) {
|
||||||
Some(ast) => ast,
|
Some(ast) => ast,
|
||||||
None => return vec![],
|
None => return vec![],
|
||||||
};
|
};
|
||||||
@ -705,7 +663,9 @@ impl Backend {
        // Collect all the file data we know.
        let mut buf = vec![];
        let mut zip = zip::ZipWriter::new(std::io::Cursor::new(&mut buf));
-        for (entry, value) in self.code_map.inner().await.iter() {
+        for code in self.code_map.iter() {
+            let entry = code.key();
+            let value = code.value();
            let file_name = entry.replace("file://", "").to_string();

            let options = zip::write::SimpleFileOptions::default().compression_method(zip::CompressionMethod::Stored);
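DashMap iteration yields entry guards rather than (key, value) tuples, which is why the loop above switches to code.key() / code.value(). The same shape in isolation (map type and body are illustrative):

use dashmap::DashMap;

fn total_bytes(code_map: &DashMap<String, Vec<u8>>) -> usize {
    let mut total = 0;
    for code in code_map.iter() {
        let entry = code.key();
        let value = code.value();
        // `entry` borrows the key and `value` borrows the value for the life of the guard.
        total += entry.len() + value.len();
    }
    total
}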
@ -741,7 +701,7 @@ impl Backend {
        // Get the workspace folders.
        // The key of the workspace folder is the project name.
        let workspace_folders = self.workspace_folders().await;
-        let project_names: Vec<String> = workspace_folders.iter().map(|v| v.name.clone()).collect::<Vec<_>>();
+        let project_names: Vec<&str> = workspace_folders.iter().map(|v| v.name.as_str()).collect::<Vec<_>>();
        // Get the first name.
        let project_name = project_names
            .first()
@ -788,7 +748,9 @@ impl Backend {
        let filename = params.text_document.uri.to_string();

        {
-            let Some(mut executor_ctx) = self.executor_ctx().await else {
+            let mut ctx = self.executor_ctx.write().await;
+            // Borrow the executor context mutably.
+            let Some(ref mut executor_ctx) = *ctx else {
                self.client
                    .log_message(MessageType::ERROR, "no executor context set to update units for")
                    .await;
@ -800,8 +762,8 @@ impl Backend {
                .await;

            // Try to get the memory for the current code.
-            let has_memory = if let Some(memory) = self.memory_map.get(&filename).await {
-                memory != crate::executor::ProgramMemory::default()
+            let has_memory = if let Some(memory) = self.memory_map.get(&filename) {
+                *memory != crate::executor::ProgramMemory::default()
            } else {
                false
            };
@ -816,10 +778,6 @@ impl Backend {

            // Set the engine units.
            executor_ctx.update_units(params.units);
-
-            // Update the locked executor context.
-            self.set_executor_ctx(executor_ctx.clone()).await;
-            drop(executor_ctx);
        }
        // Lock is dropped here since nested.
        // This is IMPORTANT.
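The braces around this block are what make "Lock is dropped here since nested" true: the write guard on the executor context must go out of scope before anything later tries to take the lock again. A minimal sketch of that scoping rule (names are illustrative):

use std::sync::Arc;
use tokio::sync::RwLock;

async fn update_setting(shared: Arc<RwLock<Option<u32>>>) {
    {
        // Write lock lives only inside this nested scope.
        let mut guard = shared.write().await;
        if let Some(ref mut value) = *guard {
            *value += 1;
        }
    } // guard dropped here; without this, the read below would deadlock.

    let _current = shared.read().await;
}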
@ -847,20 +805,13 @@ impl Backend {
        &self,
        params: custom_notifications::UpdateCanExecuteParams,
    ) -> RpcResult<custom_notifications::UpdateCanExecuteResponse> {
-        let can_execute = self.can_execute().await;
+        let mut can_execute = self.can_execute.write().await;

-        if can_execute == params.can_execute {
+        if *can_execute == params.can_execute {
            return Ok(custom_notifications::UpdateCanExecuteResponse {});
        }

-        if !params.can_execute {
-            // Kill any in progress executions.
-            if let Some(current_handle) = self.current_handle().await {
-                current_handle.cancel();
-            }
-        }
+        *can_execute = params.can_execute;

-        self.set_can_execute(params.can_execute).await;
-
        Ok(custom_notifications::UpdateCanExecuteResponse {})
    }
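can_execute is now just a shared bool behind a tokio RwLock, compared and updated through one write guard; a compact sketch of the same pattern:

use std::sync::Arc;
use tokio::sync::RwLock;

async fn update_can_execute(flag: &Arc<RwLock<bool>>, new_value: bool) {
    let mut can_execute = flag.write().await;
    if *can_execute == new_value {
        // Nothing changed, nothing to do.
        return;
    }
    *can_execute = new_value;
}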
@ -973,7 +924,7 @@ impl LanguageServer for Backend {
    }

    async fn did_change(&self, params: DidChangeTextDocumentParams) {
-        self.do_did_change(params.clone()).await;
+        self.do_did_change(params).await;
    }

    async fn did_save(&self, params: DidSaveTextDocumentParams) {
@ -1012,7 +963,7 @@ impl LanguageServer for Backend {
    async fn hover(&self, params: HoverParams) -> RpcResult<Option<Hover>> {
        let filename = params.text_document_position_params.text_document.uri.to_string();

-        let Some(current_code) = self.code_map.get(&filename).await else {
+        let Some(current_code) = self.code_map.get(&filename) else {
            return Ok(None);
        };
        let Ok(current_code) = std::str::from_utf8(&current_code) else {
@ -1022,7 +973,7 @@ impl LanguageServer for Backend {
        let pos = position_to_char_index(params.text_document_position_params.position, current_code);

        // Let's iterate over the AST and find the node that contains the cursor.
-        let Some(ast) = self.ast_map.get(&filename).await else {
+        let Some(ast) = self.ast_map.get(&filename) else {
            return Ok(None);
        };

@ -1055,7 +1006,11 @@ impl LanguageServer for Backend {
                        value: format!(
                            "```{}{}```\n{}",
                            name,
-                            label_details.detail.clone().unwrap_or_default(),
+                            if let Some(detail) = &label_details.detail {
+                                detail
+                            } else {
+                                ""
+                            },
                            docs
                        ),
                    }),
@ -1114,7 +1069,7 @@ impl LanguageServer for Backend {
        let filename = params.text_document.uri.to_string();

        // Get the current diagnostics for this file.
-        let Some(diagnostic) = self.diagnostics_map.get(&filename).await else {
+        let Some(items) = self.diagnostics_map.get(&filename) else {
            // Send an empty report.
            return Ok(DocumentDiagnosticReportResult::Report(DocumentDiagnosticReport::Full(
                RelatedFullDocumentDiagnosticReport {
@ -1127,13 +1082,21 @@ impl LanguageServer for Backend {
            )));
        };

-        Ok(DocumentDiagnosticReportResult::Report(diagnostic.clone()))
+        Ok(DocumentDiagnosticReportResult::Report(DocumentDiagnosticReport::Full(
+            RelatedFullDocumentDiagnosticReport {
+                related_documents: None,
+                full_document_diagnostic_report: FullDocumentDiagnosticReport {
+                    result_id: None,
+                    items: items.clone(),
+                },
+            },
+        )))
    }

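Because the map now stores bare Vec<Diagnostic> values, the pull-diagnostics path rebuilds the LSP report on the way out. A trimmed sketch of just that construction (only the lsp_types plumbing, no server state):

use tower_lsp::lsp_types::{
    Diagnostic, DocumentDiagnosticReport, DocumentDiagnosticReportResult, FullDocumentDiagnosticReport,
    RelatedFullDocumentDiagnosticReport,
};

fn report_from_items(items: Vec<Diagnostic>) -> DocumentDiagnosticReportResult {
    DocumentDiagnosticReportResult::Report(DocumentDiagnosticReport::Full(
        RelatedFullDocumentDiagnosticReport {
            related_documents: None,
            full_document_diagnostic_report: FullDocumentDiagnosticReport {
                result_id: None,
                items,
            },
        },
    ))
}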
    async fn signature_help(&self, params: SignatureHelpParams) -> RpcResult<Option<SignatureHelp>> {
        let filename = params.text_document_position_params.text_document.uri.to_string();

-        let Some(current_code) = self.code_map.get(&filename).await else {
+        let Some(current_code) = self.code_map.get(&filename) else {
            return Ok(None);
        };
        let Ok(current_code) = std::str::from_utf8(&current_code) else {
@ -1143,7 +1106,7 @@ impl LanguageServer for Backend {
        let pos = position_to_char_index(params.text_document_position_params.position, current_code);

        // Let's iterate over the AST and find the node that contains the cursor.
-        let Some(ast) = self.ast_map.get(&filename).await else {
+        let Some(ast) = self.ast_map.get(&filename) else {
            return Ok(None);
        };

@ -1177,7 +1140,7 @@ impl LanguageServer for Backend {

                signature.active_parameter = Some(parameter_index);

-                Ok(Some(signature.clone()))
+                Ok(Some(signature))
            }
            crate::ast::types::Hover::Comment { value: _, range: _ } => {
                return Ok(None);
@ -1194,7 +1157,7 @@ impl LanguageServer for Backend {
    async fn semantic_tokens_full(&self, params: SemanticTokensParams) -> RpcResult<Option<SemanticTokensResult>> {
        let filename = params.text_document.uri.to_string();

-        let Some(semantic_tokens) = self.semantic_tokens_map.get(&filename).await else {
+        let Some(semantic_tokens) = self.semantic_tokens_map.get(&filename) else {
            return Ok(None);
        };

@ -1207,7 +1170,7 @@ impl LanguageServer for Backend {
    async fn document_symbol(&self, params: DocumentSymbolParams) -> RpcResult<Option<DocumentSymbolResponse>> {
        let filename = params.text_document.uri.to_string();

-        let Some(symbols) = self.symbols_map.get(&filename).await else {
+        let Some(symbols) = self.symbols_map.get(&filename) else {
            return Ok(None);
        };

@ -1217,7 +1180,7 @@ impl LanguageServer for Backend {
    async fn formatting(&self, params: DocumentFormattingParams) -> RpcResult<Option<Vec<TextEdit>>> {
        let filename = params.text_document.uri.to_string();

-        let Some(current_code) = self.code_map.get(&filename).await else {
+        let Some(current_code) = self.code_map.get(&filename) else {
            return Ok(None);
        };
        let Ok(current_code) = std::str::from_utf8(&current_code) else {
@ -1254,7 +1217,7 @@ impl LanguageServer for Backend {
    async fn rename(&self, params: RenameParams) -> RpcResult<Option<WorkspaceEdit>> {
        let filename = params.text_document_position.text_document.uri.to_string();

-        let Some(current_code) = self.code_map.get(&filename).await else {
+        let Some(current_code) = self.code_map.get(&filename) else {
            return Ok(None);
        };
        let Ok(current_code) = std::str::from_utf8(&current_code) else {
@ -1297,7 +1260,7 @@ impl LanguageServer for Backend {
        let filename = params.text_document.uri.to_string();

        // Get the ast.
-        let Some(ast) = self.ast_map.get(&filename).await else {
+        let Some(ast) = self.ast_map.get(&filename) else {
            return Ok(None);
        };

@ -3,7 +3,8 @@
pub mod backend;
pub mod copilot;
pub mod kcl;
-mod safemap;
+#[cfg(any(test, feature = "lsp-test-util"))]
+pub mod test_util;
#[cfg(test)]
mod tests;
pub mod util;
@ -1,60 +0,0 @@
-//! A map type that is safe to use in a concurrent environment.
-//! But also in wasm.
-//! Previously, we used `dashmap::DashMap` for this purpose, but it doesn't work in wasm.
-
-use std::{borrow::Borrow, collections::HashMap, hash::Hash, sync::Arc};
-
-use tokio::sync::RwLock;
-
-/// A thread-safe map type.
-#[derive(Clone, Debug)]
-pub struct SafeMap<K: Eq + Hash + Clone, V: Clone>(Arc<RwLock<HashMap<K, V>>>);
-
-impl<K: Eq + Hash + Clone, V: Clone> SafeMap<K, V> {
-    /// Create a new empty map.
-    pub fn new() -> Self {
-        SafeMap(Arc::new(RwLock::new(HashMap::new())))
-    }
-
-    pub async fn len(&self) -> usize {
-        self.0.read().await.len()
-    }
-
-    pub async fn is_empty(&self) -> bool {
-        self.0.read().await.is_empty()
-    }
-
-    pub async fn clear(&self) {
-        self.0.write().await.clear();
-    }
-
-    /// Insert a key-value pair into the map.
-    pub async fn insert(&self, key: K, value: V) {
-        self.0.write().await.insert(key, value);
-    }
-
-    /// Get a reference to the value associated with the given key.
-    pub async fn get<Q>(&self, key: &Q) -> Option<V>
-    where
-        K: Borrow<Q>,
-        Q: Hash + Eq + ?Sized,
-    {
-        self.0.read().await.get(key).cloned()
-    }
-
-    /// Remove the key-value pair associated with the given key.
-    pub async fn remove(&self, key: &K) -> Option<V> {
-        self.0.write().await.remove(key)
-    }
-
-    /// Get a reference to the underlying map.
-    pub async fn inner(&self) -> HashMap<K, V> {
-        self.0.read().await.clone()
-    }
-}
-
-impl<K: Eq + Hash + Clone, V: Clone> Default for SafeMap<K, V> {
-    fn default() -> Self {
-        SafeMap::new()
-    }
-}
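With SafeMap gone, every call site loses its .await and works with DashMap's guard-based API instead of cloned values. A small sketch of the call-site difference, using DashMap directly (the SafeMap side is the type deleted above):

use dashmap::DashMap;

fn current_code_len(code_map: &DashMap<String, Vec<u8>>, filename: &str) -> Option<usize> {
    // SafeMap: `code_map.get(filename).await` returned a cloned Vec<u8>.
    // DashMap: `get` is synchronous and returns a guard that borrows the entry.
    let current_code = code_map.get(filename)?;
    Some(current_code.len())
}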
112
src/wasm-lib/kcl/src/lsp/test_util.rs
Normal file
@ -0,0 +1,112 @@
+use std::sync::{Arc, RwLock};
+
+use anyhow::Result;
+use tower_lsp::LanguageServer;
+
+fn new_zoo_client() -> kittycad::Client {
+    let user_agent = concat!(env!("CARGO_PKG_NAME"), ".rs/", env!("CARGO_PKG_VERSION"),);
+    let http_client = reqwest::Client::builder()
+        .user_agent(user_agent)
+        // For file conversions we need this to be long.
+        .timeout(std::time::Duration::from_secs(600))
+        .connect_timeout(std::time::Duration::from_secs(60));
+    let ws_client = reqwest::Client::builder()
+        .user_agent(user_agent)
+        // For file conversions we need this to be long.
+        .timeout(std::time::Duration::from_secs(600))
+        .connect_timeout(std::time::Duration::from_secs(60))
+        .connection_verbose(true)
+        .tcp_keepalive(std::time::Duration::from_secs(600))
+        .http1_only();
+
+    let token = std::env::var("KITTYCAD_API_TOKEN").expect("KITTYCAD_API_TOKEN not set");
+
+    // Create the client.
+    let mut client = kittycad::Client::new_from_reqwest(token, http_client, ws_client);
+    // Set a local engine address if it's set.
+    if let Ok(addr) = std::env::var("LOCAL_ENGINE_ADDR") {
+        client.set_base_url(addr);
+    }
+
+    client
+}
+
+// Create a fake kcl lsp server for testing.
+pub async fn kcl_lsp_server(execute: bool) -> Result<crate::lsp::kcl::Backend> {
+    let stdlib = crate::std::StdLib::new();
+    let stdlib_completions = crate::lsp::kcl::get_completions_from_stdlib(&stdlib)?;
+    let stdlib_signatures = crate::lsp::kcl::get_signatures_from_stdlib(&stdlib)?;
+
+    let zoo_client = new_zoo_client();
+
+    let executor_ctx = if execute {
+        Some(crate::executor::ExecutorContext::new(&zoo_client, Default::default()).await?)
+    } else {
+        None
+    };
+
+    let can_execute = executor_ctx.is_some();
+
+    // Create the backend.
+    let (service, _) = tower_lsp::LspService::build(|client| crate::lsp::kcl::Backend {
+        client,
+        fs: Arc::new(crate::fs::FileManager::new()),
+        workspace_folders: Default::default(),
+        stdlib_completions,
+        stdlib_signatures,
+        token_map: Default::default(),
+        ast_map: Default::default(),
+        memory_map: Default::default(),
+        code_map: Default::default(),
+        diagnostics_map: Default::default(),
+        symbols_map: Default::default(),
+        semantic_tokens_map: Default::default(),
+        zoo_client,
+        can_send_telemetry: true,
+        executor_ctx: Arc::new(tokio::sync::RwLock::new(executor_ctx)),
+        can_execute: Arc::new(tokio::sync::RwLock::new(can_execute)),
+        is_initialized: Default::default(),
+    })
+    .custom_method("kcl/updateUnits", crate::lsp::kcl::Backend::update_units)
+    .custom_method("kcl/updateCanExecute", crate::lsp::kcl::Backend::update_can_execute)
+    .finish();
+
+    let server = service.inner();
+
+    server
+        .initialize(tower_lsp::lsp_types::InitializeParams::default())
+        .await?;

+    server.initialized(tower_lsp::lsp_types::InitializedParams {}).await;
+
+    Ok(server.clone())
+}
+
+// Create a fake copilot lsp server for testing.
+pub async fn copilot_lsp_server() -> Result<crate::lsp::copilot::Backend> {
+    // We don't actually need to authenticate to the backend for this test.
+    let zoo_client = kittycad::Client::new_from_env();
+
+    // Create the backend.
+    let (service, _) = tower_lsp::LspService::new(|client| crate::lsp::copilot::Backend {
+        client,
+        fs: Arc::new(crate::fs::FileManager::new()),
+        workspace_folders: Default::default(),
+        code_map: Default::default(),
+        zoo_client,
+        editor_info: Arc::new(RwLock::new(crate::lsp::copilot::types::CopilotEditorInfo::default())),
+        cache: Arc::new(crate::lsp::copilot::cache::CopilotCache::new()),
+        telemetry: Default::default(),
+        is_initialized: Default::default(),
+        diagnostics_map: Default::default(),
+    });
+    let server = service.inner();
+
+    server
+        .initialize(tower_lsp::lsp_types::InitializeParams::default())
+        .await?;
+
+    server.initialized(tower_lsp::lsp_types::InitializedParams {}).await;
+
+    Ok(server.clone())
+}
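A sketch of how a test behind the lsp-test-util feature might consume these helpers; the test name and body are illustrative, and like the CI change at the top of this diff it assumes KITTYCAD_API_TOKEN is set in the environment:

#[tokio::test(flavor = "multi_thread")]
async fn kcl_lsp_server_boots() -> anyhow::Result<()> {
    // `false` skips creating an ExecutorContext, so no engine session is opened.
    let server = crate::lsp::test_util::kcl_lsp_server(false).await?;
    // The backend has already been initialize()d and initialized() by the helper.
    drop(server);
    Ok(())
}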
File diff suppressed because it is too large
@ -37,4 +37,7 @@ pub fn get_line_before(pos: Position, rope: &Rope) -> Option<String> {
pub trait IntoDiagnostic {
    /// Convert the traited object to a [lsp_types::Diagnostic].
    fn to_lsp_diagnostic(&self, text: &str) -> Diagnostic;
+
+    /// Get the severity of the diagnostic.
+    fn severity(&self) -> tower_lsp::lsp_types::DiagnosticSeverity;
}
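A minimal sketch of what an implementation of the extended trait could look like; the error type is hypothetical and only the two method signatures come from the diff (assumes IntoDiagnostic from this module is in scope):

use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity, Position, Range};

// Hypothetical error type, used only to illustrate the trait shape.
struct ExampleError {
    message: String,
}

impl IntoDiagnostic for ExampleError {
    fn to_lsp_diagnostic(&self, _text: &str) -> Diagnostic {
        Diagnostic {
            range: Range::new(Position::new(0, 0), Position::new(0, 1)),
            severity: Some(self.severity()),
            message: self.message.clone(),
            ..Default::default()
        }
    }

    fn severity(&self) -> DiagnosticSeverity {
        DiagnosticSeverity::ERROR
    }
}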
@ -474,11 +474,7 @@ fn integer_range(i: TokenSlice) -> PResult<Vec<Value>> {
}

fn object_property(i: TokenSlice) -> PResult<ObjectProperty> {
-    let key = identifier
-        .context(expected(
-            "the property's key (the name or identifier of the property), e.g. in 'height: 4', 'height' is the property key",
-        ))
-        .parse_next(i)?;
+    let key = identifier.context(expected("the property's key (the name or identifier of the property), e.g. in 'height: 4', 'height' is the property key")).parse_next(i)?;
    colon
        .context(expected(
            "a colon, which separates the property's key from the value you're setting it to, e.g. 'height: 4'",
@ -588,12 +584,9 @@ fn member_expression_subscript(i: TokenSlice) -> PResult<(LiteralIdentifier, usi
fn member_expression(i: TokenSlice) -> PResult<MemberExpression> {
    // This is an identifier, followed by a sequence of members (aka properties)
    // First, the identifier.
-    let id = identifier
-        .context(expected("the identifier of the object whose property you're trying to access, e.g. in 'shape.size.width', 'shape' is the identifier"))
-        .parse_next(i)?;
+    let id = identifier.context(expected("the identifier of the object whose property you're trying to access, e.g. in 'shape.size.width', 'shape' is the identifier")).parse_next(i)?;
    // Now a sequence of members.
-    let member = alt((member_expression_dot, member_expression_subscript))
-        .context(expected("a member/property, e.g. size.x and size['height'] and size[0] are all different ways to access a member/property of 'size'"));
+    let member = alt((member_expression_dot, member_expression_subscript)).context(expected("a member/property, e.g. size.x and size['height'] and size[0] are all different ways to access a member/property of 'size'"));
    let mut members: Vec<_> = repeat(1.., member)
        .context(expected("a sequence of at least one members/properties"))
        .parse_next(i)?;
@ -1111,19 +1104,9 @@ fn unary_expression(i: TokenSlice) -> PResult<UnaryExpression> {
        // TODO: negation. Original parser doesn't support `not` yet.
        TokenType::Operator => Err(KclError::Syntax(KclErrorDetails {
            source_ranges: token.as_source_ranges(),
-            message: format!(
-                "{EXPECTED} but found {} which is an operator, but not a unary one (unary operators apply to just a single operand, your operator applies to two or more operands)",
-                token.value.as_str(),
-            ),
-        })),
-        other => Err(KclError::Syntax(KclErrorDetails {
-            source_ranges: token.as_source_ranges(),
-            message: format!(
-                "{EXPECTED} but found {} which is {}",
-                token.value.as_str(),
-                other,
-            ),
+            message: format!("{EXPECTED} but found {} which is an operator, but not a unary one (unary operators apply to just a single operand, your operator applies to two or more operands)", token.value.as_str(),),
        })),
+        other => Err(KclError::Syntax(KclErrorDetails { source_ranges: token.as_source_ranges(), message: format!("{EXPECTED} but found {} which is {}", token.value.as_str(), other,) })),
    })
    .context(expected("a unary expression, e.g. -x or -3"))
    .parse_next(i)?;
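These parser hunks only collapse the .context(expected(...)) chains onto single lines; the combinator shape is unchanged. For readers unfamiliar with it, a tiny standalone example of the same winnow pattern over &str (the label text and parser are illustrative, and it assumes a winnow version with PResult / parse_next like the one this crate appears to use):

use winnow::ascii::alpha1;
use winnow::error::StrContext;
use winnow::{PResult, Parser};

// Attach a human-readable label to a sub-parser, as object_property/member_expression do above.
fn property_key<'a>(i: &mut &'a str) -> PResult<&'a str> {
    alpha1
        .context(StrContext::Label("property key, e.g. 'height' in 'height: 4'"))
        .parse_next(i)
}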
@ -1691,7 +1674,7 @@ const mySk1 = startSketchAt([0, 0])"#;
            start0.value,
            NonCodeValue::BlockComment {
                value: "comment at start".to_owned(),
-                style: CommentStyle::Block,
+                style: CommentStyle::Block
            }
        );
        assert_eq!(start1.value, NonCodeValue::NewLine);
@ -1756,8 +1739,8 @@ const mySk1 = startSketchAt([0, 0])"#;
                        start: 32,
                        end: 33,
                        value: 2u32.into(),
-                        raw: "2".to_owned(),
-                    })),
+                        raw: "2".to_owned()
+                    }))
                })],
                non_code_meta: NonCodeMeta {
                    non_code_nodes: Default::default(),
@ -1765,7 +1748,7 @@ const mySk1 = startSketchAt([0, 0])"#;
                        start: 7,
                        end: 25,
                        value: NonCodeValue::NewLine
-                    }],
+                    }]
                },
            },
            return_type: None,
@ -1790,7 +1773,7 @@ const mySk1 = startSketchAt([0, 0])"#;
            non_code_meta.non_code_nodes.get(&2).unwrap()[0].value,
            NonCodeValue::InlineComment {
                value: "inline-comment".to_owned(),
-                style: CommentStyle::Line,
+                style: CommentStyle::Line
            }
        );
        assert_eq!(body.len(), 4);
@ -1815,8 +1798,8 @@ const mySk1 = startSketchAt([0, 0])"#;
                end: 20,
                value: NonCodeValue::BlockComment {
                    value: "this is a comment".to_owned(),
-                    style: CommentStyle::Line,
-                },
+                    style: CommentStyle::Line
+                }
            }],
            non_code_meta.start,
        );
@ -1827,13 +1810,13 @@ const mySk1 = startSketchAt([0, 0])"#;
                    end: 82,
                    value: NonCodeValue::InlineComment {
                        value: "block\n comment".to_owned(),
-                        style: CommentStyle::Block,
-                    },
+                        style: CommentStyle::Block
+                    }
                },
                NonCodeNode {
                    start: 82,
                    end: 86,
-                    value: NonCodeValue::NewLine,
+                    value: NonCodeValue::NewLine
                },
            ]),
            non_code_meta.non_code_nodes.get(&0),
@ -1844,8 +1827,8 @@ const mySk1 = startSketchAt([0, 0])"#;
                end: 129,
                value: NonCodeValue::BlockComment {
                    value: "this is also a comment".to_owned(),
-                    style: CommentStyle::Line,
-                },
+                    style: CommentStyle::Line
+                }
            }]),
            non_code_meta.non_code_nodes.get(&1),
        );
@ -1864,7 +1847,7 @@ const mySk1 = startSketchAt([0, 0])"#;
            actual.non_code_meta.non_code_nodes.get(&0).unwrap()[0].value,
            NonCodeValue::InlineComment {
                value: "block\n comment".to_owned(),
-                style: CommentStyle::Block,
+                style: CommentStyle::Block
            }
        );
    }
@ -1912,7 +1895,7 @@ const mySk1 = startSketchAt([0, 0])"#;
                        start: 9,
                        end: 10,
                        value: 3u32.into(),
-                        raw: "3".to_owned(),
+                        raw: "3".to_owned()
                    }))
        );
    }
@ -567,7 +567,7 @@ mod tests {
                project_name: Some("assembly".to_string()),
                project_path: "/Users/macinatormax/Documents/kittycad-modeling-projects/assembly".to_string(),
                current_file_name: None,
-                current_file_path: None,
+                current_file_path: None
            }
        );
    }
@ -586,7 +586,7 @@ mod tests {
                project_name: None,
                project_path: "/Users/macinatormax/Documents/kittycad-modeling-projects".to_string(),
                current_file_name: None,
-                current_file_path: None,
+                current_file_path: None
            }
        );
    }
@ -624,7 +624,7 @@ mod tests {
                project_name: Some("modeling-app".to_string()),
                project_path: "/Users/macinatormax/kittycad/modeling-app".to_string(),
                current_file_name: None,
-                current_file_path: None,
+                current_file_path: None
            }
        );
    }
@ -642,7 +642,7 @@ mod tests {
                project_name: Some("browser".to_string()),
                project_path: "/browser".to_string(),
                current_file_name: Some("main.kcl".to_string()),
-                current_file_path: Some("/browser/main.kcl".to_string()),
+                current_file_path: Some("/browser/main.kcl".to_string())
            }
        );
    }
@ -660,7 +660,7 @@ mod tests {
                project_name: Some("browser".to_string()),
                project_path: "/browser".to_string(),
                current_file_name: None,
-                current_file_path: None,
+                current_file_path: None
            }
        );
    }
@ -1046,13 +1046,7 @@ const model = import("model.obj")"#
        let result = super::ProjectState::new_from_path(tmp_project_dir.join("settings.toml")).await;

        assert!(result.is_err());
-        assert_eq!(
-            result.unwrap_err().to_string(),
-            format!(
-                "File type (toml) cannot be opened with this app: `{}`, try opening one of the following file types: stp, glb, fbxb, fbx, gltf, obj, ply, sldprt, step, stl, kcl",
-                tmp_project_dir.join("settings.toml").display()
-            )
-        );
+        assert_eq!(result.unwrap_err().to_string(), format!("File type (toml) cannot be opened with this app: `{}`, try opening one of the following file types: stp, glb, fbxb, fbx, gltf, obj, ply, sldprt, step, stl, kcl", tmp_project_dir.join("settings.toml").display()));

        std::fs::remove_dir_all(tmp_project_dir).unwrap();
    }
@ -1067,13 +1061,7 @@ const model = import("model.obj")"#
        let result = super::ProjectState::new_from_path(tmp_project_dir.join("settings.docx")).await;

        assert!(result.is_err());
-        assert_eq!(
-            result.unwrap_err().to_string(),
-            format!(
-                "File type (docx) cannot be opened with this app: `{}`, try opening one of the following file types: stp, glb, fbxb, fbx, gltf, obj, ply, sldprt, step, stl, kcl",
-                tmp_project_dir.join("settings.docx").display()
-            )
-        );
+        assert_eq!(result.unwrap_err().to_string(), format!("File type (docx) cannot be opened with this app: `{}`, try opening one of the following file types: stp, glb, fbxb, fbx, gltf, obj, ply, sldprt, step, stl, kcl", tmp_project_dir.join("settings.docx").display()));

        std::fs::remove_dir_all(tmp_project_dir).unwrap();
    }
@ -640,7 +640,7 @@ textWrapping = true
                app: AppSettings {
                    appearance: AppearanceSettings {
                        theme: AppTheme::Dark,
-                        color: Default::default(),
+                        color: Default::default()
                    },
                    onboarding_status: OnboardingStatus::Dismissed,
                    project_directory: None,
@ -654,15 +654,15 @@ textWrapping = true
                        mouse_controls: Default::default(),
                        highlight_edges: Default::default(),
                        show_debug_panel: true,
-                        enable_ssao: false.into(),
+                        enable_ssao: false.into()
                    },
                    text_editor: TextEditorSettings {
                        text_wrapping: true.into(),
-                        blinking_cursor: true.into(),
+                        blinking_cursor: true.into()
                    },
                    project: Default::default(),
                    command_bar: CommandBarSettings {
-                        include_settings: true.into(),
+                        include_settings: true.into()
                    },
                }
            }
@ -698,7 +698,7 @@ includeSettings = false
                app: AppSettings {
                    appearance: AppearanceSettings {
                        theme: AppTheme::Dark,
-                        color: 138.0.into(),
+                        color: 138.0.into()
                    },
                    onboarding_status: Default::default(),
                    project_directory: None,
@ -712,15 +712,15 @@ includeSettings = false
                        mouse_controls: Default::default(),
                        highlight_edges: Default::default(),
                        show_debug_panel: true,
-                        enable_ssao: true.into(),
+                        enable_ssao: true.into()
                    },
                    text_editor: TextEditorSettings {
                        text_wrapping: false.into(),
-                        blinking_cursor: false.into(),
+                        blinking_cursor: false.into()
                    },
                    project: Default::default(),
                    command_bar: CommandBarSettings {
-                        include_settings: false.into(),
+                        include_settings: false.into()
                    },
                }
            }
@ -761,7 +761,7 @@ defaultProjectName = "projects-$nnn"
                app: AppSettings {
                    appearance: AppearanceSettings {
                        theme: AppTheme::Dark,
-                        color: 138.0.into(),
+                        color: 138.0.into()
                    },
                    onboarding_status: OnboardingStatus::Dismissed,
                    project_directory: None,
@ -775,18 +775,18 @@ defaultProjectName = "projects-$nnn"
                        mouse_controls: Default::default(),
                        highlight_edges: Default::default(),
                        show_debug_panel: true,
-                        enable_ssao: true.into(),
+                        enable_ssao: true.into()
                    },
                    text_editor: TextEditorSettings {
                        text_wrapping: false.into(),
-                        blinking_cursor: false.into(),
+                        blinking_cursor: false.into()
                    },
                    project: ProjectSettings {
                        directory: "/Users/macinatormax/Documents/kittycad-modeling-projects".into(),
-                        default_project_name: "projects-$nnn".to_string().into(),
+                        default_project_name: "projects-$nnn".to_string().into()
                    },
                    command_bar: CommandBarSettings {
-                        include_settings: false.into(),
+                        include_settings: false.into()
                    },
                }
            }
@ -836,7 +836,7 @@ projectDirectory = "/Users/macinatormax/Documents/kittycad-modeling-projects""#;
                app: AppSettings {
                    appearance: AppearanceSettings {
                        theme: AppTheme::System,
-                        color: Default::default(),
+                        color: Default::default()
                    },
                    onboarding_status: OnboardingStatus::Dismissed,
                    project_directory: None,
@ -850,15 +850,15 @@ projectDirectory = "/Users/macinatormax/Documents/kittycad-modeling-projects""#;
                        mouse_controls: Default::default(),
                        highlight_edges: true.into(),
                        show_debug_panel: false,
-                        enable_ssao: true.into(),
+                        enable_ssao: true.into()
                    },
                    text_editor: TextEditorSettings {
                        text_wrapping: true.into(),
-                        blinking_cursor: true.into(),
+                        blinking_cursor: true.into()
                    },
                    project: ProjectSettings {
                        directory: "/Users/macinatormax/Documents/kittycad-modeling-projects".into(),
-                        default_project_name: "project-$nnn".to_string().into(),
+                        default_project_name: "project-$nnn".to_string().into()
                    },
                    command_bar: CommandBarSettings {
                        include_settings: true.into()
@ -115,7 +115,7 @@ includeSettings = false
                app: AppSettings {
                    appearance: AppearanceSettings {
                        theme: AppTheme::Dark,
-                        color: 138.0.into(),
+                        color: 138.0.into()
                    },
                    onboarding_status: Default::default(),
                    project_directory: None,
@ -129,14 +129,14 @@ includeSettings = false
                        mouse_controls: Default::default(),
                        highlight_edges: Default::default(),
                        show_debug_panel: true,
-                        enable_ssao: true.into(),
+                        enable_ssao: true.into()
                    },
                    text_editor: TextEditorSettings {
                        text_wrapping: false.into(),
-                        blinking_cursor: false.into(),
+                        blinking_cursor: false.into()
                    },
                    command_bar: CommandBarSettings {
-                        include_settings: false.into(),
+                        include_settings: false.into()
                    },
                }
            }
@ -85,9 +85,9 @@ async fn inner_chamfer(
    // error to the user that they can only tag one edge at a time.
    if tag.is_some() && data.tags.len() > 1 {
        return Err(KclError::Type(KclErrorDetails {
            message: "You can only tag one edge at a time with a tagged chamfer. Either delete the tag for the chamfer fn if you don't need it OR separate into individual chamfer functions for each tag.".to_string(),
            source_ranges: vec![args.source_range],
        }));
    }

    let mut fillet_or_chamfers = Vec::new();
@ -314,10 +314,7 @@ fn get_import_format_from_extension(ext: &str) -> Result<kittycad::types::InputF
        } else if ext == "glb" {
            kittycad::types::FileImportFormat::Gltf
        } else {
-            anyhow::bail!(
-                "unknown source format for file extension: {}. Try setting the `--src-format` flag explicitly or use a valid format.",
-                ext
-            )
+            anyhow::bail!("unknown source format for file extension: {}. Try setting the `--src-format` flag explicitly or use a valid format.", ext)
        }
    }
};
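The bail! above is only reflowed onto one line; the extension dispatch itself is unchanged. A self-contained sketch of the same fallback pattern (the enum is a stand-in, not the kittycad types):

use anyhow::Result;

// Stand-in for kittycad::types::FileImportFormat.
#[derive(Debug)]
enum ImportFormat {
    Gltf,
    Obj,
}

fn format_from_extension(ext: &str) -> Result<ImportFormat> {
    Ok(match ext {
        "glb" | "gltf" => ImportFormat::Gltf,
        "obj" => ImportFormat::Obj,
        _ => anyhow::bail!(
            "unknown source format for file extension: {}. Try setting the `--src-format` flag explicitly or use a valid format.",
            ext
        ),
    })
}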
@ -431,7 +431,7 @@ mod tests {
        );

        if let Err(err) = result {
-            assert!(err.to_string().contains( "Point Point2d { x: 0.0, y: 5.0 } is not on the circumference of the circle with center Point2d { x: 10.0, y: -10.0 } and radius 10."));
+            assert!(err.to_string().contains("Point Point2d { x: 0.0, y: 5.0 } is not on the circumference of the circle with center Point2d { x: 10.0, y: -10.0 } and radius 10."));
        } else {
            panic!("Expected error");
        }
@ -297,7 +297,6 @@ pub async fn kcl_lsp_run(
        executor_ctx: Arc::new(tokio::sync::RwLock::new(executor_ctx)),

        is_initialized: Default::default(),
-        current_handle: Default::default(),
    })
    .custom_method("kcl/updateUnits", kcl_lib::lsp::kcl::Backend::update_units)
    .custom_method("kcl/updateCanExecute", kcl_lib::lsp::kcl::Backend::update_can_execute)
@ -356,7 +355,6 @@ pub async fn copilot_lsp_run(config: ServerConfig, token: String, baseurl: Strin
        zoo_client,

        is_initialized: Default::default(),
-        current_handle: Default::default(),
        diagnostics_map: Default::default(),
    })
    .custom_method("copilot/setEditorInfo", kcl_lib::lsp::copilot::Backend::set_editor_info)