first commit

This commit is contained in:
锦恢 2024-09-06 15:29:46 +08:00
commit 1b91a7dad9
26 changed files with 14126 additions and 0 deletions

13
.gitignore vendored Normal file
View File

@@ -0,0 +1,13 @@
# Generated by Cargo
# will have compiled files and executables
debug/
target/
# These are backup files generated by rustfmt
**/*.rs.bk
tests_rtl
log_files
test.txt
.vscode/

1705
Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

27
Cargo.toml Normal file
View File

@@ -0,0 +1,27 @@
[package]
name = "veridian"
version = "0.1.0"
authors = ["Vivek Malneedi <vivekmalneedi@gmail.com>"]
edition = "2018"
[dependencies]
sv-parser = "0.8.2"
log = "0.4.19"
tower-lsp = "0.20.0"
flexi_logger = "0.27.4"
ropey = "1.6.0"
tokio = { version = "1.29.1", features = ["macros", "io-std", "rt-multi-thread"] }
path-clean = "1.0.1"
pathdiff = "0.2.1"
walkdir = "2.3.3"
serde_yaml = "0.9.25"
anyhow = "1.0.72"
serde = "1.0.179"
which = "6.0.0"
regex = "1.9.1"
structopt = "0.3.26"
strum = "0.26.1"
strum_macros = "0.26.1"
[dev-dependencies]
tempdir = "0.3.7"

21
LICENSE Normal file
View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2021 Vivek Malneedi
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

2
README.md Normal file
View File

@@ -0,0 +1,2 @@
专为 Digital IDE 设计的 LSP server

2
rustfmt.toml Normal file
View File

@@ -0,0 +1,2 @@
edition = "2018"
max_width = 100

781
src/completion.rs Normal file
View File

@@ -0,0 +1,781 @@
use crate::server::LSPServer;
use crate::sources::LSPSupport;
use log::{debug, trace};
use ropey::{Rope, RopeSlice};
use std::time::Instant;
use tower_lsp::lsp_types::*;
pub mod keyword;
impl LSPServer {
    /// Handle a `textDocument/completion` request.
    ///
    /// Waits until a parse of the file is available, extracts the token the
    /// user is typing (up to the cursor), then dispatches on the trigger:
    /// - `.`  -> member ("dot") completions for the token before the dot
    /// - `$`  -> system task/function completions
    /// - `` ` `` -> compiler directive completions
    /// - invoked / other -> scoped identifier completions plus keywords
    ///   sharing the typed prefix
    ///
    /// When the client supplies no `CompletionContext`, the trigger is
    /// inferred from the previous non-whitespace character in the document.
    pub fn completion(&self, params: CompletionParams) -> Option<CompletionResponse> {
        debug!("completion requested");
        trace!("{:#?}", &params);
        let now = Instant::now();
        let doc = params.text_document_position;
        let file_id = self.srcs.get_id(&doc.text_document.uri).to_owned();
        // block until this file has been parsed at least once
        self.srcs.wait_parse_ready(file_id, false);
        trace!("comp wait parse: {}", now.elapsed().as_millis());
        let file = self.srcs.get_file(file_id)?;
        let file = file.read().ok()?;
        trace!("comp read: {}", now.elapsed().as_millis());
        // token under construction at the cursor position
        let token = get_completion_token(
            &file.text,
            file.text.line(doc.position.line as usize),
            doc.position,
        );
        let response = match params.context {
            Some(context) => match context.trigger_kind {
                CompletionTriggerKind::TRIGGER_CHARACTER => {
                    debug!(
                        "trigger char completion: {}",
                        context.trigger_character.clone()?.as_str()
                    );
                    match context.trigger_character?.as_str() {
                        "." => Some(self.srcs.get_dot_completions(
                            token.trim_end_matches('.'),
                            file.text.pos_to_byte(&doc.position),
                            &doc.text_document.uri,
                        )?),
                        "$" => Some(CompletionList {
                            is_incomplete: false,
                            items: self.sys_tasks.clone(),
                        }),
                        "`" => Some(CompletionList {
                            is_incomplete: false,
                            items: self.directives.clone(),
                        }),
                        _ => None,
                    }
                }
                CompletionTriggerKind::TRIGGER_FOR_INCOMPLETE_COMPLETIONS => None,
                CompletionTriggerKind::INVOKED => {
                    debug!("Invoked Completion");
                    let mut comps = self.srcs.get_completions(
                        &token,
                        file.text.pos_to_byte(&doc.position),
                        &doc.text_document.uri,
                    )?;
                    // complete keywords
                    comps.items.extend::<Vec<CompletionItem>>(
                        self.key_comps
                            .iter()
                            .filter(|x| x.label.starts_with(&token))
                            .cloned()
                            .collect(),
                    );
                    Some(comps)
                }
                _ => None,
            },
            None => {
                // no context from the client: infer the trigger character
                // from the document text itself
                let trigger = prev_char(&file.text, &doc.position);
                match trigger {
                    '.' => Some(self.srcs.get_dot_completions(
                        token.trim_end_matches('.'),
                        file.text.pos_to_byte(&doc.position),
                        &doc.text_document.uri,
                    )?),
                    '$' => Some(CompletionList {
                        is_incomplete: false,
                        items: self.sys_tasks.clone(),
                    }),
                    '`' => Some(CompletionList {
                        is_incomplete: false,
                        items: self.directives.clone(),
                    }),
                    _ => {
                        let mut comps = self.srcs.get_completions(
                            &token,
                            file.text.pos_to_byte(&doc.position),
                            &doc.text_document.uri,
                        )?;
                        // also offer keywords that share the typed prefix
                        comps.items.extend::<Vec<CompletionItem>>(
                            self.key_comps
                                .iter()
                                .filter(|x| x.label.starts_with(&token))
                                .cloned()
                                .collect(),
                        );
                        Some(comps)
                    }
                }
            }
        };
        // eprintln!("comp response: {}", now.elapsed().as_millis());
        Some(CompletionResponse::List(response?))
    }
}
/// Get the previous non-whitespace character before `pos`, scanning
/// backwards through the document; returns a space if none exists.
fn prev_char(text: &Rope, pos: &Position) -> char {
    let cursor = text.pos_to_char(pos);
    (0..cursor)
        .rev()
        .map(|idx| text.char(idx))
        .find(|ch| !ch.is_whitespace())
        .unwrap_or(' ')
}
/// attempt to get the token the user was trying to complete, by
/// filtering out characters unneeded for name resolution
///
/// Returns the identifier immediately left of the cursor, keeping `.` so
/// hierarchical names survive, and stripping any trailing index expression
/// (e.g. `abc[3]` -> `abc`). A bare `.` is treated as a port connection in
/// an instantiation, in which case the instantiated type name is returned.
fn get_completion_token(text: &Rope, line: RopeSlice, pos: Position) -> String {
    let mut token = String::new();
    let mut line_iter = line.chars();
    // advance the iterator to the cursor (LSP columns are UTF-16 code units)
    for _ in 0..(line.utf16_cu_to_char(pos.character as usize)) {
        line_iter.next();
    }
    let mut c = line_iter.prev();
    //TODO: make this a regex
    // walk left, collecting identifier characters plus '.', '[' and ']'
    while c.is_some()
        && (c.unwrap().is_alphanumeric()
            || c.unwrap() == '_'
            || c.unwrap() == '.'
            || c.unwrap() == '['
            || c.unwrap() == ']')
    {
        token.push(c.unwrap());
        c = line_iter.prev();
    }
    // characters were collected right-to-left, so reverse them
    let mut result: String = token.chars().rev().collect();
    if result.contains('[') {
        // drop the index expression from the first '[' onward
        let l_bracket_offset = result.find('[').unwrap_or(result.len());
        result.replace_range(l_bracket_offset.., "");
    }
    if &result == "." {
        // probably an instantiation, the token should be what we're instantiating
        let mut char_iter = text.chars();
        let mut token = String::new();
        for _ in 0..text.pos_to_char(&pos) {
            char_iter.next();
        }
        let mut c = char_iter.prev();
        // go to the last semicolon
        while c.is_some() && (c.unwrap() != ';') {
            c = char_iter.prev();
        }
        // go to the start of the next symbol
        while c.is_some() && !(c.unwrap().is_alphanumeric() || c.unwrap() == '_') {
            c = char_iter.next();
        }
        // then extract the next symbol
        while c.is_some() && (c.unwrap().is_alphanumeric() || c.unwrap() == '_') {
            token.push(c.unwrap());
            c = char_iter.next();
        }
        token
    } else {
        result
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::definition::def_types::Scope;
    use crate::definition::get_scopes;
    use crate::sources::{parse, LSPSupport};
    use crate::support::test_init;
    use ropey::Rope;

    /// `get_completion_token` should return the identifier (including dots,
    /// excluding any index expression) ending at the cursor.
    #[test]
    fn test_get_completion_token() {
        test_init();
        let text = Rope::from_str("abc abc.cba de_fg cde[4]");
        let mut result = get_completion_token(
            &text,
            text.line(0),
            Position {
                line: 0,
                character: 3,
            },
        );
        assert_eq!(&result, "abc");
        result = get_completion_token(
            &text,
            text.line(0),
            Position {
                line: 0,
                character: 11,
            },
        );
        assert_eq!(&result, "abc.cba");
        result = get_completion_token(
            &text,
            text.line(0),
            Position {
                line: 0,
                character: 16,
            },
        );
        assert_eq!(&result, "de_f");
        result = get_completion_token(
            &text,
            text.line(0),
            Position {
                line: 0,
                character: 23,
            },
        );
        assert_eq!(&result, "cde");
    }

    /// Open a document, apply incremental edits, then request an invoked
    /// completion and expect both declared signals to be offered.
    #[test]
    fn test_completion() {
        test_init();
        let server = LSPServer::new(None);
        let uri = Url::parse("file:///test.sv").unwrap();
        let text = r#"module test;
logic abc;
logic abcd;
endmodule
"#;
        let open_params = DidOpenTextDocumentParams {
            text_document: TextDocumentItem {
                uri: uri.clone(),
                language_id: "systemverilog".to_owned(),
                version: 0,
                text: text.to_owned(),
            },
        };
        server.did_open(open_params);
        let fid = server.srcs.get_id(&uri);
        server.srcs.wait_parse_ready(fid, true);
        let change_params = DidChangeTextDocumentParams {
            text_document: VersionedTextDocumentIdentifier {
                uri: uri.clone(),
                version: 3,
            },
            content_changes: vec![
                TextDocumentContentChangeEvent {
                    range: Some(Range {
                        start: Position {
                            line: 3,
                            character: 0,
                        },
                        end: Position {
                            line: 3,
                            character: 0,
                        },
                    }),
                    range_length: None,
                    text: "\n".to_owned(),
                },
                TextDocumentContentChangeEvent {
                    range: Some(Range {
                        start: Position {
                            line: 4,
                            character: 0,
                        },
                        end: Position {
                            line: 4,
                            character: 0,
                        },
                    }),
                    range_length: None,
                    text: " ".to_owned(),
                },
                TextDocumentContentChangeEvent {
                    range: Some(Range {
                        start: Position {
                            line: 4,
                            character: 2,
                        },
                        end: Position {
                            line: 4,
                            character: 2,
                        },
                    }),
                    range_length: None,
                    text: "a".to_owned(),
                },
            ],
        };
        server.did_change(change_params);
        server.srcs.wait_parse_ready(fid, true);
        let completion_params = CompletionParams {
            text_document_position: TextDocumentPositionParams {
                text_document: TextDocumentIdentifier { uri },
                position: Position {
                    line: 4,
                    character: 3,
                },
            },
            work_done_progress_params: WorkDoneProgressParams::default(),
            partial_result_params: PartialResultParams::default(),
            context: Some(CompletionContext {
                trigger_kind: CompletionTriggerKind::INVOKED,
                trigger_character: None,
            }),
        };
        let response: CompletionResponse = server.completion(completion_params).unwrap();
        let item1 = CompletionItem {
            label: "abc".to_owned(),
            kind: Some(CompletionItemKind::VARIABLE),
            detail: Some("logic".to_string()),
            ..CompletionItem::default()
        };
        let item2 = CompletionItem {
            label: "abcd".to_owned(),
            kind: Some(CompletionItemKind::VARIABLE),
            detail: Some("logic".to_string()),
            ..CompletionItem::default()
        };
        if let CompletionResponse::List(item) = response {
            assert!(item.items.contains(&item1));
            assert!(item.items.contains(&item2));
        } else {
            panic!();
        }
    }

    /// Completions must respect scope nesting: symbols from an enclosing
    /// scope are visible, symbols from a sibling function are not.
    #[test]
    fn test_nested_completion() {
        test_init();
        let server = LSPServer::new(None);
        let uri = Url::parse("file:///test.sv").unwrap();
        let text = r#"module test;
logic aouter;
function func1();
logic abc;
func1 = abc;
endfunction
function func2();
logic abcd;
func2 = abcd;
endfunction
endmodule
"#;
        let open_params = DidOpenTextDocumentParams {
            text_document: TextDocumentItem {
                uri: uri.clone(),
                language_id: "systemverilog".to_owned(),
                version: 0,
                text: text.to_owned(),
            },
        };
        server.did_open(open_params);
        let fid = server.srcs.get_id(&uri);
        server.srcs.wait_parse_ready(fid, true);
        let change_params = DidChangeTextDocumentParams {
            text_document: VersionedTextDocumentIdentifier {
                uri: uri.clone(),
                version: 3,
            },
            content_changes: vec![
                TextDocumentContentChangeEvent {
                    range: Some(Range {
                        start: Position {
                            line: 4,
                            character: 0,
                        },
                        end: Position {
                            line: 4,
                            character: 0,
                        },
                    }),
                    range_length: None,
                    text: "\n".to_owned(),
                },
                TextDocumentContentChangeEvent {
                    range: Some(Range {
                        start: Position {
                            line: 4,
                            character: 0,
                        },
                        end: Position {
                            line: 4,
                            character: 0,
                        },
                    }),
                    range_length: None,
                    text: " ".to_owned(),
                },
                TextDocumentContentChangeEvent {
                    range: Some(Range {
                        start: Position {
                            line: 4,
                            character: 2,
                        },
                        end: Position {
                            line: 4,
                            character: 2,
                        },
                    }),
                    range_length: None,
                    text: "a".to_owned(),
                },
            ],
        };
        server.did_change(change_params);
        server.srcs.wait_parse_ready(fid, true);
        let completion_params = CompletionParams {
            text_document_position: TextDocumentPositionParams {
                text_document: TextDocumentIdentifier { uri },
                position: Position {
                    line: 4,
                    character: 3,
                },
            },
            work_done_progress_params: WorkDoneProgressParams::default(),
            partial_result_params: PartialResultParams::default(),
            context: Some(CompletionContext {
                trigger_kind: CompletionTriggerKind::INVOKED,
                trigger_character: None,
            }),
        };
        let response: CompletionResponse = server.completion(completion_params).unwrap();
        let item1 = CompletionItem {
            label: "abc".to_owned(),
            kind: Some(CompletionItemKind::VARIABLE),
            detail: Some("logic".to_string()),
            ..CompletionItem::default()
        };
        let item3 = CompletionItem {
            label: "aouter".to_owned(),
            kind: Some(CompletionItemKind::VARIABLE),
            detail: Some("logic".to_string()),
            ..CompletionItem::default()
        };
        if let CompletionResponse::List(item) = response {
            eprintln!("{:#?}", item);
            assert!(item.items.contains(&item1));
            // "abcd" lives in a sibling function scope and must not leak here
            for comp in &item.items {
                assert!(comp.label != "abcd");
            }
            assert!(item.items.contains(&item3));
        } else {
            panic!();
        }
    }

    /// Dot completion after `abc.` (an interface instance) and after
    /// `test_inter.` (the interface type) should list the interface members.
    #[test]
    fn test_dot_completion() {
        test_init();
        let server = LSPServer::new(None);
        let uri = Url::parse("file:///test.sv").unwrap();
        let text = r#"interface test_inter;
wire abcd;
endinterface
module test(
test_inter abc
);
abc.
test_inter.
endmodule
"#;
        let open_params = DidOpenTextDocumentParams {
            text_document: TextDocumentItem {
                uri: uri.clone(),
                language_id: "systemverilog".to_owned(),
                version: 0,
                text: text.to_owned(),
            },
        };
        server.did_open(open_params);
        let fid = server.srcs.get_id(&uri);
        server.srcs.wait_parse_ready(fid, true);
        let file = server.srcs.get_file(fid).unwrap();
        let file = file.read().unwrap();
        eprintln!("{}", file.syntax_tree.as_ref().unwrap());
        eprintln!(
            "{:#?}",
            server.srcs.scope_tree.read().unwrap().as_ref().unwrap()
        );
        let completion_params = CompletionParams {
            text_document_position: TextDocumentPositionParams {
                text_document: TextDocumentIdentifier { uri: uri.clone() },
                position: Position {
                    line: 6,
                    character: 8,
                },
            },
            work_done_progress_params: WorkDoneProgressParams::default(),
            partial_result_params: PartialResultParams::default(),
            context: Some(CompletionContext {
                trigger_kind: CompletionTriggerKind::TRIGGER_CHARACTER,
                trigger_character: Some(".".to_string()),
            }),
        };
        let response: CompletionResponse = server.completion(completion_params).unwrap();
        dbg!(&response);
        let item1 = CompletionItem {
            label: "abcd".to_owned(),
            kind: Some(CompletionItemKind::VARIABLE),
            detail: Some("wire".to_string()),
            ..CompletionItem::default()
        };
        if let CompletionResponse::List(item) = response {
            eprintln!("{:#?}", item);
            assert!(item.items.contains(&item1));
            assert!(item.items.len() == 1);
        } else {
            panic!();
        }
        let completion_params = CompletionParams {
            text_document_position: TextDocumentPositionParams {
                text_document: TextDocumentIdentifier { uri },
                position: Position {
                    line: 7,
                    character: 14,
                },
            },
            work_done_progress_params: WorkDoneProgressParams::default(),
            partial_result_params: PartialResultParams::default(),
            context: Some(CompletionContext {
                trigger_kind: CompletionTriggerKind::TRIGGER_CHARACTER,
                trigger_character: Some(".".to_string()),
            }),
        };
        let response: CompletionResponse = server.completion(completion_params).unwrap();
        if let CompletionResponse::List(item) = response {
            eprintln!("{:#?}", item);
            assert!(item.items.contains(&item1));
            assert!(item.items.len() == 1);
        } else {
            panic!();
        }
    }

    /// Same as `test_dot_completion`, but the first request carries no
    /// `CompletionContext`: the `.` trigger must be inferred from the text.
    #[test]
    fn test_trigger_dot_nocontext() {
        test_init();
        let server = LSPServer::new(None);
        let uri = Url::parse("file:///test.sv").unwrap();
        let text = r#"interface test_inter;
wire abcd;
endinterface
module test(
test_inter abc
);
abc.
test_inter.
endmodule
"#;
        let open_params = DidOpenTextDocumentParams {
            text_document: TextDocumentItem {
                uri: uri.clone(),
                language_id: "systemverilog".to_owned(),
                version: 0,
                text: text.to_owned(),
            },
        };
        server.did_open(open_params);
        let fid = server.srcs.get_id(&uri);
        server.srcs.wait_parse_ready(fid, true);
        let file = server.srcs.get_file(fid).unwrap();
        let file = file.read().unwrap();
        eprintln!("{}", file.syntax_tree.as_ref().unwrap());
        eprintln!(
            "{:#?}",
            server.srcs.scope_tree.read().unwrap().as_ref().unwrap()
        );
        let completion_params = CompletionParams {
            text_document_position: TextDocumentPositionParams {
                text_document: TextDocumentIdentifier { uri: uri.clone() },
                position: Position {
                    line: 6,
                    character: 8,
                },
            },
            work_done_progress_params: WorkDoneProgressParams::default(),
            partial_result_params: PartialResultParams::default(),
            context: None,
        };
        let response: CompletionResponse = server.completion(completion_params).unwrap();
        dbg!(&response);
        let item1 = CompletionItem {
            label: "abcd".to_owned(),
            kind: Some(CompletionItemKind::VARIABLE),
            detail: Some("wire".to_string()),
            ..CompletionItem::default()
        };
        if let CompletionResponse::List(item) = response {
            eprintln!("{:#?}", item);
            assert!(item.items.contains(&item1));
            assert!(item.items.len() == 1);
        } else {
            panic!();
        }
        let completion_params = CompletionParams {
            text_document_position: TextDocumentPositionParams {
                text_document: TextDocumentIdentifier { uri },
                position: Position {
                    line: 7,
                    character: 14,
                },
            },
            work_done_progress_params: WorkDoneProgressParams::default(),
            partial_result_params: PartialResultParams::default(),
            context: Some(CompletionContext {
                trigger_kind: CompletionTriggerKind::TRIGGER_CHARACTER,
                trigger_character: Some(".".to_string()),
            }),
        };
        let response: CompletionResponse = server.completion(completion_params).unwrap();
        if let CompletionResponse::List(item) = response {
            eprintln!("{:#?}", item);
            assert!(item.items.contains(&item1));
            assert!(item.items.len() == 1);
        } else {
            panic!();
        }
    }

    /// Dot completion on a bare `.` inside a module instantiation should
    /// offer the ports of the *instantiated* interface (declared after the
    /// module), not the one declared before it.
    #[test]
    fn test_dot_completion_instantiation() {
        test_init();
        let text = r#"interface test_inter;
wire wrong;
logic clk;
endinterface
module test;
logic clk;
test_inter2 t (
.clk(clk),
.
)
endmodule
interface test_inter2;
wire abcd;
logic clk;
endinterface
"#;
        let doc = Rope::from_str(&text);
        let url = Url::parse("file:///test.sv").unwrap();
        let syntax_tree = parse(&doc, &url, &None, &Vec::new()).unwrap();
        let scope_tree = get_scopes(&syntax_tree, &url).unwrap();
        let pos = Position::new(8, 9);
        let token = get_completion_token(&doc, doc.line(pos.line as usize), pos);
        let completions = scope_tree.get_dot_completion(
            token.trim_end_matches('.'),
            doc.pos_to_byte(&pos),
            &url,
            &scope_tree,
        );
        let labels: Vec<String> = completions.iter().map(|x| x.label.clone()).collect();
        assert_eq!(labels, vec!["abcd", "clk"]);
    }

    /*
    #[test]
    fn test_package_completion() {
        test_init();
        let text = r#"package p;
struct {int x;} s1;
struct {int x;} s2;
function void f();
int x;
endfunction
endpackage
module m;
import p::*;
if (1) begin : s1
initial begin
s1.x = 1;
f.x = 1;
end
int x;
end
endmodule
"#;
        let doc = Rope::from_str(&text);
        let url = Url::parse("file:///test.sv").unwrap();
        let syntax_tree = parse(&doc, &url, &None, &Vec::new()).unwrap();
        let scope_tree = get_scopes(&syntax_tree, &url).unwrap();
        dbg!(&scope_tree);
        /*
        let pos = Position::new(8, 9);
        let token = get_completion_token(&doc, doc.line(pos.line as usize), pos);
        let completions = scope_tree.get_dot_completion(
            token.trim_end_matches('.'),
            doc.pos_to_byte(&pos),
            &url,
            &scope_tree,
        );
        let labels: Vec<String> = completions.iter().map(|x| x.label.clone()).collect();
        assert_eq!(labels, vec!["abcd", "clk"]);
        */
        panic!();
    }
    */

    /// A symbol declared in one open document should be completable from
    /// another open document.
    #[test]
    fn test_inter_file_completion() {
        test_init();
        let server = LSPServer::new(None);
        let uri = Url::parse("file:///test.sv").unwrap();
        let uri2 = Url::parse("file:///test2.sv").unwrap();
        let text = r#"module test;
s
endmodule
"#;
        let text2 = r#"interface simple_bus;
logic clk;
endinterface"#;
        let open_params = DidOpenTextDocumentParams {
            text_document: TextDocumentItem {
                uri: uri.clone(),
                language_id: "systemverilog".to_owned(),
                version: 0,
                text: text.to_owned(),
            },
        };
        let open_params2 = DidOpenTextDocumentParams {
            text_document: TextDocumentItem {
                uri: uri2.clone(),
                language_id: "systemverilog".to_owned(),
                version: 0,
                text: text2.to_owned(),
            },
        };
        server.did_open(open_params);
        server.did_open(open_params2);
        let fid = server.srcs.get_id(&uri);
        let fid2 = server.srcs.get_id(&uri2);
        server.srcs.wait_parse_ready(fid, true);
        server.srcs.wait_parse_ready(fid2, true);
        let completion_params = CompletionParams {
            text_document_position: TextDocumentPositionParams {
                text_document: TextDocumentIdentifier { uri },
                position: Position {
                    line: 1,
                    character: 5,
                },
            },
            work_done_progress_params: WorkDoneProgressParams::default(),
            partial_result_params: PartialResultParams::default(),
            context: Some(CompletionContext {
                trigger_kind: CompletionTriggerKind::INVOKED,
                trigger_character: None,
            }),
        };
        let response: CompletionResponse = server.completion(completion_params).unwrap();
        let scope_tree = server.srcs.scope_tree.read().unwrap();
        dbg!(scope_tree.as_ref().unwrap());
        if let CompletionResponse::List(item) = response {
            // eprintln!("{:#?}", item);
            let names: Vec<&String> = item.items.iter().map(|x| &x.label).collect();
            assert!(names.contains(&&"simple_bus".to_string()));
        } else {
            panic!();
        }
    }
}

466
src/completion/keyword.rs Normal file
View File

@@ -0,0 +1,466 @@
use tower_lsp::lsp_types::*;
/// Build completion items for language keywords.
///
/// Each entry is a `(label, snippet)` pair: an empty snippet yields a plain
/// keyword item, a non-empty one yields a snippet insertion (tab stops like
/// `$1` are interpreted by the client).
pub fn keyword_completions(keywords: &[(&str, &str)]) -> Vec<CompletionItem> {
    keywords
        .iter()
        .copied()
        .map(|(label, snippet)| {
            if snippet.is_empty() {
                CompletionItem {
                    label: label.to_string(),
                    kind: Some(CompletionItemKind::KEYWORD),
                    ..CompletionItem::default()
                }
            } else {
                CompletionItem {
                    label: label.to_string(),
                    kind: Some(CompletionItemKind::KEYWORD),
                    insert_text: Some(snippet.to_string()),
                    insert_text_format: Some(InsertTextFormat::SNIPPET),
                    ..CompletionItem::default()
                }
            }
        })
        .collect()
}
/// Build function-kind completion items from a list of names (used for
/// system tasks and compiler directives).
pub fn other_completions(tasks: &[&str]) -> Vec<CompletionItem> {
    let mut items = Vec::with_capacity(tasks.len());
    for task in tasks {
        items.push(CompletionItem {
            label: task.to_string(),
            kind: Some(CompletionItemKind::FUNCTION),
            ..CompletionItem::default()
        });
    }
    items
}
/// SystemVerilog keywords paired with an optional snippet expansion.
/// An empty second element means the keyword is inserted as plain text;
/// otherwise the string is an LSP snippet body (`$1`, `$2` are tab stops).
pub const KEYWORDS: &[(&str, &str)] = &[
    ("accept_on", ""),
    ("alias", ""),
    ("always", "always @($1) begin\nend"),
    ("always_comb", "always_comb begin\n\t$1\nend"),
    ("always_ff", "always_ff @($1) begin\nend"),
    ("always_latch", "always_latch begin\n\t$1\nend"),
    ("and", ""),
    ("assert", ""),
    ("assign", ""),
    ("assume", ""),
    ("automatic", ""),
    ("before", ""),
    ("begin", "begin\n\t$1\nend"),
    ("bind", ""),
    ("bins", ""),
    ("binsof", ""),
    ("bit", ""),
    ("break", ""),
    ("buf", ""),
    ("bufif0", ""),
    ("bufif1", ""),
    ("byte", ""),
    ("case", "case $1;\nendcase"),
    ("casex", "casex $1;\nendcase"),
    ("casez", "casez $1;\nendcase"),
    ("cell", ""),
    ("chandle", ""),
    ("checker", "checker $1;\nendchecker"),
    ("class", "class $1;\nendclass"),
    ("clocking", "clocking $1;\nendclocking"),
    ("cmos", ""),
    ("config", "config $1;\nendconfig"),
    ("const", ""),
    ("constraint", ""),
    ("context", ""),
    ("continue", ""),
    ("cover", ""),
    ("covergroup", ""),
    ("coverpoint", ""),
    ("cross", ""),
    ("deassign", ""),
    ("default", ""),
    ("defparam", ""),
    ("design", ""),
    ("disable", ""),
    ("dist", ""),
    ("do", ""),
    ("edge", ""),
    ("else", ""),
    ("end", ""),
    ("endcase", ""),
    ("endchecker", ""),
    ("endclass", ""),
    ("endclocking", ""),
    ("endconfig", ""),
    ("endfunction", ""),
    ("endgenerate", ""),
    ("endgroup", ""),
    ("endinterface", ""),
    ("endmodule", ""),
    ("endpackage", ""),
    ("endprimitive", ""),
    ("endprogram", ""),
    ("endproperty", ""),
    ("endspecify", ""),
    ("endsequence", ""),
    ("endtable", ""),
    ("endtask", ""),
    ("enum", ""),
    ("event", ""),
    ("eventually", ""),
    ("expect", ""),
    ("export", ""),
    ("extends", ""),
    ("extern", ""),
    ("final", ""),
    ("first_match", ""),
    ("for", ""),
    ("force", ""),
    ("foreach", ""),
    ("forever", ""),
    ("fork", ""),
    ("forkjoin", ""),
    ("function", "function $1;\nendfunction"),
    ("generate", "generate\n\t$1\nendgenerate"),
    ("genvar", ""),
    ("global", ""),
    ("highz0", ""),
    ("highz1", ""),
    ("if", ""),
    ("iff", ""),
    ("ifnone", ""),
    ("ignore_bins", ""),
    ("illegal_bins", ""),
    ("implements", ""),
    ("implies", ""),
    ("import", ""),
    ("incdir", ""),
    ("include", ""),
    ("initial", ""),
    ("inout", ""),
    ("input", ""),
    ("inside", ""),
    ("instance", ""),
    ("int", ""),
    ("integer", ""),
    ("interconnect", ""),
    ("interface", "interface $1;\nendinterface"),
    ("intersect", ""),
    ("join", ""),
    ("join_any", ""),
    ("join_none", ""),
    ("large", ""),
    ("let", ""),
    ("liblist", ""),
    ("library", ""),
    ("local", ""),
    ("localparam", ""),
    ("logic", ""),
    ("longint", ""),
    ("macromodule", ""),
    ("matches", ""),
    ("medium", ""),
    ("modport", ""),
    ("module", "module $1 ($2);\nendmodule"),
    ("nand", ""),
    ("negedge", ""),
    ("nettype", ""),
    ("new", ""),
    ("nexttime", ""),
    ("nmos", ""),
    ("nor", ""),
    ("noshowcancelled", ""),
    ("not", ""),
    ("notif0", ""),
    ("notif1", ""),
    ("null", ""),
    ("or", ""),
    ("output", ""),
    ("package", "package $1;\nendpackage"),
    ("packed", ""),
    ("parameter", ""),
    ("pmos", ""),
    ("posedge", ""),
    ("primitive", "primitive $1;\nendprimitive"),
    ("priority", ""),
    ("program", "program $1;\nendprogram"),
    ("property", "property $1;\nendproperty"),
    ("protected", ""),
    ("pull0", ""),
    ("pull1", ""),
    ("pulldown", ""),
    ("pullup", ""),
    ("pulsestyle_ondetect", ""),
    ("pulsestyle_onevent", ""),
    ("pure", ""),
    ("rand", ""),
    ("randc", ""),
    ("randcase", ""),
    ("randsequence", ""),
    ("rcmos", ""),
    ("real", ""),
    ("realtime", ""),
    ("ref", ""),
    ("reg", ""),
    ("reject_on", ""),
    ("release", ""),
    ("repeat", ""),
    ("restrict", ""),
    ("return", ""),
    ("rnmos", ""),
    ("rpmos", ""),
    ("rtran", ""),
    ("rtranif0", ""),
    ("rtranif1", ""),
    ("s_always", ""),
    ("s_eventually", ""),
    ("s_nexttime", ""),
    ("s_until", ""),
    ("s_until_with", ""),
    ("scalared", ""),
    ("sequence", "sequence $1;\nendsequence"),
    ("shortint", ""),
    ("shortreal", ""),
    ("showcancelled", ""),
    ("signed", ""),
    ("small", ""),
    ("soft", ""),
    ("solve", ""),
    ("specify", "specify $1;\nendspecify"),
    ("specparam", ""),
    ("static", ""),
    ("string", ""),
    ("strong", ""),
    ("strong0", ""),
    ("strong1", ""),
    ("struct", ""),
    ("super", ""),
    ("supply0", ""),
    ("supply1", ""),
    ("sync_accept_on", ""),
    ("sync_reject_on", ""),
    ("table", "table $1;\nendtable"),
    ("tagged", ""),
    ("task", "task $1;\nendtask"),
    ("this", ""),
    ("throughout", ""),
    ("time", ""),
    ("timeprecision", ""),
    ("timeunit", ""),
    ("tran", ""),
    ("tranif0", ""),
    ("tranif1", ""),
    ("tri", ""),
    ("tri0", ""),
    ("tri1", ""),
    ("triand", ""),
    ("trior", ""),
    ("trireg", ""),
    ("type", ""),
    ("typedef", ""),
    ("union", ""),
    ("unique", ""),
    ("unique0", ""),
    ("unsigned", ""),
    ("until", ""),
    ("until_with", ""),
    ("untyped", ""),
    ("use", ""),
    ("uwire", ""),
    ("var", ""),
    ("vectored", ""),
    ("virtual", ""),
    ("void", ""),
    ("wait", ""),
    ("wait_order", ""),
    ("wand", ""),
    ("weak", ""),
    ("weak0", ""),
    ("weak1", ""),
    ("while", ""),
    ("wildcard", ""),
    ("wire", ""),
    ("with", ""),
    ("within", ""),
    ("wor", ""),
    ("xnor", ""),
    ("xor", ""),
];
/// System task/function names offered after a `$` trigger character
/// (stored without the leading `$`, which the user has already typed).
pub const SYS_TASKS: &[&str] = &[
    "finish",
    "exit",
    "fatal",
    "warning",
    "stop",
    "error",
    "info",
    "realtime",
    "time",
    "asserton",
    "assertkill",
    "assertpasson",
    "assertfailon",
    "assertnonvacuouson",
    "stime",
    "printtimescale",
    "timeformat",
    "bitstoreal",
    "bitstoshortreal",
    "itor",
    "signed",
    "cast",
    "realtobits",
    "shortrealtobits",
    "rtoi",
    "unsigned",
    "sampled",
    "fell",
    "changed",
    "past_gclk",
    "fell_gclk",
    "changed_gclk",
    "rising_gclk",
    "steady_gclk",
    "bits",
    "typename",
    "isunbounded",
    "coverage_control",
    "coverage_get",
    "coverage_save",
    "set_coverage_db_name",
    "dimensions",
    "right",
    "high",
    "size",
    "random",
    "dist_erlang",
    "dist_normal",
    "dist_t",
    "asin",
    "acos",
    "atan",
    "atan2",
    "hypot",
    "sinh",
    "cosh",
    "tanh",
    "asinh",
    "acosh",
    "atanh",
    "q_initialize",
    "q_remove",
    "q_exam",
    "q_add",
    "q_full",
    "async$and$array",
    "async$nand$array",
    "async$or$array",
    "async$nor$array",
    "sync$and$array",
    "sync$nand$array",
    "sync$or$array",
    "sync$nor$array",
    "countones",
    "onehot0",
    "fatal",
    "warning",
    "dist_chi_square",
    "dist_exponential",
    "dist_poisson",
    "dist_uniform",
    "countbits",
    "onehot",
    "isunknown",
    "coverage_get_max",
    "coverage_merge",
    "get_coverage",
    "load_coverage_db",
    "clog2",
    "ln",
    "log10",
    "exp",
    "sqrt",
    "pow",
    "floor",
    "ceil",
    "sin",
    "cos",
    "tan",
    "rose",
    "stable",
    "past",
    "rose_gclk",
    "stable_gclk",
    "future_gclk",
    "falling_gclk",
    "changing_gclk",
    "unpacked_dimensions",
    "left",
    "low",
    "increment",
    "assertoff",
    "assertcontrol",
    "assertpassoff",
    "assertfailoff",
    "assertvacuousoff",
    "error",
    "info",
    "async$and$plane",
    "async$nand$plane",
    "async$or$plane",
    "async$nor$plane",
    "sync$and$plane",
    "sync$nand$plane",
    "sync$or$plane",
    "sync$nor$plane",
    "system",
    "countdrivers",
    "getpattern",
    "incsave",
    "input",
    "key",
    "list",
    "log",
    "nokey",
    "nolog",
    "reset",
    "reset_count",
    "reset_value",
    "restart",
    "save",
    "scale",
    "scope",
    "showscopes",
    "showvars",
    "sreadmemb",
    "sreadmemh",
];
/// Compiler directive names offered after a `` ` `` trigger character
/// (stored without the leading backtick, which the user has already typed).
pub const DIRECTIVES: &[&str] = &[
    "__FILE__",
    "__LINE__",
    "begin_keywords",
    "celldefine",
    "default_nettype",
    "define",
    "else",
    "elsif",
    "end_keywords",
    "endcelldefine",
    "endif",
    "ifdef",
    "ifndef",
    "include",
    "line",
    "nounconnected_drive",
    "pragma",
    "resetall",
    "timescale",
    "unconnected_drive",
    "undef",
    "undefineall",
    "default_decay_time",
    "default_trireg_strength",
    "delay_mode_distributed",
    "delay_mode_path",
    "delay_mode_unit",
    "delay_mode_zero",
];

499
src/definition.rs Normal file
View File

@@ -0,0 +1,499 @@
use crate::definition::extract_defs::get_ident;
use crate::server::LSPServer;
use crate::sources::LSPSupport;
use log::{debug, trace};
use ropey::{Rope, RopeSlice};
use sv_parser::*;
use tower_lsp::lsp_types::*;
pub mod def_types;
pub use def_types::*;
mod extract_defs;
use extract_defs::*;
impl LSPServer {
    /// Handle a `textDocument/definition` request: resolve the identifier
    /// under the cursor via the scope tree and return its declaration site
    /// as a zero-width range.
    pub fn goto_definition(&self, params: GotoDefinitionParams) -> Option<GotoDefinitionResponse> {
        let doc = params.text_document_position_params.text_document.uri;
        let pos = params.text_document_position_params.position;
        let file_id = self.srcs.get_id(&doc).to_owned();
        // ensure a parse of this file exists before querying the scope tree
        self.srcs.wait_parse_ready(file_id, false);
        let file = self.srcs.get_file(file_id)?;
        let file = file.read().ok()?;
        let token = get_definition_token(file.text.line(pos.line as usize), pos);
        debug!("goto definition, token: {}", &token);
        let scope_tree = self.srcs.scope_tree.read().ok()?;
        trace!("{:#?}", scope_tree.as_ref()?);
        let def = scope_tree
            .as_ref()?
            .get_definition(&token, file.text.pos_to_byte(&pos), &doc)?;
        let def_pos = file.text.byte_to_pos(def.byte_idx());
        debug!("def: {:?}", def_pos);
        Some(GotoDefinitionResponse::Scalar(Location::new(
            def.url(),
            Range::new(def_pos, def_pos),
        )))
    }

    /// Handle a `textDocument/hover` request: hover content is built (via
    /// `get_hover`) from the line containing the token's definition and is
    /// rendered as a `systemverilog` code block.
    pub fn hover(&self, params: HoverParams) -> Option<Hover> {
        let doc = params.text_document_position_params.text_document.uri;
        let pos = params.text_document_position_params.position;
        let file_id = self.srcs.get_id(&doc).to_owned();
        self.srcs.wait_parse_ready(file_id, false);
        let file = self.srcs.get_file(file_id)?;
        let file = file.read().ok()?;
        let token = get_definition_token(file.text.line(pos.line as usize), pos);
        debug!("hover, token: {}", &token);
        let scope_tree = self.srcs.scope_tree.read().ok()?;
        let def = scope_tree
            .as_ref()?
            .get_definition(&token, file.text.pos_to_byte(&pos), &doc)?;
        let def_line = file.text.byte_to_line(def.byte_idx());
        Some(Hover {
            contents: HoverContents::Scalar(MarkedString::LanguageString(LanguageString {
                language: "systemverilog".to_owned(),
                value: get_hover(&file.text, def_line),
            })),
            range: None,
        })
    }

    /// Handle a `textDocument/documentSymbol` request: return the nested
    /// symbol hierarchy derived from the scope tree.
    pub fn document_symbol(&self, params: DocumentSymbolParams) -> Option<DocumentSymbolResponse> {
        let uri = params.text_document.uri;
        let file_id = self.srcs.get_id(&uri).to_owned();
        self.srcs.wait_parse_ready(file_id, false);
        let file = self.srcs.get_file(file_id)?;
        let file = file.read().ok()?;
        let scope_tree = self.srcs.scope_tree.read().ok()?;
        Some(DocumentSymbolResponse::Nested(
            scope_tree.as_ref()?.document_symbols(&uri, &file.text),
        ))
    }

    /// Handle a `textDocument/documentHighlight` request: highlight every
    /// identifier in the file whose text matches the token under the cursor.
    pub fn document_highlight(
        &self,
        params: DocumentHighlightParams,
    ) -> Option<Vec<DocumentHighlight>> {
        let uri = params.text_document_position_params.text_document.uri;
        let pos = params.text_document_position_params.position;
        let file_id = self.srcs.get_id(&uri).to_owned();
        self.srcs.wait_parse_ready(file_id, false);
        let file = self.srcs.get_file(file_id)?;
        let file = file.read().ok()?;
        let token = get_definition_token(file.text.line(pos.line as usize), pos);
        let scope_tree = self.srcs.scope_tree.read().ok()?;
        // use the byte_idx of the definition if possible, otherwise use the cursor
        let byte_idx =
            match scope_tree
                .as_ref()?
                .get_definition(&token, file.text.pos_to_byte(&pos), &uri)
            {
                Some(def) => def.byte_idx,
                None => file.text.pos_to_byte(&pos),
            };
        let syntax_tree = file.syntax_tree.as_ref()?;
        let references = all_identifiers(syntax_tree, &token);
        Some(
            scope_tree
                .as_ref()?
                .document_highlights(&uri, &file.text, references, byte_idx),
        )
    }
}
/// Return every identifier in the syntax tree whose text matches `token`,
/// paired with its byte offset in the source.
fn all_identifiers(syntax_tree: &SyntaxTree, token: &str) -> Vec<(String, usize)> {
    syntax_tree
        .into_iter()
        .filter(|node| matches!(node, RefNode::Identifier(_)))
        .map(|node| get_ident(syntax_tree, node))
        .filter(|(ident, _)| ident == token)
        .collect()
}
/// Retrieve the identifier the user invoked goto-definition or hover on:
/// the maximal run of alphanumeric/underscore characters around the cursor.
///
/// Uses `RopeSlice::chars_at` to position the iterator directly instead of
/// building a fresh `chars()` iterator and manually advancing it once per
/// character (the original did that twice).
fn get_definition_token(line: RopeSlice, pos: Position) -> String {
    // LSP positions count UTF-16 code units; convert to a char index first.
    let char_idx = line.utf16_cu_to_char(pos.character as usize);
    let mut token = String::new();
    // Walk backwards from the cursor, collecting identifier characters.
    let mut backward = line.chars_at(char_idx);
    while let Some(c) = backward.prev() {
        if c.is_alphanumeric() || c == '_' {
            token.push(c);
        } else {
            break;
        }
    }
    // The backward walk produced the prefix reversed; restore source order.
    token = token.chars().rev().collect();
    // Walk forwards from the cursor for the remainder of the identifier.
    for c in line.chars_at(char_idx) {
        if c.is_alphanumeric() || c == '_' {
            token.push(c);
        } else {
            break;
        }
    }
    token
}
/// The scopes and definitions extracted from one syntax node; `None` signals failure.
type ScopesAndDefs = Option<(Vec<Box<dyn Scope>>, Vec<Box<dyn Definition>>)>;
/// Take a given syntax node from a sv-parser syntax tree and extract out the
/// definition/scope at that point.
///
/// Returns the scopes and definitions found at this node. The per-kind
/// extraction helpers (`module_dec`, `net_dec`, ...) consume `event_iter`
/// to recurse into the node, so this builds a tree when called from
/// `get_scopes`.
///
/// Rewritten to use `if let Some(..)` instead of the
/// `if x.is_some() { ... x? ... }` pattern: same behavior, no redundant
/// unwrap via `?` after an `is_some` check.
pub fn match_definitions(
    syntax_tree: &SyntaxTree,
    event_iter: &mut EventIter,
    node: RefNode,
    url: &Url,
) -> ScopesAndDefs {
    let mut definitions: Vec<Box<dyn Definition>> = Vec::new();
    let mut scopes: Vec<Box<dyn Scope>> = Vec::new();
    match node {
        RefNode::ModuleDeclaration(n) => {
            if let Some(module) = module_dec(syntax_tree, n, event_iter, url) {
                scopes.push(Box::new(module));
            }
        }
        RefNode::InterfaceDeclaration(n) => {
            if let Some(interface) = interface_dec(syntax_tree, n, event_iter, url) {
                scopes.push(Box::new(interface));
            }
        }
        RefNode::UdpDeclaration(n) => {
            if let Some(dec) = udp_dec(syntax_tree, n, event_iter, url) {
                scopes.push(Box::new(dec));
            }
        }
        RefNode::ProgramDeclaration(n) => {
            if let Some(dec) = program_dec(syntax_tree, n, event_iter, url) {
                scopes.push(Box::new(dec));
            }
        }
        RefNode::PackageDeclaration(n) => {
            if let Some(dec) = package_dec(syntax_tree, n, event_iter, url) {
                scopes.push(Box::new(dec));
            }
        }
        RefNode::ConfigDeclaration(n) => {
            if let Some(dec) = config_dec(syntax_tree, n, event_iter, url) {
                scopes.push(Box::new(dec));
            }
        }
        RefNode::ClassDeclaration(n) => {
            if let Some(dec) = class_dec(syntax_tree, n, event_iter, url) {
                scopes.push(Box::new(dec));
            }
        }
        RefNode::PortDeclaration(n) => {
            if let Some(ports) = port_dec_non_ansi(syntax_tree, n, event_iter, url) {
                for port in ports {
                    definitions.push(Box::new(port));
                }
            }
        }
        RefNode::NetDeclaration(n) => {
            if let Some(nets) = net_dec(syntax_tree, n, event_iter, url) {
                for net in nets {
                    definitions.push(Box::new(net));
                }
            }
        }
        RefNode::DataDeclaration(n) => {
            // Data declarations can yield plain variables, package imports,
            // or nested scopes (e.g. anonymous enums/structs).
            if let Some(vars) = data_dec(syntax_tree, n, event_iter, url) {
                for var in vars {
                    match var {
                        Declaration::Dec(dec) => definitions.push(Box::new(dec)),
                        Declaration::Import(dec) => definitions.push(Box::new(dec)),
                        Declaration::Scope(scope) => scopes.push(Box::new(scope)),
                    }
                }
            }
        }
        RefNode::ParameterDeclaration(n) => {
            if let Some(vars) = param_dec(syntax_tree, n, event_iter, url) {
                for var in vars {
                    definitions.push(Box::new(var));
                }
            }
        }
        RefNode::LocalParameterDeclaration(n) => {
            if let Some(vars) = localparam_dec(syntax_tree, n, event_iter, url) {
                for var in vars {
                    definitions.push(Box::new(var));
                }
            }
        }
        RefNode::FunctionDeclaration(n) => {
            if let Some(dec) = function_dec(syntax_tree, n, event_iter, url) {
                scopes.push(Box::new(dec));
            }
        }
        RefNode::TaskDeclaration(n) => {
            if let Some(dec) = task_dec(syntax_tree, n, event_iter, url) {
                scopes.push(Box::new(dec));
            }
        }
        RefNode::ModportDeclaration(n) => {
            if let Some(decs) = modport_dec(syntax_tree, n, event_iter, url) {
                for dec in decs {
                    definitions.push(Box::new(dec));
                }
            }
        }
        RefNode::ModuleInstantiation(n) => {
            if let Some(decs) = module_inst(syntax_tree, n, event_iter, url) {
                for dec in decs {
                    definitions.push(Box::new(dec));
                }
            }
        }
        RefNode::TextMacroDefinition(n) => {
            if let Some(dec) = text_macro_def(syntax_tree, n, event_iter, url) {
                definitions.push(Box::new(dec));
            }
        }
        // All other node kinds carry no definitions of their own.
        _ => (),
    }
    Some((scopes, definitions))
}
/// convert the syntax tree to a scope tree
/// the root node is the global scope
pub fn get_scopes(syntax_tree: &SyntaxTree, url: &Url) -> Option<GenericScope> {
    trace!("{}", syntax_tree);
    let mut scopes: Vec<Box<dyn Scope>> = Vec::new();
    let mut global_scope: GenericScope = GenericScope::new(url);
    global_scope.ident = "global".to_string();
    let mut event_iter = syntax_tree.into_iter().event();
    // iterate over each enter event and extract out any scopes or definitions
    // match_definitions is recursively called so we get a tree in the end
    // NOTE: match_definitions advances `event_iter` past the subtree it
    // consumes, so this loop only sees nodes not already claimed by a scope.
    while let Some(event) = event_iter.next() {
        match event {
            NodeEvent::Enter(node) => {
                let mut result = match_definitions(syntax_tree, &mut event_iter, node, url)?;
                // Top-level definitions land on the global scope directly.
                global_scope.defs.append(&mut result.1);
                scopes.append(&mut result.0);
            }
            NodeEvent::Leave(_) => (),
        }
    }
    global_scope.scopes.append(&mut scopes);
    Some(global_scope)
}
/// Build the hover text for a definition on `line`: the definition line itself
/// plus any contiguous run of comment lines (`//` or `/* ... */`) directly
/// above it, with the definition line's leading indentation stripped.
fn get_hover(doc: &Rope, line: usize) -> String {
    // Nothing can precede the first line of the file.
    if line == 0 {
        return doc.line(line).to_string();
    }
    let def_line = doc.line(line).to_string();
    // Indentation of the definition line, stripped from every collected line.
    let indent = " ".repeat(def_line.len() - def_line.trim_start().len());
    let mut collected: Vec<String> = Vec::new();
    let mut in_block_comment = false;
    let mut current = def_line;
    let mut idx = line;
    // Scan upwards, stopping at the first line that is not part of a comment.
    loop {
        collected.push(current.clone());
        idx -= 1;
        if idx == 0 {
            break;
        }
        current = doc.line(idx).to_string();
        let left = current.trim_start();
        let right = current.trim_end();
        let keep = if left.starts_with("/*") && right.ends_with("*/") {
            // A block comment contained on a single line.
            true
        } else if right.ends_with("*/") {
            // Bottom of a multi-line block comment (we are reading upwards).
            in_block_comment = true;
            true
        } else if left.starts_with("/*") {
            // Top of the block comment; leave multi-line mode.
            in_block_comment = false;
            true
        } else {
            left.starts_with("//") || in_block_comment
        };
        if !keep {
            break;
        }
    }
    // Collected bottom-up; restore document order.
    collected.reverse();
    let mut result: Vec<String> = Vec::new();
    for l in collected {
        match l.strip_prefix(&indent) {
            Some(stripped) => result.push(stripped.to_owned()),
            None => result.push(l),
        }
    }
    result.join("").trim_end().to_owned()
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::sources::{parse, LSPSupport};
    use crate::support::test_init;
    use ropey::Rope;
    use std::fs::read_to_string;
    use std::path::PathBuf;

    // Cursor in the middle of `ab_c` must recover the whole identifier.
    #[test]
    fn test_definition_token() {
        test_init();
        let line = Rope::from_str("assign ab_c[2:0] = 3'b000;");
        let token = get_definition_token(line.line(0), Position::new(0, 10));
        assert_eq!(token, "ab_c".to_owned());
    }

    // Resolve a token in test_data/definition_test.sv and check the
    // definition's reported position.
    #[test]
    fn test_get_definition() {
        test_init();
        let mut d = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
        d.push("test_data/definition_test.sv");
        let text = read_to_string(d).unwrap();
        let doc = Rope::from_str(&text);
        let url = Url::parse("file:///test_data/definition_test.sv").unwrap();
        let syntax_tree = parse(&doc, &url, &None, &Vec::new()).unwrap();
        trace!("{}", &syntax_tree);
        let scope_tree = get_scopes(&syntax_tree, &url).unwrap();
        trace!("{:#?}", &scope_tree);
        for def in &scope_tree.defs {
            trace!("{:?} {:?}", def, doc.byte_to_pos(def.byte_idx()));
        }
        let token = get_definition_token(doc.line(3), Position::new(3, 13));
        // The matching definition (if any) must sit at line 3, column 9.
        for def in scope_tree.defs {
            if token == def.ident() {
                assert_eq!(doc.byte_to_pos(def.byte_idx()), Position::new(3, 9))
            }
        }
    }

    // The fixture and the expected strings are flush-left on purpose:
    // get_hover strips the definition line's indentation.
    #[test]
    fn test_hover() {
        test_init();
        let text = r#"
// module test
// test module
module test;
/* a */
logic a;
/**
* b
*/
logic b;
endmodule"#;
        let doc = Rope::from_str(text);
        eprintln!("{}", get_hover(&doc, 2));
        // Line comments above the definition are included.
        assert_eq!(
            get_hover(&doc, 3),
            r#"// module test
// test module
module test;"#
                .to_owned()
        );
        // Multi-line block comments above the definition are included.
        assert_eq!(
            get_hover(&doc, 9),
            r#"/**
* b
*/
logic b;"#
                .to_owned()
        );
    }

    // A module scope should surface as a symbol with its nets as children.
    #[test]
    fn test_symbols() {
        test_init();
        let text = r#"
module test;
logic a;
logic b;
endmodule"#;
        let doc = Rope::from_str(&text);
        let url = Url::parse("file:///test.sv").unwrap();
        let syntax_tree = parse(&doc, &url, &None, &Vec::new()).unwrap();
        let scope_tree = get_scopes(&syntax_tree, &url).unwrap();
        let symbol = scope_tree.document_symbols(&url, &doc);
        let symbol = symbol.get(0).unwrap();
        assert_eq!(&symbol.name, "test");
        let names: Vec<String> = symbol
            .children
            .as_ref()
            .unwrap()
            .iter()
            .map(|x| x.name.clone())
            .collect();
        assert!(names.contains(&"a".to_string()));
        assert!(names.contains(&"b".to_string()));
    }

    // Both the declaration and the assignment of `clk` must be highlighted.
    #[test]
    fn test_highlight() {
        test_init();
        let text = r#"
module test;
logic clk;
assign clk = 1'b1;
endmodule"#;
        let doc = Rope::from_str(&text);
        let url = Url::parse("file:///test.sv").unwrap();
        let syntax_tree = parse(&doc, &url, &None, &Vec::new()).unwrap();
        let scope_tree = get_scopes(&syntax_tree, &url).unwrap();
        let references = all_identifiers(&syntax_tree, "clk");
        let highlights = scope_tree.document_highlights(
            &url,
            &doc,
            references,
            doc.pos_to_byte(&Position::new(2, 8)),
        );
        let expected = vec![
            DocumentHighlight {
                range: Range {
                    start: Position {
                        line: 2,
                        character: 8,
                    },
                    end: Position {
                        line: 2,
                        character: 11,
                    },
                },
                kind: None,
            },
            DocumentHighlight {
                range: Range {
                    start: Position {
                        line: 3,
                        character: 9,
                    },
                    end: Position {
                        line: 3,
                        character: 12,
                    },
                },
                kind: None,
            },
        ];
        assert_eq!(highlights, expected)
    }
}

965
src/definition/def_types.rs Normal file
View File

@ -0,0 +1,965 @@
use crate::sources::LSPSupport;
use log::trace;
use ropey::Rope;
use tower_lsp::lsp_types::*;
/// Normalize the raw text of a declaration for display in completion items.
///
/// Drops any initializer (everything from `=` on), trailing statement
/// punctuation (`;`/`,`) and the declared identifier itself, then collapses
/// runs of whitespace and tightens spacing inside brackets and around range
/// colons (`[ 7 : 0 ]` -> `[7:0]`).
pub fn clean_type_str(type_str: &str, ident: &str) -> String {
    // Keep only the text before an initializer, if one is present.
    let decl = match type_str.find('=') {
        Some(eq_idx) => &type_str[..eq_idx],
        None => type_str,
    };
    let trailing: &[_] = &[';', ','];
    let collapsed = decl
        .trim()
        .trim_end_matches(trailing)
        .trim_end_matches(ident)
        .split_whitespace()
        .collect::<Vec<&str>>()
        .join(" ");
    collapsed
        .replace("[ ", "[")
        .replace(" ]", "]")
        .replace(" : ", ":")
}
/// Deep-copy a list of definitions, flattening each one into a plain
/// `GenericDec` that carries the same identity, location and kinds.
pub fn copy_defs(defs: &[Box<dyn Definition>]) -> Vec<Box<dyn Definition>> {
    defs.iter()
        .map(|def| {
            Box::new(GenericDec {
                ident: def.ident(),
                byte_idx: def.byte_idx(),
                url: def.url(),
                type_str: def.type_str(),
                completion_kind: def.completion_kind(),
                symbol_kind: def.symbol_kind(),
                def_type: def.def_type(),
            }) as Box<dyn Definition>
        })
        .collect()
}
/// Deep-copy a list of scopes into `GenericScope`s, recursively copying each
/// scope's definitions and child scopes.
pub fn copy_scopes(scopes: &[Box<dyn Scope>]) -> Vec<Box<dyn Scope>> {
    let mut copies: Vec<Box<dyn Scope>> = Vec::new();
    for scope in scopes {
        let copy = GenericScope {
            ident: scope.ident(),
            byte_idx: scope.byte_idx(),
            start: scope.start(),
            end: scope.end(),
            url: scope.url(),
            type_str: scope.type_str(),
            completion_kind: scope.completion_kind(),
            symbol_kind: scope.symbol_kind(),
            def_type: scope.def_type(),
            // Recurse into the scope's contents.
            defs: copy_defs(scope.defs()),
            scopes: copy_scopes(scope.scopes()),
        };
        copies.push(Box::new(copy));
    }
    copies
}
/// A definition of any SystemVerilog variable or construct
pub trait Definition: std::fmt::Debug + Sync + Send {
    /// identifier
    fn ident(&self) -> String;
    /// byte index in file of definition
    fn byte_idx(&self) -> usize;
    /// url pointing to the file the definition is in
    fn url(&self) -> Url;
    /// cleaned up text of the definition
    fn type_str(&self) -> String;
    /// the kind of this definition, for use in completions
    fn completion_kind(&self) -> CompletionItemKind;
    /// the kind of this definition, for use in showing document symbols
    /// (the LSP spec uses a separate enum from CompletionItemKind)
    fn symbol_kind(&self) -> SymbolKind;
    /// the kind of this definition, simplified for internal use
    fn def_type(&self) -> DefinitionType;
    /// whether the definition identifier starts with the given token
    fn starts_with(&self, token: &str) -> bool;
    /// constructs the completion for this definition
    fn completion(&self) -> CompletionItem;
    /// completions offered after `ident.`; `scope_tree` is the global scope,
    /// used to resolve interfaces/modules this definition points at
    fn dot_completion(&self, scope_tree: &GenericScope) -> Vec<CompletionItem>;
}
/// A lexical scope (module, interface, function, class, ...) that owns
/// definitions and child scopes. The recursive default methods below are
/// intended to be called on the global scope and descend to the scope
/// containing `byte_idx` before searching back upwards.
pub trait Scope: std::fmt::Debug + Definition + Sync + Send {
    /// the start byte of this scope
    fn start(&self) -> usize;
    /// the end byte of this scope
    fn end(&self) -> usize;
    /// all the definitions within this scope
    fn defs(&self) -> &Vec<Box<dyn Definition>>;
    /// all the scopes within this scope, ex. task inside a module
    fn scopes(&self) -> &Vec<Box<dyn Scope>>;
    /// the definition of this scope
    fn definition(&self) -> GenericDec {
        GenericDec {
            ident: self.ident(),
            byte_idx: self.byte_idx(),
            url: self.url(),
            type_str: self.type_str(),
            completion_kind: self.completion_kind(),
            symbol_kind: self.symbol_kind(),
            def_type: DefinitionType::GenericScope,
        }
    }
    /// return a completion from the scope tree, this function should be called on the global scope
    fn get_completion(&self, token: &str, byte_idx: usize, url: &Url) -> Vec<CompletionItem> {
        let mut completions: Vec<CompletionItem> = Vec::new();
        // first we need to go down the scope tree, to the scope the user is invoking a completion
        // in
        for scope in self.scopes() {
            if &scope.url() == url && scope.start() <= byte_idx && byte_idx <= scope.end() {
                completions = scope.get_completion(token, byte_idx, url);
                break;
            }
        }
        // now that we are in the users scope, we can attempt to find a relevant completion
        // we proceed back upwards through the scope tree, adding any definitions that match
        // the users token
        // (inner-scope completions shadow outer ones: skip idents already present)
        let completion_idents: Vec<String> = completions.iter().map(|x| x.label.clone()).collect();
        for def in self.defs() {
            if !completion_idents.contains(&def.ident()) && def.starts_with(token) {
                completions.push(def.completion());
            }
        }
        for scope in self.scopes() {
            if scope.starts_with(token) {
                completions.push(scope.completion());
            }
        }
        completions
    }
    /// return a dot completion from the scope tree, this function should be called on the global
    /// scope
    fn get_dot_completion(
        &self,
        token: &str,
        byte_idx: usize,
        url: &Url,
        scope_tree: &GenericScope,
    ) -> Vec<CompletionItem> {
        trace!("dot entering: {}, token: {}", self.ident(), token);
        trace!("{:?}", self.scopes());
        // first we need to go down the scope tree, to the scope the user is invoking a completion
        // in
        for scope in self.scopes() {
            trace!(
                "{}, {}, {}, {}",
                scope.ident(),
                byte_idx,
                scope.start(),
                scope.end()
            );
            if &scope.url() == url && scope.start() <= byte_idx && byte_idx <= scope.end() {
                eprintln!("checking dot completion: {}", scope.ident());
                let result = scope.get_dot_completion(token, byte_idx, url, scope_tree);
                if !result.is_empty() {
                    return result;
                }
            }
        }
        // now that we are in the users scope, we can attempt to find the relevant definition
        // we proceed back upwards through the scope tree, and if a definition matches our token,
        // we invoke dot completion on that definition and pass it the syntax tree
        for def in self.defs() {
            trace!("def: {:?}", def);
            if def.starts_with(token) {
                trace!("complete def: {:?}", def);
                return def.dot_completion(scope_tree);
            }
        }
        for scope in self.scopes() {
            if scope.starts_with(token) {
                trace!("found dot-completion scope: {}", scope.ident());
                return scope.dot_completion(scope_tree);
            }
        }
        Vec::new()
    }
    /// return a definition from the scope tree, this function should be called on the global
    /// scope
    fn get_definition(&self, token: &str, byte_idx: usize, url: &Url) -> Option<GenericDec> {
        let mut definition: Option<GenericDec> = None;
        // descend into the innermost scope containing the request position
        for scope in self.scopes() {
            if &scope.url() == url && scope.start() <= byte_idx && byte_idx <= scope.end() {
                definition = scope.get_definition(token, byte_idx, url);
                break;
            }
        }
        if definition.is_none() {
            for def in self.defs() {
                if def.ident() == token {
                    return Some(GenericDec {
                        ident: def.ident(),
                        byte_idx: def.byte_idx(),
                        url: def.url(),
                        type_str: def.type_str(),
                        completion_kind: def.completion_kind(),
                        symbol_kind: def.symbol_kind(),
                        // NOTE(review): the copy hardcodes Net rather than
                        // using def.def_type() — looks like it loses the
                        // original definition kind; confirm intent
                        def_type: DefinitionType::Net,
                    });
                }
            }
            for scope in self.scopes() {
                if scope.ident() == token {
                    return Some(scope.definition());
                }
            }
        }
        definition
    }
    /// returns all symbols in a document
    fn document_symbols(&self, uri: &Url, doc: &Rope) -> Vec<DocumentSymbol> {
        let mut symbols: Vec<DocumentSymbol> = Vec::new();
        // child scopes become nested symbols
        for scope in self.scopes() {
            if &scope.url() == uri {
                #[allow(deprecated)]
                symbols.push(DocumentSymbol {
                    name: scope.ident(),
                    detail: Some(scope.type_str()),
                    kind: scope.symbol_kind(),
                    deprecated: None,
                    range: Range::new(doc.byte_to_pos(scope.start()), doc.byte_to_pos(scope.end())),
                    selection_range: Range::new(
                        doc.byte_to_pos(scope.byte_idx()),
                        doc.byte_to_pos(scope.byte_idx() + scope.ident().len()),
                    ),
                    children: Some(scope.document_symbols(uri, doc)),
                    tags: None,
                })
            }
        }
        // plain definitions become leaf symbols spanning just the identifier
        for def in self.defs() {
            #[allow(deprecated)]
            symbols.push(DocumentSymbol {
                name: def.ident(),
                detail: Some(def.type_str()),
                kind: def.symbol_kind(),
                deprecated: None,
                range: Range::new(
                    doc.byte_to_pos(def.byte_idx()),
                    doc.byte_to_pos(def.byte_idx() + def.ident().len()),
                ),
                selection_range: Range::new(
                    doc.byte_to_pos(def.byte_idx()),
                    doc.byte_to_pos(def.byte_idx() + def.ident().len()),
                ),
                children: None,
                tags: None,
            })
        }
        symbols
    }
    /// highlight all references of a symbol
    fn document_highlights(
        &self,
        uri: &Url,
        doc: &Rope,
        // all references in the doc's syntax tree
        references: Vec<(String, usize)>,
        // byte_idx of symbol definition
        byte_idx: usize,
    ) -> Vec<DocumentHighlight> {
        // to find references we need to grab references from locations downward from the
        // definition
        for scope in self.scopes() {
            if &scope.url() == uri && scope.start() <= byte_idx && byte_idx <= scope.end() {
                return scope.document_highlights(uri, doc, references, byte_idx);
            }
        }
        // we should now be in the scope of the definition, so we can grab all references
        // in this scope. This also grabs references below this scope.
        references
            .iter()
            .filter(|x| self.start() <= x.1 && x.1 <= self.end())
            .map(|x| DocumentHighlight {
                range: Range::new(doc.byte_to_pos(x.1), doc.byte_to_pos(x.1 + x.0.len())),
                kind: None,
            })
            .collect()
    }
}
/// Simplified classification of a definition, for internal dispatch.
#[derive(Debug, Clone, Copy)]
pub enum DefinitionType {
    Port,
    Net,
    Data,
    Modport,
    Subroutine,
    ModuleInstantiation,
    GenericScope,
    Class,
}
/// A port declaration, optionally typed by an interface (and modport).
#[derive(Debug)]
pub struct PortDec {
    pub ident: String,
    pub byte_idx: usize,
    pub url: Url,
    pub type_str: String,
    pub completion_kind: CompletionItemKind,
    pub symbol_kind: SymbolKind,
    pub def_type: DefinitionType,
    // interface typing this port, if any
    pub interface: Option<String>,
    // modport restricting that interface, if any
    pub modport: Option<String>,
}

impl PortDec {
    /// Create an empty port declaration anchored to `url`.
    pub fn new(url: &Url) -> Self {
        Self {
            ident: String::new(),
            byte_idx: 0,
            url: url.to_owned(),
            type_str: String::new(),
            completion_kind: CompletionItemKind::PROPERTY,
            symbol_kind: SymbolKind::PROPERTY,
            def_type: DefinitionType::Port,
            interface: None,
            modport: None,
        }
    }
}

impl Definition for PortDec {
    fn ident(&self) -> String {
        self.ident.to_owned()
    }
    fn byte_idx(&self) -> usize {
        self.byte_idx
    }
    fn url(&self) -> Url {
        self.url.to_owned()
    }
    fn type_str(&self) -> String {
        self.type_str.to_owned()
    }
    fn completion_kind(&self) -> CompletionItemKind {
        self.completion_kind
    }
    fn symbol_kind(&self) -> SymbolKind {
        self.symbol_kind
    }
    fn def_type(&self) -> DefinitionType {
        self.def_type
    }
    fn starts_with(&self, token: &str) -> bool {
        self.ident.starts_with(token)
    }
    fn completion(&self) -> CompletionItem {
        let detail = clean_type_str(&self.type_str, &self.ident);
        CompletionItem {
            label: self.ident.to_owned(),
            detail: Some(detail),
            kind: Some(self.completion_kind),
            ..CompletionItem::default()
        }
    }
    /// `port.` completes to the members of the port's interface; when a
    /// modport is given, to the completions of that modport instead.
    fn dot_completion(&self, scope_tree: &GenericScope) -> Vec<CompletionItem> {
        if let Some(interface) = &self.interface {
            for scope in &scope_tree.scopes {
                if &scope.ident() == interface {
                    return match &self.modport {
                        Some(modport) => scope
                            .defs()
                            .iter()
                            .find(|def| def.starts_with(modport))
                            .map(|def| def.dot_completion(scope_tree))
                            .unwrap_or_default(),
                        None => scope
                            .defs()
                            .iter()
                            .filter(|def| !def.starts_with(&scope.ident()))
                            .map(|def| def.completion())
                            .collect(),
                    };
                }
            }
        }
        Vec::new()
    }
}
/// A generic variable/net definition with no extra structure.
#[derive(Debug)]
pub struct GenericDec {
    pub ident: String,
    pub byte_idx: usize,
    pub url: Url,
    pub type_str: String,
    pub completion_kind: CompletionItemKind,
    pub symbol_kind: SymbolKind,
    pub def_type: DefinitionType,
}

impl GenericDec {
    /// Create an empty generic declaration anchored to `url`.
    pub fn new(url: &Url) -> Self {
        Self {
            ident: String::new(),
            byte_idx: 0,
            url: url.to_owned(),
            type_str: String::new(),
            completion_kind: CompletionItemKind::VARIABLE,
            // FIXME: check if this replacement is correct
            symbol_kind: SymbolKind::NULL,
            def_type: DefinitionType::Net,
        }
    }
}

impl Definition for GenericDec {
    fn ident(&self) -> String {
        self.ident.to_owned()
    }
    fn byte_idx(&self) -> usize {
        self.byte_idx
    }
    fn url(&self) -> Url {
        self.url.to_owned()
    }
    fn type_str(&self) -> String {
        self.type_str.to_owned()
    }
    fn completion_kind(&self) -> CompletionItemKind {
        self.completion_kind
    }
    fn symbol_kind(&self) -> SymbolKind {
        self.symbol_kind
    }
    fn def_type(&self) -> DefinitionType {
        self.def_type
    }
    fn starts_with(&self, token: &str) -> bool {
        self.ident.starts_with(token)
    }
    fn completion(&self) -> CompletionItem {
        let detail = clean_type_str(&self.type_str, &self.ident);
        CompletionItem {
            label: self.ident.to_owned(),
            detail: Some(detail),
            kind: Some(self.completion_kind),
            ..CompletionItem::default()
        }
    }
    fn dot_completion(&self, _: &GenericScope) -> Vec<CompletionItem> {
        // a plain variable has no members to complete
        Vec::new()
    }
}
/// A package import (`import pkg::*;` or `import pkg::ident;`).
#[derive(Debug)]
pub struct PackageImport {
    pub ident: String,
    pub byte_idx: usize,
    pub url: Url,
    pub type_str: String,
    pub completion_kind: CompletionItemKind,
    pub symbol_kind: SymbolKind,
    pub def_type: DefinitionType,
    // true for wildcard imports: `import pkg::*;`
    pub asterisk: bool,
    // the specific identifier imported, for `import pkg::ident;`
    pub import_ident: Option<String>,
}

impl PackageImport {
    /// Create an empty package import anchored to `url`.
    pub fn new(url: &Url) -> Self {
        Self {
            ident: String::new(),
            byte_idx: 0,
            url: url.clone(),
            type_str: String::new(),
            completion_kind: CompletionItemKind::TEXT,
            symbol_kind: SymbolKind::NAMESPACE,
            def_type: DefinitionType::Data,
            asterisk: false,
            import_ident: None,
        }
    }
}

impl Definition for PackageImport {
    fn ident(&self) -> String {
        self.ident.clone()
    }
    fn byte_idx(&self) -> usize {
        self.byte_idx
    }
    fn url(&self) -> Url {
        self.url.clone()
    }
    fn type_str(&self) -> String {
        self.type_str.clone()
    }
    fn completion_kind(&self) -> CompletionItemKind {
        self.completion_kind
    }
    fn symbol_kind(&self) -> SymbolKind {
        self.symbol_kind
    }
    fn def_type(&self) -> DefinitionType {
        self.def_type
    }
    fn starts_with(&self, token: &str) -> bool {
        self.ident.starts_with(token)
    }
    fn completion(&self) -> CompletionItem {
        CompletionItem {
            label: self.ident.clone(),
            // fixed: was `&self.ident.clone()` — cloned a String only to
            // immediately borrow it; borrow the field directly instead
            detail: Some(clean_type_str(&self.type_str, &self.ident)),
            kind: Some(self.completion_kind),
            ..CompletionItem::default()
        }
    }
    fn dot_completion(&self, _: &GenericScope) -> Vec<CompletionItem> {
        // an import brings names into the current scope; nothing follows `.`
        Vec::new()
    }
}
/// A function or task declaration; also a scope, since it owns its arguments
/// and local declarations.
#[derive(Debug)]
pub struct SubDec {
    pub ident: String,
    pub byte_idx: usize,
    pub url: Url,
    pub type_str: String,
    pub completion_kind: CompletionItemKind,
    pub symbol_kind: SymbolKind,
    pub def_type: DefinitionType,
    pub start: usize,
    pub end: usize,
    pub defs: Vec<Box<dyn Definition>>,
    pub scopes: Vec<Box<dyn Scope>>,
}

impl SubDec {
    /// Create an empty subroutine declaration anchored to `url`.
    pub fn new(url: &Url) -> Self {
        Self {
            ident: String::new(),
            byte_idx: 0,
            url: url.to_owned(),
            type_str: String::new(),
            completion_kind: CompletionItemKind::FUNCTION,
            symbol_kind: SymbolKind::FUNCTION,
            def_type: DefinitionType::Subroutine,
            start: 0,
            end: 0,
            defs: Vec::new(),
            scopes: Vec::new(),
        }
    }
}

impl Definition for SubDec {
    fn ident(&self) -> String {
        self.ident.to_owned()
    }
    fn byte_idx(&self) -> usize {
        self.byte_idx
    }
    fn url(&self) -> Url {
        self.url.to_owned()
    }
    fn type_str(&self) -> String {
        self.type_str.to_owned()
    }
    fn completion_kind(&self) -> CompletionItemKind {
        self.completion_kind
    }
    fn symbol_kind(&self) -> SymbolKind {
        self.symbol_kind
    }
    fn def_type(&self) -> DefinitionType {
        self.def_type
    }
    fn starts_with(&self, token: &str) -> bool {
        self.ident.starts_with(token)
    }
    fn completion(&self) -> CompletionItem {
        let detail = clean_type_str(&self.type_str, &self.ident);
        CompletionItem {
            label: self.ident.to_owned(),
            detail: Some(detail),
            kind: Some(self.completion_kind),
            ..CompletionItem::default()
        }
    }
    fn dot_completion(&self, _: &GenericScope) -> Vec<CompletionItem> {
        // subroutine internals are not reachable through `.`
        Vec::new()
    }
}

impl Scope for SubDec {
    fn start(&self) -> usize {
        self.start
    }
    fn end(&self) -> usize {
        self.end
    }
    fn defs(&self) -> &Vec<Box<dyn Definition>> {
        &self.defs
    }
    fn scopes(&self) -> &Vec<Box<dyn Scope>> {
        &self.scopes
    }
}
/// A modport declaration inside an interface, listing its visible ports.
#[derive(Debug)]
pub struct ModportDec {
    pub ident: String,
    pub byte_idx: usize,
    pub url: Url,
    pub type_str: String,
    pub completion_kind: CompletionItemKind,
    pub symbol_kind: SymbolKind,
    pub def_type: DefinitionType,
    pub ports: Vec<Box<dyn Definition>>,
}

impl ModportDec {
    /// Create an empty modport declaration anchored to `url`.
    pub fn new(url: &Url) -> Self {
        Self {
            ident: String::new(),
            byte_idx: 0,
            url: url.to_owned(),
            type_str: String::new(),
            completion_kind: CompletionItemKind::INTERFACE,
            symbol_kind: SymbolKind::INTERFACE,
            def_type: DefinitionType::Modport,
            ports: Vec::new(),
        }
    }
}

impl Definition for ModportDec {
    fn ident(&self) -> String {
        self.ident.to_owned()
    }
    fn byte_idx(&self) -> usize {
        self.byte_idx
    }
    fn url(&self) -> Url {
        self.url.to_owned()
    }
    fn type_str(&self) -> String {
        self.type_str.to_owned()
    }
    fn completion_kind(&self) -> CompletionItemKind {
        self.completion_kind
    }
    fn symbol_kind(&self) -> SymbolKind {
        self.symbol_kind
    }
    fn def_type(&self) -> DefinitionType {
        self.def_type
    }
    fn starts_with(&self, token: &str) -> bool {
        self.ident.starts_with(token)
    }
    fn completion(&self) -> CompletionItem {
        let detail = clean_type_str(&self.type_str, &self.ident);
        CompletionItem {
            label: self.ident.to_owned(),
            detail: Some(detail),
            kind: Some(self.completion_kind),
            ..CompletionItem::default()
        }
    }
    /// `modport.` completes to the modport's declared ports.
    fn dot_completion(&self, _: &GenericScope) -> Vec<CompletionItem> {
        let mut items = Vec::new();
        for port in &self.ports {
            items.push(port.completion());
        }
        items
    }
}
/// A module instantiation; `mod_ident` names the instantiated module.
#[derive(Debug)]
pub struct ModInst {
    pub ident: String,
    pub byte_idx: usize,
    pub url: Url,
    pub type_str: String,
    pub completion_kind: CompletionItemKind,
    pub symbol_kind: SymbolKind,
    pub def_type: DefinitionType,
    pub mod_ident: String,
}

impl ModInst {
    /// Create an empty module instantiation anchored to `url`.
    pub fn new(url: &Url) -> Self {
        Self {
            ident: String::new(),
            byte_idx: 0,
            url: url.to_owned(),
            type_str: String::new(),
            completion_kind: CompletionItemKind::MODULE,
            symbol_kind: SymbolKind::MODULE,
            def_type: DefinitionType::ModuleInstantiation,
            mod_ident: String::new(),
        }
    }
}

impl Definition for ModInst {
    fn ident(&self) -> String {
        self.ident.to_owned()
    }
    fn byte_idx(&self) -> usize {
        self.byte_idx
    }
    fn url(&self) -> Url {
        self.url.to_owned()
    }
    fn type_str(&self) -> String {
        self.type_str.to_owned()
    }
    fn completion_kind(&self) -> CompletionItemKind {
        self.completion_kind
    }
    fn symbol_kind(&self) -> SymbolKind {
        self.symbol_kind
    }
    fn def_type(&self) -> DefinitionType {
        self.def_type
    }
    fn starts_with(&self, token: &str) -> bool {
        self.ident.starts_with(token)
    }
    fn completion(&self) -> CompletionItem {
        let detail = clean_type_str(&self.type_str, &self.ident);
        CompletionItem {
            label: self.ident.to_owned(),
            detail: Some(detail),
            kind: Some(self.completion_kind),
            ..CompletionItem::default()
        }
    }
    /// `instance.` completes to the members of the instantiated module.
    fn dot_completion(&self, scope_tree: &GenericScope) -> Vec<CompletionItem> {
        match scope_tree
            .scopes
            .iter()
            .find(|scope| scope.ident() == self.mod_ident)
        {
            Some(module) => module
                .defs()
                .iter()
                .filter(|def| !def.starts_with(&module.ident()))
                .map(|def| def.completion())
                .collect(),
            None => Vec::new(),
        }
    }
}
/// A general-purpose scope (module, interface, package, ...) and the root
/// type of the scope tree (the global scope is a `GenericScope`).
#[derive(Debug)]
pub struct GenericScope {
    pub ident: String,
    pub byte_idx: usize,
    pub start: usize,
    pub end: usize,
    pub url: Url,
    pub type_str: String,
    pub completion_kind: CompletionItemKind,
    pub symbol_kind: SymbolKind,
    pub def_type: DefinitionType,
    pub defs: Vec<Box<dyn Definition>>,
    pub scopes: Vec<Box<dyn Scope>>,
}

impl GenericScope {
    /// Create an empty scope anchored to `url`.
    pub fn new(url: &Url) -> Self {
        Self {
            ident: String::new(),
            byte_idx: 0,
            start: 0,
            end: 0,
            url: url.to_owned(),
            type_str: String::new(),
            completion_kind: CompletionItemKind::MODULE,
            symbol_kind: SymbolKind::MODULE,
            def_type: DefinitionType::GenericScope,
            defs: Vec::new(),
            scopes: Vec::new(),
        }
    }
    /// Test helper: whether any direct child scope's ident starts with `scope_ident`.
    #[cfg(test)]
    pub fn contains_scope(&self, scope_ident: &str) -> bool {
        self.scopes.iter().any(|scope| scope.starts_with(scope_ident))
    }
}

impl Definition for GenericScope {
    fn ident(&self) -> String {
        self.ident.to_owned()
    }
    fn byte_idx(&self) -> usize {
        self.byte_idx
    }
    fn url(&self) -> Url {
        self.url.to_owned()
    }
    fn type_str(&self) -> String {
        self.type_str.to_owned()
    }
    fn completion_kind(&self) -> CompletionItemKind {
        self.completion_kind
    }
    fn symbol_kind(&self) -> SymbolKind {
        self.symbol_kind
    }
    fn def_type(&self) -> DefinitionType {
        self.def_type
    }
    fn starts_with(&self, token: &str) -> bool {
        self.ident.starts_with(token)
    }
    fn completion(&self) -> CompletionItem {
        let detail = clean_type_str(&self.type_str, &self.ident);
        CompletionItem {
            label: self.ident.to_owned(),
            detail: Some(detail),
            kind: Some(self.completion_kind),
            ..CompletionItem::default()
        }
    }
    /// `scope.` completes to the members of the like-named top-level scope.
    fn dot_completion(&self, scope_tree: &GenericScope) -> Vec<CompletionItem> {
        match scope_tree
            .scopes()
            .iter()
            .find(|scope| scope.ident() == self.ident)
        {
            Some(scope) => scope
                .defs()
                .iter()
                .filter(|def| !def.starts_with(&scope.ident()))
                .map(|def| def.completion())
                .collect(),
            None => Vec::new(),
        }
    }
}

impl Scope for GenericScope {
    fn start(&self) -> usize {
        self.start
    }
    fn end(&self) -> usize {
        self.end
    }
    fn defs(&self) -> &Vec<Box<dyn Definition>> {
        &self.defs
    }
    fn scopes(&self) -> &Vec<Box<dyn Scope>> {
        &self.scopes
    }
}
/// A class (or interface class) declaration, with its inheritance info.
#[derive(Debug)]
pub struct ClassDec {
    pub ident: String,
    pub byte_idx: usize,
    pub start: usize,
    pub end: usize,
    pub url: Url,
    pub type_str: String,
    pub completion_kind: CompletionItemKind,
    pub symbol_kind: SymbolKind,
    pub def_type: DefinitionType,
    pub defs: Vec<Box<dyn Definition>>,
    pub scopes: Vec<Box<dyn Scope>>,
    // extended class path and its package qualifier, if any
    pub extends: (Vec<String>, Option<String>),
    // implemented (class, package qualifier) pairs
    pub implements: Vec<(String, Option<String>)>,
    // true for `interface class`
    pub interface: bool,
}

impl ClassDec {
    /// Create an empty class declaration anchored to `url`.
    pub fn new(url: &Url) -> Self {
        Self {
            ident: String::new(),
            byte_idx: 0,
            start: 0,
            end: 0,
            url: url.to_owned(),
            type_str: String::new(),
            completion_kind: CompletionItemKind::CLASS,
            symbol_kind: SymbolKind::CLASS,
            def_type: DefinitionType::Class,
            defs: Vec::new(),
            scopes: Vec::new(),
            extends: (Vec::new(), None),
            implements: Vec::new(),
            interface: false,
        }
    }
}

impl Definition for ClassDec {
    fn ident(&self) -> String {
        self.ident.to_owned()
    }
    fn byte_idx(&self) -> usize {
        self.byte_idx
    }
    fn url(&self) -> Url {
        self.url.to_owned()
    }
    fn type_str(&self) -> String {
        self.type_str.to_owned()
    }
    fn completion_kind(&self) -> CompletionItemKind {
        self.completion_kind
    }
    fn symbol_kind(&self) -> SymbolKind {
        self.symbol_kind
    }
    fn def_type(&self) -> DefinitionType {
        self.def_type
    }
    fn starts_with(&self, token: &str) -> bool {
        self.ident.starts_with(token)
    }
    fn completion(&self) -> CompletionItem {
        let detail = clean_type_str(&self.type_str, &self.ident);
        CompletionItem {
            label: self.ident.to_owned(),
            detail: Some(detail),
            kind: Some(self.completion_kind),
            ..CompletionItem::default()
        }
    }
    /// `class.` completes to the members of the like-named top-level scope.
    fn dot_completion(&self, scope_tree: &GenericScope) -> Vec<CompletionItem> {
        match scope_tree
            .scopes()
            .iter()
            .find(|scope| scope.ident() == self.ident)
        {
            Some(scope) => scope
                .defs()
                .iter()
                .filter(|def| !def.starts_with(&scope.ident()))
                .map(|def| def.completion())
                .collect(),
            None => Vec::new(),
        }
    }
}

impl Scope for ClassDec {
    fn start(&self) -> usize {
        self.start
    }
    fn end(&self) -> usize {
        self.end
    }
    fn defs(&self) -> &Vec<Box<dyn Definition>> {
        &self.defs
    }
    fn scopes(&self) -> &Vec<Box<dyn Scope>> {
        &self.scopes
    }
}

File diff suppressed because it is too large Load Diff

496
src/diagnostics.rs Normal file
View File

@ -0,0 +1,496 @@
use crate::server::ProjectConfig;
#[cfg(feature = "slang")]
use path_clean::PathClean;
use regex::Regex;
use ropey::Rope;
#[cfg(feature = "slang")]
use std::env::current_dir;
#[cfg(feature = "slang")]
use std::path::Path;
use std::path::PathBuf;
use std::process::{Command, Stdio};
use tower_lsp::lsp_types::*;
#[cfg(feature = "slang")]
use veridian_slang::slang_compile;
use walkdir::DirEntry;
#[cfg(feature = "slang")]
use walkdir::WalkDir;
/// Collect diagnostics for `uri`: syntax diagnostics from verilator or
/// verible (whichever is enabled), plus slang compilation diagnostics over
/// the whole project (`files` + workdir search when enabled).
///
/// In-memory test fixtures (`file:///test*` under `cfg!(test)`) skip all
/// external tools and return an empty report.
#[cfg(feature = "slang")]
pub fn get_diagnostics(
    uri: Url,
    rope: &Rope,
    files: Vec<Url>,
    conf: &ProjectConfig,
) -> PublishDiagnosticsParams {
    if cfg!(test) && uri.to_string().starts_with("file:///test") {
        return PublishDiagnosticsParams {
            uri,
            diagnostics: Vec::new(),
            version: None,
        };
    }
    let paths = get_paths(files, conf.auto_search_workdir);
    // Prefer verilator; fall back to verible; otherwise no syntax linting.
    let mut diagnostics = if conf.verilator.syntax.enabled {
        match uri.to_file_path() {
            Ok(path) => verilator_syntax(
                rope,
                path,
                &conf.verilator.syntax.path,
                &conf.verilator.syntax.args,
            )
            .unwrap_or_default(),
            // Non-file URIs cannot be passed to verilator.
            Err(_) => Vec::new(),
        }
    } else if conf.verible.syntax.enabled {
        verible_syntax(rope, &conf.verible.syntax.path, &conf.verible.syntax.args)
            .unwrap_or_default()
    } else {
        Vec::new()
    };
    // NOTE(review): unwrap on slang_compile will panic if compilation itself
    // errors out (as in the original code); consider surfacing the error as
    // a diagnostic instead.
    diagnostics.append(&mut parse_report(
        uri.clone(),
        slang_compile(paths).unwrap(),
    ));
    PublishDiagnosticsParams {
        uri,
        diagnostics,
        version: None,
    }
}
#[cfg(not(feature = "slang"))]
/// Compute diagnostics for `uri` using verilator or verible (whichever is
/// enabled); without the `slang` feature no project-wide elaboration runs.
pub fn get_diagnostics(
    uri: Url,
    rope: &Rope,
    #[allow(unused_variables)] files: Vec<Url>,
    conf: &ProjectConfig,
) -> PublishDiagnosticsParams {
    // unit tests use fake `file:///test*` URIs with no file on disk, so
    // external tools are skipped for them
    let diagnostics = if !(cfg!(test) && (uri.to_string().starts_with("file:///test"))) {
        if conf.verilator.syntax.enabled {
            // verilator is invoked with the file's on-disk path; an
            // unconvertible URI just yields no diagnostics
            uri.to_file_path()
                .ok()
                .and_then(|path| {
                    verilator_syntax(
                        rope,
                        path,
                        &conf.verilator.syntax.path,
                        &conf.verilator.syntax.args,
                    )
                })
                .unwrap_or_default()
        } else if conf.verible.syntax.enabled {
            verible_syntax(rope, &conf.verible.syntax.path, &conf.verible.syntax.args)
                .unwrap_or_default()
        } else {
            Vec::new()
        }
    } else {
        Vec::new()
    };
    PublishDiagnosticsParams {
        uri,
        diagnostics,
        version: None,
    }
}
/// recursively find source file paths from working directory
/// and open files
#[cfg(feature = "slang")]
fn get_paths(files: Vec<Url>, search_workdir: bool) -> Vec<PathBuf> {
    // true when `path` has a SystemVerilog/Verilog source or header extension
    fn is_sv_source(path: &Path) -> bool {
        matches!(
            path.extension().and_then(|e| e.to_str()),
            Some("sv" | "svh" | "v" | "vh")
        )
    }
    let mut paths: Vec<PathBuf> = Vec::new();
    // check recursively from working dir for source files
    if search_workdir {
        let walker = WalkDir::new(".").into_iter();
        // `.flatten()` skips unreadable entries instead of panicking, and
        // `is_sv_source` guards against extension-less files — the previous
        // `extension().unwrap()` panicked on files like `Makefile`
        for entry in walker.filter_entry(|e| !is_hidden(e)).flatten() {
            if entry.file_type().is_file() && is_sv_source(entry.path()) {
                let entry_path = entry.path().to_path_buf();
                // de-duplicate, consistent with the opened-files loop below
                if !paths.contains(&entry_path) {
                    paths.push(entry_path);
                }
            }
        }
    }
    // check recursively from opened files for source files
    for file in files {
        if let Ok(path) = file.to_file_path() {
            if !paths.contains(&path) {
                // a root path without a parent is simply skipped
                if let Some(parent) = path.parent() {
                    let walker = WalkDir::new(parent).into_iter();
                    for entry in walker.filter_entry(|e| !is_hidden(e)).flatten() {
                        if entry.file_type().is_file() && is_sv_source(entry.path()) {
                            let entry_path = entry.path().to_path_buf();
                            if !paths.contains(&entry_path) {
                                paths.push(entry_path);
                            }
                        }
                    }
                }
            }
        }
    }
    paths
}
/// Returns true when a directory entry's file name begins with a dot.
/// Non-UTF-8 file names are treated as not hidden.
pub fn is_hidden(entry: &DirEntry) -> bool {
    match entry.file_name().to_str() {
        Some(name) => name.starts_with('.'),
        None => false,
    }
}
#[cfg(feature = "slang")]
/// parse a report from slang
///
/// Each diagnostic line has the shape `path:line:col: severity: message`
/// (the severity/message fields keep their leading space). Lines that do not
/// match this shape are skipped — the previous `unwrap`s panicked on any
/// non-diagnostic output line.
fn parse_report(uri: Url, report: String) -> Vec<Diagnostic> {
    let mut diagnostics: Vec<Diagnostic> = Vec::new();
    // resolve the request URI once; a non-file URI yields no diagnostics
    let file_path = match uri.to_file_path() {
        Ok(path) => path,
        Err(_) => return diagnostics,
    };
    for line in report.lines() {
        let diag: Vec<&str> = line.splitn(5, ':').collect();
        // ignore lines that are not complete diagnostics
        if diag.len() < 5 {
            continue;
        }
        // only keep diagnostics that refer to the requested file
        if absolute_path(diag[0]) != file_path.as_os_str() {
            continue;
        }
        // slang positions are 1-based; LSP positions are 0-based
        let (line_no, col) = match (diag[1].parse::<u32>(), diag[2].parse::<u32>()) {
            (Ok(l), Ok(c)) => (l, c),
            _ => continue,
        };
        let pos = Position::new(line_no - 1, col - 1);
        diagnostics.push(Diagnostic::new(
            Range::new(pos, pos),
            slang_severity(diag[3]),
            None,
            Some("slang".to_owned()),
            diag[4].to_owned(),
            None,
            None,
        ))
    }
    diagnostics
}
#[cfg(feature = "slang")]
/// Map a slang severity field (including its leading space) to an LSP
/// severity; unknown severities map to `None`.
fn slang_severity(severity: &str) -> Option<DiagnosticSeverity> {
    let sev = match severity {
        " error" => DiagnosticSeverity::ERROR,
        " warning" => DiagnosticSeverity::WARNING,
        " note" => DiagnosticSeverity::INFORMATION,
        _ => return None,
    };
    Some(sev)
}
#[cfg(feature = "slang")]
// convert relative path to absolute
// (resolved against the server's working directory and normalized with
// `PathClean`; used to compare slang report paths against the request URI)
fn absolute_path(path_str: &str) -> PathBuf {
    let path = Path::new(path_str);
    current_dir().unwrap().join(path).clean()
}
/// convert captured severity string to DiagnosticSeverity
fn verilator_severity(severity: &str) -> Option<DiagnosticSeverity> {
match severity {
"Error" => Some(DiagnosticSeverity::ERROR),
s if s.starts_with("Warning") => Some(DiagnosticSeverity::WARNING),
// NOTE: afaik, verilator doesn't have an info or hint severity
_ => Some(DiagnosticSeverity::INFORMATION),
}
}
/// syntax checking using verilator --lint-only
///
/// The buffer contents are piped to verilator on stdin while `file_path` is
/// also passed as an argument; diagnostics reported for other (included)
/// files are filtered out. Returns `None` when verilator is unavailable or
/// exits successfully (no findings).
fn verilator_syntax(
    rope: &Rope,
    file_path: PathBuf,
    verilator_syntax_path: &str,
    verilator_syntax_args: &[String],
) -> Option<Vec<Diagnostic>> {
    let mut child = Command::new(verilator_syntax_path)
        .stdin(Stdio::piped())
        .stderr(Stdio::piped())
        .stdout(Stdio::piped())
        .args(verilator_syntax_args)
        .arg(file_path.to_str()?)
        .spawn()
        .ok()?;
    // compiled once; matches lines like
    // "%Warning-WIDTH: file.sv:3:5: msg" or "%Error: file.sv:3: msg" —
    // note the "-TYPE" tag and the column are both optional
    static RE: std::sync::OnceLock<Regex> = std::sync::OnceLock::new();
    let re = RE.get_or_init(|| {
        Regex::new(
            r"%(?P<severity>Error|Warning)(-(?P<warning_type>[A-Z0-9_]+))?: (?P<filepath>[^:]+):(?P<line>\d+):((?P<col>\d+):)? ?(?P<message>.*)",
        )
        .unwrap()
    });
    // write file to stdin, read output from stdout
    rope.write_to(child.stdin.as_mut()?).ok()?;
    let output = child.wait_with_output().ok()?;
    if !output.status.success() {
        let mut diags: Vec<Diagnostic> = Vec::new();
        let raw_output = String::from_utf8(output.stderr).ok()?;
        let filtered_output = raw_output
            .lines()
            .filter(|line| line.starts_with('%'))
            .collect::<Vec<&str>>();
        for error in filtered_output {
            let caps = match re.captures(error) {
                Some(caps) => caps,
                None => continue,
            };
            // check if diagnostic is for this file, since verilator can provide diagnostics for
            // included files
            if caps.name("filepath")?.as_str() != file_path.to_str().unwrap_or("") {
                continue;
            }
            let severity = verilator_severity(caps.name("severity")?.as_str());
            // verilator positions are 1-based, LSP positions are 0-based;
            // a missing column defaults to 1
            let line: u32 = caps.name("line")?.as_str().parse().ok()?;
            let col: u32 = caps.name("col").map_or("1", |m| m.as_str()).parse().ok()?;
            let pos = Position::new(line - 1, col - 1);
            let msg = match severity {
                Some(DiagnosticSeverity::ERROR) => caps.name("message")?.as_str().to_string(),
                Some(DiagnosticSeverity::WARNING) => format!(
                    "{}: {}",
                    // BUG FIX: the `-TYPE` tag is optional in the regex; the
                    // previous `?` here returned None — dropping *all*
                    // diagnostics — as soon as one warning lacked a tag
                    caps.name("warning_type").map_or("Warning", |m| m.as_str()),
                    caps.name("message")?.as_str()
                ),
                _ => "".to_string(),
            };
            diags.push(Diagnostic::new(
                Range::new(pos, pos),
                severity,
                None,
                Some("verilator".to_string()),
                msg,
                None,
                None,
            ));
        }
        Some(diags)
    } else {
        None
    }
}
/// syntax checking using verible-verilog-syntax
///
/// The source text is piped to verible on stdin ("-"); reported syntax
/// errors are converted to LSP diagnostics. Returns `None` when the tool is
/// unavailable or exits successfully (no findings).
fn verible_syntax(
    rope: &Rope,
    verible_syntax_path: &str,
    verible_syntax_args: &[String],
) -> Option<Vec<Diagnostic>> {
    let mut child = Command::new(verible_syntax_path)
        .stdin(Stdio::piped())
        .stderr(Stdio::piped())
        .stdout(Stdio::piped())
        .args(verible_syntax_args)
        .arg("-")
        .spawn()
        .ok()?;
    // compiled once; matches "file:line:col[-endcol]: message ..."
    static RE: std::sync::OnceLock<Regex> = std::sync::OnceLock::new();
    let re = RE.get_or_init(|| {
        Regex::new(
            r"^.+:(?P<line>\d*):(?P<startcol>\d*)(?:-(?P<endcol>\d*))?:\s(?P<message>.*)\s.*$",
        )
        .unwrap()
    });
    // write file to stdin, read output from stdout
    rope.write_to(child.stdin.as_mut()?).ok()?;
    let output = child.wait_with_output().ok()?;
    if !output.status.success() {
        let mut diags: Vec<Diagnostic> = Vec::new();
        let raw_output = String::from_utf8(output.stdout).ok()?;
        for error in raw_output.lines() {
            // BUG FIX: skip lines that don't match instead of abandoning the
            // whole batch — the previous `re.captures(error)?` returned None
            // (dropping every diagnostic) on the first unexpected line
            let caps = match re.captures(error) {
                Some(caps) => caps,
                None => continue,
            };
            // `\d*` can match empty, so the parses may fail; skip such lines
            let line: u32 = match caps.name("line").and_then(|m| m.as_str().parse().ok()) {
                Some(l) => l,
                None => continue,
            };
            let startcol: u32 = match caps.name("startcol").and_then(|m| m.as_str().parse().ok()) {
                Some(c) => c,
                None => continue,
            };
            // the column range is either a single column or `start-end`
            let endcol: Option<u32> = caps.name("endcol").and_then(|e| e.as_str().parse().ok());
            // verible positions are 1-based, LSP positions are 0-based
            let start_pos = Position::new(line - 1, startcol - 1);
            let end_pos = Position::new(line - 1, endcol.unwrap_or(startcol) - 1);
            diags.push(Diagnostic::new(
                Range::new(start_pos, end_pos),
                Some(DiagnosticSeverity::ERROR),
                None,
                Some("verible".to_string()),
                caps.name("message")?.as_str().to_string(),
                None,
                None,
            ));
        }
        Some(diags)
    } else {
        None
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::support::test_init;
    use std::fs::File;
    use std::io::Write;
    use tempdir::TempDir;
    // requires the slang feature; checks that a known out-of-bounds index
    // warning is reported at the expected position
    #[test]
    #[cfg(feature = "slang")]
    fn test_diagnostics() {
        test_init();
        let uri = Url::from_file_path(absolute_path("test_data/diag/diag_test.sv")).unwrap();
        let expected = PublishDiagnosticsParams::new(
            uri.clone(),
            vec![Diagnostic::new(
                Range::new(Position::new(3, 13), Position::new(3, 13)),
                Some(DiagnosticSeverity::WARNING),
                None,
                Some("slang".to_owned()),
                " cannot refer to element 2 of \'logic[1:0]\' [-Windex-oob]".to_owned(),
                None,
                None,
            )],
            None,
        );
        let diag = get_diagnostics(
            uri.clone(),
            &Rope::default(),
            vec![uri],
            &ProjectConfig::default(),
        );
        assert_eq!(diag.uri, expected.uri);
        assert_eq!(diag.version, expected.version);
        // only the last diagnostic is pinned; earlier ones may vary
        assert_eq!(diag.diagnostics.last(), expected.diagnostics.last());
    }
    // an unsaved buffer (file:// URI without a file on disk) must not panic
    #[test]
    fn test_unsaved_file() {
        test_init();
        let uri = Url::parse("file://test.sv").unwrap();
        get_diagnostics(
            uri.clone(),
            &Rope::default(),
            vec![uri],
            &ProjectConfig::default(),
        );
    }
    // requires verible-verilog-syntax on PATH
    #[test]
    fn test_verible_syntax() {
        let text = r#"module test;
  logic abc;
  logic abcd;
  a
endmodule
"#;
        let doc = Rope::from_str(text);
        let errors = verible_syntax(&doc, "verible-verilog-syntax", &[])
            .expect("verible-verilog-syntax not found, test can not run");
        let expected: Vec<Diagnostic> = vec![Diagnostic {
            range: Range {
                start: Position {
                    line: 5,
                    character: 0,
                },
                end: Position {
                    line: 5,
                    character: 8,
                },
            },
            severity: Some(DiagnosticSeverity::ERROR),
            code: None,
            source: Some("verible".to_string()),
            message: "syntax error at token".to_string(),
            related_information: None,
            tags: None,
            code_description: None,
            data: None,
        }];
        assert_eq!(errors, expected);
    }
    // requires verilator on PATH
    #[test]
    fn test_verilator_syntax() {
        let text = r#"module test;
  logic abc;
  logic abcd;
  a
endmodule
"#;
        let doc = Rope::from_str(text);
        // verilator can't read from stdin so we must create a temp dir to place our
        // test file
        let dir = TempDir::new("verilator_test").unwrap();
        let file_path_1 = dir.path().join("test.sv");
        let mut f = File::create(&file_path_1).unwrap();
        f.write_all(text.as_bytes()).unwrap();
        f.sync_all().unwrap();
        let errors = verilator_syntax(
            &doc,
            file_path_1,
            "verilator",
            &[
                "--lint-only".to_string(),
                "--sv".to_string(),
                "-Wall".to_string(),
            ],
        )
        .expect("verilator not found, test can not run");
        drop(f);
        dir.close().unwrap();
        let expected: Vec<Diagnostic> = vec![Diagnostic {
            range: Range {
                start: Position {
                    line: 5,
                    character: 0,
                },
                end: Position {
                    line: 5,
                    character: 0,
                },
            },
            severity: Some(DiagnosticSeverity::ERROR),
            code: None,
            source: Some("verilator".to_string()),
            message: "syntax error, unexpected endmodule, expecting IDENTIFIER or randomize"
                .to_string(),
            related_information: None,
            tags: None,
            code_description: None,
            data: None,
        }];
        // only severity/position/substring are pinned, since the exact
        // message wording varies between verilator versions
        assert_eq!(errors[0].severity, expected[0].severity);
        assert_eq!(errors[0].range.start.line, expected[0].range.start.line);
        assert_eq!(errors[0].range.end.line, expected[0].range.end.line);
        assert!(errors[0].message.contains("syntax error"));
    }
}

173
src/format.rs Normal file
View File

@ -0,0 +1,173 @@
use crate::server::LSPServer;
use crate::sources::LSPSupport;
use log::info;
use ropey::Rope;
use std::process::{Command, Stdio};
use tower_lsp::lsp_types::*;
impl LSPServer {
    /// LSP `textDocument/formatting`: run verible-verilog-format over the
    /// whole document and return a single edit replacing the full text.
    pub fn formatting(&self, params: DocumentFormattingParams) -> Option<Vec<TextEdit>> {
        let uri = params.text_document.uri;
        info!("formatting {}", &uri);
        let file_id = self.srcs.get_id(&uri).to_owned();
        // ensure the file has been parsed at least once before reading it
        self.srcs.wait_parse_ready(file_id, false);
        let file = self.srcs.get_file(file_id)?;
        let file = file.read().ok()?;
        let conf = self.conf.read().unwrap();
        if conf.verible.format.enabled {
            Some(vec![TextEdit::new(
                // one edit spanning the entire document
                Range::new(
                    file.text.char_to_pos(0),
                    file.text.char_to_pos(file.text.len_chars()),
                ),
                format_document(
                    &file.text,
                    None,
                    &conf.verible.format.path,
                    &conf.verible.format.args,
                )?,
            )])
        } else {
            None
        }
    }
    /// LSP `textDocument/rangeFormatting`: format only the requested lines,
    /// but still return one whole-document replacement edit.
    pub fn range_formatting(&self, params: DocumentRangeFormattingParams) -> Option<Vec<TextEdit>> {
        let uri = params.text_document.uri;
        info!("range formatting {}", &uri);
        let file_id = self.srcs.get_id(&uri).to_owned();
        self.srcs.wait_parse_ready(file_id, false);
        let file = self.srcs.get_file(file_id)?;
        let file = file.read().ok()?;
        let conf = self.conf.read().unwrap();
        if conf.verible.format.enabled {
            Some(vec![TextEdit::new(
                file.text.char_range_to_range(0..file.text.len_chars()),
                format_document(
                    &file.text,
                    Some(params.range),
                    &conf.verible.format.path,
                    &conf.verible.format.args,
                )?,
            )])
        } else {
            None
        }
    }
}
/// format the document using verible-verilog-format
///
/// Pipes the rope to the formatter on stdin ("-") and returns the formatted
/// text, or `None` if the tool could not be run or reported failure. When
/// `range` is given, only those lines are formatted (verible is 1-based).
pub fn format_document(
    rope: &Rope,
    range: Option<Range>,
    verible_format_path: &str,
    verible_format_args: &[String],
) -> Option<String> {
    let mut cmd = Command::new(verible_format_path);
    cmd.stdin(Stdio::piped())
        .stderr(Stdio::piped())
        .stdout(Stdio::piped())
        .args(verible_format_args);
    // rangeFormatting: restrict to the requested lines, converted to 1-based
    if let Some(r) = range {
        cmd.arg("--lines")
            .arg(format!("{}-{}", r.start.line + 1, r.end.line + 1));
    }
    // "-" makes verible read the source from stdin
    let mut child = cmd.arg("-").spawn().ok()?;
    // write file to stdin, read output from stdout
    rope.write_to(child.stdin.as_mut()?).ok()?;
    let output = child.wait_with_output().ok()?;
    if !output.status.success() {
        return None;
    }
    info!("formatting succeeded");
    String::from_utf8(output.stdout).ok()
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::server::ProjectConfig;
    use crate::support::test_init;
    use which::which;
    // whole-document formatting; silently skipped when
    // verible-verilog-format is not installed
    #[test]
    fn test_formatting() {
        test_init();
        let text = r#"
module test;
  logic a;
  logic b;
endmodule"#;
        let text_fixed = r#"
module test;
  logic a;
  logic b;
endmodule
"#;
        let doc = Rope::from_str(&text);
        if which("verible-verilog-format").is_ok() {
            assert_eq!(
                format_document(
                    &doc,
                    None,
                    &ProjectConfig::default().verible.format.path,
                    &[]
                )
                .unwrap(),
                text_fixed.to_string()
            );
        }
    }
    // range formatting: only the first module (lines 0-4) is re-indented,
    // the second module is left untouched
    #[test]
    fn test_range_formatting() {
        test_init();
        let text = r#"module t1;
logic a;
logic b;
logic c;
endmodule
module t2;
logic a;
logic b;
logic c;
endmodule"#;
        let text_fixed = r#"module t1;
  logic a;
  logic b;
  logic c;
endmodule
module t2;
logic a;
logic b;
logic c;
endmodule
"#;
        let doc = Rope::from_str(&text);
        if which("verible-verilog-format").is_ok() {
            assert_eq!(
                format_document(
                    &doc,
                    Some(Range::new(Position::new(0, 0), Position::new(4, 9))),
                    &ProjectConfig::default().verible.format.path,
                    &[]
                )
                .unwrap(),
                text_fixed.to_string()
            );
        }
    }
}

9
src/lib.rs Normal file
View File

@ -0,0 +1,9 @@
#![recursion_limit = "256"]
pub mod completion;
pub mod definition;
pub mod diagnostics;
pub mod format;
pub mod server;
pub mod sources;
pub mod support;

35
src/main.rs Normal file
View File

@ -0,0 +1,35 @@
#![recursion_limit = "256"]
use log::info;
use std::sync::Arc;
use structopt::StructOpt;
use tower_lsp::{LspService, Server};
mod completion;
mod definition;
mod diagnostics;
mod format;
mod server;
mod sources;
#[cfg(test)]
mod support;
use server::Backend;
/// Command-line options; currently empty, so structopt only provides
/// `--help`/`--version` handling.
#[derive(StructOpt, Debug)]
#[structopt(name = "veridian", about = "A SystemVerilog/Verilog Language Server")]
struct Opt {}
#[tokio::main]
async fn main() {
    // parse args purely for --help/--version side effects
    let _ = Opt::from_args();
    // the logger handle is handed to the backend so the log level can be
    // changed later from the project config
    let log_handle = flexi_logger::Logger::with(flexi_logger::LogSpecification::info())
        .start()
        .unwrap();
    info!("starting veridian...");
    // serve the LSP over stdio
    let stdin = tokio::io::stdin();
    let stdout = tokio::io::stdout();
    let (service, messages) = LspService::new(|client| Arc::new(Backend::new(client, log_handle)));
    Server::new(stdin, stdout, messages).serve(service).await;
}

385
src/server.rs Normal file
View File

@ -0,0 +1,385 @@
use crate::sources::*;
use crate::completion::keyword::*;
use flexi_logger::LoggerHandle;
use log::{debug, info, warn};
use path_clean::PathClean;
use serde::{Deserialize, Serialize};
use std::env::current_dir;
use std::fs::File;
use std::io::Read;
use std::path::PathBuf;
use std::string::ToString;
use std::sync::{Mutex, RwLock};
use tower_lsp::jsonrpc::{Error, ErrorCode, Result};
use tower_lsp::lsp_types::*;
use tower_lsp::{Client, LanguageServer};
use which::which;
/// Language-server state shared by all request handlers.
pub struct LSPServer {
    // all tracked source files and their parse results
    pub srcs: Sources,
    // pre-built completion items for language keywords
    pub key_comps: Vec<CompletionItem>,
    // pre-built completion items for system tasks
    pub sys_tasks: Vec<CompletionItem>,
    // pre-built completion items for compiler directives
    pub directives: Vec<CompletionItem>,
    // project configuration (from veridian.yaml/.yml if present)
    pub conf: RwLock<ProjectConfig>,
    // logger handle used to adjust the log level at runtime
    pub log_handle: Mutex<Option<LoggerHandle>>,
}
impl LSPServer {
    /// Create a server with default config and the static completion sets
    /// (keywords, system tasks, directives) built up-front.
    pub fn new(log_handle: Option<LoggerHandle>) -> LSPServer {
        LSPServer {
            srcs: Sources::new(),
            key_comps: keyword_completions(KEYWORDS),
            sys_tasks: other_completions(SYS_TASKS),
            directives: other_completions(DIRECTIVES),
            conf: RwLock::new(ProjectConfig::default()),
            log_handle: Mutex::new(log_handle),
        }
    }
}
/// tower-lsp adapter: pairs the LSP client handle with the server state.
pub struct Backend {
    client: Client,
    server: LSPServer,
}
impl Backend {
    /// Wrap a tower-lsp client and a fresh `LSPServer`.
    pub fn new(client: Client, log_handle: LoggerHandle) -> Backend {
        Backend {
            client,
            server: LSPServer::new(Some(log_handle)),
        }
    }
}
/// Log verbosity accepted in the project config; serialized by strum to the
/// lowercase names that the logger spec parser expects.
#[derive(strum_macros::Display, Debug, Serialize, Deserialize)]
pub enum LogLevel {
    #[strum(serialize = "error")]
    Error,
    #[strum(serialize = "warn")]
    Warn,
    #[strum(serialize = "info")]
    Info,
    #[strum(serialize = "debug")]
    Debug,
    #[strum(serialize = "trace")]
    Trace,
}
/// Project configuration loaded from `veridian.yaml`/`veridian.yml`;
/// every field falls back to its default when absent (`serde(default)`).
#[derive(Debug, Serialize, Deserialize)]
#[serde(default)]
pub struct ProjectConfig {
    /// if true, recursively search the working directory for files to run diagnostics on
    pub auto_search_workdir: bool,
    /// list of directories with header files
    pub include_dirs: Vec<String>,
    /// list of directories to recursively search for SystemVerilog/Verilog sources
    pub source_dirs: Vec<String>,
    /// config options for verible tools
    pub verible: Verible,
    /// config options for verilator tools
    pub verilator: Verilator,
    /// log level
    pub log_level: LogLevel,
}
impl Default for ProjectConfig {
    // workdir auto-search on, Info logging, tool defaults from sub-configs
    fn default() -> Self {
        ProjectConfig {
            auto_search_workdir: true,
            include_dirs: Vec::new(),
            source_dirs: Vec::new(),
            verible: Verible::default(),
            verilator: Verilator::default(),
            log_level: LogLevel::Info,
        }
    }
}
/// Settings for the verible tool suite.
#[derive(Default, Debug, Serialize, Deserialize)]
#[serde(default)]
pub struct Verible {
    pub syntax: VeribleSyntax,
    pub format: VeribleFormat,
}
/// Settings for the verible-verilog-syntax checker.
#[derive(Debug, Serialize, Deserialize)]
#[serde(default)]
pub struct VeribleSyntax {
    // whether verible-based syntax checking is enabled
    pub enabled: bool,
    // binary name or path used to invoke the tool
    pub path: String,
    // extra command-line arguments passed through to the tool
    pub args: Vec<String>,
}
impl Default for VeribleSyntax {
    // enabled by default; availability is re-checked on PATH at initialize
    fn default() -> Self {
        Self {
            enabled: true,
            path: "verible-verilog-syntax".to_string(),
            args: Vec::new(),
        }
    }
}
/// Settings for the verilator tool suite.
#[derive(Debug, Default, Serialize, Deserialize)]
#[serde(default)]
pub struct Verilator {
    pub syntax: VerilatorSyntax,
}
/// Settings for verilator-based linting.
#[derive(Debug, Serialize, Deserialize)]
#[serde(default)]
pub struct VerilatorSyntax {
    // whether verilator-based linting is enabled
    pub enabled: bool,
    // binary name or path used to invoke the tool
    pub path: String,
    // extra command-line arguments passed through to the tool
    pub args: Vec<String>,
}
impl Default for VerilatorSyntax {
    // lint-only SystemVerilog mode with all warnings enabled
    fn default() -> Self {
        Self {
            enabled: true,
            path: "verilator".to_string(),
            args: vec![
                "--lint-only".to_string(),
                "--sv".to_string(),
                "-Wall".to_string(),
            ],
        }
    }
}
/// Settings for verible-verilog-format.
#[derive(Debug, Serialize, Deserialize)]
#[serde(default)]
pub struct VeribleFormat {
    // whether formatting support is enabled
    pub enabled: bool,
    // binary name or path used to invoke the formatter
    pub path: String,
    // extra command-line arguments passed through to the formatter
    pub args: Vec<String>,
}
impl Default for VeribleFormat {
    // enabled by default; availability is re-checked on PATH at initialize
    fn default() -> Self {
        Self {
            enabled: true,
            path: "verible-verilog-format".to_string(),
            args: Vec::new(),
        }
    }
}
fn read_config(root_uri: Option<Url>) -> anyhow::Result<ProjectConfig> {
let path = root_uri
.ok_or_else(|| anyhow::anyhow!("couldn't resolve workdir path"))?
.to_file_path()
.map_err(|_| anyhow::anyhow!("couldn't resolve workdir path"))?;
let mut config: Option<PathBuf> = None;
for dir in path.ancestors() {
let config_path = dir.join("veridian.yaml");
if config_path.exists() {
info!("found config: veridian.yaml");
config = Some(config_path);
break;
}
let config_path = dir.join("veridian.yml");
if config_path.exists() {
info!("found config: veridian.yml");
config = Some(config_path);
break;
}
}
let mut contents = String::new();
File::open(config.ok_or_else(|| anyhow::anyhow!("unable to read config file"))?)?
.read_to_string(&mut contents)?;
info!("reading config file");
Ok(serde_yaml::from_str(&contents)?)
}
// convert string path to absolute path
//
// returns None for nonexistent paths; relative paths are resolved against
// the server's working directory and normalized with `PathClean`
fn absolute_path(path_str: &str) -> Option<PathBuf> {
    let path = PathBuf::from(path_str);
    if !path.exists() {
        None
    } else if path.has_root() {
        Some(path)
    } else {
        Some(current_dir().unwrap().join(path).clean())
    }
}
#[tower_lsp::async_trait]
impl LanguageServer for Backend {
    /// LSP `initialize`: load the project config, configure logging and
    /// external tools, index the project, and advertise capabilities.
    async fn initialize(&self, params: InitializeParams) -> Result<InitializeResult> {
        // grab include dirs and source dirs from config, and convert to abs path
        let mut inc_dirs = self.server.srcs.include_dirs.write().unwrap();
        let mut src_dirs = self.server.srcs.source_dirs.write().unwrap();
        match read_config(params.root_uri) {
            Ok(conf) => {
                inc_dirs.extend(conf.include_dirs.iter().filter_map(|x| absolute_path(x)));
                debug!("{:#?}", inc_dirs);
                src_dirs.extend(conf.source_dirs.iter().filter_map(|x| absolute_path(x)));
                debug!("{:#?}", src_dirs);
                // apply the configured log level to the running logger
                let mut log_handle = self.server.log_handle.lock().unwrap();
                let log_handle = log_handle.as_mut();
                if let Some(handle) = log_handle {
                    handle
                        .parse_and_push_temp_spec(&conf.log_level.to_string())
                        .map_err(|e| Error {
                            code: ErrorCode::InvalidParams,
                            message: e.to_string().into(),
                            data: None,
                        })?;
                }
                *self.server.conf.write().unwrap() = conf;
            }
            Err(e) => {
                // a missing or broken config is not fatal — defaults are kept
                warn!("found errors in config file: {:#?}", e);
            }
        }
        // only enable external tools that are actually present on PATH
        let mut conf = self.server.conf.write().unwrap();
        conf.verible.syntax.enabled = which(&conf.verible.syntax.path).is_ok();
        if cfg!(feature = "slang") {
            info!("enabled linting with slang");
        }
        if conf.verilator.syntax.enabled {
            info!("enabled linting with verilator")
        } else if conf.verible.syntax.enabled {
            info!("enabled linting with verible-verilog-syntax")
        }
        conf.verible.format.enabled = which(&conf.verible.format.path).is_ok();
        if conf.verible.format.enabled {
            info!("enabled formatting with verible-verilog-format");
        } else {
            info!("formatting unavailable");
        }
        // release the dir locks before init() walks and parses the project
        drop(inc_dirs);
        drop(src_dirs);
        // parse all source files found from walking source dirs and include dirs
        self.server.srcs.init();
        Ok(InitializeResult {
            server_info: None,
            capabilities: ServerCapabilities {
                text_document_sync: Some(TextDocumentSyncCapability::Options(
                    TextDocumentSyncOptions {
                        open_close: Some(true),
                        change: Some(TextDocumentSyncKind::INCREMENTAL),
                        will_save: None,
                        will_save_wait_until: None,
                        save: Some(TextDocumentSyncSaveOptions::SaveOptions(SaveOptions {
                            include_text: None,
                        })),
                    },
                )),
                completion_provider: Some(CompletionOptions {
                    resolve_provider: Some(false),
                    // member access, system tasks, and macros/directives
                    trigger_characters: Some(vec![
                        ".".to_string(),
                        "$".to_string(),
                        "`".to_string(),
                    ]),
                    work_done_progress_options: WorkDoneProgressOptions {
                        work_done_progress: None,
                    },
                    all_commit_characters: None,
                    //TODO: check if correct
                    completion_item: None,
                }),
                definition_provider: Some(OneOf::Left(true)),
                hover_provider: Some(HoverProviderCapability::Simple(true)),
                document_symbol_provider: Some(OneOf::Left(true)),
                // formatting is only advertised when the formatter exists
                document_formatting_provider: Some(OneOf::Left(conf.verible.format.enabled)),
                document_range_formatting_provider: Some(OneOf::Left(conf.verible.format.enabled)),
                document_highlight_provider: Some(OneOf::Left(true)),
                ..ServerCapabilities::default()
            },
        })
    }
    async fn initialized(&self, _: InitializedParams) {
        self.client
            .log_message(MessageType::INFO, "veridian initialized!")
            .await;
    }
    async fn shutdown(&self) -> Result<()> {
        Ok(())
    }
    // document lifecycle: open/save publish fresh diagnostics, change only
    // applies edits (diagnostics wait until the next save)
    async fn did_open(&self, params: DidOpenTextDocumentParams) {
        let diagnostics = self.server.did_open(params);
        self.client
            .publish_diagnostics(
                diagnostics.uri,
                diagnostics.diagnostics,
                diagnostics.version,
            )
            .await;
    }
    async fn did_change(&self, params: DidChangeTextDocumentParams) {
        self.server.did_change(params);
    }
    async fn did_save(&self, params: DidSaveTextDocumentParams) {
        let diagnostics = self.server.did_save(params);
        self.client
            .publish_diagnostics(
                diagnostics.uri,
                diagnostics.diagnostics,
                diagnostics.version,
            )
            .await;
    }
    // language features all delegate to the synchronous LSPServer methods
    async fn completion(&self, params: CompletionParams) -> Result<Option<CompletionResponse>> {
        Ok(self.server.completion(params))
    }
    async fn goto_definition(
        &self,
        params: GotoDefinitionParams,
    ) -> Result<Option<GotoDefinitionResponse>> {
        Ok(self.server.goto_definition(params))
    }
    async fn hover(&self, params: HoverParams) -> Result<Option<Hover>> {
        Ok(self.server.hover(params))
    }
    async fn document_symbol(
        &self,
        params: DocumentSymbolParams,
    ) -> Result<Option<DocumentSymbolResponse>> {
        Ok(self.server.document_symbol(params))
    }
    async fn formatting(&self, params: DocumentFormattingParams) -> Result<Option<Vec<TextEdit>>> {
        Ok(self.server.formatting(params))
    }
    async fn range_formatting(
        &self,
        params: DocumentRangeFormattingParams,
    ) -> Result<Option<Vec<TextEdit>>> {
        Ok(self.server.range_formatting(params))
    }
    async fn document_highlight(
        &self,
        params: DocumentHighlightParams,
    ) -> Result<Option<Vec<DocumentHighlight>>> {
        Ok(self.server.document_highlight(params))
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // a partial config with extra unknown keys must still deserialize,
    // since every config struct carries #[serde(default)]
    #[test]
    fn test_config() {
        let config = r#"
auto_search_workdir: false
format: true
verible:
  syntax:
    enabled: true
    path: "verible-verilog-syntax"
  format:
    args:
      - --net_variable_alignment=align
log_level: Info
"#;
        let config = serde_yaml::from_str::<ProjectConfig>(config);
        dbg!(&config);
        assert!(config.is_ok());
    }
}

629
src/sources.rs Normal file
View File

@ -0,0 +1,629 @@
use crate::definition::def_types::*;
use crate::definition::get_scopes;
use crate::diagnostics::{get_diagnostics, is_hidden};
use crate::server::LSPServer;
use log::{debug, error, trace};
use pathdiff::diff_paths;
use ropey::{Rope, RopeSlice};
use std::cmp::min;
use std::collections::HashMap;
use std::env::current_dir;
use std::fs;
use std::ops::Range as StdRange;
use std::path::PathBuf;
use std::sync::{Arc, Condvar, Mutex, RwLock};
use std::thread;
use std::time::Instant;
use sv_parser::*;
use thread::JoinHandle;
use tower_lsp::lsp_types::*;
use walkdir::WalkDir;
impl LSPServer {
    /// Handle `textDocument/didOpen`: register (or refresh) the document and
    /// return its initial diagnostics.
    pub fn did_open(&self, params: DidOpenTextDocumentParams) -> PublishDiagnosticsParams {
        let document: TextDocumentItem = params.text_document;
        let uri = document.uri.clone();
        debug!("did_open: {}", &uri);
        // check if doc is already added
        if self.srcs.names.read().unwrap().contains_key(&document.uri) {
            // convert to a did_change that replace the entire text
            self.did_change(DidChangeTextDocumentParams {
                text_document: VersionedTextDocumentIdentifier::new(document.uri, document.version),
                content_changes: vec![TextDocumentContentChangeEvent {
                    range: None,
                    range_length: None,
                    text: document.text,
                }],
            });
        } else {
            self.srcs.add(document);
        }
        // diagnostics
        let urls = self.srcs.names.read().unwrap().keys().cloned().collect();
        let file_id = self.srcs.get_id(&uri);
        let file = self.srcs.get_file(file_id).unwrap();
        let file = file.read().unwrap();
        get_diagnostics(uri, &file.text, urls, &self.conf.read().unwrap())
    }
    /// Handle `textDocument/didChange`: apply the incremental edits to the
    /// rope, then wake the file's parse thread.
    pub fn did_change(&self, params: DidChangeTextDocumentParams) {
        debug!("did_change: {}", &params.text_document.uri);
        let file_id = self.srcs.get_id(&params.text_document.uri);
        let file = self.srcs.get_file(file_id).unwrap();
        let mut file = file.write().unwrap();
        // loop through changes and apply
        for change in params.content_changes {
            if change.range.is_none() {
                // a change without a range is a full-document replacement
                file.text = Rope::from_str(&change.text);
            } else {
                file.text.apply_change(&change);
            }
            file.last_change_range = change.range;
        }
        file.version = params.text_document.version;
        // release the write lock before signalling the parse thread
        drop(file);
        // invalidate syntaxtree and wake parse thread
        let meta_data = self.srcs.get_meta_data(file_id).unwrap();
        let (lock, cvar) = &*meta_data.read().unwrap().valid_parse;
        let mut valid = lock.lock().unwrap();
        *valid = false;
        cvar.notify_all();
    }
    /// Handle `textDocument/didSave`: recompute diagnostics for the file.
    pub fn did_save(&self, params: DidSaveTextDocumentParams) -> PublishDiagnosticsParams {
        let urls = self.srcs.names.read().unwrap().keys().cloned().collect();
        let file_id = self.srcs.get_id(&params.text_document.uri);
        let file = self.srcs.get_file(file_id).unwrap();
        let file = file.read().unwrap();
        get_diagnostics(
            params.text_document.uri,
            &file.text,
            urls,
            &self.conf.read().unwrap(),
        )
    }
}
/// The Source struct holds all file specific information
pub struct Source {
    // index of this file, matching its position in `Sources::files`
    pub id: usize,
    pub uri: Url,
    // document text, kept in sync with the client via did_change edits
    pub text: Rope,
    pub version: i32,
    // most recent successful parse, if any
    pub syntax_tree: Option<SyntaxTree>,
    // if there is a parse error, we can remove the last change
    pub last_change_range: Option<Range>,
}
/// file metadata, including whether or not the syntax tree is up to date
pub struct SourceMeta {
    pub id: usize,
    // (parse_is_current, wakeup) pair shared with the file's parse thread
    pub valid_parse: Arc<(Mutex<bool>, Condvar)>,
    pub parse_handle: JoinHandle<()>,
}
/// find SystemVerilog/Verilog sources recursively from opened files
///
/// Walks each directory in `dirs`, skipping hidden entries, and collects
/// de-duplicated paths with a `.sv`/`.svh`/`.v`/`.vh` extension.
fn find_src_paths(dirs: &[PathBuf]) -> Vec<PathBuf> {
    let mut paths: Vec<PathBuf> = Vec::new();
    for dir in dirs {
        let walker = WalkDir::new(dir).into_iter();
        // `.flatten()` skips unreadable entries (e.g. permission errors)
        // instead of panicking like the previous `entry.unwrap()`; this also
        // matches how the diagnostics module walks directories
        for entry in walker.filter_entry(|e| !is_hidden(e)).flatten() {
            if !entry.file_type().is_file() {
                continue;
            }
            let is_source = matches!(
                entry.path().extension().and_then(|e| e.to_str()),
                Some("sv" | "svh" | "v" | "vh")
            );
            if is_source {
                let entry_path = entry.path().to_path_buf();
                // the same file may be reachable from several configured dirs
                if !paths.contains(&entry_path) {
                    paths.push(entry_path);
                }
            }
        }
    }
    paths
}
/// The Sources struct manages all source files
pub struct Sources {
    // all files
    pub files: Arc<RwLock<Vec<Arc<RwLock<Source>>>>>,
    // map file urls to id
    pub names: Arc<RwLock<HashMap<Url, usize>>>,
    // file metadata
    pub meta: Arc<RwLock<Vec<Arc<RwLock<SourceMeta>>>>>,
    // all source files are indexed into this tree, which can then
    // be used for completion, name resolution
    pub scope_tree: Arc<RwLock<Option<GenericScope>>>,
    // include directories, passed to parser to resolve `include
    pub include_dirs: Arc<RwLock<Vec<PathBuf>>>,
    // source directories
    pub source_dirs: Arc<RwLock<Vec<PathBuf>>>,
}
impl std::default::Default for Sources {
    // `Default` simply delegates to `new`, so `Sources` can be used in
    // struct-update syntax and derived defaults
    fn default() -> Self {
        Self::new()
    }
}
impl Sources {
    /// Create an empty `Sources` container with no files or scope tree.
    pub fn new() -> Self {
        Self {
            files: Arc::new(RwLock::new(Vec::new())),
            names: Arc::new(RwLock::new(HashMap::new())),
            meta: Arc::new(RwLock::new(Vec::new())),
            scope_tree: Arc::new(RwLock::new(None)),
            include_dirs: Arc::new(RwLock::new(Vec::new())),
            source_dirs: Arc::new(RwLock::new(Vec::new())),
        }
    }
pub fn init(&self) {
let mut paths: Vec<PathBuf> = Vec::new();
for path in &*self.include_dirs.read().unwrap() {
paths.push(path.clone());
}
for path in &*self.source_dirs.read().unwrap() {
paths.push(path.clone());
}
// find and add all source/header files recursively from configured include and source directories
let src_paths = find_src_paths(&paths);
for path in src_paths {
if let Ok(url) = Url::from_file_path(&path) {
if let Ok(text) = fs::read_to_string(&path) {
self.add(TextDocumentItem::new(
url,
"systemverilog".to_string(),
-1,
text,
));
}
}
}
}
    /// add a source file, creating a parse thread for that file
    pub fn add(&self, doc: TextDocumentItem) {
        // use a condvar to synchronize the parse thread
        // the valid bool decides whether or not the file
        // needs to be re-parsed
        #[allow(clippy::mutex_atomic)] // https://github.com/rust-lang/rust-clippy/issues/1516
        let valid_parse = Arc::new((Mutex::new(false), Condvar::new()));
        let valid_parse2 = valid_parse.clone();
        let mut files = self.files.write().unwrap();
        let source = Arc::new(RwLock::new(Source {
            id: files.len(),
            uri: doc.uri.clone(),
            text: Rope::from_str(&doc.text),
            version: doc.version,
            syntax_tree: None,
            last_change_range: None,
        }));
        let source_handle = source.clone();
        let scope_handle = self.scope_tree.clone();
        let inc_dirs = self.include_dirs.clone();
        // spawn parse thread
        // the thread loops forever: parse, publish results into the global
        // scope tree, then sleep on the condvar until did_change invalidates
        let parse_handle = thread::spawn(move || {
            let (lock, cvar) = &*valid_parse2;
            loop {
                let now = Instant::now();
                // snapshot the text under a short-lived read lock
                let file = source_handle.read().unwrap();
                let text = file.text.clone();
                let uri = &file.uri.clone();
                let range = &file.last_change_range.clone();
                drop(file);
                trace!("{}, parse read: {}", uri, now.elapsed().as_millis());
                let syntax_tree = parse(&text, uri, range, &inc_dirs.read().unwrap());
                let mut scope_tree = match &syntax_tree {
                    Some(tree) => get_scopes(tree, uri),
                    None => None,
                };
                trace!(
                    "{}, parse read complete: {}",
                    uri,
                    now.elapsed().as_millis()
                );
                let mut file = source_handle.write().unwrap();
                trace!("{}, parse write: {}", uri, now.elapsed().as_millis());
                file.syntax_tree = syntax_tree;
                drop(file);
                debug!("try write global scope");
                // replace this file's previous entries in the global scope
                // tree with the freshly computed ones
                let mut global_scope = scope_handle.write().unwrap();
                match &mut *global_scope {
                    Some(scope) => match &mut scope_tree {
                        Some(tree) => {
                            scope.defs.retain(|x| &x.url() != uri);
                            scope.scopes.retain(|x| &x.url() != uri);
                            scope.defs.append(&mut tree.defs);
                            scope.scopes.append(&mut tree.scopes);
                        }
                        None => (),
                    },
                    None => *global_scope = scope_tree,
                }
                // eprintln!("{:#?}", *global_scope);
                drop(global_scope);
                trace!("{}, write global scope", uri);
                trace!(
                    "{}, parse write complete: {}",
                    uri,
                    now.elapsed().as_millis()
                );
                // mark the parse valid, wake any waiters, then block until
                // the next invalidation
                let mut valid = lock.lock().unwrap();
                *valid = true;
                cvar.notify_all();
                while *valid {
                    valid = cvar.wait(valid).unwrap();
                }
            }
        });
        files.push(source);
        let fid = files.len() - 1;
        self.meta
            .write()
            .unwrap()
            .push(Arc::new(RwLock::new(SourceMeta {
                id: fid,
                valid_parse,
                parse_handle,
            })));
        debug!("added {}", &doc.uri);
        self.names.write().unwrap().insert(doc.uri, fid);
    }
/// get file by id
pub fn get_file(&self, id: usize) -> Option<Arc<RwLock<Source>>> {
let files = self.files.read().ok()?;
for file in files.iter() {
let source = file.read().ok()?;
if source.id == id {
return Some(file.clone());
}
}
None
}
/// get metadata by file id
pub fn get_meta_data(&self, id: usize) -> Option<Arc<RwLock<SourceMeta>>> {
let meta = self.meta.read().ok()?;
for data in meta.iter() {
let i = data.read().ok()?;
if i.id == id {
return Some(data.clone());
}
}
None
}
/// wait for a valid parse
///
/// Blocks on the file's parse condvar until the parse thread marks the
/// parse valid. When `wait_valid` is false and a syntax tree already
/// exists, returns immediately.
pub fn wait_parse_ready(&self, id: usize, wait_valid: bool) {
    let handle = self.get_file(id).unwrap();
    let needs_wait = handle.read().unwrap().syntax_tree.is_none() || wait_valid;
    if !needs_wait {
        return;
    }
    let meta = self.get_meta_data(id).unwrap();
    let meta = meta.read().unwrap();
    let (lock, cvar) = &*meta.valid_parse;
    let mut valid = lock.lock().unwrap();
    while !*valid {
        valid = cvar.wait(valid).unwrap();
    }
}
/// get file id from url
///
/// Panics if the url has not been registered via `add`.
pub fn get_id(&self, uri: &Url) -> usize {
    let names = self.names.read().unwrap();
    names.get(uri).copied().unwrap()
}
/// compute identifier completions
///
/// Returns `None` when the global scope tree is unavailable (lock poisoned
/// or no scope tree built yet).
pub fn get_completions(
    &self,
    token: &str,
    byte_idx: usize,
    url: &Url,
) -> Option<CompletionList> {
    debug!("retrieving identifier completion for token: {}", token);
    let guard = self.scope_tree.read().ok()?;
    let scopes = guard.as_ref()?;
    let items = scopes.get_completion(token, byte_idx, url);
    Some(CompletionList {
        is_incomplete: false,
        items,
    })
}
/// compute dot completions
///
/// Returns `None` when the global scope tree is unavailable (lock poisoned
/// or no scope tree built yet).
pub fn get_dot_completions(
    &self,
    token: &str,
    byte_idx: usize,
    url: &Url,
) -> Option<CompletionList> {
    debug!("retrieving dot completion for token: {}", token);
    let guard = self.scope_tree.read().ok()?;
    let scopes = guard.as_ref()?;
    let items = scopes.get_dot_completion(token, byte_idx, url, scopes);
    Some(CompletionList {
        is_incomplete: false,
        items,
    })
}
}
//TODO: show all unrecoverable parse errors to user
/// parse the file using sv-parser, attempt to recover if the parse fails
pub fn parse(
doc: &Rope,
uri: &Url,
last_change_range: &Option<Range>,
inc_paths: &[PathBuf],
) -> Option<SyntaxTree> {
let mut parse_iterations = 1;
let mut i = 0;
let mut includes: Vec<PathBuf> = inc_paths.to_vec();
let mut reverted_change = false;
let mut text = doc.clone();
while i < parse_iterations {
i += 1;
match parse_sv_str(
&text.to_string(),
uri.to_file_path().unwrap(),
&HashMap::new(),
&includes,
false,
) {
Ok((syntax_tree, _)) => {
debug!("parse complete of {}", uri);
trace!("{}", syntax_tree.to_string());
return Some(syntax_tree);
}
Err(err) => {
match err {
// syntax error
sv_parser::Error::Parse(trace) => match trace {
Some((_, bpos)) => {
let mut line_start = text.byte_to_line(bpos);
let mut line_end = text.byte_to_line(bpos) + 1;
if !reverted_change {
if let Some(range) = last_change_range {
line_start = range.start.line as usize;
line_end = range.end.line as usize + 1;
reverted_change = true;
}
}
for line_idx in line_start..line_end {
let line = text.line(line_idx);
let start_char = text.line_to_char(line_idx);
let line_length = line.len_chars();
text.remove(start_char..(start_char + line_length - 1));
text.insert(start_char, &" ".to_owned().repeat(line_length));
}
parse_iterations += 1;
}
None => return None,
},
// include error, take the include path from the error message and
// add it as an include dir for the next parser invocation
sv_parser::Error::Include { source: x } => {
if let sv_parser::Error::File { source: _, path: z } = *x {
// Include paths have to be relative to the working directory
// so we have to convert a source file relative path to a working directory
// relative path. This should have been handled by sv-parser
let mut inc_path_given = z.clone();
let mut uri_path = uri.to_file_path().unwrap();
uri_path.pop();
let rel_path = diff_paths(uri_path, current_dir().unwrap()).unwrap();
inc_path_given.pop();
let inc_path = rel_path.join(inc_path_given);
if !includes.contains(&inc_path) {
includes.push(inc_path);
} else {
error!("parser: include error: {:?}", z);
break;
}
parse_iterations += 1;
}
}
_ => error!("parse error, {:?}", err),
};
}
}
}
None
}
//TODO: add bounds checking for utf8<->utf16 conversions
/// This trait defines some helper functions to convert between lsp types
/// and char/byte positions
pub trait LSPSupport {
    /// Convert an LSP `Position` to a byte index.
    fn pos_to_byte(&self, pos: &Position) -> usize;
    /// Convert an LSP `Position` to a char index.
    fn pos_to_char(&self, pos: &Position) -> usize;
    /// Convert a byte index to an LSP `Position`.
    fn byte_to_pos(&self, byte_idx: usize) -> Position;
    /// Convert a char index to an LSP `Position`.
    fn char_to_pos(&self, char_idx: usize) -> Position;
    /// Convert an LSP `Range` to a half-open char-index range.
    fn range_to_char_range(&self, range: &Range) -> StdRange<usize>;
    /// Convert a half-open char-index range to an LSP `Range`.
    fn char_range_to_range(&self, range: StdRange<usize>) -> Range;
    /// Apply an LSP text-change event in place.
    fn apply_change(&mut self, change: &TextDocumentContentChangeEvent);
}
/// Extend ropey's Rope type with lsp convenience functions
impl LSPSupport for Rope {
fn pos_to_byte(&self, pos: &Position) -> usize {
self.char_to_byte(self.pos_to_char(pos))
}
fn pos_to_char(&self, pos: &Position) -> usize {
let line_slice = self.line(pos.line as usize);
self.line_to_char(pos.line as usize) + line_slice.utf16_cu_to_char(pos.character as usize)
}
fn byte_to_pos(&self, byte_idx: usize) -> Position {
self.char_to_pos(self.byte_to_char(min(byte_idx, self.len_bytes() - 1)))
}
fn char_to_pos(&self, char_idx: usize) -> Position {
let line = self.char_to_line(char_idx);
let line_slice = self.line(line);
Position {
line: line as u32,
character: line_slice.char_to_utf16_cu(char_idx - self.line_to_char(line)) as u32,
}
}
fn range_to_char_range(&self, range: &Range) -> StdRange<usize> {
self.pos_to_char(&range.start)..self.pos_to_char(&range.end)
}
fn char_range_to_range(&self, range: StdRange<usize>) -> Range {
Range {
start: self.char_to_pos(range.start),
end: self.char_to_pos(range.end),
}
}
fn apply_change(&mut self, change: &TextDocumentContentChangeEvent) {
if let Some(range) = change.range {
let char_range = self.range_to_char_range(&range);
self.remove(char_range.clone());
if !change.text.is_empty() {
self.insert(char_range.start, &change.text);
}
}
}
}
/// Same lsp conversion helpers for borrowed rope slices.
impl<'a> LSPSupport for RopeSlice<'a> {
    /// Convert an LSP `Position` to a byte index.
    fn pos_to_byte(&self, pos: &Position) -> usize {
        self.char_to_byte(self.pos_to_char(pos))
    }
    /// Convert an LSP `Position` to a char index (utf-16 aware).
    fn pos_to_char(&self, pos: &Position) -> usize {
        let line_slice = self.line(pos.line as usize);
        self.line_to_char(pos.line as usize) + line_slice.utf16_cu_to_char(pos.character as usize)
    }
    /// Convert a byte index to an LSP `Position`, clamping into the slice.
    fn byte_to_pos(&self, byte_idx: usize) -> Position {
        // saturating_sub: an empty slice has len_bytes() == 0, so the old
        // `len_bytes() - 1` underflowed and panicked
        self.char_to_pos(self.byte_to_char(min(byte_idx, self.len_bytes().saturating_sub(1))))
    }
    /// Convert a char index to an LSP `Position`.
    fn char_to_pos(&self, char_idx: usize) -> Position {
        let line = self.char_to_line(char_idx);
        let line_slice = self.line(line);
        Position {
            line: line as u32,
            character: line_slice.char_to_utf16_cu(char_idx - self.line_to_char(line)) as u32,
        }
    }
    /// Convert an LSP `Range` to a half-open char-index range.
    fn range_to_char_range(&self, range: &Range) -> StdRange<usize> {
        self.pos_to_char(&range.start)..self.pos_to_char(&range.end)
    }
    /// Convert a half-open char-index range to an LSP `Range`.
    fn char_range_to_range(&self, range: StdRange<usize>) -> Range {
        Range {
            start: self.char_to_pos(range.start),
            end: self.char_to_pos(range.end),
        }
    }
    /// Slices are immutable views; mutation is a caller bug.
    fn apply_change(&mut self, _: &TextDocumentContentChangeEvent) {
        panic!("can't edit a rope slice");
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::support::test_init;
    use std::fs::read_to_string;
    /// open a document, apply an incremental edit, and verify that the
    /// stored text and version are updated
    #[test]
    fn test_open_and_change() {
        test_init();
        let server = LSPServer::new(None);
        let uri = Url::parse("file:///test.sv").unwrap();
        let text = r#"module test;
logic abc;
endmodule"#;
        let open_params = DidOpenTextDocumentParams {
            text_document: TextDocumentItem {
                uri: uri.clone(),
                language_id: "systemverilog".to_owned(),
                version: 0,
                text: text.to_owned(),
            },
        };
        server.did_open(open_params);
        let fid = server.srcs.get_id(&uri);
        let file = server.srcs.get_file(fid).unwrap();
        let file = file.read().unwrap();
        assert_eq!(file.text.to_string(), text.to_owned());
        // drop the read guard before did_change needs write access
        drop(file);
        // replace "abc" (line 1, chars 8..11) with "var1"
        let change_params = DidChangeTextDocumentParams {
            text_document: VersionedTextDocumentIdentifier { uri, version: 1 },
            content_changes: vec![TextDocumentContentChangeEvent {
                range: Some(Range {
                    start: Position {
                        line: 1,
                        character: 8,
                    },
                    end: Position {
                        line: 1,
                        character: 11,
                    },
                }),
                range_length: None,
                text: "var1".to_owned(),
            }],
        };
        server.did_change(change_params);
        let file = server.srcs.get_file(fid).unwrap();
        let file = file.read().unwrap();
        assert_eq!(
            file.text.to_string(),
            r#"module test;
logic var1;
endmodule"#
                .to_owned()
        );
        assert_eq!(file.version, 1);
    }
    /// open a document with a syntax error (missing semicolon) and verify
    /// the recovery path still produces a scope for the module
    #[test]
    fn test_fault_tolerance() {
        test_init();
        let server = LSPServer::new(None);
        let uri = Url::parse("file:///test.sv").unwrap();
        let text = r#"module test;
logic abc
endmodule"#;
        let open_params = DidOpenTextDocumentParams {
            text_document: TextDocumentItem {
                uri: uri.clone(),
                language_id: "systemverilog".to_owned(),
                version: 0,
                text: text.to_owned(),
            },
        };
        server.did_open(open_params);
        let fid = server.srcs.get_id(&uri);
        server.srcs.wait_parse_ready(fid, true);
        assert!(server
            .srcs
            .scope_tree
            .read()
            .unwrap()
            .as_ref()
            .unwrap()
            .contains_scope("test"));
    }
    /// parse a file that pulls in an include, exercising the
    /// include-directory recovery in `parse`
    #[test]
    fn test_header() {
        test_init();
        let mut d = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
        d.push("test_data/top_inc.sv");
        let text = read_to_string(&d).unwrap();
        let doc = Rope::from_str(&text);
        assert!(parse(&doc, &Url::from_file_path(d).unwrap(), &None, &Vec::new()).is_some(),);
        // TODO: add missing header test
    }
}

3
src/support.rs Normal file
View File

@ -0,0 +1,3 @@
/// Initialize logging at info level for tests.
///
/// The result is discarded because a logger may already be installed when
/// multiple tests call this helper.
pub fn test_init() {
    let _ = flexi_logger::Logger::with(flexi_logger::LogSpecification::info()).start();
}

4
test/client/src/.gitignore vendored Normal file
View File

@ -0,0 +1,4 @@
out
node_modules
.vscode-test
*.vsix

View File

@ -0,0 +1,8 @@
.vscode/**
**/*.ts
**/*.map
.gitignore
**/tsconfig.json
**/tsconfig.base.json
contributing.md
.travis.yml

View File

@ -0,0 +1,6 @@
# veridian
a vscode client extension for the veridian language server.
- veridian must be installed separately; see https://github.com/vivekmalneedi/veridian for details
- provides syntax highlighting, the grammar for which is borrowed from https://github.com/TheClams/SystemVerilog and is under the Apache 2.0 License

View File

@ -0,0 +1,56 @@
/* --------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 * ------------------------------------------------------------------------------------------ */
import { workspace, ExtensionContext } from "vscode";
import {
    LanguageClient,
    LanguageClientOptions,
    ServerOptions,
    Executable,
} from "vscode-languageclient/node";

let client: LanguageClient;

// first workspace folder, if any; undefined when a file is opened without a folder
const workSpaceFolder = workspace.workspaceFolders?.[0];
// guard the access: the old code dereferenced workSpaceFolder unconditionally
// and crashed activation when no folder was open
let cwd: string | undefined = workSpaceFolder?.uri.fsPath;
// fall back to resolving "veridian" on PATH when no path is configured
const serverPath: string =
    workspace.getConfiguration().get("veridian.serverPath") ?? "veridian";

/** Start the veridian language client when the extension activates. */
export function activate(context: ExtensionContext) {
    const run: Executable = {
        command: serverPath,
        // options: { cwd },
    };
    // If the extension is launched in debug mode then the debug server options are used
    // Otherwise the run options are used
    let serverOptions: ServerOptions = {
        run,
        debug: run,
    };
    // Options to control the language client
    let clientOptions: LanguageClientOptions = {
        // Register the server for plain text documents
        documentSelector: [{ scheme: "file", language: "systemverilog" }],
    };
    // Create the language client and start the client.
    client = new LanguageClient(
        "veridian",
        "veridian",
        serverOptions,
        clientOptions
    );
    // Start the client. This will also launch the server
    client.start();
}

/** Stop the language client (and server) on extension shutdown. */
export function deactivate(): Thenable<void> | undefined {
    if (!client) {
        return undefined;
    }
    return client.stop();
}

4368
test/client/src/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,96 @@
{
"name": "veridian",
"description": "A client for the Veridian Language Server for SystemVerilog/Verilog",
"author": "Vivek Malneedi",
"publisher": "vivekmalneedi",
"license": "MIT",
"version": "0.1.0",
"categories": [
"Programming Languages",
"Snippets",
"Linters"
],
"keywords": [
"SystemVerilog",
"Verilog"
],
"repository": {
"type": "git",
"url": "https://github.com/vivekmalneedi/veridian"
},
"activationEvents": [
"onLanguage:systemverilog",
"onLanguage:verilog"
],
"main": "./out/extension",
"scripts": {
"vscode:prepublish": "npm run compile",
"compile": "tsc -b",
"watch": "tsc -b -w"
},
"contributes": {
"languages": [
{
"id": "systemverilog",
"extensions": [
".sv",
".svh",
".v",
".vh",
".verilog"
],
"aliases": [
"SystemVerilog",
"verilog",
"Verilog"
]
}
],
"grammars": [
{
"language": "systemverilog",
"scopeName": "source.systemverilog",
"path": "./syntaxes/systemverilog.tmLanguage.json"
}
],
"configuration": {
"type": "object",
"title": "veridian",
"properties": {
"veridian.serverPath": {
"scope": "window",
"type": "string",
"default": "veridian",
"description": "path of the veridian binary"
},
"veridian.trace.server": {
"scope": "window",
"type": "string",
"enum": [
"off",
"messages",
"verbose"
],
"default": "off",
"description": "Traces the communication between VS Code and the language server."
}
}
}
},
"engines": {
"vscode": "^1.56.0"
},
"dependencies": {
"vscode-languageclient": "^7.0.0"
},
"devDependencies": {
"@types/vscode": "^1.56.0",
"vscode-test": "^1.5.2",
"@types/mocha": "^8.2.2",
"mocha": "^8.4.0",
"@types/node": "^15.12.1",
"eslint": "^7.28.0",
"@typescript-eslint/parser": "^4.26.0",
"typescript": "^4.3.2"
}
}

View File

@ -0,0 +1,12 @@
{
"compilerOptions": {
"module": "commonjs",
"target": "es2019",
"lib": ["ES2019"],
"outDir": "out",
"rootDir": "src",
"sourceMap": true
},
"include": ["src"],
"exclude": ["node_modules", ".vscode-test"]
}

File diff suppressed because it is too large Load Diff