diff --git a/Cargo.lock b/Cargo.lock index e852ca7..0ea5b44 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1055,8 +1055,6 @@ dependencies = [ [[package]] name = "sv-parser" version = "0.13.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "425309b8d5bbd38fe592ba1a19e91ccdaab66e1ef312eb0ce0b8d6e4295ef4cb" dependencies = [ "nom", "nom-greedyerror", @@ -1069,8 +1067,6 @@ dependencies = [ [[package]] name = "sv-parser-error" version = "0.13.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d702ef215611db3c9cbd57c39a543fc72924eff9defb2b5e0461617929ef1a86" dependencies = [ "thiserror", ] @@ -1078,8 +1074,6 @@ dependencies = [ [[package]] name = "sv-parser-macros" version = "0.13.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a2eba85c18b26226dee76a6f07a1cb189130012a32109474ae182666b4e9b00" dependencies = [ "quote", "syn 2.0.28", @@ -1088,8 +1082,6 @@ dependencies = [ [[package]] name = "sv-parser-parser" version = "0.13.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ccb13fe101bdf64fa533121759d96f75b4e3038c03320fad4cd697063f9e101" dependencies = [ "nom", "nom-greedyerror", @@ -1105,8 +1097,6 @@ dependencies = [ [[package]] name = "sv-parser-pp" version = "0.13.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d580999e597ef78b966343276e3a948b2d99348dbc58ddc1e9a596c3a7a8a10" dependencies = [ "nom", "nom-greedyerror", @@ -1118,8 +1108,6 @@ dependencies = [ [[package]] name = "sv-parser-syntaxtree" version = "0.13.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75394b4b48cf789e5eb148f4a47f82c119424166572e13e3dee5394fc1da7ad0" dependencies = [ "regex", "sv-parser-macros", diff --git a/Cargo.toml b/Cargo.toml index 2b9449a..b55a2b6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,7 +5,7 @@ authors = ["LSTM-Kirigaya <1193466151@qq.com>"] edition = "2018" [dependencies] -sv-parser = "0.13.3" +sv-parser = { version = "0.13.3", path = "sv-parser/sv-parser"} log = "0.4.19" tower-lsp = "0.20.0" flexi_logger = "0.29.0" diff --git a/src/completion.rs b/src/completion.rs index 03d13cb..7426abe 100644 --- a/src/completion.rs +++ b/src/completion.rs @@ -1,24 +1,17 @@ use crate::server::LSPServer; use crate::sources::LSPSupport; -use log::{debug, info, log, trace}; use ropey::{Rope, RopeSlice}; -use std::time::Instant; use tower_lsp::lsp_types::*; pub mod keyword; impl LSPServer { pub fn completion(&self, params: CompletionParams) -> Option { - debug!("completion requested"); - trace!("{:#?}", ¶ms); - let now = Instant::now(); let doc = params.text_document_position; let file_id = self.srcs.get_id(&doc.text_document.uri).to_owned(); self.srcs.wait_parse_ready(file_id, false); - trace!("comp wait parse: {}", now.elapsed().as_millis()); let file = self.srcs.get_file(file_id)?; let file = file.read().ok()?; - trace!("comp read: {}", now.elapsed().as_millis()); let token = get_completion_token( &file.text, file.text.line(doc.position.line as usize), @@ -30,10 +23,6 @@ impl LSPServer { let response = match params.context { Some(context) => match context.trigger_kind { CompletionTriggerKind::TRIGGER_CHARACTER => { - debug!( - "trigger char completion: {}", - context.trigger_character.clone()?.as_str() - ); match context.trigger_character?.as_str() { "." 
=> Some(self.srcs.get_dot_completions( token.trim_end_matches('.'), @@ -53,7 +42,6 @@ impl LSPServer { } CompletionTriggerKind::TRIGGER_FOR_INCOMPLETE_COMPLETIONS => None, CompletionTriggerKind::INVOKED => { - debug!("Invoked Completion"); let mut comps = self.srcs.get_completions( &token, file.text.pos_to_byte(&doc.position), diff --git a/src/core.rs b/src/core.rs index 8fc4557..ac5d98f 100644 --- a/src/core.rs +++ b/src/core.rs @@ -1 +1,7 @@ -pub mod fast_hdlparam; \ No newline at end of file +pub mod fast_hdlparam; +// pub use fast_hdlparam::*; + +pub mod sv_parser; +// pub use sv_parser::*; + +// pub mod vhdl_parser; \ No newline at end of file diff --git a/src/core/hdl_param.rs b/src/core/hdl_param.rs deleted file mode 100644 index 8622d82..0000000 --- a/src/core/hdl_param.rs +++ /dev/null @@ -1,2 +0,0 @@ -use std::collections::HashMap; - diff --git a/src/core/sv_parser.rs b/src/core/sv_parser.rs index 3d90bd6..15aa000 100644 --- a/src/core/sv_parser.rs +++ b/src/core/sv_parser.rs @@ -1,9 +1,9 @@ use std::collections::HashMap; use std::path::PathBuf; use sv_parser::{parse_sv, unwrap_node, ConstantMintypmaxExpression, GateInstantiation, ListOfParameterAssignments, ListOfPortConnections, Locate, PackedDimensionRange, RefNode, SyntaxTree}; -use crate::core::fast_hdlparam::FastHdlparam; +use super::fast_hdlparam::FastHdlparam; -pub fn sv_parser(path: &str) -> FastHdlparam { +pub fn sv_parser(path: &str) -> Option { // The path of SystemVerilog source file let path = PathBuf::from(path); // The list of defined macros @@ -11,236 +11,232 @@ pub fn sv_parser(path: &str) -> FastHdlparam { // The list of include paths let includes: Vec = Vec::new(); - // Parse - let result: Result<(SyntaxTree, HashMap>), sv_parser::Error> = parse_sv(&path, &defines, &includes, false, true); + let result = parse_sv(&path, &defines, &includes, false, true); + if let Ok((syntax_tree, _)) = result { + let hdlparam = make_fast_from_syntaxtree(syntax_tree); + return Some(hdlparam); + } - let hdlparam = make_fast_from_syntaxtree(result); - - hdlparam + None } -pub fn make_fast_from_syntaxtree(result: Result<(SyntaxTree, HashMap>), sv_parser::Error>) -> FastHdlparam { +pub fn make_fast_from_syntaxtree(syntax_tree: SyntaxTree) -> FastHdlparam { let mut hdlparam = FastHdlparam { fast: Vec::new() }; - let mut nonansi_port_locate = HashMap::new(); let mut ansi_port_last_dir = ""; - if let Ok((syntax_tree, _)) = result { - let content = syntax_tree.text.text().split('\n') - .map(|s| s.to_string()) - .collect::>(); - // &SyntaxTree is iterable - for node in &syntax_tree { - match node { - RefNode::ModuleDeclaration(x) => { - let id = unwrap_node!(x, ModuleIdentifier).unwrap(); - let id = get_identifier(id).unwrap(); - let (line, character) = (id.line, get_column_by_offset(&content, id.offset) as u32); - let end_character = character + id.len as u32; - let name = syntax_tree.get_str(&id).unwrap(); - hdlparam.new_module(name, line, character, end_character); - } - RefNode::ParameterPortDeclaration(x) => { - let id = unwrap_node!(x, ParameterIdentifier).unwrap(); - let id = get_identifier(id).unwrap(); - let name = syntax_tree.get_str(&id).unwrap(); - match unwrap_node!(x, ParameterDeclarationParam, ParameterPortDeclarationParamList) { - Some(RefNode::ParameterDeclarationParam(param_node)) => { - // println!("{:?}", param_node); - let keyword_locate = param_node.nodes.0.nodes.0; - // println!("keyword {:#?}", keyword_locate); - let (start_line, start_character) = (id.line, get_column_by_offset(&content, keyword_locate.offset) 
as u32); - let (mut end_line, mut end_character) = (id.line, start_character + id.len as u32); - let net_type = match unwrap_node!(param_node, DataType) { - Some(RefNode::DataType(data_type)) => { - let id = get_identifier(unwrap_node!(data_type, SimpleIdentifier, Keyword).unwrap()).unwrap(); - syntax_tree.get_str(&id).unwrap() - } - _ => "wire" - }; - let init = match unwrap_node!(param_node, ConstantMintypmaxExpression).unwrap() { - RefNode::ConstantMintypmaxExpression(expression) => { - // println!("expression {:?}", expression); - let (exp, last_locate) = parse_parameter_expression(&syntax_tree, expression); - (end_line, end_character) = if last_locate != None { - // println!("param {:?} lastlocate {:?}", name, last_locate); - (last_locate.unwrap().line, (get_column_by_offset(&content, last_locate.unwrap().offset) + last_locate.unwrap().len) as u32) - } else { - (end_line, end_character) - }; - // println!("end pos {} {}", end_line, end_character); - exp - } - _ => "unknown".to_string() - }; - hdlparam.add_parameter(name, net_type, init.as_str(), start_line, start_character, end_line, end_character); - } - _ => () + let content = syntax_tree.text.text().split('\n') + .map(|s| s.to_string()) + .collect::>(); + // &SyntaxTree is iterable + for node in &syntax_tree { + match node { + RefNode::ModuleDeclaration(x) => { + let id = unwrap_node!(x, ModuleIdentifier).unwrap(); + let id = get_identifier(id).unwrap(); + let (line, character) = (id.line, get_column_by_offset(&content, id.offset) as u32); + let end_character = character + id.len as u32; + let name = syntax_tree.get_str(&id).unwrap(); + hdlparam.new_module(name, line, character, end_character); + } + RefNode::ParameterPortDeclaration(x) => { + let id = unwrap_node!(x, ParameterIdentifier).unwrap(); + let id = get_identifier(id).unwrap(); + let name = syntax_tree.get_str(&id).unwrap(); + match unwrap_node!(x, ParameterDeclarationParam, ParameterPortDeclarationParamList) { + Some(RefNode::ParameterDeclarationParam(param_node)) => { + // println!("{:?}", param_node); + let keyword_locate = param_node.nodes.0.nodes.0; + // println!("keyword {:#?}", keyword_locate); + let (start_line, start_character) = (id.line, get_column_by_offset(&content, keyword_locate.offset) as u32); + let (mut end_line, mut end_character) = (id.line, start_character + id.len as u32); + let net_type = match unwrap_node!(param_node, DataType) { + Some(RefNode::DataType(data_type)) => { + let id = get_identifier(unwrap_node!(data_type, SimpleIdentifier, Keyword).unwrap()).unwrap(); + syntax_tree.get_str(&id).unwrap() + } + _ => "wire" + }; + let init = match unwrap_node!(param_node, ConstantMintypmaxExpression).unwrap() { + RefNode::ConstantMintypmaxExpression(expression) => { + // println!("expression {:?}", expression); + let (exp, last_locate) = parse_parameter_expression(&syntax_tree, expression); + (end_line, end_character) = if last_locate != None { + // println!("param {:?} lastlocate {:?}", name, last_locate); + (last_locate.unwrap().line, (get_column_by_offset(&content, last_locate.unwrap().offset) + last_locate.unwrap().len) as u32) + } else { + (end_line, end_character) + }; + // println!("end pos {} {}", end_line, end_character); + exp + } + _ => "unknown".to_string() + }; + hdlparam.add_parameter(name, net_type, init.as_str(), start_line, start_character, end_line, end_character); } + _ => () } - RefNode::Port(x) => { - let id = unwrap_node!(x, SimpleIdentifier).unwrap(); - let locate = get_identifier(id).unwrap(); - let name = 
syntax_tree.get_str(&locate.clone()).unwrap(); - // println!("get port {} {:?}", name, locate); - nonansi_port_locate.insert(name, locate); - } - RefNode::PortDeclaration(x) => { - let id = unwrap_node!(x, InputDeclaration, OutputDeclaration, InoutDeclaration).unwrap(); - let id = get_identifier(id).unwrap(); - let dir_type = syntax_tree.get_str(&id).unwrap(); - let (dir_line, dir_character) = (id.line, get_column_by_offset(&content, id.offset) as u32); + } + RefNode::Port(x) => { + let id = unwrap_node!(x, SimpleIdentifier).unwrap(); + let locate = get_identifier(id).unwrap(); + let name = syntax_tree.get_str(&locate.clone()).unwrap(); + // println!("get port {} {:?}", name, locate); + nonansi_port_locate.insert(name, locate); + } + RefNode::PortDeclaration(x) => { + let id = unwrap_node!(x, InputDeclaration, OutputDeclaration, InoutDeclaration).unwrap(); + let id = get_identifier(id).unwrap(); + let dir_type = syntax_tree.get_str(&id).unwrap(); + let (dir_line, dir_character) = (id.line, get_column_by_offset(&content, id.offset) as u32); - let net_type = match unwrap_node!(x, DataType, ImplicitDataType) { + let net_type = match unwrap_node!(x, DataType, ImplicitDataType) { + Some(RefNode::DataType(x)) => { + let id = unwrap_node!(x, Keyword).unwrap(); + syntax_tree.get_str(&get_identifier(id).unwrap()).unwrap() + }, + Some(RefNode::ImplicitDataType(_)) => "wire", + _ => "unknown" + }; + + let width = match unwrap_node!(x, PackedDimensionRange) { + Some(RefNode::PackedDimensionRange(x)) => { + parse_port_expression(&syntax_tree, x) + } + _ => "1".to_string() + }; + + let id = unwrap_node!(x, PortIdentifier).unwrap(); + let id = get_identifier(id).unwrap(); + let name = syntax_tree.get_str(&id).unwrap(); + let (start_line, start_character, end_line, end_character) = if nonansi_port_locate.contains_key(name) { + let start_character = get_column_by_offset(&content, nonansi_port_locate[name].offset); + (nonansi_port_locate[name].line, start_character as u32, + nonansi_port_locate[name].line, (start_character + nonansi_port_locate[name].len) as u32) + } else { + (dir_line, dir_character, id.line, (get_column_by_offset(&content, id.offset) + id.len) as u32) + }; + + hdlparam.add_port(name, dir_type, net_type, width.as_str(), start_line, start_character, end_line, end_character); + } + RefNode::AnsiPortDeclaration(x) => { + let id = unwrap_node!(x, PortIdentifier).unwrap(); + let name_locate = get_identifier(id).unwrap(); + let name = syntax_tree.get_str(&name_locate).unwrap(); + let character = get_column_by_offset(&content, name_locate.offset); + let (end_line, end_character) = (name_locate.line, (character + name_locate.len) as u32); + + let id = unwrap_node!(x, PortDirection); + let (start_line, start_character) = if id != None { + let id = id.unwrap(); + let dir_locate = get_identifier(id).unwrap(); + ansi_port_last_dir = syntax_tree.get_str(&dir_locate).unwrap(); + (dir_locate.line, get_column_by_offset(&content, dir_locate.offset) as u32) + } else { + (name_locate.line, character as u32) + }; + + let net_type = if unwrap_node!(x, AnsiPortDeclarationVariable) != None { + "wire" + } else { + match unwrap_node!(x, DataType, ImplicitDataType) { Some(RefNode::DataType(x)) => { let id = unwrap_node!(x, Keyword).unwrap(); syntax_tree.get_str(&get_identifier(id).unwrap()).unwrap() }, Some(RefNode::ImplicitDataType(_)) => "wire", _ => "unknown" - }; + } + }; - let width = match unwrap_node!(x, PackedDimensionRange) { - Some(RefNode::PackedDimensionRange(x)) => { - 
parse_port_expression(&syntax_tree, x) - } - _ => "1".to_string() - }; + let width = match unwrap_node!(x, PackedDimensionRange) { + Some(RefNode::PackedDimensionRange(x)) => { + parse_port_expression(&syntax_tree, x) + } + _ => "1".to_string() + }; - let id = unwrap_node!(x, PortIdentifier).unwrap(); - let id = get_identifier(id).unwrap(); - let name = syntax_tree.get_str(&id).unwrap(); - let (start_line, start_character, end_line, end_character) = if nonansi_port_locate.contains_key(name) { - let start_character = get_column_by_offset(&content, nonansi_port_locate[name].offset); - (nonansi_port_locate[name].line, start_character as u32, - nonansi_port_locate[name].line, (start_character + nonansi_port_locate[name].len) as u32) - } else { - (dir_line, dir_character, id.line, (get_column_by_offset(&content, id.offset) + id.len) as u32) - }; - - hdlparam.add_port(name, dir_type, net_type, width.as_str(), start_line, start_character, end_line, end_character); - } - RefNode::AnsiPortDeclaration(x) => { - let id = unwrap_node!(x, PortIdentifier).unwrap(); - let name_locate = get_identifier(id).unwrap(); - let name = syntax_tree.get_str(&name_locate).unwrap(); - let character = get_column_by_offset(&content, name_locate.offset); - let (end_line, end_character) = (name_locate.line, (character + name_locate.len) as u32); - - let id = unwrap_node!(x, PortDirection); - let (start_line, start_character) = if id != None { - let id = id.unwrap(); - let dir_locate = get_identifier(id).unwrap(); - ansi_port_last_dir = syntax_tree.get_str(&dir_locate).unwrap(); - (dir_locate.line, get_column_by_offset(&content, dir_locate.offset) as u32) - } else { - (name_locate.line, character as u32) - }; - - let net_type = if unwrap_node!(x, AnsiPortDeclarationVariable) != None { - "wire" - } else { - match unwrap_node!(x, DataType, ImplicitDataType) { - Some(RefNode::DataType(x)) => { - let id = unwrap_node!(x, Keyword).unwrap(); - syntax_tree.get_str(&get_identifier(id).unwrap()).unwrap() - }, - Some(RefNode::ImplicitDataType(_)) => "wire", - _ => "unknown" - } - }; - - let width = match unwrap_node!(x, PackedDimensionRange) { - Some(RefNode::PackedDimensionRange(x)) => { - parse_port_expression(&syntax_tree, x) - } - _ => "1".to_string() - }; - - hdlparam.add_port(name, ansi_port_last_dir, net_type, width.as_str(), start_line, start_character, end_line, end_character); - } - RefNode::ModuleInstantiation(x) => { - let id = unwrap_node!(x, ModuleIdentifier).unwrap(); - let id = get_identifier(id).unwrap(); - let inst_type = syntax_tree.get_str(&id).unwrap(); - - let id = unwrap_node!(x, HierarchicalInstance).unwrap(); - let id = get_identifier(id).unwrap(); - let name = syntax_tree.get_str(&id).unwrap(); - let (line, character) = (id.line, get_column_by_offset(&content, id.offset) as u32); - let end_character = character + id.len as u32; - - let (param_start_line, param_start_character, - param_end_line, param_end_character) = match unwrap_node!(x, ListOfParameterAssignments) { - Some(RefNode::ListOfParameterAssignments(x)) => { - let locate = get_inst_param_last_locate(x); - if locate.is_none() { - (0, 0, 0, 0) - } else { - ( - locate.unwrap().0.line, get_column_by_offset(&content, locate.unwrap().0.offset) as u32, - locate.unwrap().1.line, (get_column_by_offset(&content, locate.unwrap().1.offset) + locate.unwrap().1.len) as u32 - ) - } - } - _ => (0, 0, 0, 0) - }; - - let (port_start_line, port_start_character, - port_end_line, port_end_character) = match unwrap_node!(x, ListOfPortConnections) { - 
Some(RefNode::ListOfPortConnections(x)) => { - let locate = get_inst_port_last_locate(x); - if locate.is_none() { - (0, 0, 0, 0) - } else { - ( - locate.unwrap().0.line, get_column_by_offset(&content, locate.unwrap().0.offset) as u32, - locate.unwrap().1.line, (get_column_by_offset(&content, locate.unwrap().1.offset) + locate.unwrap().1.len) as u32 - ) - } - } - _ => (0, 0, 0, 0) - }; - - hdlparam.add_instance(name, inst_type, line, character, end_character, - param_start_line, param_start_character, param_end_line, param_end_character, - port_start_line, port_start_character, port_end_line, port_end_character - ); - } - RefNode::GateInstantiation(x) => { - let id = unwrap_node!(x, GateInstantiation).unwrap(); - let id = get_identifier(id).unwrap(); - let inst_type = syntax_tree.get_str(&id).unwrap(); - - let id = unwrap_node!(x, NInputGateInstance, NOutputGateInstance).unwrap(); - let id = get_identifier(id).unwrap(); - let name = syntax_tree.get_str(&id).unwrap(); - let (line, character) = (id.line, get_column_by_offset(&content, id.offset) as u32); - let end_character = character + id.len as u32; - - let locate = get_gateinst_port_locate(x); - let (param_start_line, param_start_character, param_end_line, param_end_character) = (0, 0, 0, 0); - let (port_start_line, port_start_character, port_end_line, port_end_character) = if locate.is_none() { - (0,0,0,0) - } else { - ( - locate.unwrap().0.line, get_column_by_offset(&content, locate.unwrap().0.offset) as u32, - locate.unwrap().1.line, (get_column_by_offset(&content, locate.unwrap().1.offset) + locate.unwrap().1.len) as u32 - ) - }; - - hdlparam.add_instance(name, inst_type, line, character, end_character, - param_start_line, param_start_character, param_end_line, param_end_character, - port_start_line, port_start_character, port_end_line, port_end_character - ); - } - _ => () + hdlparam.add_port(name, ansi_port_last_dir, net_type, width.as_str(), start_line, start_character, end_line, end_character); } + RefNode::ModuleInstantiation(x) => { + let id = unwrap_node!(x, ModuleIdentifier).unwrap(); + let id = get_identifier(id).unwrap(); + let inst_type = syntax_tree.get_str(&id).unwrap(); + + let id = unwrap_node!(x, HierarchicalInstance).unwrap(); + let id = get_identifier(id).unwrap(); + let name = syntax_tree.get_str(&id).unwrap(); + let (line, character) = (id.line, get_column_by_offset(&content, id.offset) as u32); + let end_character = character + id.len as u32; + + let (param_start_line, param_start_character, + param_end_line, param_end_character) = match unwrap_node!(x, ListOfParameterAssignments) { + Some(RefNode::ListOfParameterAssignments(x)) => { + let locate = get_inst_param_last_locate(x); + if locate.is_none() { + (0, 0, 0, 0) + } else { + ( + locate.unwrap().0.line, get_column_by_offset(&content, locate.unwrap().0.offset) as u32, + locate.unwrap().1.line, (get_column_by_offset(&content, locate.unwrap().1.offset) + locate.unwrap().1.len) as u32 + ) + } + } + _ => (0, 0, 0, 0) + }; + + let (port_start_line, port_start_character, + port_end_line, port_end_character) = match unwrap_node!(x, ListOfPortConnections) { + Some(RefNode::ListOfPortConnections(x)) => { + let locate = get_inst_port_last_locate(x); + if locate.is_none() { + (0, 0, 0, 0) + } else { + ( + locate.unwrap().0.line, get_column_by_offset(&content, locate.unwrap().0.offset) as u32, + locate.unwrap().1.line, (get_column_by_offset(&content, locate.unwrap().1.offset) + locate.unwrap().1.len) as u32 + ) + } + } + _ => (0, 0, 0, 0) + }; + + 
hdlparam.add_instance(name, inst_type, line, character, end_character,
+                        param_start_line, param_start_character, param_end_line, param_end_character,
+                        port_start_line, port_start_character, port_end_line, port_end_character
+                    );
+                }
+                RefNode::GateInstantiation(x) => {
+                    let id = unwrap_node!(x, GateInstantiation).unwrap();
+                    let id = get_identifier(id).unwrap();
+                    let inst_type = syntax_tree.get_str(&id).unwrap();
+
+                    let id = unwrap_node!(x, NInputGateInstance, NOutputGateInstance).unwrap();
+                    let id = get_identifier(id).unwrap();
+                    let name = syntax_tree.get_str(&id).unwrap();
+                    let (line, character) = (id.line, get_column_by_offset(&content, id.offset) as u32);
+                    let end_character = character + id.len as u32;
+
+                    let locate = get_gateinst_port_locate(x);
+                    let (param_start_line, param_start_character, param_end_line, param_end_character) = (0, 0, 0, 0);
+                    let (port_start_line, port_start_character, port_end_line, port_end_character) = if locate.is_none() {
+                        (0,0,0,0)
+                    } else {
+                        (
+                            locate.unwrap().0.line, get_column_by_offset(&content, locate.unwrap().0.offset) as u32,
+                            locate.unwrap().1.line, (get_column_by_offset(&content, locate.unwrap().1.offset) + locate.unwrap().1.len) as u32
+                        )
+                    };
+
+                    hdlparam.add_instance(name, inst_type, line, character, end_character,
+                        param_start_line, param_start_character, param_end_line, param_end_character,
+                        port_start_line, port_start_character, port_end_line, port_end_character
+                    );
+                }
+                _ => ()
             }
-    } else {
-        println!("Parse failed");
     }
 
     hdlparam
diff --git a/src/core/vhdl_parser.rs b/src/core/vhdl_parser.rs
index 28b93a8..9ead6f6 100644
--- a/src/core/vhdl_parser.rs
+++ b/src/core/vhdl_parser.rs
@@ -1,7 +1,7 @@
 use std::collections::HashSet;
 use std::path::PathBuf;
 use vhdl_lang::{kind_str, Token, VHDLParser, VHDLStandard};
-use crate::fast_hdlparam::*;
+use super::fast_hdlparam::*;
 
 pub fn vhdl_parser(path: &str) -> FastHdlparam {
     // The path of SystemVerilog source file
diff --git a/src/custom_request.rs b/src/custom_request.rs
index 12435ee..b3ca646 100644
--- a/src/custom_request.rs
+++ b/src/custom_request.rs
@@ -1,12 +1,19 @@
 use std::future;
+use std::path::PathBuf;
 use std::sync::Arc;
+
+use log::info;
+use ropey::Rope;
 use serde::Deserialize;
-use serde_json::{json, Value};
+use serde_json::Value;
 use tower_lsp::jsonrpc::Result;
 use tower_lsp::lsp_types::*;
 
 use crate::core::fast_hdlparam::FastHdlparam;
+use crate::core::sv_parser::make_fast_from_syntaxtree;
+
 use crate::server::Backend;
+use crate::sources::parse;
 
 #[derive(Debug, Deserialize)]
 pub struct CustomRequestParams {
@@ -15,11 +22,6 @@
 }
 
-#[derive(Debug, Deserialize)]
-pub struct CustomResponse {
-    result: serde_json::Value
-}
-
 #[derive(Clone)]
 pub struct CustomRequest;
@@ -32,27 +34,47 @@ impl <'a>tower_lsp::jsonrpc::Method<&'a Arc, (), Result> for Custo
 }
 
 pub fn custom_request() -> Result {
-    // let command = params[0].clone();
-    // let message = String::from("receive command: ") + &command;
-
-    // let response = CustomResponse {
-    //     result: serde_json::Value::String(message),
-    // };
-
     Ok(123)
 }
 
 #[derive(Clone)]
 pub struct DoFastApi;
 
-impl <'a>tower_lsp::jsonrpc::Method<&'a Arc<Backend>, (), Result<FastHdlparam>> for CustomRequest {
+impl <'a>tower_lsp::jsonrpc::Method<&'a Arc<Backend>, (TextDocumentItem, ), Result<FastHdlparam>> for DoFastApi {
    type Future = future::Ready<Result<FastHdlparam>>;
 
-    fn invoke(&self, _server: &'a Arc<Backend>, _params: ()) -> Self::Future {
-        future::ready(do_fast())
+    fn invoke(&self, _server: &'a Arc<Backend>, _params: (TextDocumentItem, )) -> Self::Future {
+        let doc = _params.0;
+        future::ready(do_fast(doc))
     }
 }
 
-pub fn do_fast() -> Result<FastHdlparam> {
+/// Front-end interaction interface: do_fast, takes a file and computes the corresponding fast structure
+pub fn do_fast(doc: TextDocumentItem) -> Result<FastHdlparam> {
+    info!("lsp get doc: {:?}", doc);
+    let uri = doc.uri;
+    let text = Rope::from_str(&doc.text);
+    // fast parsing does not need include paths
+    let includes: Vec<PathBuf> = Vec::new();
+
+    let parse_result = parse(
+        &text,
+        &uri,
+        &None,
+        &includes
+    );
+
+    if let Some(syntax_tree) = parse_result {
+        let hdlparam = make_fast_from_syntaxtree(syntax_tree);
+        return Ok(hdlparam);
+    }
+
+    let api_error = tower_lsp::jsonrpc::Error {
+        code: tower_lsp::jsonrpc::ErrorCode::ParseError,
+        message: std::borrow::Cow::Owned("message".to_string()),
+        data: None
+    };
+
+    Err(api_error)
 }
\ No newline at end of file
diff --git a/src/definition.rs b/src/definition.rs
index 4d08090..f6196d2 100644
--- a/src/definition.rs
+++ b/src/definition.rs
@@ -1,7 +1,7 @@
 use crate::definition::extract_defs::get_ident;
 use crate::server::LSPServer;
 use crate::sources::LSPSupport;
-use log::{debug, info, log, trace};
+
 use ropey::{Rope, RopeSlice};
 use sv_parser::*;
 use tower_lsp::lsp_types::*;
@@ -396,12 +396,8 @@ mod tests {
         let doc = Rope::from_str(&text);
         let url = Url::parse("file:///test_data/definition_test.sv").unwrap();
         let syntax_tree = parse(&doc, &url, &None, &Vec::new()).unwrap();
-        trace!("{}", &syntax_tree);
         let scope_tree = get_scopes(&syntax_tree, &url).unwrap();
-        trace!("{:#?}", &scope_tree);
-        for def in &scope_tree.defs {
-            trace!("{:?} {:?}", def, doc.byte_to_pos(def.byte_idx()));
-        }
+
         let token = get_definition_token(doc.line(3), Position::new(3, 13));
         for def in scope_tree.defs {
             if token == def.ident() {
diff --git a/src/definition/def_types.rs b/src/definition/def_types.rs
index 4a878d5..b7eb7c1 100644
--- a/src/definition/def_types.rs
+++ b/src/definition/def_types.rs
@@ -1,5 +1,4 @@
 use crate::sources::LSPSupport;
-use log::{info, trace};
 use ropey::Rope;
 use tower_lsp::lsp_types::*;
 
@@ -147,18 +146,9 @@ pub trait Scope: std::fmt::Debug + Definition + Sync + Send {
         url: &Url,
         scope_tree: &GenericScope,
     ) -> Vec<CompletionItem> {
-        trace!("dot entering: {}, token: {}", self.ident(), token);
-        trace!("{:?}", self.scopes());
         // first we need to go down the scope tree, to the scope the user is invoking a completion
         // in
         for scope in self.scopes() {
-            trace!(
-                "{}, {}, {}, {}",
-                scope.ident(),
-                byte_idx,
-                scope.start(),
-                scope.end()
-            );
             if &scope.url() == url && scope.start() <= byte_idx && byte_idx <= scope.end() {
                 eprintln!("checking dot completion: {}", scope.ident());
                 let result = scope.get_dot_completion(token, byte_idx, url, scope_tree);
@@ -171,15 +161,12 @@ pub trait Scope: std::fmt::Debug + Definition + Sync + Send {
         // we proceed back upwards through the scope tree, and if a definition matches our token,
         // we invoke dot completion on that definition and pass it the syntax tree
         for def in self.defs() {
-            trace!("def: {:?}", def);
             if def.starts_with(token) {
-                trace!("complete def: {:?}", def);
                 return def.dot_completion(scope_tree);
             }
         }
         for scope in self.scopes() {
             if scope.starts_with(token) {
-                trace!("found dot-completion scope: {}", scope.ident());
                 return scope.dot_completion(scope_tree);
             }
         }
diff --git a/src/definition/extract_defs.rs b/src/definition/extract_defs.rs
index 0c3c92d..bc68377 100644
--- a/src/definition/extract_defs.rs
+++ b/src/definition/extract_defs.rs
@@ -2,7 +2,6 @@ use crate::definition::def_types::*;
 use crate::definition::match_definitions;
 use sv_parser::*;
 use tower_lsp::lsp_types::*;
-use log::{debug, info, log, trace};
 
 pub fn
get_ident(tree: &SyntaxTree, node: RefNode) -> (String, usize) { let loc = unwrap_locate!(node).unwrap(); diff --git a/src/diagnostics.rs b/src/diagnostics.rs index 73a32de..4c452b8 100644 --- a/src/diagnostics.rs +++ b/src/diagnostics.rs @@ -6,58 +6,7 @@ use std::process::{Command, Stdio}; use tower_lsp::lsp_types::*; use walkdir::DirEntry; -#[cfg(feature = "slang")] -pub fn get_diagnostics( - uri: Url, - rope: &Rope, - files: Vec, - conf: &ProjectConfig, -) -> PublishDiagnosticsParams { - if !(cfg!(test) && (uri.to_string().starts_with("file:///test"))) { - let paths = get_paths(files, conf.auto_search_workdir); - let mut diagnostics = { - if conf.verilator.syntax.enabled { - if let Ok(path) = uri.to_file_path() { - match verilator_syntax( - rope, - path, - &conf.verilator.syntax.path, - &conf.verilator.syntax.args, - ) { - Some(diags) => diags, - None => Vec::new(), - } - } else { - Vec::new() - } - } else if conf.verible.syntax.enabled { - match verible_syntax(rope, &conf.verible.syntax.path, &conf.verible.syntax.args) { - Some(diags) => diags, - None => Vec::new(), - } - } else { - Vec::new() - } - }; - diagnostics.append(&mut parse_report( - uri.clone(), - slang_compile(paths).unwrap(), - )); - PublishDiagnosticsParams { - uri, - diagnostics, - version: None, - } - } else { - PublishDiagnosticsParams { - uri, - diagnostics: Vec::new(), - version: None, - } - } -} -#[cfg(not(feature = "slang"))] pub fn get_diagnostics( uri: Url, rope: &Rope, @@ -103,53 +52,6 @@ pub fn get_diagnostics( } } -/// recursively find source file paths from working directory -/// and open files -#[cfg(feature = "slang")] -fn get_paths(files: Vec, search_workdir: bool) -> Vec { - // check recursively from working dir for source files - let mut paths: Vec = Vec::new(); - if search_workdir { - let walker = WalkDir::new(".").into_iter(); - for entry in walker.filter_entry(|e| !is_hidden(e)) { - let entry = entry.unwrap(); - if entry.file_type().is_file() { - let extension = entry.path().extension().unwrap(); - - if extension == "sv" || extension == "svh" || extension == "v" || extension == "vh" - { - paths.push(entry.path().to_path_buf()); - } - } - } - } - - // check recursively from opened files for source files - for file in files { - if let Ok(path) = file.to_file_path() { - if !paths.contains(&path) { - let walker = WalkDir::new(path.parent().unwrap()).into_iter(); - for entry in walker.filter_entry(|e| !is_hidden(e)).flatten() { - if entry.file_type().is_file() && entry.path().extension().is_some() { - let extension = entry.path().extension().unwrap(); - - if extension == "sv" - || extension == "svh" - || extension == "v" - || extension == "vh" - { - let entry_path = entry.path().to_path_buf(); - if !paths.contains(&entry_path) { - paths.push(entry_path); - } - } - } - } - } - } - } - paths -} pub fn is_hidden(entry: &DirEntry) -> bool { entry @@ -159,47 +61,6 @@ pub fn is_hidden(entry: &DirEntry) -> bool { .unwrap_or(false) } -#[cfg(feature = "slang")] -/// parse a report from slang -fn parse_report(uri: Url, report: String) -> Vec { - let mut diagnostics: Vec = Vec::new(); - for line in report.lines() { - let diag: Vec<&str> = line.splitn(5, ':').collect(); - if absolute_path(diag.first().unwrap()) == uri.to_file_path().unwrap().as_os_str() { - let pos = Position::new( - diag.get(1).unwrap().parse::().unwrap() - 1, - diag.get(2).unwrap().parse::().unwrap() - 1, - ); - diagnostics.push(Diagnostic::new( - Range::new(pos, pos), - slang_severity(diag.get(3).unwrap()), - None, - Some("slang".to_owned()), - 
(*diag.get(4).unwrap()).to_owned(), - None, - None, - )) - } - } - diagnostics -} - -#[cfg(feature = "slang")] -fn slang_severity(severity: &str) -> Option { - match severity { - " error" => Some(DiagnosticSeverity::ERROR), - " warning" => Some(DiagnosticSeverity::WARNING), - " note" => Some(DiagnosticSeverity::INFORMATION), - _ => None, - } -} - -#[cfg(feature = "slang")] -// convert relative path to absolute -fn absolute_path(path_str: &str) -> PathBuf { - let path = Path::new(path_str); - current_dir().unwrap().join(path).clean() -} /// convert captured severity string to DiagnosticSeverity fn verilator_severity(severity: &str) -> Option { @@ -347,35 +208,6 @@ mod tests { use std::io::Write; use tempdir::TempDir; - #[test] - #[cfg(feature = "slang")] - fn test_diagnostics() { - test_init(); - let uri = Url::from_file_path(absolute_path("test_data/diag/diag_test.sv")).unwrap(); - let expected = PublishDiagnosticsParams::new( - uri.clone(), - vec![Diagnostic::new( - Range::new(Position::new(3, 13), Position::new(3, 13)), - Some(DiagnosticSeverity::WARNING), - None, - Some("slang".to_owned()), - " cannot refer to element 2 of \'logic[1:0]\' [-Windex-oob]".to_owned(), - None, - None, - )], - None, - ); - let diag = get_diagnostics( - uri.clone(), - &Rope::default(), - vec![uri], - &ProjectConfig::default(), - ); - assert_eq!(diag.uri, expected.uri); - assert_eq!(diag.version, expected.version); - assert_eq!(diag.diagnostics.last(), expected.diagnostics.last()); - } - #[test] fn test_unsaved_file() { test_init(); diff --git a/src/lib.rs b/src/lib.rs index 7d6ab7b..18a1fed 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,5 +1,6 @@ #![recursion_limit = "256"] +pub mod core; pub mod completion; pub mod definition; pub mod diagnostics; @@ -8,4 +9,3 @@ pub mod server; pub mod sources; pub mod support; pub mod custom_request; -pub mod core; \ No newline at end of file diff --git a/src/server.rs b/src/server.rs index 6226efe..0ae76aa 100644 --- a/src/server.rs +++ b/src/server.rs @@ -4,7 +4,6 @@ use flexi_logger::LoggerHandle; use log::{debug, info, warn}; use path_clean::PathClean; use serde::{Deserialize, Serialize}; -use std::collections::HashSet; use std::env::current_dir; use std::fs::File; use std::io::Read; @@ -40,8 +39,7 @@ impl LSPServer { pub struct Backend { client: Client, - server: LSPServer, - watched_files: HashSet + server: LSPServer } impl Backend { @@ -49,7 +47,6 @@ impl Backend { Backend { client, server: LSPServer::new(Some(log_handle)), - watched_files: HashSet::new() } } } @@ -240,9 +237,7 @@ impl LanguageServer for Backend { } let mut conf = self.server.conf.write().unwrap(); conf.verible.syntax.enabled = which(&conf.verible.syntax.path).is_ok(); - if cfg!(feature = "slang") { - info!("enabled linting with slang"); - } + if conf.verilator.syntax.enabled { info!("enabled linting with verilator") } else if conf.verible.syntax.enabled { @@ -333,22 +328,6 @@ impl LanguageServer for Backend { .await; } - async fn did_change_watched_files(&self, params: DidChangeWatchedFilesParams) { - // for change in params.changes { - // let file_uri = change.uri.to_string(); - // match change.typ { - // FileChangeType::CREATED | FileChangeType::CHANGED => { - // self.watched_files.insert(file_uri.clone()); - // self.client.log_message(MessageType::INFO, format!("File added/changed: {}", file_uri)).await; - // } - // FileChangeType::DELETED => { - // self.watched_files.remove(&file_uri); - // self.client.log_message(MessageType::INFO, format!("File deleted: {}", file_uri)).await; - // } - // } - 
// }
-    }
-
     async fn completion(&self, params: CompletionParams) -> Result<Option<CompletionResponse>> {
         Ok(self.server.completion(params))
diff --git a/src/sources.rs b/src/sources.rs
index ed9300d..b1aaf65 100644
--- a/src/sources.rs
+++ b/src/sources.rs
@@ -2,7 +2,7 @@ use crate::definition::def_types::*;
 use crate::definition::get_scopes;
 use crate::diagnostics::{get_diagnostics, is_hidden};
 use crate::server::LSPServer;
-use log::{debug, error, trace};
+use log::{debug, error};
 use pathdiff::diff_paths;
 use ropey::{Rope, RopeSlice};
 use std::cmp::min;
diff --git a/sv-parser b/sv-parser
new file mode 160000
index 0000000..af11f5f
--- /dev/null
+++ b/sv-parser
@@ -0,0 +1 @@
+Subproject commit af11f5ff1ef091562d2b17cdf4de3614aedf2286
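
Note (not part of the diff): the reworked sv_parser() entry point in src/core/sv_parser.rs now returns Option<FastHdlparam> instead of assuming the parse succeeds, so callers must handle the None case. A minimal in-crate sketch of a caller; the helper name, the printed summary, and the path argument are made up, while sv_parser and the public fast vector come from this diff:

    use crate::core::sv_parser::sv_parser;

    /// Hypothetical caller: parse one file and report how many modules landed in the FAST.
    fn print_fast_summary(path: &str) {
        match sv_parser(path) {
            // `fast` is the Vec that new_module/add_parameter/add_port/add_instance fill in.
            Some(hdlparam) => println!("{}: {} module(s) extracted", path, hdlparam.fast.len()),
            // A parse failure is now an ordinary value rather than a "Parse failed" println.
            None => eprintln!("{}: sv-parser could not build a syntax tree", path),
        }
    }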
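Note (not part of the diff): do_fast in src/custom_request.rs builds the FAST from the buffer contents of the TextDocumentItem sent by the client, going through sources::parse and the new make_fast_from_syntaxtree(SyntaxTree) entry point, and maps a failed parse to a JSON-RPC ParseError. A rough sketch of the same flow without the JSON-RPC wrapping; the helper name and signature are hypothetical, and it assumes sources::parse returns Option<SyntaxTree> exactly as do_fast uses it above:

    use std::path::PathBuf;

    use ropey::Rope;
    use tower_lsp::lsp_types::Url;

    use crate::core::fast_hdlparam::FastHdlparam;
    use crate::core::sv_parser::make_fast_from_syntaxtree;
    use crate::sources::parse;

    /// Hypothetical helper: build a FAST straight from in-memory text, mirroring do_fast.
    fn fast_from_buffer(uri: &Url, text: &str) -> Option<FastHdlparam> {
        let text = Rope::from_str(text);
        // FAST extraction does not need include paths (same as do_fast above).
        let includes: Vec<PathBuf> = Vec::new();
        // parse returns the sv-parser SyntaxTree for the buffer, or None on a parse error.
        parse(&text, uri, &None, &includes).map(make_fast_from_syntaxtree)
    }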