锦恢 2024-09-21 22:42:00 +08:00
parent c84a77dd36
commit 74464e9293
16 changed files with 261 additions and 469 deletions

Cargo.lock generated
View File

@@ -1055,8 +1055,6 @@ dependencies = [
 [[package]]
 name = "sv-parser"
 version = "0.13.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "425309b8d5bbd38fe592ba1a19e91ccdaab66e1ef312eb0ce0b8d6e4295ef4cb"
 dependencies = [
  "nom",
  "nom-greedyerror",
@@ -1069,8 +1067,6 @@ dependencies = [
 [[package]]
 name = "sv-parser-error"
 version = "0.13.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d702ef215611db3c9cbd57c39a543fc72924eff9defb2b5e0461617929ef1a86"
 dependencies = [
  "thiserror",
 ]
@@ -1078,8 +1074,6 @@ dependencies = [
 [[package]]
 name = "sv-parser-macros"
 version = "0.13.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6a2eba85c18b26226dee76a6f07a1cb189130012a32109474ae182666b4e9b00"
 dependencies = [
  "quote",
  "syn 2.0.28",
@@ -1088,8 +1082,6 @@ dependencies = [
 [[package]]
 name = "sv-parser-parser"
 version = "0.13.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1ccb13fe101bdf64fa533121759d96f75b4e3038c03320fad4cd697063f9e101"
 dependencies = [
  "nom",
  "nom-greedyerror",
@@ -1105,8 +1097,6 @@ dependencies = [
 [[package]]
 name = "sv-parser-pp"
 version = "0.13.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9d580999e597ef78b966343276e3a948b2d99348dbc58ddc1e9a596c3a7a8a10"
 dependencies = [
  "nom",
  "nom-greedyerror",
@@ -1118,8 +1108,6 @@ dependencies = [
 [[package]]
 name = "sv-parser-syntaxtree"
 version = "0.13.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "75394b4b48cf789e5eb148f4a47f82c119424166572e13e3dee5394fc1da7ad0"
 dependencies = [
  "regex",
  "sv-parser-macros",

View File

@@ -5,7 +5,7 @@ authors = ["LSTM-Kirigaya <1193466151@qq.com>"]
 edition = "2018"
 [dependencies]
-sv-parser = "0.13.3"
+sv-parser = { version = "0.13.3", path = "sv-parser/sv-parser"}
 log = "0.4.19"
 tower-lsp = "0.20.0"
 flexi_logger = "0.29.0"

View File

@@ -1,24 +1,17 @@
 use crate::server::LSPServer;
 use crate::sources::LSPSupport;
-use log::{debug, info, log, trace};
 use ropey::{Rope, RopeSlice};
-use std::time::Instant;
 use tower_lsp::lsp_types::*;
 pub mod keyword;
 impl LSPServer {
     pub fn completion(&self, params: CompletionParams) -> Option<CompletionResponse> {
-        debug!("completion requested");
-        trace!("{:#?}", &params);
-        let now = Instant::now();
         let doc = params.text_document_position;
         let file_id = self.srcs.get_id(&doc.text_document.uri).to_owned();
         self.srcs.wait_parse_ready(file_id, false);
-        trace!("comp wait parse: {}", now.elapsed().as_millis());
         let file = self.srcs.get_file(file_id)?;
         let file = file.read().ok()?;
-        trace!("comp read: {}", now.elapsed().as_millis());
         let token = get_completion_token(
             &file.text,
             file.text.line(doc.position.line as usize),
@@ -30,10 +23,6 @@ impl LSPServer {
         let response = match params.context {
             Some(context) => match context.trigger_kind {
                 CompletionTriggerKind::TRIGGER_CHARACTER => {
-                    debug!(
-                        "trigger char completion: {}",
-                        context.trigger_character.clone()?.as_str()
-                    );
                     match context.trigger_character?.as_str() {
                         "." => Some(self.srcs.get_dot_completions(
                             token.trim_end_matches('.'),
@@ -53,7 +42,6 @@ impl LSPServer {
                 }
                 CompletionTriggerKind::TRIGGER_FOR_INCOMPLETE_COMPLETIONS => None,
                 CompletionTriggerKind::INVOKED => {
-                    debug!("Invoked Completion");
                     let mut comps = self.srcs.get_completions(
                         &token,
                         file.text.pos_to_byte(&doc.position),

View File

@@ -1 +1,7 @@
 pub mod fast_hdlparam;
+// pub use fast_hdlparam::*;
+pub mod sv_parser;
+// pub use sv_parser::*;
+// pub mod vhdl_parser;

View File

@@ -1,2 +0,0 @@
-use std::collections::HashMap;

View File

@@ -1,9 +1,9 @@
 use std::collections::HashMap;
 use std::path::PathBuf;
 use sv_parser::{parse_sv, unwrap_node, ConstantMintypmaxExpression, GateInstantiation, ListOfParameterAssignments, ListOfPortConnections, Locate, PackedDimensionRange, RefNode, SyntaxTree};
-use crate::core::fast_hdlparam::FastHdlparam;
+use super::fast_hdlparam::FastHdlparam;
-pub fn sv_parser(path: &str) -> FastHdlparam {
+pub fn sv_parser(path: &str) -> Option<FastHdlparam> {
     // The path of SystemVerilog source file
     let path = PathBuf::from(path);
     // The list of defined macros
@@ -11,23 +11,22 @@ pub fn sv_parser(path: &str) -> FastHdlparam {
     // The list of include paths
     let includes: Vec<PathBuf> = Vec::new();
-    // Parse
-    let result: Result<(SyntaxTree, HashMap<String, Option<sv_parser::Define>>), sv_parser::Error> = parse_sv(&path, &defines, &includes, false, true);
-    let hdlparam = make_fast_from_syntaxtree(result);
-    hdlparam
+    let result = parse_sv(&path, &defines, &includes, false, true);
+    if let Ok((syntax_tree, _)) = result {
+        let hdlparam = make_fast_from_syntaxtree(syntax_tree);
+        return Some(hdlparam);
+    }
+    None
 }
-pub fn make_fast_from_syntaxtree(result: Result<(SyntaxTree, HashMap<String, Option<sv_parser::Define>>), sv_parser::Error>) -> FastHdlparam {
+pub fn make_fast_from_syntaxtree(syntax_tree: SyntaxTree) -> FastHdlparam {
     let mut hdlparam = FastHdlparam {
         fast: Vec::new()
     };
     let mut nonansi_port_locate = HashMap::new();
     let mut ansi_port_last_dir = "";
-    if let Ok((syntax_tree, _)) = result {
-        let content = syntax_tree.text.text().split('\n')
-            .map(|s| s.to_string())
-            .collect::<Vec<String>>();
+    let content = syntax_tree.text.text().split('\n')
+        .map(|s| s.to_string())
+        .collect::<Vec<String>>();
@@ -239,9 +238,6 @@ pub fn make_fast_from_syntaxtree(result: Result<(SyntaxTree, HashMap<String, Opt
             _ => ()
         }
     }
-    } else {
-        println!("Parse failed");
-    }
     hdlparam
 }
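For reference, a minimal usage sketch (not part of this commit) of the refactored entry point: sv_parser now returns Option<FastHdlparam> and yields None when parse_sv fails, instead of printing "Parse failed" and handing back an empty FastHdlparam. The module paths follow the new core layout above; the caller name try_load_fast is hypothetical, and it assumes the fast field stays readable inside the crate.

use crate::core::fast_hdlparam::FastHdlparam;
use crate::core::sv_parser::sv_parser;

// Hypothetical caller: attempt to build the fast structure for one file.
fn try_load_fast(path: &str) -> Option<FastHdlparam> {
    match sv_parser(path) {
        Some(hdlparam) => {
            // `fast` is the Vec populated by make_fast_from_syntaxtree.
            println!("parsed {} item(s) from {}", hdlparam.fast.len(), path);
            Some(hdlparam)
        }
        None => {
            eprintln!("failed to parse {}", path);
            None
        }
    }
}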

View File

@@ -1,7 +1,7 @@
 use std::collections::HashSet;
 use std::path::PathBuf;
 use vhdl_lang::{kind_str, Token, VHDLParser, VHDLStandard};
-use crate::fast_hdlparam::*;
+use super::fast_hdlparam::*;
 pub fn vhdl_parser(path: &str) -> FastHdlparam {
     // The path of SystemVerilog source file

View File

@@ -1,12 +1,19 @@
 use std::future;
+use std::path::PathBuf;
 use std::sync::Arc;
+use log::info;
+use ropey::Rope;
 use serde::Deserialize;
-use serde_json::{json, Value};
+use serde_json::Value;
 use tower_lsp::jsonrpc::Result;
 use tower_lsp::lsp_types::*;
 use crate::core::fast_hdlparam::FastHdlparam;
+use crate::core::sv_parser::make_fast_from_syntaxtree;
 use crate::server::Backend;
+use crate::sources::parse;
 #[derive(Debug, Deserialize)]
 pub struct CustomRequestParams {
@@ -15,11 +22,6 @@ pub struct CustomRequestParams {
 }
-#[derive(Debug, Deserialize)]
-pub struct CustomResponse {
-    result: serde_json::Value
-}
 #[derive(Clone)]
 pub struct CustomRequest;
@@ -32,27 +34,47 @@ impl <'a>tower_lsp::jsonrpc::Method<&'a Arc<Backend>, (), Result<i32>> for Custo
 }
 pub fn custom_request() -> Result<i32> {
-    // let command = params[0].clone();
-    // let message = String::from("receive command: ") + &command;
-    // let response = CustomResponse {
-    //     result: serde_json::Value::String(message),
-    // };
     Ok(123)
 }
 #[derive(Clone)]
 pub struct DoFastApi;
-impl <'a>tower_lsp::jsonrpc::Method<&'a Arc<Backend>, (), Result<FastHdlparam>> for CustomRequest {
+impl <'a>tower_lsp::jsonrpc::Method<&'a Arc<Backend>, (TextDocumentItem, ), Result<FastHdlparam>> for DoFastApi {
     type Future = future::Ready<Result<FastHdlparam>>;
-    fn invoke(&self, _server: &'a Arc<Backend>, _params: ()) -> Self::Future {
-        future::ready(do_fast())
+    fn invoke(&self, _server: &'a Arc<Backend>, _params: (TextDocumentItem, )) -> Self::Future {
+        let doc = _params.0;
+        future::ready(do_fast(doc))
     }
 }
-pub fn do_fast() -> Result<FastHdlparam> {
+/// Front-end interaction API: do_fast takes an input file and computes the corresponding fast structure
+pub fn do_fast(doc: TextDocumentItem) -> Result<FastHdlparam> {
+    info!("lsp get doc: {:?}", doc);
+    let uri = doc.uri;
+    let text = Rope::from_str(&doc.text);
+    // fast parsing does not need include paths
+    let includes: Vec<PathBuf> = Vec::new();
+    let parse_result = parse(
+        &text,
+        &uri,
+        &None,
+        &includes
+    );
+    if let Some(syntax_tree) = parse_result {
+        let hdlparam = make_fast_from_syntaxtree(syntax_tree);
+        return Ok(hdlparam);
+    }
+    let api_error = tower_lsp::jsonrpc::Error {
+        code: tower_lsp::jsonrpc::ErrorCode::ParseError,
+        message: std::borrow::Cow::Owned("message".to_string()),
+        data: None
+    };
+    Err(api_error)
 }
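For reference, a minimal sketch (not part of this commit) of driving the new handler directly, the way the DoFastApi Method impl above does after unpacking its (TextDocumentItem, ) params; in the running server the call arrives as a custom JSON-RPC request. The file URI and the SystemVerilog snippet are invented for illustration, and the example assumes FastHdlparam's fast field is readable from inside the crate.

use tower_lsp::lsp_types::{TextDocumentItem, Url};

// Hypothetical direct call (e.g. from a test): build the document the way a
// client would and hand it to do_fast.
fn demo_do_fast() {
    let doc = TextDocumentItem {
        uri: Url::parse("file:///tmp/adder.sv").unwrap(),
        language_id: "systemverilog".to_string(),
        version: 1,
        text: "module adder(input a, input b, output c); assign c = a & b; endmodule".to_string(),
    };
    match do_fast(doc) {
        Ok(hdlparam) => println!("fast contains {} item(s)", hdlparam.fast.len()),
        Err(err) => eprintln!("do_fast failed: {:?}", err),
    }
}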

View File

@@ -1,7 +1,7 @@
 use crate::definition::extract_defs::get_ident;
 use crate::server::LSPServer;
 use crate::sources::LSPSupport;
-use log::{debug, info, log, trace};
 use ropey::{Rope, RopeSlice};
 use sv_parser::*;
 use tower_lsp::lsp_types::*;
@@ -396,12 +396,8 @@ mod tests {
         let doc = Rope::from_str(&text);
         let url = Url::parse("file:///test_data/definition_test.sv").unwrap();
         let syntax_tree = parse(&doc, &url, &None, &Vec::new()).unwrap();
-        trace!("{}", &syntax_tree);
         let scope_tree = get_scopes(&syntax_tree, &url).unwrap();
-        trace!("{:#?}", &scope_tree);
-        for def in &scope_tree.defs {
-            trace!("{:?} {:?}", def, doc.byte_to_pos(def.byte_idx()));
-        }
         let token = get_definition_token(doc.line(3), Position::new(3, 13));
         for def in scope_tree.defs {
             if token == def.ident() {

View File

@@ -1,5 +1,4 @@
 use crate::sources::LSPSupport;
-use log::{info, trace};
 use ropey::Rope;
 use tower_lsp::lsp_types::*;
@@ -147,18 +146,9 @@ pub trait Scope: std::fmt::Debug + Definition + Sync + Send {
         url: &Url,
         scope_tree: &GenericScope,
     ) -> Vec<CompletionItem> {
-        trace!("dot entering: {}, token: {}", self.ident(), token);
-        trace!("{:?}", self.scopes());
         // first we need to go down the scope tree, to the scope the user is invoking a completion
         // in
         for scope in self.scopes() {
-            trace!(
-                "{}, {}, {}, {}",
-                scope.ident(),
-                byte_idx,
-                scope.start(),
-                scope.end()
-            );
             if &scope.url() == url && scope.start() <= byte_idx && byte_idx <= scope.end() {
                 eprintln!("checking dot completion: {}", scope.ident());
                 let result = scope.get_dot_completion(token, byte_idx, url, scope_tree);
@@ -171,15 +161,12 @@ pub trait Scope: std::fmt::Debug + Definition + Sync + Send {
         // we proceed back upwards through the scope tree, and if a definition matches our token,
         // we invoke dot completion on that definition and pass it the syntax tree
         for def in self.defs() {
-            trace!("def: {:?}", def);
             if def.starts_with(token) {
-                trace!("complete def: {:?}", def);
                 return def.dot_completion(scope_tree);
             }
         }
         for scope in self.scopes() {
             if scope.starts_with(token) {
-                trace!("found dot-completion scope: {}", scope.ident());
                 return scope.dot_completion(scope_tree);
             }
         }

View File

@@ -2,7 +2,6 @@ use crate::definition::def_types::*;
 use crate::definition::match_definitions;
 use sv_parser::*;
 use tower_lsp::lsp_types::*;
-use log::{debug, info, log, trace};
 pub fn get_ident(tree: &SyntaxTree, node: RefNode) -> (String, usize) {
     let loc = unwrap_locate!(node).unwrap();

View File

@@ -6,58 +6,7 @@ use std::process::{Command, Stdio};
 use tower_lsp::lsp_types::*;
 use walkdir::DirEntry;
-#[cfg(feature = "slang")]
-pub fn get_diagnostics(
-    uri: Url,
-    rope: &Rope,
-    files: Vec<Url>,
-    conf: &ProjectConfig,
-) -> PublishDiagnosticsParams {
-    if !(cfg!(test) && (uri.to_string().starts_with("file:///test"))) {
-        let paths = get_paths(files, conf.auto_search_workdir);
-        let mut diagnostics = {
-            if conf.verilator.syntax.enabled {
-                if let Ok(path) = uri.to_file_path() {
-                    match verilator_syntax(
-                        rope,
-                        path,
-                        &conf.verilator.syntax.path,
-                        &conf.verilator.syntax.args,
-                    ) {
-                        Some(diags) => diags,
-                        None => Vec::new(),
-                    }
-                } else {
-                    Vec::new()
-                }
-            } else if conf.verible.syntax.enabled {
-                match verible_syntax(rope, &conf.verible.syntax.path, &conf.verible.syntax.args) {
-                    Some(diags) => diags,
-                    None => Vec::new(),
-                }
-            } else {
-                Vec::new()
-            }
-        };
-        diagnostics.append(&mut parse_report(
-            uri.clone(),
-            slang_compile(paths).unwrap(),
-        ));
-        PublishDiagnosticsParams {
-            uri,
-            diagnostics,
-            version: None,
-        }
-    } else {
-        PublishDiagnosticsParams {
-            uri,
-            diagnostics: Vec::new(),
-            version: None,
-        }
-    }
-}
-#[cfg(not(feature = "slang"))]
 pub fn get_diagnostics(
     uri: Url,
     rope: &Rope,
@@ -103,53 +52,6 @@ pub fn get_diagnostics(
     }
 }
-/// recursively find source file paths from working directory
-/// and open files
-#[cfg(feature = "slang")]
-fn get_paths(files: Vec<Url>, search_workdir: bool) -> Vec<PathBuf> {
-    // check recursively from working dir for source files
-    let mut paths: Vec<PathBuf> = Vec::new();
-    if search_workdir {
-        let walker = WalkDir::new(".").into_iter();
-        for entry in walker.filter_entry(|e| !is_hidden(e)) {
-            let entry = entry.unwrap();
-            if entry.file_type().is_file() {
-                let extension = entry.path().extension().unwrap();
-                if extension == "sv" || extension == "svh" || extension == "v" || extension == "vh"
-                {
-                    paths.push(entry.path().to_path_buf());
-                }
-            }
-        }
-    }
-    // check recursively from opened files for source files
-    for file in files {
-        if let Ok(path) = file.to_file_path() {
-            if !paths.contains(&path) {
-                let walker = WalkDir::new(path.parent().unwrap()).into_iter();
-                for entry in walker.filter_entry(|e| !is_hidden(e)).flatten() {
-                    if entry.file_type().is_file() && entry.path().extension().is_some() {
-                        let extension = entry.path().extension().unwrap();
-                        if extension == "sv"
-                            || extension == "svh"
-                            || extension == "v"
-                            || extension == "vh"
-                        {
-                            let entry_path = entry.path().to_path_buf();
-                            if !paths.contains(&entry_path) {
-                                paths.push(entry_path);
-                            }
-                        }
-                    }
-                }
-            }
-        }
-    }
-    paths
-}
 pub fn is_hidden(entry: &DirEntry) -> bool {
     entry
@@ -159,47 +61,6 @@ pub fn is_hidden(entry: &DirEntry) -> bool {
         .unwrap_or(false)
 }
-#[cfg(feature = "slang")]
-/// parse a report from slang
-fn parse_report(uri: Url, report: String) -> Vec<Diagnostic> {
-    let mut diagnostics: Vec<Diagnostic> = Vec::new();
-    for line in report.lines() {
-        let diag: Vec<&str> = line.splitn(5, ':').collect();
-        if absolute_path(diag.first().unwrap()) == uri.to_file_path().unwrap().as_os_str() {
-            let pos = Position::new(
-                diag.get(1).unwrap().parse::<u32>().unwrap() - 1,
-                diag.get(2).unwrap().parse::<u32>().unwrap() - 1,
-            );
-            diagnostics.push(Diagnostic::new(
-                Range::new(pos, pos),
-                slang_severity(diag.get(3).unwrap()),
-                None,
-                Some("slang".to_owned()),
-                (*diag.get(4).unwrap()).to_owned(),
-                None,
-                None,
-            ))
-        }
-    }
-    diagnostics
-}
-#[cfg(feature = "slang")]
-fn slang_severity(severity: &str) -> Option<DiagnosticSeverity> {
-    match severity {
-        " error" => Some(DiagnosticSeverity::ERROR),
-        " warning" => Some(DiagnosticSeverity::WARNING),
-        " note" => Some(DiagnosticSeverity::INFORMATION),
-        _ => None,
-    }
-}
-#[cfg(feature = "slang")]
-// convert relative path to absolute
-fn absolute_path(path_str: &str) -> PathBuf {
-    let path = Path::new(path_str);
-    current_dir().unwrap().join(path).clean()
-}
 /// convert captured severity string to DiagnosticSeverity
 fn verilator_severity(severity: &str) -> Option<DiagnosticSeverity> {
@@ -347,35 +208,6 @@ mod tests {
     use std::io::Write;
     use tempdir::TempDir;
-    #[test]
-    #[cfg(feature = "slang")]
-    fn test_diagnostics() {
-        test_init();
-        let uri = Url::from_file_path(absolute_path("test_data/diag/diag_test.sv")).unwrap();
-        let expected = PublishDiagnosticsParams::new(
-            uri.clone(),
-            vec![Diagnostic::new(
-                Range::new(Position::new(3, 13), Position::new(3, 13)),
-                Some(DiagnosticSeverity::WARNING),
-                None,
-                Some("slang".to_owned()),
-                " cannot refer to element 2 of \'logic[1:0]\' [-Windex-oob]".to_owned(),
-                None,
-                None,
-            )],
-            None,
-        );
-        let diag = get_diagnostics(
-            uri.clone(),
-            &Rope::default(),
-            vec![uri],
-            &ProjectConfig::default(),
-        );
-        assert_eq!(diag.uri, expected.uri);
-        assert_eq!(diag.version, expected.version);
-        assert_eq!(diag.diagnostics.last(), expected.diagnostics.last());
-    }
     #[test]
     fn test_unsaved_file() {
         test_init();

View File

@@ -1,5 +1,6 @@
 #![recursion_limit = "256"]
+pub mod core;
 pub mod completion;
 pub mod definition;
 pub mod diagnostics;
@@ -8,4 +9,3 @@ pub mod server;
 pub mod sources;
 pub mod support;
 pub mod custom_request;
-pub mod core;

View File

@@ -4,7 +4,6 @@ use flexi_logger::LoggerHandle;
 use log::{debug, info, warn};
 use path_clean::PathClean;
 use serde::{Deserialize, Serialize};
-use std::collections::HashSet;
 use std::env::current_dir;
 use std::fs::File;
 use std::io::Read;
@@ -40,8 +39,7 @@ impl LSPServer {
 pub struct Backend {
     client: Client,
-    server: LSPServer,
-    watched_files: HashSet<String>
+    server: LSPServer
 }
 impl Backend {
@@ -49,7 +47,6 @@ impl Backend {
         Backend {
             client,
             server: LSPServer::new(Some(log_handle)),
-            watched_files: HashSet::new()
         }
     }
 }
@@ -240,9 +237,7 @@ impl LanguageServer for Backend {
         }
         let mut conf = self.server.conf.write().unwrap();
         conf.verible.syntax.enabled = which(&conf.verible.syntax.path).is_ok();
-        if cfg!(feature = "slang") {
-            info!("enabled linting with slang");
-        }
         if conf.verilator.syntax.enabled {
             info!("enabled linting with verilator")
         } else if conf.verible.syntax.enabled {
@@ -333,22 +328,6 @@ impl LanguageServer for Backend {
             .await;
     }
-    async fn did_change_watched_files(&self, params: DidChangeWatchedFilesParams) {
-        // for change in params.changes {
-        //     let file_uri = change.uri.to_string();
-        //     match change.typ {
-        //         FileChangeType::CREATED | FileChangeType::CHANGED => {
-        //             self.watched_files.insert(file_uri.clone());
-        //             self.client.log_message(MessageType::INFO, format!("File added/changed: {}", file_uri)).await;
-        //         }
-        //         FileChangeType::DELETED => {
-        //             self.watched_files.remove(&file_uri);
-        //             self.client.log_message(MessageType::INFO, format!("File deleted: {}", file_uri)).await;
-        //         }
-        //     }
-        // }
-    }
     async fn completion(&self, params: CompletionParams) -> Result<Option<CompletionResponse>> {
         Ok(self.server.completion(params))

View File

@@ -2,7 +2,7 @@ use crate::definition::def_types::*;
 use crate::definition::get_scopes;
 use crate::diagnostics::{get_diagnostics, is_hidden};
 use crate::server::LSPServer;
-use log::{debug, error, trace};
+use log::{debug, error};
 use pathdiff::diff_paths;
 use ropey::{Rope, RopeSlice};
 use std::cmp::min;

sv-parser Submodule

@@ -0,0 +1 @@
+Subproject commit af11f5ff1ef091562d2b17cdf4de3614aedf2286