diff --git a/sv-parser-macros/src/lib.rs b/sv-parser-macros/src/lib.rs
index fe8b8c3..d19fead 100644
--- a/sv-parser-macros/src/lib.rs
+++ b/sv-parser-macros/src/lib.rs
@@ -120,7 +120,7 @@ pub fn any_node_derive(input: TokenStream) -> TokenStream {
 }
 
 fn impl_any_node(ast: &DeriveInput) -> TokenStream {
-    let ref data = match ast.data {
+    let data = &match ast.data {
         Enum(ref data) => data,
         _ => unreachable!(),
     };
@@ -170,7 +170,7 @@ pub fn ref_node_derive(input: TokenStream) -> TokenStream {
 }
 
 fn impl_ref_node(ast: &DeriveInput) -> TokenStream {
-    let ref data = match ast.data {
+    let data = &match ast.data {
         Enum(ref data) => data,
         _ => unreachable!(),
     };
diff --git a/sv-parser-parser/src/lib.rs b/sv-parser-parser/src/lib.rs
index 4c2b7b9..b50ff79 100644
--- a/sv-parser-parser/src/lib.rs
+++ b/sv-parser-parser/src/lib.rs
@@ -1,3 +1,5 @@
+#![allow(clippy::many_single_char_names, clippy::module_inception)]
+
 pub mod keywords;
 #[macro_use]
 pub mod utils;
@@ -34,7 +36,6 @@ pub(crate) use nom::combinator::*;
 pub(crate) use nom::error::{context, make_error, ErrorKind};
 pub(crate) use nom::multi::*;
 pub(crate) use nom::sequence::*;
-//pub(crate) use nom::{Err, IResult};
 pub(crate) use nom::Err;
 pub(crate) use nom_greedyerror::GreedyError;
 pub(crate) use nom_packrat::{self, packrat_parser, HasExtraState};
diff --git a/sv-parser-parser/src/utils.rs b/sv-parser-parser/src/utils.rs
index 1710f33..850f09c 100644
--- a/sv-parser-parser/src/utils.rs
+++ b/sv-parser-parser/src/utils.rs
@@ -26,9 +26,7 @@ where
 #[cfg(not(feature = "trace"))]
 pub(crate) fn symbol<'a>(t: &'a str) -> impl Fn(Span<'a>) -> IResult<Span<'a>, Symbol> {
     move |s: Span<'a>| {
-        let (s, x) = map(ws(map(tag(t.clone()), |x: Span| into_locate(x))), |x| {
-            Symbol { nodes: x }
-        })(s)?;
+        let (s, x) = map(ws(map(tag(t), into_locate)), |x| Symbol { nodes: x })(s)?;
         Ok((s, x))
     }
 }
@@ -38,9 +36,7 @@ pub(crate) fn symbol<'a>(t: &'a str) -> impl Fn(Span<'a>) -> IResult<Span<'a>, S
     move |s: Span<'a>| {
         let (depth, s) = nom_tracable::forward_trace(s, &format!("symbol(\"{}\")", t));
         let body = || {
-            let (s, x) = map(ws(map(tag(t.clone()), |x: Span| into_locate(x))), |x| {
-                Symbol { nodes: x }
-            })(s)?;
+            let (s, x) = map(ws(map(tag(t), into_locate)), |x| Symbol { nodes: x })(s)?;
             Ok((s, x))
         };
         let ret = body();
@@ -51,9 +47,7 @@ pub(crate) fn symbol<'a>(t: &'a str) -> impl Fn(Span<'a>) -> IResult<Span<'a>, S
 #[cfg(not(feature = "trace"))]
 pub(crate) fn symbol_exact<'a>(t: &'a str) -> impl Fn(Span<'a>) -> IResult<Span<'a>, Symbol> {
     move |s: Span<'a>| {
-        let (s, x) = map(no_ws(map(tag(t.clone()), into_locate)), |x| Symbol {
-            nodes: x,
-        })(s)?;
+        let (s, x) = map(no_ws(map(tag(t), into_locate)), |x| Symbol { nodes: x })(s)?;
         Ok((s, x))
     }
 }
@@ -63,9 +57,7 @@ pub(crate) fn symbol_exact<'a>(t: &'a str) -> impl Fn(Span<'a>) -> IResult<Span<
     move |s: Span<'a>| {
         let (depth, s) = nom_tracable::forward_trace(s, &format!("symbol(\"{}\")", t));
         let body = || {
-            let (s, x) = map(no_ws(map(tag(t.clone()), into_locate)), |x| Symbol {
-                nodes: x,
-            })(s)?;
+            let (s, x) = map(no_ws(map(tag(t), into_locate)), |x| Symbol { nodes: x })(s)?;
             Ok((s, x))
         };
         let ret = body();
@@ -78,8 +70,8 @@ pub(crate) fn keyword<'a>(t: &'a str) -> impl Fn(Span<'a>) -> IResult<Span<'a>,
     move |s: Span<'a>| {
         let (s, x) = map(
             ws(alt((
-                all_consuming(map(tag(t.clone()), into_locate)),
-                terminated(map(tag(t.clone()), into_locate), peek(none_of(AZ09_))),
+                all_consuming(map(tag(t), into_locate)),
+                terminated(map(tag(t), into_locate), peek(none_of(AZ09_))),
             ))),
             |x| Keyword { nodes: x },
         )(s)?;
@@ -94,8 +86,8 @@ pub(crate) fn keyword<'a>(t: &'a str) -> impl Fn(Span<'a>) -> IResult<Span<'a>,
         let body = || {
             let (s, x) = map(
                 ws(alt((
-                    all_consuming(map(tag(t.clone()), into_locate)),
-                    terminated(map(tag(t.clone()), into_locate), peek(none_of(AZ09_))),
+                    all_consuming(map(tag(t), into_locate)),
+                    terminated(map(tag(t), into_locate), peek(none_of(AZ09_))),
                 ))),
                 |x| Keyword { nodes: x },
             )(s)?;
diff --git a/sv-parser-pp/src/lib.rs b/sv-parser-pp/src/lib.rs
index 9610f16..91bf09b 100644
--- a/sv-parser-pp/src/lib.rs
+++ b/sv-parser-pp/src/lib.rs
@@ -1,2 +1,4 @@
+#![allow(clippy::type_complexity)]
+
 pub mod preprocess;
 pub mod range;
diff --git a/sv-parser-pp/src/preprocess.rs b/sv-parser-pp/src/preprocess.rs
index 0e88d2e..d371b6e 100644
--- a/sv-parser-pp/src/preprocess.rs
+++ b/sv-parser-pp/src/preprocess.rs
@@ -4,6 +4,7 @@ use nom_greedyerror::error_position;
 use std::collections::{BTreeMap, HashMap};
 use std::convert::TryInto;
 use std::fs::File;
+use std::hash::BuildHasher;
 use std::io::{BufReader, Read};
 use std::path::{Path, PathBuf};
 use sv_parser_error::Error;
@@ -114,9 +115,9 @@ impl DefineText {
 
 pub type Defines = HashMap<String, Option<Define>>;
 
-pub fn preprocess<T: AsRef<Path>, U: AsRef<Path>>(
+pub fn preprocess<T: AsRef<Path>, U: AsRef<Path>, V: BuildHasher>(
     path: T,
-    pre_defines: &Defines,
+    pre_defines: &HashMap<String, Option<Define>, V>,
     include_paths: &[U],
     ignore_include: bool,
 ) -> Result<(PreprocessedText, Defines), Error> {
@@ -131,10 +132,10 @@ pub fn preprocess<T: AsRef<Path>, U: AsRef<Path>>(
     preprocess_str(&s, path, pre_defines, include_paths, ignore_include, 0)
 }
 
-pub fn preprocess_str<T: AsRef<Path>, U: AsRef<Path>>(
+pub fn preprocess_str<T: AsRef<Path>, U: AsRef<Path>, V: BuildHasher>(
     s: &str,
     path: T,
-    pre_defines: &Defines,
+    pre_defines: &HashMap<String, Option<Define>, V>,
     include_paths: &[U],
     ignore_include: bool,
     resolve_depth: usize,
@@ -399,14 +400,12 @@ pub fn preprocess_str<T: AsRef<Path>, U: AsRef<Path>>(
                         }
                     }
                 };
-                if path.is_relative() {
-                    if !path.exists() {
-                        for include_path in include_paths {
-                            let new_path = include_path.as_ref().join(&path);
-                            if new_path.exists() {
-                                path = new_path;
-                                break;
-                            }
+                if path.is_relative() && !path.exists() {
+                    for include_path in include_paths {
+                        let new_path = include_path.as_ref().join(&path);
+                        if new_path.exists() {
+                            path = new_path;
+                            break;
+                        }
                         }
                     }
                 }
@@ -609,10 +608,10 @@ fn resolve_text_macro_usage<T: AsRef<Path>, U: AsRef<Path>>(
         } else {
             Ok(None)
         }
-    } else if let Some(_) = define {
+    } else if define.is_some() {
         Ok(None)
     } else {
-        Err(Error::DefineNotFound(String::from(id)))
+        Err(Error::DefineNotFound(id))
     }
 }
 
diff --git a/sv-parser-syntaxtree/src/lib.rs b/sv-parser-syntaxtree/src/lib.rs
index fe8dc18..c0ec115 100644
--- a/sv-parser-syntaxtree/src/lib.rs
+++ b/sv-parser-syntaxtree/src/lib.rs
@@ -1,4 +1,9 @@
 #![recursion_limit = "256"]
+#![allow(
+    clippy::module_inception,
+    clippy::large_enum_variant,
+    clippy::type_complexity
+)]
 
 pub mod any_node;
 pub mod behavioral_statements;
diff --git a/sv-parser/src/lib.rs b/sv-parser/src/lib.rs
index 110313c..449198f 100644
--- a/sv-parser/src/lib.rs
+++ b/sv-parser/src/lib.rs
@@ -4,6 +4,7 @@ use nom::combinator::all_consuming;
 use nom_greedyerror::error_position;
 use std::collections::HashMap;
 use std::fmt;
+use std::hash::BuildHasher;
 use std::path::{Path, PathBuf};
 pub use sv_parser_error::Error;
 use sv_parser_parser::{lib_parser, sv_parser, Span, SpanInfo};
@@ -22,14 +23,11 @@ impl SyntaxTree {
         let mut beg = None;
         let mut end = 0;
         for n in Iter::new(nodes.into()) {
-            match n {
-                RefNode::Locate(x) => {
-                    if beg.is_none() {
-                        beg = Some(x.offset);
-                    }
-                    end = x.offset + x.len;
+            if let RefNode::Locate(x) = n {
+                if beg.is_none() {
+                    beg = Some(x.offset);
                 }
-                _ => (),
+                end = x.offset + x.len;
             }
         }
         if let Some(beg) = beg {
@@ -94,9 +92,9 @@ impl<'a> IntoIterator for &'a SyntaxTree {
     }
 }
 
-pub fn parse_sv<T: AsRef<Path>, U: AsRef<Path>>(
+pub fn parse_sv<T: AsRef<Path>, U: AsRef<Path>, V: BuildHasher>(
     path: T,
-    pre_defines: &HashMap<String, Option<Define>>,
+    pre_defines: &HashMap<String, Option<Define>, V>,
     include_paths: &[U],
     ignore_include: bool,
 ) -> Result<(SyntaxTree, Defines), Error> {
@@ -131,10 +129,10 @@ pub fn parse_sv<T: AsRef<Path>, U: AsRef<Path>>(
     }
 }
 
-pub fn parse_sv_str<T: AsRef<Path>, U: AsRef<Path>>(
+pub fn parse_sv_str<T: AsRef<Path>, U: AsRef<Path>, V: BuildHasher>(
     s: &str,
     path: T,
-    pre_defines: &HashMap<String, Option<Define>>,
+    pre_defines: &HashMap<String, Option<Define>, V>,
     include_paths: &[U],
     ignore_include: bool,
 ) -> Result<(SyntaxTree, Defines), Error> {
@@ -169,9 +167,9 @@ pub fn parse_sv_str<T: AsRef<Path>, U: AsRef<Path>>(
     }
 }
 
-pub fn parse_lib<T: AsRef<Path>, U: AsRef<Path>>(
+pub fn parse_lib<T: AsRef<Path>, U: AsRef<Path>, V: BuildHasher>(
     path: T,
-    pre_defines: &HashMap<String, Option<Define>>,
+    pre_defines: &HashMap<String, Option<Define>, V>,
     include_paths: &[U],
     ignore_include: bool,
 ) -> Result<(SyntaxTree, Defines), Error> {
@@ -206,10 +204,10 @@ pub fn parse_lib<T: AsRef<Path>, U: AsRef<Path>>(
     }
 }
 
-pub fn parse_lib_str<T: AsRef<Path>, U: AsRef<Path>>(
+pub fn parse_lib_str<T: AsRef<Path>, U: AsRef<Path>, V: BuildHasher>(
     s: &str,
     path: T,
-    pre_defines: &HashMap<String, Option<Define>>,
+    pre_defines: &HashMap<String, Option<Define>, V>,
     include_paths: &[U],
     ignore_include: bool,
 ) -> Result<(SyntaxTree, Defines), Error> {
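
Note on the public API change above: `preprocess`, `preprocess_str`, and the four `parse_*` entry points now accept `pre_defines: &HashMap<String, Option<Define>, V>` with `V: BuildHasher` instead of the concrete `Defines` alias, which is clippy's `implicit_hasher` suggestion. The sketch below is a hypothetical call site, not part of the patch; it assumes `parse_sv` and `Defines` are reachable from the `sv_parser` crate root and uses a placeholder file name `top.sv`. It only illustrates that a caller passing a default-hashed `HashMap` still compiles, because `RandomState` implements `BuildHasher`.

use sv_parser::{parse_sv, Defines};

fn main() {
    // `Defines` is a plain std HashMap with the default RandomState hasher,
    // which satisfies the new `V: BuildHasher` bound, so existing callers
    // need no changes.
    let pre_defines = Defines::new();
    let include_paths: Vec<String> = Vec::new();

    // "top.sv" is a placeholder path for illustration only.
    match parse_sv("top.sv", &pre_defines, &include_paths, false) {
        Ok((syntax_tree, _defines)) => {
            // `&SyntaxTree` implements IntoIterator (see the diff above),
            // so the parsed tree can be walked node by node.
            println!("parsed {} nodes", (&syntax_tree).into_iter().count());
        }
        Err(_) => eprintln!("parse failed"),
    }
}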