Apply clippy

parent de2cf60b42
commit 7f24c6b66d
@@ -120,7 +120,7 @@ pub fn any_node_derive(input: TokenStream) -> TokenStream {
 }
 
 fn impl_any_node(ast: &DeriveInput) -> TokenStream {
-    let ref data = match ast.data {
+    let data = &match ast.data {
         Enum(ref data) => data,
         _ => unreachable!(),
     };
@@ -170,7 +170,7 @@ pub fn ref_node_derive(input: TokenStream) -> TokenStream {
 }
 
 fn impl_ref_node(ast: &DeriveInput) -> TokenStream {
-    let ref data = match ast.data {
+    let data = &match ast.data {
         Enum(ref data) => data,
         _ => unreachable!(),
     };
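
Note: both derive helpers receive the same fix. `let ref data = …` is what clippy's toplevel_ref_arg lint flags; the suggested form takes the reference on the right-hand side instead. A minimal standalone sketch of the rewrite (the enum here is illustrative, not the crate's DeriveInput):

    #[allow(dead_code)]
    enum Node {
        Enum(Vec<u32>),
        Leaf(u32),
    }

    fn count(ast: &Node) -> usize {
        // Before (flagged): `let ref data = match *ast { ... };`
        // After, as in this commit: take the reference on the right-hand side.
        let data = &match *ast {
            Node::Enum(ref v) => v,
            _ => unreachable!(),
        };
        data.len()
    }

    fn main() {
        assert_eq!(count(&Node::Enum(vec![1, 2, 3])), 3);
    }
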
@@ -1,3 +1,5 @@
+#![allow(clippy::many_single_char_names, clippy::module_inception)]
+
 pub mod keywords;
 #[macro_use]
 pub mod utils;
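
The crate-level attribute added above opts this whole crate out of two clippy lints rather than touching every site, which is a common choice for grammar-shaped or generated code; similar attributes appear further down for the other crates. The same lints can also be silenced per item if a narrower scope is wanted. An illustrative sketch (the function is not from the crate):

    // Crate-wide (top of lib.rs), as in this commit:
    // #![allow(clippy::many_single_char_names, clippy::module_inception)]

    // Per-item alternative:
    #[allow(clippy::many_single_char_names)]
    fn distance(a: f64, b: f64, c: f64, d: f64) -> f64 {
        ((a - c).powi(2) + (b - d).powi(2)).sqrt()
    }

    fn main() {
        assert_eq!(distance(0.0, 0.0, 3.0, 4.0), 5.0);
    }
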
@@ -34,7 +36,6 @@ pub(crate) use nom::combinator::*;
 pub(crate) use nom::error::{context, make_error, ErrorKind};
 pub(crate) use nom::multi::*;
 pub(crate) use nom::sequence::*;
-//pub(crate) use nom::{Err, IResult};
 pub(crate) use nom::Err;
 pub(crate) use nom_greedyerror::GreedyError;
 pub(crate) use nom_packrat::{self, packrat_parser, HasExtraState};

@@ -26,9 +26,7 @@ where
 #[cfg(not(feature = "trace"))]
 pub(crate) fn symbol<'a>(t: &'a str) -> impl Fn(Span<'a>) -> IResult<Span<'a>, Symbol> {
     move |s: Span<'a>| {
-        let (s, x) = map(ws(map(tag(t.clone()), |x: Span| into_locate(x))), |x| {
-            Symbol { nodes: x }
-        })(s)?;
+        let (s, x) = map(ws(map(tag(t), into_locate)), |x| Symbol { nodes: x })(s)?;
         Ok((s, x))
     }
 }
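
The change in symbol (repeated below for its traced variant, symbol_exact, and keyword) drops a clone of a &str, which is Copy, and an unnecessary closure wrapper; these are most likely clippy's clone_on_copy and redundant_closure lints. A tiny sketch of the same two rewrites outside nom:

    fn into_upper(s: &str) -> String {
        s.to_uppercase()
    }

    fn main() {
        let t: &str = "module";

        // Before (flagged): cloning a Copy reference and wrapping a fn in a closure.
        // let x = Some(t.clone()).map(|s| into_upper(s));
        // After, mirroring the commit:
        let x = Some(t).map(into_upper);

        assert_eq!(x.as_deref(), Some("MODULE"));
    }
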
@@ -38,9 +36,7 @@ pub(crate) fn symbol<'a>(t: &'a str) -> impl Fn(Span<'a>) -> IResult<Span<'a>, S
     move |s: Span<'a>| {
         let (depth, s) = nom_tracable::forward_trace(s, &format!("symbol(\"{}\")", t));
         let body = || {
-            let (s, x) = map(ws(map(tag(t.clone()), |x: Span| into_locate(x))), |x| {
-                Symbol { nodes: x }
-            })(s)?;
+            let (s, x) = map(ws(map(tag(t), into_locate)), |x| Symbol { nodes: x })(s)?;
             Ok((s, x))
         };
         let ret = body();
@@ -51,9 +47,7 @@ pub(crate) fn symbol_exact<'a>(t: &'a str) -> impl Fn(Span<'a>) -> IResult<Span<
 #[cfg(not(feature = "trace"))]
 pub(crate) fn symbol_exact<'a>(t: &'a str) -> impl Fn(Span<'a>) -> IResult<Span<'a>, Symbol> {
     move |s: Span<'a>| {
-        let (s, x) = map(no_ws(map(tag(t.clone()), into_locate)), |x| Symbol {
-            nodes: x,
-        })(s)?;
+        let (s, x) = map(no_ws(map(tag(t), into_locate)), |x| Symbol { nodes: x })(s)?;
         Ok((s, x))
     }
 }
@@ -63,9 +57,7 @@ pub(crate) fn symbol_exact<'a>(t: &'a str) -> impl Fn(Span<'a>) -> IResult<Span<
     move |s: Span<'a>| {
         let (depth, s) = nom_tracable::forward_trace(s, &format!("symbol(\"{}\")", t));
         let body = || {
-            let (s, x) = map(no_ws(map(tag(t.clone()), into_locate)), |x| Symbol {
-                nodes: x,
-            })(s)?;
+            let (s, x) = map(no_ws(map(tag(t), into_locate)), |x| Symbol { nodes: x })(s)?;
             Ok((s, x))
         };
         let ret = body();
@@ -78,8 +70,8 @@ pub(crate) fn keyword<'a>(t: &'a str) -> impl Fn(Span<'a>) -> IResult<Span<'a>,
     move |s: Span<'a>| {
         let (s, x) = map(
             ws(alt((
-                all_consuming(map(tag(t.clone()), into_locate)),
-                terminated(map(tag(t.clone()), into_locate), peek(none_of(AZ09_))),
+                all_consuming(map(tag(t), into_locate)),
+                terminated(map(tag(t), into_locate), peek(none_of(AZ09_))),
             ))),
             |x| Keyword { nodes: x },
         )(s)?;
@@ -94,8 +86,8 @@ pub(crate) fn keyword<'a>(t: &'a str) -> impl Fn(Span<'a>) -> IResult<Span<'a>,
         let body = || {
             let (s, x) = map(
                 ws(alt((
-                    all_consuming(map(tag(t.clone()), into_locate)),
-                    terminated(map(tag(t.clone()), into_locate), peek(none_of(AZ09_))),
+                    all_consuming(map(tag(t), into_locate)),
+                    terminated(map(tag(t), into_locate), peek(none_of(AZ09_))),
                 ))),
                 |x| Keyword { nodes: x },
             )(s)?;

@@ -1,2 +1,4 @@
+#![allow(clippy::type_complexity)]
+
 pub mod preprocess;
 pub mod range;

@@ -4,6 +4,7 @@ use nom_greedyerror::error_position;
 use std::collections::{BTreeMap, HashMap};
 use std::convert::TryInto;
 use std::fs::File;
+use std::hash::BuildHasher;
 use std::io::{BufReader, Read};
 use std::path::{Path, PathBuf};
 use sv_parser_error::Error;
@@ -114,9 +115,9 @@ impl DefineText {
 
 pub type Defines = HashMap<String, Option<Define>>;
 
-pub fn preprocess<T: AsRef<Path>, U: AsRef<Path>>(
+pub fn preprocess<T: AsRef<Path>, U: AsRef<Path>, V: BuildHasher>(
     path: T,
-    pre_defines: &Defines,
+    pre_defines: &HashMap<String, Option<Define>, V>,
     include_paths: &[U],
     ignore_include: bool,
 ) -> Result<(PreprocessedText, Defines), Error> {
@@ -131,10 +132,10 @@ pub fn preprocess<T: AsRef<Path>, U: AsRef<Path>>(
     preprocess_str(&s, path, pre_defines, include_paths, ignore_include, 0)
 }
 
-pub fn preprocess_str<T: AsRef<Path>, U: AsRef<Path>>(
+pub fn preprocess_str<T: AsRef<Path>, U: AsRef<Path>, V: BuildHasher>(
     s: &str,
     path: T,
-    pre_defines: &Defines,
+    pre_defines: &HashMap<String, Option<Define>, V>,
     include_paths: &[U],
     ignore_include: bool,
     resolve_depth: usize,
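
The signature changes in this file (and in parse_sv/parse_lib further below) address clippy's implicit_hasher lint: a public API that takes &HashMap with the default hasher is less general than one accepting any V: BuildHasher, and existing callers keep compiling because std's default RandomState satisfies the bound. A small sketch of the generalisation, with an illustrative function name:

    use std::collections::HashMap;
    use std::hash::BuildHasher;

    // Generic over the map's hasher, mirroring the new preprocess/parse_sv signatures.
    fn count_defined<S: BuildHasher>(defines: &HashMap<String, Option<String>, S>) -> usize {
        defines.values().filter(|v| v.is_some()).count()
    }

    fn main() {
        let mut defines = HashMap::new();
        defines.insert("WIDTH".to_string(), Some("8".to_string()));
        defines.insert("DEBUG".to_string(), None);
        assert_eq!(count_defined(&defines), 1);
    }
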
@@ -399,8 +400,7 @@ pub fn preprocess_str<T: AsRef<Path>, U: AsRef<Path>>(
                }
            }
        };
-        if path.is_relative() {
-            if !path.exists() {
+        if path.is_relative() && !path.exists() {
                for include_path in include_paths {
                    let new_path = include_path.as_ref().join(&path);
                    if new_path.exists() {
@@ -409,7 +409,6 @@ pub fn preprocess_str<T: AsRef<Path>, U: AsRef<Path>>(
                    }
                }
            }
-        }
        let (include, new_defines) = preprocess(path, &defines, include_paths, false)
            .map_err(|x| Error::Include {
                source: Box::new(x),
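
The two hunks above are one fix: clippy's collapsible_if suggests merging the nested if into a single condition, which also removes one closing brace further down. A minimal sketch:

    use std::path::Path;

    fn needs_include_search(path: &Path) -> bool {
        // Before (flagged):
        // if path.is_relative() {
        //     if !path.exists() {
        //         return true;
        //     }
        // }
        // After, as in the commit:
        if path.is_relative() && !path.exists() {
            return true;
        }
        false
    }

    fn main() {
        assert!(needs_include_search(Path::new("no/such/file.svh")));
    }
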
@@ -609,10 +608,10 @@ fn resolve_text_macro_usage<T: AsRef<Path>, U: AsRef<Path>>(
         } else {
             Ok(None)
         }
-    } else if let Some(_) = define {
+    } else if define.is_some() {
         Ok(None)
     } else {
-        Err(Error::DefineNotFound(String::from(id)))
+        Err(Error::DefineNotFound(id))
     }
 }
 
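
`if let Some(_) = define` only asks whether the option is populated, so clippy's redundant_pattern_matching lint suggests define.is_some(); the Err arm also drops a now-unneeded String::from conversion. A standalone sketch (the error type here is just a String for illustration):

    fn resolve(define: Option<&str>, id: String) -> Result<Option<String>, String> {
        // Before (flagged): `} else if let Some(_) = define {`
        if define.is_some() {
            Ok(None)
        } else {
            Err(format!("define not found: {}", id))
        }
    }

    fn main() {
        assert!(resolve(Some("1"), "WIDTH".to_string()).is_ok());
        assert!(resolve(None, "WIDTH".to_string()).is_err());
    }
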
@@ -1,4 +1,9 @@
 #![recursion_limit = "256"]
+#![allow(
+    clippy::module_inception,
+    clippy::large_enum_variant,
+    clippy::type_complexity
+)]
 
 pub mod any_node;
 pub mod behavioral_statements;

@@ -4,6 +4,7 @@ use nom::combinator::all_consuming;
 use nom_greedyerror::error_position;
 use std::collections::HashMap;
 use std::fmt;
+use std::hash::BuildHasher;
 use std::path::{Path, PathBuf};
 pub use sv_parser_error::Error;
 use sv_parser_parser::{lib_parser, sv_parser, Span, SpanInfo};
@@ -22,15 +23,12 @@ impl SyntaxTree {
         let mut beg = None;
         let mut end = 0;
         for n in Iter::new(nodes.into()) {
-            match n {
-                RefNode::Locate(x) => {
+            if let RefNode::Locate(x) = n {
                     if beg.is_none() {
                         beg = Some(x.offset);
                     }
                     end = x.offset + x.len;
                 }
-                _ => (),
-            }
         }
         if let Some(beg) = beg {
             let ret = unsafe { self.text.text().get_unchecked(beg..end) };
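
A match with a single meaningful arm and a `_ => ()` fallback is what clippy's single_match lint targets; if let expresses the same thing more directly. A standalone sketch with a stand-in enum (not the crate's RefNode):

    enum Token<'a> {
        Locate(&'a str),
        Other,
    }

    fn total_len(tokens: &[Token]) -> usize {
        let mut end = 0;
        for t in tokens {
            // Before (flagged):
            // match t {
            //     Token::Locate(x) => end += x.len(),
            //     _ => (),
            // }
            // After, as in the commit:
            if let Token::Locate(x) = t {
                end += x.len();
            }
        }
        end
    }

    fn main() {
        let tokens = [Token::Locate("module"), Token::Other, Token::Locate("endmodule")];
        assert_eq!(total_len(&tokens), 15);
    }
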
@@ -94,9 +92,9 @@ impl<'a> IntoIterator for &'a SyntaxTree {
     }
 }
 
-pub fn parse_sv<T: AsRef<Path>, U: AsRef<Path>>(
+pub fn parse_sv<T: AsRef<Path>, U: AsRef<Path>, V: BuildHasher>(
     path: T,
-    pre_defines: &HashMap<String, Option<Define>>,
+    pre_defines: &HashMap<String, Option<Define>, V>,
     include_paths: &[U],
     ignore_include: bool,
 ) -> Result<(SyntaxTree, Defines), Error> {
@@ -131,10 +129,10 @@ pub fn parse_sv<T: AsRef<Path>, U: AsRef<Path>>(
     }
 }
 
-pub fn parse_sv_str<T: AsRef<Path>, U: AsRef<Path>>(
+pub fn parse_sv_str<T: AsRef<Path>, U: AsRef<Path>, V: BuildHasher>(
     s: &str,
     path: T,
-    pre_defines: &HashMap<String, Option<Define>>,
+    pre_defines: &HashMap<String, Option<Define>, V>,
     include_paths: &[U],
     ignore_include: bool,
 ) -> Result<(SyntaxTree, Defines), Error> {
@@ -169,9 +167,9 @@ pub fn parse_sv_str<T: AsRef<Path>, U: AsRef<Path>>(
     }
 }
 
-pub fn parse_lib<T: AsRef<Path>, U: AsRef<Path>>(
+pub fn parse_lib<T: AsRef<Path>, U: AsRef<Path>, V: BuildHasher>(
     path: T,
-    pre_defines: &HashMap<String, Option<Define>>,
+    pre_defines: &HashMap<String, Option<Define>, V>,
     include_paths: &[U],
     ignore_include: bool,
 ) -> Result<(SyntaxTree, Defines), Error> {
@@ -206,10 +204,10 @@ pub fn parse_lib<T: AsRef<Path>, U: AsRef<Path>>(
     }
 }
 
-pub fn parse_lib_str<T: AsRef<Path>, U: AsRef<Path>>(
+pub fn parse_lib_str<T: AsRef<Path>, U: AsRef<Path>, V: BuildHasher>(
     s: &str,
     path: T,
-    pre_defines: &HashMap<String, Option<Define>>,
+    pre_defines: &HashMap<String, Option<Define>, V>,
     include_paths: &[U],
     ignore_include: bool,
 ) -> Result<(SyntaxTree, Defines), Error> {
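
For callers, the added V: BuildHasher parameter is inferred from the map that is passed in, so call sites built on the std HashMap need no changes. A hedged usage sketch (the file name, include paths, and exact re-export paths are assumptions, not taken from this diff):

    use std::collections::HashMap;
    use sv_parser::{parse_sv, Define}; // assumed re-exports

    fn main() {
        // RandomState, the default hasher, satisfies the new V: BuildHasher bound.
        let pre_defines: HashMap<String, Option<Define>> = HashMap::new();
        let include_paths: Vec<String> = vec![];
        if let Ok((_syntax_tree, _defines)) =
            parse_sv("top.sv", &pre_defines, &include_paths, false)
        {
            // inspect the tree here
        }
    }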