Apply clippy

dalance 2020-02-09 21:54:16 +09:00
parent de2cf60b42
commit 7f24c6b66d
7 changed files with 45 additions and 48 deletions

View File

@@ -120,7 +120,7 @@ pub fn any_node_derive(input: TokenStream) -> TokenStream {
}
fn impl_any_node(ast: &DeriveInput) -> TokenStream {
-let ref data = match ast.data {
+let data = &match ast.data {
Enum(ref data) => data,
_ => unreachable!(),
};
@@ -170,7 +170,7 @@ pub fn ref_node_derive(input: TokenStream) -> TokenStream {
}
fn impl_ref_node(ast: &DeriveInput) -> TokenStream {
-let ref data = match ast.data {
+let data = &match ast.data {
Enum(ref data) => data,
_ => unreachable!(),
};
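Both hunks above make the same change: "let ref data = ..." becomes "let data = &...". This is most likely clippy's toplevel_ref_arg lint, which prefers taking a reference to the expression over binding through a ref pattern. A minimal standalone sketch of the rewrite; the Shape enum and describe function are invented for illustration, not taken from this crate:

    enum Shape {
        Circle { radius: f64 },
    }

    fn describe(shape: &Shape) -> String {
        // Before the fix this would read `let ref data = match ...`, binding by
        // reference through the pattern; clippy prefers a reference to the
        // match result itself.
        let data = &match shape {
            Shape::Circle { radius } => *radius,
        };
        format!("radius = {}", data)
    }

    fn main() {
        println!("{}", describe(&Shape::Circle { radius: 2.0 }));
    }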

View File

@@ -1,3 +1,5 @@
+#![allow(clippy::many_single_char_names, clippy::module_inception)]
pub mod keywords;
#[macro_use]
pub mod utils;
@@ -34,7 +36,6 @@ pub(crate) use nom::combinator::*;
pub(crate) use nom::error::{context, make_error, ErrorKind};
pub(crate) use nom::multi::*;
pub(crate) use nom::sequence::*;
-//pub(crate) use nom::{Err, IResult};
pub(crate) use nom::Err;
pub(crate) use nom_greedyerror::GreedyError;
pub(crate) use nom_packrat::{self, packrat_parser, HasExtraState};
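Rather than fixing each occurrence, this file silences two lints crate-wide with an inner #![allow(...)] attribute and drops a commented-out use as dead code; the lib.rs hunks later in this commit add the same kind of attribute for other lints. A small sketch of the crate-level allow pattern, with an invented module layout that would otherwise trip module_inception:

    // Crate-level attributes sit at the very top of lib.rs or main.rs; `allow`
    // here disables the listed lints for the whole crate rather than per item.
    #![allow(clippy::many_single_char_names, clippy::module_inception)]

    pub mod parser {
        // `module_inception` normally warns about a module named like its
        // parent (parser::parser); the crate-level allow suppresses it.
        pub mod parser {
            pub fn parse(s: &str) -> usize {
                s.len()
            }
        }
    }

    fn main() {
        println!("{}", parser::parser::parse("abc"));
    }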

View File

@@ -26,9 +26,7 @@ where
#[cfg(not(feature = "trace"))]
pub(crate) fn symbol<'a>(t: &'a str) -> impl Fn(Span<'a>) -> IResult<Span<'a>, Symbol> {
move |s: Span<'a>| {
-let (s, x) = map(ws(map(tag(t.clone()), |x: Span| into_locate(x))), |x| {
-Symbol { nodes: x }
-})(s)?;
+let (s, x) = map(ws(map(tag(t), into_locate)), |x| Symbol { nodes: x })(s)?;
Ok((s, x))
}
}
@@ -38,9 +36,7 @@ pub(crate) fn symbol<'a>(t: &'a str) -> impl Fn(Span<'a>) -> IResult<Span<'a>, S
move |s: Span<'a>| {
let (depth, s) = nom_tracable::forward_trace(s, &format!("symbol(\"{}\")", t));
let body = || {
-let (s, x) = map(ws(map(tag(t.clone()), |x: Span| into_locate(x))), |x| {
-Symbol { nodes: x }
-})(s)?;
+let (s, x) = map(ws(map(tag(t), into_locate)), |x| Symbol { nodes: x })(s)?;
Ok((s, x))
};
let ret = body();
@@ -51,9 +47,7 @@ pub(crate) fn symbol<'a>(t: &'a str) -> impl Fn(Span<'a>) -> IResult<Span<'a>, S
#[cfg(not(feature = "trace"))]
pub(crate) fn symbol_exact<'a>(t: &'a str) -> impl Fn(Span<'a>) -> IResult<Span<'a>, Symbol> {
move |s: Span<'a>| {
-let (s, x) = map(no_ws(map(tag(t.clone()), into_locate)), |x| Symbol {
-nodes: x,
-})(s)?;
+let (s, x) = map(no_ws(map(tag(t), into_locate)), |x| Symbol { nodes: x })(s)?;
Ok((s, x))
}
}
@@ -63,9 +57,7 @@ pub(crate) fn symbol_exact<'a>(t: &'a str) -> impl Fn(Span<'a>) -> IResult<Span<
move |s: Span<'a>| {
let (depth, s) = nom_tracable::forward_trace(s, &format!("symbol(\"{}\")", t));
let body = || {
-let (s, x) = map(no_ws(map(tag(t.clone()), into_locate)), |x| Symbol {
-nodes: x,
-})(s)?;
+let (s, x) = map(no_ws(map(tag(t), into_locate)), |x| Symbol { nodes: x })(s)?;
Ok((s, x))
};
let ret = body();
@@ -78,8 +70,8 @@ pub(crate) fn keyword<'a>(t: &'a str) -> impl Fn(Span<'a>) -> IResult<Span<'a>,
move |s: Span<'a>| {
let (s, x) = map(
ws(alt((
-all_consuming(map(tag(t.clone()), into_locate)),
-terminated(map(tag(t.clone()), into_locate), peek(none_of(AZ09_))),
+all_consuming(map(tag(t), into_locate)),
+terminated(map(tag(t), into_locate), peek(none_of(AZ09_))),
))),
|x| Keyword { nodes: x },
)(s)?;
@@ -94,8 +86,8 @@ pub(crate) fn keyword<'a>(t: &'a str) -> impl Fn(Span<'a>) -> IResult<Span<'a>,
let body = || {
let (s, x) = map(
ws(alt((
-all_consuming(map(tag(t.clone()), into_locate)),
-terminated(map(tag(t.clone()), into_locate), peek(none_of(AZ09_))),
+all_consuming(map(tag(t), into_locate)),
+terminated(map(tag(t), into_locate), peek(none_of(AZ09_))),
))),
|x| Keyword { nodes: x },
)(s)?;
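The repeated cleanup in this file removes two patterns clippy commonly flags: calling .clone() on a &str, which only copies the reference (likely clippy::clone_on_copy), and wrapping a function in a closure that merely forwards its argument, as in |x: Span| into_locate(x) (likely clippy::redundant_closure). A self-contained sketch of both rewrites; shout and apply are invented helpers:

    fn shout(s: &str) -> String {
        s.to_uppercase()
    }

    fn apply<F: Fn(&str) -> String>(f: F, input: &str) -> String {
        f(input)
    }

    fn main() {
        let t: &str = "hello";

        // Before: `t.clone()` just copies the reference, and the closure only
        // forwards its argument to `shout`.
        let noisy = apply(|x: &str| shout(x), t.clone());

        // After: pass the reference and the function directly.
        let quiet = apply(shout, t);

        assert_eq!(noisy, quiet);
        println!("{}", quiet);
    }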

View File

@@ -1,2 +1,4 @@
+#![allow(clippy::type_complexity)]
pub mod preprocess;
pub mod range;

View File

@@ -4,6 +4,7 @@ use nom_greedyerror::error_position;
use std::collections::{BTreeMap, HashMap};
use std::convert::TryInto;
use std::fs::File;
+use std::hash::BuildHasher;
use std::io::{BufReader, Read};
use std::path::{Path, PathBuf};
use sv_parser_error::Error;
@@ -114,9 +115,9 @@ impl DefineText {
pub type Defines = HashMap<String, Option<Define>>;
-pub fn preprocess<T: AsRef<Path>, U: AsRef<Path>>(
+pub fn preprocess<T: AsRef<Path>, U: AsRef<Path>, V: BuildHasher>(
path: T,
-pre_defines: &Defines,
+pre_defines: &HashMap<String, Option<Define>, V>,
include_paths: &[U],
ignore_include: bool,
) -> Result<(PreprocessedText, Defines), Error> {
@@ -131,10 +132,10 @@ pub fn preprocess<T: AsRef<Path>, U: AsRef<Path>>(
preprocess_str(&s, path, pre_defines, include_paths, ignore_include, 0)
}
-pub fn preprocess_str<T: AsRef<Path>, U: AsRef<Path>>(
+pub fn preprocess_str<T: AsRef<Path>, U: AsRef<Path>, V: BuildHasher>(
s: &str,
path: T,
-pre_defines: &Defines,
+pre_defines: &HashMap<String, Option<Define>, V>,
include_paths: &[U],
ignore_include: bool,
resolve_depth: usize,
@@ -399,14 +400,12 @@ pub fn preprocess_str<T: AsRef<Path>, U: AsRef<Path>>(
}
}
};
-if path.is_relative() {
-if !path.exists() {
-for include_path in include_paths {
-let new_path = include_path.as_ref().join(&path);
-if new_path.exists() {
-path = new_path;
-break;
-}
-}
-}
-}
+if path.is_relative() && !path.exists() {
+for include_path in include_paths {
+let new_path = include_path.as_ref().join(&path);
+if new_path.exists() {
+path = new_path;
+break;
+}
+}
+}
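An if whose body is nothing but another if is what clippy::collapsible_if flags; the two conditions are merged with &&. A standalone sketch mirroring the control flow of the hunk above; the surrounding resolve function is invented:

    use std::path::{Path, PathBuf};

    // Resolve a relative, non-existent path against a list of search directories.
    // Sketch only: this mirrors the shape of the hunk, not the real function.
    fn resolve(mut path: PathBuf, include_paths: &[&Path]) -> PathBuf {
        // Collapsed form: one `if` with `&&` instead of two nested `if`s.
        if path.is_relative() && !path.exists() {
            for include_path in include_paths {
                let new_path = include_path.join(&path);
                if new_path.exists() {
                    path = new_path;
                    break;
                }
            }
        }
        path
    }

    fn main() {
        let p = resolve(PathBuf::from("does_not_exist.txt"), &[Path::new("/tmp")]);
        println!("{}", p.display());
    }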
@@ -609,10 +608,10 @@ fn resolve_text_macro_usage<T: AsRef<Path>, U: AsRef<Path>>(
} else {
Ok(None)
}
-} else if let Some(_) = define {
+} else if define.is_some() {
Ok(None)
} else {
-Err(Error::DefineNotFound(String::from(id)))
+Err(Error::DefineNotFound(id))
}
}
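Checking if let Some(_) = define only tests for presence, so clippy (likely redundant_pattern_matching) prefers define.is_some(); the last line also drops a String::from conversion that is unnecessary once the name is already an owned String. A hedged sketch with invented types showing both rewritten branches:

    use std::collections::HashMap;

    #[derive(Debug)]
    enum LookupError {
        DefineNotFound(String),
    }

    // Sketch only: `defines` maps a macro name to an optional body, loosely
    // mirroring the preprocessor's Defines map; all names are placeholders.
    fn expand(
        defines: &HashMap<String, Option<String>>,
        id: String,
    ) -> Result<Option<String>, LookupError> {
        let define = defines.get(&id);
        if let Some(Some(body)) = define {
            // Known define with a body: return the expansion.
            Ok(Some(body.clone()))
        } else if define.is_some() {
            // Known define without a body: nothing to expand. Clippy prefers
            // this over `else if let Some(_) = define`.
            Ok(None)
        } else {
            // Unknown define: the error takes the owned name directly, so no
            // `String::from(id)` is needed when `id` is already a String.
            Err(LookupError::DefineNotFound(id))
        }
    }

    fn main() {
        let mut defines = HashMap::new();
        defines.insert("WIDTH".to_string(), Some("8".to_string()));
        defines.insert("DEBUG".to_string(), None);
        println!("{:?}", expand(&defines, "WIDTH".to_string()));
        println!("{:?}", expand(&defines, "MISSING".to_string()));
    }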

View File

@@ -1,4 +1,9 @@
#![recursion_limit = "256"]
+#![allow(
+clippy::module_inception,
+clippy::large_enum_variant,
+clippy::type_complexity
+)]
pub mod any_node;
pub mod behavioral_statements;

View File

@@ -4,6 +4,7 @@ use nom::combinator::all_consuming;
use nom_greedyerror::error_position;
use std::collections::HashMap;
use std::fmt;
+use std::hash::BuildHasher;
use std::path::{Path, PathBuf};
pub use sv_parser_error::Error;
use sv_parser_parser::{lib_parser, sv_parser, Span, SpanInfo};
@@ -22,14 +23,11 @@ impl SyntaxTree {
let mut beg = None;
let mut end = 0;
for n in Iter::new(nodes.into()) {
-match n {
-RefNode::Locate(x) => {
-if beg.is_none() {
-beg = Some(x.offset);
-}
-end = x.offset + x.len;
-}
-_ => (),
-}
+if let RefNode::Locate(x) = n {
+if beg.is_none() {
+beg = Some(x.offset);
+}
+end = x.offset + x.len;
+}
}
if let Some(beg) = beg {
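A match with one meaningful arm and a _ => () catch-all is what clippy::single_match flags, and it becomes an if let. A standalone sketch of the same offset-tracking loop with a placeholder node enum (the real RefNode has many variants):

    // Placeholder for the parser's node enum: only the variant carrying a
    // source location matters for this sketch.
    enum Node {
        Locate { offset: usize, len: usize },
        Other,
    }

    // Track the first offset seen and the end of the last located node,
    // mirroring the rewritten loop above.
    fn span_of(nodes: &[Node]) -> Option<(usize, usize)> {
        let mut beg = None;
        let mut end = 0;
        for n in nodes {
            // One-armed `match` with `_ => ()` becomes `if let`.
            if let Node::Locate { offset, len } = n {
                if beg.is_none() {
                    beg = Some(*offset);
                }
                end = offset + len;
            }
        }
        beg.map(|b| (b, end))
    }

    fn main() {
        let nodes = [Node::Other, Node::Locate { offset: 3, len: 4 }];
        println!("{:?}", span_of(&nodes));
    }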
@@ -94,9 +92,9 @@ impl<'a> IntoIterator for &'a SyntaxTree {
}
}
-pub fn parse_sv<T: AsRef<Path>, U: AsRef<Path>>(
+pub fn parse_sv<T: AsRef<Path>, U: AsRef<Path>, V: BuildHasher>(
path: T,
-pre_defines: &HashMap<String, Option<Define>>,
+pre_defines: &HashMap<String, Option<Define>, V>,
include_paths: &[U],
ignore_include: bool,
) -> Result<(SyntaxTree, Defines), Error> {
@@ -131,10 +129,10 @@ pub fn parse_sv<T: AsRef<Path>, U: AsRef<Path>>(
}
}
-pub fn parse_sv_str<T: AsRef<Path>, U: AsRef<Path>>(
+pub fn parse_sv_str<T: AsRef<Path>, U: AsRef<Path>, V: BuildHasher>(
s: &str,
path: T,
-pre_defines: &HashMap<String, Option<Define>>,
+pre_defines: &HashMap<String, Option<Define>, V>,
include_paths: &[U],
ignore_include: bool,
) -> Result<(SyntaxTree, Defines), Error> {
@@ -169,9 +167,9 @@ pub fn parse_sv_str<T: AsRef<Path>, U: AsRef<Path>>(
}
}
-pub fn parse_lib<T: AsRef<Path>, U: AsRef<Path>>(
+pub fn parse_lib<T: AsRef<Path>, U: AsRef<Path>, V: BuildHasher>(
path: T,
-pre_defines: &HashMap<String, Option<Define>>,
+pre_defines: &HashMap<String, Option<Define>, V>,
include_paths: &[U],
ignore_include: bool,
) -> Result<(SyntaxTree, Defines), Error> {
@@ -206,10 +204,10 @@ pub fn parse_lib<T: AsRef<Path>, U: AsRef<Path>>(
}
}
-pub fn parse_lib_str<T: AsRef<Path>, U: AsRef<Path>>(
+pub fn parse_lib_str<T: AsRef<Path>, U: AsRef<Path>, V: BuildHasher>(
s: &str,
path: T,
-pre_defines: &HashMap<String, Option<Define>>,
+pre_defines: &HashMap<String, Option<Define>, V>,
include_paths: &[U],
ignore_include: bool,
) -> Result<(SyntaxTree, Defines), Error> {
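The signature changes in this file (and in preprocess above) address clippy::implicit_hasher: a public function taking &HashMap<K, V> fixes the hasher to the default RandomState, so clippy suggests a generic BuildHasher parameter that also accepts maps with custom hashers. A minimal sketch of the pattern; count_defined is an invented example function:

    use std::collections::HashMap;
    use std::hash::BuildHasher;

    // Generic over the map's hasher, as clippy::implicit_hasher suggests for
    // public functions; a plain &HashMap<String, Option<String>> would only
    // accept the default hasher.
    fn count_defined<S: BuildHasher>(defines: &HashMap<String, Option<String>, S>) -> usize {
        defines.values().filter(|v| v.is_some()).count()
    }

    fn main() {
        let mut defines: HashMap<String, Option<String>> = HashMap::new();
        defines.insert("WIDTH".into(), Some("8".into()));
        defines.insert("DEBUG".into(), None);

        // The default hasher still works; a map built with a custom BuildHasher
        // would be accepted by the same signature.
        println!("{}", count_defined(&defines));
    }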