author     Alex Orlenko <zxteam@protonmail.com>    2021-05-06 00:36:47 +0100
committer  Alex Orlenko <zxteam@protonmail.com>    2021-05-06 00:36:47 +0100
commit     20cba5de5b18ee144fac975d04e04214d580105d
tree       6b46b835e58f20aab90af093c65d60359880bedf
parent     3e03f4201cf474c09598081eb367849c36f051c0
Fix clippy warnings
 mlua_derive/src/chunk.rs | 5 +++--
 mlua_derive/src/lib.rs   | 2 +-
 mlua_derive/src/token.rs | 8 ++++----
 3 files changed, 8 insertions(+), 7 deletions(-)
diff --git a/mlua_derive/src/chunk.rs b/mlua_derive/src/chunk.rs
index 9ff61a9..ee9b822 100644
--- a/mlua_derive/src/chunk.rs
+++ b/mlua_derive/src/chunk.rs
@@ -76,11 +76,12 @@ impl Chunk {
                 .map(|lc| (lc.line, lc.column))
                 .unwrap_or_else(|| (line, col));
 
+            #[allow(clippy::comparison_chain)]
             if line > prev_line {
-                source.push_str("\n");
+                source.push('\n');
             } else if line == prev_line {
                 for _ in 0..col.saturating_sub(prev_col) {
-                    source.push_str(" ");
+                    source.push(' ');
                 }
             }
             source.push_str(&t.to_string());
diff --git a/mlua_derive/src/lib.rs b/mlua_derive/src/lib.rs
index b735b6f..a1e70d6 100644
--- a/mlua_derive/src/lib.rs
+++ b/mlua_derive/src/lib.rs
@@ -14,7 +14,7 @@ pub fn lua_module(attr: TokenStream, item: TokenStream) -> TokenStream {
     let args = parse_macro_input!(attr as AttributeArgs);
     let item = parse_macro_input!(item as ItemFn);
 
-    if args.len() > 0 {
+    if !args.is_empty() {
         let err = Error::new(Span::call_site(), "the number of arguments must be zero")
             .to_compile_error();
         return err.into();
diff --git a/mlua_derive/src/token.rs b/mlua_derive/src/token.rs
index 24182d4..0173118 100644
--- a/mlua_derive/src/token.rs
+++ b/mlua_derive/src/token.rs
@@ -38,7 +38,7 @@ impl Pos {
     }
 
     fn span_pos(span: &Span) -> (Pos, Pos) {
-        let span2: Span2 = span.clone().into();
+        let span2: Span2 = (*span).into();
         let start = span2.start();
         let end = span2.end();
 
@@ -175,7 +175,7 @@ impl Tokens {
 pub(crate) fn retokenize(tt: TokenStream) -> Tokens {
     Tokens(
         tt.into_iter()
-            .map(|tt| Tokens::from(tt))
+            .map(Tokens::from)
             .flatten()
             .peekable()
             .batching(|iter| {
@@ -217,8 +217,8 @@ impl From<TokenTree> for Tokens {
 
             vec![Token::new_delim(b, tt.clone(), true)]
                 .into_iter()
-                .chain(g.stream().into_iter().map(|tt| Tokens::from(tt)).flatten())
-                .chain(vec![Token::new_delim(e, tt.clone(), false)])
+                .chain(g.stream().into_iter().map(Tokens::from).flatten())
+                .chain(vec![Token::new_delim(e, tt, false)])
                 .collect()
         }
         _ => vec![Token::new(tt)],
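The commit message only says "Fix clippy warnings", but the warnings in question appear to be: `comparison_chain` (explicitly allowed rather than rewritten), single-character `push_str` calls replaced by `push`, `len_zero`, `clone_on_copy`, `redundant_closure`, and `redundant_clone`. Below is a minimal standalone sketch, not taken from mlua_derive and using only hypothetical names, illustrating the before/after shape of each of these lints:

```rust
// Standalone sketch (hypothetical code, not from mlua_derive) showing the
// pattern each clippy fix in this commit addresses.

fn main() {
    let mut source = String::new();
    let (line, prev_line, col, prev_col) = (3usize, 2usize, 8usize, 4usize);

    // clippy::comparison_chain suggests `match line.cmp(&prev_line)` for an
    // `if a > b ... else if a == b` chain; the commit keeps the chain and
    // opts out with an explicit `#[allow]` instead.
    #[allow(clippy::comparison_chain)]
    if line > prev_line {
        source.push('\n'); // was: source.push_str("\n")
    } else if line == prev_line {
        for _ in 0..col.saturating_sub(prev_col) {
            source.push(' '); // was: source.push_str(" ")
        }
    }

    // clippy::len_zero: test emptiness with is_empty(), not `len() > 0`.
    let args: Vec<String> = Vec::new();
    if !args.is_empty() {
        eprintln!("unexpected arguments");
    }

    // clippy::clone_on_copy: dereference a Copy value instead of cloning it.
    let n = 42u8;
    let r = &n;
    let widened = u16::from(*r); // was: u16::from(r.clone())

    // clippy::redundant_closure: pass the function itself to map().
    let rendered: Vec<String> = [1, 2, 3]
        .iter()
        .map(ToString::to_string) // was: .map(|n| n.to_string())
        .collect();

    // clippy::redundant_clone: the last use of a value needs no clone.
    let token = String::from("token");
    let moved = token; // was: token.clone(), with token never used again

    println!("{:?} {} {:?} {}", source, widened, rendered, moved);
}
```

Keeping the `if`/`else if` chain behind `#[allow(clippy::comparison_chain)]` is a defensible choice in chunk.rs: the `match line.cmp(&prev_line)` form the lint suggests would force an empty `Ordering::Less` arm for a case the code deliberately ignores.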