Skip to content

Commit

Permalink
Finish setting up rustc/Clippy lints and fix those (#695)
Browse files Browse the repository at this point in the history
Part of #155

Commit messages have a bit more context on the relevant changes.

I've decided to drop a few lints from the EDR list, such as the
deny-by-default ones, `inconsistent_struct_constructor` (not that
useful) or `implicit_clone` (a lot of false positives/unidiomatic
suggestions; let's wait for Rust 1.74 and
rust-lang/rust-clippy#11281 to include it
again), as well as ones irrelevant for us, like `suboptimal_flops`.
  • Loading branch information
Xanewok authored Dec 7, 2023
1 parent 7a3c1fb commit 0868683
Show file tree
Hide file tree
Showing 101 changed files with 1,248 additions and 1,129 deletions.
25 changes: 20 additions & 5 deletions .cargo/config.toml
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,24 @@ lto = true
# https://blog.rust-lang.org/2023/11/16/Rust-1.74.0.html#lint-configuration-through-cargo
rustflags = [
# rustc additional warnings:
"--warn",
"unused_crate_dependencies",
# clippy additional warnings:
"--warn",
"clippy::wildcard_imports",
"-Wunused_crate_dependencies",
# Rust 2018 idioms that are not yet warn-by-default:
"-Welided_lifetimes_in_paths",
"-Wunused_extern_crates",
"-Wexplicit_outlives_requirements",
# 📎 Lints that are enabled (warn/deny) by default
"-Wclippy::all",
# Restriction (optional, neutral lints)
"-Wclippy::exit", # Prefer not `process::exit`ing directly
"-Wclippy::rest_pat_in_fully_bound_structs", # Prefer not to use `..` in fully bound structs
"-Wclippy::verbose_file_reads", # Prefer simpler and more concise `fs::read_to_string`
# Pedantic
"-Wclippy::pedantic", # Warn about pedantic lints, except...
"-Aclippy::implicit_clone", # A lot of false positives, tuned down in Clippy bundled with Rust 1.73
"-Aclippy::match_same_arms", # It's often clearer to have the same arm twice
"-Aclippy::missing_errors_doc", # Most of our code is internal; let's not clutter the docs until...
"-Aclippy::missing_panics_doc", # ... we care about the public documentation in our shipped crates
"-Aclippy::module_name_repetitions", # It seems we prefer it this way; we'd need to discuss that
"-Aclippy::must_use_candidate", # Overzealous, we'd have to `[must_use]` a lot of things
"-Aclippy::redundant_closure_for_method_calls", # Not always clearer, let's not pepper `allow`s whenever needed
]
27 changes: 11 additions & 16 deletions crates/codegen/ebnf/src/serialization.rs
Original file line number Diff line number Diff line change
Expand Up @@ -87,17 +87,15 @@ impl EbnfSerializer {
}

/// Naive version of formatting for long EBNF statements.
/// Tries to break long lines, which are usually choices of references (PrecedenceParser) or keywords (Scanner).
/// Tries to break long lines, which are usually choices of references
/// ([`PrecedenceParser`](ProductionDefinition::PrecedenceParser)) or keywords ([`Scanner`](ProductionDefinition::Scanner)).
/// Otherwise, prints everything on a single line.
fn serialize_root_node(&mut self, name: &str, root_node: &EbnfNode) -> String {
let choices = match &root_node {
EbnfNode::Choice { nodes } => nodes,
_ => {
// Not a choice: Just flush everything on a single line:
let mut buffer = String::new();
self.serialize_node(root_node, &mut buffer);
return buffer;
}
let EbnfNode::Choice { nodes: choices } = &root_node else {
// Not a choice: Just flush everything on a single line:
let mut buffer = String::new();
self.serialize_node(root_node, &mut buffer);
return buffer;
};

let choices = choices
Expand Down Expand Up @@ -202,13 +200,10 @@ impl EbnfSerializer {
fn display_name(&self, name: &str) -> String {
let mut name = name.to_owned();

let production = match self.language.productions.get(&name) {
Some(production) => production,
None => {
// Not a top-level production, so it is a named parser.
// Therefore, it is neither inlined nor a scanner. Return name as-is:
return name;
}
let Some(production) = self.language.productions.get(&name) else {
// Not a top-level production, so it is a named parser.
// Therefore, it is neither inlined nor a scanner. Return name as-is:
return name;
};

if matches!(production.definition, ProductionDefinition::Scanner { .. }) {
Expand Down
2 changes: 1 addition & 1 deletion crates/codegen/grammar/src/grammar.rs
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ impl Visitable for GrammarElement {
Self::TriviaParserDefinition(trivia_parser) => trivia_parser.accept_visitor(visitor),
Self::ParserDefinition(parser) => parser.accept_visitor(visitor),
Self::PrecedenceParserDefinition(precedence_parser) => {
precedence_parser.accept_visitor(visitor)
precedence_parser.accept_visitor(visitor);
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -151,7 +151,7 @@ fn calculate_defined_in(analysis: &mut Analysis, item: &SpannedItem) -> VersionS
}
SpannedItem::Token { item } => {
for definition in &item.definitions {
try_add_specifier(&definition.enabled)
try_add_specifier(&definition.enabled);
}
}
SpannedItem::Fragment { item } => {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ fn check_unreachabable_items(analysis: &mut Analysis) {
collect_trivia(&language.leading_trivia, &mut queue);
collect_trivia(&language.trailing_trivia, &mut queue);

let mut visited = queue.iter().cloned().collect::<HashSet<_>>();
let mut visited = queue.iter().copied().collect::<HashSet<_>>();

while let Some(name) = queue.pop() {
for referenced_item in &analysis.metadata[name].referenced_items {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,9 @@ use crate::{
model::{
Identifier, SpannedEnumItem, SpannedEnumVariant, SpannedField, SpannedFragmentItem,
SpannedItem,
SpannedItemDiscriminants::{self, *},
SpannedItemDiscriminants::{
self, Enum, Fragment, Keyword, Precedence, Repeated, Separated, Struct, Token, Trivia,
},
SpannedKeywordDefinition, SpannedKeywordItem, SpannedPrecedenceExpression,
SpannedPrecedenceItem, SpannedPrecedenceOperator, SpannedPrimaryExpression,
SpannedRepeatedItem, SpannedScanner, SpannedSeparatedItem, SpannedStructItem,
Expand Down Expand Up @@ -363,14 +365,11 @@ fn check_reference(
enablement: &VersionSet,
expected_kinds: &[SpannedItemDiscriminants],
) {
let target = match analysis.metadata.get_mut(&**reference) {
Some(target) => target,
None => {
analysis
.errors
.add(reference, &Errors::UnknownReference(reference));
return;
}
let Some(target) = analysis.metadata.get_mut(&**reference) else {
analysis
.errors
.add(reference, &Errors::UnknownReference(reference));
return;
};

let not_defined_in = enablement.difference(&target.defined_in);
Expand Down
22 changes: 15 additions & 7 deletions crates/codegen/language/definition/src/internals/errors.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ use std::fmt::Display;

pub type Result<T> = std::result::Result<T, Error>;

/// Our own proxy for [syn::Error] since the latter does not expose the underlying sub-errors.
/// Our own proxy for [`syn::Error`] since the latter does not expose the underlying sub-errors.
#[derive(Debug)]
pub struct Error {
message: String,
Expand All @@ -18,7 +18,7 @@ impl Error {
})
}

pub fn from_syn(error: syn::Error) -> Self {
pub fn from_syn(error: &syn::Error) -> Self {
Self {
message: error.to_string(),
span: error.span(),
Expand All @@ -34,6 +34,18 @@ impl Error {
}
}

impl From<syn::Error> for Error {
fn from(error: syn::Error) -> Self {
Self::from_syn(&error)
}
}

impl From<Error> for syn::Error {
fn from(error: Error) -> Self {
Error::to_syn(&error)
}
}

#[derive(Debug)]
pub struct ErrorsCollection {
errors: Vec<Error>,
Expand All @@ -60,11 +72,7 @@ impl ErrorsCollection {
}

pub fn to_compile_errors(&self) -> TokenStream {
return self
.errors
.iter()
.map(|error| error.to_compile_error())
.collect();
self.errors.iter().map(Error::to_compile_error).collect()
}
}

Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
use crate::{
internals::{Error, ErrorsCollection, ParseInputTokens, Result},
internals::{ErrorsCollection, ParseInputTokens, Result},
model::SpannedLanguage,
};
use proc_macro2::TokenStream;
Expand All @@ -9,7 +9,7 @@ pub(crate) struct ParseAdapter;

impl ParseAdapter {
pub fn parse(input: TokenStream) -> Result<ParseOutput> {
syn::parse2(input).map_err(Error::from_syn)
Ok(syn::parse2(input)?)
}
}

Expand All @@ -18,14 +18,13 @@ pub(crate) struct ParseOutput {
pub errors: ErrorsCollection,
}

/// A wrapper around [syn::parse::Parse] to convert to/from our own error types.
/// A wrapper around [`syn::parse::Parse`] to convert to/from our own error types.
impl Parse for ParseOutput {
fn parse(input: ParseStream) -> syn::Result<Self> {
fn parse(input: ParseStream<'_>) -> syn::Result<Self> {
let mut errors = ErrorsCollection::new();

match SpannedLanguage::parse_named_value(input, &mut errors) {
Ok(language) => Ok(Self { language, errors }),
Err(error) => Err(error.to_syn()),
}
let language = SpannedLanguage::parse_named_value(input, &mut errors)?;

Ok(Self { language, errors })
}
}
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
use crate::internals::{
parse_input_tokens::ParseHelpers, Error, ErrorsCollection, ParseInputTokens, Result, Spanned,
parse_input_tokens::ParseHelpers, ErrorsCollection, ParseInputTokens, Result, Spanned,
};
use indexmap::{IndexMap, IndexSet};
use proc_macro2::Ident;
Expand All @@ -8,23 +8,23 @@ use std::{fmt::Debug, rc::Rc};
use syn::{parse::ParseStream, LitBool, LitChar, LitStr};

impl ParseInputTokens for bool {
fn parse_value(input: ParseStream, _: &mut ErrorsCollection) -> Result<Self> {
fn parse_value(input: ParseStream<'_>, _: &mut ErrorsCollection) -> Result<Self> {
let literal = ParseHelpers::syn::<LitBool>(input)?;

Ok(literal.value())
}
}

impl<T: ParseInputTokens> ParseInputTokens for Box<T> {
fn parse_value(input: ParseStream, errors: &mut ErrorsCollection) -> Result<Self> {
fn parse_value(input: ParseStream<'_>, errors: &mut ErrorsCollection) -> Result<Self> {
let value = T::parse_value(input, errors)?;

Ok(value.into())
}
}

impl ParseInputTokens for char {
fn parse_value(input: ParseStream, _: &mut ErrorsCollection) -> Result<Self> {
fn parse_value(input: ParseStream<'_>, _: &mut ErrorsCollection) -> Result<Self> {
let literal = ParseHelpers::syn::<LitChar>(input)?;

Ok(literal.value())
Expand All @@ -34,14 +34,14 @@ impl ParseInputTokens for char {
impl<K: ParseInputTokens + std::hash::Hash + Eq, V: ParseInputTokens> ParseInputTokens
for IndexMap<K, V>
{
fn parse_value(input: ParseStream, errors: &mut ErrorsCollection) -> Result<Self> {
ParseHelpers::map(input, errors)
fn parse_value(input: ParseStream<'_>, errors: &mut ErrorsCollection) -> Result<Self> {
Ok(ParseHelpers::map(input, errors))
}
}

impl<T: ParseInputTokens + std::hash::Hash + Ord> ParseInputTokens for IndexSet<Spanned<T>> {
fn parse_value(input: ParseStream, errors: &mut ErrorsCollection) -> Result<Self> {
let sequence: Vec<Spanned<T>> = ParseHelpers::sequence(input, errors)?;
fn parse_value(input: ParseStream<'_>, errors: &mut ErrorsCollection) -> Result<Self> {
let sequence: Vec<Spanned<T>> = ParseHelpers::sequence(input, errors);

let mut set = Self::new();

Expand All @@ -58,15 +58,19 @@ impl<T: ParseInputTokens + std::hash::Hash + Ord> ParseInputTokens for IndexSet<
}

impl<T: ParseInputTokens> ParseInputTokens for Option<T> {
fn parse_value(input: ParseStream, errors: &mut ErrorsCollection) -> Result<Self> {
fn parse_value(input: ParseStream<'_>, errors: &mut ErrorsCollection) -> Result<Self> {
if input.is_empty() {
Ok(None)
} else {
Ok(Some(T::parse_value(input, errors)?))
}
}

fn parse_field(name: &str, input: ParseStream, errors: &mut ErrorsCollection) -> Result<Self> {
fn parse_field(
name: &str,
input: ParseStream<'_>,
errors: &mut ErrorsCollection,
) -> Result<Self> {
match ParseHelpers::syn::<Ident>(&input.fork()) {
Ok(key) if key == name => Ok(Some(ParseHelpers::field(name, input, errors)?)),
_ => Ok(None),
Expand All @@ -75,15 +79,15 @@ impl<T: ParseInputTokens> ParseInputTokens for Option<T> {
}

impl<T: ParseInputTokens> ParseInputTokens for Rc<T> {
fn parse_value(input: ParseStream, errors: &mut ErrorsCollection) -> Result<Self> {
fn parse_value(input: ParseStream<'_>, errors: &mut ErrorsCollection) -> Result<Self> {
let value = T::parse_value(input, errors)?;

Ok(value.into())
}
}

impl ParseInputTokens for String {
fn parse_value(input: ParseStream, errors: &mut ErrorsCollection) -> Result<Self> {
fn parse_value(input: ParseStream<'_>, errors: &mut ErrorsCollection) -> Result<Self> {
let literal = ParseHelpers::syn::<LitStr>(input)?;
let value = literal.value();

Expand All @@ -96,21 +100,22 @@ impl ParseInputTokens for String {
}

impl ParseInputTokens for usize {
fn parse_value(input: ParseStream, _: &mut ErrorsCollection) -> Result<Self> {
fn parse_value(input: ParseStream<'_>, _: &mut ErrorsCollection) -> Result<Self> {
let literal = ParseHelpers::syn::<syn::LitInt>(input)?;
let value = literal.base10_parse::<usize>()?;

literal.base10_parse::<usize>().map_err(Error::from_syn)
Ok(value)
}
}

impl<T: ParseInputTokens> ParseInputTokens for Vec<T> {
fn parse_value(input: ParseStream, errors: &mut ErrorsCollection) -> Result<Self> {
ParseHelpers::sequence(input, errors)
fn parse_value(input: ParseStream<'_>, errors: &mut ErrorsCollection) -> Result<Self> {
Ok(ParseHelpers::sequence(input, errors))
}
}

impl ParseInputTokens for Version {
fn parse_value(input: ParseStream, errors: &mut ErrorsCollection) -> Result<Self> {
fn parse_value(input: ParseStream<'_>, errors: &mut ErrorsCollection) -> Result<Self> {
let literal = ParseHelpers::syn::<LitStr>(input)?;

match Self::parse(&literal.value()) {
Expand Down
Loading

0 comments on commit 0868683

Please sign in to comment.