Skip to content

Commit

Permalink
Pick a simpler set of Clippy rules that we want to follow
Browse files Browse the repository at this point in the history
Prefer linting against default ones and attempt to lint against some
pedantic ones with select, documented exceptions.
  • Loading branch information
Xanewok committed Dec 5, 2023
1 parent cd1010f commit 2b9527e
Show file tree
Hide file tree
Showing 78 changed files with 378 additions and 352 deletions.
66 changes: 15 additions & 51 deletions .cargo/config.toml
Original file line number Diff line number Diff line change
Expand Up @@ -15,57 +15,21 @@ rustflags = [
"-Welided_lifetimes_in_paths",
"-Wunused_extern_crates",
"-Wexplicit_outlives_requirements",
# clippy additional warnings:
# Lints that are enabled (warn/deny) by default
# 📎 Lints that are enabled (warn/deny) by default
"-Wclippy::all",
# Restriction
"-Wclippy::dbg_macro",
"-Wclippy::exit",
"-Wclippy::rest_pat_in_fully_bound_structs",
"-Wclippy::todo",
"-Wclippy::verbose_file_reads",
# Restriction (optional, neutral lints)
"-Wclippy::dbg_macro", # Lint against leftover `dbg!` macros
"-Wclippy::todo", # Lint against leftover `todo!` macros
"-Wclippy::exit", # Prefer not `process::exit`ing directly
"-Wclippy::rest_pat_in_fully_bound_structs", # Prefer not to use `..` in fully bound structs
"-Wclippy::verbose_file_reads", # Prefer simpler and more concise `fs::read_to_string`
# Pedantic
"-Wclippy::bool-to-int-with-if",
"-Wclippy::cast_lossless",
"-Wclippy::default_trait_access",
"-Wclippy::doc_markdown",
"-Wclippy::enum_glob_use",
"-Wclippy::expl_impl_clone_on_copy",
"-Wclippy::explicit_deref_methods",
"-Wclippy::explicit_into_iter_loop",
"-Wclippy::filter_map_next",
"-Wclippy::flat_map_option",
"-Wclippy::fn_params_excessive_bools",
"-Wclippy::inefficient_to_string",
"-Wclippy::invalid_upcast_comparisons",
"-Wclippy::items_after_statements",
"-Wclippy::large_digit_groups",
"-Wclippy::large_stack_arrays",
"-Wclippy::large_types_passed_by_value",
"-Wclippy::linkedlist",
"-Wclippy::macro_use_imports",
"-Wclippy::manual_assert",
"-Wclippy::manual_ok_or",
"-Wclippy::map_unwrap_or",
"-Wclippy::match_on_vec_items",
"-Wclippy::match_wild_err_arm",
"-Wclippy::mut_mut",
"-Wclippy::needless_continue",
"-Wclippy::needless_for_each",
"-Wclippy::option_option",
"-Wclippy::ptr_as_ptr",
"-Wclippy::ref_option_ref",
"-Wclippy::same_functions_in_if_condition",
"-Wclippy::string_add_assign",
"-Wclippy::uninlined_format_args",
"-Wclippy::unnested_or_patterns",
"-Wclippy::wildcard_imports",
"-Wclippy::zero_sized_map_values",
# Nursery
"-Wclippy::debug_assert_with_mut_call",
"-Wclippy::fallible_impl_from",
"-Wclippy::mutex_integer",
"-Wclippy::path_buf_push_overwrite",
"-Wclippy::string_lit_as_bytes",
"-Wclippy::trait_duplication_in_bounds",
"-Wclippy::pedantic", # Warn about pedantic lints, except...
"-Aclippy::implicit_clone", # A lot of false positives, tuned down in Clippy bundled with Rust 1.73
"-Aclippy::match_same_arms", # It's often clearer to have the same arm twice
"-Aclippy::missing_errors_doc", # Most of our code is internal; let's not clutter the docs until...
"-Aclippy::missing_panics_doc", # ... we care about the public documentation in our shipped crates
"-Aclippy::module_name_repetitions", # It seems we prefer it this way; we'd need to discuss that
"-Aclippy::must_use_candidate", # Overzealous, we'd have to `#[must_use]` a lot of things
"-Aclippy::redundant_closure_for_method_calls", # Not always clearer, let's not pepper `allow`s whenever needed
]
24 changes: 9 additions & 15 deletions crates/codegen/ebnf/src/serialization.rs
Original file line number Diff line number Diff line change
Expand Up @@ -91,14 +91,11 @@ impl EbnfSerializer {
/// ([`PrecedenceParser`](ProductionDefinition::PrecedenceParser)) or keywords ([`Scanner`](ProductionDefinition::Scanner)).
/// Otherwise, prints everything on a single line.
fn serialize_root_node(&mut self, name: &str, root_node: &EbnfNode) -> String {
let choices = match &root_node {
EbnfNode::Choice { nodes } => nodes,
_ => {
// Not a choice: Just flush everything on a single line:
let mut buffer = String::new();
self.serialize_node(root_node, &mut buffer);
return buffer;
}
let EbnfNode::Choice { nodes: choices } = &root_node else {
// Not a choice: Just flush everything on a single line:
let mut buffer = String::new();
self.serialize_node(root_node, &mut buffer);
return buffer;
};

let choices = choices
Expand Down Expand Up @@ -203,13 +200,10 @@ impl EbnfSerializer {
fn display_name(&self, name: &str) -> String {
let mut name = name.to_owned();

let production = match self.language.productions.get(&name) {
Some(production) => production,
None => {
// Not a top-level production, so it is a named parser.
// Therefore, it is neither inlined nor a scanner. Return name as-is:
return name;
}
let Some(production) = self.language.productions.get(&name) else {
// Not a top-level production, so it is a named parser.
// Therefore, it is neither inlined nor a scanner. Return name as-is:
return name;
};

if matches!(production.definition, ProductionDefinition::Scanner { .. }) {
Expand Down
2 changes: 1 addition & 1 deletion crates/codegen/grammar/src/grammar.rs
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ impl Visitable for GrammarElement {
Self::TriviaParserDefinition(trivia_parser) => trivia_parser.accept_visitor(visitor),
Self::ParserDefinition(parser) => parser.accept_visitor(visitor),
Self::PrecedenceParserDefinition(precedence_parser) => {
precedence_parser.accept_visitor(visitor)
precedence_parser.accept_visitor(visitor);
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -149,7 +149,7 @@ fn calculate_defined_in(analysis: &mut Analysis, item: &SpannedItem) -> VersionS
}
SpannedItem::Token { item } => {
for definition in &item.definitions {
try_add_specifier(&definition.enabled)
try_add_specifier(&definition.enabled);
}
}
SpannedItem::Fragment { item } => {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ fn check_unreachabable_items(analysis: &mut Analysis) {
collect_trivia(&language.leading_trivia, &mut queue);
collect_trivia(&language.trailing_trivia, &mut queue);

let mut visited = queue.iter().cloned().collect::<HashSet<_>>();
let mut visited = queue.iter().copied().collect::<HashSet<_>>();

while let Some(name) = queue.pop() {
for referenced_item in &analysis.metadata[name].referenced_items {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -369,14 +369,11 @@ fn check_reference(
enablement: &VersionSet,
expected_kinds: &[SpannedItemDiscriminants],
) {
let target = match analysis.metadata.get_mut(&**reference) {
Some(target) => target,
None => {
analysis
.errors
.add(reference, &Errors::UnknownReference(reference));
return;
}
let Some(target) = analysis.metadata.get_mut(&**reference) else {
analysis
.errors
.add(reference, &Errors::UnknownReference(reference));
return;
};

let not_defined_in = enablement.difference(&target.defined_in);
Expand Down
20 changes: 14 additions & 6 deletions crates/codegen/language/definition/src/internals/errors.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ impl Error {
})
}

pub fn from_syn(error: syn::Error) -> Self {
pub fn from_syn(error: &syn::Error) -> Self {
Self {
message: error.to_string(),
span: error.span(),
Expand All @@ -34,6 +34,18 @@ impl Error {
}
}

// Enables `?`-propagation of `syn::Error` into our `Error` by delegating to
// `Error::from_syn`, which captures the message (`error.to_string()`) and span.
impl From<syn::Error> for Error {
    fn from(error: syn::Error) -> Self {
        Self::from_syn(&error)
    }
}

// Converts back into `syn::Error` via `Error::to_syn` — presumably rebuilding
// it from the stored message and span (to_syn's body is not visible here;
// TODO confirm), so errors can flow into syn/proc-macro reporting.
impl From<Error> for syn::Error {
    fn from(error: Error) -> Self {
        Error::to_syn(&error)
    }
}

#[derive(Debug)]
pub struct ErrorsCollection {
errors: Vec<Error>,
Expand All @@ -60,11 +72,7 @@ impl ErrorsCollection {
}

pub fn to_compile_errors(&self) -> TokenStream {
return self
.errors
.iter()
.map(|error| error.to_compile_error())
.collect();
self.errors.iter().map(Error::to_compile_error).collect()
}
}

Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
use crate::{
internals::{Error, ErrorsCollection, ParseInputTokens, Result},
internals::{ErrorsCollection, ParseInputTokens, Result},
model::SpannedLanguage,
};
use proc_macro2::TokenStream;
Expand All @@ -9,7 +9,7 @@ pub(crate) struct ParseAdapter;

impl ParseAdapter {
pub fn parse(input: TokenStream) -> Result<ParseOutput> {
syn::parse2(input).map_err(Error::from_syn)
Ok(syn::parse2(input)?)
}
}

Expand All @@ -23,9 +23,8 @@ impl Parse for ParseOutput {
fn parse(input: ParseStream<'_>) -> syn::Result<Self> {
let mut errors = ErrorsCollection::new();

match SpannedLanguage::parse_named_value(input, &mut errors) {
Ok(language) => Ok(Self { language, errors }),
Err(error) => Err(error.to_syn()),
}
let language = SpannedLanguage::parse_named_value(input, &mut errors)?;

Ok(Self { language, errors })
}
}
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
use crate::internals::{
parse_input_tokens::ParseHelpers, Error, ErrorsCollection, ParseInputTokens, Result, Spanned,
parse_input_tokens::ParseHelpers, ErrorsCollection, ParseInputTokens, Result, Spanned,
};
use indexmap::{IndexMap, IndexSet};
use proc_macro2::Ident;
Expand Down Expand Up @@ -35,13 +35,13 @@ impl<K: ParseInputTokens + std::hash::Hash + Eq, V: ParseInputTokens> ParseInput
for IndexMap<K, V>
{
fn parse_value(input: ParseStream<'_>, errors: &mut ErrorsCollection) -> Result<Self> {
ParseHelpers::map(input, errors)
Ok(ParseHelpers::map(input, errors))
}
}

impl<T: ParseInputTokens + std::hash::Hash + Ord> ParseInputTokens for IndexSet<Spanned<T>> {
fn parse_value(input: ParseStream<'_>, errors: &mut ErrorsCollection) -> Result<Self> {
let sequence: Vec<Spanned<T>> = ParseHelpers::sequence(input, errors)?;
let sequence: Vec<Spanned<T>> = ParseHelpers::sequence(input, errors);

let mut set = Self::new();

Expand Down Expand Up @@ -102,14 +102,15 @@ impl ParseInputTokens for String {
impl ParseInputTokens for usize {
fn parse_value(input: ParseStream<'_>, _: &mut ErrorsCollection) -> Result<Self> {
let literal = ParseHelpers::syn::<syn::LitInt>(input)?;
let value = literal.base10_parse::<usize>()?;

literal.base10_parse::<usize>().map_err(Error::from_syn)
Ok(value)
}
}

impl<T: ParseInputTokens> ParseInputTokens for Vec<T> {
fn parse_value(input: ParseStream<'_>, errors: &mut ErrorsCollection) -> Result<Self> {
ParseHelpers::sequence(input, errors)
Ok(ParseHelpers::sequence(input, errors))
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,21 +8,21 @@ pub struct ParseHelpers;

impl ParseHelpers {
pub fn syn<T: syn::parse::Parse>(input: ParseStream<'_>) -> Result<T> {
input.parse::<T>().map_err(Error::from_syn)
Ok(input.parse::<T>()?)
}

pub fn delimited<T>(
delimiter: Delimiter,
input: ParseStream<'_>,
inner_parser: impl FnOnce(DelimSpan, ParseStream<'_>) -> Result<T>,
) -> Result<T> {
delimited(delimiter, input, inner_parser).map_err(Error::from_syn)
Ok(delimited(delimiter, input, inner_parser)?)
}

pub fn sequence<T: ParseInputTokens>(
input: ParseStream<'_>,
errors: &mut ErrorsCollection,
) -> Result<Vec<T>> {
) -> Vec<T> {
match Self::delimited(Delimiter::Bracket, input, |_, inner_input| {
let mut result = Vec::new();

Expand All @@ -40,19 +40,19 @@ impl ParseHelpers {

Ok(result)
}) {
Ok(value) => Ok(value),
Ok(value) => value,
Err(error) => {
errors.push(error);

Ok(vec![])
vec![]
}
}
}

pub fn map<K: ParseInputTokens + std::hash::Hash + Eq, V: ParseInputTokens>(
input: ParseStream<'_>,
errors: &mut ErrorsCollection,
) -> Result<IndexMap<K, V>> {
) -> indexmap::IndexMap<K, V> {
match Self::delimited(Delimiter::Parenthesis, input, |_, inner_input| {
let mut result = IndexMap::new();

Expand Down Expand Up @@ -80,11 +80,11 @@ impl ParseHelpers {

Ok(result)
}) {
Ok(value) => Ok(value),
Ok(value) => value,
Err(error) => {
errors.push(error);

Ok(IndexMap::new())
IndexMap::new()
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ impl<T: Eq> Eq for Spanned<T> {}

impl<T: std::hash::Hash> std::hash::Hash for Spanned<T> {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.value.hash(state)
self.value.hash(state);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,8 +37,8 @@ impl WriteOutputTokens for char {

impl<K: WriteOutputTokens, V: WriteOutputTokens> WriteOutputTokens for IndexMap<K, V> {
fn write_output_tokens(&self) -> TokenStream {
let keys = self.keys().map(|key| key.write_output_tokens());
let values = self.values().map(|value| value.write_output_tokens());
let keys = self.keys().map(WriteOutputTokens::write_output_tokens);
let values = self.values().map(WriteOutputTokens::write_output_tokens);

quote! {
[ #( (#keys, #values) ),* ].into()
Expand All @@ -48,7 +48,7 @@ impl<K: WriteOutputTokens, V: WriteOutputTokens> WriteOutputTokens for IndexMap<

impl<T: WriteOutputTokens> WriteOutputTokens for IndexSet<T> {
fn write_output_tokens(&self) -> TokenStream {
let items = self.iter().map(|item| item.write_output_tokens());
let items = self.iter().map(WriteOutputTokens::write_output_tokens);

quote! {
[ #( #items ),* ].into()
Expand Down Expand Up @@ -107,7 +107,7 @@ impl WriteOutputTokens for usize {

impl<T: WriteOutputTokens> WriteOutputTokens for Vec<T> {
fn write_output_tokens(&self) -> TokenStream {
let items = self.iter().map(|item| item.write_output_tokens());
let items = self.iter().map(WriteOutputTokens::write_output_tokens);

quote! {
[ #( #items ),* ].into()
Expand Down
Loading

0 comments on commit 2b9527e

Please sign in to comment.