From 2b9527e465cf11488e63b9e5e76313307da8ff3d Mon Sep 17 00:00:00 2001 From: Igor Matuszewski Date: Tue, 5 Dec 2023 16:41:10 +0100 Subject: [PATCH] Pick a simpler set of Clippy rules that we want to follow Prefer linting against default ones and attempt to lint against some pedantic ones with select, documented exceptions. --- .cargo/config.toml | 66 +++++-------------- crates/codegen/ebnf/src/serialization.rs | 24 +++---- crates/codegen/grammar/src/grammar.rs | 2 +- .../src/compiler/analysis/definitions.rs | 2 +- .../src/compiler/analysis/reachability.rs | 2 +- .../src/compiler/analysis/references.rs | 13 ++-- .../definition/src/internals/errors.rs | 20 ++++-- .../internals/parse_input_tokens/adapter.rs | 11 ++-- .../parse_input_tokens/external_types.rs | 11 ++-- .../internals/parse_input_tokens/helpers.rs | 16 ++--- .../definition/src/internals/spanned/mod.rs | 2 +- .../write_output_tokens/external_types.rs | 8 +-- .../definition/src/model/terminals/keyword.rs | 17 ++--- .../src/derive/parse_input_tokens.rs | 14 ++-- .../internal_macros/src/derive/spanned.rs | 4 +- .../src/derive/write_output_tokens.rs | 8 +-- .../internal_macros/src/input_model.rs | 6 +- .../parser/generator/src/code_generator.rs | 8 +-- .../parser/generator/src/parser_definition.rs | 1 + .../src/precedence_parser_definition.rs | 7 +- .../generator/src/scanner_definition.rs | 2 +- crates/codegen/parser/runtime/src/cst.rs | 1 + crates/codegen/parser/runtime/src/cursor.rs | 16 ++--- .../parser/runtime/src/napi/napi_cursor.rs | 10 ++- .../runtime/src/napi/napi_parse_error.rs | 9 ++- .../runtime/src/napi/napi_text_index.rs | 2 + .../codegen/parser/runtime/src/parse_error.rs | 3 +- .../runtime/src/support/parser_result.rs | 1 + .../runtime/src/support/precedence_helper.rs | 3 + .../parser/runtime/src/support/recovery.rs | 57 ++++++++-------- .../runtime/src/support/scanner_macros.rs | 1 + .../runtime/src/support/separated_helper.rs | 8 +-- .../runtime/src/support/sequence_helper.rs | 5 +- 
.../runtime/src/templates/language.rs.jinja2 | 7 +- .../rules/definitions/keywords/collector.rs | 15 ++--- .../rules/definitions/operators/mod.rs | 2 +- .../rules/definitions/versions/mod.rs | 5 +- .../validation/rules/references/validator.rs | 10 ++- .../src/validation/visitors/version_set.rs | 6 +- crates/codegen/spec/src/snippets.rs | 2 +- crates/infra/cli/src/commands/ci/mod.rs | 1 + crates/infra/cli/src/toolchains/napi/cli.rs | 6 +- .../infra/cli/src/toolchains/napi/compiler.rs | 1 + .../utils/src/codegen/common/formatting.rs | 2 +- crates/infra/utils/src/commands/mod.rs | 7 ++ crates/infra/utils/src/lib.rs | 2 + .../solidity/inputs/language/src/grammar.rs | 11 ++-- .../outputs/cargo/crate/src/generated/cst.rs | 1 + .../cargo/crate/src/generated/cursor.rs | 16 ++--- .../cargo/crate/src/generated/language.rs | 11 ++++ .../crate/src/generated/napi/napi_cursor.rs | 10 ++- .../src/generated/napi/napi_parse_error.rs | 9 ++- .../src/generated/napi/napi_text_index.rs | 2 + .../cargo/crate/src/generated/parse_error.rs | 3 +- .../src/generated/support/parser_result.rs | 1 + .../generated/support/precedence_helper.rs | 3 + .../crate/src/generated/support/recovery.rs | 57 ++++++++-------- .../src/generated/support/scanner_macros.rs | 1 + .../src/generated/support/separated_helper.rs | 8 +-- .../src/generated/support/sequence_helper.rs | 5 +- .../solidity/outputs/cargo/crate/src/main.rs | 6 +- .../outputs/npm/crate/src/generated/cst.rs | 1 + .../outputs/npm/crate/src/generated/cursor.rs | 16 ++--- .../npm/crate/src/generated/language.rs | 11 ++++ .../crate/src/generated/napi/napi_cursor.rs | 10 ++- .../src/generated/napi/napi_parse_error.rs | 9 ++- .../src/generated/napi/napi_text_index.rs | 2 + .../npm/crate/src/generated/parse_error.rs | 3 +- .../src/generated/support/parser_result.rs | 1 + .../generated/support/precedence_helper.rs | 3 + .../crate/src/generated/support/recovery.rs | 57 ++++++++-------- .../src/generated/support/scanner_macros.rs | 1 + 
.../src/generated/support/separated_helper.rs | 8 +-- .../src/generated/support/sequence_helper.rs | 5 +- crates/solidity/testing/sanctuary/src/main.rs | 8 +-- .../testing/sanctuary/src/reporting.rs | 3 +- .../testing/utils/src/cst_snapshots/mod.rs | 4 +- .../testing/utils/src/version_pragmas/mod.rs | 18 ++--- 78 files changed, 378 insertions(+), 352 deletions(-) diff --git a/.cargo/config.toml b/.cargo/config.toml index 52b8f7a1cc..4008ad84ab 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -15,57 +15,21 @@ rustflags = [ "-Welided_lifetimes_in_paths", "-Wunused_extern_crates", "-Wexplicit_outlives_requirements", - # clippy additional warnings: - # Lints that are enabled (warn/deny) by default + # 📎 Lints that are enabled (warn/deny) by default "-Wclippy::all", - # Restriction - "-Wclippy::dbg_macro", - "-Wclippy::exit", - "-Wclippy::rest_pat_in_fully_bound_structs", - "-Wclippy::todo", - "-Wclippy::verbose_file_reads", + # Restriction (optional, neutral lints) + "-Wclippy::dbg_macro", # Lint against leftover `dbg!` macros + "-Wclippy::todo", # Lint against leftover `todo!` macros + "-Wclippy::exit", # Prefer not `process::exit`ing directly + "-Wclippy::rest_pat_in_fully_bound_structs", # Prefer not to use `..` in fully bound structs + "-Wclippy::verbose_file_reads", # Prefer simpler and more concise `fs::read_to_string` # Pedantic - "-Wclippy::bool-to-int-with-if", - "-Wclippy::cast_lossless", - "-Wclippy::default_trait_access", - "-Wclippy::doc_markdown", - "-Wclippy::enum_glob_use", - "-Wclippy::expl_impl_clone_on_copy", - "-Wclippy::explicit_deref_methods", - "-Wclippy::explicit_into_iter_loop", - "-Wclippy::filter_map_next", - "-Wclippy::flat_map_option", - "-Wclippy::fn_params_excessive_bools", - "-Wclippy::inefficient_to_string", - "-Wclippy::invalid_upcast_comparisons", - "-Wclippy::items_after_statements", - "-Wclippy::large_digit_groups", - "-Wclippy::large_stack_arrays", - "-Wclippy::large_types_passed_by_value", - "-Wclippy::linkedlist", - 
"-Wclippy::macro_use_imports", - "-Wclippy::manual_assert", - "-Wclippy::manual_ok_or", - "-Wclippy::map_unwrap_or", - "-Wclippy::match_on_vec_items", - "-Wclippy::match_wild_err_arm", - "-Wclippy::mut_mut", - "-Wclippy::needless_continue", - "-Wclippy::needless_for_each", - "-Wclippy::option_option", - "-Wclippy::ptr_as_ptr", - "-Wclippy::ref_option_ref", - "-Wclippy::same_functions_in_if_condition", - "-Wclippy::string_add_assign", - "-Wclippy::uninlined_format_args", - "-Wclippy::unnested_or_patterns", - "-Wclippy::wildcard_imports", - "-Wclippy::zero_sized_map_values", - # Nursery - "-Wclippy::debug_assert_with_mut_call", - "-Wclippy::fallible_impl_from", - "-Wclippy::mutex_integer", - "-Wclippy::path_buf_push_overwrite", - "-Wclippy::string_lit_as_bytes", - "-Wclippy::trait_duplication_in_bounds", + "-Wclippy::pedantic", # Warn about pedantic lints, except... + "-Aclippy::implicit_clone", # A lot of false positives, tuned down in Clippy bundled with Rust 1.73 + "-Aclippy::match_same_arms", # It's often clearer to have the same arm twice + "-Aclippy::missing_errors_doc", # Most of our code is internal; let's not clutter the docs until... + "-Aclippy::missing_panics_doc", # ... we care about the public documentation in our shipped crates + "-Aclippy::module_name_repetitions", # It seems we prefer it this way; we'd need to discuss that + "-Aclippy::must_use_candidate", # Overzealous, we'd have to `[must_use]` a lot of things + "-Aclippy::redundant_closure_for_method_calls", # Not always clearer, let's not pepper `allow`s whenever needed ] diff --git a/crates/codegen/ebnf/src/serialization.rs b/crates/codegen/ebnf/src/serialization.rs index c7cfdbd3cf..c3b9f477ab 100644 --- a/crates/codegen/ebnf/src/serialization.rs +++ b/crates/codegen/ebnf/src/serialization.rs @@ -91,14 +91,11 @@ impl EbnfSerializer { /// ([`PrecedenceParser`](ProductionDefinition::PrecedenceParser)) or keywords ([`Scanner`](ProductionDefinition::Scanner)). 
/// Otherwise, prints everything on a single line. fn serialize_root_node(&mut self, name: &str, root_node: &EbnfNode) -> String { - let choices = match &root_node { - EbnfNode::Choice { nodes } => nodes, - _ => { - // Not a choice: Just flush everything on a single line: - let mut buffer = String::new(); - self.serialize_node(root_node, &mut buffer); - return buffer; - } + let EbnfNode::Choice { nodes: choices } = &root_node else { + // Not a choice: Just flush everything on a single line: + let mut buffer = String::new(); + self.serialize_node(root_node, &mut buffer); + return buffer; }; let choices = choices @@ -203,13 +200,10 @@ impl EbnfSerializer { fn display_name(&self, name: &str) -> String { let mut name = name.to_owned(); - let production = match self.language.productions.get(&name) { - Some(production) => production, - None => { - // Not a top-level production, so it is an named parser. - // Therefore, it is neither inlined nor a scanner. Return name as-is: - return name; - } + let Some(production) = self.language.productions.get(&name) else { + // Not a top-level production, so it is a named parser. + // Therefore, it is neither inlined nor a scanner. Return name as-is: + return name; }; if matches!(production.definition, ProductionDefinition::Scanner { ..
}) { diff --git a/crates/codegen/grammar/src/grammar.rs b/crates/codegen/grammar/src/grammar.rs index ae3aa6c539..72790ddea5 100644 --- a/crates/codegen/grammar/src/grammar.rs +++ b/crates/codegen/grammar/src/grammar.rs @@ -84,7 +84,7 @@ impl Visitable for GrammarElement { Self::TriviaParserDefinition(trivia_parser) => trivia_parser.accept_visitor(visitor), Self::ParserDefinition(parser) => parser.accept_visitor(visitor), Self::PrecedenceParserDefinition(precedence_parser) => { - precedence_parser.accept_visitor(visitor) + precedence_parser.accept_visitor(visitor); } } } diff --git a/crates/codegen/language/definition/src/compiler/analysis/definitions.rs b/crates/codegen/language/definition/src/compiler/analysis/definitions.rs index 44df213fe8..30ab1268a6 100644 --- a/crates/codegen/language/definition/src/compiler/analysis/definitions.rs +++ b/crates/codegen/language/definition/src/compiler/analysis/definitions.rs @@ -149,7 +149,7 @@ fn calculate_defined_in(analysis: &mut Analysis, item: &SpannedItem) -> VersionS } SpannedItem::Token { item } => { for definition in &item.definitions { - try_add_specifier(&definition.enabled) + try_add_specifier(&definition.enabled); } } SpannedItem::Fragment { item } => { diff --git a/crates/codegen/language/definition/src/compiler/analysis/reachability.rs b/crates/codegen/language/definition/src/compiler/analysis/reachability.rs index eed5958836..dce3343791 100644 --- a/crates/codegen/language/definition/src/compiler/analysis/reachability.rs +++ b/crates/codegen/language/definition/src/compiler/analysis/reachability.rs @@ -33,7 +33,7 @@ fn check_unreachabable_items(analysis: &mut Analysis) { collect_trivia(&language.leading_trivia, &mut queue); collect_trivia(&language.trailing_trivia, &mut queue); - let mut visited = queue.iter().cloned().collect::>(); + let mut visited = queue.iter().copied().collect::>(); while let Some(name) = queue.pop() { for referenced_item in &analysis.metadata[name].referenced_items { diff --git 
a/crates/codegen/language/definition/src/compiler/analysis/references.rs b/crates/codegen/language/definition/src/compiler/analysis/references.rs index 3ed2f36463..49475aaf9b 100644 --- a/crates/codegen/language/definition/src/compiler/analysis/references.rs +++ b/crates/codegen/language/definition/src/compiler/analysis/references.rs @@ -369,14 +369,11 @@ fn check_reference( enablement: &VersionSet, expected_kinds: &[SpannedItemDiscriminants], ) { - let target = match analysis.metadata.get_mut(&**reference) { - Some(target) => target, - None => { - analysis - .errors - .add(reference, &Errors::UnknownReference(reference)); - return; - } + let Some(target) = analysis.metadata.get_mut(&**reference) else { + analysis + .errors + .add(reference, &Errors::UnknownReference(reference)); + return; }; let not_defined_in = enablement.difference(&target.defined_in); diff --git a/crates/codegen/language/definition/src/internals/errors.rs b/crates/codegen/language/definition/src/internals/errors.rs index c2019d9c85..f9315c291a 100644 --- a/crates/codegen/language/definition/src/internals/errors.rs +++ b/crates/codegen/language/definition/src/internals/errors.rs @@ -18,7 +18,7 @@ impl Error { }) } - pub fn from_syn(error: syn::Error) -> Self { + pub fn from_syn(error: &syn::Error) -> Self { Self { message: error.to_string(), span: error.span(), @@ -34,6 +34,18 @@ impl Error { } } +impl From for Error { + fn from(error: syn::Error) -> Self { + Self::from_syn(&error) + } +} + +impl From for syn::Error { + fn from(error: Error) -> Self { + Error::to_syn(&error) + } +} + #[derive(Debug)] pub struct ErrorsCollection { errors: Vec, @@ -60,11 +72,7 @@ impl ErrorsCollection { } pub fn to_compile_errors(&self) -> TokenStream { - return self - .errors - .iter() - .map(|error| error.to_compile_error()) - .collect(); + self.errors.iter().map(Error::to_compile_error).collect() } } diff --git a/crates/codegen/language/definition/src/internals/parse_input_tokens/adapter.rs 
b/crates/codegen/language/definition/src/internals/parse_input_tokens/adapter.rs index 1f5cea1704..34db2bd001 100644 --- a/crates/codegen/language/definition/src/internals/parse_input_tokens/adapter.rs +++ b/crates/codegen/language/definition/src/internals/parse_input_tokens/adapter.rs @@ -1,5 +1,5 @@ use crate::{ - internals::{Error, ErrorsCollection, ParseInputTokens, Result}, + internals::{ErrorsCollection, ParseInputTokens, Result}, model::SpannedLanguage, }; use proc_macro2::TokenStream; @@ -9,7 +9,7 @@ pub(crate) struct ParseAdapter; impl ParseAdapter { pub fn parse(input: TokenStream) -> Result { - syn::parse2(input).map_err(Error::from_syn) + Ok(syn::parse2(input)?) } } @@ -23,9 +23,8 @@ impl Parse for ParseOutput { fn parse(input: ParseStream<'_>) -> syn::Result { let mut errors = ErrorsCollection::new(); - match SpannedLanguage::parse_named_value(input, &mut errors) { - Ok(language) => Ok(Self { language, errors }), - Err(error) => Err(error.to_syn()), - } + let language = SpannedLanguage::parse_named_value(input, &mut errors)?; + + Ok(Self { language, errors }) } } diff --git a/crates/codegen/language/definition/src/internals/parse_input_tokens/external_types.rs b/crates/codegen/language/definition/src/internals/parse_input_tokens/external_types.rs index 55cc9afc4b..64425d1230 100644 --- a/crates/codegen/language/definition/src/internals/parse_input_tokens/external_types.rs +++ b/crates/codegen/language/definition/src/internals/parse_input_tokens/external_types.rs @@ -1,5 +1,5 @@ use crate::internals::{ - parse_input_tokens::ParseHelpers, Error, ErrorsCollection, ParseInputTokens, Result, Spanned, + parse_input_tokens::ParseHelpers, ErrorsCollection, ParseInputTokens, Result, Spanned, }; use indexmap::{IndexMap, IndexSet}; use proc_macro2::Ident; @@ -35,13 +35,13 @@ impl ParseInput for IndexMap { fn parse_value(input: ParseStream<'_>, errors: &mut ErrorsCollection) -> Result { - ParseHelpers::map(input, errors) + Ok(ParseHelpers::map(input, errors)) } } 
impl ParseInputTokens for IndexSet> { fn parse_value(input: ParseStream<'_>, errors: &mut ErrorsCollection) -> Result { - let sequence: Vec> = ParseHelpers::sequence(input, errors)?; + let sequence: Vec> = ParseHelpers::sequence(input, errors); let mut set = Self::new(); @@ -102,14 +102,15 @@ impl ParseInputTokens for String { impl ParseInputTokens for usize { fn parse_value(input: ParseStream<'_>, _: &mut ErrorsCollection) -> Result { let literal = ParseHelpers::syn::(input)?; + let value = literal.base10_parse::()?; - literal.base10_parse::().map_err(Error::from_syn) + Ok(value) } } impl ParseInputTokens for Vec { fn parse_value(input: ParseStream<'_>, errors: &mut ErrorsCollection) -> Result { - ParseHelpers::sequence(input, errors) + Ok(ParseHelpers::sequence(input, errors)) } } diff --git a/crates/codegen/language/definition/src/internals/parse_input_tokens/helpers.rs b/crates/codegen/language/definition/src/internals/parse_input_tokens/helpers.rs index 3ce04ad9d6..667f0707ca 100644 --- a/crates/codegen/language/definition/src/internals/parse_input_tokens/helpers.rs +++ b/crates/codegen/language/definition/src/internals/parse_input_tokens/helpers.rs @@ -8,7 +8,7 @@ pub struct ParseHelpers; impl ParseHelpers { pub fn syn(input: ParseStream<'_>) -> Result { - input.parse::().map_err(Error::from_syn) + Ok(input.parse::()?) } pub fn delimited( @@ -16,13 +16,13 @@ impl ParseHelpers { input: ParseStream<'_>, inner_parser: impl FnOnce(DelimSpan, ParseStream<'_>) -> Result, ) -> Result { - delimited(delimiter, input, inner_parser).map_err(Error::from_syn) + Ok(delimited(delimiter, input, inner_parser)?) 
} pub fn sequence( input: ParseStream<'_>, errors: &mut ErrorsCollection, - ) -> Result> { + ) -> Vec { match Self::delimited(Delimiter::Bracket, input, |_, inner_input| { let mut result = Vec::new(); @@ -40,11 +40,11 @@ impl ParseHelpers { Ok(result) }) { - Ok(value) => Ok(value), + Ok(value) => value, Err(error) => { errors.push(error); - Ok(vec![]) + vec![] } } } @@ -52,7 +52,7 @@ impl ParseHelpers { pub fn map( input: ParseStream<'_>, errors: &mut ErrorsCollection, - ) -> Result> { + ) -> indexmap::IndexMap { match Self::delimited(Delimiter::Parenthesis, input, |_, inner_input| { let mut result = IndexMap::new(); @@ -80,11 +80,11 @@ impl ParseHelpers { Ok(result) }) { - Ok(value) => Ok(value), + Ok(value) => value, Err(error) => { errors.push(error); - Ok(IndexMap::new()) + IndexMap::new() } } } diff --git a/crates/codegen/language/definition/src/internals/spanned/mod.rs b/crates/codegen/language/definition/src/internals/spanned/mod.rs index 600b1f680e..2ea4d4e8e3 100644 --- a/crates/codegen/language/definition/src/internals/spanned/mod.rs +++ b/crates/codegen/language/definition/src/internals/spanned/mod.rs @@ -44,7 +44,7 @@ impl Eq for Spanned {} impl std::hash::Hash for Spanned { fn hash(&self, state: &mut H) { - self.value.hash(state) + self.value.hash(state); } } diff --git a/crates/codegen/language/definition/src/internals/write_output_tokens/external_types.rs b/crates/codegen/language/definition/src/internals/write_output_tokens/external_types.rs index bcd48ac998..0d7a74a728 100644 --- a/crates/codegen/language/definition/src/internals/write_output_tokens/external_types.rs +++ b/crates/codegen/language/definition/src/internals/write_output_tokens/external_types.rs @@ -37,8 +37,8 @@ impl WriteOutputTokens for char { impl WriteOutputTokens for IndexMap { fn write_output_tokens(&self) -> TokenStream { - let keys = self.keys().map(|key| key.write_output_tokens()); - let values = self.values().map(|value| value.write_output_tokens()); + let keys = 
self.keys().map(WriteOutputTokens::write_output_tokens); + let values = self.values().map(WriteOutputTokens::write_output_tokens); quote! { [ #( (#keys, #values) ),* ].into() @@ -48,7 +48,7 @@ impl WriteOutputTokens for IndexMap< impl WriteOutputTokens for IndexSet { fn write_output_tokens(&self) -> TokenStream { - let items = self.iter().map(|item| item.write_output_tokens()); + let items = self.iter().map(WriteOutputTokens::write_output_tokens); quote! { [ #( #items ),* ].into() @@ -107,7 +107,7 @@ impl WriteOutputTokens for usize { impl WriteOutputTokens for Vec { fn write_output_tokens(&self) -> TokenStream { - let items = self.iter().map(|item| item.write_output_tokens()); + let items = self.iter().map(WriteOutputTokens::write_output_tokens); quote! { [ #( #items ),* ].into() diff --git a/crates/codegen/language/definition/src/model/terminals/keyword.rs b/crates/codegen/language/definition/src/model/terminals/keyword.rs index 6d066912a7..65567a0b5e 100644 --- a/crates/codegen/language/definition/src/model/terminals/keyword.rs +++ b/crates/codegen/language/definition/src/model/terminals/keyword.rs @@ -34,27 +34,20 @@ impl KeywordValue { /// Collects all possible variations generated by this value. 
pub fn collect_variations(&self) -> Vec { match self { - KeywordValue::Atom { atom } => { - vec![atom.to_owned()] - } + KeywordValue::Atom { atom } => vec![atom.to_owned()], KeywordValue::Optional { value } => { let mut results = value.collect_variations(); results.insert(0, String::new()); results } KeywordValue::Choice { values } => { - return values - .iter() - .flat_map(|value| value.collect_variations()) - .collect_vec(); + values.iter().flat_map(Self::collect_variations).collect() } + KeywordValue::Sequence { values } => { - let matrix = values - .iter() - .map(|value| value.collect_variations()) - .collect_vec(); + let matrix = values.iter().map(Self::collect_variations).collect_vec(); - let results_len = matrix.iter().map(|values| values.len()).product(); + let results_len = matrix.iter().map(Vec::len).product(); let mut results = (0..results_len).map(|_| String::new()).collect_vec(); let mut span = results_len; diff --git a/crates/codegen/language/internal_macros/src/derive/parse_input_tokens.rs b/crates/codegen/language/internal_macros/src/derive/parse_input_tokens.rs index e6bfa24bcc..cbfa3e27e3 100644 --- a/crates/codegen/language/internal_macros/src/derive/parse_input_tokens.rs +++ b/crates/codegen/language/internal_macros/src/derive/parse_input_tokens.rs @@ -5,16 +5,16 @@ use quote::quote; pub fn parse_input_tokens(item: InputItem) -> TokenStream { match item { - InputItem::Struct { name, fields } => derive_struct(name, &fields), - InputItem::Enum { name, variants } => derive_enum(name, &variants), + InputItem::Struct { name, fields } => derive_struct(&name, &fields), + InputItem::Enum { name, variants } => derive_enum(&name, &variants), } } -fn derive_struct(name: Ident, fields: &[InputField]) -> TokenStream { +fn derive_struct(name: &Ident, fields: &[InputField]) -> TokenStream { let stripped_name = Literal::string(&strip_spanned_prefix(name.to_string())); let unexpected_type_error = Literal::string(&format!("Expected type: {stripped_name}")); - let 
fields_return = derive_fields_return(quote!(Self), fields); + let fields_return = derive_fields_return("e!(Self), fields); quote! { impl crate::internals::ParseInputTokens for #name { @@ -44,13 +44,13 @@ fn derive_struct(name: Ident, fields: &[InputField]) -> TokenStream { } } -fn derive_enum(name: Ident, variants: &[InputVariant]) -> TokenStream { +fn derive_enum(name: &Ident, variants: &[InputVariant]) -> TokenStream { let match_arms = variants.iter().map(|variant| { let variant_ident = &variant.name; let variant_name = variant_ident.to_string(); if let Some(fields) = &variant.fields { - let fields_return = derive_fields_return(quote!(Self::#variant_ident), fields); + let fields_return = derive_fields_return("e!(Self::#variant_ident), fields); quote! { #variant_name => { @@ -98,7 +98,7 @@ fn derive_enum(name: Ident, variants: &[InputVariant]) -> TokenStream { } } -fn derive_fields_return(type_name: TokenStream, fields: &[InputField]) -> TokenStream { +fn derive_fields_return(type_name: &TokenStream, fields: &[InputField]) -> TokenStream { // When there is only one field, we omit the `key = ` part. // This way, we can just write `Foo(Bar)` instead of `Foo(key = Bar)`. let assignments = if let [single_field] = fields { diff --git a/crates/codegen/language/internal_macros/src/derive/spanned.rs b/crates/codegen/language/internal_macros/src/derive/spanned.rs index b0c7532d50..2b7376501c 100644 --- a/crates/codegen/language/internal_macros/src/derive/spanned.rs +++ b/crates/codegen/language/internal_macros/src/derive/spanned.rs @@ -14,7 +14,7 @@ pub fn spanned(item: InputItem, spanned_derive_args: TokenStream) -> TokenStream match item { InputItem::Struct { name, fields } => { - let name = format_ident!("{}", add_spanned_prefix(name.to_string())); + let name = format_ident!("{}", add_spanned_prefix(name)); let fields = fields.into_iter().map(derive_field); quote! 
{ @@ -25,7 +25,7 @@ pub fn spanned(item: InputItem, spanned_derive_args: TokenStream) -> TokenStream } } InputItem::Enum { name, variants } => { - let name = format_ident!("{}", add_spanned_prefix(name.to_string())); + let name = format_ident!("{}", add_spanned_prefix(name)); let variants = variants.into_iter().map(derive_variant); quote! { diff --git a/crates/codegen/language/internal_macros/src/derive/write_output_tokens.rs b/crates/codegen/language/internal_macros/src/derive/write_output_tokens.rs index 39e8f13928..9937245818 100644 --- a/crates/codegen/language/internal_macros/src/derive/write_output_tokens.rs +++ b/crates/codegen/language/internal_macros/src/derive/write_output_tokens.rs @@ -5,12 +5,12 @@ use quote::{format_ident, quote}; pub fn write_output_tokens(item: InputItem) -> TokenStream { match item { - InputItem::Struct { name, fields } => derive_struct(name, &fields), - InputItem::Enum { name, variants } => derive_enum(name, &variants), + InputItem::Struct { name, fields } => derive_struct(&name, &fields), + InputItem::Enum { name, variants } => derive_enum(&name, &variants), } } -fn derive_struct(name: Ident, fields: &[InputField]) -> TokenStream { +fn derive_struct(name: &Ident, fields: &[InputField]) -> TokenStream { let stripped_name = format_ident!("{}", strip_spanned_prefix(name.to_string())); let keys = fields.iter().map(|field| &field.name).collect_vec(); @@ -30,7 +30,7 @@ fn derive_struct(name: Ident, fields: &[InputField]) -> TokenStream { } } -fn derive_enum(name: Ident, variants: &[InputVariant]) -> TokenStream { +fn derive_enum(name: &Ident, variants: &[InputVariant]) -> TokenStream { let stripped_name = format_ident!("{}", strip_spanned_prefix(name.to_string())); let match_arms = variants.iter().map(|variant| { diff --git a/crates/codegen/language/internal_macros/src/input_model.rs b/crates/codegen/language/internal_macros/src/input_model.rs index 1cfe01d890..48720427f7 100644 --- 
a/crates/codegen/language/internal_macros/src/input_model.rs +++ b/crates/codegen/language/internal_macros/src/input_model.rs @@ -90,15 +90,15 @@ impl InputField { } } -pub fn add_spanned_prefix(input: String) -> String { +pub fn add_spanned_prefix(input: impl Display) -> String { format!("Spanned{input}") } pub fn strip_spanned_prefix(input: String) -> String { - return match input.strip_prefix("Spanned") { + match input.strip_prefix("Spanned") { Some(suffix) if !suffix.is_empty() => suffix.to_owned(), _ => input, - }; + } } fn error(spanned: impl ToTokens, message: impl Display) -> Result { diff --git a/crates/codegen/parser/generator/src/code_generator.rs b/crates/codegen/parser/generator/src/code_generator.rs index e4ab766305..0ea95c30d3 100644 --- a/crates/codegen/parser/generator/src/code_generator.rs +++ b/crates/codegen/parser/generator/src/code_generator.rs @@ -145,8 +145,8 @@ impl CodeGenerator { .or_insert_with(|| ScannerContext { name, scanner_definitions: BTreeSet::default(), - alpha_literal_scanner: "".to_string(), - non_alpha_literal_scanner: "".to_string(), + alpha_literal_scanner: String::new(), + non_alpha_literal_scanner: String::new(), compound_scanner_names: vec![], delimiters: BTreeMap::default(), }); @@ -228,7 +228,7 @@ impl GrammarVisitor for CodeGenerator { quote! { #code.with_kind(RuleKind::#rule_kind) } } .to_string(), - )) + )); } fn parser_definition_enter(&mut self, parser: &ParserDefinitionRef) { @@ -264,7 +264,7 @@ impl GrammarVisitor for CodeGenerator { quote! 
{ #code.with_kind(RuleKind::#rule_kind) } } .to_string(), - )) + )); } fn scanner_definition_node_enter(&mut self, node: &ScannerDefinitionNode) { diff --git a/crates/codegen/parser/generator/src/parser_definition.rs b/crates/codegen/parser/generator/src/parser_definition.rs index 40eea95993..8fd5fd02f0 100644 --- a/crates/codegen/parser/generator/src/parser_definition.rs +++ b/crates/codegen/parser/generator/src/parser_definition.rs @@ -32,6 +32,7 @@ pub trait ParserDefinitionNodeExtensions { } impl ParserDefinitionNodeExtensions for ParserDefinitionNode { + #[allow(clippy::too_many_lines)] // giant switch over parser definition node types fn to_parser_code(&self, context_name: &'static str, is_trivia: bool) -> TokenStream { let context = format_ident!("{context_name}"); let lex_ctx = quote! { LexicalContextType::#context }; diff --git a/crates/codegen/parser/generator/src/precedence_parser_definition.rs b/crates/codegen/parser/generator/src/precedence_parser_definition.rs index 4f47256fd7..d97e68e3f9 100644 --- a/crates/codegen/parser/generator/src/precedence_parser_definition.rs +++ b/crates/codegen/parser/generator/src/precedence_parser_definition.rs @@ -71,6 +71,7 @@ impl PrecedenceParserDefinitionNodeExtensions for PrecedenceParserDefinitionNode // The second pass is in the method `PrecedenceHelper::reduce_precedence_result` because it // is independent of the grammar. 
+ #[allow(clippy::too_many_lines)] // Repetition-heavy with 4 kinds of precedence operators fn to_parser_code( &self, context_name: &'static str, @@ -86,7 +87,7 @@ impl PrecedenceParserDefinitionNodeExtensions for PrecedenceParserDefinitionNode let mut operator_closures = Vec::new(); let mut binding_power = 1u8; - for (version_quality_ranges, model, name, operator_definition) in self.operators.iter() { + for (version_quality_ranges, model, name, operator_definition) in &self.operators { let operator_code = operator_definition .node() .to_parser_code(context_name, false); @@ -161,7 +162,7 @@ impl PrecedenceParserDefinitionNodeExtensions for PrecedenceParserDefinitionNode #[allow(clippy::items_after_statements)] fn make_sequence(parsers: Vec) -> TokenStream { let parsers = parsers - .iter() + .into_iter() .map(|parser| quote! { seq.elem(#parser)?; }) .collect::>(); quote! { @@ -175,7 +176,7 @@ impl PrecedenceParserDefinitionNodeExtensions for PrecedenceParserDefinitionNode #[allow(clippy::items_after_statements)] fn make_choice(parsers: Vec) -> TokenStream { let parsers = parsers - .iter() + .into_iter() .map(|(parser, version_quality_ranges)| { version_quality_ranges.wrap_code( quote! 
{ diff --git a/crates/codegen/parser/generator/src/scanner_definition.rs b/crates/codegen/parser/generator/src/scanner_definition.rs index 324c73792d..831ee85e38 100644 --- a/crates/codegen/parser/generator/src/scanner_definition.rs +++ b/crates/codegen/parser/generator/src/scanner_definition.rs @@ -95,7 +95,7 @@ impl ScannerDefinitionNodeExtensions for ScannerDefinitionNode { if let ScannerDefinitionNode::Literal(string) = node { scanners.push(string); } else { - non_literal_scanners.push(node.to_scanner_code()) + non_literal_scanners.push(node.to_scanner_code()); } } scanners.sort(); diff --git a/crates/codegen/parser/runtime/src/cst.rs b/crates/codegen/parser/runtime/src/cst.rs index c8f9a650b0..1ef1ff9800 100644 --- a/crates/codegen/parser/runtime/src/cst.rs +++ b/crates/codegen/parser/runtime/src/cst.rs @@ -28,6 +28,7 @@ pub enum Node { Token(Rc), } +#[allow(clippy::match_wildcard_for_single_variants)] impl Node { pub fn rule(kind: RuleKind, children: Vec) -> Self { let text_len = children.iter().map(Node::text_len).sum(); diff --git a/crates/codegen/parser/runtime/src/cursor.rs b/crates/codegen/parser/runtime/src/cursor.rs index 39bc35f1bf..e8f92483fd 100644 --- a/crates/codegen/parser/runtime/src/cursor.rs +++ b/crates/codegen/parser/runtime/src/cursor.rs @@ -116,6 +116,7 @@ impl Cursor { /// Unlike `clone`, this re-roots at the current node. /// It does preserve the correct text offset however, /// even though the path is reset. + #[must_use] pub fn spawn(&self) -> Self { Self { path: vec![], @@ -205,17 +206,14 @@ impl Cursor { /// /// Returns `false` if the cursor is finished and at the root. 
pub fn go_to_parent(&mut self) -> bool { - match self.path.pop() { - Some(parent) => { - self.current = parent.into_path_node(); + if let Some(parent) = self.path.pop() { + self.current = parent.into_path_node(); - true - } - None => { - self.is_completed = true; + true + } else { + self.is_completed = true; - false - } + false } } diff --git a/crates/codegen/parser/runtime/src/napi/napi_cursor.rs b/crates/codegen/parser/runtime/src/napi/napi_cursor.rs index f2b224a4d1..e621260b67 100644 --- a/crates/codegen/parser/runtime/src/napi/napi_cursor.rs +++ b/crates/codegen/parser/runtime/src/napi/napi_cursor.rs @@ -1,3 +1,8 @@ +// NAPI-exposed functions have to accept owned values +#![allow(clippy::needless_pass_by_value)] +// The functions are meant to be definitions for export, so they're not really used +#![allow(clippy::return_self_not_must_use)] + use { napi::{bindgen_prelude::Env, JsObject}, napi_derive::napi, @@ -24,12 +29,12 @@ impl Cursor { #[napi] pub fn reset(&mut self) { - self.0.reset() + self.0.reset(); } #[napi] pub fn complete(&mut self) { - self.0.complete() + self.0.complete(); } #[napi] @@ -63,6 +68,7 @@ impl Cursor { (&self.0.text_range()).into() } + #[allow(clippy::cast_possible_truncation)] // Cursor depth can't reasonably be larger than u32 #[napi(getter)] pub fn depth(&self) -> u32 { self.0.depth() as u32 diff --git a/crates/codegen/parser/runtime/src/napi/napi_parse_error.rs b/crates/codegen/parser/runtime/src/napi/napi_parse_error.rs index 379ea7f622..349250ae5f 100644 --- a/crates/codegen/parser/runtime/src/napi/napi_parse_error.rs +++ b/crates/codegen/parser/runtime/src/napi/napi_parse_error.rs @@ -1,3 +1,6 @@ +// NAPI-exposed functions have to accept owned values. 
+#![allow(clippy::needless_pass_by_value)] + use napi_derive::napi; use super::{napi_text_index, RustParseError}; @@ -21,11 +24,7 @@ impl ParseError { } pub fn tokens_that_would_have_allowed_more_progress(&self) -> Vec { - self.0 - .tokens_that_would_have_allowed_more_progress() - .iter() - .map(|x| x.to_string()) - .collect() + self.0.tokens_that_would_have_allowed_more_progress() } #[napi(namespace = "parse_error")] diff --git a/crates/codegen/parser/runtime/src/napi/napi_text_index.rs b/crates/codegen/parser/runtime/src/napi/napi_text_index.rs index a08d0a0503..ab45af8221 100644 --- a/crates/codegen/parser/runtime/src/napi/napi_text_index.rs +++ b/crates/codegen/parser/runtime/src/napi/napi_text_index.rs @@ -12,6 +12,8 @@ pub struct TextIndex { impl From<&RustTextIndex> for TextIndex { fn from(value: &RustTextIndex) -> Self { + // We only support 32-byte indices on TS side. + #[allow(clippy::cast_possible_truncation)] Self { utf8: value.utf8 as u32, utf16: value.utf16 as u32, diff --git a/crates/codegen/parser/runtime/src/parse_error.rs b/crates/codegen/parser/runtime/src/parse_error.rs index 49481fc541..01bc6c48d7 100644 --- a/crates/codegen/parser/runtime/src/parse_error.rs +++ b/crates/codegen/parser/runtime/src/parse_error.rs @@ -20,12 +20,11 @@ impl ParseError { let tokens_that_would_have_allowed_more_progress = self .tokens_that_would_have_allowed_more_progress .iter() - .map(|kind| kind.as_ref()) .collect::>(); tokens_that_would_have_allowed_more_progress .into_iter() - .map(|token| token.to_string()) + .map(ToString::to_string) .collect() } diff --git a/crates/codegen/parser/runtime/src/support/parser_result.rs b/crates/codegen/parser/runtime/src/support/parser_result.rs index 7bc5159651..0d29c13dd4 100644 --- a/crates/codegen/parser/runtime/src/support/parser_result.rs +++ b/crates/codegen/parser/runtime/src/support/parser_result.rs @@ -54,6 +54,7 @@ impl ParserResult { matches!(self, ParserResult::NoMatch(_)) } + #[must_use] pub fn with_kind(self, 
new_kind: RuleKind) -> ParserResult { match self { ParserResult::Match(r#match) => ParserResult::r#match( diff --git a/crates/codegen/parser/runtime/src/support/precedence_helper.rs b/crates/codegen/parser/runtime/src/support/precedence_helper.rs index 6ab10b7200..bc93d2e7d8 100644 --- a/crates/codegen/parser/runtime/src/support/precedence_helper.rs +++ b/crates/codegen/parser/runtime/src/support/precedence_helper.rs @@ -56,6 +56,8 @@ impl PrecedenceHelper { _ => result, } } + + #[allow(clippy::too_many_lines)] // Explicit on purpose, see below. pub fn reduce_precedence_result( child_kind: Option, result: ParserResult, @@ -67,6 +69,7 @@ impl PrecedenceHelper { // If the input is valid this should be correct by construction. + #[allow(clippy::redundant_else)] match result { ParserResult::PrattOperatorMatch(pratt_operator_match) => { let mut pratt_elements = pratt_operator_match.elements; diff --git a/crates/codegen/parser/runtime/src/support/recovery.rs b/crates/codegen/parser/runtime/src/support/recovery.rs index 0f040311de..66e3f50ccc 100644 --- a/crates/codegen/parser/runtime/src/support/recovery.rs +++ b/crates/codegen/parser/runtime/src/support/recovery.rs @@ -9,13 +9,14 @@ use super::parser_result::SkippedUntil; use super::ParserContext; /// An explicit parameter for the [`ParserResult::recover_until_with_nested_delims`] method. +#[derive(Clone, Copy)] pub enum RecoverFromNoMatch { Yes, No, } impl RecoverFromNoMatch { - pub fn as_bool(&self) -> bool { + pub fn as_bool(self) -> bool { matches!(self, RecoverFromNoMatch::Yes) } } @@ -40,6 +41,7 @@ impl ParserResult { /// /// Respects nested delimiters, i.e. the `expected` token is only accepted if it's not nested inside. /// Does not consume the `expected` token. 
+ #[must_use] pub fn recover_until_with_nested_delims( self, input: &mut ParserContext<'_>, @@ -75,38 +77,37 @@ impl ParserResult { let leading_trivia = opt_parse(input, |input| lexer.leading_trivia(input)); - match skip_until_with_nested_delims::<_, LexCtx>(input, lexer, expected) { - Some((found, skipped_range)) => { - nodes.extend(leading_trivia); - if matches!(result_kind, ParseResultKind::Match) { - expected_tokens.push(expected); - } + if let Some((found, skipped_range)) = + skip_until_with_nested_delims::<_, LexCtx>(input, lexer, expected) + { + nodes.extend(leading_trivia); + if matches!(result_kind, ParseResultKind::Match) { + expected_tokens.push(expected); + } - let skipped = input.content(skipped_range.utf8()); + let skipped = input.content(skipped_range.utf8()); - input.emit(ParseError { - text_range: skipped_range, - tokens_that_would_have_allowed_more_progress: expected_tokens.clone(), - }); + input.emit(ParseError { + text_range: skipped_range, + tokens_that_would_have_allowed_more_progress: expected_tokens.clone(), + }); - ParserResult::SkippedUntil(SkippedUntil { - nodes, - skipped, - found, - expected, - }) - } + ParserResult::SkippedUntil(SkippedUntil { + nodes, + skipped, + found, + expected, + }) + } else { // Not found till EOF, revert any recovery attempt - None => { - input.set_position(before_recovery); - - match result_kind { - ParseResultKind::Match => ParserResult::r#match(nodes, expected_tokens), - ParseResultKind::Incomplete => { - ParserResult::incomplete_match(nodes, expected_tokens) - } - ParseResultKind::NoMatch => ParserResult::no_match(expected_tokens), + input.set_position(before_recovery); + + match result_kind { + ParseResultKind::Match => ParserResult::r#match(nodes, expected_tokens), + ParseResultKind::Incomplete => { + ParserResult::incomplete_match(nodes, expected_tokens) } + ParseResultKind::NoMatch => ParserResult::no_match(expected_tokens), } } } diff --git a/crates/codegen/parser/runtime/src/support/scanner_macros.rs 
b/crates/codegen/parser/runtime/src/support/scanner_macros.rs index be45a7e28c..b35b5694d9 100644 --- a/crates/codegen/parser/runtime/src/support/scanner_macros.rs +++ b/crates/codegen/parser/runtime/src/support/scanner_macros.rs @@ -83,6 +83,7 @@ macro_rules! scan_zero_or_more { macro_rules! scan_one_or_more { ($stream:ident, $scanner:expr) => {{ let mut count = 0; + #[allow(clippy::redundant_else)] loop { let save = $stream.position(); if !($scanner) { diff --git a/crates/codegen/parser/runtime/src/support/separated_helper.rs b/crates/codegen/parser/runtime/src/support/separated_helper.rs index eb3d3fce8e..14d4d52ed9 100644 --- a/crates/codegen/parser/runtime/src/support/separated_helper.rs +++ b/crates/codegen/parser/runtime/src/support/separated_helper.rs @@ -86,11 +86,11 @@ impl SeparatedHelper { } } ParserResult::NoMatch(no_match) => { - if accum.is_empty() { - return ParserResult::no_match(no_match.expected_tokens); + return if accum.is_empty() { + ParserResult::no_match(no_match.expected_tokens) } else { - return ParserResult::incomplete_match(accum, no_match.expected_tokens); - } + ParserResult::incomplete_match(accum, no_match.expected_tokens) + }; } ParserResult::SkippedUntil(skipped) => { diff --git a/crates/codegen/parser/runtime/src/support/sequence_helper.rs b/crates/codegen/parser/runtime/src/support/sequence_helper.rs index ee16c74316..68f67d4a3e 100644 --- a/crates/codegen/parser/runtime/src/support/sequence_helper.rs +++ b/crates/codegen/parser/runtime/src/support/sequence_helper.rs @@ -33,6 +33,7 @@ impl SequenceHelper { ) } + #[allow(clippy::too_many_lines)] // Big switch that purely defines the sequence logic /// Attempts to append the next result until we hit an incomplete/no match. 
fn attempt_append(&mut self, next_result: ParserResult) { match self.result { @@ -93,7 +94,7 @@ impl SequenceHelper { self.result = State::Running(ParserResult::incomplete_match( std::mem::take(&mut cur.elements) .into_iter() - .flat_map(|pratt| pratt.into_nodes()) + .flat_map(PrattElement::into_nodes) .chain(next.nodes) .collect(), next.expected_tokens, @@ -103,7 +104,7 @@ impl SequenceHelper { self.result = State::Running(ParserResult::incomplete_match( std::mem::take(&mut cur.elements) .into_iter() - .flat_map(|pratt| pratt.into_nodes()) + .flat_map(PrattElement::into_nodes) .collect(), next.expected_tokens, )); diff --git a/crates/codegen/parser/runtime/src/templates/language.rs.jinja2 b/crates/codegen/parser/runtime/src/templates/language.rs.jinja2 index c2c726a321..9615655d9b 100644 --- a/crates/codegen/parser/runtime/src/templates/language.rs.jinja2 +++ b/crates/codegen/parser/runtime/src/templates/language.rs.jinja2 @@ -1,3 +1,6 @@ +// This file is generated; we can't reasonably satisfy some of these lints. +#![allow(clippy::if_not_else, clippy::too_many_lines, clippy::unused_self, clippy::struct_excessive_bools, clippy::similar_names)] + #[cfg(feature = "slang_napi_interfaces")] use {napi::bindgen_prelude::*, napi_derive::napi}; @@ -188,6 +191,8 @@ impl Lexer for Language { } #[cfg(feature = "slang_napi_interfaces")] +// NAPI-exposed functions have to accept owned values. 
+#[allow(clippy::needless_pass_by_value)] #[napi(namespace = "language")] impl Language { @@ -214,7 +219,7 @@ impl Language { #[napi(js_name = "parse", ts_return_type = "parse_output.ParseOutput")] pub fn parse_napi( - &self, + &self, #[napi(ts_arg_type = "kinds.ProductionKind")] production_kind: ProductionKind, input: String ) -> NAPIParseOutput { diff --git a/crates/codegen/schema/src/validation/rules/definitions/keywords/collector.rs b/crates/codegen/schema/src/validation/rules/definitions/keywords/collector.rs index 021134a05d..2a304e332e 100644 --- a/crates/codegen/schema/src/validation/rules/definitions/keywords/collector.rs +++ b/crates/codegen/schema/src/validation/rules/definitions/keywords/collector.rs @@ -52,16 +52,11 @@ impl Visitor for KeywordsCollector { location: &LocationRef, reporter: &mut Reporter, ) -> bool { - let identifier = - if let ScannerDefinition::TrailingContext { scanner, .. } = &scanner.definition { - scanner - } else { - return true; - }; + let ScannerDefinition::TrailingContext { scanner, .. } = &scanner.definition else { + return true; + }; - let variations = if let Some(variations) = Self::collect_variations(identifier) { - variations - } else { + let Some(variations) = Self::collect_variations(scanner) else { return false; }; @@ -107,7 +102,7 @@ impl KeywordsCollector { ScannerDefinition::Difference { minuend, .. 
} => Self::collect_variations(minuend), ScannerDefinition::Optional(child) => { let mut variations = Self::collect_variations(child)?; - variations.push("".to_owned()); + variations.push(String::new()); Some(variations) } ScannerDefinition::Range { from, to } => { diff --git a/crates/codegen/schema/src/validation/rules/definitions/operators/mod.rs b/crates/codegen/schema/src/validation/rules/definitions/operators/mod.rs index b7d7aca01d..66f784319e 100644 --- a/crates/codegen/schema/src/validation/rules/definitions/operators/mod.rs +++ b/crates/codegen/schema/src/validation/rules/definitions/operators/mod.rs @@ -59,7 +59,7 @@ impl Visitor for Operators { location: &LocationRef, reporter: &mut Reporter, ) -> bool { - for expression in parser.operator_expressions.iter() { + for expression in &parser.operator_expressions { let name = &expression.name; if self.language.productions.contains_key(name) { reporter.report(location, Errors::OperatorNamedAsProduction(name.to_owned())); diff --git a/crates/codegen/schema/src/validation/rules/definitions/versions/mod.rs b/crates/codegen/schema/src/validation/rules/definitions/versions/mod.rs index b74a8b2f22..4e59aa6401 100644 --- a/crates/codegen/schema/src/validation/rules/definitions/versions/mod.rs +++ b/crates/codegen/schema/src/validation/rules/definitions/versions/mod.rs @@ -45,9 +45,8 @@ impl Visitor for Versions { location: &LocationRef, reporter: &mut Reporter, ) -> bool { - let versions = match production.versions() { - Some(versions) => versions, - None => return false, + let Some(versions) = production.versions() else { + return false; }; if versions.is_empty() { diff --git a/crates/codegen/schema/src/validation/rules/references/validator.rs b/crates/codegen/schema/src/validation/rules/references/validator.rs index 89befbaa63..d03d397e9e 100644 --- a/crates/codegen/schema/src/validation/rules/references/validator.rs +++ b/crates/codegen/schema/src/validation/rules/references/validator.rs @@ -124,6 +124,7 @@ impl 
Visitor for Validator<'_> { } } +#[derive(Clone, Copy)] enum ReferenceKind { ParserToAnything, ParserToScanner, @@ -151,12 +152,9 @@ impl Validator<'_> { return; } - let reference = match self.language.productions.get(reference_name) { - Some(reference) => reference, - None => { - reporter.report(location, Errors::NotDefined(reference_name.to_owned())); - return; - } + let Some(reference) = self.language.productions.get(reference_name) else { + reporter.report(location, Errors::NotDefined(reference_name.to_owned())); + return; }; if !self.metadata.is_defined_over(reference_name, version_set) { diff --git a/crates/codegen/schema/src/validation/visitors/version_set.rs b/crates/codegen/schema/src/validation/visitors/version_set.rs index 8d921254dc..81353fa7a6 100644 --- a/crates/codegen/schema/src/validation/visitors/version_set.rs +++ b/crates/codegen/schema/src/validation/visitors/version_set.rs @@ -29,11 +29,11 @@ impl VersionSet { } #[cfg(test)] - fn from_ranges(ranges: Vec) -> Self { + fn from_ranges(ranges: impl IntoIterator) -> Self { let mut instance = Self::empty(); - for range in &ranges { - instance.add(range); + for range in ranges { + instance.add(&range); } instance diff --git a/crates/codegen/spec/src/snippets.rs b/crates/codegen/spec/src/snippets.rs index c934e6774a..28958c8c1f 100644 --- a/crates/codegen/spec/src/snippets.rs +++ b/crates/codegen/spec/src/snippets.rs @@ -36,7 +36,7 @@ impl Snippets { for version in versions { if let Some(snippet_path) = self.get_snippet_path(production, version) { let snippet = self.get_snippet(production, version).unwrap_or_default(); - codegen.write_file(snippet_path, snippet)? 
+ codegen.write_file(snippet_path, snippet)?; }; } } diff --git a/crates/infra/cli/src/commands/ci/mod.rs b/crates/infra/cli/src/commands/ci/mod.rs index 71f0730114..3635c8df47 100644 --- a/crates/infra/cli/src/commands/ci/mod.rs +++ b/crates/infra/cli/src/commands/ci/mod.rs @@ -9,6 +9,7 @@ use crate::commands::{ pub struct CiController; impl CiController { + #[allow(clippy::unused_self)] // For symmetry with other commands pub fn execute(&self) -> Result<()> { // Run all CI steps in order: _SLANG_INFRA_CI_STEPS_ORDERED_ (keep in sync) diff --git a/crates/infra/cli/src/toolchains/napi/cli.rs b/crates/infra/cli/src/toolchains/napi/cli.rs index 66d7ff13d8..14f53dcb62 100644 --- a/crates/infra/cli/src/toolchains/napi/cli.rs +++ b/crates/infra/cli/src/toolchains/napi/cli.rs @@ -69,7 +69,11 @@ impl NapiCli { "index.js" | "index.d.ts" => { source_files.push(output_dir.join(file_name)); } - file if file.ends_with(".node") && node_binary.is_none() => { + file if Path::new(file) + .extension() + .map_or(false, |ext| ext.eq_ignore_ascii_case("node")) + && node_binary.is_none() => + { node_binary = Some(output_dir.join(file)); } _ => { diff --git a/crates/infra/cli/src/toolchains/napi/compiler.rs b/crates/infra/cli/src/toolchains/napi/compiler.rs index 5f55d03a39..2a37a77dd2 100644 --- a/crates/infra/cli/src/toolchains/napi/compiler.rs +++ b/crates/infra/cli/src/toolchains/napi/compiler.rs @@ -12,6 +12,7 @@ use crate::toolchains::napi::{ resolver::{NapiPackageKind, NapiResolver}, }; +#[derive(Clone, Copy)] pub enum NapiProfile { /// Build only the main package for local development. 
Debug, diff --git a/crates/infra/utils/src/codegen/common/formatting.rs b/crates/infra/utils/src/codegen/common/formatting.rs index 5046b7604d..a1c5a8eb30 100644 --- a/crates/infra/utils/src/codegen/common/formatting.rs +++ b/crates/infra/utils/src/codegen/common/formatting.rs @@ -56,7 +56,7 @@ fn generate_header(file_path: &Path) -> String { "This file is generated automatically by infrastructure scripts. Please don't edit by hand."; return match get_extension(file_path) { - "json" => "".to_string(), + "json" => String::new(), "html" | "md" => format!(""), "js" | "rs" | "ts" => format!("// {warning_line}"), "yml" | "zsh-completions" => format!("# {warning_line}"), diff --git a/crates/infra/utils/src/commands/mod.rs b/crates/infra/utils/src/commands/mod.rs index a0c108aa81..46a9f79929 100644 --- a/crates/infra/utils/src/commands/mod.rs +++ b/crates/infra/utils/src/commands/mod.rs @@ -26,6 +26,7 @@ pub struct Command { } impl Command { + #[must_use] pub fn new(name: impl Into) -> Self { Self { name: name.into(), @@ -35,18 +36,21 @@ impl Command { } } + #[must_use] pub fn flag(mut self, flag: impl Into) -> Self { self.args.push(flag.into()); self } + #[must_use] pub fn arg(mut self, arg: impl Into) -> Self { self.args.push(arg.into()); self } + #[must_use] pub fn args(mut self, args: impl IntoIterator>) -> Self { for arg in args { self.args.push(arg.into()); @@ -55,6 +59,7 @@ impl Command { self } + #[must_use] pub fn property(mut self, key: impl Into, value: impl Into) -> Self { self.args.push(key.into()); self.args.push(value.into()); @@ -62,12 +67,14 @@ impl Command { self } + #[must_use] pub fn env(mut self, key: impl Into, value: impl Into) -> Self { self.environment.insert(key.into(), value.into()); self } + #[must_use] pub fn current_dir(mut self, current_dir: impl Into) -> Self { let current_dir = current_dir.into(); if current_dir != Path::repo_root() { diff --git a/crates/infra/utils/src/lib.rs b/crates/infra/utils/src/lib.rs index 898c7a5ac4..62597335ae 
100644 --- a/crates/infra/utils/src/lib.rs +++ b/crates/infra/utils/src/lib.rs @@ -1,3 +1,5 @@ +#![allow(clippy::missing_errors_doc)] + pub mod cargo; pub mod codegen; pub mod commands; diff --git a/crates/solidity/inputs/language/src/grammar.rs b/crates/solidity/inputs/language/src/grammar.rs index 8d5dbc8904..aa755e1ab0 100644 --- a/crates/solidity/inputs/language/src/grammar.rs +++ b/crates/solidity/inputs/language/src/grammar.rs @@ -29,6 +29,7 @@ pub trait GrammarConstructorDslV2 { } impl GrammarConstructorDslV2 for Grammar { + #[allow(clippy::too_many_lines)] // TODO: Remove me once the hack below is removed fn from_dsl_v2(lang: &model::Language) -> Grammar { // Collect language items into a lookup table to speed up resolution let mut items: HashMap<_, _> = lang @@ -156,13 +157,13 @@ impl GrammarConstructorDslV2 for Grammar { versions: BTreeSet::from_iter(lang.versions.clone()), leading_trivia_parser: leading_trivia.clone(), trailing_trivia_parser: trailing_trivia.clone(), - elements: HashMap::from_iter( - resolved_items.chain( + elements: resolved_items + .chain( [leading_trivia, trailing_trivia, eof_trivia] .into_iter() .map(|elem| (elem.name(), elem.into())), - ), - ), + ) + .collect(), } } } @@ -253,6 +254,7 @@ enum ParserThunk { Regular(Rc), Precedence(Rc), } +#[allow(clippy::match_wildcard_for_single_variants)] impl ParserThunk { fn as_regular_def(&self) -> &OnceCell { match self { @@ -301,6 +303,7 @@ struct ResolveCtx<'a> { resolved: &'a mut HashMap, } +#[allow(clippy::too_many_lines)] // FIXME: Simplify me when we simplify the v2-to-v1 interface fn resolve_grammar_element(ident: &Identifier, ctx: &mut ResolveCtx<'_>) -> GrammarElement { if ident.as_str() == "EndOfFileTrivia" { return ctx.resolved.get(ident).unwrap().clone(); diff --git a/crates/solidity/outputs/cargo/crate/src/generated/cst.rs b/crates/solidity/outputs/cargo/crate/src/generated/cst.rs index b4d799140c..3e1a704040 100644 --- a/crates/solidity/outputs/cargo/crate/src/generated/cst.rs +++ 
b/crates/solidity/outputs/cargo/crate/src/generated/cst.rs @@ -30,6 +30,7 @@ pub enum Node { Token(Rc), } +#[allow(clippy::match_wildcard_for_single_variants)] impl Node { pub fn rule(kind: RuleKind, children: Vec) -> Self { let text_len = children.iter().map(Node::text_len).sum(); diff --git a/crates/solidity/outputs/cargo/crate/src/generated/cursor.rs b/crates/solidity/outputs/cargo/crate/src/generated/cursor.rs index 0554ec02bf..2b64bdada7 100644 --- a/crates/solidity/outputs/cargo/crate/src/generated/cursor.rs +++ b/crates/solidity/outputs/cargo/crate/src/generated/cursor.rs @@ -118,6 +118,7 @@ impl Cursor { /// Unlike `clone`, this re-roots at the current node. /// It does preserve the correct text offset however, /// even though the path is reset. + #[must_use] pub fn spawn(&self) -> Self { Self { path: vec![], @@ -207,17 +208,14 @@ impl Cursor { /// /// Returns `false` if the cursor is finished and at the root. pub fn go_to_parent(&mut self) -> bool { - match self.path.pop() { - Some(parent) => { - self.current = parent.into_path_node(); + if let Some(parent) = self.path.pop() { + self.current = parent.into_path_node(); - true - } - None => { - self.is_completed = true; + true + } else { + self.is_completed = true; - false - } + false } } diff --git a/crates/solidity/outputs/cargo/crate/src/generated/language.rs b/crates/solidity/outputs/cargo/crate/src/generated/language.rs index f3d74911f2..183df95f28 100644 --- a/crates/solidity/outputs/cargo/crate/src/generated/language.rs +++ b/crates/solidity/outputs/cargo/crate/src/generated/language.rs @@ -1,5 +1,14 @@ // This file is generated automatically by infrastructure scripts. Please don't edit by hand. +// This file is generated; we can't reasonably satisfy some of these lints. 
+#![allow( + clippy::if_not_else, + clippy::too_many_lines, + clippy::unused_self, + clippy::struct_excessive_bools, + clippy::similar_names +)] + #[cfg(feature = "slang_napi_interfaces")] use {napi::bindgen_prelude::*, napi_derive::napi}; @@ -11439,6 +11448,8 @@ impl Lexer for Language { } #[cfg(feature = "slang_napi_interfaces")] +// NAPI-exposed functions have to accept owned values. +#[allow(clippy::needless_pass_by_value)] #[napi(namespace = "language")] impl Language { #[napi(constructor)] diff --git a/crates/solidity/outputs/cargo/crate/src/generated/napi/napi_cursor.rs b/crates/solidity/outputs/cargo/crate/src/generated/napi/napi_cursor.rs index 7ea067c144..9a531b242d 100644 --- a/crates/solidity/outputs/cargo/crate/src/generated/napi/napi_cursor.rs +++ b/crates/solidity/outputs/cargo/crate/src/generated/napi/napi_cursor.rs @@ -1,5 +1,10 @@ // This file is generated automatically by infrastructure scripts. Please don't edit by hand. +// NAPI-exposed functions have to accept owned values +#![allow(clippy::needless_pass_by_value)] +// The functions are meant to be definitions for export, so they're not really used +#![allow(clippy::return_self_not_must_use)] + use { napi::{bindgen_prelude::Env, JsObject}, napi_derive::napi, @@ -26,12 +31,12 @@ impl Cursor { #[napi] pub fn reset(&mut self) { - self.0.reset() + self.0.reset(); } #[napi] pub fn complete(&mut self) { - self.0.complete() + self.0.complete(); } #[napi] @@ -65,6 +70,7 @@ impl Cursor { (&self.0.text_range()).into() } + #[allow(clippy::cast_possible_truncation)] // Cursor depth can't reasonably be larger than u32 #[napi(getter)] pub fn depth(&self) -> u32 { self.0.depth() as u32 diff --git a/crates/solidity/outputs/cargo/crate/src/generated/napi/napi_parse_error.rs b/crates/solidity/outputs/cargo/crate/src/generated/napi/napi_parse_error.rs index 1013a69213..1b1959dcaf 100644 --- a/crates/solidity/outputs/cargo/crate/src/generated/napi/napi_parse_error.rs +++ 
b/crates/solidity/outputs/cargo/crate/src/generated/napi/napi_parse_error.rs @@ -1,5 +1,8 @@ // This file is generated automatically by infrastructure scripts. Please don't edit by hand. +// NAPI-exposed functions have to accept owned values. +#![allow(clippy::needless_pass_by_value)] + use napi_derive::napi; use super::{napi_text_index, RustParseError}; @@ -23,11 +26,7 @@ impl ParseError { } pub fn tokens_that_would_have_allowed_more_progress(&self) -> Vec { - self.0 - .tokens_that_would_have_allowed_more_progress() - .iter() - .map(|x| x.to_string()) - .collect() + self.0.tokens_that_would_have_allowed_more_progress() } #[napi(namespace = "parse_error")] diff --git a/crates/solidity/outputs/cargo/crate/src/generated/napi/napi_text_index.rs b/crates/solidity/outputs/cargo/crate/src/generated/napi/napi_text_index.rs index 64f69d576a..e95cad36f2 100644 --- a/crates/solidity/outputs/cargo/crate/src/generated/napi/napi_text_index.rs +++ b/crates/solidity/outputs/cargo/crate/src/generated/napi/napi_text_index.rs @@ -14,6 +14,8 @@ pub struct TextIndex { impl From<&RustTextIndex> for TextIndex { fn from(value: &RustTextIndex) -> Self { + // We only support 32-byte indices on TS side. 
+ #[allow(clippy::cast_possible_truncation)] Self { utf8: value.utf8 as u32, utf16: value.utf16 as u32, diff --git a/crates/solidity/outputs/cargo/crate/src/generated/parse_error.rs b/crates/solidity/outputs/cargo/crate/src/generated/parse_error.rs index fce07cfa9a..7ca261a7dd 100644 --- a/crates/solidity/outputs/cargo/crate/src/generated/parse_error.rs +++ b/crates/solidity/outputs/cargo/crate/src/generated/parse_error.rs @@ -22,12 +22,11 @@ impl ParseError { let tokens_that_would_have_allowed_more_progress = self .tokens_that_would_have_allowed_more_progress .iter() - .map(|kind| kind.as_ref()) .collect::>(); tokens_that_would_have_allowed_more_progress .into_iter() - .map(|token| token.to_string()) + .map(ToString::to_string) .collect() } diff --git a/crates/solidity/outputs/cargo/crate/src/generated/support/parser_result.rs b/crates/solidity/outputs/cargo/crate/src/generated/support/parser_result.rs index f88dfcfae2..49e7f51785 100644 --- a/crates/solidity/outputs/cargo/crate/src/generated/support/parser_result.rs +++ b/crates/solidity/outputs/cargo/crate/src/generated/support/parser_result.rs @@ -56,6 +56,7 @@ impl ParserResult { matches!(self, ParserResult::NoMatch(_)) } + #[must_use] pub fn with_kind(self, new_kind: RuleKind) -> ParserResult { match self { ParserResult::Match(r#match) => ParserResult::r#match( diff --git a/crates/solidity/outputs/cargo/crate/src/generated/support/precedence_helper.rs b/crates/solidity/outputs/cargo/crate/src/generated/support/precedence_helper.rs index 850ca7401e..0ee251bda3 100644 --- a/crates/solidity/outputs/cargo/crate/src/generated/support/precedence_helper.rs +++ b/crates/solidity/outputs/cargo/crate/src/generated/support/precedence_helper.rs @@ -58,6 +58,8 @@ impl PrecedenceHelper { _ => result, } } + + #[allow(clippy::too_many_lines)] // Explicit on purpose, see below. 
pub fn reduce_precedence_result( child_kind: Option, result: ParserResult, @@ -69,6 +71,7 @@ impl PrecedenceHelper { // If the input is valid this should be correct by construction. + #[allow(clippy::redundant_else)] match result { ParserResult::PrattOperatorMatch(pratt_operator_match) => { let mut pratt_elements = pratt_operator_match.elements; diff --git a/crates/solidity/outputs/cargo/crate/src/generated/support/recovery.rs b/crates/solidity/outputs/cargo/crate/src/generated/support/recovery.rs index f013cb60c0..d3e158ed02 100644 --- a/crates/solidity/outputs/cargo/crate/src/generated/support/recovery.rs +++ b/crates/solidity/outputs/cargo/crate/src/generated/support/recovery.rs @@ -11,13 +11,14 @@ use super::parser_result::SkippedUntil; use super::ParserContext; /// An explicit parameter for the [`ParserResult::recover_until_with_nested_delims`] method. +#[derive(Clone, Copy)] pub enum RecoverFromNoMatch { Yes, No, } impl RecoverFromNoMatch { - pub fn as_bool(&self) -> bool { + pub fn as_bool(self) -> bool { matches!(self, RecoverFromNoMatch::Yes) } } @@ -42,6 +43,7 @@ impl ParserResult { /// /// Respects nested delimiters, i.e. the `expected` token is only accepted if it's not nested inside. /// Does not consume the `expected` token. 
+ #[must_use] pub fn recover_until_with_nested_delims( self, input: &mut ParserContext<'_>, @@ -77,38 +79,37 @@ impl ParserResult { let leading_trivia = opt_parse(input, |input| lexer.leading_trivia(input)); - match skip_until_with_nested_delims::<_, LexCtx>(input, lexer, expected) { - Some((found, skipped_range)) => { - nodes.extend(leading_trivia); - if matches!(result_kind, ParseResultKind::Match) { - expected_tokens.push(expected); - } + if let Some((found, skipped_range)) = + skip_until_with_nested_delims::<_, LexCtx>(input, lexer, expected) + { + nodes.extend(leading_trivia); + if matches!(result_kind, ParseResultKind::Match) { + expected_tokens.push(expected); + } - let skipped = input.content(skipped_range.utf8()); + let skipped = input.content(skipped_range.utf8()); - input.emit(ParseError { - text_range: skipped_range, - tokens_that_would_have_allowed_more_progress: expected_tokens.clone(), - }); + input.emit(ParseError { + text_range: skipped_range, + tokens_that_would_have_allowed_more_progress: expected_tokens.clone(), + }); - ParserResult::SkippedUntil(SkippedUntil { - nodes, - skipped, - found, - expected, - }) - } + ParserResult::SkippedUntil(SkippedUntil { + nodes, + skipped, + found, + expected, + }) + } else { // Not found till EOF, revert any recovery attempt - None => { - input.set_position(before_recovery); - - match result_kind { - ParseResultKind::Match => ParserResult::r#match(nodes, expected_tokens), - ParseResultKind::Incomplete => { - ParserResult::incomplete_match(nodes, expected_tokens) - } - ParseResultKind::NoMatch => ParserResult::no_match(expected_tokens), + input.set_position(before_recovery); + + match result_kind { + ParseResultKind::Match => ParserResult::r#match(nodes, expected_tokens), + ParseResultKind::Incomplete => { + ParserResult::incomplete_match(nodes, expected_tokens) } + ParseResultKind::NoMatch => ParserResult::no_match(expected_tokens), } } } diff --git 
a/crates/solidity/outputs/cargo/crate/src/generated/support/scanner_macros.rs b/crates/solidity/outputs/cargo/crate/src/generated/support/scanner_macros.rs index cd8c824e35..8bc08d4c11 100644 --- a/crates/solidity/outputs/cargo/crate/src/generated/support/scanner_macros.rs +++ b/crates/solidity/outputs/cargo/crate/src/generated/support/scanner_macros.rs @@ -85,6 +85,7 @@ macro_rules! scan_zero_or_more { macro_rules! scan_one_or_more { ($stream:ident, $scanner:expr) => {{ let mut count = 0; + #[allow(clippy::redundant_else)] loop { let save = $stream.position(); if !($scanner) { diff --git a/crates/solidity/outputs/cargo/crate/src/generated/support/separated_helper.rs b/crates/solidity/outputs/cargo/crate/src/generated/support/separated_helper.rs index d2e9702b92..5e5efa5f0a 100644 --- a/crates/solidity/outputs/cargo/crate/src/generated/support/separated_helper.rs +++ b/crates/solidity/outputs/cargo/crate/src/generated/support/separated_helper.rs @@ -88,11 +88,11 @@ impl SeparatedHelper { } } ParserResult::NoMatch(no_match) => { - if accum.is_empty() { - return ParserResult::no_match(no_match.expected_tokens); + return if accum.is_empty() { + ParserResult::no_match(no_match.expected_tokens) } else { - return ParserResult::incomplete_match(accum, no_match.expected_tokens); - } + ParserResult::incomplete_match(accum, no_match.expected_tokens) + }; } ParserResult::SkippedUntil(skipped) => { diff --git a/crates/solidity/outputs/cargo/crate/src/generated/support/sequence_helper.rs b/crates/solidity/outputs/cargo/crate/src/generated/support/sequence_helper.rs index 28e933b0a4..422fab9d33 100644 --- a/crates/solidity/outputs/cargo/crate/src/generated/support/sequence_helper.rs +++ b/crates/solidity/outputs/cargo/crate/src/generated/support/sequence_helper.rs @@ -35,6 +35,7 @@ impl SequenceHelper { ) } + #[allow(clippy::too_many_lines)] // Big switch that purely defines the sequence logic /// Attempts to append the next result until we hit an incomplete/no match. 
fn attempt_append(&mut self, next_result: ParserResult) { match self.result { @@ -95,7 +96,7 @@ impl SequenceHelper { self.result = State::Running(ParserResult::incomplete_match( std::mem::take(&mut cur.elements) .into_iter() - .flat_map(|pratt| pratt.into_nodes()) + .flat_map(PrattElement::into_nodes) .chain(next.nodes) .collect(), next.expected_tokens, @@ -105,7 +106,7 @@ impl SequenceHelper { self.result = State::Running(ParserResult::incomplete_match( std::mem::take(&mut cur.elements) .into_iter() - .flat_map(|pratt| pratt.into_nodes()) + .flat_map(PrattElement::into_nodes) .collect(), next.expected_tokens, )); diff --git a/crates/solidity/outputs/cargo/crate/src/main.rs b/crates/solidity/outputs/cargo/crate/src/main.rs index 5b31cdd08a..50b4b1ce8e 100644 --- a/crates/solidity/outputs/cargo/crate/src/main.rs +++ b/crates/solidity/outputs/cargo/crate/src/main.rs @@ -48,11 +48,11 @@ fn main() -> Result<()> { file_path, version, json, - } => execute_parse_command(file_path, version, json), + } => execute_parse_command(&file_path, version, json), } } -fn execute_parse_command(file_path_string: String, version: Version, json: bool) -> Result<()> { +fn execute_parse_command(file_path_string: &str, version: Version, json: bool) -> Result<()> { let file_path = PathBuf::from(&file_path_string) .canonicalize() .with_context(|| format!("Failed to find file path: {file_path_string:?}"))?; @@ -63,7 +63,7 @@ fn execute_parse_command(file_path_string: String, version: Version, json: bool) let errors = output.errors(); for error in errors { - let report = error.to_error_report(&file_path_string, &input, /* with_color */ true); + let report = error.to_error_report(file_path_string, &input, /* with_color */ true); eprintln!("{report}"); } diff --git a/crates/solidity/outputs/npm/crate/src/generated/cst.rs b/crates/solidity/outputs/npm/crate/src/generated/cst.rs index b4d799140c..3e1a704040 100644 --- a/crates/solidity/outputs/npm/crate/src/generated/cst.rs +++ 
b/crates/solidity/outputs/npm/crate/src/generated/cst.rs @@ -30,6 +30,7 @@ pub enum Node { Token(Rc), } +#[allow(clippy::match_wildcard_for_single_variants)] impl Node { pub fn rule(kind: RuleKind, children: Vec) -> Self { let text_len = children.iter().map(Node::text_len).sum(); diff --git a/crates/solidity/outputs/npm/crate/src/generated/cursor.rs b/crates/solidity/outputs/npm/crate/src/generated/cursor.rs index 0554ec02bf..2b64bdada7 100644 --- a/crates/solidity/outputs/npm/crate/src/generated/cursor.rs +++ b/crates/solidity/outputs/npm/crate/src/generated/cursor.rs @@ -118,6 +118,7 @@ impl Cursor { /// Unlike `clone`, this re-roots at the current node. /// It does preserve the correct text offset however, /// even though the path is reset. + #[must_use] pub fn spawn(&self) -> Self { Self { path: vec![], @@ -207,17 +208,14 @@ impl Cursor { /// /// Returns `false` if the cursor is finished and at the root. pub fn go_to_parent(&mut self) -> bool { - match self.path.pop() { - Some(parent) => { - self.current = parent.into_path_node(); + if let Some(parent) = self.path.pop() { + self.current = parent.into_path_node(); - true - } - None => { - self.is_completed = true; + true + } else { + self.is_completed = true; - false - } + false } } diff --git a/crates/solidity/outputs/npm/crate/src/generated/language.rs b/crates/solidity/outputs/npm/crate/src/generated/language.rs index f3d74911f2..183df95f28 100644 --- a/crates/solidity/outputs/npm/crate/src/generated/language.rs +++ b/crates/solidity/outputs/npm/crate/src/generated/language.rs @@ -1,5 +1,14 @@ // This file is generated automatically by infrastructure scripts. Please don't edit by hand. +// This file is generated; we can't reasonably satisfy some of these lints. 
+#![allow( + clippy::if_not_else, + clippy::too_many_lines, + clippy::unused_self, + clippy::struct_excessive_bools, + clippy::similar_names +)] + #[cfg(feature = "slang_napi_interfaces")] use {napi::bindgen_prelude::*, napi_derive::napi}; @@ -11439,6 +11448,8 @@ impl Lexer for Language { } #[cfg(feature = "slang_napi_interfaces")] +// NAPI-exposed functions have to accept owned values. +#[allow(clippy::needless_pass_by_value)] #[napi(namespace = "language")] impl Language { #[napi(constructor)] diff --git a/crates/solidity/outputs/npm/crate/src/generated/napi/napi_cursor.rs b/crates/solidity/outputs/npm/crate/src/generated/napi/napi_cursor.rs index 7ea067c144..9a531b242d 100644 --- a/crates/solidity/outputs/npm/crate/src/generated/napi/napi_cursor.rs +++ b/crates/solidity/outputs/npm/crate/src/generated/napi/napi_cursor.rs @@ -1,5 +1,10 @@ // This file is generated automatically by infrastructure scripts. Please don't edit by hand. +// NAPI-exposed functions have to accept owned values +#![allow(clippy::needless_pass_by_value)] +// The functions are meant to be definitions for export, so they're not really used +#![allow(clippy::return_self_not_must_use)] + use { napi::{bindgen_prelude::Env, JsObject}, napi_derive::napi, @@ -26,12 +31,12 @@ impl Cursor { #[napi] pub fn reset(&mut self) { - self.0.reset() + self.0.reset(); } #[napi] pub fn complete(&mut self) { - self.0.complete() + self.0.complete(); } #[napi] @@ -65,6 +70,7 @@ impl Cursor { (&self.0.text_range()).into() } + #[allow(clippy::cast_possible_truncation)] // Cursor depth can't reasonably be larger than u32 #[napi(getter)] pub fn depth(&self) -> u32 { self.0.depth() as u32 diff --git a/crates/solidity/outputs/npm/crate/src/generated/napi/napi_parse_error.rs b/crates/solidity/outputs/npm/crate/src/generated/napi/napi_parse_error.rs index 1013a69213..1b1959dcaf 100644 --- a/crates/solidity/outputs/npm/crate/src/generated/napi/napi_parse_error.rs +++ 
b/crates/solidity/outputs/npm/crate/src/generated/napi/napi_parse_error.rs @@ -1,5 +1,8 @@ // This file is generated automatically by infrastructure scripts. Please don't edit by hand. +// NAPI-exposed functions have to accept owned values. +#![allow(clippy::needless_pass_by_value)] + use napi_derive::napi; use super::{napi_text_index, RustParseError}; @@ -23,11 +26,7 @@ impl ParseError { } pub fn tokens_that_would_have_allowed_more_progress(&self) -> Vec { - self.0 - .tokens_that_would_have_allowed_more_progress() - .iter() - .map(|x| x.to_string()) - .collect() + self.0.tokens_that_would_have_allowed_more_progress() } #[napi(namespace = "parse_error")] diff --git a/crates/solidity/outputs/npm/crate/src/generated/napi/napi_text_index.rs b/crates/solidity/outputs/npm/crate/src/generated/napi/napi_text_index.rs index 64f69d576a..e95cad36f2 100644 --- a/crates/solidity/outputs/npm/crate/src/generated/napi/napi_text_index.rs +++ b/crates/solidity/outputs/npm/crate/src/generated/napi/napi_text_index.rs @@ -14,6 +14,8 @@ pub struct TextIndex { impl From<&RustTextIndex> for TextIndex { fn from(value: &RustTextIndex) -> Self { + // We only support 32-bit indices on TS side. 
+ #[allow(clippy::cast_possible_truncation)] Self { utf8: value.utf8 as u32, utf16: value.utf16 as u32, diff --git a/crates/solidity/outputs/npm/crate/src/generated/parse_error.rs b/crates/solidity/outputs/npm/crate/src/generated/parse_error.rs index fce07cfa9a..7ca261a7dd 100644 --- a/crates/solidity/outputs/npm/crate/src/generated/parse_error.rs +++ b/crates/solidity/outputs/npm/crate/src/generated/parse_error.rs @@ -22,12 +22,11 @@ impl ParseError { let tokens_that_would_have_allowed_more_progress = self .tokens_that_would_have_allowed_more_progress .iter() - .map(|kind| kind.as_ref()) .collect::>(); tokens_that_would_have_allowed_more_progress .into_iter() - .map(|token| token.to_string()) + .map(ToString::to_string) .collect() } diff --git a/crates/solidity/outputs/npm/crate/src/generated/support/parser_result.rs b/crates/solidity/outputs/npm/crate/src/generated/support/parser_result.rs index f88dfcfae2..49e7f51785 100644 --- a/crates/solidity/outputs/npm/crate/src/generated/support/parser_result.rs +++ b/crates/solidity/outputs/npm/crate/src/generated/support/parser_result.rs @@ -56,6 +56,7 @@ impl ParserResult { matches!(self, ParserResult::NoMatch(_)) } + #[must_use] pub fn with_kind(self, new_kind: RuleKind) -> ParserResult { match self { ParserResult::Match(r#match) => ParserResult::r#match( diff --git a/crates/solidity/outputs/npm/crate/src/generated/support/precedence_helper.rs b/crates/solidity/outputs/npm/crate/src/generated/support/precedence_helper.rs index 850ca7401e..0ee251bda3 100644 --- a/crates/solidity/outputs/npm/crate/src/generated/support/precedence_helper.rs +++ b/crates/solidity/outputs/npm/crate/src/generated/support/precedence_helper.rs @@ -58,6 +58,8 @@ impl PrecedenceHelper { _ => result, } } + + #[allow(clippy::too_many_lines)] // Explicit on purpose, see below. 
pub fn reduce_precedence_result( child_kind: Option, result: ParserResult, @@ -69,6 +71,7 @@ impl PrecedenceHelper { // If the input is valid this should be correct by construction. + #[allow(clippy::redundant_else)] match result { ParserResult::PrattOperatorMatch(pratt_operator_match) => { let mut pratt_elements = pratt_operator_match.elements; diff --git a/crates/solidity/outputs/npm/crate/src/generated/support/recovery.rs b/crates/solidity/outputs/npm/crate/src/generated/support/recovery.rs index f013cb60c0..d3e158ed02 100644 --- a/crates/solidity/outputs/npm/crate/src/generated/support/recovery.rs +++ b/crates/solidity/outputs/npm/crate/src/generated/support/recovery.rs @@ -11,13 +11,14 @@ use super::parser_result::SkippedUntil; use super::ParserContext; /// An explicit parameter for the [`ParserResult::recover_until_with_nested_delims`] method. +#[derive(Clone, Copy)] pub enum RecoverFromNoMatch { Yes, No, } impl RecoverFromNoMatch { - pub fn as_bool(&self) -> bool { + pub fn as_bool(self) -> bool { matches!(self, RecoverFromNoMatch::Yes) } } @@ -42,6 +43,7 @@ impl ParserResult { /// /// Respects nested delimiters, i.e. the `expected` token is only accepted if it's not nested inside. /// Does not consume the `expected` token. 
+ #[must_use] pub fn recover_until_with_nested_delims( self, input: &mut ParserContext<'_>, @@ -77,38 +79,37 @@ impl ParserResult { let leading_trivia = opt_parse(input, |input| lexer.leading_trivia(input)); - match skip_until_with_nested_delims::<_, LexCtx>(input, lexer, expected) { - Some((found, skipped_range)) => { - nodes.extend(leading_trivia); - if matches!(result_kind, ParseResultKind::Match) { - expected_tokens.push(expected); - } + if let Some((found, skipped_range)) = + skip_until_with_nested_delims::<_, LexCtx>(input, lexer, expected) + { + nodes.extend(leading_trivia); + if matches!(result_kind, ParseResultKind::Match) { + expected_tokens.push(expected); + } - let skipped = input.content(skipped_range.utf8()); + let skipped = input.content(skipped_range.utf8()); - input.emit(ParseError { - text_range: skipped_range, - tokens_that_would_have_allowed_more_progress: expected_tokens.clone(), - }); + input.emit(ParseError { + text_range: skipped_range, + tokens_that_would_have_allowed_more_progress: expected_tokens.clone(), + }); - ParserResult::SkippedUntil(SkippedUntil { - nodes, - skipped, - found, - expected, - }) - } + ParserResult::SkippedUntil(SkippedUntil { + nodes, + skipped, + found, + expected, + }) + } else { // Not found till EOF, revert any recovery attempt - None => { - input.set_position(before_recovery); - - match result_kind { - ParseResultKind::Match => ParserResult::r#match(nodes, expected_tokens), - ParseResultKind::Incomplete => { - ParserResult::incomplete_match(nodes, expected_tokens) - } - ParseResultKind::NoMatch => ParserResult::no_match(expected_tokens), + input.set_position(before_recovery); + + match result_kind { + ParseResultKind::Match => ParserResult::r#match(nodes, expected_tokens), + ParseResultKind::Incomplete => { + ParserResult::incomplete_match(nodes, expected_tokens) } + ParseResultKind::NoMatch => ParserResult::no_match(expected_tokens), } } } diff --git 
a/crates/solidity/outputs/npm/crate/src/generated/support/scanner_macros.rs b/crates/solidity/outputs/npm/crate/src/generated/support/scanner_macros.rs index cd8c824e35..8bc08d4c11 100644 --- a/crates/solidity/outputs/npm/crate/src/generated/support/scanner_macros.rs +++ b/crates/solidity/outputs/npm/crate/src/generated/support/scanner_macros.rs @@ -85,6 +85,7 @@ macro_rules! scan_zero_or_more { macro_rules! scan_one_or_more { ($stream:ident, $scanner:expr) => {{ let mut count = 0; + #[allow(clippy::redundant_else)] loop { let save = $stream.position(); if !($scanner) { diff --git a/crates/solidity/outputs/npm/crate/src/generated/support/separated_helper.rs b/crates/solidity/outputs/npm/crate/src/generated/support/separated_helper.rs index d2e9702b92..5e5efa5f0a 100644 --- a/crates/solidity/outputs/npm/crate/src/generated/support/separated_helper.rs +++ b/crates/solidity/outputs/npm/crate/src/generated/support/separated_helper.rs @@ -88,11 +88,11 @@ impl SeparatedHelper { } } ParserResult::NoMatch(no_match) => { - if accum.is_empty() { - return ParserResult::no_match(no_match.expected_tokens); + return if accum.is_empty() { + ParserResult::no_match(no_match.expected_tokens) } else { - return ParserResult::incomplete_match(accum, no_match.expected_tokens); - } + ParserResult::incomplete_match(accum, no_match.expected_tokens) + }; } ParserResult::SkippedUntil(skipped) => { diff --git a/crates/solidity/outputs/npm/crate/src/generated/support/sequence_helper.rs b/crates/solidity/outputs/npm/crate/src/generated/support/sequence_helper.rs index 28e933b0a4..422fab9d33 100644 --- a/crates/solidity/outputs/npm/crate/src/generated/support/sequence_helper.rs +++ b/crates/solidity/outputs/npm/crate/src/generated/support/sequence_helper.rs @@ -35,6 +35,7 @@ impl SequenceHelper { ) } + #[allow(clippy::too_many_lines)] // Big switch that purely defines the sequence logic /// Attempts to append the next result until we hit an incomplete/no match. 
fn attempt_append(&mut self, next_result: ParserResult) { match self.result { @@ -95,7 +96,7 @@ impl SequenceHelper { self.result = State::Running(ParserResult::incomplete_match( std::mem::take(&mut cur.elements) .into_iter() - .flat_map(|pratt| pratt.into_nodes()) + .flat_map(PrattElement::into_nodes) .chain(next.nodes) .collect(), next.expected_tokens, @@ -105,7 +106,7 @@ impl SequenceHelper { self.result = State::Running(ParserResult::incomplete_match( std::mem::take(&mut cur.elements) .into_iter() - .flat_map(|pratt| pratt.into_nodes()) + .flat_map(PrattElement::into_nodes) .collect(), next.expected_tokens, )); diff --git a/crates/solidity/testing/sanctuary/src/main.rs b/crates/solidity/testing/sanctuary/src/main.rs index dd94fb89bc..8b720973c5 100644 --- a/crates/solidity/testing/sanctuary/src/main.rs +++ b/crates/solidity/testing/sanctuary/src/main.rs @@ -3,7 +3,7 @@ mod reporting; use std::{collections::BTreeSet, path::Path}; -use anyhow::{bail, Result}; +use anyhow::{anyhow, Result}; use infra_utils::paths::PathExtensions; use rayon::prelude::{IntoParallelRefIterator, ParallelIterator}; use semver::Version; @@ -60,7 +60,7 @@ fn process_dataset(dataset: &impl Dataset, versions: &BTreeSet) -> Resu let total_errors = reporter.finish(); if total_errors > 0 { - bail!("There were errors processing the dataset.") + Err(anyhow!("There were errors processing the dataset.")) } else { Ok(()) } @@ -75,9 +75,7 @@ fn process_source_file( let source = &file_path.read_to_string()?; let latest_version = versions.iter().max().unwrap(); - let pragmas = if let Ok(pragmas) = extract_version_pragmas(source, latest_version) { - pragmas - } else { + let Ok(pragmas) = extract_version_pragmas(source, latest_version) else { // Skip this file if we failed to filter compatible versions. 
return Ok(()); }; diff --git a/crates/solidity/testing/sanctuary/src/reporting.rs b/crates/solidity/testing/sanctuary/src/reporting.rs index fb00e39b86..ec5059ae50 100644 --- a/crates/solidity/testing/sanctuary/src/reporting.rs +++ b/crates/solidity/testing/sanctuary/src/reporting.rs @@ -44,7 +44,8 @@ impl Reporter { let failed_tests = self.failed_tests.load(Ordering::Relaxed); let total_tests = self.total_tests.load(Ordering::Relaxed); - let failure_percent = (100_f64 * (failed_tests as f64) / (total_tests as f64)) as usize; + #[allow(clippy::cast_possible_truncation, clippy::cast_precision_loss)] + let failure_percent = (100_f64 * (failed_tests as f64) / (total_tests as f64)) as isize; self.progress_bar.set_message(format!( "{failed_tests}/{total_tests} tests failed ({failure_percent}%)", diff --git a/crates/solidity/testing/utils/src/cst_snapshots/mod.rs b/crates/solidity/testing/utils/src/cst_snapshots/mod.rs index 4c9dabe4de..fe3580b0a3 100644 --- a/crates/solidity/testing/utils/src/cst_snapshots/mod.rs +++ b/crates/solidity/testing/utils/src/cst_snapshots/mod.rs @@ -53,7 +53,7 @@ fn write_source(w: &mut W, source: &str) -> Result<()> { writeln!(w, "Source: >")?; let mut offset = 0; - for (index, line, bytes, chars) in line_data.iter() { + for (index, line, bytes, chars) in &line_data { let range = offset..(offset + bytes); writeln!( w, @@ -141,7 +141,7 @@ fn write_node( (format!(" {preview}"), range_string) } else { // # 1..2 "foo" - ("".to_owned(), format!("{range_string} {preview}")) + (String::new(), format!("{range_string} {preview}")) } }; diff --git a/crates/solidity/testing/utils/src/version_pragmas/mod.rs b/crates/solidity/testing/utils/src/version_pragmas/mod.rs index f48bb65a28..ea9b722b2f 100644 --- a/crates/solidity/testing/utils/src/version_pragmas/mod.rs +++ b/crates/solidity/testing/utils/src/version_pragmas/mod.rs @@ -46,9 +46,8 @@ pub fn extract_version_pragmas( } fn extract_pragma(expression_node: &Node) -> Result { - let expression_rule = 
match expression_node { - Node::Rule(rule) => rule, - _ => bail!("Expected rule: {expression_node:?}"), + let Node::Rule(expression_rule) = expression_node else { + bail!("Expected rule: {expression_node:?}") }; ensure!( @@ -57,10 +56,8 @@ fn extract_pragma(expression_node: &Node) -> Result { ); let inner_expression = match &expression_rule.children[..] { - [child] => match child { - Node::Rule(rule) => rule, - _ => bail!("Expected rule: {child:?}"), - }, + [Node::Rule(rule)] => rule, + [Node::Token(token)] => bail!("Expected rule: {token:?}"), _ => unreachable!("Expected single child: {expression_rule:?}"), }; @@ -73,12 +70,11 @@ fn extract_pragma(expression_node: &Node) -> Result { match inner_expression.kind { RuleKind::VersionPragmaBinaryExpression => match &inner_children[..] { [left, operator, right] => { - let operator_kind = match operator { - Node::Token(token) => token.kind, - _ => bail!("Expected rule: {operator:?}"), + let Node::Token(operator) = operator else { + bail!("Expected rule: {operator:?}"); }; - match operator_kind { + match operator.kind { TokenKind::BarBar => { let left = extract_pragma(left)?; let right = extract_pragma(right)?;