Skip to content

Commit

Permalink
Optimise TokenMap to only hold either a typed or parsed AST token (#6585
Browse files Browse the repository at this point in the history
)

## Description

This PR optimizes the `TokenMap` structure by modifying the `Token` type
to store either a parsed or typed AST token, but not both
simultaneously. Parsed tokens are now only accessible if a typed token
does not exist. Previously, both parsed and typed variants were stored
which was inefficient in terms of memory usage.

Key changes:
- Replaced separate `parsed` and `typed` fields in `Token` with a single
`ast_node` field using an enum `TokenAstNode`
- Added helper methods `as_parsed()` and `as_typed()` to safely access
the token variants
- Updated all token creation and access code throughout the codebase to
use the new structure
- Refactored the attributes handling code to properly check both parsed
and typed variants

Works towards the memory optimization goals in #6226.

## Checklist

- [x] I have linked to any relevant issues.
- [x] I have commented my code, particularly in hard-to-understand
areas.
- [x] I have updated the documentation where relevant (API docs, the
reference, and the Sway book).
- [ ] If my change requires substantial documentation changes, I have
[requested support from the DevRel
team](https://github.com/FuelLabs/devrel-requests/issues/new/choose)
- [x] I have added tests that prove my fix is effective or that my
feature works.
- [x] I have added (or requested a maintainer to add) the necessary
`Breaking*` or `New Feature` labels where relevant.
- [x] I have done my best to ensure that my PR adheres to [the Fuel Labs
Code Review
Standards](https://github.com/FuelLabs/rfcs/blob/master/text/code-standards/external-contributors.md).
- [x] I have requested a review from the relevant team or maintainers.
  • Loading branch information
JoshuaBatty authored Oct 31, 2024
1 parent 5b7d720 commit b6bbbf8
Show file tree
Hide file tree
Showing 15 changed files with 369 additions and 208 deletions.
11 changes: 6 additions & 5 deletions sway-lsp/src/capabilities/code_actions/diagnostic/auto_import.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ use crate::{
code_actions::{CodeActionContext, CODE_ACTION_IMPORT_TITLE},
diagnostic::DiagnosticData,
},
core::token::{get_range_from_span, AstToken, SymbolKind, TypedAstToken},
core::token::{get_range_from_span, ParsedAstToken, SymbolKind, TypedAstToken},
};
use lsp_types::{
CodeAction as LspCodeAction, CodeActionKind, CodeActionOrCommand, Position, Range, TextEdit,
Expand Down Expand Up @@ -41,13 +41,14 @@ pub(crate) fn import_code_action(
let mut program_type_keyword = None;

ctx.tokens.tokens_for_file(ctx.temp_uri).for_each(|item| {
if let Some(TypedAstToken::TypedUseStatement(use_stmt)) = &item.value().typed {
if let Some(TypedAstToken::TypedUseStatement(use_stmt)) = &item.value().as_typed() {
use_statements.push(use_stmt.clone());
} else if let Some(TypedAstToken::TypedIncludeStatement(include_stmt)) = &item.value().typed
} else if let Some(TypedAstToken::TypedIncludeStatement(include_stmt)) =
&item.value().as_typed()
{
include_statements.push(include_stmt.clone());
} else if item.value().kind == SymbolKind::ProgramTypeKeyword {
if let AstToken::Keyword(ident) = &item.value().parsed {
if let Some(ParsedAstToken::Keyword(ident)) = &item.value().as_parsed() {
program_type_keyword = Some(ident.clone());
}
}
Expand Down Expand Up @@ -96,7 +97,7 @@ pub(crate) fn get_call_paths_for_name<'s>(
.tokens_for_name(symbol_name)
.filter_map(move |item| {
// If the typed token is a declaration, then we can import it.
match item.value().typed.as_ref() {
match item.value().as_typed().as_ref() {
Some(TypedAstToken::TypedDeclaration(ty_decl)) => {
return match ty_decl {
TyDecl::StructDecl(decl) => {
Expand Down
2 changes: 1 addition & 1 deletion sway-lsp/src/capabilities/code_actions/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ pub fn code_actions(
};

let actions_by_type = token
.typed
.as_typed()
.as_ref()
.map(|typed_token| match typed_token {
TypedAstToken::TypedDeclaration(decl) => match decl {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,9 +33,9 @@ impl<'a> CodeAction<'a, TyStructDecl> for StructNewCodeAction<'a> {
.find_map(|item| {
if let Some(TypedAstToken::TypedDeclaration(ty::TyDecl::ImplSelfOrTrait(
ty::ImplSelfOrTrait { decl_id, .. },
))) = item.value().typed
))) = item.value().as_typed()
{
Some((*ctx.engines.de().get_impl_self_or_trait(&decl_id)).clone())
Some((*ctx.engines.de().get_impl_self_or_trait(decl_id)).clone())
} else {
None
}
Expand Down
2 changes: 1 addition & 1 deletion sway-lsp/src/capabilities/hover/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -143,7 +143,7 @@ fn hover_format(
let mut hover_link_contents = HoverLinkContents::new(session, engines);

let sway_block = token
.typed
.as_typed()
.as_ref()
.and_then(|typed_token| match typed_token {
TypedAstToken::TypedDeclaration(decl) => match decl {
Expand Down
2 changes: 1 addition & 1 deletion sway-lsp/src/capabilities/inlay_hints.rs
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ pub fn inlay_hints(
.tokens_for_file(uri)
.filter_map(|item| {
let token = item.value();
token.typed.as_ref().and_then(|t| match t {
token.as_typed().as_ref().and_then(|t| match t {
TypedAstToken::TypedDeclaration(TyDecl::VariableDecl(var_decl)) => {
let var_range = get_range_from_span(&var_decl.name.span());
if var_range.start >= range.start && var_range.end <= range.end {
Expand Down
2 changes: 1 addition & 1 deletion sway-lsp/src/capabilities/rename.rs
Original file line number Diff line number Diff line change
Expand Up @@ -202,7 +202,7 @@ fn find_all_methods_for_decl<'a>(
.all_references_of_token(decl_token, engines)
.filter_map(|item| {
let token = item.value();
token.typed.as_ref().and_then(|typed| match typed {
token.as_typed().as_ref().and_then(|typed| match typed {
TypedAstToken::TypedDeclaration(decl) => match decl {
ty::TyDecl::AbiDecl(ty::AbiDecl { decl_id, .. }) => {
let abi_decl = engines.de().get_abi(decl_id);
Expand Down
4 changes: 2 additions & 2 deletions sway-lsp/src/core/session.rs
Original file line number Diff line number Diff line change
Expand Up @@ -227,14 +227,14 @@ impl Session {
.tokens_at_position(&engines, uri, shifted_position, Some(true));
let fn_token = fn_tokens.first()?.value();
let compiled_program = &*self.compiled_program.read();
if let Some(TypedAstToken::TypedFunctionDeclaration(fn_decl)) = fn_token.typed.clone() {
if let Some(TypedAstToken::TypedFunctionDeclaration(fn_decl)) = fn_token.as_typed() {
let program = compiled_program.typed.clone()?;
let engines = self.engines.read();
return Some(capabilities::completion::to_completion_items(
program.root.namespace.module(&engines).current_items(),
&engines,
ident_to_complete,
&fn_decl,
fn_decl,
position,
));
}
Expand Down
34 changes: 27 additions & 7 deletions sway-lsp/src/core/token.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,12 +19,12 @@ use sway_core::{
};
use sway_types::{Ident, SourceEngine, Span, Spanned};

/// The `AstToken` holds the types produced by the [sway_core::language::parsed::ParseProgram].
/// The `ParsedAstToken` holds the types produced by the [sway_core::language::parsed::ParseProgram].
/// These tokens have not been type-checked.
/// See this issue https://github.com/FuelLabs/sway/issues/2257 for more information about why they are
/// useful to the language server.
#[derive(Debug, Clone)]
pub enum AstToken {
pub enum ParsedAstToken {
AbiCastExpression(AbiCastExpression),
AmbiguousPathExpression(AmbiguousPathExpression),
Attribute(Attribute),
Expand Down Expand Up @@ -148,14 +148,19 @@ pub enum TypeDefinition {
Ident(Ident),
}

/// The AST data held by a `Token`: either a parsed (pre-type-check) node or a
/// typed node, but never both at once. Storing a single variant instead of two
/// separate fields reduces the memory footprint of the token map.
#[derive(Debug, Clone)]
pub enum TokenAstNode {
    /// A token collected while traversing the parsed (untyped) AST.
    Parsed(ParsedAstToken),
    /// A token collected while traversing the typed AST; replaces any
    /// previously stored parsed variant for the same token.
    Typed(TypedAstToken),
}

/// The `Token` type is created during traversal of the parsed and typed ASTs of a program.
/// It holds either the parsed or the typed data structure produced by the sway compiler —
/// never both simultaneously (see `TokenAstNode`).
/// It also holds the type definition & semantic type of the token if they could be inferred
/// during traversal of the ASTs.
#[derive(Debug, Clone)]
pub struct Token {
    // Either the parsed or the typed AST node for this token.
    pub ast_node: TokenAstNode,
    // Where the token's type is defined, if it could be inferred.
    pub type_def: Option<TypeDefinition>,
    // The semantic kind of symbol this token represents (e.g. keyword, declaration).
    pub kind: SymbolKind,
}
Expand All @@ -164,15 +169,30 @@ impl Token {
/// Create a new token with the given [SymbolKind].
/// This function is intended to be used during traversal of the
/// [sway_core::language::parsed::ParseProgram] AST.
pub fn from_parsed(token: AstToken, kind: SymbolKind) -> Self {
pub fn from_parsed(token: ParsedAstToken, kind: SymbolKind) -> Self {
Self {
parsed: token,
typed: None,
ast_node: TokenAstNode::Parsed(token),
type_def: None,
kind,
}
}

/// Returns the inner [ParsedAstToken] when this token holds a parsed AST
/// node, and `None` when it holds a typed one.
pub fn as_parsed(&self) -> Option<&ParsedAstToken> {
    if let TokenAstNode::Parsed(token) = &self.ast_node {
        Some(token)
    } else {
        None
    }
}

/// Returns the inner [TypedAstToken] when this token holds a typed AST
/// node, and `None` when it holds a parsed one.
pub fn as_typed(&self) -> Option<&TypedAstToken> {
    match self.ast_node {
        TokenAstNode::Typed(ref token) => Some(token),
        TokenAstNode::Parsed(_) => None,
    }
}

/// Return the [TokenIdent] of the declaration of the provided token.
pub fn declared_token_ident(&self, engines: &Engines) -> Option<TokenIdent> {
self.type_def.as_ref().and_then(|type_def| match type_def {
Expand Down
9 changes: 5 additions & 4 deletions sway-lsp/src/core/token_map.rs
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,7 @@ impl<'a> TokenMap {
.into_iter()
.find_map(|entry| {
let (_, token) = entry.pair();
if let Some(TypedAstToken::TypedDeclaration(_)) = &token.typed {
if let Some(TypedAstToken::TypedDeclaration(_)) = &token.as_typed() {
Some(entry)
} else {
None
Expand Down Expand Up @@ -175,7 +175,7 @@ impl<'a> TokenMap {
self.tokens_for_file(uri)
.filter_map(move |entry| {
let (ident, token) = entry.pair();
let token_ident = match &token.typed {
let token_ident = match &token.as_typed() {
Some(TypedAstToken::TypedFunctionDeclaration(decl))
if functions_only == Some(true) =>
{
Expand All @@ -188,7 +188,8 @@ impl<'a> TokenMap {
};
if position >= token_ident.range.start && position <= token_ident.range.end {
if functions_only == Some(true) {
if let Some(TypedAstToken::TypedFunctionDeclaration(_)) = &token.typed {
if let Some(TypedAstToken::TypedFunctionDeclaration(_)) = &token.as_typed()
{
return Some(entry);
}
return None;
Expand All @@ -213,7 +214,7 @@ impl<'a> TokenMap {
token::ident_of_type_id(engines, type_id)
.and_then(|decl_ident| self.try_get(&decl_ident).try_unwrap())
.map(|item| item.value().clone())
.and_then(|token| token.typed)
.and_then(|token| token.as_typed().cloned())
.and_then(|typed_token| match typed_token {
TypedAstToken::TypedDeclaration(dec) => Some(dec),
_ => None,
Expand Down
6 changes: 3 additions & 3 deletions sway-lsp/src/traverse/dependency.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
use crate::{
core::token::{AstToken, SymbolKind, Token, TypeDefinition, TypedAstToken},
core::token::{ParsedAstToken, SymbolKind, Token, TokenAstNode, TypeDefinition, TypedAstToken},
traverse::ParseContext,
};
use sway_core::language::{
Expand All @@ -11,7 +11,7 @@ use sway_types::Named;
/// Insert Declaration tokens into the TokenMap.
pub fn collect_parsed_declaration(node: &AstNode, ctx: &ParseContext) {
if let AstNodeContent::Declaration(declaration) = &node.content {
let parsed_token = AstToken::Declaration(declaration.clone());
let parsed_token = ParsedAstToken::Declaration(declaration.clone());

let (ident, symbol_kind) = match declaration {
Declaration::VariableDeclaration(decl_id) => {
Expand Down Expand Up @@ -73,7 +73,7 @@ pub fn collect_typed_declaration(node: &ty::TyAstNode, ctx: &ParseContext) {

let token_ident = ctx.ident(&ident);
if let Some(mut token) = ctx.tokens.try_get_mut_with_retry(&token_ident) {
token.typed = Some(typed_token);
token.ast_node = TokenAstNode::Typed(typed_token);
token.type_def = Some(TypeDefinition::Ident(ident));
}
}
Expand Down
6 changes: 3 additions & 3 deletions sway-lsp/src/traverse/lexed_tree.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
use crate::{
core::token::{AstToken, SymbolKind, Token},
core::token::{ParsedAstToken, SymbolKind, Token},
traverse::{adaptive_iter, Parse, ParseContext},
};
use rayon::iter::{ParallelBridge, ParallelIterator};
Expand Down Expand Up @@ -59,15 +59,15 @@ fn insert_module_kind(ctx: &ParseContext, kind: &ModuleKind) {
/// Inserts a `ProgramTypeKeyword` token for the given span into the token map.
fn insert_program_type_keyword(ctx: &ParseContext, span: Span) {
    let ident = Ident::new(span);
    let keyword = ParsedAstToken::Keyword(ident.clone());
    ctx.tokens.insert(
        ctx.ident(&ident),
        Token::from_parsed(keyword, SymbolKind::ProgramTypeKeyword),
    );
}

/// Inserts a generic keyword token for the given span into the token map.
fn insert_keyword(ctx: &ParseContext, span: Span) {
    let ident = Ident::new(span);
    let keyword_token =
        Token::from_parsed(ParsedAstToken::Keyword(ident.clone()), SymbolKind::Keyword);
    ctx.tokens.insert(ctx.ident(&ident), keyword_token);
}

Expand Down
Loading

0 comments on commit b6bbbf8

Please sign in to comment.