From ecf7f5f0b6bce1a4d10a4075a54b7ba482e25e29 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Maciej=20Tr=C4=85tnowiecki?=
Date: Fri, 25 Oct 2024 23:03:45 +0200
Subject: [PATCH] Remove Clone from TokenStream

commit-id:c34c8d99
---
 plugins/cairo-lang-macro/src/types/token.rs   |  2 +-
 scarb/src/compiler/plugin/proc_macro/host.rs  | 23 +++++++++----------
 scarb/src/compiler/plugin/proc_macro/types.rs |  4 ++--
 3 files changed, 14 insertions(+), 15 deletions(-)

diff --git a/plugins/cairo-lang-macro/src/types/token.rs b/plugins/cairo-lang-macro/src/types/token.rs
index 8835dc9a1..0e462b089 100644
--- a/plugins/cairo-lang-macro/src/types/token.rs
+++ b/plugins/cairo-lang-macro/src/types/token.rs
@@ -3,7 +3,7 @@ use std::fmt::Display;
 /// An abstract stream of Cairo tokens.
 ///
 /// This is both input and part of an output of a procedural macro.
-#[derive(Debug, Clone)]
+#[derive(Debug)]
 pub struct TokenStream {
     pub tokens: Vec<TokenTree>,
     pub metadata: TokenStreamMetadata,
diff --git a/scarb/src/compiler/plugin/proc_macro/host.rs b/scarb/src/compiler/plugin/proc_macro/host.rs
index ffc048292..3d89f2bb3 100644
--- a/scarb/src/compiler/plugin/proc_macro/host.rs
+++ b/scarb/src/compiler/plugin/proc_macro/host.rs
@@ -307,14 +307,15 @@ impl ProcMacroHostPlugin {
                 return all_none;
             }
         };
 
+        let original = token_stream.to_string();
         let result = self.instance(input.package_id).generate_code(
             input.expansion.name.clone(),
-            args.clone(),
-            token_stream.clone(),
+            args,
+            token_stream,
         );
 
-        let expanded = context.register_result(token_stream.to_string(), input, result, stable_ptr);
+        let expanded = context.register_result(original, input, result, stable_ptr);
         item_builder.add_modified(RewriteNode::Mapped {
             origin: func.as_syntax_node().span(db),
             node: Box::new(RewriteNode::Text(expanded.to_string())),
@@ -525,7 +526,6 @@ impl ProcMacroHostPlugin {
         let mut token_stream_builder = TokenStreamBuilder::new(db);
         token_stream_builder.add_node(item_ast.as_syntax_node());
         token_stream_builder.with_metadata(stream_metadata.clone());
-        let token_stream = token_stream_builder.build();
 
         let mut aux_data = EmittedAuxData::default();
         let mut all_diagnostics: Vec<Diagnostic> = Vec::new();
@@ -535,10 +535,11 @@
         let mut derived_code = PatchBuilder::new(db, &item_ast);
 
         for derive in derives {
+            let token_stream = token_stream_builder.build();
             let result = self.instance(derive.package_id).generate_code(
                 derive.expansion.name.clone(),
                 TokenStream::empty(),
-                token_stream.clone(),
+                token_stream,
             );
 
             // Register diagnostics.
@@ -596,10 +597,11 @@ impl ProcMacroHostPlugin {
         token_stream: TokenStream,
         stable_ptr: SyntaxStablePtrId,
     ) -> PluginResult {
+        let original = token_stream.to_string();
         let result = self.instance(input.package_id).generate_code(
             input.expansion.name.clone(),
-            args.clone(),
-            token_stream.clone(),
+            args,
+            token_stream,
         );
 
         // Handle token stream.
@@ -625,10 +627,7 @@ impl ProcMacroHostPlugin {
         // In essence, `code: None, remove_original_item: false` means `ProcMacroHost` will not be
         // called again for this AST item.
        // This optimization limits the number of generated nodes a bit.
-        if last
-            && result.aux_data.is_none()
-            && token_stream.to_string() == result.token_stream.to_string()
-        {
+        if last && result.aux_data.is_none() && original == result.token_stream.to_string() {
             return PluginResult {
                 code: None,
                 remove_original_item: false,
@@ -1029,7 +1028,7 @@ impl InlineMacroExprPlugin for ProcMacroInlinePlugin {
         );
         // Handle diagnostics.
         let diagnostics = into_cairo_diagnostics(result.diagnostics, stable_ptr);
-        let token_stream = result.token_stream.clone();
+        let token_stream = result.token_stream;
         if token_stream.is_empty() {
             // Remove original code
             InlinePluginResult {
diff --git a/scarb/src/compiler/plugin/proc_macro/types.rs b/scarb/src/compiler/plugin/proc_macro/types.rs
index dc8f5439b..c867d02e3 100644
--- a/scarb/src/compiler/plugin/proc_macro/types.rs
+++ b/scarb/src/compiler/plugin/proc_macro/types.rs
@@ -26,7 +26,7 @@ impl<'a> TokenStreamBuilder<'a> {
         self.metadata = Some(metadata);
     }
 
-    pub fn build(self) -> TokenStream {
+    pub fn build(&self) -> TokenStream {
         let mut result: Vec<TokenTree> = Vec::default();
         for node in self.nodes.iter() {
             let leaves = node.tokens(self.db);
@@ -35,7 +35,7 @@
             result.extend(tokens);
         }
 
-        match self.metadata {
+        match self.metadata.as_ref() {
             Some(metadata) => TokenStream::new(result.clone()).with_metadata(metadata.clone()),
             None => TokenStream::new(result.clone()),
         }
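
Note: the sketch below is not part of the patch; `TokenStream` and `generate_code` here are simplified stand-ins rather than the real cairo-lang-macro API. It only illustrates the move-before-use pattern the updated call sites follow: without `Clone`, the stream is moved into `generate_code`, so its textual form has to be captured up front, which is what the new `original` variable does in host.rs.

use std::fmt;

// Simplified stand-in for the real `TokenStream` in cairo-lang-macro.
#[derive(Debug)]
struct TokenStream {
    text: String,
}

impl fmt::Display for TokenStream {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.text)
    }
}

// Hypothetical consumer that takes the stream by value, as `generate_code` does.
fn generate_code(token_stream: TokenStream) -> TokenStream {
    token_stream
}

fn main() {
    let token_stream = TokenStream {
        text: "fn main() {}".into(),
    };

    // The stream is moved into `generate_code` and cannot be used afterwards,
    // so capture its textual form before the call.
    let original = token_stream.to_string();
    let result = generate_code(token_stream);

    // Mirrors the `original == result.token_stream.to_string()` comparison
    // the patch introduces in host.rs.
    assert_eq!(original, result.to_string());
}

Dropping `Clone` makes every extra copy of a potentially large token stream explicit; that is also why `TokenStreamBuilder::build` now takes `&self` and is invoked once per derive expansion instead of cloning one prebuilt stream.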