From 85cb7c533682839156a6394f47ed5233f13946a7 Mon Sep 17 00:00:00 2001 From: maciektr Date: Wed, 13 Nov 2024 12:34:36 +0100 Subject: [PATCH 01/13] Rework internal TokenStream representation (#1699) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit commit-id:ab3e2fd1 --- **Stack**: - #1734 - #1722 - #1704 - #1700 - #1699 ⬅ ⚠️ *Part of a stack created by [spr](https://github.com/ejoffe/spr). Do not merge manually using the UI - doing so may have unexpected results.* --- Cargo.lock | 8 +- plugins/cairo-lang-macro-stable/src/lib.rs | 34 ++-- plugins/cairo-lang-macro/Cargo.toml | 2 +- .../cairo-lang-macro/src/types/conversion.rs | 129 ++++++++++++- plugins/cairo-lang-macro/src/types/mod.rs | 92 ++------- plugins/cairo-lang-macro/src/types/token.rs | 129 +++++++++++++ scarb/Cargo.toml | 4 +- scarb/src/compiler/plugin/proc_macro/ffi.rs | 15 -- scarb/src/compiler/plugin/proc_macro/host.rs | 148 +++++++-------- scarb/src/compiler/plugin/proc_macro/mod.rs | 2 + scarb/src/compiler/plugin/proc_macro/types.rs | 54 ++++++ .../methods/expand_derive.rs | 6 +- scarb/tests/build_cairo_plugin.rs | 177 +++++++++++------- scarb/tests/proc_macro_server.rs | 48 +++-- .../scarb-proc-macro-server-types/Cargo.toml | 2 +- .../src/proc_macro_server.rs | 12 +- 16 files changed, 585 insertions(+), 277 deletions(-) create mode 100644 plugins/cairo-lang-macro/src/types/token.rs create mode 100644 scarb/src/compiler/plugin/proc_macro/types.rs diff --git a/Cargo.lock b/Cargo.lock index 3c2effa59..ac161ba04 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -749,7 +749,7 @@ name = "cairo-lang-macro" version = "0.1.1" dependencies = [ "cairo-lang-macro-attributes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "cairo-lang-macro-stable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cairo-lang-macro-stable 1.0.0", "linkme", "serde", "serde_json", @@ -5094,8 +5094,8 @@ dependencies = [ "cairo-lang-filesystem", "cairo-lang-formatter", "cairo-lang-lowering", - "cairo-lang-macro 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "cairo-lang-macro-stable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cairo-lang-macro 0.1.1", + "cairo-lang-macro-stable 1.0.0", "cairo-lang-parser", "cairo-lang-semantic", "cairo-lang-sierra", @@ -5308,7 +5308,7 @@ dependencies = [ name = "scarb-proc-macro-server-types" version = "0.1.0" dependencies = [ - "cairo-lang-macro 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "cairo-lang-macro 0.1.1", "serde", "serde_json", ] diff --git a/plugins/cairo-lang-macro-stable/src/lib.rs b/plugins/cairo-lang-macro-stable/src/lib.rs index dd91b21a8..fc5542987 100644 --- a/plugins/cairo-lang-macro-stable/src/lib.rs +++ b/plugins/cairo-lang-macro-stable/src/lib.rs @@ -1,11 +1,30 @@ use crate::ffi::{StableOption, StableSlice}; -use std::ffi::CStr; use std::num::NonZeroU8; use std::os::raw::c_char; use std::ptr::NonNull; pub mod ffi; +#[repr(C)] +#[derive(Debug)] +pub struct StableToken { + pub span: StableTextSpan, + pub content: *mut c_char, +} + +#[repr(C)] +#[derive(Debug)] +pub struct StableTextSpan { + pub start: usize, + pub end: usize, +} + +#[repr(C)] +#[derive(Debug)] +pub enum StableTokenTree { + Ident(StableToken), +} + #[repr(C)] #[derive(Debug)] pub struct StableExpansion { @@ -23,7 +42,7 @@ pub type StableExpansionsList = StableSlice; #[repr(C)] #[derive(Debug)] pub struct StableTokenStream { - pub value: *mut c_char, + pub tokens: StableSlice, pub metadata: 
StableTokenStreamMetadata, } @@ -76,17 +95,6 @@ pub struct StableResultWrapper { pub output: StableProcMacroResult, } -impl StableTokenStream { - /// Convert to String. - /// - /// # Safety - pub unsafe fn to_string(&self) -> String { - // Note that this does not deallocate the c-string. - // The memory must still be freed with `CString::from_raw`. - CStr::from_ptr(self.value).to_string_lossy().to_string() - } -} - #[repr(C)] pub struct StablePostProcessContext { pub aux_data: StableSlice, diff --git a/plugins/cairo-lang-macro/Cargo.toml b/plugins/cairo-lang-macro/Cargo.toml index bec89f24c..40fe2249c 100644 --- a/plugins/cairo-lang-macro/Cargo.toml +++ b/plugins/cairo-lang-macro/Cargo.toml @@ -15,7 +15,7 @@ repository.workspace = true [dependencies] cairo-lang-macro-attributes = "0.1" -cairo-lang-macro-stable = "1" +cairo-lang-macro-stable = { path = "../cairo-lang-macro-stable" } linkme.workspace = true serde = { workspace = true, optional = true } diff --git a/plugins/cairo-lang-macro/src/types/conversion.rs b/plugins/cairo-lang-macro/src/types/conversion.rs index 2e5516d0a..3242afd2f 100644 --- a/plugins/cairo-lang-macro/src/types/conversion.rs +++ b/plugins/cairo-lang-macro/src/types/conversion.rs @@ -1,12 +1,12 @@ use crate::{ AuxData, Diagnostic, ExpansionDefinition, FullPathMarker, PostProcessContext, ProcMacroResult, - Severity, TokenStream, TokenStreamMetadata, + Severity, TextSpan, Token, TokenStream, TokenStreamMetadata, TokenTree, }; use cairo_lang_macro_stable::ffi::StableSlice; use cairo_lang_macro_stable::{ StableAuxData, StableDiagnostic, StableExpansion, StableFullPathMarker, - StablePostProcessContext, StableProcMacroResult, StableSeverity, StableTokenStream, - StableTokenStreamMetadata, + StablePostProcessContext, StableProcMacroResult, StableSeverity, StableTextSpan, StableToken, + StableTokenStream, StableTokenStreamMetadata, StableTokenTree, }; use std::ffi::{c_char, CStr, CString}; use std::num::NonZeroU8; @@ -90,15 +90,120 @@ impl ProcMacroResult { } } +impl TextSpan { + /// Convert to FFI-safe representation. + #[doc(hidden)] + pub fn into_stable(self) -> StableTextSpan { + StableTextSpan { + start: self.start, + end: self.end, + } + } + + #[doc(hidden)] + pub fn from_stable(span: &StableTextSpan) -> Self { + Self { + start: span.start, + end: span.end, + } + } + + #[doc(hidden)] + pub fn from_owned_stable(span: StableTextSpan) -> Self { + Self { + start: span.start, + end: span.end, + } + } +} + +impl Token { + /// Convert to FFI-safe representation. + #[doc(hidden)] + pub fn into_stable(self) -> StableToken { + let cstr = CString::new(self.content.as_bytes()).unwrap(); + StableToken { + span: self.span.into_stable(), + content: cstr.into_raw(), + } + } + + /// Convert to native Rust representation, without taking the ownership of the string. + /// + /// Note that you still need to free the memory by calling `from_owned_stable`. + /// + /// # Safety + #[doc(hidden)] + pub unsafe fn from_stable(token: &StableToken) -> Self { + Self { + content: from_raw_cstr(token.content), + span: TextSpan::from_stable(&token.span), + } + } + + /// Convert to native Rust representation, with taking the ownership of the string. + /// + /// Useful when you need to free the allocated memory. + /// Only use on the same side of FFI-barrier, where the memory has been allocated. 
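+    /// Calling this on a `StableToken` allocated on the other side of the
+    /// FFI boundary would mix allocators and is undefined behavior.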
+ /// + /// # Safety + #[doc(hidden)] + pub unsafe fn from_owned_stable(token: StableToken) -> Self { + Self { + content: from_raw_cstring(token.content), + span: TextSpan::from_owned_stable(token.span), + } + } +} + +impl TokenTree { + /// Convert to FFI-safe representation. + #[doc(hidden)] + pub fn into_stable(self) -> StableTokenTree { + match self { + Self::Ident(token) => StableTokenTree::Ident(token.into_stable()), + } + } + + /// Convert to native Rust representation, without taking the ownership of the string. + /// + /// Note that you still need to free the memory by calling `from_owned_stable`. + /// + /// # Safety + #[doc(hidden)] + pub unsafe fn from_stable(token_tree: &StableTokenTree) -> Self { + match token_tree { + StableTokenTree::Ident(token) => Self::Ident(Token::from_stable(token)), + } + } + + /// Convert to native Rust representation, with taking the ownership of the string. + /// + /// Useful when you need to free the allocated memory. + /// Only use on the same side of FFI-barrier, where the memory has been allocated. + /// + /// # Safety + #[doc(hidden)] + pub unsafe fn from_owned_stable(token_tree: StableTokenTree) -> Self { + match token_tree { + StableTokenTree::Ident(token) => Self::Ident(Token::from_owned_stable(token)), + } + } +} + impl TokenStream { /// Convert to FFI-safe representation. /// /// # Safety #[doc(hidden)] pub fn into_stable(self) -> StableTokenStream { - let cstr = CString::new(self.value).unwrap(); + let tokens = self + .tokens + .into_iter() + .map(|token| token.into_stable()) + .collect::>(); StableTokenStream { - value: cstr.into_raw(), + tokens: StableSlice::new(tokens), metadata: self.metadata.into_stable(), } } @@ -110,8 +215,13 @@ impl TokenStream { /// # Safety #[doc(hidden)] pub unsafe fn from_stable(token_stream: &StableTokenStream) -> Self { + let (ptr, n) = token_stream.tokens.raw_parts(); + let tokens = slice::from_raw_parts(ptr, n) + .iter() + .map(|token_tree| TokenTree::from_stable(token_tree)) + .collect::>(); Self { - value: from_raw_cstr(token_stream.value), + tokens, metadata: TokenStreamMetadata::from_stable(&token_stream.metadata), } } @@ -124,8 +234,13 @@ impl TokenStream { /// # Safety #[doc(hidden)] pub unsafe fn from_owned_stable(token_stream: StableTokenStream) -> Self { + let tokens = token_stream.tokens.into_owned(); + let tokens = tokens + .into_iter() + .map(|token_tree| TokenTree::from_owned_stable(token_tree)) + .collect::>(); Self { - value: from_raw_cstring(token_stream.value), + tokens, metadata: TokenStreamMetadata::from_owned_stable(token_stream.metadata), } } diff --git a/plugins/cairo-lang-macro/src/types/mod.rs b/plugins/cairo-lang-macro/src/types/mod.rs index a811d7558..618d61746 100644 --- a/plugins/cairo-lang-macro/src/types/mod.rs +++ b/plugins/cairo-lang-macro/src/types/mod.rs @@ -1,10 +1,11 @@ -use std::fmt::Display; use std::vec::IntoIter; mod conversion; mod expansions; +mod token; pub use expansions::*; +pub use token::*; /// Result of procedural macro code generation. #[derive(Debug, Clone)] @@ -15,78 +16,6 @@ pub struct ProcMacroResult { pub full_path_markers: Vec, } -/// An abstract stream of Cairo tokens. -/// -/// This is both input and part of an output of a procedural macro. -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] -pub struct TokenStream { - value: String, - metadata: TokenStreamMetadata, -} - -/// Metadata of [`TokenStream`]. 
-/// -/// This struct can be used to describe the origin of the [`TokenStream`]. -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] -pub struct TokenStreamMetadata { - /// The path to the file from which the [`TokenStream`] has been created. - pub original_file_path: Option, - /// ID of the file from which the [`TokenStream`] has been created. - /// - /// It is guaranteed, that the `file_id` will be unique for each file. - pub file_id: Option, -} - -impl TokenStream { - #[doc(hidden)] - pub fn new(value: String) -> Self { - Self { - value, - metadata: TokenStreamMetadata::default(), - } - } - - #[doc(hidden)] - pub fn empty() -> Self { - Self::new("".to_string()) - } - - #[doc(hidden)] - pub fn with_metadata(mut self, metadata: TokenStreamMetadata) -> Self { - self.metadata = metadata; - self - } - - /// Get `[TokenStreamMetadata`] associated with this [`TokenStream`]. - /// - /// The metadata struct can be used to describe the [`TokenStream`] origin. - pub fn metadata(&self) -> &TokenStreamMetadata { - &self.metadata - } - - pub fn is_empty(&self) -> bool { - self.to_string().is_empty() - } -} - -impl Display for TokenStream { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.value) - } -} - -impl TokenStreamMetadata { - #[doc(hidden)] - pub fn new(file_path: impl ToString, file_id: impl ToString) -> Self { - Self { - original_file_path: Some(file_path.to_string()), - file_id: Some(file_id.to_string()), - } - } -} - /// **Auxiliary data** returned by procedural macro code generation. /// /// This struct can be used to collect additional information from the Cairo source code of @@ -101,7 +30,7 @@ impl TokenStreamMetadata { /// For instance, auxiliary data can be serialized as JSON. /// /// ``` -/// use cairo_lang_macro::{AuxData, ProcMacroResult, TokenStream, attribute_macro, post_process, PostProcessContext}; +/// use cairo_lang_macro::{AuxData, ProcMacroResult, TokenStream, TokenTree, Token, TextSpan, attribute_macro, post_process, PostProcessContext}; /// use serde::{Serialize, Deserialize}; /// #[derive(Debug, Serialize, Deserialize)] /// struct SomeAuxDataFormat { @@ -110,11 +39,16 @@ impl TokenStreamMetadata { /// /// #[attribute_macro] /// pub fn some_macro(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { -/// let token_stream = TokenStream::new( -/// token_stream.to_string() -/// // Remove macro call to avoid infinite loop. -/// .replace("#[some]", "") -/// ); +/// // Remove macro call to avoid infinite loop. +/// let code = token_stream.to_string().replace("#[some]", ""); +/// let token_stream = TokenStream::new(vec![ +/// TokenTree::Ident( +/// Token::new( +/// code.clone(), +/// TextSpan::new(0, code.len()) +/// ) +/// ) +/// ]); /// let value = SomeAuxDataFormat { some_message: "Hello from some macro!".to_string() }; /// let value = serde_json::to_string(&value).unwrap(); /// let value: Vec = value.into_bytes(); diff --git a/plugins/cairo-lang-macro/src/types/token.rs b/plugins/cairo-lang-macro/src/types/token.rs new file mode 100644 index 000000000..52b50068f --- /dev/null +++ b/plugins/cairo-lang-macro/src/types/token.rs @@ -0,0 +1,129 @@ +use std::fmt::Display; + +/// An abstract stream of Cairo tokens. +/// +/// This is both input and part of an output of a procedural macro. 
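+///
+/// The stream is a flat sequence of [`TokenTree`]s; the attached
+/// [`TokenStreamMetadata`] records which file the stream originated from.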
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] +pub struct TokenStream { + pub tokens: Vec, + pub metadata: TokenStreamMetadata, +} + +/// A single token or a delimited sequence of token trees. +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum TokenTree { + Ident(Token), +} + +impl Default for TokenTree { + fn default() -> Self { + Self::Ident(Default::default()) + } +} + +/// A range of text offsets that form a span (like text selection). +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] +pub struct TextSpan { + pub start: usize, + pub end: usize, +} + +/// A single Cairo token. +/// +/// The most atomic item of Cairo code representation. +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] +pub struct Token { + pub content: String, + pub span: TextSpan, +} + +/// Metadata of [`TokenStream`]. +/// +/// This struct describes the origin of the [`TokenStream`]. +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] +pub struct TokenStreamMetadata { + /// The path to the file from which the [`TokenStream`] has been created. + pub original_file_path: Option, + /// ID of the file from which the [`TokenStream`] has been created. + /// + /// It is guaranteed, that the `file_id` will be unique for each file. + pub file_id: Option, +} + +impl TokenStream { + #[doc(hidden)] + pub fn new(tokens: Vec) -> Self { + Self { + tokens, + metadata: TokenStreamMetadata::default(), + } + } + + #[doc(hidden)] + pub fn empty() -> Self { + Self::new(Vec::default()) + } + + #[doc(hidden)] + pub fn with_metadata(mut self, metadata: TokenStreamMetadata) -> Self { + self.metadata = metadata; + self + } + + /// Get `[TokenStreamMetadata`] associated with this [`TokenStream`]. + /// + /// The metadata struct can be used to describe the [`TokenStream`] origin. 
+ pub fn metadata(&self) -> &TokenStreamMetadata { + &self.metadata + } + + pub fn is_empty(&self) -> bool { + self.to_string().is_empty() + } +} + +impl Display for TokenStream { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + for token in &self.tokens { + match token { + TokenTree::Ident(token) => { + write!(f, "{}", token.content.clone())?; + } + } + } + Ok(()) + } +} + +impl TokenStreamMetadata { + #[doc(hidden)] + pub fn new(file_path: impl ToString, file_id: impl ToString) -> Self { + Self { + original_file_path: Some(file_path.to_string()), + file_id: Some(file_id.to_string()), + } + } +} + +impl TokenTree { + pub fn from_ident(token: Token) -> Self { + Self::Ident(token) + } +} + +impl TextSpan { + pub fn new(start: usize, end: usize) -> TextSpan { + TextSpan { start, end } + } +} + +impl Token { + pub fn new(content: String, span: TextSpan) -> Self { + Self { content, span } + } +} diff --git a/scarb/Cargo.toml b/scarb/Cargo.toml index 6bfbb5822..4c3b0a940 100644 --- a/scarb/Cargo.toml +++ b/scarb/Cargo.toml @@ -23,8 +23,8 @@ cairo-lang-executable.workspace = true cairo-lang-filesystem.workspace = true cairo-lang-formatter.workspace = true cairo-lang-lowering.workspace = true -cairo-lang-macro = "0.1" -cairo-lang-macro-stable = "1" +cairo-lang-macro = { path = "../plugins/cairo-lang-macro" } +cairo-lang-macro-stable = { path = "../plugins/cairo-lang-macro-stable" } cairo-lang-parser.workspace = true cairo-lang-semantic.workspace = true cairo-lang-sierra-to-casm.workspace = true diff --git a/scarb/src/compiler/plugin/proc_macro/ffi.rs b/scarb/src/compiler/plugin/proc_macro/ffi.rs index 2e77441ee..75710f9a9 100644 --- a/scarb/src/compiler/plugin/proc_macro/ffi.rs +++ b/scarb/src/compiler/plugin/proc_macro/ffi.rs @@ -1,6 +1,5 @@ use crate::core::{Package, PackageId}; use anyhow::{ensure, Context, Result}; -use cairo_lang_defs::patcher::PatchBuilder; use cairo_lang_macro::{ ExpansionKind as SharedExpansionKind, FullPathMarker, PostProcessContext, ProcMacroResult, TokenStream, @@ -9,8 +8,6 @@ use cairo_lang_macro_stable::{ StableExpansion, StableExpansionsList, StablePostProcessContext, StableProcMacroResult, StableResultWrapper, StableTokenStream, }; -use cairo_lang_syntax::node::db::SyntaxGroup; -use cairo_lang_syntax::node::TypedSyntaxNode; use camino::Utf8PathBuf; use itertools::Itertools; use libloading::{Library, Symbol}; @@ -28,18 +25,6 @@ use libloading::os::windows::Symbol as RawSymbol; use smol_str::SmolStr; use tracing::trace; -pub trait FromSyntaxNode { - fn from_syntax_node(db: &dyn SyntaxGroup, node: &impl TypedSyntaxNode) -> Self; -} - -impl FromSyntaxNode for TokenStream { - fn from_syntax_node(db: &dyn SyntaxGroup, node: &impl TypedSyntaxNode) -> Self { - let mut builder = PatchBuilder::new(db, node); - builder.add_node(node.as_syntax_node()); - Self::new(builder.build().0) - } -} - const EXEC_ATTR_PREFIX: &str = "__exec_attr_"; /// Representation of a single procedural macro. 
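For reference, a minimal sketch (not part of the diff) of how the reworked types compose, using only the constructors introduced in this patch; the file path and id passed to the metadata are illustrative placeholders:

```rust
use cairo_lang_macro::{TextSpan, Token, TokenStream, TokenStreamMetadata, TokenTree};

fn build_stream() -> TokenStream {
    let code = "fn main() {}".to_string();
    // A token stream is now a vector of token trees instead of a single string.
    let stream = TokenStream::new(vec![TokenTree::Ident(Token::new(
        code.clone(),
        TextSpan::new(0, code.len()),
    ))])
    // Metadata describing the origin of the stream; both values are placeholders.
    .with_metadata(TokenStreamMetadata::new("src/lib.cairo", "file_id_0"));
    // `Display` concatenates the content of every token, so the original code
    // round-trips through the new representation.
    assert_eq!(stream.to_string(), "fn main() {}");
    stream
}
```
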
diff --git a/scarb/src/compiler/plugin/proc_macro/host.rs b/scarb/src/compiler/plugin/proc_macro/host.rs index 7e8765ef9..c19a363e3 100644 --- a/scarb/src/compiler/plugin/proc_macro/host.rs +++ b/scarb/src/compiler/plugin/proc_macro/host.rs @@ -1,6 +1,6 @@ use crate::compiler::plugin::proc_macro::compilation::SharedLibraryProvider; use crate::compiler::plugin::proc_macro::{ - Expansion, ExpansionKind, FromSyntaxNode, ProcMacroInstance, + Expansion, ExpansionKind, ProcMacroInstance, TokenStreamBuilder, }; use crate::core::{Config, Package, PackageId}; use anyhow::{ensure, Context, Result}; @@ -191,15 +191,15 @@ impl ProcMacroHostPlugin { continue; }; - let mut func_builder = PatchBuilder::new(db, func); + let mut token_stream_builder = TokenStreamBuilder::new(db); let attrs = func.attributes(db).elements(db); - let found = self.parse_attrs(db, &mut func_builder, attrs, func); + let found = self.parse_attrs(db, &mut token_stream_builder, attrs); if let Some(name) = found.as_name() { used_attr_names.insert(name); } - func_builder.add_node(func.declaration(db).as_syntax_node()); - func_builder.add_node(func.body(db).as_syntax_node()); - let token_stream = TokenStream::new(func_builder.build().0); + token_stream_builder.add_node(func.declaration(db).as_syntax_node()); + token_stream_builder.add_node(func.body(db).as_syntax_node()); + let token_stream = token_stream_builder.build(); all_none = all_none && self.do_expand_inner_attr( @@ -252,16 +252,16 @@ impl ProcMacroHostPlugin { continue; }; - let mut func_builder = PatchBuilder::new(db, &func); + let mut token_stream_builder = TokenStreamBuilder::new(db); let attrs = func.attributes(db).elements(db); - let found = self.parse_attrs(db, &mut func_builder, attrs, &func); + let found = self.parse_attrs(db, &mut token_stream_builder, attrs); if let Some(name) = found.as_name() { used_attr_names.insert(name); } - func_builder.add_node(func.visibility(db).as_syntax_node()); - func_builder.add_node(func.declaration(db).as_syntax_node()); - func_builder.add_node(func.body(db).as_syntax_node()); - let token_stream = TokenStream::new(func_builder.build().0); + token_stream_builder.add_node(func.visibility(db).as_syntax_node()); + token_stream_builder.add_node(func.declaration(db).as_syntax_node()); + token_stream_builder.add_node(func.body(db).as_syntax_node()); + let token_stream = token_stream_builder.build(); all_none = all_none && self.do_expand_inner_attr( db, @@ -348,103 +348,102 @@ impl ProcMacroHostPlugin { db: &dyn SyntaxGroup, item_ast: ast::ModuleItem, ) -> (AttrExpansionFound, TokenStream) { - let mut item_builder = PatchBuilder::new(db, &item_ast); + let mut token_stream_builder = TokenStreamBuilder::new(db); let input = match item_ast.clone() { ast::ModuleItem::Trait(trait_ast) => { let attrs = trait_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); - item_builder.add_node(trait_ast.visibility(db).as_syntax_node()); - item_builder.add_node(trait_ast.trait_kw(db).as_syntax_node()); - item_builder.add_node(trait_ast.name(db).as_syntax_node()); - item_builder.add_node(trait_ast.generic_params(db).as_syntax_node()); - item_builder.add_node(trait_ast.body(db).as_syntax_node()); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + token_stream_builder.add_node(trait_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(trait_ast.trait_kw(db).as_syntax_node()); + token_stream_builder.add_node(trait_ast.name(db).as_syntax_node()); + 
token_stream_builder.add_node(trait_ast.generic_params(db).as_syntax_node()); + token_stream_builder.add_node(trait_ast.body(db).as_syntax_node()); expansion } ast::ModuleItem::Impl(impl_ast) => { let attrs = impl_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); - item_builder.add_node(impl_ast.visibility(db).as_syntax_node()); - item_builder.add_node(impl_ast.impl_kw(db).as_syntax_node()); - item_builder.add_node(impl_ast.name(db).as_syntax_node()); - item_builder.add_node(impl_ast.generic_params(db).as_syntax_node()); - item_builder.add_node(impl_ast.of_kw(db).as_syntax_node()); - item_builder.add_node(impl_ast.trait_path(db).as_syntax_node()); - item_builder.add_node(impl_ast.body(db).as_syntax_node()); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + token_stream_builder.add_node(impl_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.impl_kw(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.name(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.generic_params(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.of_kw(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.trait_path(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.body(db).as_syntax_node()); expansion } ast::ModuleItem::Module(module_ast) => { let attrs = module_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); - item_builder.add_node(module_ast.visibility(db).as_syntax_node()); - item_builder.add_node(module_ast.module_kw(db).as_syntax_node()); - item_builder.add_node(module_ast.name(db).as_syntax_node()); - item_builder.add_node(module_ast.body(db).as_syntax_node()); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + token_stream_builder.add_node(module_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(module_ast.module_kw(db).as_syntax_node()); + token_stream_builder.add_node(module_ast.name(db).as_syntax_node()); + token_stream_builder.add_node(module_ast.body(db).as_syntax_node()); expansion } ast::ModuleItem::FreeFunction(free_func_ast) => { let attrs = free_func_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); - item_builder.add_node(free_func_ast.visibility(db).as_syntax_node()); - item_builder.add_node(free_func_ast.declaration(db).as_syntax_node()); - item_builder.add_node(free_func_ast.body(db).as_syntax_node()); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + token_stream_builder.add_node(free_func_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(free_func_ast.declaration(db).as_syntax_node()); + token_stream_builder.add_node(free_func_ast.body(db).as_syntax_node()); expansion } ast::ModuleItem::ExternFunction(extern_func_ast) => { let attrs = extern_func_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); - item_builder.add_node(extern_func_ast.visibility(db).as_syntax_node()); - item_builder.add_node(extern_func_ast.extern_kw(db).as_syntax_node()); - item_builder.add_node(extern_func_ast.declaration(db).as_syntax_node()); - item_builder.add_node(extern_func_ast.semicolon(db).as_syntax_node()); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + token_stream_builder.add_node(extern_func_ast.visibility(db).as_syntax_node()); + 
token_stream_builder.add_node(extern_func_ast.extern_kw(db).as_syntax_node()); + token_stream_builder.add_node(extern_func_ast.declaration(db).as_syntax_node()); + token_stream_builder.add_node(extern_func_ast.semicolon(db).as_syntax_node()); expansion } ast::ModuleItem::ExternType(extern_type_ast) => { let attrs = extern_type_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); - item_builder.add_node(extern_type_ast.visibility(db).as_syntax_node()); - item_builder.add_node(extern_type_ast.extern_kw(db).as_syntax_node()); - item_builder.add_node(extern_type_ast.type_kw(db).as_syntax_node()); - item_builder.add_node(extern_type_ast.name(db).as_syntax_node()); - item_builder.add_node(extern_type_ast.generic_params(db).as_syntax_node()); - item_builder.add_node(extern_type_ast.semicolon(db).as_syntax_node()); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + token_stream_builder.add_node(extern_type_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(extern_type_ast.extern_kw(db).as_syntax_node()); + token_stream_builder.add_node(extern_type_ast.type_kw(db).as_syntax_node()); + token_stream_builder.add_node(extern_type_ast.name(db).as_syntax_node()); + token_stream_builder.add_node(extern_type_ast.generic_params(db).as_syntax_node()); + token_stream_builder.add_node(extern_type_ast.semicolon(db).as_syntax_node()); expansion } ast::ModuleItem::Struct(struct_ast) => { let attrs = struct_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); - item_builder.add_node(struct_ast.visibility(db).as_syntax_node()); - item_builder.add_node(struct_ast.struct_kw(db).as_syntax_node()); - item_builder.add_node(struct_ast.name(db).as_syntax_node()); - item_builder.add_node(struct_ast.generic_params(db).as_syntax_node()); - item_builder.add_node(struct_ast.lbrace(db).as_syntax_node()); - item_builder.add_node(struct_ast.members(db).as_syntax_node()); - item_builder.add_node(struct_ast.rbrace(db).as_syntax_node()); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + token_stream_builder.add_node(struct_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(struct_ast.struct_kw(db).as_syntax_node()); + token_stream_builder.add_node(struct_ast.name(db).as_syntax_node()); + token_stream_builder.add_node(struct_ast.generic_params(db).as_syntax_node()); + token_stream_builder.add_node(struct_ast.lbrace(db).as_syntax_node()); + token_stream_builder.add_node(struct_ast.members(db).as_syntax_node()); + token_stream_builder.add_node(struct_ast.rbrace(db).as_syntax_node()); expansion } ast::ModuleItem::Enum(enum_ast) => { let attrs = enum_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); - item_builder.add_node(enum_ast.visibility(db).as_syntax_node()); - item_builder.add_node(enum_ast.enum_kw(db).as_syntax_node()); - item_builder.add_node(enum_ast.name(db).as_syntax_node()); - item_builder.add_node(enum_ast.generic_params(db).as_syntax_node()); - item_builder.add_node(enum_ast.lbrace(db).as_syntax_node()); - item_builder.add_node(enum_ast.variants(db).as_syntax_node()); - item_builder.add_node(enum_ast.rbrace(db).as_syntax_node()); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + token_stream_builder.add_node(enum_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(enum_ast.enum_kw(db).as_syntax_node()); + 
token_stream_builder.add_node(enum_ast.name(db).as_syntax_node()); + token_stream_builder.add_node(enum_ast.generic_params(db).as_syntax_node()); + token_stream_builder.add_node(enum_ast.lbrace(db).as_syntax_node()); + token_stream_builder.add_node(enum_ast.variants(db).as_syntax_node()); + token_stream_builder.add_node(enum_ast.rbrace(db).as_syntax_node()); expansion } _ => AttrExpansionFound::None, }; - let token_stream = TokenStream::new(item_builder.build().0); + let token_stream = token_stream_builder.build(); (input, token_stream) } fn parse_attrs( &self, db: &dyn SyntaxGroup, - builder: &mut PatchBuilder<'_>, + builder: &mut TokenStreamBuilder<'_>, attrs: Vec, - origin: &impl TypedSyntaxNode, ) -> AttrExpansionFound { // This function parses attributes of the item, // checking if those attributes correspond to a procedural macro that should be fired. @@ -467,9 +466,9 @@ impl ProcMacroHostPlugin { )); if let Some(found) = found { if expansion.is_none() { - let mut args_builder = PatchBuilder::new(db, origin); + let mut args_builder = TokenStreamBuilder::new(db); args_builder.add_node(attr.arguments(db).as_syntax_node()); - let args = TokenStream::new(args_builder.build().0); + let args = args_builder.build(); expansion = Some((found, args, attr.stable_ptr().untyped())); // Do not add the attribute for found expansion. continue; @@ -540,9 +539,10 @@ impl ProcMacroHostPlugin { stream_metadata: TokenStreamMetadata, ) -> Option { let stable_ptr = item_ast.clone().stable_ptr().untyped(); - let token_stream = - TokenStream::from_syntax_node(db, &item_ast).with_metadata(stream_metadata.clone()); - + let mut token_stream_builder = TokenStreamBuilder::new(db); + token_stream_builder.add_node(item_ast.as_syntax_node()); + token_stream_builder.with_metadata(stream_metadata.clone()); + let token_stream = token_stream_builder.build(); let mut aux_data = EmittedAuxData::default(); let mut all_diagnostics: Vec = Vec::new(); @@ -1074,7 +1074,9 @@ impl InlineMacroExprPlugin for ProcMacroInlinePlugin { ) -> InlinePluginResult { let stable_ptr = syntax.clone().stable_ptr().untyped(); let arguments = syntax.arguments(db); - let token_stream = TokenStream::from_syntax_node(db, &arguments); + let mut token_stream_builder = TokenStreamBuilder::new(db); + token_stream_builder.add_node(arguments.as_syntax_node()); + let token_stream = token_stream_builder.build(); let result = self.instance().generate_code( self.expansion.name.clone(), TokenStream::empty(), diff --git a/scarb/src/compiler/plugin/proc_macro/mod.rs b/scarb/src/compiler/plugin/proc_macro/mod.rs index 888c012fc..83a4e7822 100644 --- a/scarb/src/compiler/plugin/proc_macro/mod.rs +++ b/scarb/src/compiler/plugin/proc_macro/mod.rs @@ -1,7 +1,9 @@ pub mod compilation; mod ffi; mod host; +mod types; pub use compilation::{check_unit, compile_unit, fetch_crate}; pub use ffi::*; pub use host::*; +pub use types::*; diff --git a/scarb/src/compiler/plugin/proc_macro/types.rs b/scarb/src/compiler/plugin/proc_macro/types.rs new file mode 100644 index 000000000..dc8f5439b --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/types.rs @@ -0,0 +1,54 @@ +use cairo_lang_macro::{TextSpan, Token, TokenStream, TokenStreamMetadata, TokenTree}; +use cairo_lang_syntax::node::{db::SyntaxGroup, SyntaxNode}; + +/// Helps creating TokenStream based on multiple SyntaxNodes, +/// which aren't descendants or ascendants of each other inside the SyntaxTree. 
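+///
+/// Nodes added with `add_node` are lexed into tokens in insertion order when
+/// `build` is called. Illustrative usage (`db` and `item_ast` are assumed to
+/// come from the caller, as in the plugin host above):
+///
+/// ```ignore
+/// let mut builder = TokenStreamBuilder::new(db);
+/// builder.add_node(item_ast.as_syntax_node());
+/// let token_stream = builder.build();
+/// ```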
+pub struct TokenStreamBuilder<'a> { + db: &'a dyn SyntaxGroup, + nodes: Vec, + metadata: Option, +} + +impl<'a> TokenStreamBuilder<'a> { + pub fn new(db: &'a dyn SyntaxGroup) -> Self { + Self { + db, + nodes: Vec::default(), + metadata: None, + } + } + + pub fn add_node(&mut self, node: SyntaxNode) { + self.nodes.push(node); + } + + pub fn with_metadata(&mut self, metadata: TokenStreamMetadata) { + self.metadata = Some(metadata); + } + + pub fn build(self) -> TokenStream { + let mut result: Vec = Vec::default(); + for node in self.nodes.iter() { + let leaves = node.tokens(self.db); + let tokens = + leaves.map(|node| TokenTree::Ident(self.token_from_syntax_node(node.clone()))); + result.extend(tokens); + } + + match self.metadata { + Some(metadata) => TokenStream::new(result.clone()).with_metadata(metadata.clone()), + None => TokenStream::new(result.clone()), + } + } + + pub fn token_from_syntax_node(&self, node: SyntaxNode) -> Token { + let span = node.span(self.db).to_str_range(); + Token::new( + node.get_text(self.db), + TextSpan { + start: span.start, + end: span.end, + }, + ) + } +} diff --git a/scarb/src/ops/proc_macro_server/methods/expand_derive.rs b/scarb/src/ops/proc_macro_server/methods/expand_derive.rs index d792ef51e..fc42b846c 100644 --- a/scarb/src/ops/proc_macro_server/methods/expand_derive.rs +++ b/scarb/src/ops/proc_macro_server/methods/expand_derive.rs @@ -10,7 +10,7 @@ use crate::compiler::plugin::proc_macro::{Expansion, ExpansionKind, ProcMacroHos impl Handler for ExpandDerive { fn handle(proc_macro_host: Arc, params: Self::Params) -> Result { - let mut derived_code = String::new(); + let mut derived_code = TokenStream::empty(); let mut all_diagnostics = vec![]; for derive in params.derives { @@ -30,11 +30,11 @@ impl Handler for ExpandDerive { // Register diagnostics. all_diagnostics.extend(result.diagnostics); // Add generated code. - derived_code.push_str(&result.token_stream.to_string()); + derived_code.tokens.extend(result.token_stream.tokens); } Ok(ProcMacroResult { - token_stream: TokenStream::new(derived_code), + token_stream: derived_code, diagnostics: all_diagnostics, }) } diff --git a/scarb/tests/build_cairo_plugin.rs b/scarb/tests/build_cairo_plugin.rs index fa19bff7d..344b1de2c 100644 --- a/scarb/tests/build_cairo_plugin.rs +++ b/scarb/tests/build_cairo_plugin.rs @@ -405,15 +405,15 @@ fn can_replace_original_node() { let t = temp.child("some"); CairoPluginProjectBuilder::default() .lib_rs(indoc! {r##" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, TokenTree, Token, TextSpan}; #[attribute_macro] pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - let token_stream = TokenStream::new( - token_stream - .to_string() - .replace("12", "34") - ); + let new_token_string = token_stream.to_string().replace("12", "34"); + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + new_token_string.clone(), + TextSpan { start: 0, end: new_token_string.len() }, + ))]); ProcMacroResult::new(token_stream) } "##}) @@ -571,26 +571,26 @@ fn can_define_multiple_macros() { let t = temp.child("some"); CairoPluginProjectBuilder::default() .lib_rs(indoc! 
{r##" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, AuxData, PostProcessContext, post_process}; + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, AuxData, PostProcessContext, post_process, TokenTree, Token, TextSpan}; #[attribute_macro] pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - let token_stream = TokenStream::new( - token_stream - .to_string() - .replace("12", "34") - ); + let new_token_string = token_stream.to_string().replace("12", "34"); + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + new_token_string.clone(), + TextSpan { start: 0, end: new_token_string.len() }, + ))]); let aux_data = AuxData::new(Vec::new()); ProcMacroResult::new(token_stream).with_aux_data(aux_data) } #[attribute_macro] pub fn world(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - let token_stream = TokenStream::new( - token_stream - .to_string() - .replace("56", "78") - ); + let new_token_string = token_stream.to_string().replace("56", "78"); + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + new_token_string.clone(), + TextSpan { start: 0, end: new_token_string.len() }, + ))]); let aux_data = AuxData::new(Vec::new()); ProcMacroResult::new(token_stream).with_aux_data(aux_data) } @@ -606,15 +606,15 @@ fn can_define_multiple_macros() { CairoPluginProjectBuilder::default() .name("other") .lib_rs(indoc! {r##" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, AuxData, PostProcessContext, post_process}; + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, AuxData, PostProcessContext, post_process, TokenTree, Token, TextSpan}; #[attribute_macro] pub fn beautiful(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - let token_stream = TokenStream::new( - token_stream - .to_string() - .replace("90", "09") - ); + let new_token_string = token_stream.to_string().replace("90", "09"); + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + new_token_string.clone(), + TextSpan { start: 0, end: new_token_string.len() }, + ))]); let aux_data = AuxData::new(Vec::new()); ProcMacroResult::new(token_stream).with_aux_data(aux_data) } @@ -810,7 +810,7 @@ fn can_resolve_full_path_markers() { let t = temp.child("some"); CairoPluginProjectBuilder::default() .lib_rs(indoc! {r##" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, post_process, PostProcessContext}; + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, post_process, PostProcessContext, TokenTree, Token, TextSpan}; #[attribute_macro] pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { @@ -821,8 +821,14 @@ fn can_resolve_full_path_markers() { token_stream.to_string().replace("12", "34") ); - ProcMacroResult::new(TokenStream::new(code)) - .with_full_path_markers(full_path_markers) + ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new( + code.clone(), + TextSpan { + start: 0, + end: code.len(), + }, + ))]) + ).with_full_path_markers(full_path_markers) } #[post_process] @@ -865,12 +871,18 @@ fn can_implement_inline_macro() { let t = temp.child("some"); CairoPluginProjectBuilder::default() .lib_rs(indoc! 
{r##" - use cairo_lang_macro::{ProcMacroResult, TokenStream, inline_macro}; + use cairo_lang_macro::{ProcMacroResult, TokenStream, inline_macro, TokenTree, Token, TextSpan}; #[inline_macro] pub fn some(token_stream: TokenStream) -> ProcMacroResult { assert_eq!(token_stream.to_string(), "()"); - ProcMacroResult::new(TokenStream::new("34".to_string())) + ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new( + "34".to_string(), + TextSpan { + start: 0, + end: 2, + }, + ))])) } "##}) .build(&t); @@ -955,7 +967,7 @@ fn can_implement_derive_macro() { let t = temp.child("some"); CairoPluginProjectBuilder::default() .lib_rs(indoc! {r##" - use cairo_lang_macro::{derive_macro, ProcMacroResult, TokenStream}; + use cairo_lang_macro::{derive_macro, ProcMacroResult, TokenStream, TokenTree, Token, TextSpan}; #[derive_macro] pub fn custom_derive(token_stream: TokenStream) -> ProcMacroResult { @@ -972,13 +984,21 @@ fn can_implement_derive_macro() { .trim() .to_string(); - let token_stream = TokenStream::new(indoc::formatdoc!{r#" + let code = indoc::formatdoc!{r#" impl SomeImpl of Hello<{name}> {{ fn world(self: @{name}) -> u32 {{ 32 }} }} - "#}); + "#}; + + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + code.clone(), + TextSpan { + start: 0, + end: code.len(), + }, + ))]); ProcMacroResult::new(token_stream) } @@ -1028,37 +1048,58 @@ fn can_use_both_derive_and_attr() { let t = temp.child("some"); CairoPluginProjectBuilder::default() .lib_rs(indoc! {r##" - use cairo_lang_macro::{derive_macro, attribute_macro, ProcMacroResult, TokenStream}; + use cairo_lang_macro::{derive_macro, attribute_macro, ProcMacroResult, TokenStream, TokenTree, TextSpan, Token}; #[attribute_macro] pub fn first_attribute(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - ProcMacroResult::new(TokenStream::new( - token_stream.to_string() - .replace("SomeType", "OtherType") - )) + let new_token_string = token_stream.to_string().replace("SomeType", "OtherType"); + ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new( + new_token_string.clone(), + TextSpan { + start: 0, + end: new_token_string.len(), + }, + ))])) } #[attribute_macro] pub fn second_attribute(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - let token_stream = TokenStream::new( - token_stream.to_string().replace("OtherType", "RenamedStruct") - ); - ProcMacroResult::new(TokenStream::new( - format!("#[derive(Drop)]\n{token_stream}") - )) + let code = token_stream.to_string().replace("OtherType", "RenamedStruct"); + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + code.clone(), + TextSpan { + start: 0, + end: code.len(), + }, + ))]); + + let result_string = format!("#[derive(Drop)]\n{token_stream}"); + ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new( + result_string.clone(), + TextSpan { + start: 0, + end: result_string.len(), + }, + ))])) } #[derive_macro] pub fn custom_derive(_token_stream: TokenStream) -> ProcMacroResult { - ProcMacroResult::new(TokenStream::new( - indoc::formatdoc!{r#" + let code = indoc::formatdoc!{r#" impl SomeImpl of Hello {{ fn world(self: @RenamedStruct) -> u32 {{ 32 }} }} - "#} - )) + "#}; + + ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new( + code.clone(), + TextSpan { + start: 0, + end: code.len(), + }, + ))])) } "##}) .add_dep(r#"indoc = "*""#) @@ -1256,15 +1297,15 @@ fn can_be_expanded() { let t = temp.child("some"); CairoPluginProjectBuilder::default() .lib_rs(indoc! 
{r##" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, derive_macro}; + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, derive_macro, TokenTree, Token, TextSpan}; #[attribute_macro] pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - let token_stream = TokenStream::new( - token_stream - .to_string() - .replace("12", "34") - ); + let new_token_string = token_stream.to_string().replace("12", "34"); + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + new_token_string.clone(), + TextSpan { start: 0, end: new_token_string.len() }, + ))]); ProcMacroResult::new(token_stream) } @@ -1283,13 +1324,18 @@ fn can_be_expanded() { .trim() .to_string(); - let token_stream = TokenStream::new(indoc::formatdoc!{r#" + let code = indoc::formatdoc!{r#" impl SomeImpl of Hello<{name}> {{ fn world(self: @{name}) -> u32 {{ 32 }} }} - "#}); + "#}; + + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + code.clone(), + TextSpan { start: 0, end: code.len() }, + ))]); ProcMacroResult::new(token_stream) } @@ -1365,15 +1411,17 @@ fn can_expand_trait_inner_func_attrr() { let t = temp.child("some"); CairoPluginProjectBuilder::default() .lib_rs(indoc! {r##" - use cairo_lang_macro::{attribute_macro, ProcMacroResult, TokenStream}; + use cairo_lang_macro::{attribute_macro, ProcMacroResult, TokenStream, TokenTree, Token, TextSpan}; #[attribute_macro] pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - ProcMacroResult::new(TokenStream::new( - token_stream.to_string() + let new_token_string = token_stream.to_string() .replace("hello", "world") - .replace("12", "34") - )) + .replace("12", "34"); + ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new( + new_token_string.clone(), + TextSpan { start: 0, end: new_token_string.len() }, + ))])) } "##}) .build(&t); @@ -1425,14 +1473,15 @@ fn can_expand_impl_inner_func_attrr() { let t = temp.child("some"); CairoPluginProjectBuilder::default() .lib_rs(indoc! 
{r##" - use cairo_lang_macro::{attribute_macro, ProcMacroResult, TokenStream}; + use cairo_lang_macro::{attribute_macro, ProcMacroResult, TokenStream, Token, TokenTree, TextSpan}; #[attribute_macro] pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - ProcMacroResult::new(TokenStream::new( - token_stream.to_string() - .replace("1", "2") - )) + let new_token_string = token_stream.to_string().replace("1", "2"); + ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new( + new_token_string.clone(), + TextSpan { start: 0, end: new_token_string.len() }, + ))])) } "##}) .build(&t); diff --git a/scarb/tests/proc_macro_server.rs b/scarb/tests/proc_macro_server.rs index b01f865dc..c2a29d2fc 100644 --- a/scarb/tests/proc_macro_server.rs +++ b/scarb/tests/proc_macro_server.rs @@ -1,6 +1,6 @@ use assert_fs::prelude::PathChild; use assert_fs::TempDir; -use cairo_lang_macro::TokenStream; +use cairo_lang_macro::{TextSpan, Token, TokenStream, TokenTree}; use scarb_proc_macro_server_types::methods::defined_macros::DefinedMacros; use scarb_proc_macro_server_types::methods::defined_macros::DefinedMacrosParams; use scarb_proc_macro_server_types::methods::expand::ExpandAttribute; @@ -58,7 +58,14 @@ fn expand_attribute() { let output = input.replace(name, "very_new_name"); - ProcMacroResult::new(TokenStream::new(output)) + let span = TextSpan { start: 0, end: output.len() }; + ProcMacroResult::new( + TokenStream::new(vec![ + TokenTree::Ident( + Token::new(output, span) + ) + ]) + ) }} "##; @@ -82,14 +89,17 @@ fn expand_attribute() { .request_and_wait::(ExpandAttributeParams { attr: "rename_to_very_new_name".to_string(), args: TokenStream::empty(), - item: TokenStream::new("fn some_test_fn(){}".to_string()), + item: TokenStream::new(vec![TokenTree::Ident(Token::new( + "fn some_test_fn(){}".to_string(), + TextSpan::default(), + ))]), }) .unwrap(); assert_eq!(response.diagnostics, vec![]); assert_eq!( - response.token_stream, - TokenStream::new("fn very_new_name(){}".to_string()) + response.token_stream.to_string(), + "fn very_new_name(){}".to_string() ); } @@ -113,7 +123,10 @@ fn expand_derive() { let mut proc_macro_server = ProcMacroClient::new(&project); - let item = TokenStream::new("fn some_test_fn(){}".to_string()); + let item = TokenStream::new(vec![TokenTree::Ident(Token::new( + "fn some_test_fn(){}".to_string(), + TextSpan::default(), + ))]); let response = proc_macro_server .request_and_wait::(ExpandDeriveParams { @@ -124,8 +137,8 @@ fn expand_derive() { assert_eq!(response.diagnostics, vec![]); assert_eq!( - response.token_stream, - TokenStream::new("impl SomeImpl of SomeTrait {}".to_string()) + response.token_stream.to_string(), + "impl SomeImpl of SomeTrait {}".to_string() ); } @@ -137,7 +150,15 @@ fn expand_inline() { let replace_all_15_with_25 = r#" #[inline_macro] pub fn replace_all_15_with_25(token_stream: TokenStream) -> ProcMacroResult { - ProcMacroResult::new(TokenStream::new(token_stream.to_string().replace("15", "25"))) + let content = token_stream.to_string().replace("15", "25"); + let span = TextSpan { start: 0, end: content.len() }; + ProcMacroResult::new( + TokenStream::new(vec![ + TokenTree::Ident( + Token::new(content, span) + ) + ]) + ) } "#; @@ -159,15 +180,16 @@ fn expand_inline() { let response = proc_macro_server .request_and_wait::(ExpandInlineMacroParams { name: "replace_all_15_with_25".to_string(), - args: TokenStream::new( + args: TokenStream::new(vec![TokenTree::Ident(Token::new( "struct A { field: 15 , other_field: 
macro_call!(12)}".to_string(), - ), + TextSpan::default(), + ))]), }) .unwrap(); assert_eq!(response.diagnostics, vec![]); assert_eq!( - response.token_stream, - TokenStream::new("struct A { field: 25 , other_field: macro_call!(12)}".to_string()) + response.token_stream.to_string(), + "struct A { field: 25 , other_field: macro_call!(12)}".to_string() ); } diff --git a/utils/scarb-proc-macro-server-types/Cargo.toml b/utils/scarb-proc-macro-server-types/Cargo.toml index 6263e831e..25db1ecd3 100644 --- a/utils/scarb-proc-macro-server-types/Cargo.toml +++ b/utils/scarb-proc-macro-server-types/Cargo.toml @@ -10,6 +10,6 @@ license.workspace = true repository.workspace = true [dependencies] -cairo-lang-macro = { version = "0.1", features = ["serde"] } +cairo-lang-macro = { path = "../../plugins/cairo-lang-macro", version = "0.1", features = ["serde"] } serde.workspace = true serde_json.workspace = true diff --git a/utils/scarb-test-support/src/proc_macro_server.rs b/utils/scarb-test-support/src/proc_macro_server.rs index cf1fefab0..c2d06bd26 100644 --- a/utils/scarb-test-support/src/proc_macro_server.rs +++ b/utils/scarb-test-support/src/proc_macro_server.rs @@ -19,7 +19,7 @@ use std::process::Stdio; pub const SIMPLE_MACROS: &str = r#" use cairo_lang_macro::{ ProcMacroResult, - TokenStream, + TokenStream, TokenTree, Token, TextSpan, attribute_macro, inline_macro, derive_macro, @@ -40,7 +40,15 @@ pub fn inline_some(token_stream: TokenStream) -> ProcMacroResult { #[derive_macro] fn some_derive(_token_stream: TokenStream)-> ProcMacroResult { - ProcMacroResult::new(TokenStream::new("impl SomeImpl of SomeTrait {}".to_string())) + let content = "impl SomeImpl of SomeTrait {}".to_string(); + let span = TextSpan { start: 0, end: content.len() }; + ProcMacroResult::new( + TokenStream::new(vec![ + TokenTree::Ident( + Token::new(content, span) + ) + ]) + ) } "#; From ff2600cf79ec72bd4e4c6cf5ff0cd8fff532a94b Mon Sep 17 00:00:00 2001 From: maciektr Date: Wed, 13 Nov 2024 12:40:26 +0100 Subject: [PATCH 02/13] Avoid unnecessary allocations when freeing stable structs (#1700) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit commit-id:55ec12c0 --- **Stack**: - #1734 - #1722 - #1704 - #1700 ⬅ ⚠️ *Part of a stack created by [spr](https://github.com/ejoffe/spr). Do not merge manually using the UI - doing so may have unexpected results.* --- plugins/cairo-lang-macro/src/lib.rs | 2 +- .../cairo-lang-macro/src/types/conversion.rs | 172 +++++++----------- scarb/src/compiler/plugin/proc_macro/ffi.rs | 8 +- 3 files changed, 74 insertions(+), 108 deletions(-) diff --git a/plugins/cairo-lang-macro/src/lib.rs b/plugins/cairo-lang-macro/src/lib.rs index 382f0c66a..0eb6bd16b 100644 --- a/plugins/cairo-lang-macro/src/lib.rs +++ b/plugins/cairo-lang-macro/src/lib.rs @@ -134,7 +134,7 @@ pub unsafe extern "C" fn expand( #[doc(hidden)] #[no_mangle] pub unsafe extern "C" fn free_result(result: StableProcMacroResult) { - ProcMacroResult::from_owned_stable(result); + ProcMacroResult::free_owned_stable(result); } /// Distributed slice for storing auxiliary data collection callback pointers. diff --git a/plugins/cairo-lang-macro/src/types/conversion.rs b/plugins/cairo-lang-macro/src/types/conversion.rs index 3242afd2f..2db534751 100644 --- a/plugins/cairo-lang-macro/src/types/conversion.rs +++ b/plugins/cairo-lang-macro/src/types/conversion.rs @@ -39,7 +39,7 @@ impl ProcMacroResult { /// Convert to native Rust representation, without taking the ownership of the string. 
/// - /// Note that you still need to free the memory by calling `from_owned_stable`. + /// Note that you still need to free the memory by calling `free_owned_stable`. /// /// # Safety #[doc(hidden)] @@ -62,31 +62,22 @@ impl ProcMacroResult { } } - /// Convert to native Rust representation, with taking the ownership of the string. + /// Take the ownership of memory under the pointer and drop it. /// /// Useful when you need to free the allocated memory. /// Only use on the same side of FFI-barrier, where the memory has been allocated. /// /// # Safety #[doc(hidden)] - pub unsafe fn from_owned_stable(result: StableProcMacroResult) -> Self { - let diagnostics = result.diagnostics.into_owned(); - let diagnostics = diagnostics - .into_iter() - .map(|d| Diagnostic::from_owned_stable(d)) - .collect::>(); - let full_path_markers = result - .full_path_markers - .into_owned() - .iter() - .map(|m| from_raw_cstring(*m)) - .collect::>(); - ProcMacroResult { - token_stream: TokenStream::from_owned_stable(result.token_stream), - aux_data: AuxData::from_owned_stable(result.aux_data), - diagnostics, - full_path_markers, + pub unsafe fn free_owned_stable(result: StableProcMacroResult) { + for diagnostic in result.diagnostics.into_owned() { + Diagnostic::free_owned_stable(diagnostic); + } + for marker in result.full_path_markers.into_owned() { + free_raw_cstring(marker) } + TokenStream::free_owned_stable(result.token_stream); + AuxData::free_owned_stable(result.aux_data); } } @@ -100,6 +91,7 @@ impl TextSpan { } } + /// Convert to native Rust representation, without taking the ownership. #[doc(hidden)] pub fn from_stable(span: &StableTextSpan) -> Self { Self { @@ -109,11 +101,8 @@ impl TextSpan { } #[doc(hidden)] - pub fn from_owned_stable(span: StableTextSpan) -> Self { - Self { - start: span.start, - end: span.end, - } + pub fn free_owned_stable(span: StableTextSpan) { + let _ = span; } } @@ -130,7 +119,7 @@ impl Token { /// Convert to native Rust representation, without taking the ownership of the string. /// - /// Note that you still need to free the memory by calling `from_owned_stable`. + /// Note that you still need to free the memory by calling `free_owned_stable`. /// /// # Safety #[doc(hidden)] @@ -141,18 +130,16 @@ impl Token { } } - /// Convert to native Rust representation, with taking the ownership of the string. + /// Take the ownership of memory under the pointer and drop it. /// /// Useful when you need to free the allocated memory. /// Only use on the same side of FFI-barrier, where the memory has been allocated. /// /// # Safety #[doc(hidden)] - pub unsafe fn from_owned_stable(token: StableToken) -> Self { - Self { - content: from_raw_cstring(token.content), - span: TextSpan::from_owned_stable(token.span), - } + pub unsafe fn free_owned_stable(token: StableToken) { + free_raw_cstring(token.content); + TextSpan::free_owned_stable(token.span); } } @@ -167,7 +154,7 @@ impl TokenTree { /// Convert to native Rust representation, without taking the ownership of the string. /// - /// Note that you still need to free the memory by calling `from_owned_stable`. + /// Note that you still need to free the memory by calling `free_owned_stable`. /// /// # Safety #[doc(hidden)] @@ -177,16 +164,18 @@ impl TokenTree { } } - /// Convert to native Rust representation, with taking the ownership of the string. + /// Take the ownership of memory under the pointer and drop it. /// /// Useful when you need to free the allocated memory. 
/// Only use on the same side of FFI-barrier, where the memory has been allocated. /// /// # Safety #[doc(hidden)] - pub unsafe fn from_owned_stable(token_tree: StableTokenTree) -> Self { + pub unsafe fn free_owned_stable(token_tree: StableTokenTree) { match token_tree { - StableTokenTree::Ident(token) => Self::Ident(Token::from_owned_stable(token)), + StableTokenTree::Ident(token) => { + Token::free_owned_stable(token); + } } } } @@ -210,7 +199,7 @@ impl TokenStream { /// Convert to native Rust representation, without taking the ownership of the string. /// - /// Note that you still need to free the memory by calling `from_owned_stable`. + /// Note that you still need to free the memory by calling `free_owned_stable`. /// /// # Safety #[doc(hidden)] @@ -226,23 +215,18 @@ impl TokenStream { } } - /// Convert to native Rust representation, with taking the ownership of the string. + /// Take the ownership of memory under the pointer and drop it. /// /// Useful when you need to free the allocated memory. /// Only use on the same side of FFI-barrier, where the memory has been allocated. /// /// # Safety #[doc(hidden)] - pub unsafe fn from_owned_stable(token_stream: StableTokenStream) -> Self { - let tokens = token_stream.tokens.into_owned(); - let tokens = tokens - .into_iter() - .map(|token_tree| TokenTree::from_owned_stable(token_tree)) - .collect::>(); - Self { - tokens, - metadata: TokenStreamMetadata::from_owned_stable(token_stream.metadata), + pub unsafe fn free_owned_stable(token_stream: StableTokenStream) { + for token_tree in token_stream.tokens.into_owned() { + TokenTree::free_owned_stable(token_tree); } + TokenStreamMetadata::free_owned_stable(token_stream.metadata); } } @@ -266,7 +250,7 @@ impl TokenStreamMetadata { /// Convert to native Rust representation, without taking the ownership of the string. /// - /// Note that you still need to free the memory by calling `from_owned_stable`. + /// Note that you still need to free the memory by calling `free_owned_stable`. /// /// # Safety #[doc(hidden)] @@ -281,21 +265,19 @@ impl TokenStreamMetadata { } } - /// Convert to native Rust representation, with taking the ownership of the string. + /// Take the ownership of memory under the pointer and drop it. /// /// Useful when you need to free the allocated memory. /// Only use on the same side of FFI-barrier, where the memory has been allocated. /// /// # Safety #[doc(hidden)] - pub unsafe fn from_owned_stable(metadata: StableTokenStreamMetadata) -> Self { - let original_file_path = metadata - .original_file_path - .map(|raw| from_raw_cstring(raw.as_ptr())); - let file_id = metadata.file_id.map(|raw| from_raw_cstring(raw.as_ptr())); - Self { - original_file_path, - file_id, + pub unsafe fn free_owned_stable(metadata: StableTokenStreamMetadata) { + if let Some(raw) = metadata.original_file_path { + free_raw_cstring(raw.as_ptr()); + } + if let Some(raw) = metadata.file_id { + free_raw_cstring(raw.as_ptr()); } } } @@ -324,7 +306,7 @@ impl AuxData { /// Convert to native Rust representation, without taking the ownership of the string. /// - /// Note that you still need to free the memory by calling `from_owned_stable`. + /// Note that you still need to free the memory by calling `free_owned_stable`. /// /// # Safety #[doc(hidden)] @@ -339,18 +321,20 @@ impl AuxData { } } - /// Convert to native Rust representation, with taking the ownership of the string. + /// Take the ownership of memory under the pointer and drop it. /// /// Useful when you need to free the allocated memory. 
/// Only use on the same side of FFI-barrier, where the memory has been allocated. /// /// # Safety #[doc(hidden)] - pub unsafe fn from_owned_stable(aux_data: StableAuxData) -> Option { + pub unsafe fn free_owned_stable(aux_data: StableAuxData) { match aux_data { - StableAuxData::None => None, - StableAuxData::Some(raw) => Some(Self::new(raw.into_owned())), - } + StableAuxData::None => {} + StableAuxData::Some(raw) => { + let _ = raw.into_owned(); + } + }; } } @@ -368,7 +352,7 @@ impl Diagnostic { /// Convert to native Rust representation, without taking the ownership of the string. /// - /// Note that you still need to free the memory by calling `from_owned_stable`. + /// Note that you still need to free the memory by calling `free_owned_stable`. /// /// # Safety #[doc(hidden)] @@ -379,18 +363,15 @@ impl Diagnostic { } } - /// Convert to native Rust representation, with taking the ownership of the string. + /// Take the ownership of memory under the pointer and drop it. /// /// Useful when you need to free the allocated memory. /// Only use on the same side of FFI-barrier, where the memory has been allocated. /// /// # Safety #[doc(hidden)] - pub unsafe fn from_owned_stable(diagnostic: StableDiagnostic) -> Self { - Self { - message: from_raw_cstring(diagnostic.message), - severity: Severity::from_stable(&diagnostic.severity), - } + pub unsafe fn free_owned_stable(diagnostic: StableDiagnostic) { + free_raw_cstring(diagnostic.message); } } @@ -429,7 +410,7 @@ impl ExpansionDefinition { } } - /// Take the ownership of the string. + /// Take the ownership of memory under the pointer and drop it. /// /// Useful when you need to free the allocated memory. /// Only use on the same side of FFI-barrier, where the memory has been allocated. @@ -437,7 +418,7 @@ impl ExpansionDefinition { /// # Safety #[doc(hidden)] pub unsafe fn free_owned(expansion: StableExpansion) { - let _ = from_raw_cstring(expansion.name); + free_raw_cstring(expansion.name); } } @@ -455,7 +436,7 @@ impl FullPathMarker { /// Convert to native Rust representation, without taking the ownership of the string. /// - /// Note that you still need to free the memory by calling `from_owned_stable`. + /// Note that you still need to free the memory by calling `free_owned_stable`. /// /// # Safety #[doc(hidden)] @@ -466,18 +447,16 @@ impl FullPathMarker { } } - /// Convert to native Rust representation, with taking the ownership of the string. + /// Take the ownership of memory under the pointer and drop it. /// /// Useful when you need to free the allocated memory. /// Only use on the same side of FFI-barrier, where the memory has been allocated. /// /// # Safety #[doc(hidden)] - pub unsafe fn from_owned_stable(marker: StableFullPathMarker) -> Self { - Self { - key: from_raw_cstring(marker.key), - full_path: from_raw_cstring(marker.full_path), - } + pub unsafe fn free_owned_stable(marker: StableFullPathMarker) { + free_raw_cstring(marker.key); + free_raw_cstring(marker.full_path); } } @@ -507,7 +486,7 @@ impl PostProcessContext { /// Convert to native Rust representation, without taking the ownership of the string. /// - /// Note that you still need to free the memory by calling `from_owned_stable`. + /// Note that you still need to free the memory by calling `free_owned_stable`. /// /// # Safety #[doc(hidden)] @@ -528,41 +507,28 @@ impl PostProcessContext { } } - /// Convert to native Rust representation, with taking the ownership of the string. + /// Take the ownership of memory under the pointer and drop it. 
/// /// Useful when you need to free the allocated memory. /// Only use on the same side of FFI-barrier, where the memory has been allocated. /// /// # Safety #[doc(hidden)] - pub unsafe fn from_owned_stable(diagnostic: StablePostProcessContext) -> Self { - let aux_data = diagnostic - .aux_data - .into_owned() - .into_iter() - .filter_map(|a| AuxData::from_owned_stable(a)) - .collect::>(); - let full_path_markers = diagnostic - .full_path_markers - .into_owned() - .into_iter() - .map(|m| FullPathMarker::from_owned_stable(m)) - .collect::>(); - Self { - aux_data, - full_path_markers, + pub unsafe fn free_owned_stable(diagnostic: StablePostProcessContext) { + for aux_data in diagnostic.aux_data.into_owned() { + AuxData::free_owned_stable(aux_data) + } + for marker in diagnostic.full_path_markers.into_owned() { + FullPathMarker::free_owned_stable(marker); } } } -// Create a string from a raw pointer to a c_char. +// Create a c-string from a raw pointer to a c_char, and drop it immediately. // Note that this will free the underlying memory. -unsafe fn from_raw_cstring(raw: *mut c_char) -> String { - if raw.is_null() { - String::default() - } else { - let cstr = CString::from_raw(raw); - cstr.to_string_lossy().to_string() +unsafe fn free_raw_cstring(raw: *mut c_char) { + if !raw.is_null() { + let _ = CString::from_raw(raw); } } diff --git a/scarb/src/compiler/plugin/proc_macro/ffi.rs b/scarb/src/compiler/plugin/proc_macro/ffi.rs index 75710f9a9..fa93a6b57 100644 --- a/scarb/src/compiler/plugin/proc_macro/ffi.rs +++ b/scarb/src/compiler/plugin/proc_macro/ffi.rs @@ -163,7 +163,7 @@ impl ProcMacroInstance { attr: TokenStream, token_stream: TokenStream, ) -> ProcMacroResult { - // This must be manually freed with call to from_owned_stable. + // This must be manually freed with call to `free_owned_stable`. let stable_token_stream = token_stream.into_stable(); let stable_attr = attr.into_stable(); // Allocate proc macro name. @@ -177,8 +177,8 @@ impl ProcMacroInstance { // Free the memory allocated by the `stable_token_stream`. // This will call `CString::from_raw` under the hood, to take ownership. unsafe { - TokenStream::from_owned_stable(stable_result.input); - TokenStream::from_owned_stable(stable_result.input_attr); + TokenStream::free_owned_stable(stable_result.input); + TokenStream::free_owned_stable(stable_result.input_attr); }; // Create Rust representation of the result. // Note, that the memory still needs to be freed on the allocator side! @@ -203,7 +203,7 @@ impl ProcMacroInstance { // Actual call to FFI interface for aux data callback. let context = (self.plugin.vtable.post_process_callback)(context); // Free the allocated memory. - let _ = unsafe { PostProcessContext::from_owned_stable(context) }; + unsafe { PostProcessContext::free_owned_stable(context) }; } pub fn doc(&self, item_name: SmolStr) -> Option { From 141e6edb1a4384b3a57bb172357c5a237be8a090 Mon Sep 17 00:00:00 2001 From: maciektr Date: Wed, 13 Nov 2024 14:37:35 +0100 Subject: [PATCH 03/13] Add cairo edition to TokenStreamMetadata (#1704) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit commit-id:83c7db1f --- **Stack**: - #1734 - #1722 - #1704 ⬅ ⚠️ *Part of a stack created by [spr](https://github.com/ejoffe/spr). 
Do not merge manually using the UI - doing so may have unexpected results.* --- plugins/cairo-lang-macro-stable/src/lib.rs | 1 + plugins/cairo-lang-macro/src/types/conversion.rs | 13 +++++++++++-- plugins/cairo-lang-macro/src/types/token.rs | 5 ++++- scarb/src/compiler/plugin/proc_macro/host.rs | 16 +++++++++++----- scarb/tests/build_cairo_plugin.rs | 3 +++ 5 files changed, 30 insertions(+), 8 deletions(-) diff --git a/plugins/cairo-lang-macro-stable/src/lib.rs b/plugins/cairo-lang-macro-stable/src/lib.rs index fc5542987..912a72847 100644 --- a/plugins/cairo-lang-macro-stable/src/lib.rs +++ b/plugins/cairo-lang-macro-stable/src/lib.rs @@ -54,6 +54,7 @@ pub struct StableTokenStream { pub struct StableTokenStreamMetadata { pub original_file_path: Option>, pub file_id: Option>, + pub edition: Option>, } /// Auxiliary data returned by the procedural macro. diff --git a/plugins/cairo-lang-macro/src/types/conversion.rs b/plugins/cairo-lang-macro/src/types/conversion.rs index 2db534751..924ee4c56 100644 --- a/plugins/cairo-lang-macro/src/types/conversion.rs +++ b/plugins/cairo-lang-macro/src/types/conversion.rs @@ -238,13 +238,17 @@ impl TokenStreamMetadata { pub fn into_stable(self) -> StableTokenStreamMetadata { let original_file_path = self .original_file_path - .and_then(|path| NonNull::new(CString::new(path).unwrap().into_raw())); + .and_then(|value| NonNull::new(CString::new(value).unwrap().into_raw())); let file_id = self .file_id - .and_then(|path| NonNull::new(CString::new(path).unwrap().into_raw())); + .and_then(|value| NonNull::new(CString::new(value).unwrap().into_raw())); + let edition = self + .edition + .and_then(|value| NonNull::new(CString::new(value).unwrap().into_raw())); StableTokenStreamMetadata { original_file_path, file_id, + edition, } } @@ -259,9 +263,11 @@ impl TokenStreamMetadata { .original_file_path .map(|raw| from_raw_cstr(raw.as_ptr())); let file_id = metadata.file_id.map(|raw| from_raw_cstr(raw.as_ptr())); + let edition = metadata.edition.map(|raw| from_raw_cstr(raw.as_ptr())); Self { original_file_path, file_id, + edition, } } @@ -279,6 +285,9 @@ impl TokenStreamMetadata { if let Some(raw) = metadata.file_id { free_raw_cstring(raw.as_ptr()); } + if let Some(raw) = metadata.edition { + free_raw_cstring(raw.as_ptr()); + } } } diff --git a/plugins/cairo-lang-macro/src/types/token.rs b/plugins/cairo-lang-macro/src/types/token.rs index 52b50068f..97f622fa1 100644 --- a/plugins/cairo-lang-macro/src/types/token.rs +++ b/plugins/cairo-lang-macro/src/types/token.rs @@ -53,6 +53,8 @@ pub struct TokenStreamMetadata { /// /// It is guaranteed, that the `file_id` will be unique for each file. pub file_id: Option, + /// Cairo edition defined for the token stream. 
+ pub edition: Option, } impl TokenStream { @@ -102,10 +104,11 @@ impl Display for TokenStream { impl TokenStreamMetadata { #[doc(hidden)] - pub fn new(file_path: impl ToString, file_id: impl ToString) -> Self { + pub fn new(file_path: impl ToString, file_id: impl ToString, edition: impl ToString) -> Self { Self { original_file_path: Some(file_path.to_string()), file_id: Some(file_id.to_string()), + edition: Some(edition.to_string()), } } } diff --git a/scarb/src/compiler/plugin/proc_macro/host.rs b/scarb/src/compiler/plugin/proc_macro/host.rs index c19a363e3..4056191f9 100644 --- a/scarb/src/compiler/plugin/proc_macro/host.rs +++ b/scarb/src/compiler/plugin/proc_macro/host.rs @@ -2,7 +2,7 @@ use crate::compiler::plugin::proc_macro::compilation::SharedLibraryProvider; use crate::compiler::plugin::proc_macro::{ Expansion, ExpansionKind, ProcMacroInstance, TokenStreamBuilder, }; -use crate::core::{Config, Package, PackageId}; +use crate::core::{edition_variant, Config, Package, PackageId}; use anyhow::{ensure, Context, Result}; use cairo_lang_defs::ids::{ModuleItemId, TopLevelLanguageElementId}; use cairo_lang_defs::patcher::{PatchBuilder, RewriteNode}; @@ -12,6 +12,7 @@ use cairo_lang_defs::plugin::{ }; use cairo_lang_defs::plugin::{InlineMacroExprPlugin, InlinePluginResult, PluginDiagnostic}; use cairo_lang_diagnostics::ToOption; +use cairo_lang_filesystem::db::Edition; use cairo_lang_filesystem::ids::CodeMapping; use cairo_lang_macro::{ AuxData, Diagnostic, FullPathMarker, ProcMacroResult, Severity, TokenStream, @@ -845,11 +846,16 @@ impl ProcMacroHostPlugin { .or_insert(markers); } - fn calculate_metadata(db: &dyn SyntaxGroup, item_ast: ast::ModuleItem) -> TokenStreamMetadata { + fn calculate_metadata( + db: &dyn SyntaxGroup, + item_ast: ast::ModuleItem, + edition: Edition, + ) -> TokenStreamMetadata { let stable_ptr = item_ast.clone().stable_ptr().untyped(); let file_path = stable_ptr.file_id(db).full_path(db.upcast()); let file_id = short_hash(file_path.clone()); - TokenStreamMetadata::new(file_path, file_id) + let edition = edition_variant(edition); + TokenStreamMetadata::new(file_path, file_id, edition) } } @@ -939,9 +945,9 @@ impl MacroPlugin for ProcMacroHostPlugin { &self, db: &dyn SyntaxGroup, item_ast: ast::ModuleItem, - _metadata: &MacroPluginMetadata<'_>, + metadata: &MacroPluginMetadata<'_>, ) -> PluginResult { - let stream_metadata = Self::calculate_metadata(db, item_ast.clone()); + let stream_metadata = Self::calculate_metadata(db, item_ast.clone(), metadata.edition); // Handle inner functions. if let InnerAttrExpansionResult::Some(result) = self.expand_inner_attr(db, item_ast.clone()) diff --git a/scarb/tests/build_cairo_plugin.rs b/scarb/tests/build_cairo_plugin.rs index 344b1de2c..7ccbbcd21 100644 --- a/scarb/tests/build_cairo_plugin.rs +++ b/scarb/tests/build_cairo_plugin.rs @@ -560,6 +560,9 @@ fn can_read_token_stream_metadata() { file_id: Some( "[..]", ), + edition: Some( + "[..]", + ), } [..]Finished `dev` profile target(s) in [..] "#}); From 9444997923dd882262871a95ffb87147b2030fda Mon Sep 17 00:00:00 2001 From: maciektr Date: Fri, 15 Nov 2024 11:38:44 +0100 Subject: [PATCH 04/13] Allocate token's content into an arena (#1722) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit commit-id:5e4b122f Add token stream ser/de tests --- **Stack**: - #1749 - #1748 - #1745 - #1747 - #1722 ⬅ ⚠️ *Part of a stack created by [spr](https://github.com/ejoffe/spr). 
Do not merge manually using the UI - doing so may have unexpected results.* --- Cargo.lock | 3 +- Cargo.toml | 1 + plugins/cairo-lang-macro-stable/src/lib.rs | 4 +- plugins/cairo-lang-macro/Cargo.toml | 3 +- plugins/cairo-lang-macro/src/lib.rs | 64 ++-- .../cairo-lang-macro/src/types/conversion.rs | 55 ++-- plugins/cairo-lang-macro/src/types/mod.rs | 2 +- plugins/cairo-lang-macro/src/types/token.rs | 277 +++++++++++++++++- scarb/src/compiler/plugin/proc_macro/ffi.rs | 4 +- scarb/src/compiler/plugin/proc_macro/host.rs | 74 +++-- scarb/src/compiler/plugin/proc_macro/types.rs | 41 +-- scarb/tests/build_cairo_plugin.rs | 11 +- scarb/tests/package.rs | 1 + scarb/tests/proc_macro_server.rs | 6 +- 14 files changed, 426 insertions(+), 120 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ac161ba04..c09bcef81 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -748,7 +748,8 @@ dependencies = [ name = "cairo-lang-macro" version = "0.1.1" dependencies = [ - "cairo-lang-macro-attributes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bumpalo", + "cairo-lang-macro-attributes 0.1.0", "cairo-lang-macro-stable 1.0.0", "linkme", "serde", diff --git a/Cargo.toml b/Cargo.toml index 626a5d29c..be633fef8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -51,6 +51,7 @@ anyhow = "1" assert_fs = "1" async-trait = "0.1" axum = { version = "0.6", features = ["http2"] } +bumpalo = "3.16.0" cairo-lang-compiler = "*" cairo-lang-defs = "*" cairo-lang-diagnostics = "*" diff --git a/plugins/cairo-lang-macro-stable/src/lib.rs b/plugins/cairo-lang-macro-stable/src/lib.rs index 912a72847..c836ed850 100644 --- a/plugins/cairo-lang-macro-stable/src/lib.rs +++ b/plugins/cairo-lang-macro-stable/src/lib.rs @@ -9,7 +9,8 @@ pub mod ffi; #[derive(Debug)] pub struct StableToken { pub span: StableTextSpan, - pub content: *mut c_char, + pub ptr: *const u8, + pub len: usize, } #[repr(C)] @@ -44,6 +45,7 @@ pub type StableExpansionsList = StableSlice; pub struct StableTokenStream { pub tokens: StableSlice, pub metadata: StableTokenStreamMetadata, + pub size_hint: usize, } /// Token stream metadata. diff --git a/plugins/cairo-lang-macro/Cargo.toml b/plugins/cairo-lang-macro/Cargo.toml index 40fe2249c..17e6c393b 100644 --- a/plugins/cairo-lang-macro/Cargo.toml +++ b/plugins/cairo-lang-macro/Cargo.toml @@ -14,7 +14,8 @@ readme = "README.md" repository.workspace = true [dependencies] -cairo-lang-macro-attributes = "0.1" +bumpalo.workspace = true +cairo-lang-macro-attributes = { path = "../cairo-lang-macro-attributes" } cairo-lang-macro-stable = { path = "../cairo-lang-macro-stable" } linkme.workspace = true serde = { workspace = true, optional = true } diff --git a/plugins/cairo-lang-macro/src/lib.rs b/plugins/cairo-lang-macro/src/lib.rs index 0eb6bd16b..4b31e04ad 100644 --- a/plugins/cairo-lang-macro/src/lib.rs +++ b/plugins/cairo-lang-macro/src/lib.rs @@ -18,17 +18,22 @@ pub use cairo_lang_macro_attributes::*; #[doc(hidden)] pub use linkme; +use std::cell::RefCell; use cairo_lang_macro_stable::ffi::StableSlice; use cairo_lang_macro_stable::{ StableExpansionsList, StablePostProcessContext, StableProcMacroResult, }; use std::ffi::{c_char, CStr, CString}; +use std::ops::Deref; mod types; pub use types::*; +// A thread-local allocation context for allocating tokens on proc macro side. 
+thread_local!(static CONTEXT: RefCell<AllocationContext> = RefCell::default() );
+
 #[doc(hidden)]
 #[derive(Clone)]
 pub struct ExpansionDefinition {
@@ -97,29 +102,42 @@ pub unsafe extern "C" fn expand(
     stable_attr: cairo_lang_macro_stable::StableTokenStream,
     stable_token_stream: cairo_lang_macro_stable::StableTokenStream,
 ) -> cairo_lang_macro_stable::StableResultWrapper {
-    let token_stream = TokenStream::from_stable(&stable_token_stream);
-    let attr_token_stream = TokenStream::from_stable(&stable_attr);
-    let item_name = CStr::from_ptr(item_name).to_string_lossy().to_string();
-    let fun = MACRO_DEFINITIONS_SLICE
-        .iter()
-        .find_map(|m| {
-            if m.name == item_name.as_str() {
-                Some(m.fun.clone())
-            } else {
-                None
-            }
-        })
-        .expect("procedural macro not found");
-    let result = match fun {
-        ExpansionFunc::Attr(fun) => fun(attr_token_stream, token_stream),
-        ExpansionFunc::Other(fun) => fun(token_stream),
-    };
-    let result: StableProcMacroResult = result.into_stable();
-    cairo_lang_macro_stable::StableResultWrapper {
-        input: stable_token_stream,
-        input_attr: stable_attr,
-        output: result,
-    }
+    CONTEXT.with(|ctx_cell| {
+        // Read size hint from stable token stream. This will be used to create a sufficiently
+        // large bump allocation buffer.
+        let size_hint: usize = stable_token_stream.size_hint + stable_attr.size_hint;
+        // Replace the allocation context with a new one.
+        // If there are no interned string guards, the old context will be de-allocated.
+        ctx_cell.replace(AllocationContext::with_capacity(size_hint));
+        let ctx_borrow = ctx_cell.borrow();
+        let ctx: &AllocationContext = ctx_borrow.deref();
+        // Copy the stable token stream into the current context.
+        let token_stream = TokenStream::from_stable_in(&stable_token_stream, ctx);
+        let attr_token_stream = TokenStream::from_stable_in(&stable_attr, ctx);
+        let item_name = CStr::from_ptr(item_name)
+            .to_str()
+            .expect("item name must be a valid string");
+        let fun = MACRO_DEFINITIONS_SLICE
+            .iter()
+            .find_map(|m| {
+                if m.name == item_name {
+                    Some(m.fun.clone())
+                } else {
+                    None
+                }
+            })
+            .expect("procedural macro not found");
+        let result = match fun {
+            ExpansionFunc::Attr(fun) => fun(attr_token_stream, token_stream),
+            ExpansionFunc::Other(fun) => fun(token_stream),
+        };
+        let result: StableProcMacroResult = result.into_stable();
+        cairo_lang_macro_stable::StableResultWrapper {
+            input: stable_token_stream,
+            input_attr: stable_attr,
+            output: result,
+        }
+    })
 }
 
 /// Free the memory allocated for the [`StableProcMacroResult`].
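The `expand` entry point above is the consumer of the new allocation scheme: each call sizes a fresh thread-local bump arena from the incoming `size_hint` and interns every token's content into it. Below is a minimal, self-contained sketch of the underlying Rc-guarded arena-interning pattern, assuming bumpalo 3.x; the `Interned` and `intern` names are illustrative stand-ins for `InternedStr` and `AllocationContext::intern`, not part of the crate's API.

use bumpalo::Bump;
use std::rc::Rc;

// Illustrative stand-in for `InternedStr`: a raw `*const str` kept valid
// by an `Rc` guard on the arena that owns the bytes.
struct Interned {
    ptr: *const str,
    _guard: Rc<Bump>,
}

impl Interned {
    fn as_str(&self) -> &str {
        // Sound while `_guard` is alive: bumpalo never moves or frees
        // an allocation before the whole arena is dropped.
        unsafe { &*self.ptr }
    }
}

fn intern(arena: &Rc<Bump>, s: &str) -> Interned {
    let allocated: &str = arena.alloc_str(s);
    Interned {
        ptr: allocated as *const str,
        _guard: Rc::clone(arena),
    }
}

fn main() {
    // Pre-size the arena from a size hint, as `expand` does above.
    let arena = Rc::new(Bump::with_capacity(1024));
    let token = intern(&arena, "fn main() {}");
    drop(arena); // the guard inside `token` still co-owns the buffer
    assert_eq!(token.as_str(), "fn main() {}");
}

The real `InternedStr` additionally wraps the arena in `BumpWrap` so the buffer is reset on drop, but the lifetime argument is the same: an interned pointer cannot outlive its buffer, because every interned string co-owns that buffer.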
diff --git a/plugins/cairo-lang-macro/src/types/conversion.rs b/plugins/cairo-lang-macro/src/types/conversion.rs index 924ee4c56..2a9e6338d 100644 --- a/plugins/cairo-lang-macro/src/types/conversion.rs +++ b/plugins/cairo-lang-macro/src/types/conversion.rs @@ -1,6 +1,7 @@ use crate::{ - AuxData, Diagnostic, ExpansionDefinition, FullPathMarker, PostProcessContext, ProcMacroResult, - Severity, TextSpan, Token, TokenStream, TokenStreamMetadata, TokenTree, + AllocationContext, AuxData, Diagnostic, ExpansionDefinition, FullPathMarker, + PostProcessContext, ProcMacroResult, Severity, TextSpan, Token, TokenStream, + TokenStreamMetadata, TokenTree, }; use cairo_lang_macro_stable::ffi::StableSlice; use cairo_lang_macro_stable::{ @@ -30,7 +31,7 @@ impl ProcMacroResult { .map(|m| CString::new(m).unwrap().into_raw()) .collect::>(); StableProcMacroResult { - token_stream: self.token_stream.into_stable(), + token_stream: self.token_stream.as_stable(), aux_data: AuxData::maybe_into_stable(self.aux_data), diagnostics: StableSlice::new(diagnostics), full_path_markers: StableSlice::new(full_path_markers), @@ -44,6 +45,7 @@ impl ProcMacroResult { /// # Safety #[doc(hidden)] pub unsafe fn from_stable(result: &StableProcMacroResult) -> Self { + let ctx = AllocationContext::with_capacity(result.token_stream.size_hint); let (ptr, n) = result.diagnostics.raw_parts(); let diagnostics = slice::from_raw_parts(ptr, n) .iter() @@ -55,7 +57,7 @@ impl ProcMacroResult { .map(|m| from_raw_cstr(*m)) .collect::>(); ProcMacroResult { - token_stream: TokenStream::from_stable(&result.token_stream), + token_stream: TokenStream::from_stable_in(&result.token_stream, &ctx), diagnostics, full_path_markers, aux_data: AuxData::from_stable(&result.aux_data), @@ -109,11 +111,13 @@ impl TextSpan { impl Token { /// Convert to FFI-safe representation. #[doc(hidden)] - pub fn into_stable(self) -> StableToken { - let cstr = CString::new(self.content.as_bytes()).unwrap(); + pub fn as_stable(&self) -> StableToken { + let ptr = self.content.as_ptr(); + let len = self.content.len(); StableToken { - span: self.span.into_stable(), - content: cstr.into_raw(), + span: self.span.clone().into_stable(), + ptr, + len, } } @@ -123,9 +127,11 @@ impl Token { /// /// # Safety #[doc(hidden)] - pub unsafe fn from_stable(token: &StableToken) -> Self { + pub unsafe fn from_stable_in(token: &StableToken, ctx: &AllocationContext) -> Self { + let content = slice::from_raw_parts(token.ptr, token.len); + let content = ctx.intern(std::str::from_utf8(content).unwrap()); Self { - content: from_raw_cstr(token.content), + content, span: TextSpan::from_stable(&token.span), } } @@ -138,7 +144,6 @@ impl Token { /// # Safety #[doc(hidden)] pub unsafe fn free_owned_stable(token: StableToken) { - free_raw_cstring(token.content); TextSpan::free_owned_stable(token.span); } } @@ -146,9 +151,9 @@ impl Token { impl TokenTree { /// Convert to FFI-safe representation. 
     #[doc(hidden)]
-    pub fn into_stable(self) -> StableTokenTree {
+    pub fn as_stable(&self) -> StableTokenTree {
         match self {
-            Self::Ident(token) => StableTokenTree::Ident(token.into_stable()),
+            Self::Ident(token) => StableTokenTree::Ident(token.as_stable()),
         }
     }
 
@@ -158,9 +163,9 @@ impl TokenTree {
     ///
     /// # Safety
     #[doc(hidden)]
-    pub unsafe fn from_stable(token_tree: &StableTokenTree) -> Self {
+    pub unsafe fn from_stable_in(token_tree: &StableTokenTree, ctx: &AllocationContext) -> Self {
         match token_tree {
-            StableTokenTree::Ident(token) => Self::Ident(Token::from_stable(token)),
+            StableTokenTree::Ident(token) => Self::Ident(Token::from_stable_in(token, ctx)),
         }
     }
 
@@ -185,15 +190,20 @@ impl TokenStream {
     ///
     /// # Safety
     #[doc(hidden)]
-    pub fn into_stable(self) -> StableTokenStream {
+    pub fn as_stable(&self) -> StableTokenStream {
+        let mut size_hint: usize = 0;
         let tokens = self
             .tokens
-            .into_iter()
-            .map(|token| token.into_stable())
+            .iter()
+            .map(|token| {
+                size_hint += token.size_hint();
+                token.as_stable()
+            })
             .collect::<Vec<_>>();
         StableTokenStream {
             tokens: StableSlice::new(tokens),
-            metadata: self.metadata.into_stable(),
+            metadata: self.metadata.clone().into_stable(),
+            size_hint,
         }
     }
 
@@ -203,11 +213,14 @@ impl TokenStream {
     ///
     /// # Safety
     #[doc(hidden)]
-    pub unsafe fn from_stable(token_stream: &StableTokenStream) -> Self {
+    pub unsafe fn from_stable_in(
+        token_stream: &StableTokenStream,
+        ctx: &AllocationContext,
+    ) -> Self {
         let (ptr, n) = token_stream.tokens.raw_parts();
         let tokens = slice::from_raw_parts(ptr, n)
             .iter()
-            .map(|token_tree| TokenTree::from_stable(token_tree))
+            .map(|token_tree| TokenTree::from_stable_in(token_tree, ctx))
             .collect::<Vec<_>>();
         Self {
             tokens,
diff --git a/plugins/cairo-lang-macro/src/types/mod.rs b/plugins/cairo-lang-macro/src/types/mod.rs
index 618d61746..45ab5fca2 100644
--- a/plugins/cairo-lang-macro/src/types/mod.rs
+++ b/plugins/cairo-lang-macro/src/types/mod.rs
@@ -44,7 +44,7 @@ pub struct ProcMacroResult {
 /// let token_stream = TokenStream::new(vec![
 ///     TokenTree::Ident(
 ///         Token::new(
-///             code.clone(),
+///             &code,
 ///             TextSpan::new(0, code.len())
 ///         )
 ///     )
diff --git a/plugins/cairo-lang-macro/src/types/token.rs b/plugins/cairo-lang-macro/src/types/token.rs
index 97f622fa1..95dab2cb5 100644
--- a/plugins/cairo-lang-macro/src/types/token.rs
+++ b/plugins/cairo-lang-macro/src/types/token.rs
@@ -1,17 +1,85 @@
+use crate::CONTEXT;
+use bumpalo::Bump;
 use std::fmt::Display;
+use std::hash::{Hash, Hasher};
+use std::ops::Deref;
+use std::rc::Rc;
 
 /// An abstract stream of Cairo tokens.
 ///
 /// This is both input and part of an output of a procedural macro.
 #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+#[cfg_attr(feature = "serde", serde(try_from = "deserializer::TokenStream"))]
 #[derive(Debug, Default, Clone, PartialEq, Eq, Hash)]
 pub struct TokenStream {
     pub tokens: Vec<TokenTree>,
     pub metadata: TokenStreamMetadata,
 }
 
+/// This module implements deserialization of the token stream for the serde feature.
+/// This intermediate representation is needed, as a real [`Token`] only contains a reference to the
+/// represented string, which needs to be allocated outside the [`Token`] struct.
+/// Here we deserialize each token into an owned String with serde and then copy its content into the context.
+#[cfg(feature = "serde")]
+#[doc(hidden)]
+mod deserializer {
+    use crate::{AllocationContext, TextSpan, TokenStreamMetadata};
+    use std::fmt::{Display, Formatter};
+
+    #[derive(serde::Serialize, serde::Deserialize)]
+    pub struct TokenStream {
+        pub tokens: Vec<TokenTree>,
+        pub metadata: TokenStreamMetadata,
+    }
+
+    #[derive(serde::Serialize, serde::Deserialize)]
+    pub enum TokenTree {
+        Ident(Token),
+    }
+
+    #[derive(serde::Serialize, serde::Deserialize)]
+    pub struct Token {
+        pub content: String,
+        pub span: TextSpan,
+    }
+
+    pub struct Error {}
+
+    impl Display for Error {
+        fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+            f.write_str("TokenStream deserialization error")
+        }
+    }
+
+    impl TryFrom<TokenStream> for crate::TokenStream {
+        type Error = Error;
+
+        fn try_from(value: TokenStream) -> Result<Self, Self::Error> {
+            let ctx = AllocationContext::default();
+            let tokens = value
+                .tokens
+                .into_iter()
+                .map(|token| match token {
+                    TokenTree::Ident(token) => {
+                        let content = ctx.intern(token.content.as_str());
+                        let token = crate::Token {
+                            content,
+                            span: token.span,
+                        };
+                        crate::TokenTree::Ident(token)
+                    }
+                })
+                .collect::<Vec<_>>();
+            Ok(Self {
+                tokens,
+                metadata: value.metadata,
+            })
+        }
+    }
+}
+
 /// A single token or a delimited sequence of token trees.
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+#[cfg_attr(feature = "serde", derive(serde::Serialize))]
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub enum TokenTree {
     Ident(Token),
@@ -23,6 +91,16 @@ impl Default for TokenTree {
     }
 }
 
+impl TokenTree {
+    /// Get the size hint for the [`TokenTree`].
+    /// This can be used to estimate size of a buffer needed for allocating this [`TokenTree`].
+    pub(crate) fn size_hint(&self) -> usize {
+        match self {
+            Self::Ident(token) => token.size_hint(),
+        }
+    }
+}
+
 /// A range of text offsets that form a span (like text selection).
 #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
 #[derive(Debug, Default, Clone, PartialEq, Eq, Hash)]
@@ -34,13 +112,130 @@ pub struct TextSpan {
 /// A single Cairo token.
 ///
 /// The most atomic item of Cairo code representation.
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+#[cfg_attr(feature = "serde", derive(serde::Serialize))]
 #[derive(Debug, Default, Clone, PartialEq, Eq, Hash)]
 pub struct Token {
-    pub content: String,
+    pub content: InternedStr,
     pub span: TextSpan,
 }
 
+impl Token {
+    /// Get the size hint for the [`Token`].
+    /// This can be used to estimate size of a buffer needed for allocating this [`Token`].
+    pub(crate) fn size_hint(&self) -> usize {
+        self.content.deref().len()
+    }
+}
+
+/// A wrapper over a string pointer.
+/// This contains a pointer to a string allocated in a bump allocator
+/// and a guard which keeps the buffer alive.
+/// This way we do not need to allocate a new string,
+/// but also do not need to worry about the lifetime of the string.
+#[derive(Debug, Clone)]
+pub struct InternedStr {
+    ptr: *const str,
+    // Holding a rc to the underlying buffer, so that ptr will always point to valid memory.
+    _bump: Rc<BumpWrap>,
+}
+
+impl InternedStr {
+    #[allow(unknown_lints)]
+    #[allow(private_interfaces)]
+    #[doc(hidden)]
+    pub(crate) fn new_in(s: &str, bump: Rc<BumpWrap>) -> Self {
+        let allocated = bump.0.alloc_str(s);
+        let ptr = allocated as *const str;
+        Self { ptr, _bump: bump }
+    }
+}
+
+impl Default for InternedStr {
+    fn default() -> Self {
+        Self {
+            ptr: "" as *const str,
+            _bump: Rc::default(),
+        }
+    }
+}
+
+impl AsRef<str> for InternedStr {
+    fn as_ref(&self) -> &str {
+        self.deref()
+    }
+}
+
+impl Deref for InternedStr {
+    type Target = str;
+
+    fn deref(&self) -> &Self::Target {
+        unsafe { &*self.ptr }
+    }
+}
+
+#[cfg(feature = "serde")]
+impl serde::Serialize for InternedStr {
+    fn serialize<S: serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
+        s.serialize_str(self.as_ref())
+    }
+}
+
+impl PartialEq for InternedStr {
+    fn eq(&self, other: &Self) -> bool {
+        self.as_ref().eq(other.as_ref())
+    }
+}
+
+impl Eq for InternedStr {}
+
+impl Hash for InternedStr {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        self.as_ref().hash(state);
+    }
+}
+
+/// This wrapper de-allocates the underlying buffer on drop.
+#[derive(Debug, Default)]
+pub(crate) struct BumpWrap(pub Bump);
+
+impl Drop for BumpWrap {
+    fn drop(&mut self) {
+        self.0.reset();
+    }
+}
+
+/// A context for allocating Cairo tokens.
+/// This wrapper contains a bump allocator, which is used to allocate strings for tokens.
+#[derive(Clone)]
+pub struct AllocationContext {
+    bump: Rc<BumpWrap>,
+}
+
+impl AllocationContext {
+    /// Allocate a new context with a pre-determined buffer size.
+    pub fn with_capacity(size_hint: usize) -> Self {
+        Self {
+            bump: Rc::new(BumpWrap(Bump::with_capacity(size_hint))),
+        }
+    }
+
+    /// Allocate a string in the context.
+    /// This returns a string pointer, guarded by a reference counter to the buffer.
+    /// The buffer will be deallocated when the last reference to the buffer is dropped.
+    /// No special handling or lifetimes are needed for the string.
+    pub(crate) fn intern(&self, value: &str) -> InternedStr {
+        InternedStr::new_in(value, self.bump.clone())
+    }
+}
+
+impl Default for AllocationContext {
+    fn default() -> Self {
+        Self {
+            bump: Rc::new(BumpWrap(Bump::new())),
+        }
+    }
+}
+
 /// Metadata of [`TokenStream`].
 ///
 /// This struct describes the origin of the [`TokenStream`].
@@ -84,8 +279,9 @@ impl TokenStream {
         &self.metadata
     }
 
+    /// Check if the [`TokenStream`] is empty.
     pub fn is_empty(&self) -> bool {
-        self.to_string().is_empty()
+        self.tokens.is_empty()
     }
 }
 
@@ -94,7 +290,7 @@ impl Display for TokenStream {
         for token in &self.tokens {
             match token {
                 TokenTree::Ident(token) => {
-                    write!(f, "{}", token.content.clone())?;
+                    write!(f, "{}", token.content.as_ref())?;
                 }
             }
         }
@@ -114,19 +310,88 @@ impl TokenStreamMetadata {
     }
 }
 
 impl TokenTree {
+    /// Create a new [`TokenTree`] from an identifier [`Token`].
     pub fn from_ident(token: Token) -> Self {
         Self::Ident(token)
     }
 }
 
 impl TextSpan {
+    /// Create a new [`TextSpan`].
     pub fn new(start: usize, end: usize) -> TextSpan {
         TextSpan { start, end }
     }
 }
 
 impl Token {
-    pub fn new(content: String, span: TextSpan) -> Self {
+    /// Create [`Token`] in the thread-local context.
+    pub fn new(content: impl AsRef<str>, span: TextSpan) -> Self {
+        CONTEXT.with(|ctx| {
+            let ctx_borrow = ctx.borrow();
+            let ctx: &AllocationContext = ctx_borrow.deref();
+            Self::new_in(content, span, ctx)
+        })
+    }
+
+    /// Create [`Token`] in the specified context.
+ pub fn new_in(content: impl AsRef, span: TextSpan, ctx: &AllocationContext) -> Self { + let content = ctx.intern(content.as_ref()); Self { content, span } } } + +#[cfg(test)] +mod test { + use crate::{AllocationContext, TextSpan, Token, TokenStream, TokenTree}; + + #[test] + pub fn can_serde_empty_token_stream() { + let original = TokenStream::empty(); + let serialized = serde_json::to_string(&original).unwrap(); + let derived: TokenStream = serde_json::from_str(serialized.as_str()).unwrap(); + assert_eq!(original, derived); + let val: serde_json::Value = serde_json::from_str(serialized.as_str()).unwrap(); + assert_eq!( + val, + serde_json::json!({ + "tokens": [], + "metadata": { + "original_file_path": null, + "file_id": null, + "edition": null + } + }) + ); + } + + #[test] + pub fn can_serde_token_stream() { + let ctx = AllocationContext::default(); + let original = TokenStream::new(vec![ + TokenTree::Ident(Token::new_in("first", TextSpan::new(0, 1), &ctx)), + TokenTree::Ident(Token::new_in("second", TextSpan::new(2, 3), &ctx)), + TokenTree::Ident(Token::new_in("third", TextSpan::new(4, 5), &ctx)), + TokenTree::Ident(Token::new_in("fourth", TextSpan::new(6, 7), &ctx)), + ]); + let serialized = serde_json::to_string(&original).unwrap(); + let derived: TokenStream = serde_json::from_str(serialized.as_str()).unwrap(); + assert_eq!(original, derived); + let val: serde_json::Value = serde_json::from_str(serialized.as_str()).unwrap(); + assert_eq!( + val, + serde_json::json!({ + "tokens": [ + {"Ident": {"content": "first", "span": {"start": 0, "end": 1}}}, + {"Ident": {"content": "second", "span": {"start": 2, "end": 3}}}, + {"Ident": {"content": "third", "span": {"start": 4, "end": 5}}}, + {"Ident": {"content": "fourth", "span": {"start": 6, "end": 7}}}, + ], + "metadata": { + "original_file_path": null, + "file_id": null, + "edition": null + } + }) + ); + } +} diff --git a/scarb/src/compiler/plugin/proc_macro/ffi.rs b/scarb/src/compiler/plugin/proc_macro/ffi.rs index fa93a6b57..b77e56861 100644 --- a/scarb/src/compiler/plugin/proc_macro/ffi.rs +++ b/scarb/src/compiler/plugin/proc_macro/ffi.rs @@ -164,8 +164,8 @@ impl ProcMacroInstance { token_stream: TokenStream, ) -> ProcMacroResult { // This must be manually freed with call to `free_owned_stable`. - let stable_token_stream = token_stream.into_stable(); - let stable_attr = attr.into_stable(); + let stable_token_stream = token_stream.as_stable(); + let stable_attr = attr.as_stable(); // Allocate proc macro name. let item_name = CString::new(item_name.to_string()).unwrap().into_raw(); // Call FFI interface for code expansion. 
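The `as_stable` switch above changes the FFI calling convention for token contents: rather than copying each token into a freshly allocated `CString`, the host passes borrowed `(ptr, len)` views into memory it continues to own for the duration of the call, and the plugin side copies what it needs into its arena. A reduced sketch of that convention, assuming a single synchronous call; `StableStr` is an illustrative stand-in for the `ptr`/`len` fields of `StableToken`.

// Illustrative FFI-safe view of a string: the caller retains ownership.
#[repr(C)]
struct StableStr {
    ptr: *const u8,
    len: usize,
}

// Borrow a view of caller-owned bytes; no allocation, no ownership transfer.
fn as_stable(content: &str) -> StableStr {
    StableStr {
        ptr: content.as_ptr(),
        len: content.len(),
    }
}

/// # Safety
/// `stable` must describe `len` bytes of valid UTF-8 that outlive this call.
unsafe fn from_stable_in(stable: &StableStr, out: &mut String) {
    let bytes = std::slice::from_raw_parts(stable.ptr, stable.len);
    // Copy into callee-owned storage before returning; the view is not retained.
    out.push_str(std::str::from_utf8(bytes).expect("valid UTF-8"));
}

fn main() {
    let owned = String::from("struct A {}"); // caller-owned backing memory
    let view = as_stable(&owned);
    let mut copied = String::new();
    unsafe { from_stable_in(&view, &mut copied) };
    // Nothing was freed across the boundary; `owned` is still valid here.
    assert_eq!(copied, owned);
}

Under this scheme, freeing the stable stream no longer tears down per-token `CString`s; the string bytes were never duplicated on the stable side, which is also why `Token::free_owned_stable` above only releases the span.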
diff --git a/scarb/src/compiler/plugin/proc_macro/host.rs b/scarb/src/compiler/plugin/proc_macro/host.rs index 4056191f9..76b01e365 100644 --- a/scarb/src/compiler/plugin/proc_macro/host.rs +++ b/scarb/src/compiler/plugin/proc_macro/host.rs @@ -15,8 +15,8 @@ use cairo_lang_diagnostics::ToOption; use cairo_lang_filesystem::db::Edition; use cairo_lang_filesystem::ids::CodeMapping; use cairo_lang_macro::{ - AuxData, Diagnostic, FullPathMarker, ProcMacroResult, Severity, TokenStream, - TokenStreamMetadata, + AllocationContext, AuxData, Diagnostic, FullPathMarker, ProcMacroResult, Severity, TokenStream, + TokenStreamMetadata, TokenTree, }; use cairo_lang_semantic::db::SemanticGroup; use cairo_lang_semantic::items::attribute::SemanticQueryAttrs; @@ -167,6 +167,7 @@ impl ProcMacroHostPlugin { let mut item_builder = PatchBuilder::new(db, &item_ast); let mut used_attr_names: HashSet = Default::default(); let mut all_none = true; + let ctx = AllocationContext::default(); match item_ast.clone() { ast::ModuleItem::Trait(trait_ast) => { @@ -194,13 +195,14 @@ impl ProcMacroHostPlugin { let mut token_stream_builder = TokenStreamBuilder::new(db); let attrs = func.attributes(db).elements(db); - let found = self.parse_attrs(db, &mut token_stream_builder, attrs); + let found = + self.parse_attrs(db, &mut token_stream_builder, attrs, &ctx); if let Some(name) = found.as_name() { used_attr_names.insert(name); } token_stream_builder.add_node(func.declaration(db).as_syntax_node()); token_stream_builder.add_node(func.body(db).as_syntax_node()); - let token_stream = token_stream_builder.build(); + let token_stream = token_stream_builder.build(&ctx); all_none = all_none && self.do_expand_inner_attr( @@ -255,14 +257,15 @@ impl ProcMacroHostPlugin { let mut token_stream_builder = TokenStreamBuilder::new(db); let attrs = func.attributes(db).elements(db); - let found = self.parse_attrs(db, &mut token_stream_builder, attrs); + let found = + self.parse_attrs(db, &mut token_stream_builder, attrs, &ctx); if let Some(name) = found.as_name() { used_attr_names.insert(name); } token_stream_builder.add_node(func.visibility(db).as_syntax_node()); token_stream_builder.add_node(func.declaration(db).as_syntax_node()); token_stream_builder.add_node(func.body(db).as_syntax_node()); - let token_stream = token_stream_builder.build(); + let token_stream = token_stream_builder.build(&ctx); all_none = all_none && self.do_expand_inner_attr( db, @@ -328,7 +331,7 @@ impl ProcMacroHostPlugin { let result = self.instance(input.package_id).generate_code( input.expansion.name.clone(), - args.clone(), + args, token_stream.clone(), ); @@ -348,12 +351,13 @@ impl ProcMacroHostPlugin { &self, db: &dyn SyntaxGroup, item_ast: ast::ModuleItem, + ctx: &AllocationContext, ) -> (AttrExpansionFound, TokenStream) { let mut token_stream_builder = TokenStreamBuilder::new(db); let input = match item_ast.clone() { ast::ModuleItem::Trait(trait_ast) => { let attrs = trait_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); token_stream_builder.add_node(trait_ast.visibility(db).as_syntax_node()); token_stream_builder.add_node(trait_ast.trait_kw(db).as_syntax_node()); token_stream_builder.add_node(trait_ast.name(db).as_syntax_node()); @@ -363,7 +367,7 @@ impl ProcMacroHostPlugin { } ast::ModuleItem::Impl(impl_ast) => { let attrs = impl_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, 
attrs); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); token_stream_builder.add_node(impl_ast.visibility(db).as_syntax_node()); token_stream_builder.add_node(impl_ast.impl_kw(db).as_syntax_node()); token_stream_builder.add_node(impl_ast.name(db).as_syntax_node()); @@ -375,7 +379,7 @@ impl ProcMacroHostPlugin { } ast::ModuleItem::Module(module_ast) => { let attrs = module_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); token_stream_builder.add_node(module_ast.visibility(db).as_syntax_node()); token_stream_builder.add_node(module_ast.module_kw(db).as_syntax_node()); token_stream_builder.add_node(module_ast.name(db).as_syntax_node()); @@ -384,7 +388,7 @@ impl ProcMacroHostPlugin { } ast::ModuleItem::FreeFunction(free_func_ast) => { let attrs = free_func_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); token_stream_builder.add_node(free_func_ast.visibility(db).as_syntax_node()); token_stream_builder.add_node(free_func_ast.declaration(db).as_syntax_node()); token_stream_builder.add_node(free_func_ast.body(db).as_syntax_node()); @@ -392,7 +396,7 @@ impl ProcMacroHostPlugin { } ast::ModuleItem::ExternFunction(extern_func_ast) => { let attrs = extern_func_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); token_stream_builder.add_node(extern_func_ast.visibility(db).as_syntax_node()); token_stream_builder.add_node(extern_func_ast.extern_kw(db).as_syntax_node()); token_stream_builder.add_node(extern_func_ast.declaration(db).as_syntax_node()); @@ -401,7 +405,7 @@ impl ProcMacroHostPlugin { } ast::ModuleItem::ExternType(extern_type_ast) => { let attrs = extern_type_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); token_stream_builder.add_node(extern_type_ast.visibility(db).as_syntax_node()); token_stream_builder.add_node(extern_type_ast.extern_kw(db).as_syntax_node()); token_stream_builder.add_node(extern_type_ast.type_kw(db).as_syntax_node()); @@ -412,7 +416,7 @@ impl ProcMacroHostPlugin { } ast::ModuleItem::Struct(struct_ast) => { let attrs = struct_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); token_stream_builder.add_node(struct_ast.visibility(db).as_syntax_node()); token_stream_builder.add_node(struct_ast.struct_kw(db).as_syntax_node()); token_stream_builder.add_node(struct_ast.name(db).as_syntax_node()); @@ -424,7 +428,7 @@ impl ProcMacroHostPlugin { } ast::ModuleItem::Enum(enum_ast) => { let attrs = enum_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); token_stream_builder.add_node(enum_ast.visibility(db).as_syntax_node()); token_stream_builder.add_node(enum_ast.enum_kw(db).as_syntax_node()); token_stream_builder.add_node(enum_ast.name(db).as_syntax_node()); @@ -436,7 +440,7 @@ impl ProcMacroHostPlugin { } _ => AttrExpansionFound::None, }; - let 
token_stream = token_stream_builder.build(); + let token_stream = token_stream_builder.build(ctx); (input, token_stream) } @@ -445,6 +449,7 @@ impl ProcMacroHostPlugin { db: &dyn SyntaxGroup, builder: &mut TokenStreamBuilder<'_>, attrs: Vec, + ctx: &AllocationContext, ) -> AttrExpansionFound { // This function parses attributes of the item, // checking if those attributes correspond to a procedural macro that should be fired. @@ -469,7 +474,7 @@ impl ProcMacroHostPlugin { if expansion.is_none() { let mut args_builder = TokenStreamBuilder::new(db); args_builder.add_node(attr.arguments(db).as_syntax_node()); - let args = args_builder.build(); + let args = args_builder.build(ctx); expansion = Some((found, args, attr.stable_ptr().untyped())); // Do not add the attribute for found expansion. continue; @@ -543,7 +548,6 @@ impl ProcMacroHostPlugin { let mut token_stream_builder = TokenStreamBuilder::new(db); token_stream_builder.add_node(item_ast.as_syntax_node()); token_stream_builder.with_metadata(stream_metadata.clone()); - let token_stream = token_stream_builder.build(); let mut aux_data = EmittedAuxData::default(); let mut all_diagnostics: Vec = Vec::new(); @@ -551,12 +555,14 @@ impl ProcMacroHostPlugin { let derives = self.parse_derive(db, item_ast.clone()); let any_derives = !derives.is_empty(); + let ctx = AllocationContext::default(); let mut derived_code = PatchBuilder::new(db, &item_ast); for derive in derives.iter() { + let token_stream = token_stream_builder.build(&ctx); let result = self.instance(derive.package_id).generate_code( derive.expansion.name.clone(), TokenStream::empty(), - token_stream.clone(), + token_stream, ); // Register diagnostics. @@ -576,7 +582,13 @@ impl ProcMacroHostPlugin { continue; } - derived_code.add_str(result.token_stream.to_string().as_str()); + for token in result.token_stream.tokens { + match token { + TokenTree::Ident(token) => { + derived_code.add_str(token.content.as_ref()); + } + } + } } if any_derives { @@ -625,10 +637,11 @@ impl ProcMacroHostPlugin { token_stream: TokenStream, stable_ptr: SyntaxStablePtrId, ) -> PluginResult { + let original = token_stream.to_string(); let result = self.instance(input.package_id).generate_code( input.expansion.name.clone(), - args.clone(), - token_stream.clone(), + args, + token_stream, ); // Handle token stream. @@ -654,10 +667,7 @@ impl ProcMacroHostPlugin { // In essence, `code: None, remove_original_item: false` means `ProcMacroHost` will not be // called again for this AST item. // This optimization limits the number of generated nodes a bit. - if last - && result.aux_data.is_none() - && token_stream.to_string() == result.token_stream.to_string() - { + if last && result.aux_data.is_none() && original == result.token_stream.to_string() { return PluginResult { code: None, remove_original_item: false, @@ -884,8 +894,8 @@ impl<'a> InnerAttrExpansionContext<'a> { result: ProcMacroResult, stable_ptr: SyntaxStablePtrId, ) -> String { - let expanded = result.token_stream.to_string(); - let changed = expanded.as_str() != original; + let result_str = result.token_stream.to_string(); + let changed = result_str != original; if changed { self.host @@ -902,7 +912,7 @@ impl<'a> InnerAttrExpansionContext<'a> { self.any_changed = self.any_changed || changed; - expanded + result_str } pub fn into_result( self, @@ -958,7 +968,8 @@ impl MacroPlugin for ProcMacroHostPlugin { // Expand first attribute. 
// Note that we only expand the first attribute, as we assume that the rest of the attributes // will be handled by a subsequent call to this function. - let (input, body) = self.parse_attribute(db, item_ast.clone()); + let ctx = AllocationContext::default(); + let (input, body) = self.parse_attribute(db, item_ast.clone(), &ctx); if let Some(result) = match input { AttrExpansionFound::Last { @@ -1078,11 +1089,12 @@ impl InlineMacroExprPlugin for ProcMacroInlinePlugin { syntax: &ast::ExprInlineMacro, _metadata: &MacroPluginMetadata<'_>, ) -> InlinePluginResult { + let ctx = AllocationContext::default(); let stable_ptr = syntax.clone().stable_ptr().untyped(); let arguments = syntax.arguments(db); let mut token_stream_builder = TokenStreamBuilder::new(db); token_stream_builder.add_node(arguments.as_syntax_node()); - let token_stream = token_stream_builder.build(); + let token_stream = token_stream_builder.build(&ctx); let result = self.instance().generate_code( self.expansion.name.clone(), TokenStream::empty(), diff --git a/scarb/src/compiler/plugin/proc_macro/types.rs b/scarb/src/compiler/plugin/proc_macro/types.rs index dc8f5439b..30ad3e2db 100644 --- a/scarb/src/compiler/plugin/proc_macro/types.rs +++ b/scarb/src/compiler/plugin/proc_macro/types.rs @@ -1,4 +1,6 @@ -use cairo_lang_macro::{TextSpan, Token, TokenStream, TokenStreamMetadata, TokenTree}; +use cairo_lang_macro::{ + AllocationContext, TextSpan, Token, TokenStream, TokenStreamMetadata, TokenTree, +}; use cairo_lang_syntax::node::{db::SyntaxGroup, SyntaxNode}; /// Helps creating TokenStream based on multiple SyntaxNodes, @@ -26,29 +28,28 @@ impl<'a> TokenStreamBuilder<'a> { self.metadata = Some(metadata); } - pub fn build(self) -> TokenStream { - let mut result: Vec = Vec::default(); - for node in self.nodes.iter() { - let leaves = node.tokens(self.db); - let tokens = - leaves.map(|node| TokenTree::Ident(self.token_from_syntax_node(node.clone()))); - result.extend(tokens); - } + pub fn build(&self, ctx: &AllocationContext) -> TokenStream { + let result: Vec = self + .nodes + .iter() + .flat_map(|node| { + let leaves = node.tokens(self.db); + leaves.map(|node| TokenTree::Ident(self.token_from_syntax_node(node.clone(), ctx))) + }) + .collect(); - match self.metadata { - Some(metadata) => TokenStream::new(result.clone()).with_metadata(metadata.clone()), - None => TokenStream::new(result.clone()), + match self.metadata.as_ref() { + Some(metadata) => TokenStream::new(result).with_metadata(metadata.clone()), + None => TokenStream::new(result), } } - pub fn token_from_syntax_node(&self, node: SyntaxNode) -> Token { + pub fn token_from_syntax_node(&self, node: SyntaxNode, ctx: &AllocationContext) -> Token { let span = node.span(self.db).to_str_range(); - Token::new( - node.get_text(self.db), - TextSpan { - start: span.start, - end: span.end, - }, - ) + let span = TextSpan { + start: span.start, + end: span.end, + }; + Token::new_in(node.get_text(self.db), span, ctx) } } diff --git a/scarb/tests/build_cairo_plugin.rs b/scarb/tests/build_cairo_plugin.rs index 7ccbbcd21..f2b010ad9 100644 --- a/scarb/tests/build_cairo_plugin.rs +++ b/scarb/tests/build_cairo_plugin.rs @@ -767,16 +767,7 @@ fn cannot_duplicate_macros_across_packages() { fn cannot_use_undefined_macro() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); - CairoPluginProjectBuilder::default() - .lib_rs(indoc! 
{r##" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; - - #[attribute_macro] - pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - ProcMacroResult::new(token_stream) - } - "##}) - .build(&t); + CairoPluginProjectBuilder::default().build(&t); let project = temp.child("hello"); ProjectBuilder::start() .name("hello") diff --git a/scarb/tests/package.rs b/scarb/tests/package.rs index 2ef36f8a7..eaa42608e 100644 --- a/scarb/tests/package.rs +++ b/scarb/tests/package.rs @@ -492,6 +492,7 @@ fn workspace() { #[test] fn cairo_plugin() { let t = TempDir::new().unwrap(); + // Note this will be packaged with `cairo-lang-macro` from crates, not the local one. CairoPluginProjectBuilder::default().build(&t); Scarb::quick_snapbox() diff --git a/scarb/tests/proc_macro_server.rs b/scarb/tests/proc_macro_server.rs index c2a29d2fc..8950a5b12 100644 --- a/scarb/tests/proc_macro_server.rs +++ b/scarb/tests/proc_macro_server.rs @@ -90,7 +90,7 @@ fn expand_attribute() { attr: "rename_to_very_new_name".to_string(), args: TokenStream::empty(), item: TokenStream::new(vec![TokenTree::Ident(Token::new( - "fn some_test_fn(){}".to_string(), + "fn some_test_fn(){}", TextSpan::default(), ))]), }) @@ -124,7 +124,7 @@ fn expand_derive() { let mut proc_macro_server = ProcMacroClient::new(&project); let item = TokenStream::new(vec![TokenTree::Ident(Token::new( - "fn some_test_fn(){}".to_string(), + "fn some_test_fn(){}", TextSpan::default(), ))]); @@ -181,7 +181,7 @@ fn expand_inline() { .request_and_wait::(ExpandInlineMacroParams { name: "replace_all_15_with_25".to_string(), args: TokenStream::new(vec![TokenTree::Ident(Token::new( - "struct A { field: 15 , other_field: macro_call!(12)}".to_string(), + "struct A { field: 15 , other_field: macro_call!(12)}", TextSpan::default(), ))]), }) From 6814904d03633e114faaa201671a368767369114 Mon Sep 17 00:00:00 2001 From: maciektr Date: Thu, 21 Nov 2024 17:53:31 +0100 Subject: [PATCH 05/13] Implement Debug for InternedStr (#1760) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit commit-id:919c7f28 --- **Stack**: - #1749 - #1748 - #1745 - #1747 - #1760 ⬅ ⚠️ *Part of a stack created by [spr](https://github.com/ejoffe/spr). Do not merge manually using the UI - doing so may have unexpected results.* --- plugins/cairo-lang-macro/src/types/token.rs | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/plugins/cairo-lang-macro/src/types/token.rs b/plugins/cairo-lang-macro/src/types/token.rs index 95dab2cb5..6d1bf5adc 100644 --- a/plugins/cairo-lang-macro/src/types/token.rs +++ b/plugins/cairo-lang-macro/src/types/token.rs @@ -1,6 +1,6 @@ use crate::CONTEXT; use bumpalo::Bump; -use std::fmt::Display; +use std::fmt::{Debug, Display, Write}; use std::hash::{Hash, Hasher}; use std::ops::Deref; use std::rc::Rc; @@ -132,13 +132,21 @@ impl Token { /// and a guard which keeps the buffer alive. /// This way we do not need to allocate a new string, /// but also do not need to worry about the lifetime of the string. -#[derive(Debug, Clone)] +#[derive(Clone)] pub struct InternedStr { ptr: *const str, // Holding a rc to the underlying buffer, so that ptr will always point to valid memory. 
_bump: Rc, } +impl Debug for InternedStr { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_char('"')?; + f.write_str(self.as_ref())?; + f.write_char('"') + } +} + impl InternedStr { #[allow(unknown_lints)] #[allow(private_interfaces)] From f5bd4c7522fc95e4cd856ff9ea177f42b26b455b Mon Sep 17 00:00:00 2001 From: maciektr Date: Thu, 21 Nov 2024 17:54:57 +0100 Subject: [PATCH 06/13] Refactor: split proc macro host into submodules (#1747) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit commit-id:6d8f33f9 --- **Stack**: - #1749 - #1748 - #1745 - #1747 ⬅ ⚠️ *Part of a stack created by [spr](https://github.com/ejoffe/spr). Do not merge manually using the UI - doing so may have unexpected results.* --- scarb/src/compiler/plugin/proc_macro/ffi.rs | 3 +- scarb/src/compiler/plugin/proc_macro/host.rs | 1194 ----------------- .../plugin/proc_macro/host/attribute.rs | 537 ++++++++ .../plugin/proc_macro/host/aux_data.rs | 90 ++ .../compiler/plugin/proc_macro/host/derive.rs | 148 ++ .../compiler/plugin/proc_macro/host/inline.rs | 101 ++ .../compiler/plugin/proc_macro/host/mod.rs | 269 ++++ .../compiler/plugin/proc_macro/host/post.rs | 113 ++ scarb/tests/proc_macro_prebuilt.rs | 15 +- 9 files changed, 1271 insertions(+), 1199 deletions(-) delete mode 100644 scarb/src/compiler/plugin/proc_macro/host.rs create mode 100644 scarb/src/compiler/plugin/proc_macro/host/attribute.rs create mode 100644 scarb/src/compiler/plugin/proc_macro/host/aux_data.rs create mode 100644 scarb/src/compiler/plugin/proc_macro/host/derive.rs create mode 100644 scarb/src/compiler/plugin/proc_macro/host/inline.rs create mode 100644 scarb/src/compiler/plugin/proc_macro/host/mod.rs create mode 100644 scarb/src/compiler/plugin/proc_macro/host/post.rs diff --git a/scarb/src/compiler/plugin/proc_macro/ffi.rs b/scarb/src/compiler/plugin/proc_macro/ffi.rs index b77e56861..5f45de181 100644 --- a/scarb/src/compiler/plugin/proc_macro/ffi.rs +++ b/scarb/src/compiler/plugin/proc_macro/ffi.rs @@ -1,3 +1,4 @@ +use crate::compiler::plugin::proc_macro::compilation::SharedLibraryProvider; use crate::core::{Package, PackageId}; use anyhow::{ensure, Context, Result}; use cairo_lang_macro::{ @@ -15,9 +16,7 @@ use std::ffi::{c_char, CStr, CString}; use std::fmt::Debug; use std::slice; -use crate::compiler::plugin::proc_macro::compilation::SharedLibraryProvider; use crate::compiler::plugin::proc_macro::ProcMacroAuxData; - #[cfg(not(windows))] use libloading::os::unix::Symbol as RawSymbol; #[cfg(windows)] diff --git a/scarb/src/compiler/plugin/proc_macro/host.rs b/scarb/src/compiler/plugin/proc_macro/host.rs deleted file mode 100644 index 76b01e365..000000000 --- a/scarb/src/compiler/plugin/proc_macro/host.rs +++ /dev/null @@ -1,1194 +0,0 @@ -use crate::compiler::plugin::proc_macro::compilation::SharedLibraryProvider; -use crate::compiler::plugin::proc_macro::{ - Expansion, ExpansionKind, ProcMacroInstance, TokenStreamBuilder, -}; -use crate::core::{edition_variant, Config, Package, PackageId}; -use anyhow::{ensure, Context, Result}; -use cairo_lang_defs::ids::{ModuleItemId, TopLevelLanguageElementId}; -use cairo_lang_defs::patcher::{PatchBuilder, RewriteNode}; -use cairo_lang_defs::plugin::{ - DynGeneratedFileAuxData, GeneratedFileAuxData, MacroPlugin, MacroPluginMetadata, - PluginGeneratedFile, PluginResult, -}; -use cairo_lang_defs::plugin::{InlineMacroExprPlugin, InlinePluginResult, PluginDiagnostic}; -use cairo_lang_diagnostics::ToOption; -use cairo_lang_filesystem::db::Edition; 
-use cairo_lang_filesystem::ids::CodeMapping; -use cairo_lang_macro::{ - AllocationContext, AuxData, Diagnostic, FullPathMarker, ProcMacroResult, Severity, TokenStream, - TokenStreamMetadata, TokenTree, -}; -use cairo_lang_semantic::db::SemanticGroup; -use cairo_lang_semantic::items::attribute::SemanticQueryAttrs; -use cairo_lang_semantic::plugin::PluginSuite; -use cairo_lang_syntax::attribute::structured::{ - Attribute, AttributeArgVariant, AttributeStructurize, -}; -use cairo_lang_syntax::node::ast::{Expr, ImplItem, MaybeImplBody, MaybeTraitBody, PathSegment}; -use cairo_lang_syntax::node::db::SyntaxGroup; -use cairo_lang_syntax::node::helpers::QueryAttrs; -use cairo_lang_syntax::node::ids::SyntaxStablePtrId; -use cairo_lang_syntax::node::{ast, Terminal, TypedStablePtr, TypedSyntaxNode}; -use convert_case::{Case, Casing}; -use itertools::Itertools; -use scarb_stable_hash::short_hash; -use smol_str::SmolStr; -use std::any::Any; -use std::collections::{HashMap, HashSet}; -use std::fmt::Debug; -use std::sync::{Arc, OnceLock, RwLock}; -use std::vec::IntoIter; -use tracing::{debug, trace_span}; - -const FULL_PATH_MARKER_KEY: &str = "macro::full_path_marker"; -const DERIVE_ATTR: &str = "derive"; - -/// A Cairo compiler plugin controlling the procedural macro execution. -/// -/// This plugin decides which macro plugins (if any) should be applied to the processed AST item. -/// It then redirects the item to the appropriate macro plugin for code expansion. -#[derive(Debug)] -pub struct ProcMacroHostPlugin { - macros: Vec>, - full_path_markers: RwLock>>, -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct ProcMacroId { - pub package_id: PackageId, - pub expansion: Expansion, -} - -impl ProcMacroId { - pub fn new(package_id: PackageId, expansion: Expansion) -> Self { - Self { - package_id, - expansion, - } - } -} - -#[derive(Clone, Debug, Eq, PartialEq)] -pub struct ProcMacroAuxData { - value: Vec, - macro_id: ProcMacroId, -} - -impl ProcMacroAuxData { - pub fn new(value: Vec, macro_id: ProcMacroId) -> Self { - Self { value, macro_id } - } -} - -impl From for AuxData { - fn from(data: ProcMacroAuxData) -> Self { - Self::new(data.value) - } -} - -#[derive(Debug, Clone, Default)] -pub struct EmittedAuxData(Vec); - -impl GeneratedFileAuxData for EmittedAuxData { - fn as_any(&self) -> &dyn Any { - self - } - - fn eq(&self, other: &dyn GeneratedFileAuxData) -> bool { - self.0 == other.as_any().downcast_ref::().unwrap().0 - } -} - -impl EmittedAuxData { - pub fn new(aux_data: ProcMacroAuxData) -> Self { - Self(vec![aux_data]) - } - - pub fn push(&mut self, aux_data: ProcMacroAuxData) { - self.0.push(aux_data); - } - - pub fn is_empty(&self) -> bool { - self.0.is_empty() - } -} - -impl IntoIterator for EmittedAuxData { - type Item = ProcMacroAuxData; - type IntoIter = IntoIter; - - fn into_iter(self) -> IntoIter { - self.0.into_iter() - } -} - -impl ProcMacroHostPlugin { - pub fn try_new(macros: Vec>) -> Result { - // Validate expansions. 
- let mut expansions = macros - .iter() - .flat_map(|m| { - m.get_expansions() - .iter() - .map(|e| ProcMacroId::new(m.package_id(), e.clone())) - .collect_vec() - }) - .collect::>(); - expansions.sort_unstable_by_key(|e| e.expansion.name.clone()); - ensure!( - expansions - .windows(2) - .all(|w| w[0].expansion.name != w[1].expansion.name), - "duplicate expansions defined for procedural macros: {duplicates}", - duplicates = expansions - .windows(2) - .filter(|w| w[0].expansion.name == w[1].expansion.name) - .map(|w| format!( - "{} ({} and {})", - w[0].expansion.name.as_str(), - w[0].package_id, - w[1].package_id - )) - .collect::>() - .join(", ") - ); - Ok(Self { - macros, - full_path_markers: RwLock::new(Default::default()), - }) - } - - fn expand_inner_attr( - &self, - db: &dyn SyntaxGroup, - item_ast: ast::ModuleItem, - ) -> InnerAttrExpansionResult { - let mut context = InnerAttrExpansionContext::new(self); - let mut item_builder = PatchBuilder::new(db, &item_ast); - let mut used_attr_names: HashSet = Default::default(); - let mut all_none = true; - let ctx = AllocationContext::default(); - - match item_ast.clone() { - ast::ModuleItem::Trait(trait_ast) => { - item_builder.add_node(trait_ast.attributes(db).as_syntax_node()); - item_builder.add_node(trait_ast.visibility(db).as_syntax_node()); - item_builder.add_node(trait_ast.trait_kw(db).as_syntax_node()); - item_builder.add_node(trait_ast.name(db).as_syntax_node()); - item_builder.add_node(trait_ast.generic_params(db).as_syntax_node()); - - // Parser attributes for inner functions. - match trait_ast.body(db) { - MaybeTraitBody::None(terminal) => { - item_builder.add_node(terminal.as_syntax_node()); - InnerAttrExpansionResult::None - } - MaybeTraitBody::Some(body) => { - item_builder.add_node(body.lbrace(db).as_syntax_node()); - - let item_list = body.items(db); - for item in item_list.elements(db).iter() { - let ast::TraitItem::Function(func) = item else { - item_builder.add_node(item.as_syntax_node()); - continue; - }; - - let mut token_stream_builder = TokenStreamBuilder::new(db); - let attrs = func.attributes(db).elements(db); - let found = - self.parse_attrs(db, &mut token_stream_builder, attrs, &ctx); - if let Some(name) = found.as_name() { - used_attr_names.insert(name); - } - token_stream_builder.add_node(func.declaration(db).as_syntax_node()); - token_stream_builder.add_node(func.body(db).as_syntax_node()); - let token_stream = token_stream_builder.build(&ctx); - - all_none = all_none - && self.do_expand_inner_attr( - db, - &mut context, - &mut item_builder, - found, - func, - token_stream, - ); - } - - item_builder.add_node(body.rbrace(db).as_syntax_node()); - - if all_none { - InnerAttrExpansionResult::None - } else { - let (code, mappings) = item_builder.build(); - InnerAttrExpansionResult::Some(context.into_result( - code, - mappings, - used_attr_names.into_iter().collect(), - )) - } - } - } - } - - ast::ModuleItem::Impl(impl_ast) => { - item_builder.add_node(impl_ast.attributes(db).as_syntax_node()); - item_builder.add_node(impl_ast.visibility(db).as_syntax_node()); - item_builder.add_node(impl_ast.impl_kw(db).as_syntax_node()); - item_builder.add_node(impl_ast.name(db).as_syntax_node()); - item_builder.add_node(impl_ast.generic_params(db).as_syntax_node()); - item_builder.add_node(impl_ast.of_kw(db).as_syntax_node()); - item_builder.add_node(impl_ast.trait_path(db).as_syntax_node()); - - match impl_ast.body(db) { - MaybeImplBody::None(terminal) => { - item_builder.add_node(terminal.as_syntax_node()); - 
InnerAttrExpansionResult::None - } - MaybeImplBody::Some(body) => { - item_builder.add_node(body.lbrace(db).as_syntax_node()); - - let items = body.items(db); - for item in items.elements(db) { - let ImplItem::Function(func) = item else { - item_builder.add_node(item.as_syntax_node()); - continue; - }; - - let mut token_stream_builder = TokenStreamBuilder::new(db); - let attrs = func.attributes(db).elements(db); - let found = - self.parse_attrs(db, &mut token_stream_builder, attrs, &ctx); - if let Some(name) = found.as_name() { - used_attr_names.insert(name); - } - token_stream_builder.add_node(func.visibility(db).as_syntax_node()); - token_stream_builder.add_node(func.declaration(db).as_syntax_node()); - token_stream_builder.add_node(func.body(db).as_syntax_node()); - let token_stream = token_stream_builder.build(&ctx); - all_none = all_none - && self.do_expand_inner_attr( - db, - &mut context, - &mut item_builder, - found, - &func, - token_stream, - ); - } - - item_builder.add_node(body.rbrace(db).as_syntax_node()); - - if all_none { - InnerAttrExpansionResult::None - } else { - let (code, mappings) = item_builder.build(); - InnerAttrExpansionResult::Some(context.into_result( - code, - mappings, - used_attr_names.into_iter().collect(), - )) - } - } - } - } - _ => InnerAttrExpansionResult::None, - } - } - - fn do_expand_inner_attr( - &self, - db: &dyn SyntaxGroup, - context: &mut InnerAttrExpansionContext<'_>, - item_builder: &mut PatchBuilder<'_>, - found: AttrExpansionFound, - func: &impl TypedSyntaxNode, - token_stream: TokenStream, - ) -> bool { - let mut all_none = true; - let (input, args, stable_ptr) = match found { - AttrExpansionFound::Last { - expansion, - args, - stable_ptr, - } => { - all_none = false; - (expansion, args, stable_ptr) - } - AttrExpansionFound::Some { - expansion, - args, - stable_ptr, - } => { - all_none = false; - (expansion, args, stable_ptr) - } - AttrExpansionFound::None => { - item_builder.add_node(func.as_syntax_node()); - return all_none; - } - }; - - let result = self.instance(input.package_id).generate_code( - input.expansion.name.clone(), - args, - token_stream.clone(), - ); - - let expanded = context.register_result(token_stream.to_string(), input, result, stable_ptr); - item_builder.add_modified(RewriteNode::Mapped { - origin: func.as_syntax_node().span(db), - node: Box::new(RewriteNode::Text(expanded.to_string())), - }); - - all_none - } - - /// Find first attribute procedural macros that should be expanded. - /// - /// Remove the attribute from the code. 
- fn parse_attribute( - &self, - db: &dyn SyntaxGroup, - item_ast: ast::ModuleItem, - ctx: &AllocationContext, - ) -> (AttrExpansionFound, TokenStream) { - let mut token_stream_builder = TokenStreamBuilder::new(db); - let input = match item_ast.clone() { - ast::ModuleItem::Trait(trait_ast) => { - let attrs = trait_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); - token_stream_builder.add_node(trait_ast.visibility(db).as_syntax_node()); - token_stream_builder.add_node(trait_ast.trait_kw(db).as_syntax_node()); - token_stream_builder.add_node(trait_ast.name(db).as_syntax_node()); - token_stream_builder.add_node(trait_ast.generic_params(db).as_syntax_node()); - token_stream_builder.add_node(trait_ast.body(db).as_syntax_node()); - expansion - } - ast::ModuleItem::Impl(impl_ast) => { - let attrs = impl_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); - token_stream_builder.add_node(impl_ast.visibility(db).as_syntax_node()); - token_stream_builder.add_node(impl_ast.impl_kw(db).as_syntax_node()); - token_stream_builder.add_node(impl_ast.name(db).as_syntax_node()); - token_stream_builder.add_node(impl_ast.generic_params(db).as_syntax_node()); - token_stream_builder.add_node(impl_ast.of_kw(db).as_syntax_node()); - token_stream_builder.add_node(impl_ast.trait_path(db).as_syntax_node()); - token_stream_builder.add_node(impl_ast.body(db).as_syntax_node()); - expansion - } - ast::ModuleItem::Module(module_ast) => { - let attrs = module_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); - token_stream_builder.add_node(module_ast.visibility(db).as_syntax_node()); - token_stream_builder.add_node(module_ast.module_kw(db).as_syntax_node()); - token_stream_builder.add_node(module_ast.name(db).as_syntax_node()); - token_stream_builder.add_node(module_ast.body(db).as_syntax_node()); - expansion - } - ast::ModuleItem::FreeFunction(free_func_ast) => { - let attrs = free_func_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); - token_stream_builder.add_node(free_func_ast.visibility(db).as_syntax_node()); - token_stream_builder.add_node(free_func_ast.declaration(db).as_syntax_node()); - token_stream_builder.add_node(free_func_ast.body(db).as_syntax_node()); - expansion - } - ast::ModuleItem::ExternFunction(extern_func_ast) => { - let attrs = extern_func_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); - token_stream_builder.add_node(extern_func_ast.visibility(db).as_syntax_node()); - token_stream_builder.add_node(extern_func_ast.extern_kw(db).as_syntax_node()); - token_stream_builder.add_node(extern_func_ast.declaration(db).as_syntax_node()); - token_stream_builder.add_node(extern_func_ast.semicolon(db).as_syntax_node()); - expansion - } - ast::ModuleItem::ExternType(extern_type_ast) => { - let attrs = extern_type_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); - token_stream_builder.add_node(extern_type_ast.visibility(db).as_syntax_node()); - token_stream_builder.add_node(extern_type_ast.extern_kw(db).as_syntax_node()); - token_stream_builder.add_node(extern_type_ast.type_kw(db).as_syntax_node()); - token_stream_builder.add_node(extern_type_ast.name(db).as_syntax_node()); - 
token_stream_builder.add_node(extern_type_ast.generic_params(db).as_syntax_node()); - token_stream_builder.add_node(extern_type_ast.semicolon(db).as_syntax_node()); - expansion - } - ast::ModuleItem::Struct(struct_ast) => { - let attrs = struct_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); - token_stream_builder.add_node(struct_ast.visibility(db).as_syntax_node()); - token_stream_builder.add_node(struct_ast.struct_kw(db).as_syntax_node()); - token_stream_builder.add_node(struct_ast.name(db).as_syntax_node()); - token_stream_builder.add_node(struct_ast.generic_params(db).as_syntax_node()); - token_stream_builder.add_node(struct_ast.lbrace(db).as_syntax_node()); - token_stream_builder.add_node(struct_ast.members(db).as_syntax_node()); - token_stream_builder.add_node(struct_ast.rbrace(db).as_syntax_node()); - expansion - } - ast::ModuleItem::Enum(enum_ast) => { - let attrs = enum_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); - token_stream_builder.add_node(enum_ast.visibility(db).as_syntax_node()); - token_stream_builder.add_node(enum_ast.enum_kw(db).as_syntax_node()); - token_stream_builder.add_node(enum_ast.name(db).as_syntax_node()); - token_stream_builder.add_node(enum_ast.generic_params(db).as_syntax_node()); - token_stream_builder.add_node(enum_ast.lbrace(db).as_syntax_node()); - token_stream_builder.add_node(enum_ast.variants(db).as_syntax_node()); - token_stream_builder.add_node(enum_ast.rbrace(db).as_syntax_node()); - expansion - } - _ => AttrExpansionFound::None, - }; - let token_stream = token_stream_builder.build(ctx); - (input, token_stream) - } - - fn parse_attrs( - &self, - db: &dyn SyntaxGroup, - builder: &mut TokenStreamBuilder<'_>, - attrs: Vec, - ctx: &AllocationContext, - ) -> AttrExpansionFound { - // This function parses attributes of the item, - // checking if those attributes correspond to a procedural macro that should be fired. - // The proc macro attribute found is removed from attributes list, - // while other attributes are appended to the `PathBuilder` passed as an argument. - - // Note this function does not affect the executable attributes, - // as it only pulls `ExpansionKind::Attr` from the plugin. - // This means that executable attributes will neither be removed from the item, - // nor will they cause the item to be rewritten. - let mut expansion = None; - let mut last = true; - for attr in attrs { - // We ensure that this flag is changed *after* the expansion is found. - if last { - let structured_attr = attr.clone().structurize(db); - let found = self.find_expansion(&Expansion::new( - structured_attr.id.clone(), - ExpansionKind::Attr, - )); - if let Some(found) = found { - if expansion.is_none() { - let mut args_builder = TokenStreamBuilder::new(db); - args_builder.add_node(attr.arguments(db).as_syntax_node()); - let args = args_builder.build(ctx); - expansion = Some((found, args, attr.stable_ptr().untyped())); - // Do not add the attribute for found expansion. - continue; - } else { - last = false; - } - } - } - builder.add_node(attr.as_syntax_node()); - } - match (expansion, last) { - (Some((expansion, args, stable_ptr)), true) => AttrExpansionFound::Last { - expansion, - args, - stable_ptr, - }, - (Some((expansion, args, stable_ptr)), false) => AttrExpansionFound::Some { - expansion, - args, - stable_ptr, - }, - (None, _) => AttrExpansionFound::None, - } - } - - /// Handle `#[derive(...)]` attribute. 
- /// - /// Returns a list of expansions that this plugin should apply. - fn parse_derive(&self, db: &dyn SyntaxGroup, item_ast: ast::ModuleItem) -> Vec { - let attrs = match item_ast { - ast::ModuleItem::Struct(struct_ast) => Some(struct_ast.query_attr(db, DERIVE_ATTR)), - ast::ModuleItem::Enum(enum_ast) => Some(enum_ast.query_attr(db, DERIVE_ATTR)), - _ => None, - }; - - attrs - .unwrap_or_default() - .iter() - .map(|attr| attr.clone().structurize(db)) - .flat_map(|attr| attr.args.into_iter()) - .filter_map(|attr| { - let AttributeArgVariant::Unnamed(value) = attr.clone().variant else { - return None; - }; - let Expr::Path(path) = value else { - return None; - }; - let path = path.elements(db); - let path = path.last()?; - let PathSegment::Simple(segment) = path else { - return None; - }; - let ident = segment.ident(db); - let value = ident.text(db).to_string(); - - self.find_expansion(&Expansion::new( - value.to_case(Case::Snake), - ExpansionKind::Derive, - )) - }) - .collect_vec() - } - - fn expand_derives( - &self, - db: &dyn SyntaxGroup, - item_ast: ast::ModuleItem, - stream_metadata: TokenStreamMetadata, - ) -> Option { - let stable_ptr = item_ast.clone().stable_ptr().untyped(); - let mut token_stream_builder = TokenStreamBuilder::new(db); - token_stream_builder.add_node(item_ast.as_syntax_node()); - token_stream_builder.with_metadata(stream_metadata.clone()); - let mut aux_data = EmittedAuxData::default(); - let mut all_diagnostics: Vec = Vec::new(); - - // All derives to be applied. - let derives = self.parse_derive(db, item_ast.clone()); - let any_derives = !derives.is_empty(); - - let ctx = AllocationContext::default(); - let mut derived_code = PatchBuilder::new(db, &item_ast); - for derive in derives.iter() { - let token_stream = token_stream_builder.build(&ctx); - let result = self.instance(derive.package_id).generate_code( - derive.expansion.name.clone(), - TokenStream::empty(), - token_stream, - ); - - // Register diagnostics. - all_diagnostics.extend(result.diagnostics); - - // Register aux data. - if let Some(new_aux_data) = result.aux_data { - aux_data.push(ProcMacroAuxData::new( - new_aux_data.into(), - ProcMacroId::new(derive.package_id, derive.expansion.clone()), - )); - } - - if result.token_stream.is_empty() { - // No code has been generated. - // We do not need to do anything. - continue; - } - - for token in result.token_stream.tokens { - match token { - TokenTree::Ident(token) => { - derived_code.add_str(token.content.as_ref()); - } - } - } - } - - if any_derives { - let derived_code = derived_code.build().0; - return Some(PluginResult { - code: if derived_code.is_empty() { - None - } else { - let msg = if derives.len() == 1 { - "the derive macro" - } else { - "one of the derive macros" - }; - let derive_names = derives - .iter() - .map(|derive| derive.expansion.name.to_string()) - .join("`, `"); - let note = format!("this error originates in {msg}: `{derive_names}`"); - Some(PluginGeneratedFile { - name: "proc_macro_derive".into(), - code_mappings: Vec::new(), - content: derived_code, - aux_data: if aux_data.is_empty() { - None - } else { - Some(DynGeneratedFileAuxData::new(aux_data)) - }, - diagnostics_note: Some(note), - }) - }, - diagnostics: into_cairo_diagnostics(all_diagnostics, stable_ptr), - // Note that we don't remove the original item here, unlike for attributes. - // We do not add the original code to the generated file either. 
- remove_original_item: false, - }); - } - - None - } - - fn expand_attribute( - &self, - input: ProcMacroId, - last: bool, - args: TokenStream, - token_stream: TokenStream, - stable_ptr: SyntaxStablePtrId, - ) -> PluginResult { - let original = token_stream.to_string(); - let result = self.instance(input.package_id).generate_code( - input.expansion.name.clone(), - args, - token_stream, - ); - - // Handle token stream. - if result.token_stream.is_empty() { - // Remove original code - return PluginResult { - diagnostics: into_cairo_diagnostics(result.diagnostics, stable_ptr), - code: None, - remove_original_item: true, - }; - } - - // Full path markers require code modification. - self.register_full_path_markers(input.package_id, result.full_path_markers.clone()); - - // This is a minor optimization. - // If the expanded macro attribute is the only one that will be expanded by `ProcMacroHost` - // in this `generate_code` call (i.e. all the other macro attributes has been expanded by - // previous calls), and the expansion did not produce any changes, we can skip rewriting the - // expanded node by simply returning no generated code, and leaving the original item as is. - // However, if we have other macro attributes to expand, we must rewrite the node even if no - // changes have been produced, so that we can parse the attributes once again and expand them. - // In essence, `code: None, remove_original_item: false` means `ProcMacroHost` will not be - // called again for this AST item. - // This optimization limits the number of generated nodes a bit. - if last && result.aux_data.is_none() && original == result.token_stream.to_string() { - return PluginResult { - code: None, - remove_original_item: false, - diagnostics: into_cairo_diagnostics(result.diagnostics, stable_ptr), - }; - } - - let file_name = format!("proc_{}", input.expansion.name); - let content = result.token_stream.to_string(); - PluginResult { - code: Some(PluginGeneratedFile { - name: file_name.into(), - code_mappings: Vec::new(), - content, - diagnostics_note: Some(format!( - "this error originates in the attribute macro: `{}`", - input.expansion.name - )), - aux_data: result.aux_data.map(|new_aux_data| { - DynGeneratedFileAuxData::new(EmittedAuxData::new(ProcMacroAuxData::new( - new_aux_data.into(), - input, - ))) - }), - }), - diagnostics: into_cairo_diagnostics(result.diagnostics, stable_ptr), - remove_original_item: true, - } - } - - fn find_expansion(&self, expansion: &Expansion) -> Option { - self.macros - .iter() - .find(|m| m.get_expansions().contains(expansion)) - .map(|m| m.package_id()) - .map(|package_id| ProcMacroId::new(package_id, expansion.clone())) - } - - pub fn build_plugin_suite(macro_host: Arc) -> PluginSuite { - let mut suite = PluginSuite::default(); - // Register inline macro plugins. - for proc_macro in ¯o_host.macros { - let expansions = proc_macro - .get_expansions() - .iter() - .filter(|exp| matches!(exp.kind, ExpansionKind::Inline)); - for expansion in expansions { - let plugin = Arc::new(ProcMacroInlinePlugin::new( - proc_macro.clone(), - expansion.clone(), - )); - suite.add_inline_macro_plugin_ex(expansion.name.as_str(), plugin); - } - } - // Register procedural macro host plugin. 
- suite.add_plugin_ex(macro_host); - suite - } - - #[tracing::instrument(level = "trace", skip_all)] - pub fn post_process(&self, db: &dyn SemanticGroup) -> Result<()> { - let markers = self.collect_full_path_markers(db); - - let aux_data = self.collect_aux_data(db); - for instance in self.macros.iter() { - let _ = trace_span!( - "post_process_callback", - instance = %instance.package_id() - ) - .entered(); - let instance_markers = self - .full_path_markers - .read() - .unwrap() - .get(&instance.package_id()) - .cloned() - .unwrap_or_default(); - let markers_for_instance = markers - .iter() - .filter(|(key, _)| instance_markers.contains(key)) - .map(|(key, full_path)| FullPathMarker { - key: key.clone(), - full_path: full_path.clone(), - }) - .collect_vec(); - let data = aux_data - .get(&instance.package_id()) - .cloned() - .unwrap_or_default(); - debug!("calling post processing callback with: {data:?}"); - instance.post_process_callback(data.clone(), markers_for_instance); - } - Ok(()) - } - - fn collect_full_path_markers(&self, db: &dyn SemanticGroup) -> HashMap { - let mut markers: HashMap = HashMap::new(); - // FULL_PATH_MARKER_KEY - for crate_id in db.crates() { - let modules = db.crate_modules(crate_id); - for module_id in modules.iter() { - let Ok(module_items) = db.module_items(*module_id) else { - continue; - }; - for item_id in module_items.iter() { - let attr = match item_id { - ModuleItemId::Struct(id) => { - id.query_attr(db, FULL_PATH_MARKER_KEY).to_option() - } - ModuleItemId::Enum(id) => { - id.query_attr(db, FULL_PATH_MARKER_KEY).to_option() - } - ModuleItemId::FreeFunction(id) => { - id.query_attr(db, FULL_PATH_MARKER_KEY).to_option() - } - _ => None, - }; - - let keys = attr - .unwrap_or_default() - .into_iter() - .filter_map(|attr| Self::extract_key(db, attr)) - .collect_vec(); - let full_path = item_id.full_path(db.upcast()); - for key in keys { - markers.insert(key, full_path.clone()); - } - } - } - } - markers - } - - fn extract_key(db: &dyn SemanticGroup, attr: Attribute) -> Option { - if attr.id != FULL_PATH_MARKER_KEY { - return None; - } - - for arg in attr.args.clone() { - if let AttributeArgVariant::Unnamed(Expr::String(s)) = arg.variant { - return s.string_value(db.upcast()); - } - } - - None - } - - fn collect_aux_data( - &self, - db: &dyn SemanticGroup, - ) -> HashMap> { - let mut data = Vec::new(); - for crate_id in db.crates() { - let crate_modules = db.crate_modules(crate_id); - for module in crate_modules.iter() { - let file_infos = db.module_generated_file_aux_data(*module); - if let Ok(file_infos) = file_infos { - for file_info in file_infos.iter() { - let aux_data = file_info - .as_ref() - .and_then(|ad| ad.as_any().downcast_ref::()); - if let Some(aux_data) = aux_data { - data.extend(aux_data.clone().into_iter()); - } - } - } - } - } - data.into_iter() - .into_group_map_by(|d| d.macro_id.package_id) - } - - pub fn instance(&self, package_id: PackageId) -> &ProcMacroInstance { - self.macros - .iter() - .find(|m| m.package_id() == package_id) - .expect("procedural macro must be registered in proc macro host") - } - - fn register_full_path_markers(&self, package_id: PackageId, markers: Vec) { - self.full_path_markers - .write() - .unwrap() - .entry(package_id) - .and_modify(|markers| markers.extend(markers.clone())) - .or_insert(markers); - } - - fn calculate_metadata( - db: &dyn SyntaxGroup, - item_ast: ast::ModuleItem, - edition: Edition, - ) -> TokenStreamMetadata { - let stable_ptr = item_ast.clone().stable_ptr().untyped(); - let file_path = 
stable_ptr.file_id(db).full_path(db.upcast()); - let file_id = short_hash(file_path.clone()); - let edition = edition_variant(edition); - TokenStreamMetadata::new(file_path, file_id, edition) - } -} - -struct InnerAttrExpansionContext<'a> { - host: &'a ProcMacroHostPlugin, - // Metadata returned for expansions. - diagnostics: Vec, - aux_data: EmittedAuxData, - any_changed: bool, -} - -impl<'a> InnerAttrExpansionContext<'a> { - pub fn new<'b: 'a>(host: &'b ProcMacroHostPlugin) -> Self { - Self { - diagnostics: Vec::new(), - aux_data: EmittedAuxData::default(), - any_changed: false, - host, - } - } - - pub fn register_result( - &mut self, - original: String, - input: ProcMacroId, - result: ProcMacroResult, - stable_ptr: SyntaxStablePtrId, - ) -> String { - let result_str = result.token_stream.to_string(); - let changed = result_str != original; - - if changed { - self.host - .register_full_path_markers(input.package_id, result.full_path_markers.clone()); - } - - self.diagnostics - .extend(into_cairo_diagnostics(result.diagnostics, stable_ptr)); - - if let Some(new_aux_data) = result.aux_data { - self.aux_data - .push(ProcMacroAuxData::new(new_aux_data.into(), input)); - } - - self.any_changed = self.any_changed || changed; - - result_str - } - pub fn into_result( - self, - expanded: String, - code_mappings: Vec, - attr_names: Vec, - ) -> PluginResult { - let msg = if attr_names.len() == 1 { - "the attribute macro" - } else { - "one of the attribute macros" - }; - let derive_names = attr_names.iter().map(ToString::to_string).join("`, `"); - let note = format!("this error originates in {msg}: `{derive_names}`"); - PluginResult { - code: Some(PluginGeneratedFile { - name: "proc_attr_inner".into(), - content: expanded, - aux_data: if self.aux_data.is_empty() { - None - } else { - Some(DynGeneratedFileAuxData::new(self.aux_data)) - }, - code_mappings, - diagnostics_note: Some(note), - }), - diagnostics: self.diagnostics, - remove_original_item: true, - } - } -} - -enum InnerAttrExpansionResult { - None, - Some(PluginResult), -} - -impl MacroPlugin for ProcMacroHostPlugin { - fn generate_code( - &self, - db: &dyn SyntaxGroup, - item_ast: ast::ModuleItem, - metadata: &MacroPluginMetadata<'_>, - ) -> PluginResult { - let stream_metadata = Self::calculate_metadata(db, item_ast.clone(), metadata.edition); - - // Handle inner functions. - if let InnerAttrExpansionResult::Some(result) = self.expand_inner_attr(db, item_ast.clone()) - { - return result; - } - - // Expand first attribute. - // Note that we only expand the first attribute, as we assume that the rest of the attributes - // will be handled by a subsequent call to this function. - let ctx = AllocationContext::default(); - let (input, body) = self.parse_attribute(db, item_ast.clone(), &ctx); - - if let Some(result) = match input { - AttrExpansionFound::Last { - expansion, - args, - stable_ptr, - } => Some((expansion, args, stable_ptr, true)), - AttrExpansionFound::Some { - expansion, - args, - stable_ptr, - } => Some((expansion, args, stable_ptr, false)), - AttrExpansionFound::None => None, - } - .map(|(expansion, args, stable_ptr, last)| { - let token_stream = body.with_metadata(stream_metadata.clone()); - self.expand_attribute(expansion, last, args, token_stream, stable_ptr) - }) { - return result; - } - - // Expand all derives. - // Note that all proc macro attributes should be already expanded at this point. 
- if let Some(result) = self.expand_derives(db, item_ast.clone(), stream_metadata.clone()) { - return result; - } - - // No expansions can be applied. - PluginResult { - code: None, - diagnostics: Vec::new(), - remove_original_item: false, - } - } - - fn declared_attributes(&self) -> Vec { - self.macros - .iter() - .flat_map(|m| m.declared_attributes_and_executables()) - .chain(vec![FULL_PATH_MARKER_KEY.to_string()]) - .collect() - } - - fn declared_derives(&self) -> Vec { - self.macros - .iter() - .flat_map(|m| m.declared_derives()) - .map(|s| s.to_case(Case::UpperCamel)) - .collect() - } - - fn executable_attributes(&self) -> Vec { - self.macros - .iter() - .flat_map(|m| m.executable_attributes()) - .collect() - } -} - -enum AttrExpansionFound { - Some { - expansion: ProcMacroId, - args: TokenStream, - stable_ptr: SyntaxStablePtrId, - }, - None, - Last { - expansion: ProcMacroId, - args: TokenStream, - stable_ptr: SyntaxStablePtrId, - }, -} -impl AttrExpansionFound { - pub fn as_name(&self) -> Option { - match self { - AttrExpansionFound::Some { expansion, .. } - | AttrExpansionFound::Last { expansion, .. } => Some(expansion.expansion.name.clone()), - AttrExpansionFound::None => None, - } - } -} - -/// A Cairo compiler inline macro plugin controlling the inline procedural macro execution. -/// -/// This plugin represents a single expansion capable of handling inline procedural macros. -/// The plugin triggers code expansion in a corresponding procedural macro instance. -#[derive(Debug)] -pub struct ProcMacroInlinePlugin { - instance: Arc, - expansion: Expansion, - doc: OnceLock>, -} - -impl ProcMacroInlinePlugin { - pub fn new(instance: Arc, expansion: Expansion) -> Self { - assert!(instance.get_expansions().contains(&expansion)); - Self { - instance, - expansion, - doc: Default::default(), - } - } - - pub fn name(&self) -> &str { - self.expansion.name.as_str() - } - - fn instance(&self) -> &ProcMacroInstance { - &self.instance - } -} - -impl InlineMacroExprPlugin for ProcMacroInlinePlugin { - fn generate_code( - &self, - db: &dyn SyntaxGroup, - syntax: &ast::ExprInlineMacro, - _metadata: &MacroPluginMetadata<'_>, - ) -> InlinePluginResult { - let ctx = AllocationContext::default(); - let stable_ptr = syntax.clone().stable_ptr().untyped(); - let arguments = syntax.arguments(db); - let mut token_stream_builder = TokenStreamBuilder::new(db); - token_stream_builder.add_node(arguments.as_syntax_node()); - let token_stream = token_stream_builder.build(&ctx); - let result = self.instance().generate_code( - self.expansion.name.clone(), - TokenStream::empty(), - token_stream, - ); - // Handle diagnostics. 
- let diagnostics = into_cairo_diagnostics(result.diagnostics, stable_ptr); - let token_stream = result.token_stream.clone(); - if token_stream.is_empty() { - // Remove original code - InlinePluginResult { - code: None, - diagnostics, - } - } else { - // Replace - let aux_data = result.aux_data.map(|aux_data| { - let aux_data = ProcMacroAuxData::new( - aux_data.into(), - ProcMacroId::new(self.instance.package_id(), self.expansion.clone()), - ); - let mut emitted = EmittedAuxData::default(); - emitted.push(aux_data); - DynGeneratedFileAuxData::new(emitted) - }); - let content = token_stream.to_string(); - InlinePluginResult { - code: Some(PluginGeneratedFile { - name: "inline_proc_macro".into(), - code_mappings: Vec::new(), - content, - aux_data, - diagnostics_note: Some(format!( - "this error originates in the inline macro: `{}`", - self.expansion.name - )), - }), - diagnostics, - } - } - } - - fn documentation(&self) -> Option { - self.doc - .get_or_init(|| self.instance().doc(self.expansion.name.clone())) - .clone() - } -} - -fn into_cairo_diagnostics( - diagnostics: Vec, - stable_ptr: SyntaxStablePtrId, -) -> Vec { - diagnostics - .into_iter() - .map(|diag| PluginDiagnostic { - stable_ptr, - message: diag.message, - severity: match diag.severity { - Severity::Error => cairo_lang_diagnostics::Severity::Error, - Severity::Warning => cairo_lang_diagnostics::Severity::Warning, - }, - }) - .collect_vec() -} - -/// A Scarb wrapper around the `ProcMacroHost` compiler plugin. -/// -/// This struct represent the compiler plugin in terms of Scarb data model. -/// It also builds a plugin suite that enables the compiler plugin. -#[derive(Default)] -pub struct ProcMacroHost { - macros: Vec>, -} - -impl ProcMacroHost { - pub fn register_instance(&mut self, instance: Arc) { - self.macros.push(instance); - } - - pub fn register_new(&mut self, package: Package, config: &Config) -> Result<()> { - let lib_path = package - .shared_lib_path(config) - .context("could not resolve shared library path")?; - let instance = ProcMacroInstance::try_new(package.id, lib_path)?; - self.register_instance(Arc::new(instance)); - Ok(()) - } - - pub fn into_plugin(self) -> Result { - ProcMacroHostPlugin::try_new(self.macros) - } - - pub fn macros(&self) -> &[Arc] { - &self.macros - } -} diff --git a/scarb/src/compiler/plugin/proc_macro/host/attribute.rs b/scarb/src/compiler/plugin/proc_macro/host/attribute.rs new file mode 100644 index 000000000..522947965 --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/host/attribute.rs @@ -0,0 +1,537 @@ +use crate::compiler::plugin::proc_macro::host::aux_data::{EmittedAuxData, ProcMacroAuxData}; +use crate::compiler::plugin::proc_macro::host::into_cairo_diagnostics; +use crate::compiler::plugin::proc_macro::{ + Expansion, ExpansionKind, ProcMacroHostPlugin, ProcMacroId, TokenStreamBuilder, +}; +use cairo_lang_defs::patcher::{PatchBuilder, RewriteNode}; +use cairo_lang_defs::plugin::PluginDiagnostic; +use cairo_lang_defs::plugin::{DynGeneratedFileAuxData, PluginGeneratedFile, PluginResult}; +use cairo_lang_filesystem::ids::CodeMapping; +use cairo_lang_macro::{AllocationContext, ProcMacroResult, TokenStream}; +use cairo_lang_syntax::attribute::structured::AttributeStructurize; +use cairo_lang_syntax::node::ast::{ImplItem, MaybeImplBody, MaybeTraitBody}; +use cairo_lang_syntax::node::db::SyntaxGroup; +use cairo_lang_syntax::node::ids::SyntaxStablePtrId; +use cairo_lang_syntax::node::{ast, TypedStablePtr, TypedSyntaxNode}; +use itertools::Itertools; +use smol_str::SmolStr; +use 
std::collections::HashSet;
+
+impl ProcMacroHostPlugin {
+    pub(crate) fn expand_inner_attr(
+        &self,
+        db: &dyn SyntaxGroup,
+        item_ast: ast::ModuleItem,
+    ) -> InnerAttrExpansionResult {
+        let mut context = InnerAttrExpansionContext::new(self);
+        let mut item_builder = PatchBuilder::new(db, &item_ast);
+        let mut used_attr_names: HashSet<SmolStr> = Default::default();
+        let mut all_none = true;
+        let ctx = AllocationContext::default();
+
+        match item_ast.clone() {
+            ast::ModuleItem::Trait(trait_ast) => {
+                item_builder.add_node(trait_ast.attributes(db).as_syntax_node());
+                item_builder.add_node(trait_ast.visibility(db).as_syntax_node());
+                item_builder.add_node(trait_ast.trait_kw(db).as_syntax_node());
+                item_builder.add_node(trait_ast.name(db).as_syntax_node());
+                item_builder.add_node(trait_ast.generic_params(db).as_syntax_node());
+
+                // Parse attributes of inner functions.
+                match trait_ast.body(db) {
+                    MaybeTraitBody::None(terminal) => {
+                        item_builder.add_node(terminal.as_syntax_node());
+                        InnerAttrExpansionResult::None
+                    }
+                    MaybeTraitBody::Some(body) => {
+                        item_builder.add_node(body.lbrace(db).as_syntax_node());
+
+                        let item_list = body.items(db);
+                        for item in item_list.elements(db).iter() {
+                            let ast::TraitItem::Function(func) = item else {
+                                item_builder.add_node(item.as_syntax_node());
+                                continue;
+                            };
+
+                            let mut token_stream_builder = TokenStreamBuilder::new(db);
+                            let attrs = func.attributes(db).elements(db);
+                            let found =
+                                self.parse_attrs(db, &mut token_stream_builder, attrs, &ctx);
+                            if let Some(name) = found.as_name() {
+                                used_attr_names.insert(name);
+                            }
+                            token_stream_builder.add_node(func.declaration(db).as_syntax_node());
+                            token_stream_builder.add_node(func.body(db).as_syntax_node());
+                            let token_stream = token_stream_builder.build(&ctx);
+
+                            all_none = all_none
+                                && self.do_expand_inner_attr(
+                                    db,
+                                    &mut context,
+                                    &mut item_builder,
+                                    found,
+                                    func,
+                                    token_stream,
+                                );
+                        }
+
+                        item_builder.add_node(body.rbrace(db).as_syntax_node());
+
+                        if all_none {
+                            InnerAttrExpansionResult::None
+                        } else {
+                            let (code, mappings) = item_builder.build();
+                            InnerAttrExpansionResult::Some(context.into_result(
+                                code,
+                                mappings,
+                                used_attr_names.into_iter().collect(),
+                            ))
+                        }
+                    }
+                }
+            }
+
+            ast::ModuleItem::Impl(impl_ast) => {
+                item_builder.add_node(impl_ast.attributes(db).as_syntax_node());
+                item_builder.add_node(impl_ast.visibility(db).as_syntax_node());
+                item_builder.add_node(impl_ast.impl_kw(db).as_syntax_node());
+                item_builder.add_node(impl_ast.name(db).as_syntax_node());
+                item_builder.add_node(impl_ast.generic_params(db).as_syntax_node());
+                item_builder.add_node(impl_ast.of_kw(db).as_syntax_node());
+                item_builder.add_node(impl_ast.trait_path(db).as_syntax_node());
+
+                match impl_ast.body(db) {
+                    MaybeImplBody::None(terminal) => {
+                        item_builder.add_node(terminal.as_syntax_node());
+                        InnerAttrExpansionResult::None
+                    }
+                    MaybeImplBody::Some(body) => {
+                        item_builder.add_node(body.lbrace(db).as_syntax_node());
+
+                        let items = body.items(db);
+                        for item in items.elements(db) {
+                            let ImplItem::Function(func) = item else {
+                                item_builder.add_node(item.as_syntax_node());
+                                continue;
+                            };
+
+                            let mut token_stream_builder = TokenStreamBuilder::new(db);
+                            let attrs = func.attributes(db).elements(db);
+                            let found =
+                                self.parse_attrs(db, &mut token_stream_builder, attrs, &ctx);
+                            if let Some(name) = found.as_name() {
+                                used_attr_names.insert(name);
+                            }
+                            token_stream_builder.add_node(func.visibility(db).as_syntax_node());
+                            token_stream_builder.add_node(func.declaration(db).as_syntax_node());
+                            token_stream_builder.add_node(func.body(db).as_syntax_node());
+                            let token_stream = token_stream_builder.build(&ctx);
+                            all_none = all_none
+                                && self.do_expand_inner_attr(
+                                    db,
+                                    &mut context,
+                                    &mut item_builder,
+                                    found,
+                                    &func,
+                                    token_stream,
+                                );
+                        }
+
+                        item_builder.add_node(body.rbrace(db).as_syntax_node());
+
+                        if all_none {
+                            InnerAttrExpansionResult::None
+                        } else {
+                            let (code, mappings) = item_builder.build();
+                            InnerAttrExpansionResult::Some(context.into_result(
+                                code,
+                                mappings,
+                                used_attr_names.into_iter().collect(),
+                            ))
+                        }
+                    }
+                }
+            }
+            _ => InnerAttrExpansionResult::None,
+        }
+    }
+
+    fn do_expand_inner_attr(
+        &self,
+        db: &dyn SyntaxGroup,
+        context: &mut InnerAttrExpansionContext<'_>,
+        item_builder: &mut PatchBuilder<'_>,
+        found: AttrExpansionFound,
+        func: &impl TypedSyntaxNode,
+        token_stream: TokenStream,
+    ) -> bool {
+        let mut all_none = true;
+        let (input, args, stable_ptr) = match found {
+            AttrExpansionFound::Last {
+                expansion,
+                args,
+                stable_ptr,
+            } => {
+                all_none = false;
+                (expansion, args, stable_ptr)
+            }
+            AttrExpansionFound::Some {
+                expansion,
+                args,
+                stable_ptr,
+            } => {
+                all_none = false;
+                (expansion, args, stable_ptr)
+            }
+            AttrExpansionFound::None => {
+                item_builder.add_node(func.as_syntax_node());
+                return all_none;
+            }
+        };
+
+        let result = self.instance(input.package_id).generate_code(
+            input.expansion.name.clone(),
+            args,
+            token_stream.clone(),
+        );
+
+        let expanded = context.register_result(token_stream.to_string(), input, result, stable_ptr);
+        item_builder.add_modified(RewriteNode::Mapped {
+            origin: func.as_syntax_node().span(db),
+            node: Box::new(RewriteNode::Text(expanded.to_string())),
+        });
+
+        all_none
+    }
+
+    /// Find the first procedural macro attribute that should be expanded.
+    ///
+    /// The found attribute is removed from the code.
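+    ///
+    /// An illustrative example (hypothetical input, not taken from the sources):
+    /// for `#[one] #[two] fn f() {}`, where both attributes resolve to
+    /// procedural macro expansions, this returns the expansion for `one`
+    /// together with a token stream equivalent to `#[two] fn f() {}`, so the
+    /// remaining attribute is picked up by a subsequent `generate_code` call.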
+ pub(crate) fn parse_attribute( + &self, + db: &dyn SyntaxGroup, + item_ast: ast::ModuleItem, + ctx: &AllocationContext, + ) -> (AttrExpansionFound, TokenStream) { + let mut token_stream_builder = TokenStreamBuilder::new(db); + let input = match item_ast.clone() { + ast::ModuleItem::Trait(trait_ast) => { + let attrs = trait_ast.attributes(db).elements(db); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); + token_stream_builder.add_node(trait_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(trait_ast.trait_kw(db).as_syntax_node()); + token_stream_builder.add_node(trait_ast.name(db).as_syntax_node()); + token_stream_builder.add_node(trait_ast.generic_params(db).as_syntax_node()); + token_stream_builder.add_node(trait_ast.body(db).as_syntax_node()); + expansion + } + ast::ModuleItem::Impl(impl_ast) => { + let attrs = impl_ast.attributes(db).elements(db); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); + token_stream_builder.add_node(impl_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.impl_kw(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.name(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.generic_params(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.of_kw(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.trait_path(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.body(db).as_syntax_node()); + expansion + } + ast::ModuleItem::Module(module_ast) => { + let attrs = module_ast.attributes(db).elements(db); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); + token_stream_builder.add_node(module_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(module_ast.module_kw(db).as_syntax_node()); + token_stream_builder.add_node(module_ast.name(db).as_syntax_node()); + token_stream_builder.add_node(module_ast.body(db).as_syntax_node()); + expansion + } + ast::ModuleItem::FreeFunction(free_func_ast) => { + let attrs = free_func_ast.attributes(db).elements(db); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); + token_stream_builder.add_node(free_func_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(free_func_ast.declaration(db).as_syntax_node()); + token_stream_builder.add_node(free_func_ast.body(db).as_syntax_node()); + expansion + } + ast::ModuleItem::ExternFunction(extern_func_ast) => { + let attrs = extern_func_ast.attributes(db).elements(db); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); + token_stream_builder.add_node(extern_func_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(extern_func_ast.extern_kw(db).as_syntax_node()); + token_stream_builder.add_node(extern_func_ast.declaration(db).as_syntax_node()); + token_stream_builder.add_node(extern_func_ast.semicolon(db).as_syntax_node()); + expansion + } + ast::ModuleItem::ExternType(extern_type_ast) => { + let attrs = extern_type_ast.attributes(db).elements(db); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); + token_stream_builder.add_node(extern_type_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(extern_type_ast.extern_kw(db).as_syntax_node()); + token_stream_builder.add_node(extern_type_ast.type_kw(db).as_syntax_node()); + token_stream_builder.add_node(extern_type_ast.name(db).as_syntax_node()); + 
token_stream_builder.add_node(extern_type_ast.generic_params(db).as_syntax_node());
+                token_stream_builder.add_node(extern_type_ast.semicolon(db).as_syntax_node());
+                expansion
+            }
+            ast::ModuleItem::Struct(struct_ast) => {
+                let attrs = struct_ast.attributes(db).elements(db);
+                let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx);
+                token_stream_builder.add_node(struct_ast.visibility(db).as_syntax_node());
+                token_stream_builder.add_node(struct_ast.struct_kw(db).as_syntax_node());
+                token_stream_builder.add_node(struct_ast.name(db).as_syntax_node());
+                token_stream_builder.add_node(struct_ast.generic_params(db).as_syntax_node());
+                token_stream_builder.add_node(struct_ast.lbrace(db).as_syntax_node());
+                token_stream_builder.add_node(struct_ast.members(db).as_syntax_node());
+                token_stream_builder.add_node(struct_ast.rbrace(db).as_syntax_node());
+                expansion
+            }
+            ast::ModuleItem::Enum(enum_ast) => {
+                let attrs = enum_ast.attributes(db).elements(db);
+                let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx);
+                token_stream_builder.add_node(enum_ast.visibility(db).as_syntax_node());
+                token_stream_builder.add_node(enum_ast.enum_kw(db).as_syntax_node());
+                token_stream_builder.add_node(enum_ast.name(db).as_syntax_node());
+                token_stream_builder.add_node(enum_ast.generic_params(db).as_syntax_node());
+                token_stream_builder.add_node(enum_ast.lbrace(db).as_syntax_node());
+                token_stream_builder.add_node(enum_ast.variants(db).as_syntax_node());
+                token_stream_builder.add_node(enum_ast.rbrace(db).as_syntax_node());
+                expansion
+            }
+            _ => AttrExpansionFound::None,
+        };
+        let token_stream = token_stream_builder.build(ctx);
+        (input, token_stream)
+    }
+
+    fn parse_attrs(
+        &self,
+        db: &dyn SyntaxGroup,
+        builder: &mut TokenStreamBuilder<'_>,
+        attrs: Vec<ast::Attribute>,
+        ctx: &AllocationContext,
+    ) -> AttrExpansionFound {
+        // This function parses attributes of the item,
+        // checking if those attributes correspond to a procedural macro that should be fired.
+        // The found proc macro attribute is removed from the attributes list,
+        // while the remaining attributes are appended to the `TokenStreamBuilder` passed as an argument.
+
+        // Note this function does not affect the executable attributes,
+        // as it only pulls `ExpansionKind::Attr` from the plugin.
+        // This means that executable attributes will neither be removed from the item,
+        // nor will they cause the item to be rewritten.
+        let mut expansion = None;
+        let mut last = true;
+        for attr in attrs {
+            // We ensure that this flag is changed *after* the expansion is found.
+            if last {
+                let structured_attr = attr.clone().structurize(db);
+                let found = self.find_expansion(&Expansion::new(
+                    structured_attr.id.clone(),
+                    ExpansionKind::Attr,
+                ));
+                if let Some(found) = found {
+                    if expansion.is_none() {
+                        let mut args_builder = TokenStreamBuilder::new(db);
+                        args_builder.add_node(attr.arguments(db).as_syntax_node());
+                        let args = args_builder.build(ctx);
+                        expansion = Some((found, args, attr.stable_ptr().untyped()));
+                        // Do not add the attribute for found expansion.
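+                        // Illustrative walkthrough (hypothetical input): for
+                        // `#[other] #[a] #[b] fn f() {}`, where only `a` and `b` are
+                        // proc macro attributes, `#[other]` is passed through to the
+                        // builder, `a` is captured above as the expansion to fire,
+                        // and `b` flips `last` to false below, yielding
+                        // `AttrExpansionFound::Some` rather than `Last`.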
+                        continue;
+                    } else {
+                        last = false;
+                    }
+                }
+            }
+            builder.add_node(attr.as_syntax_node());
+        }
+        match (expansion, last) {
+            (Some((expansion, args, stable_ptr)), true) => AttrExpansionFound::Last {
+                expansion,
+                args,
+                stable_ptr,
+            },
+            (Some((expansion, args, stable_ptr)), false) => AttrExpansionFound::Some {
+                expansion,
+                args,
+                stable_ptr,
+            },
+            (None, _) => AttrExpansionFound::None,
+        }
+    }
+
+    pub fn expand_attribute(
+        &self,
+        input: ProcMacroId,
+        last: bool,
+        args: TokenStream,
+        token_stream: TokenStream,
+        stable_ptr: SyntaxStablePtrId,
+    ) -> PluginResult {
+        let original = token_stream.to_string();
+        let result = self.instance(input.package_id).generate_code(
+            input.expansion.name.clone(),
+            args,
+            token_stream,
+        );
+
+        // Handle token stream.
+        if result.token_stream.is_empty() {
+            // Remove original code
+            return PluginResult {
+                diagnostics: into_cairo_diagnostics(result.diagnostics, stable_ptr),
+                code: None,
+                remove_original_item: true,
+            };
+        }
+
+        // Full path markers require code modification.
+        self.register_full_path_markers(input.package_id, result.full_path_markers.clone());
+
+        // This is a minor optimization.
+        // If the expanded macro attribute is the only one that will be expanded by `ProcMacroHost`
+        // in this `generate_code` call (i.e. all the other macro attributes have been expanded by
+        // previous calls), and the expansion did not produce any changes, we can skip rewriting the
+        // expanded node by simply returning no generated code, and leaving the original item as is.
+        // However, if we have other macro attributes to expand, we must rewrite the node even if no
+        // changes have been produced, so that we can parse the attributes once again and expand them.
+        // In essence, `code: None, remove_original_item: false` means `ProcMacroHost` will not be
+        // called again for this AST item.
+        // This optimization limits the number of generated nodes a bit.
+        if last && result.aux_data.is_none() && original == result.token_stream.to_string() {
+            return PluginResult {
+                code: None,
+                remove_original_item: false,
+                diagnostics: into_cairo_diagnostics(result.diagnostics, stable_ptr),
+            };
+        }
+
+        let file_name = format!("proc_{}", input.expansion.name);
+        let content = result.token_stream.to_string();
+        PluginResult {
+            code: Some(PluginGeneratedFile {
+                name: file_name.into(),
+                code_mappings: Vec::new(),
+                content,
+                diagnostics_note: Some(format!(
+                    "this error originates in the attribute macro: `{}`",
+                    input.expansion.name
+                )),
+                aux_data: result.aux_data.map(|new_aux_data| {
+                    DynGeneratedFileAuxData::new(EmittedAuxData::new(ProcMacroAuxData::new(
+                        new_aux_data.into(),
+                        input,
+                    )))
+                }),
+            }),
+            diagnostics: into_cairo_diagnostics(result.diagnostics, stable_ptr),
+            remove_original_item: true,
+        }
+    }
+}
+
+pub enum AttrExpansionFound {
+    Some {
+        expansion: ProcMacroId,
+        args: TokenStream,
+        stable_ptr: SyntaxStablePtrId,
+    },
+    None,
+    Last {
+        expansion: ProcMacroId,
+        args: TokenStream,
+        stable_ptr: SyntaxStablePtrId,
+    },
+}
+
+impl AttrExpansionFound {
+    pub fn as_name(&self) -> Option<SmolStr> {
+        match self {
+            AttrExpansionFound::Some { expansion, .. }
+            | AttrExpansionFound::Last { expansion, .. } => Some(expansion.expansion.name.clone()),
+            AttrExpansionFound::None => None,
+        }
+    }
+}
+
+pub enum InnerAttrExpansionResult {
+    None,
+    Some(PluginResult),
+}
+
+pub struct InnerAttrExpansionContext<'a> {
+    host: &'a ProcMacroHostPlugin,
+    // Metadata returned for expansions.
+ diagnostics: Vec, + aux_data: EmittedAuxData, + any_changed: bool, +} + +impl<'a> InnerAttrExpansionContext<'a> { + pub fn new<'b: 'a>(host: &'b ProcMacroHostPlugin) -> Self { + Self { + diagnostics: Vec::new(), + aux_data: EmittedAuxData::default(), + any_changed: false, + host, + } + } + + pub fn register_result( + &mut self, + original: String, + input: ProcMacroId, + result: ProcMacroResult, + stable_ptr: SyntaxStablePtrId, + ) -> String { + let result_str = result.token_stream.to_string(); + let changed = result_str != original; + + if changed { + self.host + .register_full_path_markers(input.package_id, result.full_path_markers.clone()); + } + + self.diagnostics + .extend(into_cairo_diagnostics(result.diagnostics, stable_ptr)); + + if let Some(new_aux_data) = result.aux_data { + self.aux_data + .push(ProcMacroAuxData::new(new_aux_data.into(), input)); + } + + self.any_changed = self.any_changed || changed; + + result_str + } + + pub fn into_result( + self, + expanded: String, + code_mappings: Vec, + attr_names: Vec, + ) -> PluginResult { + let msg = if attr_names.len() == 1 { + "the attribute macro" + } else { + "one of the attribute macros" + }; + let derive_names = attr_names.iter().map(ToString::to_string).join("`, `"); + let note = format!("this error originates in {msg}: `{derive_names}`"); + PluginResult { + code: Some(PluginGeneratedFile { + name: "proc_attr_inner".into(), + content: expanded, + aux_data: if self.aux_data.is_empty() { + None + } else { + Some(DynGeneratedFileAuxData::new(self.aux_data)) + }, + code_mappings, + diagnostics_note: Some(note), + }), + diagnostics: self.diagnostics, + remove_original_item: true, + } + } +} diff --git a/scarb/src/compiler/plugin/proc_macro/host/aux_data.rs b/scarb/src/compiler/plugin/proc_macro/host/aux_data.rs new file mode 100644 index 000000000..a1fa0d54a --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/host/aux_data.rs @@ -0,0 +1,90 @@ +use crate::compiler::plugin::proc_macro::{ProcMacroHostPlugin, ProcMacroId}; +use crate::core::PackageId; +use cairo_lang_defs::plugin::GeneratedFileAuxData; +use cairo_lang_macro::AuxData; +use cairo_lang_semantic::db::SemanticGroup; +use itertools::Itertools; +use std::any::Any; +use std::collections::HashMap; +use std::vec::IntoIter; + +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct ProcMacroAuxData { + value: Vec, + macro_id: ProcMacroId, +} + +impl ProcMacroAuxData { + pub fn new(value: Vec, macro_id: ProcMacroId) -> Self { + Self { value, macro_id } + } +} + +impl From for AuxData { + fn from(data: ProcMacroAuxData) -> Self { + Self::new(data.value) + } +} + +#[derive(Debug, Clone, Default)] +pub struct EmittedAuxData(Vec); + +impl GeneratedFileAuxData for EmittedAuxData { + fn as_any(&self) -> &dyn Any { + self + } + + fn eq(&self, other: &dyn GeneratedFileAuxData) -> bool { + self.0 == other.as_any().downcast_ref::().unwrap().0 + } +} + +impl EmittedAuxData { + pub fn new(aux_data: ProcMacroAuxData) -> Self { + Self(vec![aux_data]) + } + + pub fn push(&mut self, aux_data: ProcMacroAuxData) { + self.0.push(aux_data); + } + + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } +} + +impl IntoIterator for EmittedAuxData { + type Item = ProcMacroAuxData; + type IntoIter = IntoIter; + + fn into_iter(self) -> IntoIter { + self.0.into_iter() + } +} + +impl ProcMacroHostPlugin { + pub(crate) fn collect_aux_data( + &self, + db: &dyn SemanticGroup, + ) -> HashMap> { + let mut data = Vec::new(); + for crate_id in db.crates() { + let crate_modules = db.crate_modules(crate_id); + 
for module in crate_modules.iter() { + let file_infos = db.module_generated_file_aux_data(*module); + if let Ok(file_infos) = file_infos { + for file_info in file_infos.iter() { + let aux_data = file_info + .as_ref() + .and_then(|ad| ad.as_any().downcast_ref::()); + if let Some(aux_data) = aux_data { + data.extend(aux_data.clone().into_iter()); + } + } + } + } + } + data.into_iter() + .into_group_map_by(|d| d.macro_id.package_id) + } +} diff --git a/scarb/src/compiler/plugin/proc_macro/host/derive.rs b/scarb/src/compiler/plugin/proc_macro/host/derive.rs new file mode 100644 index 000000000..6c771d88c --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/host/derive.rs @@ -0,0 +1,148 @@ +use crate::compiler::plugin::proc_macro::host::aux_data::{EmittedAuxData, ProcMacroAuxData}; +use crate::compiler::plugin::proc_macro::host::{into_cairo_diagnostics, DERIVE_ATTR}; +use crate::compiler::plugin::proc_macro::{ + Expansion, ExpansionKind, ProcMacroHostPlugin, ProcMacroId, TokenStreamBuilder, +}; +use cairo_lang_defs::patcher::PatchBuilder; +use cairo_lang_defs::plugin::{DynGeneratedFileAuxData, PluginGeneratedFile, PluginResult}; +use cairo_lang_macro::{ + AllocationContext, Diagnostic, TokenStream, TokenStreamMetadata, TokenTree, +}; +use cairo_lang_syntax::attribute::structured::{AttributeArgVariant, AttributeStructurize}; +use cairo_lang_syntax::node::ast::{Expr, PathSegment}; +use cairo_lang_syntax::node::db::SyntaxGroup; +use cairo_lang_syntax::node::helpers::QueryAttrs; +use cairo_lang_syntax::node::{ast, Terminal, TypedStablePtr, TypedSyntaxNode}; +use convert_case::{Case, Casing}; +use itertools::Itertools; + +impl ProcMacroHostPlugin { + /// Handle `#[derive(...)]` attribute. + /// + /// Returns a list of expansions that this plugin should apply. + fn parse_derive(&self, db: &dyn SyntaxGroup, item_ast: ast::ModuleItem) -> Vec { + let attrs = match item_ast { + ast::ModuleItem::Struct(struct_ast) => Some(struct_ast.query_attr(db, DERIVE_ATTR)), + ast::ModuleItem::Enum(enum_ast) => Some(enum_ast.query_attr(db, DERIVE_ATTR)), + _ => None, + }; + + attrs + .unwrap_or_default() + .iter() + .map(|attr| attr.clone().structurize(db)) + .flat_map(|attr| attr.args.into_iter()) + .filter_map(|attr| { + let AttributeArgVariant::Unnamed(value) = attr.clone().variant else { + return None; + }; + let Expr::Path(path) = value else { + return None; + }; + let path = path.elements(db); + let path = path.last()?; + let PathSegment::Simple(segment) = path else { + return None; + }; + let ident = segment.ident(db); + let value = ident.text(db).to_string(); + + self.find_expansion(&Expansion::new( + value.to_case(Case::Snake), + ExpansionKind::Derive, + )) + }) + .collect_vec() + } + + pub fn expand_derives( + &self, + db: &dyn SyntaxGroup, + item_ast: ast::ModuleItem, + stream_metadata: TokenStreamMetadata, + ) -> Option { + let stable_ptr = item_ast.clone().stable_ptr().untyped(); + let mut token_stream_builder = TokenStreamBuilder::new(db); + token_stream_builder.add_node(item_ast.as_syntax_node()); + token_stream_builder.with_metadata(stream_metadata.clone()); + let mut aux_data = EmittedAuxData::default(); + let mut all_diagnostics: Vec = Vec::new(); + + // All derives to be applied. 
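+        // For instance (hypothetical derive), `#[derive(CustomSerialize)]` on a
+        // struct is looked up as an expansion named `custom_serialize`:
+        // `parse_derive` snake-cases each derive path segment before the lookup,
+        // mirroring `declared_derives`, which exposes names in UpperCamelCase.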
+ let derives = self.parse_derive(db, item_ast.clone()); + let any_derives = !derives.is_empty(); + + let ctx = AllocationContext::default(); + let mut derived_code = PatchBuilder::new(db, &item_ast); + for derive in derives.iter() { + let token_stream = token_stream_builder.build(&ctx); + let result = self.instance(derive.package_id).generate_code( + derive.expansion.name.clone(), + TokenStream::empty(), + token_stream, + ); + + // Register diagnostics. + all_diagnostics.extend(result.diagnostics); + + // Register aux data. + if let Some(new_aux_data) = result.aux_data { + aux_data.push(ProcMacroAuxData::new( + new_aux_data.into(), + ProcMacroId::new(derive.package_id, derive.expansion.clone()), + )); + } + + if result.token_stream.is_empty() { + // No code has been generated. + // We do not need to do anything. + continue; + } + + for token in result.token_stream.tokens { + match token { + TokenTree::Ident(token) => { + derived_code.add_str(token.content.as_ref()); + } + } + } + } + + if any_derives { + let derived_code = derived_code.build().0; + return Some(PluginResult { + code: if derived_code.is_empty() { + None + } else { + let msg = if derives.len() == 1 { + "the derive macro" + } else { + "one of the derive macros" + }; + let derive_names = derives + .iter() + .map(|derive| derive.expansion.name.to_string()) + .join("`, `"); + let note = format!("this error originates in {msg}: `{derive_names}`"); + Some(PluginGeneratedFile { + name: "proc_macro_derive".into(), + code_mappings: Vec::new(), + content: derived_code, + aux_data: if aux_data.is_empty() { + None + } else { + Some(DynGeneratedFileAuxData::new(aux_data)) + }, + diagnostics_note: Some(note), + }) + }, + diagnostics: into_cairo_diagnostics(all_diagnostics, stable_ptr), + // Note that we don't remove the original item here, unlike for attributes. + // We do not add the original code to the generated file either. + remove_original_item: false, + }); + } + + None + } +} diff --git a/scarb/src/compiler/plugin/proc_macro/host/inline.rs b/scarb/src/compiler/plugin/proc_macro/host/inline.rs new file mode 100644 index 000000000..225a1b72d --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/host/inline.rs @@ -0,0 +1,101 @@ +use crate::compiler::plugin::proc_macro::host::aux_data::{EmittedAuxData, ProcMacroAuxData}; +use crate::compiler::plugin::proc_macro::host::into_cairo_diagnostics; +use crate::compiler::plugin::proc_macro::{ + Expansion, ProcMacroId, ProcMacroInstance, TokenStreamBuilder, +}; +use cairo_lang_defs::plugin::{ + DynGeneratedFileAuxData, InlineMacroExprPlugin, InlinePluginResult, MacroPluginMetadata, + PluginGeneratedFile, +}; +use cairo_lang_macro::{AllocationContext, TokenStream}; +use cairo_lang_syntax::node::db::SyntaxGroup; +use cairo_lang_syntax::node::{ast, TypedStablePtr, TypedSyntaxNode}; +use std::sync::{Arc, OnceLock}; + +/// A Cairo compiler inline macro plugin controlling the inline procedural macro execution. +/// +/// This plugin represents a single expansion capable of handling inline procedural macros. +/// The plugin triggers code expansion in a corresponding procedural macro instance. 
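+///
+/// A minimal registration sketch (assuming an `instance: Arc<ProcMacroInstance>`
+/// that declares an inline expansion; the host performs the equivalent wiring
+/// when building its `PluginSuite`):
+///
+/// ```rust,ignore
+/// let plugin = Arc::new(ProcMacroInlinePlugin::new(instance.clone(), expansion.clone()));
+/// suite.add_inline_macro_plugin_ex(expansion.name.as_str(), plugin);
+/// ```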
+#[derive(Debug)]
+pub struct ProcMacroInlinePlugin {
+    instance: Arc<ProcMacroInstance>,
+    expansion: Expansion,
+    doc: OnceLock<Option<String>>,
+}
+
+impl ProcMacroInlinePlugin {
+    pub fn new(instance: Arc<ProcMacroInstance>, expansion: Expansion) -> Self {
+        assert!(instance.get_expansions().contains(&expansion));
+        Self {
+            instance,
+            expansion,
+            doc: Default::default(),
+        }
+    }
+
+    fn instance(&self) -> &ProcMacroInstance {
+        &self.instance
+    }
+}
+
+impl InlineMacroExprPlugin for ProcMacroInlinePlugin {
+    fn generate_code(
+        &self,
+        db: &dyn SyntaxGroup,
+        syntax: &ast::ExprInlineMacro,
+        _metadata: &MacroPluginMetadata<'_>,
+    ) -> InlinePluginResult {
+        let ctx = AllocationContext::default();
+        let stable_ptr = syntax.clone().stable_ptr().untyped();
+        let arguments = syntax.arguments(db);
+        let mut token_stream_builder = TokenStreamBuilder::new(db);
+        token_stream_builder.add_node(arguments.as_syntax_node());
+        let token_stream = token_stream_builder.build(&ctx);
+        let result = self.instance().generate_code(
+            self.expansion.name.clone(),
+            TokenStream::empty(),
+            token_stream,
+        );
+        // Handle diagnostics.
+        let diagnostics = into_cairo_diagnostics(result.diagnostics, stable_ptr);
+        let token_stream = result.token_stream.clone();
+        if token_stream.is_empty() {
+            // Remove original code
+            InlinePluginResult {
+                code: None,
+                diagnostics,
+            }
+        } else {
+            // Replace
+            let aux_data = result.aux_data.map(|aux_data| {
+                let aux_data = ProcMacroAuxData::new(
+                    aux_data.into(),
+                    ProcMacroId::new(self.instance.package_id(), self.expansion.clone()),
+                );
+                let mut emitted = EmittedAuxData::default();
+                emitted.push(aux_data);
+                DynGeneratedFileAuxData::new(emitted)
+            });
+            let content = token_stream.to_string();
+            InlinePluginResult {
+                code: Some(PluginGeneratedFile {
+                    name: "inline_proc_macro".into(),
+                    code_mappings: Vec::new(),
+                    content,
+                    aux_data,
+                    diagnostics_note: Some(format!(
+                        "this error originates in the inline macro: `{}`",
+                        self.expansion.name
+                    )),
+                }),
+                diagnostics,
+            }
+        }
+    }
+
+    fn documentation(&self) -> Option<String> {
+        self.doc
+            .get_or_init(|| self.instance().doc(self.expansion.name.clone()))
+            .clone()
+    }
+}
diff --git a/scarb/src/compiler/plugin/proc_macro/host/mod.rs b/scarb/src/compiler/plugin/proc_macro/host/mod.rs
new file mode 100644
index 000000000..b6248245e
--- /dev/null
+++ b/scarb/src/compiler/plugin/proc_macro/host/mod.rs
@@ -0,0 +1,269 @@
+mod attribute;
+mod aux_data;
+mod derive;
+mod inline;
+mod post;
+
+use attribute::*;
+pub use aux_data::ProcMacroAuxData;
+use inline::*;
+
+use crate::compiler::plugin::proc_macro::compilation::SharedLibraryProvider;
+use crate::compiler::plugin::proc_macro::{Expansion, ExpansionKind, ProcMacroInstance};
+use crate::core::{edition_variant, Config, Package, PackageId};
+use anyhow::{ensure, Context, Result};
+use cairo_lang_defs::plugin::PluginDiagnostic;
+use cairo_lang_defs::plugin::{MacroPlugin, MacroPluginMetadata, PluginResult};
+use cairo_lang_filesystem::db::Edition;
+use cairo_lang_macro::{AllocationContext, Diagnostic, Severity, TokenStreamMetadata};
+use cairo_lang_semantic::plugin::PluginSuite;
+use cairo_lang_syntax::node::db::SyntaxGroup;
+use cairo_lang_syntax::node::ids::SyntaxStablePtrId;
+use cairo_lang_syntax::node::{ast, TypedStablePtr, TypedSyntaxNode};
+use convert_case::{Case, Casing};
+use itertools::Itertools;
+use scarb_stable_hash::short_hash;
+use std::collections::HashMap;
+use std::fmt::Debug;
+use std::sync::{Arc, RwLock};
+
+const FULL_PATH_MARKER_KEY: &str = "macro::full_path_marker";
+const DERIVE_ATTR: &str = "derive";
+
+/// A Cairo compiler plugin controlling the procedural macro execution.
+///
+/// This plugin decides which macro plugins (if any) should be applied to the processed AST item.
+/// It then redirects the item to the appropriate macro plugin for code expansion.
+#[derive(Debug)]
+pub struct ProcMacroHostPlugin {
+    macros: Vec<Arc<ProcMacroInstance>>,
+    full_path_markers: RwLock<HashMap<PackageId, Vec<String>>>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct ProcMacroId {
+    pub package_id: PackageId,
+    pub expansion: Expansion,
+}
+
+impl ProcMacroId {
+    pub fn new(package_id: PackageId, expansion: Expansion) -> Self {
+        Self {
+            package_id,
+            expansion,
+        }
+    }
+}
+
+impl ProcMacroHostPlugin {
+    pub fn try_new(macros: Vec<Arc<ProcMacroInstance>>) -> Result<Self> {
+        // Validate expansions.
+        let mut expansions = macros
+            .iter()
+            .flat_map(|m| {
+                m.get_expansions()
+                    .iter()
+                    .map(|e| ProcMacroId::new(m.package_id(), e.clone()))
+                    .collect_vec()
+            })
+            .collect::<Vec<_>>();
+        expansions.sort_unstable_by_key(|e| e.expansion.name.clone());
+        ensure!(
+            expansions
+                .windows(2)
+                .all(|w| w[0].expansion.name != w[1].expansion.name),
+            "duplicate expansions defined for procedural macros: {duplicates}",
+            duplicates = expansions
+                .windows(2)
+                .filter(|w| w[0].expansion.name == w[1].expansion.name)
+                .map(|w| format!(
+                    "{} ({} and {})",
+                    w[0].expansion.name.as_str(),
+                    w[0].package_id,
+                    w[1].package_id
+                ))
+                .collect::<Vec<_>>()
+                .join(", ")
+        );
+        Ok(Self {
+            macros,
+            full_path_markers: RwLock::new(Default::default()),
+        })
+    }
+
+    fn find_expansion(&self, expansion: &Expansion) -> Option<ProcMacroId> {
+        self.macros
+            .iter()
+            .find(|m| m.get_expansions().contains(expansion))
+            .map(|m| m.package_id())
+            .map(|package_id| ProcMacroId::new(package_id, expansion.clone()))
+    }
+
+    pub fn build_plugin_suite(macro_host: Arc<Self>) -> PluginSuite {
+        let mut suite = PluginSuite::default();
+        // Register inline macro plugins.
+        for proc_macro in &macro_host.macros {
+            let expansions = proc_macro
+                .get_expansions()
+                .iter()
+                .filter(|exp| matches!(exp.kind, ExpansionKind::Inline));
+            for expansion in expansions {
+                let plugin = Arc::new(ProcMacroInlinePlugin::new(
+                    proc_macro.clone(),
+                    expansion.clone(),
+                ));
+                suite.add_inline_macro_plugin_ex(expansion.name.as_str(), plugin);
+            }
+        }
+        // Register procedural macro host plugin.
+        suite.add_plugin_ex(macro_host);
+        suite
+    }
+
+    pub fn instance(&self, package_id: PackageId) -> &ProcMacroInstance {
+        self.macros
+            .iter()
+            .find(|m| m.package_id() == package_id)
+            .expect("procedural macro must be registered in proc macro host")
+    }
+
+    fn calculate_metadata(
+        db: &dyn SyntaxGroup,
+        item_ast: ast::ModuleItem,
+        edition: Edition,
+    ) -> TokenStreamMetadata {
+        let stable_ptr = item_ast.clone().stable_ptr().untyped();
+        let file_path = stable_ptr.file_id(db).full_path(db.upcast());
+        let file_id = short_hash(file_path.clone());
+        let edition = edition_variant(edition);
+        TokenStreamMetadata::new(file_path, file_id, edition)
+    }
+}
+
+impl MacroPlugin for ProcMacroHostPlugin {
+    fn generate_code(
+        &self,
+        db: &dyn SyntaxGroup,
+        item_ast: ast::ModuleItem,
+        metadata: &MacroPluginMetadata<'_>,
+    ) -> PluginResult {
+        let stream_metadata = Self::calculate_metadata(db, item_ast.clone(), metadata.edition);
+
+        // Handle inner functions.
+        if let InnerAttrExpansionResult::Some(result) = self.expand_inner_attr(db, item_ast.clone())
+        {
+            return result;
+        }
+
+        // Expand first attribute.
+        // Note that we only expand the first attribute, as we assume that the rest of the attributes
+        // will be handled by a subsequent call to this function.
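+        // For example, for `#[first] #[second] fn foo() {}` only `first` is
+        // expanded in this call; its output normally still carries `#[second]`,
+        // which is handled when the compiler feeds the generated item back to
+        // this plugin.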
+        let ctx = AllocationContext::default();
+        let (input, body) = self.parse_attribute(db, item_ast.clone(), &ctx);
+
+        if let Some(result) = match input {
+            AttrExpansionFound::Last {
+                expansion,
+                args,
+                stable_ptr,
+            } => Some((expansion, args, stable_ptr, true)),
+            AttrExpansionFound::Some {
+                expansion,
+                args,
+                stable_ptr,
+            } => Some((expansion, args, stable_ptr, false)),
+            AttrExpansionFound::None => None,
+        }
+        .map(|(expansion, args, stable_ptr, last)| {
+            let token_stream = body.with_metadata(stream_metadata.clone());
+            self.expand_attribute(expansion, last, args, token_stream, stable_ptr)
+        }) {
+            return result;
+        }
+
+        // Expand all derives.
+        // Note that all proc macro attributes should be already expanded at this point.
+        if let Some(result) = self.expand_derives(db, item_ast.clone(), stream_metadata.clone()) {
+            return result;
+        }
+
+        // No expansions can be applied.
+        PluginResult {
+            code: None,
+            diagnostics: Vec::new(),
+            remove_original_item: false,
+        }
+    }
+
+    fn declared_attributes(&self) -> Vec<String> {
+        self.macros
+            .iter()
+            .flat_map(|m| m.declared_attributes_and_executables())
+            .chain(vec![FULL_PATH_MARKER_KEY.to_string()])
+            .collect()
+    }
+
+    fn declared_derives(&self) -> Vec<String> {
+        self.macros
+            .iter()
+            .flat_map(|m| m.declared_derives())
+            .map(|s| s.to_case(Case::UpperCamel))
+            .collect()
+    }
+
+    fn executable_attributes(&self) -> Vec<String> {
+        self.macros
+            .iter()
+            .flat_map(|m| m.executable_attributes())
+            .collect()
+    }
+}
+
+fn into_cairo_diagnostics(
+    diagnostics: Vec<Diagnostic>,
+    stable_ptr: SyntaxStablePtrId,
+) -> Vec<PluginDiagnostic> {
+    diagnostics
+        .into_iter()
+        .map(|diag| PluginDiagnostic {
+            stable_ptr,
+            message: diag.message,
+            severity: match diag.severity {
+                Severity::Error => cairo_lang_diagnostics::Severity::Error,
+                Severity::Warning => cairo_lang_diagnostics::Severity::Warning,
+            },
+        })
+        .collect_vec()
+}
+
+/// A Scarb wrapper around the `ProcMacroHost` compiler plugin.
+///
+/// This struct represents the compiler plugin in terms of the Scarb data model.
+/// It also builds a plugin suite that enables the compiler plugin.
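+///
+/// A minimal usage sketch (the `package` and `config` values are assumed to be
+/// provided by the surrounding Scarb workspace context):
+///
+/// ```ignore
+/// let mut host = ProcMacroHost::default();
+/// host.register_new(package, &config)?;
+/// let plugin = host.into_plugin()?;
+/// let suite = ProcMacroHostPlugin::build_plugin_suite(Arc::new(plugin));
+/// ```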
+#[derive(Default)]
+pub struct ProcMacroHost {
+    macros: Vec<Arc<ProcMacroInstance>>,
+}
+
+impl ProcMacroHost {
+    pub fn register_instance(&mut self, instance: Arc<ProcMacroInstance>) {
+        self.macros.push(instance);
+    }
+
+    pub fn register_new(&mut self, package: Package, config: &Config) -> Result<()> {
+        let lib_path = package
+            .shared_lib_path(config)
+            .context("could not resolve shared library path")?;
+        let instance = ProcMacroInstance::try_new(package.id, lib_path)?;
+        self.register_instance(Arc::new(instance));
+        Ok(())
+    }
+
+    pub fn into_plugin(self) -> Result<ProcMacroHostPlugin> {
+        ProcMacroHostPlugin::try_new(self.macros)
+    }
+
+    pub fn macros(&self) -> &[Arc<ProcMacroInstance>] {
+        &self.macros
+    }
+}
diff --git a/scarb/src/compiler/plugin/proc_macro/host/post.rs b/scarb/src/compiler/plugin/proc_macro/host/post.rs
new file mode 100644
index 000000000..078bfef0e
--- /dev/null
+++ b/scarb/src/compiler/plugin/proc_macro/host/post.rs
@@ -0,0 +1,113 @@
+use crate::compiler::plugin::proc_macro::host::FULL_PATH_MARKER_KEY;
+use crate::compiler::plugin::proc_macro::ProcMacroHostPlugin;
+use crate::core::PackageId;
+use anyhow::Result;
+use cairo_lang_defs::ids::{ModuleItemId, TopLevelLanguageElementId};
+use cairo_lang_diagnostics::ToOption;
+use cairo_lang_macro::FullPathMarker;
+use cairo_lang_semantic::db::SemanticGroup;
+use cairo_lang_semantic::items::attribute::SemanticQueryAttrs;
+use cairo_lang_syntax::attribute::structured::{Attribute, AttributeArgVariant};
+use cairo_lang_syntax::node::ast::Expr;
+use itertools::Itertools;
+use std::collections::HashMap;
+use tracing::{debug, trace_span};
+
+impl ProcMacroHostPlugin {
+    #[tracing::instrument(level = "trace", skip_all)]
+    pub fn post_process(&self, db: &dyn SemanticGroup) -> Result<()> {
+        let markers = self.collect_full_path_markers(db);
+
+        let aux_data = self.collect_aux_data(db);
+        for instance in self.macros.iter() {
+            let _ = trace_span!(
+                "post_process_callback",
+                instance = %instance.package_id()
+            )
+            .entered();
+            let instance_markers = self
+                .full_path_markers
+                .read()
+                .unwrap()
+                .get(&instance.package_id())
+                .cloned()
+                .unwrap_or_default();
+            let markers_for_instance = markers
+                .iter()
+                .filter(|(key, _)| instance_markers.contains(key))
+                .map(|(key, full_path)| FullPathMarker {
+                    key: key.clone(),
+                    full_path: full_path.clone(),
+                })
+                .collect_vec();
+            let data = aux_data
+                .get(&instance.package_id())
+                .cloned()
+                .unwrap_or_default();
+            debug!("calling post processing callback with: {data:?}");
+            instance.post_process_callback(data.clone(), markers_for_instance);
+        }
+        Ok(())
+    }
+
+    fn collect_full_path_markers(&self, db: &dyn SemanticGroup) -> HashMap<String, String> {
+        let mut markers: HashMap<String, String> = HashMap::new();
+        // FULL_PATH_MARKER_KEY
+        for crate_id in db.crates() {
+            let modules = db.crate_modules(crate_id);
+            for module_id in modules.iter() {
+                let Ok(module_items) = db.module_items(*module_id) else {
+                    continue;
+                };
+                for item_id in module_items.iter() {
+                    let attr = match item_id {
+                        ModuleItemId::Struct(id) => {
+                            id.query_attr(db, FULL_PATH_MARKER_KEY).to_option()
+                        }
+                        ModuleItemId::Enum(id) => {
+                            id.query_attr(db, FULL_PATH_MARKER_KEY).to_option()
+                        }
+                        ModuleItemId::FreeFunction(id) => {
+                            id.query_attr(db, FULL_PATH_MARKER_KEY).to_option()
+                        }
+                        _ => None,
+                    };
+
+                    let keys = attr
+                        .unwrap_or_default()
+                        .into_iter()
+                        .filter_map(|attr| Self::extract_key(db, attr))
+                        .collect_vec();
+                    let full_path = item_id.full_path(db.upcast());
+                    for key in keys {
+                        markers.insert(key, full_path.clone());
+                    }
+                }
+            }
+        }
+        markers
+    }
+
+    fn extract_key(db: &dyn SemanticGroup, attr: Attribute) -> Option<String> {
+        if attr.id != FULL_PATH_MARKER_KEY {
+            return None;
+        }
+
+        for arg in attr.args.clone() {
+            if let AttributeArgVariant::Unnamed(Expr::String(s)) = arg.variant {
+                return s.string_value(db.upcast());
+            }
+        }
+
+        None
+    }
+
+    pub(crate) fn register_full_path_markers(&self, package_id: PackageId, markers: Vec<String>) {
+        self.full_path_markers
+            .write()
+            .unwrap()
+            .entry(package_id)
+            .and_modify(|markers| markers.extend(markers.clone()))
+            .or_insert(markers);
+    }
+}
diff --git a/scarb/tests/proc_macro_prebuilt.rs b/scarb/tests/proc_macro_prebuilt.rs
index d58de21b3..d7b142a95 100644
--- a/scarb/tests/proc_macro_prebuilt.rs
+++ b/scarb/tests/proc_macro_prebuilt.rs
@@ -1,7 +1,7 @@
 use assert_fs::fixture::{ChildPath, FileWriteStr, PathCreateDir};
 use assert_fs::prelude::PathChild;
 use assert_fs::TempDir;
-use cairo_lang_macro::TokenStream;
+use cairo_lang_macro::{TextSpan, Token, TokenStream, TokenTree};
 use indoc::indoc;
 use libloading::library_filename;
 use scarb_proc_macro_server_types::methods::expand::{ExpandInline, ExpandInlineMacroParams};
@@ -220,9 +220,18 @@ fn load_prebuilt_proc_macros() {
     let response = proc_macro_server
         .request_and_wait::<ExpandInline>(ExpandInlineMacroParams {
             name: "some".to_string(),
-            args: TokenStream::new("42".to_string()),
+            args: TokenStream::new(vec![TokenTree::Ident(Token::new(
+                "42",
+                TextSpan::default(),
+            ))]),
         })
         .unwrap();
 
     assert_eq!(response.diagnostics, vec![]);
-    assert_eq!(response.token_stream, TokenStream::new("42".to_string()));
+    assert_eq!(
+        response.token_stream,
+        TokenStream::new(vec![TokenTree::Ident(Token::new(
+            "42",
+            TextSpan::default(),
+        ))])
+    );
 }

From b653765250da0f2cbfad8ac24b40cb74d6f6015d Mon Sep 17 00:00:00 2001
From: maciektr
Date: Mon, 25 Nov 2024 17:41:32 +0100
Subject: [PATCH 07/13] Skip whitespace prefix when creating the span (#1761)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

commit-id:f0ce5a05
---
**Stack**:
- #1749
- #1748
- #1745
- #1761 ⬅

⚠️ *Part of a stack created by [spr](https://github.com/ejoffe/spr). Do not merge manually using the UI - doing so may have unexpected results.*
---
 scarb/src/compiler/plugin/proc_macro/types.rs | 50 ++++++++++++++++++-
 1 file changed, 48 insertions(+), 2 deletions(-)

diff --git a/scarb/src/compiler/plugin/proc_macro/types.rs b/scarb/src/compiler/plugin/proc_macro/types.rs
index 30ad3e2db..412d81cf8 100644
--- a/scarb/src/compiler/plugin/proc_macro/types.rs
+++ b/scarb/src/compiler/plugin/proc_macro/types.rs
@@ -2,6 +2,7 @@ use cairo_lang_macro::{
     AllocationContext, TextSpan, Token, TokenStream, TokenStreamMetadata, TokenTree,
 };
 use cairo_lang_syntax::node::{db::SyntaxGroup, SyntaxNode};
+use std::ops::Add;
 
 /// Helps creating TokenStream based on multiple SyntaxNodes,
 /// which aren't descendants or ascendants of each other inside the SyntaxTree.
@@ -46,10 +47,55 @@ impl<'a> TokenStreamBuilder<'a> {
 
     pub fn token_from_syntax_node(&self, node: SyntaxNode, ctx: &AllocationContext) -> Token {
         let span = node.span(self.db).to_str_range();
+        let text = node.get_text(self.db);
         let span = TextSpan {
-            start: span.start,
+            // We skip the whitespace prefix, so that diagnostics start where the actual token contents is.
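+            // For example, the `whitespace_skipped` test below hands this function
+            // a node whose text is "    let " with a raw span of 12..20; the stored
+            // span becomes 16..20, skipping the four leading spaces.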
+ start: span.start.add(whitespace_prefix_len(&text)), end: span.end, }; - Token::new_in(node.get_text(self.db), span, ctx) + Token::new_in(text, span, ctx) + } +} + +fn whitespace_prefix_len(s: &str) -> usize { + s.chars().take_while(|c| c.is_whitespace()).count() +} + +#[cfg(test)] +mod tests { + use crate::compiler::plugin::proc_macro::TokenStreamBuilder; + use cairo_lang_macro::{AllocationContext, TextSpan, TokenStream, TokenTree}; + use cairo_lang_parser::utils::SimpleParserDatabase; + use indoc::indoc; + + #[test] + fn whitespace_skipped() { + let db = SimpleParserDatabase::default(); + let mut builder = TokenStreamBuilder::new(&db); + let content = indoc! {r#" + fn main() { + let x = 42; + } + "#}; + let parsed = db.parse_virtual(content).unwrap(); + builder.add_node(parsed); + let ctx = AllocationContext::default(); + let token_stream = builder.build(&ctx); + let token_at = |token_stream: &TokenStream, idx: usize| { + let token: TokenTree = token_stream.tokens[idx].clone(); + match token { + TokenTree::Ident(token) => token, + } + }; + let token = token_at(&token_stream, 4); + assert_eq!(token.content.as_ref(), "{\n"); + assert_eq!(token.span, TextSpan { start: 10, end: 12 }); + let token = token_at(&token_stream, 5); + assert_eq!(token.content.as_ref(), " let "); + // Note we skip 4 whitespaces characters in the span. + assert_eq!(token.span, TextSpan { start: 16, end: 20 }); + let token = token_at(&token_stream, 6); + assert_eq!(token.content.as_ref(), "x "); + assert_eq!(token.span, TextSpan { start: 20, end: 22 }); } } From 5fa5fa1e1b3f4a456bee0283ed15c93ba50f8e61 Mon Sep 17 00:00:00 2001 From: maciektr Date: Tue, 26 Nov 2024 09:37:04 +0100 Subject: [PATCH 08/13] Remove confusing default from token stream types (#1745) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit commit-id:e6e81399 --- **Stack**: - #1749 - #1748 - #1745 ⬅ ⚠️ *Part of a stack created by [spr](https://github.com/ejoffe/spr). Do not merge manually using the UI - doing so may have unexpected results.* --- plugins/cairo-lang-macro/src/types/token.rs | 23 ++++--------------- scarb/tests/proc_macro_server.rs | 6 ++--- .../src/methods/mod.rs | 11 ++++++++- 3 files changed, 17 insertions(+), 23 deletions(-) diff --git a/plugins/cairo-lang-macro/src/types/token.rs b/plugins/cairo-lang-macro/src/types/token.rs index 6d1bf5adc..24a4fe7ec 100644 --- a/plugins/cairo-lang-macro/src/types/token.rs +++ b/plugins/cairo-lang-macro/src/types/token.rs @@ -10,7 +10,7 @@ use std::rc::Rc; /// This is both input and part of an output of a procedural macro. #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] #[cfg_attr(feature = "serde", serde(try_from = "deserializer::TokenStream"))] -#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TokenStream { pub tokens: Vec, pub metadata: TokenStreamMetadata, @@ -85,12 +85,6 @@ pub enum TokenTree { Ident(Token), } -impl Default for TokenTree { - fn default() -> Self { - Self::Ident(Default::default()) - } -} - impl TokenTree { /// Get the size hint for the [`TokenTree`]. /// This can be used to estimate size of a buffer needed for allocating this [`TokenTree`]. @@ -103,7 +97,7 @@ impl TokenTree { /// A range of text offsets that form a span (like text selection). 
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TextSpan { pub start: usize, pub end: usize, @@ -113,7 +107,7 @@ pub struct TextSpan { /// /// The most atomic item of Cairo code representation. #[cfg_attr(feature = "serde", derive(serde::Serialize))] -#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Token { pub content: InternedStr, pub span: TextSpan, @@ -158,15 +152,6 @@ impl InternedStr { } } -impl Default for InternedStr { - fn default() -> Self { - Self { - ptr: "" as *const str, - _bump: Rc::default(), - } - } -} - impl AsRef for InternedStr { fn as_ref(&self) -> &str { self.deref() @@ -203,7 +188,7 @@ impl Hash for InternedStr { } /// This wrapper de-allocates the underlying buffer on drop. -#[derive(Debug, Default)] +#[derive(Debug)] pub(crate) struct BumpWrap(pub Bump); impl Drop for BumpWrap { diff --git a/scarb/tests/proc_macro_server.rs b/scarb/tests/proc_macro_server.rs index 8950a5b12..455df2c8b 100644 --- a/scarb/tests/proc_macro_server.rs +++ b/scarb/tests/proc_macro_server.rs @@ -91,7 +91,7 @@ fn expand_attribute() { args: TokenStream::empty(), item: TokenStream::new(vec![TokenTree::Ident(Token::new( "fn some_test_fn(){}", - TextSpan::default(), + TextSpan::new(0, 0), ))]), }) .unwrap(); @@ -125,7 +125,7 @@ fn expand_derive() { let item = TokenStream::new(vec![TokenTree::Ident(Token::new( "fn some_test_fn(){}", - TextSpan::default(), + TextSpan::new(0, 0), ))]); let response = proc_macro_server @@ -182,7 +182,7 @@ fn expand_inline() { name: "replace_all_15_with_25".to_string(), args: TokenStream::new(vec![TokenTree::Ident(Token::new( "struct A { field: 15 , other_field: macro_call!(12)}", - TextSpan::default(), + TextSpan::new(0, 0), ))]), }) .unwrap(); diff --git a/utils/scarb-proc-macro-server-types/src/methods/mod.rs b/utils/scarb-proc-macro-server-types/src/methods/mod.rs index de8377f4a..102d355f7 100644 --- a/utils/scarb-proc-macro-server-types/src/methods/mod.rs +++ b/utils/scarb-proc-macro-server-types/src/methods/mod.rs @@ -15,10 +15,19 @@ pub trait Method { /// /// This struct encapsulates both the resulting token stream from macro expansion /// and any diagnostic messages (e.g., errors or warnings) that were generated during processing. -#[derive(Debug, Clone, Default, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] pub struct ProcMacroResult { /// The resultant token stream produced after the macro expansion. pub token_stream: TokenStream, /// A list of diagnostics produced during the macro execution. pub diagnostics: Vec, } + +impl Default for ProcMacroResult { + fn default() -> Self { + Self { + token_stream: TokenStream::empty(), + diagnostics: Vec::new(), + } + } +} From febd350aacdfcc0a645e05c14d6282a3c80a5031 Mon Sep 17 00:00:00 2001 From: maciektr Date: Wed, 4 Dec 2024 11:50:13 +0100 Subject: [PATCH 09/13] Make TextSpan u32 to match the compiler one (#1748) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit commit-id:107b5e17 --- **Stack**: - #1749 - #1748 ⬅ ⚠️ *Part of a stack created by [spr](https://github.com/ejoffe/spr). 
Do not merge manually using the UI - doing so may have unexpected results.* --- plugins/cairo-lang-macro-stable/src/lib.rs | 4 +-- plugins/cairo-lang-macro/src/types/mod.rs | 2 +- plugins/cairo-lang-macro/src/types/token.rs | 8 ++++-- scarb/src/compiler/plugin/proc_macro/types.rs | 11 ++++---- scarb/tests/build_cairo_plugin.rs | 28 +++++++++---------- scarb/tests/proc_macro_server.rs | 4 +-- .../src/proc_macro_server.rs | 2 +- 7 files changed, 30 insertions(+), 29 deletions(-) diff --git a/plugins/cairo-lang-macro-stable/src/lib.rs b/plugins/cairo-lang-macro-stable/src/lib.rs index c836ed850..f9458a3a5 100644 --- a/plugins/cairo-lang-macro-stable/src/lib.rs +++ b/plugins/cairo-lang-macro-stable/src/lib.rs @@ -16,8 +16,8 @@ pub struct StableToken { #[repr(C)] #[derive(Debug)] pub struct StableTextSpan { - pub start: usize, - pub end: usize, + pub start: u32, + pub end: u32, } #[repr(C)] diff --git a/plugins/cairo-lang-macro/src/types/mod.rs b/plugins/cairo-lang-macro/src/types/mod.rs index 45ab5fca2..a679ed3f9 100644 --- a/plugins/cairo-lang-macro/src/types/mod.rs +++ b/plugins/cairo-lang-macro/src/types/mod.rs @@ -45,7 +45,7 @@ pub struct ProcMacroResult { /// TokenTree::Ident( /// Token::new( /// &code, -/// TextSpan::new(0, code.len()) +/// TextSpan::new(0, code.len() as u32) /// ) /// ) /// ]); diff --git a/plugins/cairo-lang-macro/src/types/token.rs b/plugins/cairo-lang-macro/src/types/token.rs index 24a4fe7ec..f6cfc2cea 100644 --- a/plugins/cairo-lang-macro/src/types/token.rs +++ b/plugins/cairo-lang-macro/src/types/token.rs @@ -95,12 +95,14 @@ impl TokenTree { } } +pub type TextOffset = u32; + /// A range of text offsets that form a span (like text selection). #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TextSpan { - pub start: usize, - pub end: usize, + pub start: TextOffset, + pub end: TextOffset, } /// A single Cairo token. @@ -311,7 +313,7 @@ impl TokenTree { impl TextSpan { /// Create a new [`TextSpan`]. - pub fn new(start: usize, end: usize) -> TextSpan { + pub fn new(start: TextOffset, end: TextOffset) -> TextSpan { TextSpan { start, end } } } diff --git a/scarb/src/compiler/plugin/proc_macro/types.rs b/scarb/src/compiler/plugin/proc_macro/types.rs index 412d81cf8..ca4090484 100644 --- a/scarb/src/compiler/plugin/proc_macro/types.rs +++ b/scarb/src/compiler/plugin/proc_macro/types.rs @@ -2,7 +2,6 @@ use cairo_lang_macro::{ AllocationContext, TextSpan, Token, TokenStream, TokenStreamMetadata, TokenTree, }; use cairo_lang_syntax::node::{db::SyntaxGroup, SyntaxNode}; -use std::ops::Add; /// Helps creating TokenStream based on multiple SyntaxNodes, /// which aren't descendants or ascendants of each other inside the SyntaxTree. @@ -46,19 +45,19 @@ impl<'a> TokenStreamBuilder<'a> { } pub fn token_from_syntax_node(&self, node: SyntaxNode, ctx: &AllocationContext) -> Token { - let span = node.span(self.db).to_str_range(); + let span = node.span(self.db); let text = node.get_text(self.db); let span = TextSpan { // We skip the whitespace prefix, so that diagnostics start where the actual token contents is. 
- start: span.start.add(whitespace_prefix_len(&text)), - end: span.end, + start: span.start.as_u32() + whitespace_prefix_len(&text), + end: span.end.as_u32(), }; Token::new_in(text, span, ctx) } } -fn whitespace_prefix_len(s: &str) -> usize { - s.chars().take_while(|c| c.is_whitespace()).count() +fn whitespace_prefix_len(s: &str) -> u32 { + s.chars().take_while(|c| c.is_whitespace()).count() as u32 } #[cfg(test)] diff --git a/scarb/tests/build_cairo_plugin.rs b/scarb/tests/build_cairo_plugin.rs index f2b010ad9..2974621c7 100644 --- a/scarb/tests/build_cairo_plugin.rs +++ b/scarb/tests/build_cairo_plugin.rs @@ -412,7 +412,7 @@ fn can_replace_original_node() { let new_token_string = token_stream.to_string().replace("12", "34"); let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( new_token_string.clone(), - TextSpan { start: 0, end: new_token_string.len() }, + TextSpan { start: 0, end: new_token_string.len() as u32 }, ))]); ProcMacroResult::new(token_stream) } @@ -581,7 +581,7 @@ fn can_define_multiple_macros() { let new_token_string = token_stream.to_string().replace("12", "34"); let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( new_token_string.clone(), - TextSpan { start: 0, end: new_token_string.len() }, + TextSpan { start: 0, end: new_token_string.len() as u32 }, ))]); let aux_data = AuxData::new(Vec::new()); ProcMacroResult::new(token_stream).with_aux_data(aux_data) @@ -592,7 +592,7 @@ fn can_define_multiple_macros() { let new_token_string = token_stream.to_string().replace("56", "78"); let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( new_token_string.clone(), - TextSpan { start: 0, end: new_token_string.len() }, + TextSpan { start: 0, end: new_token_string.len() as u32 }, ))]); let aux_data = AuxData::new(Vec::new()); ProcMacroResult::new(token_stream).with_aux_data(aux_data) @@ -616,7 +616,7 @@ fn can_define_multiple_macros() { let new_token_string = token_stream.to_string().replace("90", "09"); let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( new_token_string.clone(), - TextSpan { start: 0, end: new_token_string.len() }, + TextSpan { start: 0, end: new_token_string.len() as u32 }, ))]); let aux_data = AuxData::new(Vec::new()); ProcMacroResult::new(token_stream).with_aux_data(aux_data) @@ -819,7 +819,7 @@ fn can_resolve_full_path_markers() { code.clone(), TextSpan { start: 0, - end: code.len(), + end: code.len() as u32, }, ))]) ).with_full_path_markers(full_path_markers) @@ -990,7 +990,7 @@ fn can_implement_derive_macro() { code.clone(), TextSpan { start: 0, - end: code.len(), + end: code.len() as u32, }, ))]); @@ -1051,7 +1051,7 @@ fn can_use_both_derive_and_attr() { new_token_string.clone(), TextSpan { start: 0, - end: new_token_string.len(), + end: new_token_string.len() as u32, }, ))])) } @@ -1063,7 +1063,7 @@ fn can_use_both_derive_and_attr() { code.clone(), TextSpan { start: 0, - end: code.len(), + end: code.len() as u32, }, ))]); @@ -1072,7 +1072,7 @@ fn can_use_both_derive_and_attr() { result_string.clone(), TextSpan { start: 0, - end: result_string.len(), + end: result_string.len() as u32, }, ))])) } @@ -1091,7 +1091,7 @@ fn can_use_both_derive_and_attr() { code.clone(), TextSpan { start: 0, - end: code.len(), + end: code.len() as u32, }, ))])) } @@ -1298,7 +1298,7 @@ fn can_be_expanded() { let new_token_string = token_stream.to_string().replace("12", "34"); let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( new_token_string.clone(), - TextSpan { start: 0, end: 
new_token_string.len() }, + TextSpan { start: 0, end: new_token_string.len() as u32 }, ))]); ProcMacroResult::new(token_stream) } @@ -1328,7 +1328,7 @@ fn can_be_expanded() { let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( code.clone(), - TextSpan { start: 0, end: code.len() }, + TextSpan { start: 0, end: code.len() as u32 }, ))]); ProcMacroResult::new(token_stream) @@ -1414,7 +1414,7 @@ fn can_expand_trait_inner_func_attrr() { .replace("12", "34"); ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new( new_token_string.clone(), - TextSpan { start: 0, end: new_token_string.len() }, + TextSpan { start: 0, end: new_token_string.len() as u32 }, ))])) } "##}) @@ -1474,7 +1474,7 @@ fn can_expand_impl_inner_func_attrr() { let new_token_string = token_stream.to_string().replace("1", "2"); ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new( new_token_string.clone(), - TextSpan { start: 0, end: new_token_string.len() }, + TextSpan { start: 0, end: new_token_string.len() as u32 }, ))])) } "##}) diff --git a/scarb/tests/proc_macro_server.rs b/scarb/tests/proc_macro_server.rs index 455df2c8b..58890a4fa 100644 --- a/scarb/tests/proc_macro_server.rs +++ b/scarb/tests/proc_macro_server.rs @@ -58,7 +58,7 @@ fn expand_attribute() { let output = input.replace(name, "very_new_name"); - let span = TextSpan { start: 0, end: output.len() }; + let span = TextSpan { start: 0, end: output.len() as u32 }; ProcMacroResult::new( TokenStream::new(vec![ TokenTree::Ident( @@ -151,7 +151,7 @@ fn expand_inline() { #[inline_macro] pub fn replace_all_15_with_25(token_stream: TokenStream) -> ProcMacroResult { let content = token_stream.to_string().replace("15", "25"); - let span = TextSpan { start: 0, end: content.len() }; + let span = TextSpan { start: 0, end: content.len() as u32 }; ProcMacroResult::new( TokenStream::new(vec![ TokenTree::Ident( diff --git a/utils/scarb-test-support/src/proc_macro_server.rs b/utils/scarb-test-support/src/proc_macro_server.rs index c2d06bd26..2efd314b0 100644 --- a/utils/scarb-test-support/src/proc_macro_server.rs +++ b/utils/scarb-test-support/src/proc_macro_server.rs @@ -41,7 +41,7 @@ pub fn inline_some(token_stream: TokenStream) -> ProcMacroResult { #[derive_macro] fn some_derive(_token_stream: TokenStream)-> ProcMacroResult { let content = "impl SomeImpl of SomeTrait {}".to_string(); - let span = TextSpan { start: 0, end: content.len() }; + let span = TextSpan { start: 0, end: content.len() as u32 }; ProcMacroResult::new( TokenStream::new(vec![ TokenTree::Ident( From 393ad684d607589e8921cb464c7e1c98b89fdfac Mon Sep 17 00:00:00 2001 From: Maksim Zdobnikau <43750648+DelevoXDG@users.noreply.github.com> Date: Wed, 4 Dec 2024 11:50:48 +0100 Subject: [PATCH 10/13] Implement code mappings (#1756) Closes #1647 --------- Co-authored-by: maciektr --- .../plugin/proc_macro/host/attribute.rs | 5 +- .../compiler/plugin/proc_macro/host/derive.rs | 44 ++-- .../compiler/plugin/proc_macro/host/inline.rs | 5 +- .../compiler/plugin/proc_macro/host/mod.rs | 33 ++- scarb/tests/build_cairo_plugin.rs | 198 +++++++++++++++++- 5 files changed, 264 insertions(+), 21 deletions(-) diff --git a/scarb/src/compiler/plugin/proc_macro/host/attribute.rs b/scarb/src/compiler/plugin/proc_macro/host/attribute.rs index 522947965..e9ecaa122 100644 --- a/scarb/src/compiler/plugin/proc_macro/host/attribute.rs +++ b/scarb/src/compiler/plugin/proc_macro/host/attribute.rs @@ -1,5 +1,5 @@ use crate::compiler::plugin::proc_macro::host::aux_data::{EmittedAuxData, 
ProcMacroAuxData}; -use crate::compiler::plugin::proc_macro::host::into_cairo_diagnostics; +use crate::compiler::plugin::proc_macro::host::{generate_code_mappings, into_cairo_diagnostics}; use crate::compiler::plugin::proc_macro::{ Expansion, ExpansionKind, ProcMacroHostPlugin, ProcMacroId, TokenStreamBuilder, }; @@ -407,11 +407,12 @@ impl ProcMacroHostPlugin { } let file_name = format!("proc_{}", input.expansion.name); + let code_mappings = generate_code_mappings(&result.token_stream); let content = result.token_stream.to_string(); PluginResult { code: Some(PluginGeneratedFile { name: file_name.into(), - code_mappings: Vec::new(), + code_mappings, content, diagnostics_note: Some(format!( "this error originates in the attribute macro: `{}`", diff --git a/scarb/src/compiler/plugin/proc_macro/host/derive.rs b/scarb/src/compiler/plugin/proc_macro/host/derive.rs index 6c771d88c..3f1f82d61 100644 --- a/scarb/src/compiler/plugin/proc_macro/host/derive.rs +++ b/scarb/src/compiler/plugin/proc_macro/host/derive.rs @@ -1,13 +1,14 @@ use crate::compiler::plugin::proc_macro::host::aux_data::{EmittedAuxData, ProcMacroAuxData}; -use crate::compiler::plugin::proc_macro::host::{into_cairo_diagnostics, DERIVE_ATTR}; +use crate::compiler::plugin::proc_macro::host::{ + generate_code_mappings, into_cairo_diagnostics, DERIVE_ATTR, +}; use crate::compiler::plugin::proc_macro::{ Expansion, ExpansionKind, ProcMacroHostPlugin, ProcMacroId, TokenStreamBuilder, }; -use cairo_lang_defs::patcher::PatchBuilder; use cairo_lang_defs::plugin::{DynGeneratedFileAuxData, PluginGeneratedFile, PluginResult}; -use cairo_lang_macro::{ - AllocationContext, Diagnostic, TokenStream, TokenStreamMetadata, TokenTree, -}; +use cairo_lang_filesystem::ids::CodeMapping; +use cairo_lang_filesystem::span::TextWidth; +use cairo_lang_macro::{AllocationContext, Diagnostic, TokenStream, TokenStreamMetadata}; use cairo_lang_syntax::attribute::structured::{AttributeArgVariant, AttributeStructurize}; use cairo_lang_syntax::node::ast::{Expr, PathSegment}; use cairo_lang_syntax::node::db::SyntaxGroup; @@ -73,7 +74,10 @@ impl ProcMacroHostPlugin { let any_derives = !derives.is_empty(); let ctx = AllocationContext::default(); - let mut derived_code = PatchBuilder::new(db, &item_ast); + let mut derived_code = String::new(); + let mut code_mappings = Vec::new(); + let mut current_width = TextWidth::default(); + for derive in derives.iter() { let token_stream = token_stream_builder.build(&ctx); let result = self.instance(derive.package_id).generate_code( @@ -99,17 +103,15 @@ impl ProcMacroHostPlugin { continue; } - for token in result.token_stream.tokens { - match token { - TokenTree::Ident(token) => { - derived_code.add_str(token.content.as_ref()); - } - } - } + code_mappings.extend(generate_code_mappings_with_offset( + &result.token_stream, + current_width, + )); + current_width = current_width + TextWidth::from_str(&result.token_stream.to_string()); + derived_code.push_str(&result.token_stream.to_string()); } if any_derives { - let derived_code = derived_code.build().0; return Some(PluginResult { code: if derived_code.is_empty() { None @@ -126,7 +128,7 @@ impl ProcMacroHostPlugin { let note = format!("this error originates in {msg}: `{derive_names}`"); Some(PluginGeneratedFile { name: "proc_macro_derive".into(), - code_mappings: Vec::new(), + code_mappings, content: derived_code, aux_data: if aux_data.is_empty() { None @@ -146,3 +148,15 @@ impl ProcMacroHostPlugin { None } } + +fn generate_code_mappings_with_offset( + token_stream: &TokenStream, + 
offset: TextWidth, +) -> Vec { + let mut mappings = generate_code_mappings(token_stream); + for mapping in &mut mappings { + mapping.span.start = mapping.span.start.add_width(offset); + mapping.span.end = mapping.span.end.add_width(offset); + } + mappings +} diff --git a/scarb/src/compiler/plugin/proc_macro/host/inline.rs b/scarb/src/compiler/plugin/proc_macro/host/inline.rs index 225a1b72d..c8da34d55 100644 --- a/scarb/src/compiler/plugin/proc_macro/host/inline.rs +++ b/scarb/src/compiler/plugin/proc_macro/host/inline.rs @@ -1,5 +1,5 @@ use crate::compiler::plugin::proc_macro::host::aux_data::{EmittedAuxData, ProcMacroAuxData}; -use crate::compiler::plugin::proc_macro::host::into_cairo_diagnostics; +use crate::compiler::plugin::proc_macro::host::{generate_code_mappings, into_cairo_diagnostics}; use crate::compiler::plugin::proc_macro::{ Expansion, ProcMacroId, ProcMacroInstance, TokenStreamBuilder, }; @@ -77,10 +77,11 @@ impl InlineMacroExprPlugin for ProcMacroInlinePlugin { DynGeneratedFileAuxData::new(emitted) }); let content = token_stream.to_string(); + let code_mappings = generate_code_mappings(&token_stream); InlinePluginResult { code: Some(PluginGeneratedFile { name: "inline_proc_macro".into(), - code_mappings: Vec::new(), + code_mappings, content, aux_data, diagnostics_note: Some(format!( diff --git a/scarb/src/compiler/plugin/proc_macro/host/mod.rs b/scarb/src/compiler/plugin/proc_macro/host/mod.rs index b6248245e..bf8e81f6d 100644 --- a/scarb/src/compiler/plugin/proc_macro/host/mod.rs +++ b/scarb/src/compiler/plugin/proc_macro/host/mod.rs @@ -15,7 +15,11 @@ use anyhow::{ensure, Context, Result}; use cairo_lang_defs::plugin::PluginDiagnostic; use cairo_lang_defs::plugin::{MacroPlugin, MacroPluginMetadata, PluginResult}; use cairo_lang_filesystem::db::Edition; -use cairo_lang_macro::{AllocationContext, Diagnostic, Severity, TokenStreamMetadata}; +use cairo_lang_filesystem::ids::{CodeMapping, CodeOrigin}; +use cairo_lang_filesystem::span::{TextOffset, TextSpan, TextWidth}; +use cairo_lang_macro::{ + AllocationContext, Diagnostic, Severity, TokenStream, TokenStreamMetadata, TokenTree, +}; use cairo_lang_semantic::plugin::PluginSuite; use cairo_lang_syntax::node::db::SyntaxGroup; use cairo_lang_syntax::node::ids::SyntaxStablePtrId; @@ -267,3 +271,30 @@ impl ProcMacroHost { &self.macros } } + +fn generate_code_mappings(token_stream: &TokenStream) -> Vec { + token_stream + .tokens + .iter() + .scan(TextOffset::default(), |current_pos, token| { + let TokenTree::Ident(token) = token; + let token_width = TextWidth::from_str(token.content.as_ref()); + + let mapping = CodeMapping { + span: TextSpan { + start: *current_pos, + end: current_pos.add_width(token_width), + }, + origin: CodeOrigin::Span(TextSpan { + start: TextOffset::default() + .add_width(TextWidth::new_for_testing(token.span.start)), + end: TextOffset::default() + .add_width(TextWidth::new_for_testing(token.span.end)), + }), + }; + + *current_pos = current_pos.add_width(token_width); + Some(mapping) + }) + .collect() +} diff --git a/scarb/tests/build_cairo_plugin.rs b/scarb/tests/build_cairo_plugin.rs index 2974621c7..d0530cdbc 100644 --- a/scarb/tests/build_cairo_plugin.rs +++ b/scarb/tests/build_cairo_plugin.rs @@ -984,7 +984,7 @@ fn can_implement_derive_macro() { 32 }} }} - "#}; + "#}; let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( code.clone(), @@ -1573,3 +1573,199 @@ fn can_expand_impl_inner_func_attrr() { "#}); } + +#[test] +fn code_mappings_preserve_attribute_error_locations() { + let temp = 
TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .lib_rs(indoc! {r#" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, TokenTree, Token, TextSpan}; + + #[attribute_macro] + pub fn some(_attr: TokenStream, mut token_stream: TokenStream) -> ProcMacroResult { + let token_stream_length = token_stream.to_string().len() as u32; + token_stream.tokens.push(TokenTree::Ident(Token::new(" ", TextSpan { start: token_stream_length + 1, end: token_stream_length + 5 }))); + ProcMacroResult::new(token_stream) + } + "#}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[some] + fn f() -> felt252 { + let x = 1; + x = 2; + x + } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .failure() + .stdout_matches(indoc! {r#" + [..] Compiling some v1.0.0 ([..]Scarb.toml) + [..] Compiling hello v1.0.0 ([..]Scarb.toml) + error: Cannot assign to an immutable variable. + --> [..]lib.cairo[proc_some]:3:5 + x = 2; + ^***^ + note: this error originates in the attribute macro: `some` + + error: could not compile `hello` due to previous error + "#}); +} + +#[test] +fn code_mappings_preserve_inline_macro_error_locations() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{inline_macro, ProcMacroResult, TokenStream, TokenTree, Token, TextSpan}; + + #[inline_macro] + pub fn some(_token_stream: TokenStream) -> ProcMacroResult { + let mut tokens = Vec::new(); + tokens.push(TokenTree::Ident(Token::new( + "undefined".to_string(), + TextSpan::new(0, 9), + ))); + + ProcMacroResult::new(TokenStream::new(tokens)) + } + "##}) + .build(&t); + + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + fn main() -> felt252 { + let _x = some!(); + 12 + } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .failure() + .stdout_matches(indoc! {r#" + [..] Compiling some v1.0.0 ([..]Scarb.toml) + [..] Compiling hello v1.0.0 ([..]Scarb.toml) + error: Identifier not found. + --> [..]lib.cairo:1:1 + fn main() -> felt252 { + ^*******^ + + error: could not compile `hello` due to previous error + "#}); +} + +#[test] +fn code_mappings_preserve_derive_error_locations() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .lib_rs(indoc! 
{r##" + use cairo_lang_macro::{derive_macro, ProcMacroResult, TokenStream, TokenTree, Token, TextSpan}; + + #[derive_macro] + pub fn custom_derive(token_stream: TokenStream) -> ProcMacroResult { + let name = token_stream + .clone() + .to_string() + .lines() + .find(|l| l.starts_with("struct")) + .unwrap() + .to_string() + .replace("struct", "") + .replace("}", "") + .replace("{", "") + .trim() + .to_string(); + + let code = indoc::formatdoc!{r#" + impl SomeImpl{name} of Hello<{name}> {{ + fn world(self: @{name}) -> u8 {{ + 256 + }} + }} + "#}; + + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + code.clone(), + TextSpan { + start: 0, + end: code.len() as u32, + }, + ))]); + + ProcMacroResult::new(token_stream) + } + "##}) + .add_dep(r#"indoc = "*""#) + .build(&t); + + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + trait Hello { + fn world(self: @T) -> u8; + } + + #[derive(CustomDerive, Drop)] + struct SomeType {} + + #[derive(CustomDerive, Drop)] + struct AnotherType {} + + fn main() -> u8 { + let a = SomeType {}; + a.world() + } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .failure() + .stdout_matches(indoc! {r#" + [..] Compiling some v1.0.0 ([..]Scarb.toml) + [..] Compiling hello v1.0.0 ([..]Scarb.toml) + error: The value does not fit within the range of type core::integer::u8. + --> [..]lib.cairo:1:1 + trait Hello { + ^**************^ + note: this error originates in the derive macro: `custom_derive` + + error: The value does not fit within the range of type core::integer::u8. + --> [..]lib.cairo:1:1 + trait Hello { + ^**************^ + note: this error originates in the derive macro: `custom_derive` + + error: could not compile `hello` due to previous error + "#}); +} From f4249959949e240b950fc0c926099dad170d7394 Mon Sep 17 00:00:00 2001 From: maciektr Date: Thu, 5 Dec 2024 10:56:10 +0100 Subject: [PATCH 11/13] Pass call site to macro expansion (#1749) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit commit-id:206643a1 --- **Stack**: - #1811 - #1810 - #1749 ⬅ ⚠️ *Part of a stack created by [spr](https://github.com/ejoffe/spr). 
Do not merge manually using the UI - doing so may have unexpected results.* --- plugins/cairo-lang-macro/Cargo.toml | 2 +- plugins/cairo-lang-macro/src/lib.rs | 7 +- plugins/cairo-lang-macro/src/types/token.rs | 22 ++++- scarb/src/compiler/plugin/proc_macro/ffi.rs | 16 +++- .../plugin/proc_macro/host/attribute.rs | 90 +++++++++--------- .../plugin/proc_macro/host/conversion.rs | 49 ++++++++++ .../compiler/plugin/proc_macro/host/derive.rs | 94 +++++++++++-------- .../compiler/plugin/proc_macro/host/inline.rs | 12 ++- .../compiler/plugin/proc_macro/host/mod.rs | 39 ++------ .../methods/expand_attribute.rs | 7 +- .../methods/expand_derive.rs | 1 + .../methods/expand_inline.rs | 7 +- scarb/tests/proc_macro_server.rs | 3 + .../src/methods/expand.rs | 8 +- 14 files changed, 224 insertions(+), 133 deletions(-) create mode 100644 scarb/src/compiler/plugin/proc_macro/host/conversion.rs diff --git a/plugins/cairo-lang-macro/Cargo.toml b/plugins/cairo-lang-macro/Cargo.toml index 17e6c393b..a17aa86f6 100644 --- a/plugins/cairo-lang-macro/Cargo.toml +++ b/plugins/cairo-lang-macro/Cargo.toml @@ -2,7 +2,7 @@ name = "cairo-lang-macro" version = "0.1.1" edition.workspace = true -rust-version = "1.64" +rust-version = "1.73" authors.workspace = true categories = ["development-tools"] diff --git a/plugins/cairo-lang-macro/src/lib.rs b/plugins/cairo-lang-macro/src/lib.rs index 4b31e04ad..8fa21d448 100644 --- a/plugins/cairo-lang-macro/src/lib.rs +++ b/plugins/cairo-lang-macro/src/lib.rs @@ -22,7 +22,7 @@ use std::cell::RefCell; use cairo_lang_macro_stable::ffi::StableSlice; use cairo_lang_macro_stable::{ - StableExpansionsList, StablePostProcessContext, StableProcMacroResult, + StableExpansionsList, StablePostProcessContext, StableProcMacroResult, StableTextSpan, }; use std::ffi::{c_char, CStr, CString}; use std::ops::Deref; @@ -34,6 +34,8 @@ pub use types::*; // A thread-local allocation context for allocating tokens on proc macro side. thread_local!(static CONTEXT: RefCell = RefCell::default() ); +thread_local!(static CALL_SITE: RefCell<(u32, u32)> = RefCell::default()); + #[doc(hidden)] #[derive(Clone)] pub struct ExpansionDefinition { @@ -99,6 +101,7 @@ pub unsafe extern "C" fn free_expansions_list(list: StableExpansionsList) { #[no_mangle] pub unsafe extern "C" fn expand( item_name: *const c_char, + call_site: StableTextSpan, stable_attr: cairo_lang_macro_stable::StableTokenStream, stable_token_stream: cairo_lang_macro_stable::StableTokenStream, ) -> cairo_lang_macro_stable::StableResultWrapper { @@ -111,6 +114,8 @@ pub unsafe extern "C" fn expand( ctx_cell.replace(AllocationContext::with_capacity(size_hint)); let ctx_borrow = ctx_cell.borrow(); let ctx: &AllocationContext = ctx_borrow.deref(); + // Set the call site for the current expand call. + CALL_SITE.replace((call_site.start, call_site.end)); // Copy the stable token stream into current context. 
let token_stream = TokenStream::from_stable_in(&stable_token_stream, ctx); let attr_token_stream = TokenStream::from_stable_in(&stable_attr, ctx); diff --git a/plugins/cairo-lang-macro/src/types/token.rs b/plugins/cairo-lang-macro/src/types/token.rs index f6cfc2cea..36cc5645b 100644 --- a/plugins/cairo-lang-macro/src/types/token.rs +++ b/plugins/cairo-lang-macro/src/types/token.rs @@ -1,4 +1,4 @@ -use crate::CONTEXT; +use crate::{CALL_SITE, CONTEXT}; use bumpalo::Bump; use std::fmt::{Debug, Display, Write}; use std::hash::{Hash, Hasher}; @@ -316,6 +316,26 @@ impl TextSpan { pub fn new(start: TextOffset, end: TextOffset) -> TextSpan { TextSpan { start, end } } + + /// Create a new [`TextSpan`], located at the invocation of the current procedural macro. + /// Identifiers created with this span will be resolved as if they were written directly at + /// the macro call location (call-site hygiene). + pub fn call_site() -> Self { + CALL_SITE.with(|call_site| { + let call_site = call_site.borrow(); + Self::new(call_site.0, call_site.1) + }) + } + + /// Create a new [`TextSpan`], with width `0`, located right before this span. + pub fn start(self) -> Self { + Self::new(self.start, self.start) + } + + /// Create a new [`TextSpan`], with width `0`, located right after this span. + pub fn end(self) -> Self { + Self::new(self.end, self.end) + } } impl Token { diff --git a/scarb/src/compiler/plugin/proc_macro/ffi.rs b/scarb/src/compiler/plugin/proc_macro/ffi.rs index 5f45de181..319161209 100644 --- a/scarb/src/compiler/plugin/proc_macro/ffi.rs +++ b/scarb/src/compiler/plugin/proc_macro/ffi.rs @@ -3,11 +3,11 @@ use crate::core::{Package, PackageId}; use anyhow::{ensure, Context, Result}; use cairo_lang_macro::{ ExpansionKind as SharedExpansionKind, FullPathMarker, PostProcessContext, ProcMacroResult, - TokenStream, + TextSpan, TokenStream, }; use cairo_lang_macro_stable::{ StableExpansion, StableExpansionsList, StablePostProcessContext, StableProcMacroResult, - StableResultWrapper, StableTokenStream, + StableResultWrapper, StableTextSpan, StableTokenStream, }; use camino::Utf8PathBuf; use itertools::Itertools; @@ -159,6 +159,7 @@ impl ProcMacroInstance { pub(crate) fn generate_code( &self, item_name: SmolStr, + call_site: TextSpan, attr: TokenStream, token_stream: TokenStream, ) -> ProcMacroResult { @@ -169,8 +170,9 @@ impl ProcMacroInstance { let item_name = CString::new(item_name.to_string()).unwrap().into_raw(); // Call FFI interface for code expansion. // Note that `stable_result` has been allocated by the dynamic library. + let call_site: StableTextSpan = call_site.into_stable(); let stable_result = - (self.plugin.vtable.expand)(item_name, stable_attr, stable_token_stream); + (self.plugin.vtable.expand)(item_name, call_site, stable_attr, stable_token_stream); // Free proc macro name. let _ = unsafe { CString::from_raw(item_name) }; // Free the memory allocated by the `stable_token_stream`. 
@@ -282,8 +284,12 @@ impl Expansion { type ListExpansions = extern "C" fn() -> StableExpansionsList; type FreeExpansionsList = extern "C" fn(StableExpansionsList); -type ExpandCode = - extern "C" fn(*const c_char, StableTokenStream, StableTokenStream) -> StableResultWrapper; +type ExpandCode = extern "C" fn( + *const c_char, + StableTextSpan, + StableTokenStream, + StableTokenStream, +) -> StableResultWrapper; type FreeResult = extern "C" fn(StableProcMacroResult); type PostProcessCallback = extern "C" fn(StablePostProcessContext) -> StablePostProcessContext; type DocExpansion = extern "C" fn(*const c_char) -> *mut c_char; diff --git a/scarb/src/compiler/plugin/proc_macro/host/attribute.rs b/scarb/src/compiler/plugin/proc_macro/host/attribute.rs index e9ecaa122..1174eea28 100644 --- a/scarb/src/compiler/plugin/proc_macro/host/attribute.rs +++ b/scarb/src/compiler/plugin/proc_macro/host/attribute.rs @@ -1,5 +1,8 @@ use crate::compiler::plugin::proc_macro::host::aux_data::{EmittedAuxData, ProcMacroAuxData}; -use crate::compiler::plugin::proc_macro::host::{generate_code_mappings, into_cairo_diagnostics}; +use crate::compiler::plugin::proc_macro::host::conversion::{ + into_cairo_diagnostics, CallSiteLocation, +}; +use crate::compiler::plugin::proc_macro::host::generate_code_mappings; use crate::compiler::plugin::proc_macro::{ Expansion, ExpansionKind, ProcMacroHostPlugin, ProcMacroId, TokenStreamBuilder, }; @@ -12,7 +15,7 @@ use cairo_lang_syntax::attribute::structured::AttributeStructurize; use cairo_lang_syntax::node::ast::{ImplItem, MaybeImplBody, MaybeTraitBody}; use cairo_lang_syntax::node::db::SyntaxGroup; use cairo_lang_syntax::node::ids::SyntaxStablePtrId; -use cairo_lang_syntax::node::{ast, TypedStablePtr, TypedSyntaxNode}; +use cairo_lang_syntax::node::{ast, TypedSyntaxNode}; use itertools::Itertools; use smol_str::SmolStr; use std::collections::HashSet; @@ -166,22 +169,14 @@ impl ProcMacroHostPlugin { token_stream: TokenStream, ) -> bool { let mut all_none = true; - let (input, args, stable_ptr) = match found { - AttrExpansionFound::Last { - expansion, - args, - stable_ptr, - } => { + let input = match found { + AttrExpansionFound::Last(input) => { all_none = false; - (expansion, args, stable_ptr) + input } - AttrExpansionFound::Some { - expansion, - args, - stable_ptr, - } => { + AttrExpansionFound::Some(input) => { all_none = false; - (expansion, args, stable_ptr) + input } AttrExpansionFound::None => { item_builder.add_node(func.as_syntax_node()); @@ -189,13 +184,20 @@ impl ProcMacroHostPlugin { } }; - let result = self.instance(input.package_id).generate_code( - input.expansion.name.clone(), - args, + let result = self.instance(input.id.package_id).generate_code( + input.id.expansion.name.clone(), + input.call_site.span, + input.args, token_stream.clone(), ); - let expanded = context.register_result(token_stream.to_string(), input, result, stable_ptr); + let expanded = context.register_result( + token_stream.to_string(), + input.id, + result, + input.call_site.stable_ptr, + ); + item_builder.add_modified(RewriteNode::Mapped { origin: func.as_syntax_node().span(db), node: Box::new(RewriteNode::Text(expanded.to_string())), @@ -335,7 +337,11 @@ impl ProcMacroHostPlugin { let mut args_builder = TokenStreamBuilder::new(db); args_builder.add_node(attr.arguments(db).as_syntax_node()); let args = args_builder.build(ctx); - expansion = Some((found, args, attr.stable_ptr().untyped())); + expansion = Some(AttrExpansionArgs { + id: found, + args, + call_site: CallSiteLocation::new(&attr, 
db), + }); // Do not add the attribute for found expansion. continue; } else { @@ -346,16 +352,8 @@ impl ProcMacroHostPlugin { builder.add_node(attr.as_syntax_node()); } match (expansion, last) { - (Some((expansion, args, stable_ptr)), true) => AttrExpansionFound::Last { - expansion, - args, - stable_ptr, - }, - (Some((expansion, args, stable_ptr)), false) => AttrExpansionFound::Some { - expansion, - args, - stable_ptr, - }, + (Some(args), true) => AttrExpansionFound::Last(args), + (Some(args), false) => AttrExpansionFound::Some(args), (None, _) => AttrExpansionFound::None, } } @@ -366,11 +364,12 @@ impl ProcMacroHostPlugin { last: bool, args: TokenStream, token_stream: TokenStream, - stable_ptr: SyntaxStablePtrId, + call_site: CallSiteLocation, ) -> PluginResult { let original = token_stream.to_string(); let result = self.instance(input.package_id).generate_code( input.expansion.name.clone(), + call_site.span, args, token_stream, ); @@ -379,7 +378,7 @@ impl ProcMacroHostPlugin { if result.token_stream.is_empty() { // Remove original code return PluginResult { - diagnostics: into_cairo_diagnostics(result.diagnostics, stable_ptr), + diagnostics: into_cairo_diagnostics(result.diagnostics, call_site.stable_ptr), code: None, remove_original_item: true, }; @@ -402,7 +401,7 @@ impl ProcMacroHostPlugin { return PluginResult { code: None, remove_original_item: false, - diagnostics: into_cairo_diagnostics(result.diagnostics, stable_ptr), + diagnostics: into_cairo_diagnostics(result.diagnostics, call_site.stable_ptr), }; } @@ -425,31 +424,30 @@ impl ProcMacroHostPlugin { ))) }), }), - diagnostics: into_cairo_diagnostics(result.diagnostics, stable_ptr), + diagnostics: into_cairo_diagnostics(result.diagnostics, call_site.stable_ptr), remove_original_item: true, } } } pub enum AttrExpansionFound { - Some { - expansion: ProcMacroId, - args: TokenStream, - stable_ptr: SyntaxStablePtrId, - }, + Some(AttrExpansionArgs), + Last(AttrExpansionArgs), None, - Last { - expansion: ProcMacroId, - args: TokenStream, - stable_ptr: SyntaxStablePtrId, - }, +} + +pub struct AttrExpansionArgs { + pub id: ProcMacroId, + pub args: TokenStream, + pub call_site: CallSiteLocation, } impl AttrExpansionFound { pub fn as_name(&self) -> Option { match self { - AttrExpansionFound::Some { expansion, .. } - | AttrExpansionFound::Last { expansion, .. 
} => Some(expansion.expansion.name.clone()), + AttrExpansionFound::Some(args) | AttrExpansionFound::Last(args) => { + Some(args.id.expansion.name.clone()) + } AttrExpansionFound::None => None, } } diff --git a/scarb/src/compiler/plugin/proc_macro/host/conversion.rs b/scarb/src/compiler/plugin/proc_macro/host/conversion.rs new file mode 100644 index 000000000..aef4ac5f6 --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/host/conversion.rs @@ -0,0 +1,49 @@ +use cairo_lang_defs::plugin::PluginDiagnostic; +use cairo_lang_macro::{Diagnostic, Severity, TextSpan}; +use cairo_lang_syntax::node::db::SyntaxGroup; +use cairo_lang_syntax::node::ids::SyntaxStablePtrId; +use cairo_lang_syntax::node::{TypedStablePtr, TypedSyntaxNode}; +use itertools::Itertools; + +pub trait SpanSource { + fn text_span(&self, db: &dyn SyntaxGroup) -> TextSpan; +} + +impl SpanSource for T { + fn text_span(&self, db: &dyn SyntaxGroup) -> TextSpan { + let node = self.as_syntax_node(); + let span = node.span(db); + TextSpan::new(span.start.as_u32(), span.end.as_u32()) + } +} + +pub struct CallSiteLocation { + pub stable_ptr: SyntaxStablePtrId, + pub span: TextSpan, +} + +impl CallSiteLocation { + pub fn new(node: &T, db: &dyn SyntaxGroup) -> Self { + Self { + stable_ptr: node.stable_ptr().untyped(), + span: node.text_span(db), + } + } +} + +pub fn into_cairo_diagnostics( + diagnostics: Vec, + stable_ptr: SyntaxStablePtrId, +) -> Vec { + diagnostics + .into_iter() + .map(|diag| PluginDiagnostic { + stable_ptr, + message: diag.message, + severity: match diag.severity { + Severity::Error => cairo_lang_diagnostics::Severity::Error, + Severity::Warning => cairo_lang_diagnostics::Severity::Warning, + }, + }) + .collect_vec() +} diff --git a/scarb/src/compiler/plugin/proc_macro/host/derive.rs b/scarb/src/compiler/plugin/proc_macro/host/derive.rs index 3f1f82d61..0834f9b5e 100644 --- a/scarb/src/compiler/plugin/proc_macro/host/derive.rs +++ b/scarb/src/compiler/plugin/proc_macro/host/derive.rs @@ -1,7 +1,8 @@ use crate::compiler::plugin::proc_macro::host::aux_data::{EmittedAuxData, ProcMacroAuxData}; -use crate::compiler::plugin::proc_macro::host::{ - generate_code_mappings, into_cairo_diagnostics, DERIVE_ATTR, +use crate::compiler::plugin::proc_macro::host::conversion::{ + into_cairo_diagnostics, CallSiteLocation, }; +use crate::compiler::plugin::proc_macro::host::{generate_code_mappings, DERIVE_ATTR}; use crate::compiler::plugin::proc_macro::{ Expansion, ExpansionKind, ProcMacroHostPlugin, ProcMacroId, TokenStreamBuilder, }; @@ -13,7 +14,7 @@ use cairo_lang_syntax::attribute::structured::{AttributeArgVariant, AttributeStr use cairo_lang_syntax::node::ast::{Expr, PathSegment}; use cairo_lang_syntax::node::db::SyntaxGroup; use cairo_lang_syntax::node::helpers::QueryAttrs; -use cairo_lang_syntax::node::{ast, Terminal, TypedStablePtr, TypedSyntaxNode}; +use cairo_lang_syntax::node::{ast, Terminal, TypedSyntaxNode}; use convert_case::{Case, Casing}; use itertools::Itertools; @@ -21,7 +22,7 @@ impl ProcMacroHostPlugin { /// Handle `#[derive(...)]` attribute. /// /// Returns a list of expansions that this plugin should apply. 
-    fn parse_derive(&self, db: &dyn SyntaxGroup, item_ast: ast::ModuleItem) -> Vec<ProcMacroId> {
+    fn parse_derive(&self, db: &dyn SyntaxGroup, item_ast: ast::ModuleItem) -> Vec<DeriveFound> {
        let attrs = match item_ast {
            ast::ModuleItem::Struct(struct_ast) => Some(struct_ast.query_attr(db, DERIVE_ATTR)),
            ast::ModuleItem::Enum(enum_ast) => Some(enum_ast.query_attr(db, DERIVE_ATTR)),
@@ -52,6 +53,10 @@ impl ProcMacroHostPlugin {
                        value.to_case(Case::Snake),
                        ExpansionKind::Derive,
                    ))
+                    .map(|id| DeriveFound {
+                        id,
+                        call_site: CallSiteLocation::new(segment, db),
+                    })
            })
            .collect_vec()
    }
@@ -62,7 +67,6 @@ impl ProcMacroHostPlugin {
        item_ast: ast::ModuleItem,
        stream_metadata: TokenStreamMetadata,
    ) -> Option<PluginResult> {
-        let stable_ptr = item_ast.clone().stable_ptr().untyped();
        let mut token_stream_builder = TokenStreamBuilder::new(db);
        token_stream_builder.add_node(item_ast.as_syntax_node());
        token_stream_builder.with_metadata(stream_metadata.clone());
@@ -71,7 +75,14 @@ impl ProcMacroHostPlugin {
        // All derives to be applied.
        let derives = self.parse_derive(db, item_ast.clone());
-        let any_derives = !derives.is_empty();
+
+        if derives.is_empty() {
+            // No derives found - returning early.
+            return None;
+        }
+
+        // We use call site of first derive found.
+        let stable_ptr = derives[0].call_site.stable_ptr;
        let ctx = AllocationContext::default();
        let mut derived_code = String::new();
@@ -79,9 +90,12 @@ impl ProcMacroHostPlugin {
        let mut current_width = TextWidth::default();
        for derive in derives.iter() {
+            let call_site = &derive.call_site;
+            let derive = &derive.id;
            let token_stream = token_stream_builder.build(&ctx);
            let result = self.instance(derive.package_id).generate_code(
                derive.expansion.name.clone(),
+                call_site.span.clone(),
                TokenStream::empty(),
                token_stream,
            );
@@ -111,44 +125,46 @@ impl ProcMacroHostPlugin {
            derived_code.push_str(&result.token_stream.to_string());
        }
-        if any_derives {
-            return Some(PluginResult {
-                code: if derived_code.is_empty() {
-                    None
+        Some(PluginResult {
+            code: if derived_code.is_empty() {
+                None
+            } else {
+                let msg = if derives.len() == 1 {
+                    "the derive macro"
                } else {
-                    let msg = if derives.len() == 1 {
-                        "the derive macro"
-                    } else {
-                        "one of the derive macros"
-                    };
-                    let derive_names = derives
-                        .iter()
-                        .map(|derive| derive.expansion.name.to_string())
-                        .join("`, `");
-                    let note = format!("this error originates in {msg}: `{derive_names}`");
-                    Some(PluginGeneratedFile {
-                        name: "proc_macro_derive".into(),
-                        code_mappings,
-                        content: derived_code,
-                        aux_data: if aux_data.is_empty() {
-                            None
-                        } else {
-                            Some(DynGeneratedFileAuxData::new(aux_data))
-                        },
-                        diagnostics_note: Some(note),
-                    })
-                },
-                diagnostics: into_cairo_diagnostics(all_diagnostics, stable_ptr),
-                // Note that we don't remove the original item here, unlike for attributes.
-                // We do not add the original code to the generated file either.
-                remove_original_item: false,
-            });
-        }
+                    "one of the derive macros"
+                };
+                let derive_names = derives
+                    .iter()
+                    .map(|derive| derive.id.expansion.name.to_string())
+                    .join("`, `");
+                let note = format!("this error originates in {msg}: `{derive_names}`");
-        None
+                Some(PluginGeneratedFile {
+                    name: "proc_macro_derive".into(),
+                    code_mappings,
+                    content: derived_code,
+                    diagnostics_note: Some(note),
+                    aux_data: if aux_data.is_empty() {
+                        None
+                    } else {
+                        Some(DynGeneratedFileAuxData::new(aux_data))
+                    },
+                })
+            },
+            diagnostics: into_cairo_diagnostics(all_diagnostics, stable_ptr),
+            // Note that we don't remove the original item here, unlike for attributes.
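+            // The generated impls refer to the original item by name, so it must stay in place.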
+ // We do not add the original code to the generated file either. + remove_original_item: false, + }) } } +struct DeriveFound { + id: ProcMacroId, + call_site: CallSiteLocation, +} + fn generate_code_mappings_with_offset( token_stream: &TokenStream, offset: TextWidth, diff --git a/scarb/src/compiler/plugin/proc_macro/host/inline.rs b/scarb/src/compiler/plugin/proc_macro/host/inline.rs index c8da34d55..b1df6a04c 100644 --- a/scarb/src/compiler/plugin/proc_macro/host/inline.rs +++ b/scarb/src/compiler/plugin/proc_macro/host/inline.rs @@ -1,5 +1,8 @@ use crate::compiler::plugin::proc_macro::host::aux_data::{EmittedAuxData, ProcMacroAuxData}; -use crate::compiler::plugin::proc_macro::host::{generate_code_mappings, into_cairo_diagnostics}; +use crate::compiler::plugin::proc_macro::host::conversion::{ + into_cairo_diagnostics, CallSiteLocation, +}; +use crate::compiler::plugin::proc_macro::host::generate_code_mappings; use crate::compiler::plugin::proc_macro::{ Expansion, ProcMacroId, ProcMacroInstance, TokenStreamBuilder, }; @@ -9,7 +12,7 @@ use cairo_lang_defs::plugin::{ }; use cairo_lang_macro::{AllocationContext, TokenStream}; use cairo_lang_syntax::node::db::SyntaxGroup; -use cairo_lang_syntax::node::{ast, TypedStablePtr, TypedSyntaxNode}; +use cairo_lang_syntax::node::{ast, TypedSyntaxNode}; use std::sync::{Arc, OnceLock}; /// A Cairo compiler inline macro plugin controlling the inline procedural macro execution. @@ -45,19 +48,20 @@ impl InlineMacroExprPlugin for ProcMacroInlinePlugin { syntax: &ast::ExprInlineMacro, _metadata: &MacroPluginMetadata<'_>, ) -> InlinePluginResult { + let call_site = CallSiteLocation::new(syntax, db); let ctx = AllocationContext::default(); - let stable_ptr = syntax.clone().stable_ptr().untyped(); let arguments = syntax.arguments(db); let mut token_stream_builder = TokenStreamBuilder::new(db); token_stream_builder.add_node(arguments.as_syntax_node()); let token_stream = token_stream_builder.build(&ctx); let result = self.instance().generate_code( self.expansion.name.clone(), + call_site.span, TokenStream::empty(), token_stream, ); // Handle diagnostics. 
-        let diagnostics = into_cairo_diagnostics(result.diagnostics, stable_ptr);
+        let diagnostics = into_cairo_diagnostics(result.diagnostics, call_site.stable_ptr);
        let token_stream = result.token_stream.clone();
        if token_stream.is_empty() {
            // Remove original code
diff --git a/scarb/src/compiler/plugin/proc_macro/host/mod.rs b/scarb/src/compiler/plugin/proc_macro/host/mod.rs
index bf8e81f6d..617cef0e3 100644
--- a/scarb/src/compiler/plugin/proc_macro/host/mod.rs
+++ b/scarb/src/compiler/plugin/proc_macro/host/mod.rs
@@ -1,5 +1,6 @@
 mod attribute;
 mod aux_data;
+mod conversion;
 mod derive;
 mod inline;
 mod post;
@@ -17,12 +18,9 @@ use cairo_lang_defs::plugin::{MacroPlugin, MacroPluginMetadata, PluginResult};
 use cairo_lang_filesystem::db::Edition;
 use cairo_lang_filesystem::ids::{CodeMapping, CodeOrigin};
 use cairo_lang_filesystem::span::{TextOffset, TextSpan, TextWidth};
-use cairo_lang_macro::{
-    AllocationContext, Diagnostic, Severity, TokenStream, TokenStreamMetadata, TokenTree,
-};
+use cairo_lang_macro::{AllocationContext, TokenStream, TokenStreamMetadata, TokenTree};
 use cairo_lang_semantic::plugin::PluginSuite;
 use cairo_lang_syntax::node::db::SyntaxGroup;
-use cairo_lang_syntax::node::ids::SyntaxStablePtrId;
 use cairo_lang_syntax::node::{ast, TypedStablePtr, TypedSyntaxNode};
 use convert_case::{Case, Casing};
 use itertools::Itertools;
@@ -166,21 +164,13 @@ impl MacroPlugin for ProcMacroHostPlugin {
         let (input, body) = self.parse_attribute(db, item_ast.clone(), &ctx);
         if let Some(result) = match input {
-            AttrExpansionFound::Last {
-                expansion,
-                args,
-                stable_ptr,
-            } => Some((expansion, args, stable_ptr, true)),
-            AttrExpansionFound::Some {
-                expansion,
-                args,
-                stable_ptr,
-            } => Some((expansion, args, stable_ptr, false)),
+            AttrExpansionFound::Last(input) => Some((input, true)),
+            AttrExpansionFound::Some(input) => Some((input, false)),
             AttrExpansionFound::None => None,
         }
-        .map(|(expansion, args, stable_ptr, last)| {
+        .map(|(input, last)| {
             let token_stream = body.with_metadata(stream_metadata.clone());
-            self.expand_attribute(expansion, last, args, token_stream, stable_ptr)
+            self.expand_attribute(input.id, last, input.args, token_stream, input.call_site)
         })
         {
             return result;
         }
@@ -223,23 +213,6 @@ impl MacroPlugin for ProcMacroHostPlugin {
     }
 }
-fn into_cairo_diagnostics(
-    diagnostics: Vec<Diagnostic>,
-    stable_ptr: SyntaxStablePtrId,
-) -> Vec<PluginDiagnostic> {
-    diagnostics
-        .into_iter()
-        .map(|diag| PluginDiagnostic {
-            stable_ptr,
-            message: diag.message,
-            severity: match diag.severity {
-                Severity::Error => cairo_lang_diagnostics::Severity::Error,
-                Severity::Warning => cairo_lang_diagnostics::Severity::Warning,
-            },
-        })
-        .collect_vec()
-}
-
 /// A Scarb wrapper around the `ProcMacroHost` compiler plugin.
 ///
 /// This struct represents the compiler plugin in terms of Scarb data model.
diff --git a/scarb/src/ops/proc_macro_server/methods/expand_attribute.rs b/scarb/src/ops/proc_macro_server/methods/expand_attribute.rs
index 89fc10480..264e5eb68 100644
--- a/scarb/src/ops/proc_macro_server/methods/expand_attribute.rs
+++ b/scarb/src/ops/proc_macro_server/methods/expand_attribute.rs
@@ -19,7 +19,12 @@ impl Handler for ExpandAttribute {
         })
         .unwrap();
-    let result = instance.generate_code(params.attr.into(), params.args, params.item);
+    let result = instance.generate_code(
+        params.attr.into(),
+        params.call_site,
+        params.args,
+        params.item,
+    );
     Ok(ProcMacroResult {
         token_stream: result.token_stream,
diff --git a/scarb/src/ops/proc_macro_server/methods/expand_derive.rs b/scarb/src/ops/proc_macro_server/methods/expand_derive.rs
index fc42b846c..091aa342d 100644
--- a/scarb/src/ops/proc_macro_server/methods/expand_derive.rs
+++ b/scarb/src/ops/proc_macro_server/methods/expand_derive.rs
@@ -23,6 +23,7 @@ impl Handler for ExpandDerive {
         let result = instance.generate_code(
             expansion.name.clone(),
+            params.call_site.clone(),
             TokenStream::empty(),
             params.item.clone(),
         );
diff --git a/scarb/src/ops/proc_macro_server/methods/expand_inline.rs b/scarb/src/ops/proc_macro_server/methods/expand_inline.rs
index 1e209fae1..c4d26d5ce 100644
--- a/scarb/src/ops/proc_macro_server/methods/expand_inline.rs
+++ b/scarb/src/ops/proc_macro_server/methods/expand_inline.rs
@@ -20,7 +20,12 @@ impl Handler for ExpandInline {
         })
         .unwrap();
-    let result = instance.generate_code(params.name.into(), TokenStream::empty(), params.args);
+    let result = instance.generate_code(
+        params.name.into(),
+        params.call_site,
+        TokenStream::empty(),
+        params.args,
+    );
     Ok(ProcMacroResult {
         token_stream: result.token_stream,
diff --git a/scarb/tests/proc_macro_server.rs b/scarb/tests/proc_macro_server.rs
index 58890a4fa..a117cf5b8 100644
--- a/scarb/tests/proc_macro_server.rs
+++ b/scarb/tests/proc_macro_server.rs
@@ -89,6 +89,7 @@ fn expand_attribute() {
         .request_and_wait::<ExpandAttribute>(ExpandAttributeParams {
             attr: "rename_to_very_new_name".to_string(),
             args: TokenStream::empty(),
+            call_site: TextSpan::new(0, 0),
             item: TokenStream::new(vec![TokenTree::Ident(Token::new(
                 "fn some_test_fn(){}",
                 TextSpan::new(0, 0),
@@ -131,6 +132,7 @@ fn expand_derive() {
     let response = proc_macro_server
         .request_and_wait::<ExpandDerive>(ExpandDeriveParams {
             derives: vec!["some_derive".to_string()],
+            call_site: TextSpan::new(0, 0),
             item,
         })
         .unwrap();
@@ -180,6 +182,7 @@ fn expand_inline() {
     let response = proc_macro_server
         .request_and_wait::<ExpandInline>(ExpandInlineMacroParams {
             name: "replace_all_15_with_25".to_string(),
+            call_site: TextSpan::new(0, 0),
             args: TokenStream::new(vec![TokenTree::Ident(Token::new(
                 "struct A { field: 15 , other_field: macro_call!(12)}",
                 TextSpan::new(0, 0),
diff --git a/utils/scarb-proc-macro-server-types/src/methods/expand.rs b/utils/scarb-proc-macro-server-types/src/methods/expand.rs
index cb33541b9..dba2d6cb9 100644
--- a/utils/scarb-proc-macro-server-types/src/methods/expand.rs
+++ b/utils/scarb-proc-macro-server-types/src/methods/expand.rs
@@ -1,6 +1,6 @@
 use super::Method;
 use super::ProcMacroResult;
-use cairo_lang_macro::TokenStream;
+use cairo_lang_macro::{TextSpan, TokenStream};
 use serde::{Deserialize, Serialize};
 /// Parameters for expanding a specific attribute macro.
@@ -15,6 +15,8 @@ pub struct ExpandAttributeParams {
     pub args: TokenStream,
     /// The token stream representing the item on which the macro is applied.
     pub item: TokenStream,
+    /// The call site span.
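+    /// It points at the macro invocation this expansion originates from.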
+    pub call_site: TextSpan,
 }
 /// Represents a request to expand a single attribute macro.
@@ -36,6 +38,8 @@ pub struct ExpandDeriveParams {
     pub derives: Vec<String>,
     /// The token stream of the item to which the derive macros are applied.
     pub item: TokenStream,
+    /// The call site span.
+    pub call_site: TextSpan,
 }
 /// Represents a request to expand derive macros.
@@ -57,6 +61,8 @@ pub struct ExpandInlineMacroParams {
     pub name: String,
     /// The token stream representing arguments passed to the macro.
     pub args: TokenStream,
+    /// The call site span.
+    pub call_site: TextSpan,
 }
 /// Represents a request to expand a single inline macro.

From 6886c057c092ffb6fc0b71d534daedd0f12e5ae4 Mon Sep 17 00:00:00 2001
From: Mateusz Kowalski
Date: Thu, 5 Dec 2024 10:57:05 +0100
Subject: [PATCH 12/13] Quote macro (#1808) (#1810)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Signed-off-by: dependabot[bot]
Co-authored-by: Mateusz Kowalski
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Maksim Zdobnikau <43750648+DelevoXDG@users.noreply.github.com>

commit-id:face0a80
---
**Stack**:
- #1811
- #1810 ⬅

⚠️ *Part of a stack created by [spr](https://github.com/ejoffe/spr). Do not merge manually using the UI - doing so may have unexpected results.*
---
 Cargo.lock | 10 +
 Cargo.toml | 1 +
 plugins/cairo-lang-macro/Cargo.toml | 2 +
 plugins/cairo-lang-macro/src/lib.rs | 4 +-
 plugins/cairo-lang-macro/src/types/token.rs | 76 ++++
 plugins/cairo-lang-quote/Cargo.toml | 19 +
 plugins/cairo-lang-quote/src/lib.rs | 115 ++++++
 scarb/tests/proc_macro_quote.rs | 370 ++++++++++++++++++
 .../src/cairo_plugin_project_builder.rs | 4 +
 9 files changed, 600 insertions(+), 1 deletion(-)
 create mode 100644 plugins/cairo-lang-quote/Cargo.toml
 create mode 100644 plugins/cairo-lang-quote/src/lib.rs
 create mode 100644 scarb/tests/proc_macro_quote.rs
diff --git a/Cargo.lock b/Cargo.lock
index c09bcef81..b6050f81c 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -751,6 +751,8 @@ dependencies = [
 "bumpalo",
 "cairo-lang-macro-attributes 0.1.0",
 "cairo-lang-macro-stable 1.0.0",
+ "cairo-lang-primitive-token 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cairo-lang-quote",
 "linkme",
 "serde",
 "serde_json",
@@ -862,6 +864,14 @@ dependencies = [
 "toml",
 ]
+[[package]]
+name = "cairo-lang-quote"
+version = "0.1.0"
+dependencies = [
+ "proc-macro2",
+ "quote",
+]
+
 [[package]]
 name = "cairo-lang-runnable-utils"
 version = "2.9.2"
diff --git a/Cargo.toml b/Cargo.toml
index be633fef8..7005f4351 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -9,6 +9,7 @@ members = [
 "plugins/cairo-lang-macro",
 "plugins/cairo-lang-macro-attributes",
 "plugins/cairo-lang-macro-stable",
+ "plugins/cairo-lang-quote",
 "utils/create-output-dir",
 "utils/once-map",
 "utils/scarb-proc-macro-server-types",
diff --git a/plugins/cairo-lang-macro/Cargo.toml b/plugins/cairo-lang-macro/Cargo.toml
index a17aa86f6..40ac4da49 100644
--- a/plugins/cairo-lang-macro/Cargo.toml
+++ b/plugins/cairo-lang-macro/Cargo.toml
@@ -17,6 +17,8 @@ repository.workspace = true
 bumpalo.workspace = true
 cairo-lang-macro-attributes = { path = "../cairo-lang-macro-attributes" }
 cairo-lang-macro-stable = { path = "../cairo-lang-macro-stable" }
+cairo-lang-primitive-token = "1.0.0"
+cairo-lang-quote = { path = "../cairo-lang-quote", version = "0.1.0" }
 linkme.workspace = true
 serde = { workspace = true, optional = true }
diff --git a/plugins/cairo-lang-macro/src/lib.rs b/plugins/cairo-lang-macro/src/lib.rs
index
8fa21d448..48ae98171 100644
--- a/plugins/cairo-lang-macro/src/lib.rs
+++ b/plugins/cairo-lang-macro/src/lib.rs
@@ -16,8 +16,11 @@
 //!
 pub use cairo_lang_macro_attributes::*;
+pub use cairo_lang_quote::*;
+
 #[doc(hidden)]
 pub use linkme;
+
 use std::cell::RefCell;
 use cairo_lang_macro_stable::ffi::StableSlice;
@@ -28,7 +31,6 @@ use std::ffi::{c_char, CStr, CString};
 use std::ops::Deref;
 mod types;
-
 pub use types::*;
 // A thread-local allocation context for allocating tokens on proc macro side.
diff --git a/plugins/cairo-lang-macro/src/types/token.rs b/plugins/cairo-lang-macro/src/types/token.rs
index 36cc5645b..b9d0ca4b8 100644
--- a/plugins/cairo-lang-macro/src/types/token.rs
+++ b/plugins/cairo-lang-macro/src/types/token.rs
@@ -1,9 +1,12 @@
 use crate::{CALL_SITE, CONTEXT};
 use bumpalo::Bump;
+use cairo_lang_primitive_token::{PrimitiveSpan, PrimitiveToken, ToPrimitiveTokenStream};
 use std::fmt::{Debug, Display, Write};
 use std::hash::{Hash, Hasher};
+use std::iter::{once, Map, Once};
 use std::ops::Deref;
 use std::rc::Rc;
+use std::vec::IntoIter;
 /// An abstract stream of Cairo tokens.
 ///
@@ -278,6 +281,46 @@ impl TokenStream {
     pub fn is_empty(&self) -> bool {
         self.tokens.is_empty()
     }
+
+    pub fn from_primitive_token_stream(
+        stable_token_stream: impl Iterator<Item = PrimitiveToken>,
+    ) -> Self {
+        Self::new(
+            stable_token_stream
+                .map(|stable_token| {
+                    TokenTree::Ident(Token::new(
+                        stable_token.content,
+                        stable_token
+                            .span
+                            .map(|stable_span| TextSpan {
+                                start: stable_span.start as u32,
+                                end: stable_span.end as u32,
+                            })
+                            .unwrap_or(TextSpan::call_site()),
+                    ))
+                })
+                .collect(),
+        )
+    }
+
+    pub fn push_token(&mut self, token_tree: TokenTree) {
+        self.tokens.push(token_tree);
+    }
+}
+
+impl IntoIterator for TokenStream {
+    type Item = TokenTree;
+    type IntoIter = IntoIter<TokenTree>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.tokens.into_iter()
+    }
+}
+
+impl Extend<TokenTree> for TokenStream {
+    fn extend<T: IntoIterator<Item = TokenTree>>(&mut self, iter: T) {
+        self.tokens.extend(iter);
+    }
}
 impl Display for TokenStream {
@@ -355,6 +398,39 @@ impl Token {
     }
 }
+impl ToPrimitiveTokenStream for TokenStream {
+    type Iter = Map<IntoIter<TokenTree>, fn(TokenTree) -> PrimitiveToken>;
+    fn to_primitive_token_stream(&self) -> Self::Iter {
+        self.tokens
+            .clone()
+            .into_iter()
+            .map(|token_tree| match token_tree {
+                TokenTree::Ident(token) => PrimitiveToken::new(
+                    token.content.to_string(),
+                    Some(PrimitiveSpan {
+                        start: token.span.start as usize,
+                        end: token.span.end as usize,
+                    }),
+                ),
+            })
+    }
+}
+
+impl ToPrimitiveTokenStream for TokenTree {
+    type Iter = Once<PrimitiveToken>;
+    fn to_primitive_token_stream(&self) -> Self::Iter {
+        once(match self {
+            TokenTree::Ident(token) => PrimitiveToken::new(
+                token.content.to_string(),
+                Some(PrimitiveSpan {
+                    start: token.span.start as usize,
+                    end: token.span.end as usize,
+                }),
+            ),
+        })
+    }
+}
+
 #[cfg(test)]
 mod test {
     use crate::{AllocationContext, TextSpan, Token, TokenStream, TokenTree};
diff --git a/plugins/cairo-lang-quote/Cargo.toml b/plugins/cairo-lang-quote/Cargo.toml
new file mode 100644
index 000000000..7249b2a57
--- /dev/null
+++ b/plugins/cairo-lang-quote/Cargo.toml
@@ -0,0 +1,19 @@
+[package]
+name = "cairo-lang-quote"
+version = "0.1.0"
+edition.workspace = true
+
+authors.workspace = true
+categories = ["development-tools"]
+description = "Cairo procedural macro helper for constructing procedural macro results."
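+# This crate is re-exported by `cairo-lang-macro`, which exposes `quote!` to macro authors.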
+homepage.workspace = true
+keywords = ["scarb"]
+license.workspace = true
+repository.workspace = true
+
+[lib]
+proc-macro = true
+
+[dependencies]
+proc-macro2.workspace = true
+quote.workspace = true
diff --git a/plugins/cairo-lang-quote/src/lib.rs b/plugins/cairo-lang-quote/src/lib.rs
new file mode 100644
index 000000000..332bcb98d
--- /dev/null
+++ b/plugins/cairo-lang-quote/src/lib.rs
@@ -0,0 +1,115 @@
+use std::iter::Peekable;
+
+use proc_macro::{Delimiter, TokenStream as RustTokenStream, TokenTree as RustTokenTree};
+use proc_macro2::{Ident, Span};
+
+extern crate proc_macro;
+use quote::quote as rust_quote;
+
+#[derive(Debug)]
+enum QuoteToken {
+    Var(Ident),
+    Content(String),
+    Whitespace,
+}
+
+enum DelimiterVariant {
+    Open,
+    Close,
+}
+
+impl QuoteToken {
+    pub fn from_delimiter(delimiter: Delimiter, variant: DelimiterVariant) -> Self {
+        match (delimiter, variant) {
+            (Delimiter::Brace, DelimiterVariant::Open) => Self::Content("{".to_string()),
+            (Delimiter::Brace, DelimiterVariant::Close) => Self::Content("}".to_string()),
+            (Delimiter::Bracket, DelimiterVariant::Open) => Self::Content("[".to_string()),
+            (Delimiter::Bracket, DelimiterVariant::Close) => Self::Content("]".to_string()),
+            (Delimiter::Parenthesis, DelimiterVariant::Open) => Self::Content("(".to_string()),
+            (Delimiter::Parenthesis, DelimiterVariant::Close) => Self::Content(")".to_string()),
+            (Delimiter::None, _) => Self::Content(String::default()),
+        }
+    }
+}
+
+fn process_token_stream(
+    mut token_stream: Peekable<impl Iterator<Item = RustTokenTree>>,
+    output: &mut Vec<QuoteToken>,
+) {
+    // Parsing into a Rust proc macro TokenStream discards all whitespace.
+    // Here we make sure that no two adjacent identifiers are emitted without a space between them.
+    let mut was_previous_ident: bool = false;
+    while let Some(token_tree) = token_stream.next() {
+        match token_tree {
+            RustTokenTree::Group(group) => {
+                let token_iter = group.stream().into_iter().peekable();
+                let delimiter = group.delimiter();
+                output.push(QuoteToken::from_delimiter(
+                    delimiter,
+                    DelimiterVariant::Open,
+                ));
+                process_token_stream(token_iter, output);
+                output.push(QuoteToken::from_delimiter(
+                    delimiter,
+                    DelimiterVariant::Close,
+                ));
+                was_previous_ident = false;
+            }
+            RustTokenTree::Punct(punct) => {
+                if punct.as_char() == '#' {
+                    if let Some(RustTokenTree::Ident(ident)) = token_stream.next() {
+                        let var_ident = Ident::new(&ident.to_string(), Span::call_site());
+                        output.push(QuoteToken::Var(var_ident))
+                    }
+                } else {
+                    output.push(QuoteToken::Content(punct.to_string()));
+                }
+                was_previous_ident = false;
+            }
+            RustTokenTree::Ident(ident) => {
+                if was_previous_ident {
+                    output.push(QuoteToken::Whitespace);
+                }
+                output.push(QuoteToken::Content(ident.to_string()));
+                was_previous_ident = true;
+            }
+            RustTokenTree::Literal(literal) => {
+                output.push(QuoteToken::Content(literal.to_string()));
+                was_previous_ident = false;
+            }
+        }
+    }
+}
+
+#[proc_macro]
+pub fn quote(input: RustTokenStream) -> RustTokenStream {
+    let mut parsed_input: Vec<QuoteToken> = Vec::new();
+    let mut output_token_stream = rust_quote! {
+        let mut quote_macro_result = ::cairo_lang_macro::TokenStream::empty();
+    };
+
+    let token_iter = input.into_iter().peekable();
+    process_token_stream(token_iter, &mut parsed_input);
+
+    for quote_token in parsed_input.iter() {
+        match quote_token {
+            QuoteToken::Content(content) => {
+                output_token_stream.extend(rust_quote!
{ + quote_macro_result.push_token(::cairo_lang_macro::TokenTree::Ident(::cairo_lang_macro::Token::new(::std::string::ToString::to_string(#content), ::cairo_lang_macro::TextSpan::call_site()))); + }); + } + QuoteToken::Var(ident) => { + output_token_stream.extend(rust_quote! { + quote_macro_result.extend(::cairo_lang_macro::TokenStream::from_primitive_token_stream(::cairo_lang_primitive_token::ToPrimitiveTokenStream::to_primitive_token_stream(&#ident)).into_iter()); + }); + } + QuoteToken::Whitespace => output_token_stream.extend(rust_quote! { + quote_macro_result.push_token(::cairo_lang_macro::TokenTree::Ident(::cairo_lang_macro::Token::new(" ".to_string(), ::cairo_lang_macro::TextSpan::call_site()))); + }), + } + } + RustTokenStream::from(rust_quote!({ + #output_token_stream + quote_macro_result + })) +} diff --git a/scarb/tests/proc_macro_quote.rs b/scarb/tests/proc_macro_quote.rs new file mode 100644 index 000000000..f39b17a7a --- /dev/null +++ b/scarb/tests/proc_macro_quote.rs @@ -0,0 +1,370 @@ +use assert_fs::fixture::PathChild; +use assert_fs::TempDir; +use indoc::indoc; +use scarb_test_support::cairo_plugin_project_builder::CairoPluginProjectBuilder; +use scarb_test_support::command::Scarb; +use scarb_test_support::fsx::ChildPathEx; +use scarb_test_support::project_builder::ProjectBuilder; + +#[test] +fn can_use_quote() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .add_primitive_token_dep() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, inline_macro, quote}; + #[inline_macro] + pub fn some(_token_stream: TokenStream) -> ProcMacroResult { + let tokens = quote! { + 5 + }; + ProcMacroResult::new(tokens) + } + "##}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + fn main() -> felt252 { some!() } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("cairo-run") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .stdout_matches(indoc! {r#" + [..] Compiling some v1.0.0 [..] + [..] Compiling hello v1.0.0 [..] + [..] Finished `dev` profile [..] + [..] Running hello + Run completed successfully, returning [5] + "#}) + .success(); +} + +#[test] +fn can_use_quote_with_token_tree() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .add_primitive_token_dep() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, inline_macro, TokenTree, Token, TextSpan, quote}; + #[inline_macro] + pub fn some(_token_stream: TokenStream) -> ProcMacroResult { + let token = TokenTree::Ident(Token::new("5".to_string(), TextSpan::call_site())); + let tokens = quote! { + #token + }; + ProcMacroResult::new(tokens) + } + "##}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + fn main() -> felt252 { + some!() + } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("cairo-run") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .stdout_matches(indoc! {r#" + [..] Compiling some v1.0.0 [..] + [..] Compiling hello v1.0.0 [..] + [..] Finished `dev` profile [..] + [..] 
Running hello
+        Run completed successfully, returning [5]
+        "#})
+        .success();
+}
+
+#[test]
+fn can_use_quote_with_token_stream() {
+    let temp = TempDir::new().unwrap();
+    let t = temp.child("some");
+    CairoPluginProjectBuilder::default()
+        .add_primitive_token_dep()
+        .lib_rs(indoc! {r##"
+        use cairo_lang_macro::{ProcMacroResult, TokenStream, inline_macro, TokenTree, Token, TextSpan, quote};
+        #[inline_macro]
+        pub fn some(_token_stream: TokenStream) -> ProcMacroResult {
+            let token = TokenStream::new(vec![TokenTree::Ident(Token::new("5".to_string(), TextSpan::call_site()))]);
+            let tokens = quote! {
+                #token
+            };
+            ProcMacroResult::new(tokens)
+        }
+        "##})
+        .build(&t);
+    let project = temp.child("hello");
+    ProjectBuilder::start()
+        .name("hello")
+        .version("1.0.0")
+        .dep("some", &t)
+        .lib_cairo(indoc! {r#"
+            fn main() -> felt252 {
+                some!()
+            }
+        "#})
+        .build(&project);
+
+    Scarb::quick_snapbox()
+        .arg("cairo-run")
+        // Disable output from Cargo.
+        .env("CARGO_TERM_QUIET", "true")
+        .current_dir(&project)
+        .assert()
+        .stdout_matches(indoc! {r#"
+        [..] Compiling some v1.0.0 [..]
+        [..] Compiling hello v1.0.0 [..]
+        [..] Finished `dev` profile [..]
+        [..] Running hello
+        Run completed successfully, returning [5]
+        "#})
+        .success();
+}
+
+#[test]
+fn can_use_quote_with_syntax_node() {
+    let temp = TempDir::new().unwrap();
+    let t = temp.child("some");
+    CairoPluginProjectBuilder::default()
+        .add_primitive_token_dep()
+        .add_dep(r#"cairo-lang-syntax = "2.9.1""#)
+        .add_dep(r#"cairo-lang-parser = "2.9.1""#)
+        .lib_rs(indoc! {r##"
+        use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, quote};
+        use cairo_lang_parser::utils::SimpleParserDatabase;
+        use cairo_lang_syntax::node::with_db::SyntaxNodeWithDb;
+        #[attribute_macro]
+        pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult {
+            let db_val = SimpleParserDatabase::default();
+            let db = &db_val;
+            let code = r#"
+                fn main() -> felt252 {
+                    5
+                }
+            "#;
+            let syntax_node = db.parse_virtual(code).unwrap();
+            let syntax_node_with_db = SyntaxNodeWithDb::new(&syntax_node, db);
+            let tokens = quote! {
+                #syntax_node_with_db
+            };
+            ProcMacroResult::new(tokens)
+        }
+        "##})
+        .build(&t);
+    let project = temp.child("hello");
+    ProjectBuilder::start()
+        .name("hello")
+        .version("1.0.0")
+        .dep("some", &t)
+        .lib_cairo(indoc! {r#"
+            #[some]
+            fn main() -> u32 {
+                // completely wrong type
+                true
+            }
+        "#})
+        .build(&project);
+
+    Scarb::quick_snapbox()
+        .arg("expand")
+        // Disable output from Cargo.
+        .env("CARGO_TERM_QUIET", "true")
+        .current_dir(&project)
+        .assert()
+        .success();
+
+    assert_eq!(
+        project.child("target/dev").files(),
+        vec!["hello.expanded.cairo"]
+    );
+
+    let expanded = project
+        .child("target/dev/hello.expanded.cairo")
+        .read_to_string();
+
+    snapbox::assert_eq(
+        indoc! {r#"
+        mod hello {
+            fn main() -> felt252 {
+                5
+            }
+        }
+        "#},
+        expanded,
+    );
+}
+
+#[test]
+fn can_use_quote_with_cairo_specific_syntax() {
+    let temp = TempDir::new().unwrap();
+    let t = temp.child("some");
+    CairoPluginProjectBuilder::default().add_primitive_token_dep()
+        .add_dep(r#"cairo-lang-syntax = "2.9.1""#)
+        .add_dep(r#"cairo-lang-parser = "2.9.1""#)
+        .lib_rs(indoc!
{r##"
+        use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, quote};
+        use cairo_lang_parser::utils::SimpleParserDatabase;
+        use cairo_lang_syntax::node::with_db::SyntaxNodeWithDb;
+        #[attribute_macro]
+        pub fn some(_attr: TokenStream, _token_stream: TokenStream) -> ProcMacroResult {
+            let db_val = SimpleParserDatabase::default();
+            let db = &db_val;
+            let code = r#"
+                #[derive(Drop)]
+                struct Rectangle {
+                    width: u64,
+                    height: u64,
+                }
+                #[derive(Drop, PartialEq)]
+                struct Square {
+                    side_length: u64,
+                }
+                impl RectangleIntoSquare of TryInto<Rectangle, Square> {
+                    fn try_into(self: Rectangle) -> Option<Square> {
+                        if self.height == self.width {
+                            Option::Some(Square { side_length: self.height })
+                        } else {
+                            Option::None
+                        }
+                    }
+                }
+                fn main() {
+                    let rectangle = Rectangle { width: 8, height: 8 };
+                    let result: Square = rectangle.try_into().unwrap();
+                    let expected = Square { side_length: 8 };
+                    assert!(
+                        result == expected,
+                        "Rectangle with equal width and height should be convertible to a square."
+                    );
+                    let rectangle = Rectangle { width: 5, height: 8 };
+                    let result: Option<Square> = rectangle.try_into();
+                    assert!(
+                        result.is_none(),
+                        "Rectangle with different width and height should not be convertible to a square."
+                    );
+                }
+            "#;
+            let syntax_node = db.parse_virtual(code).unwrap();
+            let syntax_node_with_db = SyntaxNodeWithDb::new(&syntax_node, db);
+            let tokens = quote! {
+                #syntax_node_with_db
+                trait Circle {
+                    fn print() -> ();
+                }
+                impl CircleImpl of Circle {
+                    fn print() -> () {
+                        println!("This is a circle!");
+                    }
+                }
+            };
+            ProcMacroResult::new(tokens)
+        }
+        "##})
+        .build(&t);
+    let project = temp.child("hello");
+    ProjectBuilder::start()
+        .name("hello")
+        .version("1.0.0")
+        .dep("some", &t)
+        .lib_cairo(indoc! {r#"
+            #[some]
+            fn main() -> u32 {
+                // completely wrong type
+                true
+            }
+        "#})
+        .build(&project);
+
+    Scarb::quick_snapbox()
+        .arg("expand")
+        // Disable output from Cargo.
+        .env("CARGO_TERM_QUIET", "true")
+        .current_dir(&project)
+        .assert()
+        .success();
+
+    assert_eq!(
+        project.child("target/dev").files(),
+        vec!["hello.expanded.cairo"]
+    );
+
+    let expanded = project
+        .child("target/dev/hello.expanded.cairo")
+        .read_to_string();
+
+    snapbox::assert_eq(
+        indoc!
{r#"
+        mod hello {
+            #[derive(Drop)]
+            struct Rectangle {
+                width: u64,
+                height: u64,
+            }
+            #[derive(Drop, PartialEq)]
+            struct Square {
+                side_length: u64,
+            }
+            impl RectangleIntoSquare of TryInto<Rectangle, Square> {
+                fn try_into(self: Rectangle) -> Option<Square> {
+                    if self.height == self.width {
+                        Option::Some(Square { side_length: self.height })
+                    } else {
+                        Option::None
+                    }
+                }
+            }
+            fn main() {
+                let rectangle = Rectangle { width: 8, height: 8 };
+                let result: Square = rectangle.try_into().unwrap();
+                let expected = Square { side_length: 8 };
+                assert!(
+                    result == expected,
+                    "Rectangle with equal width and height should be convertible to a square.",
+                );
+                let rectangle = Rectangle { width: 5, height: 8 };
+                let result: Option<Square> = rectangle.try_into();
+                assert!(
+                    result.is_none(),
+                    "Rectangle with different width and height should not be convertible to a square.",
+                );
+            }
+            trait Circle {
+                fn print() -> ();
+            }
+            impl CircleImpl of Circle {
+                fn print() -> () {
+                    println!("This is a circle!");
+                }
+            }
+            impl RectangleDrop of core::traits::Drop<Rectangle>;
+            impl SquareDrop of core::traits::Drop<Square>;
+            impl SquarePartialEq of core::traits::PartialEq<Square> {
+                fn eq(lhs: @Square, rhs: @Square) -> bool {
+                    lhs.side_length == rhs.side_length
+                }
+            }
+        }
+        "#},
+        expanded,
+    );
+}
diff --git a/utils/scarb-test-support/src/cairo_plugin_project_builder.rs b/utils/scarb-test-support/src/cairo_plugin_project_builder.rs
index f869b5ff0..bec8bddf2 100644
--- a/utils/scarb-test-support/src/cairo_plugin_project_builder.rs
+++ b/utils/scarb-test-support/src/cairo_plugin_project_builder.rs
@@ -97,6 +97,10 @@ impl CairoPluginProjectBuilder {
         self.project.just_manifest(t);
         self.just_code(t);
     }
+
+    pub fn add_primitive_token_dep(self) -> Self {
+        self.add_dep(r#"cairo-lang-primitive-token = "1.0.0""#)
+    }
 }
 impl Default for CairoPluginProjectBuilder {

From b2667609038669bd6c7d6f7ca0ce87da51ce132a Mon Sep 17 00:00:00 2001
From: maciektr
Date: Fri, 6 Dec 2024 00:09:23 +0100
Subject: [PATCH 13/13] Refactor: split proc macro tests into multiple files
 (#1811)

commit-id:b0254722
---
 Cargo.lock | 8 +-
 .../compiler/plugin/proc_macro/host/mod.rs | 1 -
 scarb/tests/proc_macro_build.rs | 436 ++++++++++++++++++
 scarb/tests/proc_macro_executable.rs | 107 +++++
 ...d_cairo_plugin.rs => proc_macro_expand.rs} | 218 +++------
 scarb/tests/proc_macro_metadata.rs | 190 ++++++++
 scarb/tests/proc_macro_prebuilt.rs | 5 +-
 7 files changed, 805 insertions(+), 160 deletions(-)
 create mode 100644 scarb/tests/proc_macro_build.rs
 create mode 100644 scarb/tests/proc_macro_executable.rs
 rename scarb/tests/{build_cairo_plugin.rs => proc_macro_expand.rs} (94%)
 create mode 100644 scarb/tests/proc_macro_metadata.rs
diff --git a/Cargo.lock b/Cargo.lock
index b6050f81c..3f476dc48 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -837,6 +837,12 @@ dependencies = [
 "smol_str",
 ]
+[[package]]
+name = "cairo-lang-primitive-token"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "123ac0ecadf31bacae77436d72b88fa9caef2b8e92c89ce63a125ae911a12fae"
+
 [[package]]
 name = "cairo-lang-primitive-token"
 version = "1.0.0"
@@ -1110,7 +1116,7 @@ source = "git+https://github.com/starkware-libs/cairo?rev=03944ce36c4b37ef954d7f
 dependencies = [
 "cairo-lang-debug",
 "cairo-lang-filesystem",
- "cairo-lang-primitive-token",
+ "cairo-lang-primitive-token 1.0.0 (git+https://github.com/starkware-libs/cairo?rev=03944ce36c4b37ef954d7f462d23edce8669e692)",
 "cairo-lang-utils",
 "num-bigint",
 "num-traits 0.2.19",
diff --git
a/scarb/src/compiler/plugin/proc_macro/host/mod.rs b/scarb/src/compiler/plugin/proc_macro/host/mod.rs
index 617cef0e3..feb05ee4d 100644
--- a/scarb/src/compiler/plugin/proc_macro/host/mod.rs
+++ b/scarb/src/compiler/plugin/proc_macro/host/mod.rs
@@ -13,7 +13,6 @@ use crate::compiler::plugin::proc_macro::compilation::SharedLibraryProvider;
 use crate::compiler::plugin::proc_macro::{Expansion, ExpansionKind, ProcMacroInstance};
 use crate::core::{edition_variant, Config, Package, PackageId};
 use anyhow::{ensure, Context, Result};
-use cairo_lang_defs::plugin::PluginDiagnostic;
 use cairo_lang_defs::plugin::{MacroPlugin, MacroPluginMetadata, PluginResult};
 use cairo_lang_filesystem::db::Edition;
 use cairo_lang_filesystem::ids::{CodeMapping, CodeOrigin};
diff --git a/scarb/tests/proc_macro_build.rs b/scarb/tests/proc_macro_build.rs
new file mode 100644
index 000000000..0f60347ca
--- /dev/null
+++ b/scarb/tests/proc_macro_build.rs
@@ -0,0 +1,436 @@
+use assert_fs::fixture::PathChild;
+use assert_fs::TempDir;
+use indoc::indoc;
+use scarb_test_support::cairo_plugin_project_builder::CairoPluginProjectBuilder;
+use scarb_test_support::command::Scarb;
+use scarb_test_support::project_builder::ProjectBuilder;
+use scarb_test_support::workspace_builder::WorkspaceBuilder;
+use snapbox::assert_matches;
+
+#[test]
+fn compile_cairo_plugin() {
+    let t = TempDir::new().unwrap();
+    CairoPluginProjectBuilder::default().build(&t);
+    let output = Scarb::quick_snapbox()
+        .arg("build")
+        // Disable colors in Cargo output.
+        .env("CARGO_TERM_COLOR", "never")
+        .current_dir(&t)
+        .output()
+        .unwrap();
+    assert!(
+        output.status.success(),
+        "stdout={}\n stderr={}",
+        String::from_utf8_lossy(&output.stdout),
+        String::from_utf8_lossy(&output.stderr),
+    );
+    let stdout = String::from_utf8_lossy(&output.stdout).to_string();
+    assert!(stdout.contains("Compiling some v1.0.0"));
+    let lines = stdout.lines().map(ToString::to_string).collect::<Vec<String>>();
+    let (last, lines) = lines.split_last().unwrap();
+    assert_matches(r#"[..] Finished `dev` profile target(s) in [..]"#, last);
+    let (last, _lines) = lines.split_last().unwrap();
+    // Line from Cargo output
+    assert_matches(
+        r#"[..]Finished `release` profile [optimized] target(s) in[..]"#,
+        last,
+    );
+}
+
+#[test]
+fn check_cairo_plugin() {
+    let t = TempDir::new().unwrap();
+    CairoPluginProjectBuilder::default().build(&t);
+    let output = Scarb::quick_snapbox()
+        .arg("check")
+        // Disable colors in Cargo output.
+        .env("CARGO_TERM_COLOR", "never")
+        .current_dir(&t)
+        .output()
+        .unwrap();
+    assert!(
+        output.status.success(),
+        "{}",
+        String::from_utf8_lossy(&output.stderr)
+    );
+    let stdout = String::from_utf8_lossy(&output.stdout).to_string();
+    assert!(stdout.contains("Checking some v1.0.0"));
+    let lines = stdout.lines().map(ToString::to_string).collect::<Vec<String>>();
+    let (last, lines) = lines.split_last().unwrap();
+    assert_matches(
+        r#"[..]
Finished checking `dev` profile target(s) in [..]"#,
+        last,
+    );
+    let (last, _lines) = lines.split_last().unwrap();
+    // Line from Cargo output
+    assert_matches(
+        r#"[..]Finished `release` profile [optimized] target(s) in[..]"#,
+        last,
+    );
+}
+
+#[test]
+fn can_check_cairo_project_with_plugins() {
+    let temp = TempDir::new().unwrap();
+    let t = temp.child("some");
+    CairoPluginProjectBuilder::default().build(&t);
+    let project = temp.child("hello");
+    let y = project.child("other");
+    CairoPluginProjectBuilder::default().name("other").build(&y);
+    WorkspaceBuilder::start()
+        .add_member("other")
+        .package(
+            ProjectBuilder::start()
+                .name("hello")
+                .version("1.0.0")
+                .dep("some", &t),
+        )
+        .build(&project);
+    Scarb::quick_snapbox()
+        .arg("check")
+        // Disable output from Cargo.
+        .env("CARGO_TERM_QUIET", "true")
+        .current_dir(&project)
+        .assert()
+        .success()
+        .stdout_matches(indoc! {r#"
+        [..]Checking other v1.0.0 ([..]Scarb.toml)
+        [..]Compiling some v1.0.0 ([..]Scarb.toml)
+        [..]Checking hello v1.0.0 ([..]Scarb.toml)
+        [..]Finished checking `dev` profile target(s) in [..]
+        "#});
+}
+
+#[test]
+fn resolve_fetched_plugins() {
+    let t = TempDir::new().unwrap();
+    CairoPluginProjectBuilder::default().build(&t);
+    assert!(!t.child("Cargo.lock").exists());
+    let output = Scarb::quick_snapbox()
+        .arg("fetch")
+        // Disable colors in Cargo output.
+        .env("CARGO_TERM_COLOR", "never")
+        .current_dir(&t)
+        .output()
+        .unwrap();
+    assert!(
+        output.status.success(),
+        "{}",
+        String::from_utf8_lossy(&output.stderr)
+    );
+    assert!(t.child("Cargo.lock").exists())
+}
+
+#[test]
+fn can_use_json_output() {
+    let t = TempDir::new().unwrap();
+    CairoPluginProjectBuilder::default().build(&t);
+    let output = Scarb::quick_snapbox()
+        .arg("--json")
+        .arg("check")
+        // Disable colors in Cargo output.
+        .env("CARGO_TERM_COLOR", "never")
+        .current_dir(&t)
+        .output()
+        .unwrap();
+    assert!(
+        output.status.success(),
+        "{}",
+        String::from_utf8_lossy(&output.stderr)
+    );
+    let stdout = String::from_utf8_lossy(&output.stdout).to_string();
+    let lines = stdout.lines().map(ToString::to_string).collect::<Vec<String>>();
+    let (first, lines) = lines.split_first().unwrap();
+    assert_matches(
+        r#"{"status":"checking","message":"some v1.0.0 ([..]Scarb.toml)"}"#,
+        first,
+    );
+    let (last, lines) = lines.split_last().unwrap();
+    assert_matches(
+        r#"{"status":"finished","message":"checking `dev` profile target(s) in [..]"}"#,
+        last,
+    );
+    // Line from Cargo.
+    let (last, _lines) = lines.split_last().unwrap();
+    assert_matches(r#"{"reason":"build-finished","success":true}"#, last);
+}
+
+#[test]
+fn compile_cairo_plugin_with_lib_target() {
+    let t = TempDir::new().unwrap();
+    ProjectBuilder::start()
+        .name("hello")
+        .version("1.0.0")
+        .manifest_extra(indoc! {r#"
+            [lib]
+            [cairo-plugin]
+        "#})
+        .build(&t);
+
+    Scarb::quick_snapbox()
+        .arg("build")
+        .current_dir(&t)
+        .assert()
+        .failure()
+        .stdout_matches(indoc! {r#"
+        error: failed to parse manifest at: [..]/Scarb.toml
+
+        Caused by:
+        target `cairo-plugin` cannot be mixed with other targets
+        "#});
+}
+
+#[test]
+fn compile_cairo_plugin_with_other_target() {
+    let t = TempDir::new().unwrap();
+    ProjectBuilder::start()
+        .name("hello")
+        .version("1.0.0")
+        .manifest_extra(indoc! {r#"
+            [cairo-plugin]
+            [[target.starknet-contract]]
+        "#})
+        .build(&t);
+
+    Scarb::quick_snapbox()
+        .arg("build")
+        .current_dir(&t)
+        .assert()
+        .failure()
+        .stdout_matches(indoc!
{r#" + error: failed to parse manifest at: [..]/Scarb.toml + + Caused by: + target `cairo-plugin` cannot be mixed with other targets + "#}); +} + +#[test] +fn can_define_multiple_macros() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, AuxData, PostProcessContext, post_process, TokenTree, Token, TextSpan}; + + #[attribute_macro] + pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let new_token_string = token_stream.to_string().replace("12", "34"); + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + new_token_string.clone(), + TextSpan { start: 0, end: new_token_string.len() as u32 }, + ))]); + let aux_data = AuxData::new(Vec::new()); + ProcMacroResult::new(token_stream).with_aux_data(aux_data) + } + + #[attribute_macro] + pub fn world(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let new_token_string = token_stream.to_string().replace("56", "78"); + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + new_token_string.clone(), + TextSpan { start: 0, end: new_token_string.len() as u32 }, + ))]); + let aux_data = AuxData::new(Vec::new()); + ProcMacroResult::new(token_stream).with_aux_data(aux_data) + } + + #[post_process] + pub fn callback(context: PostProcessContext) { + assert_eq!(context.aux_data.len(), 2); + } + "##}) + .build(&t); + + let w = temp.child("other"); + CairoPluginProjectBuilder::default() + .name("other") + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, AuxData, PostProcessContext, post_process, TokenTree, Token, TextSpan}; + + #[attribute_macro] + pub fn beautiful(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let new_token_string = token_stream.to_string().replace("90", "09"); + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + new_token_string.clone(), + TextSpan { start: 0, end: new_token_string.len() as u32 }, + ))]); + let aux_data = AuxData::new(Vec::new()); + ProcMacroResult::new(token_stream).with_aux_data(aux_data) + } + + #[post_process] + pub fn callback(context: PostProcessContext) { + assert_eq!(context.aux_data.len(), 1); + } + "##}) + .build(&w); + + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .dep("other", &w) + .lib_cairo(indoc! {r#" + #[hello] + #[beautiful] + #[world] + fn main() -> felt252 { 12 + 56 + 90 } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("cairo-run") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success() + .stdout_matches(indoc! {r#" + [..]Compiling other v1.0.0 ([..]Scarb.toml) + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + [..]Finished `dev` profile target(s) in [..] + [..]Running hello + Run completed successfully, returning [121] + "#}); +} + +#[test] +fn cannot_duplicate_macros() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .lib_rs(indoc! 
{r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + + #[attribute_macro] + pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(token_stream) + } + + #[attribute_macro] + pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(token_stream) + } + "##}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[hello] + fn main() -> felt252 { 12 + 56 + 90 } + "#}) + .build(&project); + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + // Fails with Cargo compile error. + .failure(); +} + +#[test] +fn cannot_duplicate_macros_across_packages() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .lib_rs(indoc! {r#" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + + #[attribute_macro] + pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(token_stream) + } + + #[attribute_macro] + pub fn world(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(token_stream) + } + "#}) + .build(&t); + + let w = temp.child("other"); + CairoPluginProjectBuilder::default() + .name("other") + .lib_rs(indoc! {r#" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + + #[attribute_macro] + pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(token_stream) + } + "#}) + .build(&w); + + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .dep("other", &w) + .lib_cairo(indoc! {r#" + #[hello] + #[world] + fn main() -> felt252 { 12 + 56 + 90 } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .failure() + .stdout_matches(indoc! {r#" + [..]Compiling other v1.0.0 ([..]Scarb.toml) + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + error: duplicate expansions defined for procedural macros: hello (some v1.0.0 ([..]Scarb.toml) and other v1.0.0 ([..]Scarb.toml)) + "#}); +} + +#[test] +fn cannot_use_undefined_macro() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default().build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[world] + fn main() -> felt252 { 12 + 56 + 90 } + "#}) + .build(&project); + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .failure() + .stdout_matches(indoc! {r#" + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + error: Plugin diagnostic: Unsupported attribute. 
+        --> [..]lib.cairo:1:1
+        #[world]
+        ^^^^^^^^
+
+        error: could not compile `hello` due to previous error
+        "#});
+}
diff --git a/scarb/tests/proc_macro_executable.rs b/scarb/tests/proc_macro_executable.rs
new file mode 100644
index 000000000..2385c3025
--- /dev/null
+++ b/scarb/tests/proc_macro_executable.rs
@@ -0,0 +1,107 @@
+use assert_fs::fixture::PathChild;
+use assert_fs::TempDir;
+use cairo_lang_sierra::program::VersionedProgram;
+use indoc::indoc;
+use scarb_test_support::cairo_plugin_project_builder::CairoPluginProjectBuilder;
+use scarb_test_support::command::Scarb;
+use scarb_test_support::fsx::ChildPathEx;
+use scarb_test_support::project_builder::ProjectBuilder;
+
+#[test]
+fn can_create_executable_attribute() {
+    let temp = TempDir::new().unwrap();
+    let t = temp.child("some");
+    CairoPluginProjectBuilder::default()
+        .lib_rs(indoc! {r##"
+            use cairo_lang_macro::executable_attribute;
+
+            executable_attribute!("some");
+        "##})
+        .build(&t);
+
+    let project = temp.child("hello");
+    ProjectBuilder::start()
+        .name("hello")
+        .version("1.0.0")
+        .dep_starknet()
+        .dep("some", &t)
+        .lib_cairo(indoc! {r#"
+            #[some]
+            fn main() -> felt252 { 12 }
+        "#})
+        .build(&project);
+
+    Scarb::quick_snapbox()
+        .arg("build")
+        // Disable output from Cargo.
+        .env("CARGO_TERM_QUIET", "true")
+        .current_dir(&project)
+        .assert()
+        .success()
+        .stdout_matches(indoc! {r#"
+        [..]Compiling some v1.0.0 ([..]Scarb.toml)
+        [..]Compiling hello v1.0.0 ([..]Scarb.toml)
+        [..]Finished `dev` profile target(s) in [..]
+        "#});
+    let sierra = project
+        .child("target")
+        .child("dev")
+        .child("hello.sierra.json")
+        .read_to_string();
+    let sierra = serde_json::from_str::<VersionedProgram>(&sierra).unwrap();
+    let sierra = sierra.into_v1().unwrap();
+    let executables = sierra.debug_info.unwrap().executables;
+    assert_eq!(executables.len(), 1);
+    let executables = executables.get("some").unwrap();
+    assert_eq!(executables.len(), 1);
+    let fid = executables.first().unwrap().clone();
+    assert_eq!(fid.clone().debug_name.unwrap(), "hello::main");
+    assert!(sierra
+        .program
+        .funcs
+        .iter()
+        .any(|f| f.id.clone() == fid.clone()));
+}
+
+#[test]
+fn executable_name_cannot_clash_attr() {
+    let temp = TempDir::new().unwrap();
+    let t = temp.child("some");
+    CairoPluginProjectBuilder::default()
+        .lib_rs(indoc! {r##"
+            use cairo_lang_macro::{executable_attribute, attribute_macro, TokenStream, ProcMacroResult};
+
+            executable_attribute!("some");
+
+            #[attribute_macro]
+            fn some(_args: TokenStream, input: TokenStream) -> ProcMacroResult {
+                ProcMacroResult::new(input)
+            }
+        "##})
+        .build(&t);
+
+    let project = temp.child("hello");
+    ProjectBuilder::start()
+        .name("hello")
+        .version("1.0.0")
+        .dep_starknet()
+        .dep("some", &t)
+        .lib_cairo(indoc! {r#"
+            #[some]
+            fn main() -> felt252 { 12 }
+        "#})
+        .build(&project);
+
+    Scarb::quick_snapbox()
+        .arg("build")
+        // Disable output from Cargo.
+        .env("CARGO_TERM_QUIET", "true")
+        .current_dir(&project)
+        .assert()
+        .failure()
+        .stdout_matches(indoc!
{r#" + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + error: duplicate expansions defined for procedural macro some v1.0.0 ([..]Scarb.toml): some + "#}); +} diff --git a/scarb/tests/build_cairo_plugin.rs b/scarb/tests/proc_macro_expand.rs similarity index 94% rename from scarb/tests/build_cairo_plugin.rs rename to scarb/tests/proc_macro_expand.rs index d0530cdbc..46bad039b 100644 --- a/scarb/tests/build_cairo_plugin.rs +++ b/scarb/tests/proc_macro_expand.rs @@ -1,6 +1,5 @@ use assert_fs::fixture::PathChild; use assert_fs::TempDir; -use cairo_lang_sierra::program::VersionedProgram; use indoc::indoc; use scarb_test_support::cairo_plugin_project_builder::CairoPluginProjectBuilder; use scarb_test_support::command::Scarb; @@ -253,52 +252,6 @@ fn can_emit_plugin_warning() { "#}); } -#[test] -fn can_emit_plugin_error() { - let temp = TempDir::new().unwrap(); - let t = temp.child("some"); - CairoPluginProjectBuilder::default() - .lib_rs(indoc! {r#" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, Diagnostic}; - - #[attribute_macro] - pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - let diag = Diagnostic::error("Some error from macro."); - ProcMacroResult::new(token_stream) - .with_diagnostics(diag.into()) - } - "#}) - .build(&t); - let project = temp.child("hello"); - ProjectBuilder::start() - .name("hello") - .version("1.0.0") - .dep("some", &t) - .lib_cairo(indoc! {r#" - #[some] - fn f() -> felt252 { 12 } - "#}) - .build(&project); - - Scarb::quick_snapbox() - .arg("build") - // Disable output from Cargo. - .env("CARGO_TERM_QUIET", "true") - .current_dir(&project) - .assert() - .failure() - .stdout_matches(indoc! {r#" - [..] Compiling some v1.0.0 ([..]Scarb.toml) - [..] Compiling hello v1.0.0 ([..]Scarb.toml) - error: Plugin diagnostic: Some error from macro. - --> [..]lib.cairo:1:1 - #[some] - ^^^^^^^ - - error: could not compile `hello` due to previous error - "#}); -} - #[test] fn diags_from_generated_code_mapped_correctly() { let temp = TempDir::new().unwrap(); @@ -1186,105 +1139,6 @@ fn can_read_attribute_args() { "#}); } -#[test] -fn can_create_executable_attribute() { - let temp = TempDir::new().unwrap(); - let t = temp.child("some"); - CairoPluginProjectBuilder::default() - .lib_rs(indoc! {r##" - use cairo_lang_macro::executable_attribute; - - executable_attribute!("some"); - "##}) - .build(&t); - - let project = temp.child("hello"); - ProjectBuilder::start() - .name("hello") - .version("1.0.0") - .dep_starknet() - .dep("some", &t) - .lib_cairo(indoc! {r#" - #[some] - fn main() -> felt252 { 12 } - "#}) - .build(&project); - - Scarb::quick_snapbox() - .arg("build") - // Disable output from Cargo. - .env("CARGO_TERM_QUIET", "true") - .current_dir(&project) - .assert() - .success() - .stdout_matches(indoc! {r#" - [..]Compiling some v1.0.0 ([..]Scarb.toml) - [..]Compiling hello v1.0.0 ([..]Scarb.toml) - [..]Finished `dev` profile target(s) in [..] 
- "#}); - let sierra = project - .child("target") - .child("dev") - .child("hello.sierra.json") - .read_to_string(); - let sierra = serde_json::from_str::(&sierra).unwrap(); - let sierra = sierra.into_v1().unwrap(); - let executables = sierra.debug_info.unwrap().executables; - assert_eq!(executables.len(), 1); - let executables = executables.get("some").unwrap(); - assert_eq!(executables.len(), 1); - let fid = executables.first().unwrap().clone(); - assert_eq!(fid.clone().debug_name.unwrap(), "hello::main"); - assert!(sierra - .program - .funcs - .iter() - .any(|f| f.id.clone() == fid.clone())); -} - -#[test] -fn executable_name_cannot_clash_attr() { - let temp = TempDir::new().unwrap(); - let t = temp.child("some"); - CairoPluginProjectBuilder::default() - .lib_rs(indoc! {r##" - use cairo_lang_macro::{executable_attribute, attribute_macro, TokenStream, ProcMacroResult}; - - executable_attribute!("some"); - - #[attribute_macro] - fn some(_args: TokenStream, input: TokenStream) -> ProcMacroResult { - ProcMacroResult::new(input) - } - "##}) - .build(&t); - - let project = temp.child("hello"); - ProjectBuilder::start() - .name("hello") - .version("1.0.0") - .dep_starknet() - .dep("some", &t) - .lib_cairo(indoc! {r#" - #[some] - fn main() -> felt252 { 12 } - "#}) - .build(&project); - - Scarb::quick_snapbox() - .arg("build") - // Disable output from Cargo. - .env("CARGO_TERM_QUIET", "true") - .current_dir(&project) - .assert() - .failure() - .stdout_matches(indoc! {r#" - [..]Compiling some v1.0.0 ([..]Scarb.toml) - [..]Compiling hello v1.0.0 ([..]Scarb.toml) - error: duplicate expansions defined for procedural macro some v1.0.0 ([..]Scarb.toml): some - "#}); -} - #[test] fn can_be_expanded() { let temp = TempDir::new().unwrap(); @@ -1574,6 +1428,52 @@ fn can_expand_impl_inner_func_attrr() { "#}); } +#[test] +fn can_emit_plugin_error() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .lib_rs(indoc! {r#" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, Diagnostic}; + + #[attribute_macro] + pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let diag = Diagnostic::error("Some error from macro."); + ProcMacroResult::new(token_stream) + .with_diagnostics(diag.into()) + } + "#}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[some] + fn f() -> felt252 { 12 } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .failure() + .stdout_matches(indoc! {r#" + [..] Compiling some v1.0.0 ([..]Scarb.toml) + [..] Compiling hello v1.0.0 ([..]Scarb.toml) + error: Plugin diagnostic: Some error from macro. + --> [..]lib.cairo:1:1 + #[some] + ^^^^^^^ + + error: could not compile `hello` due to previous error + "#}); +} + #[test] fn code_mappings_preserve_attribute_error_locations() { let temp = TempDir::new().unwrap(); @@ -1618,7 +1518,7 @@ fn code_mappings_preserve_attribute_error_locations() { error: Cannot assign to an immutable variable. --> [..]lib.cairo[proc_some]:3:5 x = 2; - ^***^ + ^^^^^ note: this error originates in the attribute macro: `some` error: could not compile `hello` due to previous error @@ -1671,7 +1571,7 @@ fn code_mappings_preserve_inline_macro_error_locations() { error: Identifier not found. 
          --> [..]lib.cairo:1:1
         fn main() -> felt252 {
-        ^*******^
+        ^^^^^^^^^
 
         error: could not compile `hello` due to previous error
     "#});
 }
@@ -1752,18 +1652,24 @@ fn code_mappings_preserve_derive_error_locations() {
         .assert()
         .failure()
         .stdout_matches(indoc! {r#"
-            [..] Compiling some v1.0.0 ([..]Scarb.toml)
-            [..] Compiling hello v1.0.0 ([..]Scarb.toml)
+            [..]Compiling some v1.0.0 ([..]Scarb.toml)
+            [..]Compiling hello v1.0.0 ([..]Scarb.toml)
             error: The value does not fit within the range of type core::integer::u8.
-             --> [..]lib.cairo:1:1
-            trait Hello {
-            ^**************^
+             --> [..]lib.cairo:1:1-8:1
+              trait Hello {
+             _^
+            | ...
+            | #[derive(CustomDerive, Drop)]
+            |_^
 
             note: this error originates in the derive macro: `custom_derive`
             error: The value does not fit within the range of type core::integer::u8.
-             --> [..]lib.cairo:1:1
-            trait Hello {
-            ^**************^
+             --> [..]lib.cairo:1:1-8:10
+              trait Hello {
+             _^
+            | ...
+            | #[derive(CustomDerive, Drop)]
+            |__________^
 
             note: this error originates in the derive macro: `custom_derive`
             error: could not compile `hello` due to previous error
diff --git a/scarb/tests/proc_macro_metadata.rs b/scarb/tests/proc_macro_metadata.rs
new file mode 100644
index 000000000..ee120a142
--- /dev/null
+++ b/scarb/tests/proc_macro_metadata.rs
@@ -0,0 +1,190 @@
+use assert_fs::fixture::PathChild;
+use assert_fs::TempDir;
+use indoc::indoc;
+use scarb_test_support::cairo_plugin_project_builder::CairoPluginProjectBuilder;
+use scarb_test_support::command::Scarb;
+use scarb_test_support::project_builder::ProjectBuilder;
+
+#[test]
+fn can_return_aux_data_from_plugin() {
+    let temp = TempDir::new().unwrap();
+    let t = temp.child("some");
+    CairoPluginProjectBuilder::default()
+        .lib_rs(indoc! {r##"
+            use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, AuxData, PostProcessContext, post_process};
+            use serde::{Serialize, Deserialize};
+
+            #[derive(Debug, Serialize, Deserialize)]
+            struct SomeMacroDataFormat {
+                msg: String
+            }
+
+            #[attribute_macro]
+            pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult {
+                let value = SomeMacroDataFormat { msg: "Hello from some macro!".to_string() };
+                let value = serde_json::to_string(&value).unwrap();
+                let value: Vec<u8> = value.into_bytes();
+                let aux_data = AuxData::new(value);
+                ProcMacroResult::new(token_stream).with_aux_data(aux_data)
+            }
+
+            #[post_process]
+            pub fn callback(context: PostProcessContext) {
+                let aux_data = context.aux_data.into_iter()
+                    .map(|aux_data| {
+                        let value: Vec<u8> = aux_data.into();
+                        let aux_data: SomeMacroDataFormat = serde_json::from_slice(&value).unwrap();
+                        aux_data
+                    })
+                    .collect::<Vec<_>>();
+                println!("{:?}", aux_data);
+            }
+
+            #[post_process]
+            pub fn some_no_op_callback(context: PostProcessContext) {
+                drop(context.aux_data);
+            }
+        "##})
+        .add_dep(r#"serde = { version = "*", features = ["derive"] }"#)
+        .add_dep(r#"serde_json = "*""#)
+        .build(&t);
+    let project = temp.child("hello");
+    ProjectBuilder::start()
+        .name("hello")
+        .version("1.0.0")
+        .dep_starknet()
+        .dep("some", &t)
+        .lib_cairo(indoc! {r#"
+            #[some]
+            fn main() -> felt252 { 12 }
+        "#})
+        .build(&project);
+
+    Scarb::quick_snapbox()
+        .arg("build")
+        // Disable output from Cargo.
+        .env("CARGO_TERM_QUIET", "true")
+        .current_dir(&project)
+        .assert()
+        .success()
+        .stdout_matches(indoc! {r#"
+            [..]Compiling some v1.0.0 ([..]Scarb.toml)
+            [..]Compiling hello v1.0.0 ([..]Scarb.toml)
+            [SomeMacroDataFormat { msg: "Hello from some macro!" }]
+            [..]Finished `dev` profile target(s) in [..]
+ "#}); +} + +#[test] +fn can_read_token_stream_metadata() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + + #[attribute_macro] + pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + println!("{:#?}", token_stream.metadata()); + ProcMacroResult::new(token_stream) + } + "##}) + .build(&t); + + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[some] + fn main() -> felt252 { 12 } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success() + .stdout_matches(indoc! {r#" + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + TokenStreamMetadata { + original_file_path: Some( + "[..]lib.cairo", + ), + file_id: Some( + "[..]", + ), + edition: Some( + "[..]", + ), + } + [..]Finished `dev` profile target(s) in [..] + "#}); +} + +#[test] +fn can_resolve_full_path_markers() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, post_process, PostProcessContext, TokenTree, Token, TextSpan}; + + #[attribute_macro] + pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let full_path_markers = vec!["some-key".to_string()]; + + let code = format!( + r#"#[macro::full_path_marker("some-key")] {}"#, + token_stream.to_string().replace("12", "34") + ); + + ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new( + code.clone(), + TextSpan { + start: 0, + end: code.len() as u32, + }, + ))]) + ).with_full_path_markers(full_path_markers) + } + + #[post_process] + pub fn callback(context: PostProcessContext) { + println!("{:?}", context.full_path_markers); + } + "##}) + .build(&t); + + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[some] + fn main() -> felt252 { 12 } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success() + .stdout_matches(indoc! {r#" + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + [FullPathMarker { key: "some-key", full_path: "hello::main" }] + [..]Finished `dev` profile target(s) in [..] + "#}); +} diff --git a/scarb/tests/proc_macro_prebuilt.rs b/scarb/tests/proc_macro_prebuilt.rs index d7b142a95..09ec23777 100644 --- a/scarb/tests/proc_macro_prebuilt.rs +++ b/scarb/tests/proc_macro_prebuilt.rs @@ -222,8 +222,9 @@ fn load_prebuilt_proc_macros() { name: "some".to_string(), args: TokenStream::new(vec![TokenTree::Ident(Token::new( "42", - TextSpan::default(), + TextSpan::call_site(), ))]), + call_site: TextSpan::new(0, 0), }) .unwrap(); assert_eq!(response.diagnostics, vec![]); @@ -231,7 +232,7 @@ fn load_prebuilt_proc_macros() { response.token_stream, TokenStream::new(vec![TokenTree::Ident(Token::new( "42", - TextSpan::default(), + TextSpan::call_site(), ))]) ); }