From 1c99a21d255df00e74aba09431d8a8198d77378e Mon Sep 17 00:00:00 2001 From: maciektr Date: Wed, 13 Nov 2024 12:34:36 +0100 Subject: [PATCH 01/38] Rework internal TokenStream representation (#1699) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit commit-id:ab3e2fd1 --- **Stack**: - #1734 - #1722 - #1704 - #1700 - #1699 ⬅ ⚠️ *Part of a stack created by [spr](https://github.com/ejoffe/spr). Do not merge manually using the UI - doing so may have unexpected results.* --- Cargo.lock | 8 +- plugins/cairo-lang-macro-stable/src/lib.rs | 34 ++-- plugins/cairo-lang-macro/Cargo.toml | 2 +- .../cairo-lang-macro/src/types/conversion.rs | 129 ++++++++++++- plugins/cairo-lang-macro/src/types/mod.rs | 92 ++------- plugins/cairo-lang-macro/src/types/token.rs | 129 +++++++++++++ scarb/Cargo.toml | 4 +- scarb/src/compiler/plugin/proc_macro/ffi.rs | 15 -- scarb/src/compiler/plugin/proc_macro/host.rs | 148 +++++++-------- scarb/src/compiler/plugin/proc_macro/mod.rs | 2 + scarb/src/compiler/plugin/proc_macro/types.rs | 54 ++++++ .../methods/expand_derive.rs | 6 +- scarb/tests/build_cairo_plugin.rs | 177 +++++++++++------- scarb/tests/proc_macro_prebuilt.rs | 15 +- scarb/tests/proc_macro_server.rs | 48 +++-- .../scarb-proc-macro-server-types/Cargo.toml | 2 +- .../src/proc_macro_server.rs | 12 +- 17 files changed, 597 insertions(+), 280 deletions(-) create mode 100644 plugins/cairo-lang-macro/src/types/token.rs create mode 100644 scarb/src/compiler/plugin/proc_macro/types.rs diff --git a/Cargo.lock b/Cargo.lock index 6e914b5f5..2649cfe8f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -947,7 +947,7 @@ name = "cairo-lang-macro" version = "0.1.1" dependencies = [ "cairo-lang-macro-attributes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "cairo-lang-macro-stable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cairo-lang-macro-stable 1.0.0", "linkme", "serde", "serde_json", @@ -5706,8 +5706,8 @@ dependencies = [ "cairo-lang-filesystem", "cairo-lang-formatter", "cairo-lang-lowering", - "cairo-lang-macro 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "cairo-lang-macro-stable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cairo-lang-macro 0.1.1", + "cairo-lang-macro-stable 1.0.0", "cairo-lang-parser", "cairo-lang-semantic", "cairo-lang-sierra", @@ -5948,7 +5948,7 @@ dependencies = [ name = "scarb-proc-macro-server-types" version = "0.2.0" dependencies = [ - "cairo-lang-macro 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "cairo-lang-macro 0.1.1", "serde", "serde_json", ] diff --git a/plugins/cairo-lang-macro-stable/src/lib.rs b/plugins/cairo-lang-macro-stable/src/lib.rs index dd91b21a8..fc5542987 100644 --- a/plugins/cairo-lang-macro-stable/src/lib.rs +++ b/plugins/cairo-lang-macro-stable/src/lib.rs @@ -1,11 +1,30 @@ use crate::ffi::{StableOption, StableSlice}; -use std::ffi::CStr; use std::num::NonZeroU8; use std::os::raw::c_char; use std::ptr::NonNull; pub mod ffi; +#[repr(C)] +#[derive(Debug)] +pub struct StableToken { + pub span: StableTextSpan, + pub content: *mut c_char, +} + +#[repr(C)] +#[derive(Debug)] +pub struct StableTextSpan { + pub start: usize, + pub end: usize, +} + +#[repr(C)] +#[derive(Debug)] +pub enum StableTokenTree { + Ident(StableToken), +} + #[repr(C)] #[derive(Debug)] pub struct StableExpansion { @@ -23,7 +42,7 @@ pub type StableExpansionsList = StableSlice; #[repr(C)] #[derive(Debug)] pub struct StableTokenStream { - pub value: *mut c_char, + pub tokens: 
StableSlice, pub metadata: StableTokenStreamMetadata, } @@ -76,17 +95,6 @@ pub struct StableResultWrapper { pub output: StableProcMacroResult, } -impl StableTokenStream { - /// Convert to String. - /// - /// # Safety - pub unsafe fn to_string(&self) -> String { - // Note that this does not deallocate the c-string. - // The memory must still be freed with `CString::from_raw`. - CStr::from_ptr(self.value).to_string_lossy().to_string() - } -} - #[repr(C)] pub struct StablePostProcessContext { pub aux_data: StableSlice, diff --git a/plugins/cairo-lang-macro/Cargo.toml b/plugins/cairo-lang-macro/Cargo.toml index c59d1a5dd..0f3fcc1e5 100644 --- a/plugins/cairo-lang-macro/Cargo.toml +++ b/plugins/cairo-lang-macro/Cargo.toml @@ -15,7 +15,7 @@ repository.workspace = true [dependencies] cairo-lang-macro-attributes = "0.1" -cairo-lang-macro-stable = "1" +cairo-lang-macro-stable = { path = "../cairo-lang-macro-stable" } linkme.workspace = true serde = { workspace = true, optional = true } diff --git a/plugins/cairo-lang-macro/src/types/conversion.rs b/plugins/cairo-lang-macro/src/types/conversion.rs index f9537e292..1fb0128e0 100644 --- a/plugins/cairo-lang-macro/src/types/conversion.rs +++ b/plugins/cairo-lang-macro/src/types/conversion.rs @@ -1,12 +1,12 @@ use crate::{ AuxData, Diagnostic, ExpansionDefinition, FullPathMarker, PostProcessContext, ProcMacroResult, - Severity, TokenStream, TokenStreamMetadata, + Severity, TextSpan, Token, TokenStream, TokenStreamMetadata, TokenTree, }; use cairo_lang_macro_stable::ffi::StableSlice; use cairo_lang_macro_stable::{ StableAuxData, StableDiagnostic, StableExpansion, StableFullPathMarker, - StablePostProcessContext, StableProcMacroResult, StableSeverity, StableTokenStream, - StableTokenStreamMetadata, + StablePostProcessContext, StableProcMacroResult, StableSeverity, StableTextSpan, StableToken, + StableTokenStream, StableTokenStreamMetadata, StableTokenTree, }; use std::ffi::{CStr, CString, c_char}; use std::num::NonZeroU8; @@ -90,15 +90,120 @@ impl ProcMacroResult { } } +impl TextSpan { + /// Convert to FFI-safe representation. + #[doc(hidden)] + pub fn into_stable(self) -> StableTextSpan { + StableTextSpan { + start: self.start, + end: self.end, + } + } + + #[doc(hidden)] + pub fn from_stable(span: &StableTextSpan) -> Self { + Self { + start: span.start, + end: span.end, + } + } + + #[doc(hidden)] + pub fn from_owned_stable(span: StableTextSpan) -> Self { + Self { + start: span.start, + end: span.end, + } + } +} + +impl Token { + /// Convert to FFI-safe representation. + #[doc(hidden)] + pub fn into_stable(self) -> StableToken { + let cstr = CString::new(self.content.as_bytes()).unwrap(); + StableToken { + span: self.span.into_stable(), + content: cstr.into_raw(), + } + } + + /// Convert to native Rust representation, without taking the ownership of the string. + /// + /// Note that you still need to free the memory by calling `from_owned_stable`. + /// + /// # Safety + #[doc(hidden)] + pub unsafe fn from_stable(token: &StableToken) -> Self { + Self { + content: from_raw_cstr(token.content), + span: TextSpan::from_stable(&token.span), + } + } + + /// Convert to native Rust representation, with taking the ownership of the string. + /// + /// Useful when you need to free the allocated memory. + /// Only use on the same side of FFI-barrier, where the memory has been allocated. 
+ /// + /// # Safety + #[doc(hidden)] + pub unsafe fn from_owned_stable(token: StableToken) -> Self { + Self { + content: from_raw_cstring(token.content), + span: TextSpan::from_owned_stable(token.span), + } + } +} + +impl TokenTree { + /// Convert to FFI-safe representation. + #[doc(hidden)] + pub fn into_stable(self) -> StableTokenTree { + match self { + Self::Ident(token) => StableTokenTree::Ident(token.into_stable()), + } + } + + /// Convert to native Rust representation, without taking the ownership of the string. + /// + /// Note that you still need to free the memory by calling `from_owned_stable`. + /// + /// # Safety + #[doc(hidden)] + pub unsafe fn from_stable(token_tree: &StableTokenTree) -> Self { + match token_tree { + StableTokenTree::Ident(token) => Self::Ident(Token::from_stable(token)), + } + } + + /// Convert to native Rust representation, with taking the ownership of the string. + /// + /// Useful when you need to free the allocated memory. + /// Only use on the same side of FFI-barrier, where the memory has been allocated. + /// + /// # Safety + #[doc(hidden)] + pub unsafe fn from_owned_stable(token_tree: StableTokenTree) -> Self { + match token_tree { + StableTokenTree::Ident(token) => Self::Ident(Token::from_owned_stable(token)), + } + } +} + impl TokenStream { /// Convert to FFI-safe representation. /// /// # Safety #[doc(hidden)] pub fn into_stable(self) -> StableTokenStream { - let cstr = CString::new(self.value).unwrap(); + let tokens = self + .tokens + .into_iter() + .map(|token| token.into_stable()) + .collect::>(); StableTokenStream { - value: cstr.into_raw(), + tokens: StableSlice::new(tokens), metadata: self.metadata.into_stable(), } } @@ -110,8 +215,13 @@ impl TokenStream { /// # Safety #[doc(hidden)] pub unsafe fn from_stable(token_stream: &StableTokenStream) -> Self { + let (ptr, n) = token_stream.tokens.raw_parts(); + let tokens = slice::from_raw_parts(ptr, n) + .iter() + .map(|token_tree| TokenTree::from_stable(token_tree)) + .collect::>(); Self { - value: from_raw_cstr(token_stream.value), + tokens, metadata: TokenStreamMetadata::from_stable(&token_stream.metadata), } } @@ -124,8 +234,13 @@ impl TokenStream { /// # Safety #[doc(hidden)] pub unsafe fn from_owned_stable(token_stream: StableTokenStream) -> Self { + let tokens = token_stream.tokens.into_owned(); + let tokens = tokens + .into_iter() + .map(|token_tree| TokenTree::from_owned_stable(token_tree)) + .collect::>(); Self { - value: from_raw_cstring(token_stream.value), + tokens, metadata: TokenStreamMetadata::from_owned_stable(token_stream.metadata), } } diff --git a/plugins/cairo-lang-macro/src/types/mod.rs b/plugins/cairo-lang-macro/src/types/mod.rs index a811d7558..618d61746 100644 --- a/plugins/cairo-lang-macro/src/types/mod.rs +++ b/plugins/cairo-lang-macro/src/types/mod.rs @@ -1,10 +1,11 @@ -use std::fmt::Display; use std::vec::IntoIter; mod conversion; mod expansions; +mod token; pub use expansions::*; +pub use token::*; /// Result of procedural macro code generation. #[derive(Debug, Clone)] @@ -15,78 +16,6 @@ pub struct ProcMacroResult { pub full_path_markers: Vec, } -/// An abstract stream of Cairo tokens. -/// -/// This is both input and part of an output of a procedural macro. -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] -pub struct TokenStream { - value: String, - metadata: TokenStreamMetadata, -} - -/// Metadata of [`TokenStream`]. 
-/// -/// This struct can be used to describe the origin of the [`TokenStream`]. -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] -pub struct TokenStreamMetadata { - /// The path to the file from which the [`TokenStream`] has been created. - pub original_file_path: Option, - /// ID of the file from which the [`TokenStream`] has been created. - /// - /// It is guaranteed, that the `file_id` will be unique for each file. - pub file_id: Option, -} - -impl TokenStream { - #[doc(hidden)] - pub fn new(value: String) -> Self { - Self { - value, - metadata: TokenStreamMetadata::default(), - } - } - - #[doc(hidden)] - pub fn empty() -> Self { - Self::new("".to_string()) - } - - #[doc(hidden)] - pub fn with_metadata(mut self, metadata: TokenStreamMetadata) -> Self { - self.metadata = metadata; - self - } - - /// Get `[TokenStreamMetadata`] associated with this [`TokenStream`]. - /// - /// The metadata struct can be used to describe the [`TokenStream`] origin. - pub fn metadata(&self) -> &TokenStreamMetadata { - &self.metadata - } - - pub fn is_empty(&self) -> bool { - self.to_string().is_empty() - } -} - -impl Display for TokenStream { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.value) - } -} - -impl TokenStreamMetadata { - #[doc(hidden)] - pub fn new(file_path: impl ToString, file_id: impl ToString) -> Self { - Self { - original_file_path: Some(file_path.to_string()), - file_id: Some(file_id.to_string()), - } - } -} - /// **Auxiliary data** returned by procedural macro code generation. /// /// This struct can be used to collect additional information from the Cairo source code of @@ -101,7 +30,7 @@ impl TokenStreamMetadata { /// For instance, auxiliary data can be serialized as JSON. /// /// ``` -/// use cairo_lang_macro::{AuxData, ProcMacroResult, TokenStream, attribute_macro, post_process, PostProcessContext}; +/// use cairo_lang_macro::{AuxData, ProcMacroResult, TokenStream, TokenTree, Token, TextSpan, attribute_macro, post_process, PostProcessContext}; /// use serde::{Serialize, Deserialize}; /// #[derive(Debug, Serialize, Deserialize)] /// struct SomeAuxDataFormat { @@ -110,11 +39,16 @@ impl TokenStreamMetadata { /// /// #[attribute_macro] /// pub fn some_macro(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { -/// let token_stream = TokenStream::new( -/// token_stream.to_string() -/// // Remove macro call to avoid infinite loop. -/// .replace("#[some]", "") -/// ); +/// // Remove macro call to avoid infinite loop. +/// let code = token_stream.to_string().replace("#[some]", ""); +/// let token_stream = TokenStream::new(vec![ +/// TokenTree::Ident( +/// Token::new( +/// code.clone(), +/// TextSpan::new(0, code.len()) +/// ) +/// ) +/// ]); /// let value = SomeAuxDataFormat { some_message: "Hello from some macro!".to_string() }; /// let value = serde_json::to_string(&value).unwrap(); /// let value: Vec = value.into_bytes(); diff --git a/plugins/cairo-lang-macro/src/types/token.rs b/plugins/cairo-lang-macro/src/types/token.rs new file mode 100644 index 000000000..52b50068f --- /dev/null +++ b/plugins/cairo-lang-macro/src/types/token.rs @@ -0,0 +1,129 @@ +use std::fmt::Display; + +/// An abstract stream of Cairo tokens. +/// +/// This is both input and part of an output of a procedural macro. 
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] +pub struct TokenStream { + pub tokens: Vec, + pub metadata: TokenStreamMetadata, +} + +/// A single token or a delimited sequence of token trees. +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum TokenTree { + Ident(Token), +} + +impl Default for TokenTree { + fn default() -> Self { + Self::Ident(Default::default()) + } +} + +/// A range of text offsets that form a span (like text selection). +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] +pub struct TextSpan { + pub start: usize, + pub end: usize, +} + +/// A single Cairo token. +/// +/// The most atomic item of Cairo code representation. +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] +pub struct Token { + pub content: String, + pub span: TextSpan, +} + +/// Metadata of [`TokenStream`]. +/// +/// This struct describes the origin of the [`TokenStream`]. +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] +pub struct TokenStreamMetadata { + /// The path to the file from which the [`TokenStream`] has been created. + pub original_file_path: Option, + /// ID of the file from which the [`TokenStream`] has been created. + /// + /// It is guaranteed, that the `file_id` will be unique for each file. + pub file_id: Option, +} + +impl TokenStream { + #[doc(hidden)] + pub fn new(tokens: Vec) -> Self { + Self { + tokens, + metadata: TokenStreamMetadata::default(), + } + } + + #[doc(hidden)] + pub fn empty() -> Self { + Self::new(Vec::default()) + } + + #[doc(hidden)] + pub fn with_metadata(mut self, metadata: TokenStreamMetadata) -> Self { + self.metadata = metadata; + self + } + + /// Get `[TokenStreamMetadata`] associated with this [`TokenStream`]. + /// + /// The metadata struct can be used to describe the [`TokenStream`] origin. 
+ pub fn metadata(&self) -> &TokenStreamMetadata { + &self.metadata + } + + pub fn is_empty(&self) -> bool { + self.to_string().is_empty() + } +} + +impl Display for TokenStream { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + for token in &self.tokens { + match token { + TokenTree::Ident(token) => { + write!(f, "{}", token.content.clone())?; + } + } + } + Ok(()) + } +} + +impl TokenStreamMetadata { + #[doc(hidden)] + pub fn new(file_path: impl ToString, file_id: impl ToString) -> Self { + Self { + original_file_path: Some(file_path.to_string()), + file_id: Some(file_id.to_string()), + } + } +} + +impl TokenTree { + pub fn from_ident(token: Token) -> Self { + Self::Ident(token) + } +} + +impl TextSpan { + pub fn new(start: usize, end: usize) -> TextSpan { + TextSpan { start, end } + } +} + +impl Token { + pub fn new(content: String, span: TextSpan) -> Self { + Self { content, span } + } +} diff --git a/scarb/Cargo.toml b/scarb/Cargo.toml index b488ca3ee..9f69ba41a 100644 --- a/scarb/Cargo.toml +++ b/scarb/Cargo.toml @@ -23,8 +23,8 @@ cairo-lang-executable.workspace = true cairo-lang-filesystem.workspace = true cairo-lang-formatter.workspace = true cairo-lang-lowering.workspace = true -cairo-lang-macro = "0.1" -cairo-lang-macro-stable = "1" +cairo-lang-macro = { path = "../plugins/cairo-lang-macro" } +cairo-lang-macro-stable = { path = "../plugins/cairo-lang-macro-stable" } cairo-lang-parser.workspace = true cairo-lang-semantic.workspace = true cairo-lang-sierra-generator.workspace = true diff --git a/scarb/src/compiler/plugin/proc_macro/ffi.rs b/scarb/src/compiler/plugin/proc_macro/ffi.rs index 465037a17..bba326e84 100644 --- a/scarb/src/compiler/plugin/proc_macro/ffi.rs +++ b/scarb/src/compiler/plugin/proc_macro/ffi.rs @@ -1,6 +1,5 @@ use crate::core::{Package, PackageId}; use anyhow::{Context, Result, ensure}; -use cairo_lang_defs::patcher::PatchBuilder; use cairo_lang_macro::{ ExpansionKind as SharedExpansionKind, FullPathMarker, PostProcessContext, ProcMacroResult, TokenStream, @@ -9,8 +8,6 @@ use cairo_lang_macro_stable::{ StableExpansion, StableExpansionsList, StablePostProcessContext, StableProcMacroResult, StableResultWrapper, StableTokenStream, }; -use cairo_lang_syntax::node::TypedSyntaxNode; -use cairo_lang_syntax::node::db::SyntaxGroup; use camino::Utf8PathBuf; use itertools::Itertools; use libloading::{Library, Symbol}; @@ -28,18 +25,6 @@ use libloading::os::windows::Symbol as RawSymbol; use smol_str::SmolStr; use tracing::trace; -pub trait FromSyntaxNode { - fn from_syntax_node(db: &dyn SyntaxGroup, node: &impl TypedSyntaxNode) -> Self; -} - -impl FromSyntaxNode for TokenStream { - fn from_syntax_node(db: &dyn SyntaxGroup, node: &impl TypedSyntaxNode) -> Self { - let mut builder = PatchBuilder::new(db, node); - builder.add_node(node.as_syntax_node()); - Self::new(builder.build().0) - } -} - const EXEC_ATTR_PREFIX: &str = "__exec_attr_"; /// Representation of a single procedural macro. 
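
> Note (not part of the patch): the string-based `FromSyntaxNode` helper removed above is superseded by the token-based `TokenStream` API introduced in `token.rs` earlier in this patch. A minimal sketch of how a single-token stream is built and printed with that API, mirroring the updated tests further down; the helper name `single_token_stream` is hypothetical and only used for illustration:

```rust
use cairo_lang_macro::{TextSpan, Token, TokenStream, TokenTree};

// Hypothetical helper: wrap a whole code fragment in one `Ident` token
// spanning the full text, as the updated tests in this patch do.
fn single_token_stream(code: &str) -> TokenStream {
    TokenStream::new(vec![TokenTree::Ident(Token::new(
        code.to_string(),
        TextSpan::new(0, code.len()),
    ))])
}

fn main() {
    let stream = single_token_stream("fn hello() -> u32 { 12 }");
    // `Display` for `TokenStream` concatenates the content of every token in order,
    // so the stream prints back as the original code.
    assert_eq!(stream.to_string(), "fn hello() -> u32 { 12 }");
}
```
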
diff --git a/scarb/src/compiler/plugin/proc_macro/host.rs b/scarb/src/compiler/plugin/proc_macro/host.rs index c76606739..cfc154310 100644 --- a/scarb/src/compiler/plugin/proc_macro/host.rs +++ b/scarb/src/compiler/plugin/proc_macro/host.rs @@ -1,6 +1,6 @@ use crate::compiler::plugin::proc_macro::compilation::SharedLibraryProvider; use crate::compiler::plugin::proc_macro::{ - Expansion, ExpansionKind, FromSyntaxNode, ProcMacroInstance, + Expansion, ExpansionKind, ProcMacroInstance, TokenStreamBuilder, }; use crate::core::{Config, Package, PackageId}; use anyhow::{Context, Result, bail, ensure}; @@ -213,15 +213,15 @@ impl ProcMacroHostPlugin { continue; }; - let mut func_builder = PatchBuilder::new(db, func); + let mut token_stream_builder = TokenStreamBuilder::new(db); let attrs = func.attributes(db).elements(db); - let found = self.parse_attrs(db, &mut func_builder, attrs, func); + let found = self.parse_attrs(db, &mut token_stream_builder, attrs); if let Some(name) = found.as_name() { used_attr_names.insert(name); } - func_builder.add_node(func.declaration(db).as_syntax_node()); - func_builder.add_node(func.body(db).as_syntax_node()); - let token_stream = TokenStream::new(func_builder.build().0); + token_stream_builder.add_node(func.declaration(db).as_syntax_node()); + token_stream_builder.add_node(func.body(db).as_syntax_node()); + let token_stream = token_stream_builder.build(); all_none = all_none && self.do_expand_inner_attr( @@ -274,16 +274,16 @@ impl ProcMacroHostPlugin { continue; }; - let mut func_builder = PatchBuilder::new(db, &func); + let mut token_stream_builder = TokenStreamBuilder::new(db); let attrs = func.attributes(db).elements(db); - let found = self.parse_attrs(db, &mut func_builder, attrs, &func); + let found = self.parse_attrs(db, &mut token_stream_builder, attrs); if let Some(name) = found.as_name() { used_attr_names.insert(name); } - func_builder.add_node(func.visibility(db).as_syntax_node()); - func_builder.add_node(func.declaration(db).as_syntax_node()); - func_builder.add_node(func.body(db).as_syntax_node()); - let token_stream = TokenStream::new(func_builder.build().0); + token_stream_builder.add_node(func.visibility(db).as_syntax_node()); + token_stream_builder.add_node(func.declaration(db).as_syntax_node()); + token_stream_builder.add_node(func.body(db).as_syntax_node()); + let token_stream = token_stream_builder.build(); all_none = all_none && self.do_expand_inner_attr( db, @@ -370,103 +370,102 @@ impl ProcMacroHostPlugin { db: &dyn SyntaxGroup, item_ast: ast::ModuleItem, ) -> (AttrExpansionFound, TokenStream) { - let mut item_builder = PatchBuilder::new(db, &item_ast); + let mut token_stream_builder = TokenStreamBuilder::new(db); let input = match item_ast.clone() { ast::ModuleItem::Trait(trait_ast) => { let attrs = trait_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); - item_builder.add_node(trait_ast.visibility(db).as_syntax_node()); - item_builder.add_node(trait_ast.trait_kw(db).as_syntax_node()); - item_builder.add_node(trait_ast.name(db).as_syntax_node()); - item_builder.add_node(trait_ast.generic_params(db).as_syntax_node()); - item_builder.add_node(trait_ast.body(db).as_syntax_node()); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + token_stream_builder.add_node(trait_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(trait_ast.trait_kw(db).as_syntax_node()); + token_stream_builder.add_node(trait_ast.name(db).as_syntax_node()); + 
token_stream_builder.add_node(trait_ast.generic_params(db).as_syntax_node()); + token_stream_builder.add_node(trait_ast.body(db).as_syntax_node()); expansion } ast::ModuleItem::Impl(impl_ast) => { let attrs = impl_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); - item_builder.add_node(impl_ast.visibility(db).as_syntax_node()); - item_builder.add_node(impl_ast.impl_kw(db).as_syntax_node()); - item_builder.add_node(impl_ast.name(db).as_syntax_node()); - item_builder.add_node(impl_ast.generic_params(db).as_syntax_node()); - item_builder.add_node(impl_ast.of_kw(db).as_syntax_node()); - item_builder.add_node(impl_ast.trait_path(db).as_syntax_node()); - item_builder.add_node(impl_ast.body(db).as_syntax_node()); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + token_stream_builder.add_node(impl_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.impl_kw(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.name(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.generic_params(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.of_kw(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.trait_path(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.body(db).as_syntax_node()); expansion } ast::ModuleItem::Module(module_ast) => { let attrs = module_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); - item_builder.add_node(module_ast.visibility(db).as_syntax_node()); - item_builder.add_node(module_ast.module_kw(db).as_syntax_node()); - item_builder.add_node(module_ast.name(db).as_syntax_node()); - item_builder.add_node(module_ast.body(db).as_syntax_node()); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + token_stream_builder.add_node(module_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(module_ast.module_kw(db).as_syntax_node()); + token_stream_builder.add_node(module_ast.name(db).as_syntax_node()); + token_stream_builder.add_node(module_ast.body(db).as_syntax_node()); expansion } ast::ModuleItem::FreeFunction(free_func_ast) => { let attrs = free_func_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); - item_builder.add_node(free_func_ast.visibility(db).as_syntax_node()); - item_builder.add_node(free_func_ast.declaration(db).as_syntax_node()); - item_builder.add_node(free_func_ast.body(db).as_syntax_node()); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + token_stream_builder.add_node(free_func_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(free_func_ast.declaration(db).as_syntax_node()); + token_stream_builder.add_node(free_func_ast.body(db).as_syntax_node()); expansion } ast::ModuleItem::ExternFunction(extern_func_ast) => { let attrs = extern_func_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); - item_builder.add_node(extern_func_ast.visibility(db).as_syntax_node()); - item_builder.add_node(extern_func_ast.extern_kw(db).as_syntax_node()); - item_builder.add_node(extern_func_ast.declaration(db).as_syntax_node()); - item_builder.add_node(extern_func_ast.semicolon(db).as_syntax_node()); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + token_stream_builder.add_node(extern_func_ast.visibility(db).as_syntax_node()); + 
token_stream_builder.add_node(extern_func_ast.extern_kw(db).as_syntax_node()); + token_stream_builder.add_node(extern_func_ast.declaration(db).as_syntax_node()); + token_stream_builder.add_node(extern_func_ast.semicolon(db).as_syntax_node()); expansion } ast::ModuleItem::ExternType(extern_type_ast) => { let attrs = extern_type_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); - item_builder.add_node(extern_type_ast.visibility(db).as_syntax_node()); - item_builder.add_node(extern_type_ast.extern_kw(db).as_syntax_node()); - item_builder.add_node(extern_type_ast.type_kw(db).as_syntax_node()); - item_builder.add_node(extern_type_ast.name(db).as_syntax_node()); - item_builder.add_node(extern_type_ast.generic_params(db).as_syntax_node()); - item_builder.add_node(extern_type_ast.semicolon(db).as_syntax_node()); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + token_stream_builder.add_node(extern_type_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(extern_type_ast.extern_kw(db).as_syntax_node()); + token_stream_builder.add_node(extern_type_ast.type_kw(db).as_syntax_node()); + token_stream_builder.add_node(extern_type_ast.name(db).as_syntax_node()); + token_stream_builder.add_node(extern_type_ast.generic_params(db).as_syntax_node()); + token_stream_builder.add_node(extern_type_ast.semicolon(db).as_syntax_node()); expansion } ast::ModuleItem::Struct(struct_ast) => { let attrs = struct_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); - item_builder.add_node(struct_ast.visibility(db).as_syntax_node()); - item_builder.add_node(struct_ast.struct_kw(db).as_syntax_node()); - item_builder.add_node(struct_ast.name(db).as_syntax_node()); - item_builder.add_node(struct_ast.generic_params(db).as_syntax_node()); - item_builder.add_node(struct_ast.lbrace(db).as_syntax_node()); - item_builder.add_node(struct_ast.members(db).as_syntax_node()); - item_builder.add_node(struct_ast.rbrace(db).as_syntax_node()); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + token_stream_builder.add_node(struct_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(struct_ast.struct_kw(db).as_syntax_node()); + token_stream_builder.add_node(struct_ast.name(db).as_syntax_node()); + token_stream_builder.add_node(struct_ast.generic_params(db).as_syntax_node()); + token_stream_builder.add_node(struct_ast.lbrace(db).as_syntax_node()); + token_stream_builder.add_node(struct_ast.members(db).as_syntax_node()); + token_stream_builder.add_node(struct_ast.rbrace(db).as_syntax_node()); expansion } ast::ModuleItem::Enum(enum_ast) => { let attrs = enum_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); - item_builder.add_node(enum_ast.visibility(db).as_syntax_node()); - item_builder.add_node(enum_ast.enum_kw(db).as_syntax_node()); - item_builder.add_node(enum_ast.name(db).as_syntax_node()); - item_builder.add_node(enum_ast.generic_params(db).as_syntax_node()); - item_builder.add_node(enum_ast.lbrace(db).as_syntax_node()); - item_builder.add_node(enum_ast.variants(db).as_syntax_node()); - item_builder.add_node(enum_ast.rbrace(db).as_syntax_node()); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + token_stream_builder.add_node(enum_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(enum_ast.enum_kw(db).as_syntax_node()); + 
token_stream_builder.add_node(enum_ast.name(db).as_syntax_node()); + token_stream_builder.add_node(enum_ast.generic_params(db).as_syntax_node()); + token_stream_builder.add_node(enum_ast.lbrace(db).as_syntax_node()); + token_stream_builder.add_node(enum_ast.variants(db).as_syntax_node()); + token_stream_builder.add_node(enum_ast.rbrace(db).as_syntax_node()); expansion } _ => AttrExpansionFound::None, }; - let token_stream = TokenStream::new(item_builder.build().0); + let token_stream = token_stream_builder.build(); (input, token_stream) } fn parse_attrs( &self, db: &dyn SyntaxGroup, - builder: &mut PatchBuilder<'_>, + builder: &mut TokenStreamBuilder<'_>, attrs: Vec, - origin: &impl TypedSyntaxNode, ) -> AttrExpansionFound { // This function parses attributes of the item, // checking if those attributes correspond to a procedural macro that should be fired. @@ -489,9 +488,9 @@ impl ProcMacroHostPlugin { )); if let Some(found) = found { if expansion.is_none() { - let mut args_builder = PatchBuilder::new(db, origin); + let mut args_builder = TokenStreamBuilder::new(db); args_builder.add_node(attr.arguments(db).as_syntax_node()); - let args = TokenStream::new(args_builder.build().0); + let args = args_builder.build(); expansion = Some((found, args, attr.stable_ptr().untyped())); // Do not add the attribute for found expansion. continue; @@ -562,9 +561,10 @@ impl ProcMacroHostPlugin { stream_metadata: TokenStreamMetadata, ) -> Option { let stable_ptr = item_ast.clone().stable_ptr().untyped(); - let token_stream = - TokenStream::from_syntax_node(db, &item_ast).with_metadata(stream_metadata.clone()); - + let mut token_stream_builder = TokenStreamBuilder::new(db); + token_stream_builder.add_node(item_ast.as_syntax_node()); + token_stream_builder.with_metadata(stream_metadata.clone()); + let token_stream = token_stream_builder.build(); let mut aux_data = EmittedAuxData::default(); let mut all_diagnostics: Vec = Vec::new(); @@ -1096,7 +1096,9 @@ impl InlineMacroExprPlugin for ProcMacroInlinePlugin { ) -> InlinePluginResult { let stable_ptr = syntax.clone().stable_ptr().untyped(); let arguments = syntax.arguments(db); - let token_stream = TokenStream::from_syntax_node(db, &arguments); + let mut token_stream_builder = TokenStreamBuilder::new(db); + token_stream_builder.add_node(arguments.as_syntax_node()); + let token_stream = token_stream_builder.build(); let result = self.instance().generate_code( self.expansion.name.clone(), TokenStream::empty(), diff --git a/scarb/src/compiler/plugin/proc_macro/mod.rs b/scarb/src/compiler/plugin/proc_macro/mod.rs index 888c012fc..83a4e7822 100644 --- a/scarb/src/compiler/plugin/proc_macro/mod.rs +++ b/scarb/src/compiler/plugin/proc_macro/mod.rs @@ -1,7 +1,9 @@ pub mod compilation; mod ffi; mod host; +mod types; pub use compilation::{check_unit, compile_unit, fetch_crate}; pub use ffi::*; pub use host::*; +pub use types::*; diff --git a/scarb/src/compiler/plugin/proc_macro/types.rs b/scarb/src/compiler/plugin/proc_macro/types.rs new file mode 100644 index 000000000..10b16885a --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/types.rs @@ -0,0 +1,54 @@ +use cairo_lang_macro::{TextSpan, Token, TokenStream, TokenStreamMetadata, TokenTree}; +use cairo_lang_syntax::node::{SyntaxNode, db::SyntaxGroup}; + +/// Helps creating TokenStream based on multiple SyntaxNodes, +/// which aren't descendants or ascendants of each other inside the SyntaxTree. 
+pub struct TokenStreamBuilder<'a> { + db: &'a dyn SyntaxGroup, + nodes: Vec, + metadata: Option, +} + +impl<'a> TokenStreamBuilder<'a> { + pub fn new(db: &'a dyn SyntaxGroup) -> Self { + Self { + db, + nodes: Vec::default(), + metadata: None, + } + } + + pub fn add_node(&mut self, node: SyntaxNode) { + self.nodes.push(node); + } + + pub fn with_metadata(&mut self, metadata: TokenStreamMetadata) { + self.metadata = Some(metadata); + } + + pub fn build(self) -> TokenStream { + let mut result: Vec = Vec::default(); + for node in self.nodes.iter() { + let leaves = node.tokens(self.db); + let tokens = + leaves.map(|node| TokenTree::Ident(self.token_from_syntax_node(node.clone()))); + result.extend(tokens); + } + + match self.metadata { + Some(metadata) => TokenStream::new(result.clone()).with_metadata(metadata.clone()), + None => TokenStream::new(result.clone()), + } + } + + pub fn token_from_syntax_node(&self, node: SyntaxNode) -> Token { + let span = node.span(self.db).to_str_range(); + Token::new( + node.get_text(self.db), + TextSpan { + start: span.start, + end: span.end, + }, + ) + } +} diff --git a/scarb/src/ops/proc_macro_server/methods/expand_derive.rs b/scarb/src/ops/proc_macro_server/methods/expand_derive.rs index c6b3425f7..7a26f8594 100644 --- a/scarb/src/ops/proc_macro_server/methods/expand_derive.rs +++ b/scarb/src/ops/proc_macro_server/methods/expand_derive.rs @@ -22,7 +22,7 @@ impl Handler for ExpandDerive { item, } = params; - let mut derived_code = String::new(); + let mut derived_code = TokenStream::empty(); let mut all_diagnostics = vec![]; for derive in derives { @@ -44,11 +44,11 @@ impl Handler for ExpandDerive { // Register diagnostics. all_diagnostics.extend(result.diagnostics); // Add generated code. - derived_code.push_str(&result.token_stream.to_string()); + derived_code.tokens.extend(result.token_stream.tokens); } Ok(ProcMacroResult { - token_stream: TokenStream::new(derived_code), + token_stream: derived_code, diagnostics: all_diagnostics, }) } diff --git a/scarb/tests/build_cairo_plugin.rs b/scarb/tests/build_cairo_plugin.rs index 762346a69..452094af3 100644 --- a/scarb/tests/build_cairo_plugin.rs +++ b/scarb/tests/build_cairo_plugin.rs @@ -407,15 +407,15 @@ fn can_replace_original_node() { let t = temp.child("some"); CairoPluginProjectBuilder::default() .lib_rs(indoc! {r##" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, TokenTree, Token, TextSpan}; #[attribute_macro] pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - let token_stream = TokenStream::new( - token_stream - .to_string() - .replace("12", "34") - ); + let new_token_string = token_stream.to_string().replace("12", "34"); + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + new_token_string.clone(), + TextSpan { start: 0, end: new_token_string.len() }, + ))]); ProcMacroResult::new(token_stream) } "##}) @@ -575,26 +575,26 @@ fn can_define_multiple_macros() { let t = temp.child("some"); CairoPluginProjectBuilder::default() .lib_rs(indoc! 
{r##" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, AuxData, PostProcessContext, post_process}; + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, AuxData, PostProcessContext, post_process, TokenTree, Token, TextSpan}; #[attribute_macro] pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - let token_stream = TokenStream::new( - token_stream - .to_string() - .replace("12", "34") - ); + let new_token_string = token_stream.to_string().replace("12", "34"); + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + new_token_string.clone(), + TextSpan { start: 0, end: new_token_string.len() }, + ))]); let aux_data = AuxData::new(Vec::new()); ProcMacroResult::new(token_stream).with_aux_data(aux_data) } #[attribute_macro] pub fn world(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - let token_stream = TokenStream::new( - token_stream - .to_string() - .replace("56", "78") - ); + let new_token_string = token_stream.to_string().replace("56", "78"); + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + new_token_string.clone(), + TextSpan { start: 0, end: new_token_string.len() }, + ))]); let aux_data = AuxData::new(Vec::new()); ProcMacroResult::new(token_stream).with_aux_data(aux_data) } @@ -610,15 +610,15 @@ fn can_define_multiple_macros() { CairoPluginProjectBuilder::default() .name("other") .lib_rs(indoc! {r##" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, AuxData, PostProcessContext, post_process}; + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, AuxData, PostProcessContext, post_process, TokenTree, Token, TextSpan}; #[attribute_macro] pub fn beautiful(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - let token_stream = TokenStream::new( - token_stream - .to_string() - .replace("90", "09") - ); + let new_token_string = token_stream.to_string().replace("90", "09"); + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + new_token_string.clone(), + TextSpan { start: 0, end: new_token_string.len() }, + ))]); let aux_data = AuxData::new(Vec::new()); ProcMacroResult::new(token_stream).with_aux_data(aux_data) } @@ -831,7 +831,7 @@ fn can_resolve_full_path_markers() { let t = temp.child("some"); CairoPluginProjectBuilder::default() .lib_rs(indoc! {r##" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, post_process, PostProcessContext}; + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, post_process, PostProcessContext, TokenTree, Token, TextSpan}; #[attribute_macro] pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { @@ -842,8 +842,14 @@ fn can_resolve_full_path_markers() { token_stream.to_string().replace("12", "34") ); - ProcMacroResult::new(TokenStream::new(code)) - .with_full_path_markers(full_path_markers) + ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new( + code.clone(), + TextSpan { + start: 0, + end: code.len(), + }, + ))]) + ).with_full_path_markers(full_path_markers) } #[post_process] @@ -886,12 +892,18 @@ fn can_implement_inline_macro() { let t = temp.child("some"); CairoPluginProjectBuilder::default() .lib_rs(indoc! 
{r##" - use cairo_lang_macro::{ProcMacroResult, TokenStream, inline_macro}; + use cairo_lang_macro::{ProcMacroResult, TokenStream, inline_macro, TokenTree, Token, TextSpan}; #[inline_macro] pub fn some(token_stream: TokenStream) -> ProcMacroResult { assert_eq!(token_stream.to_string(), "()"); - ProcMacroResult::new(TokenStream::new("34".to_string())) + ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new( + "34".to_string(), + TextSpan { + start: 0, + end: 2, + }, + ))])) } "##}) .build(&t); @@ -978,7 +990,7 @@ fn can_implement_derive_macro() { let t = temp.child("some"); CairoPluginProjectBuilder::default() .lib_rs(indoc! {r##" - use cairo_lang_macro::{derive_macro, ProcMacroResult, TokenStream}; + use cairo_lang_macro::{derive_macro, ProcMacroResult, TokenStream, TokenTree, Token, TextSpan}; #[derive_macro] pub fn custom_derive(token_stream: TokenStream) -> ProcMacroResult { @@ -995,13 +1007,21 @@ fn can_implement_derive_macro() { .trim() .to_string(); - let token_stream = TokenStream::new(indoc::formatdoc!{r#" + let code = indoc::formatdoc!{r#" impl SomeImpl of Hello<{name}> {{ fn world(self: @{name}) -> u32 {{ 32 }} }} - "#}); + "#}; + + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + code.clone(), + TextSpan { + start: 0, + end: code.len(), + }, + ))]); ProcMacroResult::new(token_stream) } @@ -1053,37 +1073,58 @@ fn can_use_both_derive_and_attr() { let t = temp.child("some"); CairoPluginProjectBuilder::default() .lib_rs(indoc! {r##" - use cairo_lang_macro::{derive_macro, attribute_macro, ProcMacroResult, TokenStream}; + use cairo_lang_macro::{derive_macro, attribute_macro, ProcMacroResult, TokenStream, TokenTree, TextSpan, Token}; #[attribute_macro] pub fn first_attribute(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - ProcMacroResult::new(TokenStream::new( - token_stream.to_string() - .replace("SomeType", "OtherType") - )) + let new_token_string = token_stream.to_string().replace("SomeType", "OtherType"); + ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new( + new_token_string.clone(), + TextSpan { + start: 0, + end: new_token_string.len(), + }, + ))])) } #[attribute_macro] pub fn second_attribute(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - let token_stream = TokenStream::new( - token_stream.to_string().replace("OtherType", "RenamedStruct") - ); - ProcMacroResult::new(TokenStream::new( - format!("#[derive(Drop)]\n{token_stream}") - )) + let code = token_stream.to_string().replace("OtherType", "RenamedStruct"); + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + code.clone(), + TextSpan { + start: 0, + end: code.len(), + }, + ))]); + + let result_string = format!("#[derive(Drop)]\n{token_stream}"); + ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new( + result_string.clone(), + TextSpan { + start: 0, + end: result_string.len(), + }, + ))])) } #[derive_macro] pub fn custom_derive(_token_stream: TokenStream) -> ProcMacroResult { - ProcMacroResult::new(TokenStream::new( - indoc::formatdoc!{r#" + let code = indoc::formatdoc!{r#" impl SomeImpl of Hello {{ fn world(self: @RenamedStruct) -> u32 {{ 32 }} }} - "#} - )) + "#}; + + ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new( + code.clone(), + TextSpan { + start: 0, + end: code.len(), + }, + ))])) } "##}) .add_dep(r#"indoc = "*""#) @@ -1285,15 +1326,15 @@ fn can_be_expanded() { let t = temp.child("some"); CairoPluginProjectBuilder::default() .lib_rs(indoc! 
{r##" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, derive_macro}; + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, derive_macro, TokenTree, Token, TextSpan}; #[attribute_macro] pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - let token_stream = TokenStream::new( - token_stream - .to_string() - .replace("12", "34") - ); + let new_token_string = token_stream.to_string().replace("12", "34"); + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + new_token_string.clone(), + TextSpan { start: 0, end: new_token_string.len() }, + ))]); ProcMacroResult::new(token_stream) } @@ -1312,13 +1353,18 @@ fn can_be_expanded() { .trim() .to_string(); - let token_stream = TokenStream::new(indoc::formatdoc!{r#" + let code = indoc::formatdoc!{r#" impl SomeImpl of Hello<{name}> {{ fn world(self: @{name}) -> u32 {{ 32 }} }} - "#}); + "#}; + + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + code.clone(), + TextSpan { start: 0, end: code.len() }, + ))]); ProcMacroResult::new(token_stream) } @@ -1394,15 +1440,17 @@ fn can_expand_trait_inner_func_attrr() { let t = temp.child("some"); CairoPluginProjectBuilder::default() .lib_rs(indoc! {r##" - use cairo_lang_macro::{attribute_macro, ProcMacroResult, TokenStream}; + use cairo_lang_macro::{attribute_macro, ProcMacroResult, TokenStream, TokenTree, Token, TextSpan}; #[attribute_macro] pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - ProcMacroResult::new(TokenStream::new( - token_stream.to_string() + let new_token_string = token_stream.to_string() .replace("hello", "world") - .replace("12", "34") - )) + .replace("12", "34"); + ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new( + new_token_string.clone(), + TextSpan { start: 0, end: new_token_string.len() }, + ))])) } "##}) .build(&t); @@ -1456,14 +1504,15 @@ fn can_expand_impl_inner_func_attrr() { let t = temp.child("some"); CairoPluginProjectBuilder::default() .lib_rs(indoc! 
{r##" - use cairo_lang_macro::{attribute_macro, ProcMacroResult, TokenStream}; + use cairo_lang_macro::{attribute_macro, ProcMacroResult, TokenStream, Token, TokenTree, TextSpan}; #[attribute_macro] pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - ProcMacroResult::new(TokenStream::new( - token_stream.to_string() - .replace("1", "2") - )) + let new_token_string = token_stream.to_string().replace("1", "2"); + ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new( + new_token_string.clone(), + TextSpan { start: 0, end: new_token_string.len() }, + ))])) } "##}) .build(&t); diff --git a/scarb/tests/proc_macro_prebuilt.rs b/scarb/tests/proc_macro_prebuilt.rs index b23a30418..9b76e986c 100644 --- a/scarb/tests/proc_macro_prebuilt.rs +++ b/scarb/tests/proc_macro_prebuilt.rs @@ -1,7 +1,7 @@ use assert_fs::TempDir; use assert_fs::fixture::{ChildPath, FileWriteStr, PathCreateDir}; use assert_fs::prelude::PathChild; -use cairo_lang_macro::TokenStream; +use cairo_lang_macro::{TextSpan, Token, TokenStream, TokenTree}; use indoc::indoc; use libloading::library_filename; use scarb_proc_macro_server_types::methods::expand::{ExpandInline, ExpandInlineMacroParams}; @@ -226,10 +226,19 @@ fn load_prebuilt_proc_macros() { .request_and_wait::(ExpandInlineMacroParams { context: ProcMacroScope { component }, name: "some".to_string(), - args: TokenStream::new("42".to_string()), + args: TokenStream::new(vec![TokenTree::Ident(Token::new( + "42".to_string(), + TextSpan::default(), + ))]), }) .unwrap(); assert_eq!(response.diagnostics, vec![]); - assert_eq!(response.token_stream, TokenStream::new("42".to_string())); + assert_eq!( + response.token_stream, + TokenStream::new(vec![TokenTree::Ident(Token::new( + "42".to_string(), + TextSpan::default(), + ))]) + ); } diff --git a/scarb/tests/proc_macro_server.rs b/scarb/tests/proc_macro_server.rs index 1617074a8..500e086be 100644 --- a/scarb/tests/proc_macro_server.rs +++ b/scarb/tests/proc_macro_server.rs @@ -1,6 +1,6 @@ use assert_fs::TempDir; use assert_fs::prelude::PathChild; -use cairo_lang_macro::TokenStream; +use cairo_lang_macro::{TextSpan, Token, TokenStream, TokenTree}; use scarb_proc_macro_server_types::methods::expand::ExpandAttribute; use scarb_proc_macro_server_types::methods::expand::ExpandAttributeParams; use scarb_proc_macro_server_types::methods::expand::ExpandDerive; @@ -58,7 +58,14 @@ fn expand_attribute() { let output = input.replace(name, "very_new_name"); - ProcMacroResult::new(TokenStream::new(output)) + let span = TextSpan { start: 0, end: output.len() }; + ProcMacroResult::new( + TokenStream::new(vec![ + TokenTree::Ident( + Token::new(output, span) + ) + ]) + ) }} "##; @@ -87,14 +94,17 @@ fn expand_attribute() { context: ProcMacroScope { component }, attr: "rename_to_very_new_name".to_string(), args: TokenStream::empty(), - item: TokenStream::new("fn some_test_fn(){}".to_string()), + item: TokenStream::new(vec![TokenTree::Ident(Token::new( + "fn some_test_fn(){}".to_string(), + TextSpan::default(), + ))]), }) .unwrap(); assert_eq!(response.diagnostics, vec![]); assert_eq!( - response.token_stream, - TokenStream::new("fn very_new_name(){}".to_string()) + response.token_stream.to_string(), + "fn very_new_name(){}".to_string() ); } @@ -122,7 +132,10 @@ fn expand_derive() { .defined_macros_for_package("test_package") .component; - let item = TokenStream::new("fn some_test_fn(){}".to_string()); + let item = TokenStream::new(vec![TokenTree::Ident(Token::new( + "fn some_test_fn(){}".to_string(), + 
TextSpan::default(), + ))]); let response = proc_macro_client .request_and_wait::(ExpandDeriveParams { @@ -134,8 +147,8 @@ fn expand_derive() { assert_eq!(response.diagnostics, vec![]); assert_eq!( - response.token_stream, - TokenStream::new("impl SomeImpl of SomeTrait {}".to_string()) + response.token_stream.to_string(), + "impl SomeImpl of SomeTrait {}".to_string() ); } @@ -147,7 +160,15 @@ fn expand_inline() { let replace_all_15_with_25 = r#" #[inline_macro] pub fn replace_all_15_with_25(token_stream: TokenStream) -> ProcMacroResult { - ProcMacroResult::new(TokenStream::new(token_stream.to_string().replace("15", "25"))) + let content = token_stream.to_string().replace("15", "25"); + let span = TextSpan { start: 0, end: content.len() }; + ProcMacroResult::new( + TokenStream::new(vec![ + TokenTree::Ident( + Token::new(content, span) + ) + ]) + ) } "#; @@ -174,15 +195,16 @@ fn expand_inline() { .request_and_wait::(ExpandInlineMacroParams { context: ProcMacroScope { component }, name: "replace_all_15_with_25".to_string(), - args: TokenStream::new( + args: TokenStream::new(vec![TokenTree::Ident(Token::new( "struct A { field: 15 , other_field: macro_call!(12)}".to_string(), - ), + TextSpan::default(), + ))]), }) .unwrap(); assert_eq!(response.diagnostics, vec![]); assert_eq!( - response.token_stream, - TokenStream::new("struct A { field: 25 , other_field: macro_call!(12)}".to_string()) + response.token_stream.to_string(), + "struct A { field: 25 , other_field: macro_call!(12)}".to_string() ); } diff --git a/utils/scarb-proc-macro-server-types/Cargo.toml b/utils/scarb-proc-macro-server-types/Cargo.toml index 9592d8150..5408b789d 100644 --- a/utils/scarb-proc-macro-server-types/Cargo.toml +++ b/utils/scarb-proc-macro-server-types/Cargo.toml @@ -10,6 +10,6 @@ license.workspace = true repository.workspace = true [dependencies] -cairo-lang-macro = { version = "0.1", features = ["serde"] } +cairo-lang-macro = { path = "../../plugins/cairo-lang-macro", version = "0.1", features = ["serde"] } serde.workspace = true serde_json.workspace = true diff --git a/utils/scarb-test-support/src/proc_macro_server.rs b/utils/scarb-test-support/src/proc_macro_server.rs index b34bc6fc4..49e811d5a 100644 --- a/utils/scarb-test-support/src/proc_macro_server.rs +++ b/utils/scarb-test-support/src/proc_macro_server.rs @@ -22,7 +22,7 @@ use std::process::Stdio; pub const SIMPLE_MACROS: &str = r#" use cairo_lang_macro::{ ProcMacroResult, - TokenStream, + TokenStream, TokenTree, Token, TextSpan, attribute_macro, inline_macro, derive_macro, @@ -43,7 +43,15 @@ pub fn inline_some(token_stream: TokenStream) -> ProcMacroResult { #[derive_macro] fn some_derive(_token_stream: TokenStream)-> ProcMacroResult { - ProcMacroResult::new(TokenStream::new("impl SomeImpl of SomeTrait {}".to_string())) + let content = "impl SomeImpl of SomeTrait {}".to_string(); + let span = TextSpan { start: 0, end: content.len() }; + ProcMacroResult::new( + TokenStream::new(vec![ + TokenTree::Ident( + Token::new(content, span) + ) + ]) + ) } "#; From 201f36ecc9114fb67a9eb37aa655649c8cebf7a7 Mon Sep 17 00:00:00 2001 From: maciektr Date: Wed, 13 Nov 2024 12:40:26 +0100 Subject: [PATCH 02/38] Avoid unnecessary allocations when freeing stable structs (#1700) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit commit-id:55ec12c0 --- **Stack**: - #1734 - #1722 - #1704 - #1700 ⬅ ⚠️ *Part of a stack created by [spr](https://github.com/ejoffe/spr). 
Do not merge manually using the UI - doing so may have unexpected results.* --- plugins/cairo-lang-macro/src/lib.rs | 2 +- .../cairo-lang-macro/src/types/conversion.rs | 172 +++++++----------- scarb/src/compiler/plugin/proc_macro/ffi.rs | 8 +- 3 files changed, 74 insertions(+), 108 deletions(-) diff --git a/plugins/cairo-lang-macro/src/lib.rs b/plugins/cairo-lang-macro/src/lib.rs index dcdbeb4af..1f7d1add6 100644 --- a/plugins/cairo-lang-macro/src/lib.rs +++ b/plugins/cairo-lang-macro/src/lib.rs @@ -134,7 +134,7 @@ pub unsafe extern "C" fn expand( #[doc(hidden)] #[no_mangle] pub unsafe extern "C" fn free_result(result: StableProcMacroResult) { - ProcMacroResult::from_owned_stable(result); + ProcMacroResult::free_owned_stable(result); } /// Distributed slice for storing auxiliary data collection callback pointers. diff --git a/plugins/cairo-lang-macro/src/types/conversion.rs b/plugins/cairo-lang-macro/src/types/conversion.rs index 1fb0128e0..69d375047 100644 --- a/plugins/cairo-lang-macro/src/types/conversion.rs +++ b/plugins/cairo-lang-macro/src/types/conversion.rs @@ -39,7 +39,7 @@ impl ProcMacroResult { /// Convert to native Rust representation, without taking the ownership of the string. /// - /// Note that you still need to free the memory by calling `from_owned_stable`. + /// Note that you still need to free the memory by calling `free_owned_stable`. /// /// # Safety #[doc(hidden)] @@ -62,31 +62,22 @@ impl ProcMacroResult { } } - /// Convert to native Rust representation, with taking the ownership of the string. + /// Take the ownership of memory under the pointer and drop it. /// /// Useful when you need to free the allocated memory. /// Only use on the same side of FFI-barrier, where the memory has been allocated. /// /// # Safety #[doc(hidden)] - pub unsafe fn from_owned_stable(result: StableProcMacroResult) -> Self { - let diagnostics = result.diagnostics.into_owned(); - let diagnostics = diagnostics - .into_iter() - .map(|d| Diagnostic::from_owned_stable(d)) - .collect::>(); - let full_path_markers = result - .full_path_markers - .into_owned() - .iter() - .map(|m| from_raw_cstring(*m)) - .collect::>(); - ProcMacroResult { - token_stream: TokenStream::from_owned_stable(result.token_stream), - aux_data: AuxData::from_owned_stable(result.aux_data), - diagnostics, - full_path_markers, + pub unsafe fn free_owned_stable(result: StableProcMacroResult) { + for diagnostic in result.diagnostics.into_owned() { + Diagnostic::free_owned_stable(diagnostic); + } + for marker in result.full_path_markers.into_owned() { + free_raw_cstring(marker) } + TokenStream::free_owned_stable(result.token_stream); + AuxData::free_owned_stable(result.aux_data); } } @@ -100,6 +91,7 @@ impl TextSpan { } } + /// Convert to native Rust representation, without taking the ownership. #[doc(hidden)] pub fn from_stable(span: &StableTextSpan) -> Self { Self { @@ -109,11 +101,8 @@ impl TextSpan { } #[doc(hidden)] - pub fn from_owned_stable(span: StableTextSpan) -> Self { - Self { - start: span.start, - end: span.end, - } + pub fn free_owned_stable(span: StableTextSpan) { + let _ = span; } } @@ -130,7 +119,7 @@ impl Token { /// Convert to native Rust representation, without taking the ownership of the string. /// - /// Note that you still need to free the memory by calling `from_owned_stable`. + /// Note that you still need to free the memory by calling `free_owned_stable`. 
/// /// # Safety #[doc(hidden)] @@ -141,18 +130,16 @@ impl Token { } } - /// Convert to native Rust representation, with taking the ownership of the string. + /// Take the ownership of memory under the pointer and drop it. /// /// Useful when you need to free the allocated memory. /// Only use on the same side of FFI-barrier, where the memory has been allocated. /// /// # Safety #[doc(hidden)] - pub unsafe fn from_owned_stable(token: StableToken) -> Self { - Self { - content: from_raw_cstring(token.content), - span: TextSpan::from_owned_stable(token.span), - } + pub unsafe fn free_owned_stable(token: StableToken) { + free_raw_cstring(token.content); + TextSpan::free_owned_stable(token.span); } } @@ -167,7 +154,7 @@ impl TokenTree { /// Convert to native Rust representation, without taking the ownership of the string. /// - /// Note that you still need to free the memory by calling `from_owned_stable`. + /// Note that you still need to free the memory by calling `free_owned_stable`. /// /// # Safety #[doc(hidden)] @@ -177,16 +164,18 @@ impl TokenTree { } } - /// Convert to native Rust representation, with taking the ownership of the string. + /// Take the ownership of memory under the pointer and drop it. /// /// Useful when you need to free the allocated memory. /// Only use on the same side of FFI-barrier, where the memory has been allocated. /// /// # Safety #[doc(hidden)] - pub unsafe fn from_owned_stable(token_tree: StableTokenTree) -> Self { + pub unsafe fn free_owned_stable(token_tree: StableTokenTree) { match token_tree { - StableTokenTree::Ident(token) => Self::Ident(Token::from_owned_stable(token)), + StableTokenTree::Ident(token) => { + Token::free_owned_stable(token); + } } } } @@ -210,7 +199,7 @@ impl TokenStream { /// Convert to native Rust representation, without taking the ownership of the string. /// - /// Note that you still need to free the memory by calling `from_owned_stable`. + /// Note that you still need to free the memory by calling `free_owned_stable`. /// /// # Safety #[doc(hidden)] @@ -226,23 +215,18 @@ impl TokenStream { } } - /// Convert to native Rust representation, with taking the ownership of the string. + /// Take the ownership of memory under the pointer and drop it. /// /// Useful when you need to free the allocated memory. /// Only use on the same side of FFI-barrier, where the memory has been allocated. /// /// # Safety #[doc(hidden)] - pub unsafe fn from_owned_stable(token_stream: StableTokenStream) -> Self { - let tokens = token_stream.tokens.into_owned(); - let tokens = tokens - .into_iter() - .map(|token_tree| TokenTree::from_owned_stable(token_tree)) - .collect::>(); - Self { - tokens, - metadata: TokenStreamMetadata::from_owned_stable(token_stream.metadata), + pub unsafe fn free_owned_stable(token_stream: StableTokenStream) { + for token_tree in token_stream.tokens.into_owned() { + TokenTree::free_owned_stable(token_tree); } + TokenStreamMetadata::free_owned_stable(token_stream.metadata); } } @@ -266,7 +250,7 @@ impl TokenStreamMetadata { /// Convert to native Rust representation, without taking the ownership of the string. /// - /// Note that you still need to free the memory by calling `from_owned_stable`. + /// Note that you still need to free the memory by calling `free_owned_stable`. /// /// # Safety #[doc(hidden)] @@ -281,21 +265,19 @@ impl TokenStreamMetadata { } } - /// Convert to native Rust representation, with taking the ownership of the string. + /// Take the ownership of memory under the pointer and drop it. 
/// /// Useful when you need to free the allocated memory. /// Only use on the same side of FFI-barrier, where the memory has been allocated. /// /// # Safety #[doc(hidden)] - pub unsafe fn from_owned_stable(metadata: StableTokenStreamMetadata) -> Self { - let original_file_path = metadata - .original_file_path - .map(|raw| from_raw_cstring(raw.as_ptr())); - let file_id = metadata.file_id.map(|raw| from_raw_cstring(raw.as_ptr())); - Self { - original_file_path, - file_id, + pub unsafe fn free_owned_stable(metadata: StableTokenStreamMetadata) { + if let Some(raw) = metadata.original_file_path { + free_raw_cstring(raw.as_ptr()); + } + if let Some(raw) = metadata.file_id { + free_raw_cstring(raw.as_ptr()); } } } @@ -324,7 +306,7 @@ impl AuxData { /// Convert to native Rust representation, without taking the ownership of the string. /// - /// Note that you still need to free the memory by calling `from_owned_stable`. + /// Note that you still need to free the memory by calling `free_owned_stable`. /// /// # Safety #[doc(hidden)] @@ -339,18 +321,20 @@ impl AuxData { } } - /// Convert to native Rust representation, with taking the ownership of the string. + /// Take the ownership of memory under the pointer and drop it. /// /// Useful when you need to free the allocated memory. /// Only use on the same side of FFI-barrier, where the memory has been allocated. /// /// # Safety #[doc(hidden)] - pub unsafe fn from_owned_stable(aux_data: StableAuxData) -> Option { + pub unsafe fn free_owned_stable(aux_data: StableAuxData) { match aux_data { - StableAuxData::None => None, - StableAuxData::Some(raw) => Some(Self::new(raw.into_owned())), - } + StableAuxData::None => {} + StableAuxData::Some(raw) => { + let _ = raw.into_owned(); + } + }; } } @@ -368,7 +352,7 @@ impl Diagnostic { /// Convert to native Rust representation, without taking the ownership of the string. /// - /// Note that you still need to free the memory by calling `from_owned_stable`. + /// Note that you still need to free the memory by calling `free_owned_stable`. /// /// # Safety #[doc(hidden)] @@ -379,18 +363,15 @@ impl Diagnostic { } } - /// Convert to native Rust representation, with taking the ownership of the string. + /// Take the ownership of memory under the pointer and drop it. /// /// Useful when you need to free the allocated memory. /// Only use on the same side of FFI-barrier, where the memory has been allocated. /// /// # Safety #[doc(hidden)] - pub unsafe fn from_owned_stable(diagnostic: StableDiagnostic) -> Self { - Self { - message: from_raw_cstring(diagnostic.message), - severity: Severity::from_stable(&diagnostic.severity), - } + pub unsafe fn free_owned_stable(diagnostic: StableDiagnostic) { + free_raw_cstring(diagnostic.message); } } @@ -429,7 +410,7 @@ impl ExpansionDefinition { } } - /// Take the ownership of the string. + /// Take the ownership of memory under the pointer and drop it. /// /// Useful when you need to free the allocated memory. /// Only use on the same side of FFI-barrier, where the memory has been allocated. @@ -437,7 +418,7 @@ impl ExpansionDefinition { /// # Safety #[doc(hidden)] pub unsafe fn free_owned(expansion: StableExpansion) { - let _ = from_raw_cstring(expansion.name); + free_raw_cstring(expansion.name); } } @@ -455,7 +436,7 @@ impl FullPathMarker { /// Convert to native Rust representation, without taking the ownership of the string. /// - /// Note that you still need to free the memory by calling `from_owned_stable`. 
+ /// Note that you still need to free the memory by calling `free_owned_stable`. /// /// # Safety #[doc(hidden)] @@ -466,18 +447,16 @@ impl FullPathMarker { } } - /// Convert to native Rust representation, with taking the ownership of the string. + /// Take the ownership of memory under the pointer and drop it. /// /// Useful when you need to free the allocated memory. /// Only use on the same side of FFI-barrier, where the memory has been allocated. /// /// # Safety #[doc(hidden)] - pub unsafe fn from_owned_stable(marker: StableFullPathMarker) -> Self { - Self { - key: from_raw_cstring(marker.key), - full_path: from_raw_cstring(marker.full_path), - } + pub unsafe fn free_owned_stable(marker: StableFullPathMarker) { + free_raw_cstring(marker.key); + free_raw_cstring(marker.full_path); } } @@ -507,7 +486,7 @@ impl PostProcessContext { /// Convert to native Rust representation, without taking the ownership of the string. /// - /// Note that you still need to free the memory by calling `from_owned_stable`. + /// Note that you still need to free the memory by calling `free_owned_stable`. /// /// # Safety #[doc(hidden)] @@ -528,41 +507,28 @@ impl PostProcessContext { } } - /// Convert to native Rust representation, with taking the ownership of the string. + /// Take the ownership of memory under the pointer and drop it. /// /// Useful when you need to free the allocated memory. /// Only use on the same side of FFI-barrier, where the memory has been allocated. /// /// # Safety #[doc(hidden)] - pub unsafe fn from_owned_stable(diagnostic: StablePostProcessContext) -> Self { - let aux_data = diagnostic - .aux_data - .into_owned() - .into_iter() - .filter_map(|a| AuxData::from_owned_stable(a)) - .collect::>(); - let full_path_markers = diagnostic - .full_path_markers - .into_owned() - .into_iter() - .map(|m| FullPathMarker::from_owned_stable(m)) - .collect::>(); - Self { - aux_data, - full_path_markers, + pub unsafe fn free_owned_stable(diagnostic: StablePostProcessContext) { + for aux_data in diagnostic.aux_data.into_owned() { + AuxData::free_owned_stable(aux_data) + } + for marker in diagnostic.full_path_markers.into_owned() { + FullPathMarker::free_owned_stable(marker); } } } -// Create a string from a raw pointer to a c_char. +// Create a c-string from a raw pointer to a c_char, and drop it immediately. // Note that this will free the underlying memory. -unsafe fn from_raw_cstring(raw: *mut c_char) -> String { - if raw.is_null() { - String::default() - } else { - let cstr = CString::from_raw(raw); - cstr.to_string_lossy().to_string() +unsafe fn free_raw_cstring(raw: *mut c_char) { + if !raw.is_null() { + let _ = CString::from_raw(raw); } } diff --git a/scarb/src/compiler/plugin/proc_macro/ffi.rs b/scarb/src/compiler/plugin/proc_macro/ffi.rs index bba326e84..26b5e74e6 100644 --- a/scarb/src/compiler/plugin/proc_macro/ffi.rs +++ b/scarb/src/compiler/plugin/proc_macro/ffi.rs @@ -163,7 +163,7 @@ impl ProcMacroInstance { attr: TokenStream, token_stream: TokenStream, ) -> ProcMacroResult { - // This must be manually freed with call to from_owned_stable. + // This must be manually freed with call to `free_owned_stable`. let stable_token_stream = token_stream.into_stable(); let stable_attr = attr.into_stable(); // Allocate proc macro name. @@ -177,8 +177,8 @@ impl ProcMacroInstance { // Free the memory allocated by the `stable_token_stream`. // This will call `CString::from_raw` under the hood, to take ownership. 
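// A minimal sketch of the intended pairing (illustration only, not part of the patched
// sources), assuming the `cairo-lang-macro` API above: whichever side calls `into_stable`
// owns the allocation and is the only side that may release it with `free_owned_stable`;
// the other side of the FFI barrier must never free it.
let stream = TokenStream::empty();
let stable = stream.into_stable(); // allocates the FFI-safe buffers
// ... hand `stable` to the plugin's `expand` entry point and read back the result ...
unsafe { TokenStream::free_owned_stable(stable) }; // freed on the allocating side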
unsafe { - TokenStream::from_owned_stable(stable_result.input); - TokenStream::from_owned_stable(stable_result.input_attr); + TokenStream::free_owned_stable(stable_result.input); + TokenStream::free_owned_stable(stable_result.input_attr); }; // Create Rust representation of the result. // Note, that the memory still needs to be freed on the allocator side! @@ -203,7 +203,7 @@ impl ProcMacroInstance { // Actual call to FFI interface for aux data callback. let context = (self.plugin.vtable.post_process_callback)(context); // Free the allocated memory. - let _ = unsafe { PostProcessContext::from_owned_stable(context) }; + unsafe { PostProcessContext::free_owned_stable(context) }; } pub fn doc(&self, item_name: SmolStr) -> Option { From c954d6c379d66af6367618db5f51e59fdcbcc3a3 Mon Sep 17 00:00:00 2001 From: maciektr Date: Wed, 13 Nov 2024 14:37:35 +0100 Subject: [PATCH 03/38] Add cairo edition to TokenStreamMetadata (#1704) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit commit-id:83c7db1f --- **Stack**: - #1734 - #1722 - #1704 ⬅ ⚠️ *Part of a stack created by [spr](https://github.com/ejoffe/spr). Do not merge manually using the UI - doing so may have unexpected results.* --- plugins/cairo-lang-macro-stable/src/lib.rs | 1 + plugins/cairo-lang-macro/src/types/conversion.rs | 13 +++++++++++-- plugins/cairo-lang-macro/src/types/token.rs | 5 ++++- scarb/src/compiler/plugin/proc_macro/host.rs | 16 +++++++++++----- scarb/tests/build_cairo_plugin.rs | 3 +++ 5 files changed, 30 insertions(+), 8 deletions(-) diff --git a/plugins/cairo-lang-macro-stable/src/lib.rs b/plugins/cairo-lang-macro-stable/src/lib.rs index fc5542987..912a72847 100644 --- a/plugins/cairo-lang-macro-stable/src/lib.rs +++ b/plugins/cairo-lang-macro-stable/src/lib.rs @@ -54,6 +54,7 @@ pub struct StableTokenStream { pub struct StableTokenStreamMetadata { pub original_file_path: Option>, pub file_id: Option>, + pub edition: Option>, } /// Auxiliary data returned by the procedural macro. 
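// An illustrative aside, not from the patched sources: with the new field, metadata
// carries the Cairo edition next to the file path and id. This assumes the three-argument
// `TokenStreamMetadata::new` introduced later in this patch; the concrete edition string
// comes from Scarb's `edition_variant`, so "2024_07" below is only a stand-in value.
let metadata = TokenStreamMetadata::new("src/lib.cairo", "file_id_hash", "2024_07");
let stream = TokenStream::empty().with_metadata(metadata);
assert_eq!(stream.metadata().edition.as_deref(), Some("2024_07"));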
diff --git a/plugins/cairo-lang-macro/src/types/conversion.rs b/plugins/cairo-lang-macro/src/types/conversion.rs index 69d375047..883620ebd 100644 --- a/plugins/cairo-lang-macro/src/types/conversion.rs +++ b/plugins/cairo-lang-macro/src/types/conversion.rs @@ -238,13 +238,17 @@ impl TokenStreamMetadata { pub fn into_stable(self) -> StableTokenStreamMetadata { let original_file_path = self .original_file_path - .and_then(|path| NonNull::new(CString::new(path).unwrap().into_raw())); + .and_then(|value| NonNull::new(CString::new(value).unwrap().into_raw())); let file_id = self .file_id - .and_then(|path| NonNull::new(CString::new(path).unwrap().into_raw())); + .and_then(|value| NonNull::new(CString::new(value).unwrap().into_raw())); + let edition = self + .edition + .and_then(|value| NonNull::new(CString::new(value).unwrap().into_raw())); StableTokenStreamMetadata { original_file_path, file_id, + edition, } } @@ -259,9 +263,11 @@ impl TokenStreamMetadata { .original_file_path .map(|raw| from_raw_cstr(raw.as_ptr())); let file_id = metadata.file_id.map(|raw| from_raw_cstr(raw.as_ptr())); + let edition = metadata.edition.map(|raw| from_raw_cstr(raw.as_ptr())); Self { original_file_path, file_id, + edition, } } @@ -279,6 +285,9 @@ impl TokenStreamMetadata { if let Some(raw) = metadata.file_id { free_raw_cstring(raw.as_ptr()); } + if let Some(raw) = metadata.edition { + free_raw_cstring(raw.as_ptr()); + } } } diff --git a/plugins/cairo-lang-macro/src/types/token.rs b/plugins/cairo-lang-macro/src/types/token.rs index 52b50068f..97f622fa1 100644 --- a/plugins/cairo-lang-macro/src/types/token.rs +++ b/plugins/cairo-lang-macro/src/types/token.rs @@ -53,6 +53,8 @@ pub struct TokenStreamMetadata { /// /// It is guaranteed, that the `file_id` will be unique for each file. pub file_id: Option, + /// Cairo edition defined for the token stream. 
+ pub edition: Option, } impl TokenStream { @@ -102,10 +104,11 @@ impl Display for TokenStream { impl TokenStreamMetadata { #[doc(hidden)] - pub fn new(file_path: impl ToString, file_id: impl ToString) -> Self { + pub fn new(file_path: impl ToString, file_id: impl ToString, edition: impl ToString) -> Self { Self { original_file_path: Some(file_path.to_string()), file_id: Some(file_id.to_string()), + edition: Some(edition.to_string()), } } } diff --git a/scarb/src/compiler/plugin/proc_macro/host.rs b/scarb/src/compiler/plugin/proc_macro/host.rs index cfc154310..ce1898c00 100644 --- a/scarb/src/compiler/plugin/proc_macro/host.rs +++ b/scarb/src/compiler/plugin/proc_macro/host.rs @@ -2,7 +2,7 @@ use crate::compiler::plugin::proc_macro::compilation::SharedLibraryProvider; use crate::compiler::plugin::proc_macro::{ Expansion, ExpansionKind, ProcMacroInstance, TokenStreamBuilder, }; -use crate::core::{Config, Package, PackageId}; +use crate::core::{Config, Package, PackageId, edition_variant}; use anyhow::{Context, Result, bail, ensure}; use cairo_lang_defs::ids::{ModuleItemId, TopLevelLanguageElementId}; use cairo_lang_defs::patcher::{PatchBuilder, RewriteNode}; @@ -12,6 +12,7 @@ use cairo_lang_defs::plugin::{ }; use cairo_lang_defs::plugin::{InlineMacroExprPlugin, InlinePluginResult, PluginDiagnostic}; use cairo_lang_diagnostics::ToOption; +use cairo_lang_filesystem::db::Edition; use cairo_lang_filesystem::ids::CodeMapping; use cairo_lang_macro::{ AuxData, Diagnostic, FullPathMarker, ProcMacroResult, Severity, TokenStream, @@ -867,11 +868,16 @@ impl ProcMacroHostPlugin { .or_insert(markers); } - fn calculate_metadata(db: &dyn SyntaxGroup, item_ast: ast::ModuleItem) -> TokenStreamMetadata { + fn calculate_metadata( + db: &dyn SyntaxGroup, + item_ast: ast::ModuleItem, + edition: Edition, + ) -> TokenStreamMetadata { let stable_ptr = item_ast.clone().stable_ptr().untyped(); let file_path = stable_ptr.file_id(db).full_path(db.upcast()); let file_id = short_hash(file_path.clone()); - TokenStreamMetadata::new(file_path, file_id) + let edition = edition_variant(edition); + TokenStreamMetadata::new(file_path, file_id, edition) } } @@ -961,9 +967,9 @@ impl MacroPlugin for ProcMacroHostPlugin { &self, db: &dyn SyntaxGroup, item_ast: ast::ModuleItem, - _metadata: &MacroPluginMetadata<'_>, + metadata: &MacroPluginMetadata<'_>, ) -> PluginResult { - let stream_metadata = Self::calculate_metadata(db, item_ast.clone()); + let stream_metadata = Self::calculate_metadata(db, item_ast.clone(), metadata.edition); // Handle inner functions. if let InnerAttrExpansionResult::Some(result) = self.expand_inner_attr(db, item_ast.clone()) diff --git a/scarb/tests/build_cairo_plugin.rs b/scarb/tests/build_cairo_plugin.rs index 452094af3..b0a572972 100644 --- a/scarb/tests/build_cairo_plugin.rs +++ b/scarb/tests/build_cairo_plugin.rs @@ -564,6 +564,9 @@ fn can_read_token_stream_metadata() { file_id: Some( "[..]", ), + edition: Some( + "[..]", + ), } [..]Finished `dev` profile target(s) in [..] "#}); From 5f8a2389b6d0b1570441196a1b13c1b08811a640 Mon Sep 17 00:00:00 2001 From: maciektr Date: Fri, 15 Nov 2024 11:38:44 +0100 Subject: [PATCH 04/38] Allocate token's content into an arena (#1722) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit commit-id:5e4b122f Add token stream ser/de tests --- **Stack**: - #1749 - #1748 - #1745 - #1747 - #1722 ⬅ ⚠️ *Part of a stack created by [spr](https://github.com/ejoffe/spr). 
Do not merge manually using the UI - doing so may have unexpected results.* --- Cargo.lock | 3 +- Cargo.toml | 1 + plugins/cairo-lang-macro-stable/src/lib.rs | 4 +- plugins/cairo-lang-macro/Cargo.toml | 3 +- plugins/cairo-lang-macro/src/lib.rs | 64 ++-- .../cairo-lang-macro/src/types/conversion.rs | 55 ++-- plugins/cairo-lang-macro/src/types/mod.rs | 2 +- plugins/cairo-lang-macro/src/types/token.rs | 277 +++++++++++++++++- scarb/src/compiler/plugin/proc_macro/ffi.rs | 4 +- scarb/src/compiler/plugin/proc_macro/host.rs | 74 +++-- scarb/src/compiler/plugin/proc_macro/types.rs | 41 +-- scarb/tests/build_cairo_plugin.rs | 11 +- scarb/tests/package.rs | 1 + scarb/tests/proc_macro_server.rs | 6 +- 14 files changed, 426 insertions(+), 120 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2649cfe8f..05d1a8db7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -946,7 +946,8 @@ dependencies = [ name = "cairo-lang-macro" version = "0.1.1" dependencies = [ - "cairo-lang-macro-attributes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bumpalo", + "cairo-lang-macro-attributes 0.1.0", "cairo-lang-macro-stable 1.0.0", "linkme", "serde", diff --git a/Cargo.toml b/Cargo.toml index 47c8d21cb..074dc4909 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -39,6 +39,7 @@ anyhow = "1" assert_fs = "1" async-trait = "0.1" axum = { version = "0.6", features = ["http2"] } +bumpalo = "3.16.0" bincode = "2.0.1" cairo-lang-compiler = "*" cairo-lang-defs = "*" diff --git a/plugins/cairo-lang-macro-stable/src/lib.rs b/plugins/cairo-lang-macro-stable/src/lib.rs index 912a72847..c836ed850 100644 --- a/plugins/cairo-lang-macro-stable/src/lib.rs +++ b/plugins/cairo-lang-macro-stable/src/lib.rs @@ -9,7 +9,8 @@ pub mod ffi; #[derive(Debug)] pub struct StableToken { pub span: StableTextSpan, - pub content: *mut c_char, + pub ptr: *const u8, + pub len: usize, } #[repr(C)] @@ -44,6 +45,7 @@ pub type StableExpansionsList = StableSlice; pub struct StableTokenStream { pub tokens: StableSlice, pub metadata: StableTokenStreamMetadata, + pub size_hint: usize, } /// Token stream metadata. diff --git a/plugins/cairo-lang-macro/Cargo.toml b/plugins/cairo-lang-macro/Cargo.toml index 0f3fcc1e5..36af1a0ce 100644 --- a/plugins/cairo-lang-macro/Cargo.toml +++ b/plugins/cairo-lang-macro/Cargo.toml @@ -14,7 +14,8 @@ readme = "README.md" repository.workspace = true [dependencies] -cairo-lang-macro-attributes = "0.1" +bumpalo.workspace = true +cairo-lang-macro-attributes = { path = "../cairo-lang-macro-attributes" } cairo-lang-macro-stable = { path = "../cairo-lang-macro-stable" } linkme.workspace = true serde = { workspace = true, optional = true } diff --git a/plugins/cairo-lang-macro/src/lib.rs b/plugins/cairo-lang-macro/src/lib.rs index 1f7d1add6..27296dc43 100644 --- a/plugins/cairo-lang-macro/src/lib.rs +++ b/plugins/cairo-lang-macro/src/lib.rs @@ -18,17 +18,22 @@ pub use cairo_lang_macro_attributes::*; #[doc(hidden)] pub use linkme; +use std::cell::RefCell; use cairo_lang_macro_stable::ffi::StableSlice; use cairo_lang_macro_stable::{ StableExpansionsList, StablePostProcessContext, StableProcMacroResult, }; use std::ffi::{CStr, CString, c_char}; +use std::ops::Deref; mod types; pub use types::*; +// A thread-local allocation context for allocating tokens on proc macro side. 
+thread_local!(static CONTEXT: RefCell = RefCell::default() ); + #[doc(hidden)] #[derive(Clone)] pub struct ExpansionDefinition { @@ -97,29 +102,42 @@ pub unsafe extern "C" fn expand( stable_attr: cairo_lang_macro_stable::StableTokenStream, stable_token_stream: cairo_lang_macro_stable::StableTokenStream, ) -> cairo_lang_macro_stable::StableResultWrapper { - let token_stream = TokenStream::from_stable(&stable_token_stream); - let attr_token_stream = TokenStream::from_stable(&stable_attr); - let item_name = CStr::from_ptr(item_name).to_string_lossy().to_string(); - let fun = MACRO_DEFINITIONS_SLICE - .iter() - .find_map(|m| { - if m.name == item_name.as_str() { - Some(m.fun.clone()) - } else { - None - } - }) - .expect("procedural macro not found"); - let result = match fun { - ExpansionFunc::Attr(fun) => fun(attr_token_stream, token_stream), - ExpansionFunc::Other(fun) => fun(token_stream), - }; - let result: StableProcMacroResult = result.into_stable(); - cairo_lang_macro_stable::StableResultWrapper { - input: stable_token_stream, - input_attr: stable_attr, - output: result, - } + CONTEXT.with(|ctx_cell| { + // Read size hint from stable token stream. This will be used to create a sufficiently + // large bump allocation buffer. + let size_hint: usize = stable_token_stream.size_hint + stable_attr.size_hint; + // Replace the allocation context with a new one. + // If there is no interned string guards, the old context will be de-allocated. + ctx_cell.replace(AllocationContext::with_capacity(size_hint)); + let ctx_borrow = ctx_cell.borrow(); + let ctx: &AllocationContext = ctx_borrow.deref(); + // Copy the stable token stream into current context. + let token_stream = TokenStream::from_stable_in(&stable_token_stream, ctx); + let attr_token_stream = TokenStream::from_stable_in(&stable_attr, ctx); + let item_name = CStr::from_ptr(item_name) + .to_str() + .expect("item name must be a valid string"); + let fun = MACRO_DEFINITIONS_SLICE + .iter() + .find_map(|m| { + if m.name == item_name { + Some(m.fun.clone()) + } else { + None + } + }) + .expect("procedural macro not found"); + let result = match fun { + ExpansionFunc::Attr(fun) => fun(attr_token_stream, token_stream), + ExpansionFunc::Other(fun) => fun(token_stream), + }; + let result: StableProcMacroResult = result.into_stable(); + cairo_lang_macro_stable::StableResultWrapper { + input: stable_token_stream, + input_attr: stable_attr, + output: result, + } + }) } /// Free the memory allocated for the [`StableProcMacroResult`]. 
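// Sketch for illustration (not part of the patch): macro authors never manage the arena
// directly. `Token::new` interns into the thread-local CONTEXT installed by `expand`
// above, while `Token::new_in` targets an explicit context; both yield equal tokens,
// the context only decides where the content bytes live.
let span = TextSpan::new(0, 5);
let implicit = Token::new("hello", span.clone()); // thread-local CONTEXT
let ctx = AllocationContext::with_capacity(5);
let explicit = Token::new_in("hello", span, &ctx); // caller-provided arena
assert_eq!(implicit.content.as_ref(), explicit.content.as_ref());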
diff --git a/plugins/cairo-lang-macro/src/types/conversion.rs b/plugins/cairo-lang-macro/src/types/conversion.rs index 883620ebd..53b590fa3 100644 --- a/plugins/cairo-lang-macro/src/types/conversion.rs +++ b/plugins/cairo-lang-macro/src/types/conversion.rs @@ -1,6 +1,7 @@ use crate::{ - AuxData, Diagnostic, ExpansionDefinition, FullPathMarker, PostProcessContext, ProcMacroResult, - Severity, TextSpan, Token, TokenStream, TokenStreamMetadata, TokenTree, + AllocationContext, AuxData, Diagnostic, ExpansionDefinition, FullPathMarker, + PostProcessContext, ProcMacroResult, Severity, TextSpan, Token, TokenStream, + TokenStreamMetadata, TokenTree, }; use cairo_lang_macro_stable::ffi::StableSlice; use cairo_lang_macro_stable::{ @@ -30,7 +31,7 @@ impl ProcMacroResult { .map(|m| CString::new(m).unwrap().into_raw()) .collect::>(); StableProcMacroResult { - token_stream: self.token_stream.into_stable(), + token_stream: self.token_stream.as_stable(), aux_data: AuxData::maybe_into_stable(self.aux_data), diagnostics: StableSlice::new(diagnostics), full_path_markers: StableSlice::new(full_path_markers), @@ -44,6 +45,7 @@ impl ProcMacroResult { /// # Safety #[doc(hidden)] pub unsafe fn from_stable(result: &StableProcMacroResult) -> Self { + let ctx = AllocationContext::with_capacity(result.token_stream.size_hint); let (ptr, n) = result.diagnostics.raw_parts(); let diagnostics = slice::from_raw_parts(ptr, n) .iter() @@ -55,7 +57,7 @@ impl ProcMacroResult { .map(|m| from_raw_cstr(*m)) .collect::>(); ProcMacroResult { - token_stream: TokenStream::from_stable(&result.token_stream), + token_stream: TokenStream::from_stable_in(&result.token_stream, &ctx), diagnostics, full_path_markers, aux_data: AuxData::from_stable(&result.aux_data), @@ -109,11 +111,13 @@ impl TextSpan { impl Token { /// Convert to FFI-safe representation. #[doc(hidden)] - pub fn into_stable(self) -> StableToken { - let cstr = CString::new(self.content.as_bytes()).unwrap(); + pub fn as_stable(&self) -> StableToken { + let ptr = self.content.as_ptr(); + let len = self.content.len(); StableToken { - span: self.span.into_stable(), - content: cstr.into_raw(), + span: self.span.clone().into_stable(), + ptr, + len, } } @@ -123,9 +127,11 @@ impl Token { /// /// # Safety #[doc(hidden)] - pub unsafe fn from_stable(token: &StableToken) -> Self { + pub unsafe fn from_stable_in(token: &StableToken, ctx: &AllocationContext) -> Self { + let content = slice::from_raw_parts(token.ptr, token.len); + let content = ctx.intern(std::str::from_utf8(content).unwrap()); Self { - content: from_raw_cstr(token.content), + content, span: TextSpan::from_stable(&token.span), } } @@ -138,7 +144,6 @@ impl Token { /// # Safety #[doc(hidden)] pub unsafe fn free_owned_stable(token: StableToken) { - free_raw_cstring(token.content); TextSpan::free_owned_stable(token.span); } } @@ -146,9 +151,9 @@ impl Token { impl TokenTree { /// Convert to FFI-safe representation. 
#[doc(hidden)] - pub fn into_stable(self) -> StableTokenTree { + pub fn as_stable(&self) -> StableTokenTree { match self { - Self::Ident(token) => StableTokenTree::Ident(token.into_stable()), + Self::Ident(token) => StableTokenTree::Ident(token.as_stable()), } } @@ -158,9 +163,9 @@ impl TokenTree { /// /// # Safety #[doc(hidden)] - pub unsafe fn from_stable(token_tree: &StableTokenTree) -> Self { + pub unsafe fn from_stable_in(token_tree: &StableTokenTree, ctx: &AllocationContext) -> Self { match token_tree { - StableTokenTree::Ident(token) => Self::Ident(Token::from_stable(token)), + StableTokenTree::Ident(token) => Self::Ident(Token::from_stable_in(token, ctx)), } } @@ -185,15 +190,20 @@ impl TokenStream { /// /// # Safety #[doc(hidden)] - pub fn into_stable(self) -> StableTokenStream { + pub fn as_stable(&self) -> StableTokenStream { + let mut size_hint: usize = 0; let tokens = self .tokens - .into_iter() - .map(|token| token.into_stable()) + .iter() + .map(|token| { + size_hint += token.size_hint(); + token.as_stable() + }) .collect::>(); StableTokenStream { tokens: StableSlice::new(tokens), - metadata: self.metadata.into_stable(), + metadata: self.metadata.clone().into_stable(), + size_hint, } } @@ -203,11 +213,14 @@ impl TokenStream { /// /// # Safety #[doc(hidden)] - pub unsafe fn from_stable(token_stream: &StableTokenStream) -> Self { + pub unsafe fn from_stable_in( + token_stream: &StableTokenStream, + ctx: &AllocationContext, + ) -> Self { let (ptr, n) = token_stream.tokens.raw_parts(); let tokens = slice::from_raw_parts(ptr, n) .iter() - .map(|token_tree| TokenTree::from_stable(token_tree)) + .map(|token_tree| TokenTree::from_stable_in(token_tree, ctx)) .collect::>(); Self { tokens, diff --git a/plugins/cairo-lang-macro/src/types/mod.rs b/plugins/cairo-lang-macro/src/types/mod.rs index 618d61746..45ab5fca2 100644 --- a/plugins/cairo-lang-macro/src/types/mod.rs +++ b/plugins/cairo-lang-macro/src/types/mod.rs @@ -44,7 +44,7 @@ pub struct ProcMacroResult { /// let token_stream = TokenStream::new(vec![ /// TokenTree::Ident( /// Token::new( -/// code.clone(), +/// &code, /// TextSpan::new(0, code.len()) /// ) /// ) diff --git a/plugins/cairo-lang-macro/src/types/token.rs b/plugins/cairo-lang-macro/src/types/token.rs index 97f622fa1..95dab2cb5 100644 --- a/plugins/cairo-lang-macro/src/types/token.rs +++ b/plugins/cairo-lang-macro/src/types/token.rs @@ -1,17 +1,85 @@ +use crate::CONTEXT; +use bumpalo::Bump; use std::fmt::Display; +use std::hash::{Hash, Hasher}; +use std::ops::Deref; +use std::rc::Rc; /// An abstract stream of Cairo tokens. /// /// This is both input and part of an output of a procedural macro. #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "serde", serde(try_from = "deserializer::TokenStream"))] #[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] pub struct TokenStream { pub tokens: Vec, pub metadata: TokenStreamMetadata, } +/// This module implements deserialization of the token stream, for the serde feature. +/// This is intermediate representation is needed, as real [`Token`] only contains a reference to the +/// represented string, which needs to be allocated outside the [`Token`] struct. +/// Here we allocate each token to an owned String with SerDe and then copy it's content into context. 
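// For illustration only (not in the patched sources), assuming the crate's optional
// `serde` feature is enabled and `serde_json` is available: a stream round-trips through
// this intermediate representation — serialization writes plain strings, deserialization
// re-interns them into a fresh `AllocationContext` via `TryFrom`.
let original = TokenStream::new(vec![TokenTree::Ident(Token::new("a", TextSpan::new(0, 1)))]);
let json = serde_json::to_string(&original).unwrap();
let restored: TokenStream = serde_json::from_str(&json).unwrap();
assert_eq!(original, restored);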
+#[cfg(feature = "serde")] +#[doc(hidden)] +mod deserializer { + use crate::{AllocationContext, TextSpan, TokenStreamMetadata}; + use std::fmt::{Display, Formatter}; + + #[derive(serde::Serialize, serde::Deserialize)] + pub struct TokenStream { + pub tokens: Vec, + pub metadata: TokenStreamMetadata, + } + + #[derive(serde::Serialize, serde::Deserialize)] + pub enum TokenTree { + Ident(Token), + } + + #[derive(serde::Serialize, serde::Deserialize)] + pub struct Token { + pub content: String, + pub span: TextSpan, + } + + pub struct Error {} + + impl Display for Error { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.write_str("TokenStream deserialization error") + } + } + + impl TryFrom for crate::TokenStream { + type Error = Error; + + fn try_from(value: TokenStream) -> Result { + let ctx = AllocationContext::default(); + let tokens = value + .tokens + .into_iter() + .map(|token| match token { + TokenTree::Ident(token) => { + let content = ctx.intern(token.content.as_str()); + let token = crate::Token { + content, + span: token.span, + }; + crate::TokenTree::Ident(token) + } + }) + .collect::>(); + Ok(Self { + tokens, + metadata: value.metadata, + }) + } + } +} + /// A single token or a delimited sequence of token trees. -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "serde", derive(serde::Serialize))] #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum TokenTree { Ident(Token), @@ -23,6 +91,16 @@ impl Default for TokenTree { } } +impl TokenTree { + /// Get the size hint for the [`TokenTree`]. + /// This can be used to estimate size of a buffer needed for allocating this [`TokenTree`]. + pub(crate) fn size_hint(&self) -> usize { + match self { + Self::Ident(token) => token.size_hint(), + } + } +} + /// A range of text offsets that form a span (like text selection). #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] #[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] @@ -34,13 +112,130 @@ pub struct TextSpan { /// A single Cairo token. /// /// The most atomic item of Cairo code representation. -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "serde", derive(serde::Serialize))] #[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] pub struct Token { - pub content: String, + pub content: InternedStr, pub span: TextSpan, } +impl Token { + /// Get the size hint for the [`Token`]. + /// This can be used to estimate size of a buffer needed for allocating this [`Token`]. + pub(crate) fn size_hint(&self) -> usize { + self.content.deref().len() + } +} + +/// A wrapper over a string pointer. +/// This contains a pointer to a string allocated in a bump allocator +/// and a guard which keeps the buffer alive. +/// This way we do not need to allocate a new string, +/// but also do not need to worry about the lifetime of the string. +#[derive(Debug, Clone)] +pub struct InternedStr { + ptr: *const str, + // Holding a rc to the underlying buffer, so that ptr will always point to valid memory. 
+ _bump: Rc, +} + +impl InternedStr { + #[allow(unknown_lints)] + #[allow(private_interfaces)] + #[doc(hidden)] + pub(crate) fn new_in(s: &str, bump: Rc) -> Self { + let allocated = bump.0.alloc_str(s); + let ptr = allocated as *const str; + Self { ptr, _bump: bump } + } +} + +impl Default for InternedStr { + fn default() -> Self { + Self { + ptr: "" as *const str, + _bump: Rc::default(), + } + } +} + +impl AsRef for InternedStr { + fn as_ref(&self) -> &str { + self.deref() + } +} + +impl Deref for InternedStr { + type Target = str; + + fn deref(&self) -> &Self::Target { + unsafe { &*self.ptr } + } +} + +#[cfg(feature = "serde")] +impl serde::Serialize for InternedStr { + fn serialize(&self, s: S) -> Result { + s.serialize_str(self.as_ref()) + } +} + +impl PartialEq for InternedStr { + fn eq(&self, other: &Self) -> bool { + self.as_ref().eq(other.as_ref()) + } +} + +impl Eq for InternedStr {} + +impl Hash for InternedStr { + fn hash(&self, state: &mut H) { + self.as_ref().hash(state); + } +} + +/// This wrapper de-allocates the underlying buffer on drop. +#[derive(Debug, Default)] +pub(crate) struct BumpWrap(pub Bump); + +impl Drop for BumpWrap { + fn drop(&mut self) { + self.0.reset(); + } +} + +/// A context for allocating Cairo tokens. +/// This wrapper contains a bump allocator, which is used to allocate strings for tokens. +#[derive(Clone)] +pub struct AllocationContext { + bump: Rc, +} + +impl AllocationContext { + /// Allocate a new context with pre-determined buffer size. + pub fn with_capacity(size_hint: usize) -> Self { + Self { + bump: Rc::new(BumpWrap(Bump::with_capacity(size_hint))), + } + } + + /// Allocate a string in the context. + /// This returned a string pointer, guarded by reference counter to the buffer. + /// The buffer will be deallocated when the last reference to the buffer is dropped. + /// No special handling or lifetimes are needed for the string. + pub(crate) fn intern(&self, value: &str) -> InternedStr { + InternedStr::new_in(value, self.bump.clone()) + } +} + +impl Default for AllocationContext { + fn default() -> Self { + Self { + bump: Rc::new(BumpWrap(Bump::new())), + } + } +} + /// Metadata of [`TokenStream`]. /// /// This struct describes the origin of the [`TokenStream`]. @@ -84,8 +279,9 @@ impl TokenStream { &self.metadata } + /// Check if the [`TokenStream`] is empty. pub fn is_empty(&self) -> bool { - self.to_string().is_empty() + self.tokens.is_empty() } } @@ -94,7 +290,7 @@ impl Display for TokenStream { for token in &self.tokens { match token { TokenTree::Ident(token) => { - write!(f, "{}", token.content.clone())?; + write!(f, "{}", token.content.as_ref())?; } } } @@ -114,19 +310,88 @@ impl TokenStreamMetadata { } impl TokenTree { + /// Create a new [`TokenTree`] from an identifier [`Token`]. pub fn from_ident(token: Token) -> Self { Self::Ident(token) } } impl TextSpan { + /// Create a new [`TextSpan`]. pub fn new(start: usize, end: usize) -> TextSpan { TextSpan { start, end } } } impl Token { - pub fn new(content: String, span: TextSpan) -> Self { + /// Create [`Token`] in thread-local context. + pub fn new(content: impl AsRef, span: TextSpan) -> Self { + CONTEXT.with(|ctx| { + let ctx_borrow = ctx.borrow(); + let ctx: &AllocationContext = ctx_borrow.deref(); + Self::new_in(content, span, ctx) + }) + } + + /// Create [`Token`] in specified context. 
+ pub fn new_in(content: impl AsRef, span: TextSpan, ctx: &AllocationContext) -> Self { + let content = ctx.intern(content.as_ref()); Self { content, span } } } + +#[cfg(test)] +mod test { + use crate::{AllocationContext, TextSpan, Token, TokenStream, TokenTree}; + + #[test] + pub fn can_serde_empty_token_stream() { + let original = TokenStream::empty(); + let serialized = serde_json::to_string(&original).unwrap(); + let derived: TokenStream = serde_json::from_str(serialized.as_str()).unwrap(); + assert_eq!(original, derived); + let val: serde_json::Value = serde_json::from_str(serialized.as_str()).unwrap(); + assert_eq!( + val, + serde_json::json!({ + "tokens": [], + "metadata": { + "original_file_path": null, + "file_id": null, + "edition": null + } + }) + ); + } + + #[test] + pub fn can_serde_token_stream() { + let ctx = AllocationContext::default(); + let original = TokenStream::new(vec![ + TokenTree::Ident(Token::new_in("first", TextSpan::new(0, 1), &ctx)), + TokenTree::Ident(Token::new_in("second", TextSpan::new(2, 3), &ctx)), + TokenTree::Ident(Token::new_in("third", TextSpan::new(4, 5), &ctx)), + TokenTree::Ident(Token::new_in("fourth", TextSpan::new(6, 7), &ctx)), + ]); + let serialized = serde_json::to_string(&original).unwrap(); + let derived: TokenStream = serde_json::from_str(serialized.as_str()).unwrap(); + assert_eq!(original, derived); + let val: serde_json::Value = serde_json::from_str(serialized.as_str()).unwrap(); + assert_eq!( + val, + serde_json::json!({ + "tokens": [ + {"Ident": {"content": "first", "span": {"start": 0, "end": 1}}}, + {"Ident": {"content": "second", "span": {"start": 2, "end": 3}}}, + {"Ident": {"content": "third", "span": {"start": 4, "end": 5}}}, + {"Ident": {"content": "fourth", "span": {"start": 6, "end": 7}}}, + ], + "metadata": { + "original_file_path": null, + "file_id": null, + "edition": null + } + }) + ); + } +} diff --git a/scarb/src/compiler/plugin/proc_macro/ffi.rs b/scarb/src/compiler/plugin/proc_macro/ffi.rs index 26b5e74e6..3fbf7cecd 100644 --- a/scarb/src/compiler/plugin/proc_macro/ffi.rs +++ b/scarb/src/compiler/plugin/proc_macro/ffi.rs @@ -164,8 +164,8 @@ impl ProcMacroInstance { token_stream: TokenStream, ) -> ProcMacroResult { // This must be manually freed with call to `free_owned_stable`. - let stable_token_stream = token_stream.into_stable(); - let stable_attr = attr.into_stable(); + let stable_token_stream = token_stream.as_stable(); + let stable_attr = attr.as_stable(); // Allocate proc macro name. let item_name = CString::new(item_name.to_string()).unwrap().into_raw(); // Call FFI interface for code expansion. 
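// An illustrative aside (not from the patched sources): `as_stable` now borrows instead
// of consuming, because the stable tokens point straight into the arena-backed strings.
// Token contents are not copied across the boundary, and `free_owned_stable` releases
// only the token slice and metadata, never the contents — so the original stream has to
// outlive its stable view.
let ctx = AllocationContext::default();
let stream = TokenStream::new(vec![TokenTree::Ident(Token::new_in(
    "fn main() {}",
    TextSpan::new(0, 12),
    &ctx,
))]);
let stable = stream.as_stable(); // borrows `stream`; token contents are not duplicated
// ... pass `stable` across the FFI boundary ...
unsafe { TokenStream::free_owned_stable(stable) }; // frees the slice and metadata only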
diff --git a/scarb/src/compiler/plugin/proc_macro/host.rs b/scarb/src/compiler/plugin/proc_macro/host.rs index ce1898c00..eed44f510 100644 --- a/scarb/src/compiler/plugin/proc_macro/host.rs +++ b/scarb/src/compiler/plugin/proc_macro/host.rs @@ -15,8 +15,8 @@ use cairo_lang_diagnostics::ToOption; use cairo_lang_filesystem::db::Edition; use cairo_lang_filesystem::ids::CodeMapping; use cairo_lang_macro::{ - AuxData, Diagnostic, FullPathMarker, ProcMacroResult, Severity, TokenStream, - TokenStreamMetadata, + AllocationContext, AuxData, Diagnostic, FullPathMarker, ProcMacroResult, Severity, TokenStream, + TokenStreamMetadata, TokenTree, }; use cairo_lang_semantic::db::SemanticGroup; use cairo_lang_semantic::items::attribute::SemanticQueryAttrs; @@ -189,6 +189,7 @@ impl ProcMacroHostPlugin { let mut item_builder = PatchBuilder::new(db, &item_ast); let mut used_attr_names: HashSet = Default::default(); let mut all_none = true; + let ctx = AllocationContext::default(); match item_ast.clone() { ast::ModuleItem::Trait(trait_ast) => { @@ -216,13 +217,14 @@ impl ProcMacroHostPlugin { let mut token_stream_builder = TokenStreamBuilder::new(db); let attrs = func.attributes(db).elements(db); - let found = self.parse_attrs(db, &mut token_stream_builder, attrs); + let found = + self.parse_attrs(db, &mut token_stream_builder, attrs, &ctx); if let Some(name) = found.as_name() { used_attr_names.insert(name); } token_stream_builder.add_node(func.declaration(db).as_syntax_node()); token_stream_builder.add_node(func.body(db).as_syntax_node()); - let token_stream = token_stream_builder.build(); + let token_stream = token_stream_builder.build(&ctx); all_none = all_none && self.do_expand_inner_attr( @@ -277,14 +279,15 @@ impl ProcMacroHostPlugin { let mut token_stream_builder = TokenStreamBuilder::new(db); let attrs = func.attributes(db).elements(db); - let found = self.parse_attrs(db, &mut token_stream_builder, attrs); + let found = + self.parse_attrs(db, &mut token_stream_builder, attrs, &ctx); if let Some(name) = found.as_name() { used_attr_names.insert(name); } token_stream_builder.add_node(func.visibility(db).as_syntax_node()); token_stream_builder.add_node(func.declaration(db).as_syntax_node()); token_stream_builder.add_node(func.body(db).as_syntax_node()); - let token_stream = token_stream_builder.build(); + let token_stream = token_stream_builder.build(&ctx); all_none = all_none && self.do_expand_inner_attr( db, @@ -350,7 +353,7 @@ impl ProcMacroHostPlugin { let result = self.instance(input.package_id).generate_code( input.expansion.name.clone(), - args.clone(), + args, token_stream.clone(), ); @@ -370,12 +373,13 @@ impl ProcMacroHostPlugin { &self, db: &dyn SyntaxGroup, item_ast: ast::ModuleItem, + ctx: &AllocationContext, ) -> (AttrExpansionFound, TokenStream) { let mut token_stream_builder = TokenStreamBuilder::new(db); let input = match item_ast.clone() { ast::ModuleItem::Trait(trait_ast) => { let attrs = trait_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); token_stream_builder.add_node(trait_ast.visibility(db).as_syntax_node()); token_stream_builder.add_node(trait_ast.trait_kw(db).as_syntax_node()); token_stream_builder.add_node(trait_ast.name(db).as_syntax_node()); @@ -385,7 +389,7 @@ impl ProcMacroHostPlugin { } ast::ModuleItem::Impl(impl_ast) => { let attrs = impl_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, 
attrs); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); token_stream_builder.add_node(impl_ast.visibility(db).as_syntax_node()); token_stream_builder.add_node(impl_ast.impl_kw(db).as_syntax_node()); token_stream_builder.add_node(impl_ast.name(db).as_syntax_node()); @@ -397,7 +401,7 @@ impl ProcMacroHostPlugin { } ast::ModuleItem::Module(module_ast) => { let attrs = module_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); token_stream_builder.add_node(module_ast.visibility(db).as_syntax_node()); token_stream_builder.add_node(module_ast.module_kw(db).as_syntax_node()); token_stream_builder.add_node(module_ast.name(db).as_syntax_node()); @@ -406,7 +410,7 @@ impl ProcMacroHostPlugin { } ast::ModuleItem::FreeFunction(free_func_ast) => { let attrs = free_func_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); token_stream_builder.add_node(free_func_ast.visibility(db).as_syntax_node()); token_stream_builder.add_node(free_func_ast.declaration(db).as_syntax_node()); token_stream_builder.add_node(free_func_ast.body(db).as_syntax_node()); @@ -414,7 +418,7 @@ impl ProcMacroHostPlugin { } ast::ModuleItem::ExternFunction(extern_func_ast) => { let attrs = extern_func_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); token_stream_builder.add_node(extern_func_ast.visibility(db).as_syntax_node()); token_stream_builder.add_node(extern_func_ast.extern_kw(db).as_syntax_node()); token_stream_builder.add_node(extern_func_ast.declaration(db).as_syntax_node()); @@ -423,7 +427,7 @@ impl ProcMacroHostPlugin { } ast::ModuleItem::ExternType(extern_type_ast) => { let attrs = extern_type_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); token_stream_builder.add_node(extern_type_ast.visibility(db).as_syntax_node()); token_stream_builder.add_node(extern_type_ast.extern_kw(db).as_syntax_node()); token_stream_builder.add_node(extern_type_ast.type_kw(db).as_syntax_node()); @@ -434,7 +438,7 @@ impl ProcMacroHostPlugin { } ast::ModuleItem::Struct(struct_ast) => { let attrs = struct_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); token_stream_builder.add_node(struct_ast.visibility(db).as_syntax_node()); token_stream_builder.add_node(struct_ast.struct_kw(db).as_syntax_node()); token_stream_builder.add_node(struct_ast.name(db).as_syntax_node()); @@ -446,7 +450,7 @@ impl ProcMacroHostPlugin { } ast::ModuleItem::Enum(enum_ast) => { let attrs = enum_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); token_stream_builder.add_node(enum_ast.visibility(db).as_syntax_node()); token_stream_builder.add_node(enum_ast.enum_kw(db).as_syntax_node()); token_stream_builder.add_node(enum_ast.name(db).as_syntax_node()); @@ -458,7 +462,7 @@ impl ProcMacroHostPlugin { } _ => AttrExpansionFound::None, }; - let 
token_stream = token_stream_builder.build(); + let token_stream = token_stream_builder.build(ctx); (input, token_stream) } @@ -467,6 +471,7 @@ impl ProcMacroHostPlugin { db: &dyn SyntaxGroup, builder: &mut TokenStreamBuilder<'_>, attrs: Vec, + ctx: &AllocationContext, ) -> AttrExpansionFound { // This function parses attributes of the item, // checking if those attributes correspond to a procedural macro that should be fired. @@ -491,7 +496,7 @@ impl ProcMacroHostPlugin { if expansion.is_none() { let mut args_builder = TokenStreamBuilder::new(db); args_builder.add_node(attr.arguments(db).as_syntax_node()); - let args = args_builder.build(); + let args = args_builder.build(ctx); expansion = Some((found, args, attr.stable_ptr().untyped())); // Do not add the attribute for found expansion. continue; @@ -565,7 +570,6 @@ impl ProcMacroHostPlugin { let mut token_stream_builder = TokenStreamBuilder::new(db); token_stream_builder.add_node(item_ast.as_syntax_node()); token_stream_builder.with_metadata(stream_metadata.clone()); - let token_stream = token_stream_builder.build(); let mut aux_data = EmittedAuxData::default(); let mut all_diagnostics: Vec = Vec::new(); @@ -573,12 +577,14 @@ impl ProcMacroHostPlugin { let derives = self.parse_derive(db, item_ast.clone()); let any_derives = !derives.is_empty(); + let ctx = AllocationContext::default(); let mut derived_code = PatchBuilder::new(db, &item_ast); for derive in derives.iter() { + let token_stream = token_stream_builder.build(&ctx); let result = self.instance(derive.package_id).generate_code( derive.expansion.name.clone(), TokenStream::empty(), - token_stream.clone(), + token_stream, ); // Register diagnostics. @@ -598,7 +604,13 @@ impl ProcMacroHostPlugin { continue; } - derived_code.add_str(result.token_stream.to_string().as_str()); + for token in result.token_stream.tokens { + match token { + TokenTree::Ident(token) => { + derived_code.add_str(token.content.as_ref()); + } + } + } } if any_derives { @@ -647,10 +659,11 @@ impl ProcMacroHostPlugin { token_stream: TokenStream, stable_ptr: SyntaxStablePtrId, ) -> PluginResult { + let original = token_stream.to_string(); let result = self.instance(input.package_id).generate_code( input.expansion.name.clone(), - args.clone(), - token_stream.clone(), + args, + token_stream, ); // Handle token stream. @@ -676,10 +689,7 @@ impl ProcMacroHostPlugin { // In essence, `code: None, remove_original_item: false` means `ProcMacroHost` will not be // called again for this AST item. // This optimization limits the number of generated nodes a bit. - if last - && result.aux_data.is_none() - && token_stream.to_string() == result.token_stream.to_string() - { + if last && result.aux_data.is_none() && original == result.token_stream.to_string() { return PluginResult { code: None, remove_original_item: false, @@ -906,8 +916,8 @@ impl<'a> InnerAttrExpansionContext<'a> { result: ProcMacroResult, stable_ptr: SyntaxStablePtrId, ) -> String { - let expanded = result.token_stream.to_string(); - let changed = expanded.as_str() != original; + let result_str = result.token_stream.to_string(); + let changed = result_str != original; if changed { self.host @@ -924,7 +934,7 @@ impl<'a> InnerAttrExpansionContext<'a> { self.any_changed = self.any_changed || changed; - expanded + result_str } pub fn into_result( self, @@ -980,7 +990,8 @@ impl MacroPlugin for ProcMacroHostPlugin { // Expand first attribute. 
// Note that we only expand the first attribute, as we assume that the rest of the attributes // will be handled by a subsequent call to this function. - let (input, body) = self.parse_attribute(db, item_ast.clone()); + let ctx = AllocationContext::default(); + let (input, body) = self.parse_attribute(db, item_ast.clone(), &ctx); if let Some(result) = match input { AttrExpansionFound::Last { @@ -1100,11 +1111,12 @@ impl InlineMacroExprPlugin for ProcMacroInlinePlugin { syntax: &ast::ExprInlineMacro, _metadata: &MacroPluginMetadata<'_>, ) -> InlinePluginResult { + let ctx = AllocationContext::default(); let stable_ptr = syntax.clone().stable_ptr().untyped(); let arguments = syntax.arguments(db); let mut token_stream_builder = TokenStreamBuilder::new(db); token_stream_builder.add_node(arguments.as_syntax_node()); - let token_stream = token_stream_builder.build(); + let token_stream = token_stream_builder.build(&ctx); let result = self.instance().generate_code( self.expansion.name.clone(), TokenStream::empty(), diff --git a/scarb/src/compiler/plugin/proc_macro/types.rs b/scarb/src/compiler/plugin/proc_macro/types.rs index 10b16885a..51f34faff 100644 --- a/scarb/src/compiler/plugin/proc_macro/types.rs +++ b/scarb/src/compiler/plugin/proc_macro/types.rs @@ -1,4 +1,6 @@ -use cairo_lang_macro::{TextSpan, Token, TokenStream, TokenStreamMetadata, TokenTree}; +use cairo_lang_macro::{ + AllocationContext, TextSpan, Token, TokenStream, TokenStreamMetadata, TokenTree, +}; use cairo_lang_syntax::node::{SyntaxNode, db::SyntaxGroup}; /// Helps creating TokenStream based on multiple SyntaxNodes, @@ -26,29 +28,28 @@ impl<'a> TokenStreamBuilder<'a> { self.metadata = Some(metadata); } - pub fn build(self) -> TokenStream { - let mut result: Vec = Vec::default(); - for node in self.nodes.iter() { - let leaves = node.tokens(self.db); - let tokens = - leaves.map(|node| TokenTree::Ident(self.token_from_syntax_node(node.clone()))); - result.extend(tokens); - } + pub fn build(&self, ctx: &AllocationContext) -> TokenStream { + let result: Vec = self + .nodes + .iter() + .flat_map(|node| { + let leaves = node.tokens(self.db); + leaves.map(|node| TokenTree::Ident(self.token_from_syntax_node(node.clone(), ctx))) + }) + .collect(); - match self.metadata { - Some(metadata) => TokenStream::new(result.clone()).with_metadata(metadata.clone()), - None => TokenStream::new(result.clone()), + match self.metadata.as_ref() { + Some(metadata) => TokenStream::new(result).with_metadata(metadata.clone()), + None => TokenStream::new(result), } } - pub fn token_from_syntax_node(&self, node: SyntaxNode) -> Token { + pub fn token_from_syntax_node(&self, node: SyntaxNode, ctx: &AllocationContext) -> Token { let span = node.span(self.db).to_str_range(); - Token::new( - node.get_text(self.db), - TextSpan { - start: span.start, - end: span.end, - }, - ) + let span = TextSpan { + start: span.start, + end: span.end, + }; + Token::new_in(node.get_text(self.db), span, ctx) } } diff --git a/scarb/tests/build_cairo_plugin.rs b/scarb/tests/build_cairo_plugin.rs index b0a572972..70810a49b 100644 --- a/scarb/tests/build_cairo_plugin.rs +++ b/scarb/tests/build_cairo_plugin.rs @@ -788,16 +788,7 @@ fn cannot_duplicate_macros_across_packages() { fn cannot_use_undefined_macro() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); - CairoPluginProjectBuilder::default() - .lib_rs(indoc! 
{r##" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; - - #[attribute_macro] - pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - ProcMacroResult::new(token_stream) - } - "##}) - .build(&t); + CairoPluginProjectBuilder::default().build(&t); let project = temp.child("hello"); ProjectBuilder::start() .name("hello") diff --git a/scarb/tests/package.rs b/scarb/tests/package.rs index 4ee0781e1..e5d4c31e9 100644 --- a/scarb/tests/package.rs +++ b/scarb/tests/package.rs @@ -495,6 +495,7 @@ fn workspace() { #[test] fn cairo_plugin() { let t = TempDir::new().unwrap(); + // Note this will be packaged with `cairo-lang-macro` from crates, not the local one. CairoPluginProjectBuilder::default().build(&t); Scarb::quick_snapbox() diff --git a/scarb/tests/proc_macro_server.rs b/scarb/tests/proc_macro_server.rs index 500e086be..7287a8de6 100644 --- a/scarb/tests/proc_macro_server.rs +++ b/scarb/tests/proc_macro_server.rs @@ -95,7 +95,7 @@ fn expand_attribute() { attr: "rename_to_very_new_name".to_string(), args: TokenStream::empty(), item: TokenStream::new(vec![TokenTree::Ident(Token::new( - "fn some_test_fn(){}".to_string(), + "fn some_test_fn(){}", TextSpan::default(), ))]), }) @@ -133,7 +133,7 @@ fn expand_derive() { .component; let item = TokenStream::new(vec![TokenTree::Ident(Token::new( - "fn some_test_fn(){}".to_string(), + "fn some_test_fn(){}", TextSpan::default(), ))]); @@ -196,7 +196,7 @@ fn expand_inline() { context: ProcMacroScope { component }, name: "replace_all_15_with_25".to_string(), args: TokenStream::new(vec![TokenTree::Ident(Token::new( - "struct A { field: 15 , other_field: macro_call!(12)}".to_string(), + "struct A { field: 15 , other_field: macro_call!(12)}", TextSpan::default(), ))]), }) From d86bfa5fd35e543b7ed579cf44e4e00588b67d43 Mon Sep 17 00:00:00 2001 From: maciektr Date: Thu, 21 Nov 2024 17:53:31 +0100 Subject: [PATCH 05/38] Implement Debug for InternedStr (#1760) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit commit-id:919c7f28 --- **Stack**: - #1749 - #1748 - #1745 - #1747 - #1760 ⬅ ⚠️ *Part of a stack created by [spr](https://github.com/ejoffe/spr). Do not merge manually using the UI - doing so may have unexpected results.* --- plugins/cairo-lang-macro/src/types/token.rs | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/plugins/cairo-lang-macro/src/types/token.rs b/plugins/cairo-lang-macro/src/types/token.rs index 95dab2cb5..6d1bf5adc 100644 --- a/plugins/cairo-lang-macro/src/types/token.rs +++ b/plugins/cairo-lang-macro/src/types/token.rs @@ -1,6 +1,6 @@ use crate::CONTEXT; use bumpalo::Bump; -use std::fmt::Display; +use std::fmt::{Debug, Display, Write}; use std::hash::{Hash, Hasher}; use std::ops::Deref; use std::rc::Rc; @@ -132,13 +132,21 @@ impl Token { /// and a guard which keeps the buffer alive. /// This way we do not need to allocate a new string, /// but also do not need to worry about the lifetime of the string. -#[derive(Debug, Clone)] +#[derive(Clone)] pub struct InternedStr { ptr: *const str, // Holding a rc to the underlying buffer, so that ptr will always point to valid memory. 
_bump: Rc, } +impl Debug for InternedStr { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_char('"')?; + f.write_str(self.as_ref())?; + f.write_char('"') + } +} + impl InternedStr { #[allow(unknown_lints)] #[allow(private_interfaces)] From 319e70c96840d47fdf94d783ef3fa1fa53ec6277 Mon Sep 17 00:00:00 2001 From: maciektr Date: Thu, 21 Nov 2024 17:54:57 +0100 Subject: [PATCH 06/38] Refactor: split proc macro host into submodules (#1747) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit commit-id:6d8f33f9 --- **Stack**: - #1749 - #1748 - #1745 - #1747 ⬅ ⚠️ *Part of a stack created by [spr](https://github.com/ejoffe/spr). Do not merge manually using the UI - doing so may have unexpected results.* --- scarb/src/compiler/plugin/proc_macro/ffi.rs | 3 +- scarb/src/compiler/plugin/proc_macro/host.rs | 1224 ----------------- .../plugin/proc_macro/host/attribute.rs | 537 ++++++++ .../plugin/proc_macro/host/aux_data.rs | 90 ++ .../compiler/plugin/proc_macro/host/derive.rs | 148 ++ .../compiler/plugin/proc_macro/host/inline.rs | 101 ++ .../compiler/plugin/proc_macro/host/mod.rs | 289 ++++ .../compiler/plugin/proc_macro/host/post.rs | 113 ++ scarb/src/compiler/plugin/proc_macro/mod.rs | 2 + .../compiler/plugin/proc_macro/repository.rs | 46 + scarb/tests/proc_macro_prebuilt.rs | 4 +- 11 files changed, 1329 insertions(+), 1228 deletions(-) delete mode 100644 scarb/src/compiler/plugin/proc_macro/host.rs create mode 100644 scarb/src/compiler/plugin/proc_macro/host/attribute.rs create mode 100644 scarb/src/compiler/plugin/proc_macro/host/aux_data.rs create mode 100644 scarb/src/compiler/plugin/proc_macro/host/derive.rs create mode 100644 scarb/src/compiler/plugin/proc_macro/host/inline.rs create mode 100644 scarb/src/compiler/plugin/proc_macro/host/mod.rs create mode 100644 scarb/src/compiler/plugin/proc_macro/host/post.rs create mode 100644 scarb/src/compiler/plugin/proc_macro/repository.rs diff --git a/scarb/src/compiler/plugin/proc_macro/ffi.rs b/scarb/src/compiler/plugin/proc_macro/ffi.rs index 3fbf7cecd..ae1089737 100644 --- a/scarb/src/compiler/plugin/proc_macro/ffi.rs +++ b/scarb/src/compiler/plugin/proc_macro/ffi.rs @@ -1,3 +1,4 @@ +use crate::compiler::plugin::proc_macro::compilation::SharedLibraryProvider; use crate::core::{Package, PackageId}; use anyhow::{Context, Result, ensure}; use cairo_lang_macro::{ @@ -16,8 +17,6 @@ use std::fmt::Debug; use std::slice; use crate::compiler::plugin::proc_macro::ProcMacroAuxData; -use crate::compiler::plugin::proc_macro::compilation::SharedLibraryProvider; - #[cfg(not(windows))] use libloading::os::unix::Symbol as RawSymbol; #[cfg(windows)] diff --git a/scarb/src/compiler/plugin/proc_macro/host.rs b/scarb/src/compiler/plugin/proc_macro/host.rs deleted file mode 100644 index eed44f510..000000000 --- a/scarb/src/compiler/plugin/proc_macro/host.rs +++ /dev/null @@ -1,1224 +0,0 @@ -use crate::compiler::plugin::proc_macro::compilation::SharedLibraryProvider; -use crate::compiler::plugin::proc_macro::{ - Expansion, ExpansionKind, ProcMacroInstance, TokenStreamBuilder, -}; -use crate::core::{Config, Package, PackageId, edition_variant}; -use anyhow::{Context, Result, bail, ensure}; -use cairo_lang_defs::ids::{ModuleItemId, TopLevelLanguageElementId}; -use cairo_lang_defs::patcher::{PatchBuilder, RewriteNode}; -use cairo_lang_defs::plugin::{ - DynGeneratedFileAuxData, GeneratedFileAuxData, MacroPlugin, MacroPluginMetadata, - PluginGeneratedFile, PluginResult, -}; -use 
cairo_lang_defs::plugin::{InlineMacroExprPlugin, InlinePluginResult, PluginDiagnostic}; -use cairo_lang_diagnostics::ToOption; -use cairo_lang_filesystem::db::Edition; -use cairo_lang_filesystem::ids::CodeMapping; -use cairo_lang_macro::{ - AllocationContext, AuxData, Diagnostic, FullPathMarker, ProcMacroResult, Severity, TokenStream, - TokenStreamMetadata, TokenTree, -}; -use cairo_lang_semantic::db::SemanticGroup; -use cairo_lang_semantic::items::attribute::SemanticQueryAttrs; -use cairo_lang_semantic::plugin::PluginSuite; -use cairo_lang_syntax::attribute::structured::{ - Attribute, AttributeArgVariant, AttributeStructurize, -}; -use cairo_lang_syntax::node::ast::{Expr, ImplItem, MaybeImplBody, MaybeTraitBody, PathSegment}; -use cairo_lang_syntax::node::db::SyntaxGroup; -use cairo_lang_syntax::node::helpers::QueryAttrs; -use cairo_lang_syntax::node::ids::SyntaxStablePtrId; -use cairo_lang_syntax::node::{Terminal, TypedStablePtr, TypedSyntaxNode, ast}; -use convert_case::{Case, Casing}; -use itertools::Itertools; -use scarb_stable_hash::short_hash; -use smol_str::SmolStr; -use std::any::Any; -use std::collections::{HashMap, HashSet}; -use std::fmt::Debug; -use std::sync::{Arc, OnceLock, RwLock}; -use std::vec::IntoIter; -use tracing::{debug, trace_span}; - -const FULL_PATH_MARKER_KEY: &str = "macro::full_path_marker"; -const DERIVE_ATTR: &str = "derive"; - -/// A Cairo compiler plugin controlling the procedural macro execution. -/// -/// This plugin decides which macro plugins (if any) should be applied to the processed AST item. -/// It then redirects the item to the appropriate macro plugin for code expansion. -#[derive(Debug)] -pub struct ProcMacroHostPlugin { - macros: Vec>, - full_path_markers: RwLock>>, -} - -impl ProcMacroHostPlugin { - pub fn macros(&self) -> &[Arc] { - &self.macros - } - - // NOTE: Required for proc macro server. `::declared_attributes` - // returns attributes **and** executables. In PMS, we only need the former because the latter is handled separately. 
- pub fn declared_attributes_without_executables(&self) -> Vec { - self.macros - .iter() - .flat_map(|instance| instance.declared_attributes()) - .collect() - } - - pub fn declared_inline_macros(&self) -> Vec { - self.macros - .iter() - .flat_map(|instance| instance.inline_macros()) - .collect() - } -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct ProcMacroId { - pub package_id: PackageId, - pub expansion: Expansion, -} - -impl ProcMacroId { - pub fn new(package_id: PackageId, expansion: Expansion) -> Self { - Self { - package_id, - expansion, - } - } -} - -#[derive(Clone, Debug, Eq, PartialEq)] -pub struct ProcMacroAuxData { - value: Vec, - macro_id: ProcMacroId, -} - -impl ProcMacroAuxData { - pub fn new(value: Vec, macro_id: ProcMacroId) -> Self { - Self { value, macro_id } - } -} - -impl From for AuxData { - fn from(data: ProcMacroAuxData) -> Self { - Self::new(data.value) - } -} - -#[derive(Debug, Clone, Default)] -pub struct EmittedAuxData(Vec); - -impl GeneratedFileAuxData for EmittedAuxData { - fn as_any(&self) -> &dyn Any { - self - } - - fn eq(&self, other: &dyn GeneratedFileAuxData) -> bool { - self.0 == other.as_any().downcast_ref::().unwrap().0 - } -} - -impl EmittedAuxData { - pub fn new(aux_data: ProcMacroAuxData) -> Self { - Self(vec![aux_data]) - } - - pub fn push(&mut self, aux_data: ProcMacroAuxData) { - self.0.push(aux_data); - } - - pub fn is_empty(&self) -> bool { - self.0.is_empty() - } -} - -impl IntoIterator for EmittedAuxData { - type Item = ProcMacroAuxData; - type IntoIter = IntoIter; - - fn into_iter(self) -> IntoIter { - self.0.into_iter() - } -} - -impl ProcMacroHostPlugin { - pub fn try_new(macros: Vec>) -> Result { - // Validate expansions. - let mut expansions = macros - .iter() - .flat_map(|m| { - m.get_expansions() - .iter() - .map(|e| ProcMacroId::new(m.package_id(), e.clone())) - .collect_vec() - }) - .collect::>(); - expansions.sort_unstable_by_key(|e| (e.expansion.name.clone(), e.package_id)); - ensure!( - expansions - .windows(2) - .all(|w| w[0].expansion.name != w[1].expansion.name), - "duplicate expansions defined for procedural macros: {duplicates}", - duplicates = expansions - .windows(2) - .filter(|w| w[0].expansion.name == w[1].expansion.name) - .map(|w| format!( - "{} ({} and {})", - w[0].expansion.name.as_str(), - w[0].package_id, - w[1].package_id - )) - .collect::>() - .join(", ") - ); - Ok(Self { - macros, - full_path_markers: RwLock::new(Default::default()), - }) - } - - fn expand_inner_attr( - &self, - db: &dyn SyntaxGroup, - item_ast: ast::ModuleItem, - ) -> InnerAttrExpansionResult { - let mut context = InnerAttrExpansionContext::new(self); - let mut item_builder = PatchBuilder::new(db, &item_ast); - let mut used_attr_names: HashSet = Default::default(); - let mut all_none = true; - let ctx = AllocationContext::default(); - - match item_ast.clone() { - ast::ModuleItem::Trait(trait_ast) => { - item_builder.add_node(trait_ast.attributes(db).as_syntax_node()); - item_builder.add_node(trait_ast.visibility(db).as_syntax_node()); - item_builder.add_node(trait_ast.trait_kw(db).as_syntax_node()); - item_builder.add_node(trait_ast.name(db).as_syntax_node()); - item_builder.add_node(trait_ast.generic_params(db).as_syntax_node()); - - // Parser attributes for inner functions. 
- match trait_ast.body(db) { - MaybeTraitBody::None(terminal) => { - item_builder.add_node(terminal.as_syntax_node()); - InnerAttrExpansionResult::None - } - MaybeTraitBody::Some(body) => { - item_builder.add_node(body.lbrace(db).as_syntax_node()); - - let item_list = body.items(db); - for item in item_list.elements(db).iter() { - let ast::TraitItem::Function(func) = item else { - item_builder.add_node(item.as_syntax_node()); - continue; - }; - - let mut token_stream_builder = TokenStreamBuilder::new(db); - let attrs = func.attributes(db).elements(db); - let found = - self.parse_attrs(db, &mut token_stream_builder, attrs, &ctx); - if let Some(name) = found.as_name() { - used_attr_names.insert(name); - } - token_stream_builder.add_node(func.declaration(db).as_syntax_node()); - token_stream_builder.add_node(func.body(db).as_syntax_node()); - let token_stream = token_stream_builder.build(&ctx); - - all_none = all_none - && self.do_expand_inner_attr( - db, - &mut context, - &mut item_builder, - found, - func, - token_stream, - ); - } - - item_builder.add_node(body.rbrace(db).as_syntax_node()); - - if all_none { - InnerAttrExpansionResult::None - } else { - let (code, mappings) = item_builder.build(); - InnerAttrExpansionResult::Some(context.into_result( - code, - mappings, - used_attr_names.into_iter().collect(), - )) - } - } - } - } - - ast::ModuleItem::Impl(impl_ast) => { - item_builder.add_node(impl_ast.attributes(db).as_syntax_node()); - item_builder.add_node(impl_ast.visibility(db).as_syntax_node()); - item_builder.add_node(impl_ast.impl_kw(db).as_syntax_node()); - item_builder.add_node(impl_ast.name(db).as_syntax_node()); - item_builder.add_node(impl_ast.generic_params(db).as_syntax_node()); - item_builder.add_node(impl_ast.of_kw(db).as_syntax_node()); - item_builder.add_node(impl_ast.trait_path(db).as_syntax_node()); - - match impl_ast.body(db) { - MaybeImplBody::None(terminal) => { - item_builder.add_node(terminal.as_syntax_node()); - InnerAttrExpansionResult::None - } - MaybeImplBody::Some(body) => { - item_builder.add_node(body.lbrace(db).as_syntax_node()); - - let items = body.items(db); - for item in items.elements(db) { - let ImplItem::Function(func) = item else { - item_builder.add_node(item.as_syntax_node()); - continue; - }; - - let mut token_stream_builder = TokenStreamBuilder::new(db); - let attrs = func.attributes(db).elements(db); - let found = - self.parse_attrs(db, &mut token_stream_builder, attrs, &ctx); - if let Some(name) = found.as_name() { - used_attr_names.insert(name); - } - token_stream_builder.add_node(func.visibility(db).as_syntax_node()); - token_stream_builder.add_node(func.declaration(db).as_syntax_node()); - token_stream_builder.add_node(func.body(db).as_syntax_node()); - let token_stream = token_stream_builder.build(&ctx); - all_none = all_none - && self.do_expand_inner_attr( - db, - &mut context, - &mut item_builder, - found, - &func, - token_stream, - ); - } - - item_builder.add_node(body.rbrace(db).as_syntax_node()); - - if all_none { - InnerAttrExpansionResult::None - } else { - let (code, mappings) = item_builder.build(); - InnerAttrExpansionResult::Some(context.into_result( - code, - mappings, - used_attr_names.into_iter().collect(), - )) - } - } - } - } - _ => InnerAttrExpansionResult::None, - } - } - - fn do_expand_inner_attr( - &self, - db: &dyn SyntaxGroup, - context: &mut InnerAttrExpansionContext<'_>, - item_builder: &mut PatchBuilder<'_>, - found: AttrExpansionFound, - func: &impl TypedSyntaxNode, - token_stream: TokenStream, - ) -> bool { - 
let mut all_none = true; - let (input, args, stable_ptr) = match found { - AttrExpansionFound::Last { - expansion, - args, - stable_ptr, - } => { - all_none = false; - (expansion, args, stable_ptr) - } - AttrExpansionFound::Some { - expansion, - args, - stable_ptr, - } => { - all_none = false; - (expansion, args, stable_ptr) - } - AttrExpansionFound::None => { - item_builder.add_node(func.as_syntax_node()); - return all_none; - } - }; - - let result = self.instance(input.package_id).generate_code( - input.expansion.name.clone(), - args, - token_stream.clone(), - ); - - let expanded = context.register_result(token_stream.to_string(), input, result, stable_ptr); - item_builder.add_modified(RewriteNode::Mapped { - origin: func.as_syntax_node().span(db), - node: Box::new(RewriteNode::Text(expanded.to_string())), - }); - - all_none - } - - /// Find first attribute procedural macros that should be expanded. - /// - /// Remove the attribute from the code. - fn parse_attribute( - &self, - db: &dyn SyntaxGroup, - item_ast: ast::ModuleItem, - ctx: &AllocationContext, - ) -> (AttrExpansionFound, TokenStream) { - let mut token_stream_builder = TokenStreamBuilder::new(db); - let input = match item_ast.clone() { - ast::ModuleItem::Trait(trait_ast) => { - let attrs = trait_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); - token_stream_builder.add_node(trait_ast.visibility(db).as_syntax_node()); - token_stream_builder.add_node(trait_ast.trait_kw(db).as_syntax_node()); - token_stream_builder.add_node(trait_ast.name(db).as_syntax_node()); - token_stream_builder.add_node(trait_ast.generic_params(db).as_syntax_node()); - token_stream_builder.add_node(trait_ast.body(db).as_syntax_node()); - expansion - } - ast::ModuleItem::Impl(impl_ast) => { - let attrs = impl_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); - token_stream_builder.add_node(impl_ast.visibility(db).as_syntax_node()); - token_stream_builder.add_node(impl_ast.impl_kw(db).as_syntax_node()); - token_stream_builder.add_node(impl_ast.name(db).as_syntax_node()); - token_stream_builder.add_node(impl_ast.generic_params(db).as_syntax_node()); - token_stream_builder.add_node(impl_ast.of_kw(db).as_syntax_node()); - token_stream_builder.add_node(impl_ast.trait_path(db).as_syntax_node()); - token_stream_builder.add_node(impl_ast.body(db).as_syntax_node()); - expansion - } - ast::ModuleItem::Module(module_ast) => { - let attrs = module_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); - token_stream_builder.add_node(module_ast.visibility(db).as_syntax_node()); - token_stream_builder.add_node(module_ast.module_kw(db).as_syntax_node()); - token_stream_builder.add_node(module_ast.name(db).as_syntax_node()); - token_stream_builder.add_node(module_ast.body(db).as_syntax_node()); - expansion - } - ast::ModuleItem::FreeFunction(free_func_ast) => { - let attrs = free_func_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); - token_stream_builder.add_node(free_func_ast.visibility(db).as_syntax_node()); - token_stream_builder.add_node(free_func_ast.declaration(db).as_syntax_node()); - token_stream_builder.add_node(free_func_ast.body(db).as_syntax_node()); - expansion - } - ast::ModuleItem::ExternFunction(extern_func_ast) => { - let attrs = extern_func_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, 
&mut token_stream_builder, attrs, ctx); - token_stream_builder.add_node(extern_func_ast.visibility(db).as_syntax_node()); - token_stream_builder.add_node(extern_func_ast.extern_kw(db).as_syntax_node()); - token_stream_builder.add_node(extern_func_ast.declaration(db).as_syntax_node()); - token_stream_builder.add_node(extern_func_ast.semicolon(db).as_syntax_node()); - expansion - } - ast::ModuleItem::ExternType(extern_type_ast) => { - let attrs = extern_type_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); - token_stream_builder.add_node(extern_type_ast.visibility(db).as_syntax_node()); - token_stream_builder.add_node(extern_type_ast.extern_kw(db).as_syntax_node()); - token_stream_builder.add_node(extern_type_ast.type_kw(db).as_syntax_node()); - token_stream_builder.add_node(extern_type_ast.name(db).as_syntax_node()); - token_stream_builder.add_node(extern_type_ast.generic_params(db).as_syntax_node()); - token_stream_builder.add_node(extern_type_ast.semicolon(db).as_syntax_node()); - expansion - } - ast::ModuleItem::Struct(struct_ast) => { - let attrs = struct_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); - token_stream_builder.add_node(struct_ast.visibility(db).as_syntax_node()); - token_stream_builder.add_node(struct_ast.struct_kw(db).as_syntax_node()); - token_stream_builder.add_node(struct_ast.name(db).as_syntax_node()); - token_stream_builder.add_node(struct_ast.generic_params(db).as_syntax_node()); - token_stream_builder.add_node(struct_ast.lbrace(db).as_syntax_node()); - token_stream_builder.add_node(struct_ast.members(db).as_syntax_node()); - token_stream_builder.add_node(struct_ast.rbrace(db).as_syntax_node()); - expansion - } - ast::ModuleItem::Enum(enum_ast) => { - let attrs = enum_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); - token_stream_builder.add_node(enum_ast.visibility(db).as_syntax_node()); - token_stream_builder.add_node(enum_ast.enum_kw(db).as_syntax_node()); - token_stream_builder.add_node(enum_ast.name(db).as_syntax_node()); - token_stream_builder.add_node(enum_ast.generic_params(db).as_syntax_node()); - token_stream_builder.add_node(enum_ast.lbrace(db).as_syntax_node()); - token_stream_builder.add_node(enum_ast.variants(db).as_syntax_node()); - token_stream_builder.add_node(enum_ast.rbrace(db).as_syntax_node()); - expansion - } - _ => AttrExpansionFound::None, - }; - let token_stream = token_stream_builder.build(ctx); - (input, token_stream) - } - - fn parse_attrs( - &self, - db: &dyn SyntaxGroup, - builder: &mut TokenStreamBuilder<'_>, - attrs: Vec, - ctx: &AllocationContext, - ) -> AttrExpansionFound { - // This function parses attributes of the item, - // checking if those attributes correspond to a procedural macro that should be fired. - // The proc macro attribute found is removed from attributes list, - // while other attributes are appended to the `PathBuilder` passed as an argument. - - // Note this function does not affect the executable attributes, - // as it only pulls `ExpansionKind::Attr` from the plugin. - // This means that executable attributes will neither be removed from the item, - // nor will they cause the item to be rewritten. - let mut expansion = None; - let mut last = true; - for attr in attrs { - // We ensure that this flag is changed *after* the expansion is found. 
- if last { - let structured_attr = attr.clone().structurize(db); - let found = self.find_expansion(&Expansion::new( - structured_attr.id.clone(), - ExpansionKind::Attr, - )); - if let Some(found) = found { - if expansion.is_none() { - let mut args_builder = TokenStreamBuilder::new(db); - args_builder.add_node(attr.arguments(db).as_syntax_node()); - let args = args_builder.build(ctx); - expansion = Some((found, args, attr.stable_ptr().untyped())); - // Do not add the attribute for found expansion. - continue; - } else { - last = false; - } - } - } - builder.add_node(attr.as_syntax_node()); - } - match (expansion, last) { - (Some((expansion, args, stable_ptr)), true) => AttrExpansionFound::Last { - expansion, - args, - stable_ptr, - }, - (Some((expansion, args, stable_ptr)), false) => AttrExpansionFound::Some { - expansion, - args, - stable_ptr, - }, - (None, _) => AttrExpansionFound::None, - } - } - - /// Handle `#[derive(...)]` attribute. - /// - /// Returns a list of expansions that this plugin should apply. - fn parse_derive(&self, db: &dyn SyntaxGroup, item_ast: ast::ModuleItem) -> Vec { - let attrs = match item_ast { - ast::ModuleItem::Struct(struct_ast) => Some(struct_ast.query_attr(db, DERIVE_ATTR)), - ast::ModuleItem::Enum(enum_ast) => Some(enum_ast.query_attr(db, DERIVE_ATTR)), - _ => None, - }; - - attrs - .unwrap_or_default() - .iter() - .map(|attr| attr.clone().structurize(db)) - .flat_map(|attr| attr.args.into_iter()) - .filter_map(|attr| { - let AttributeArgVariant::Unnamed(value) = attr.clone().variant else { - return None; - }; - let Expr::Path(path) = value else { - return None; - }; - let path = path.elements(db); - let path = path.last()?; - let PathSegment::Simple(segment) = path else { - return None; - }; - let ident = segment.ident(db); - let value = ident.text(db).to_string(); - - self.find_expansion(&Expansion::new( - value.to_case(Case::Snake), - ExpansionKind::Derive, - )) - }) - .collect_vec() - } - - fn expand_derives( - &self, - db: &dyn SyntaxGroup, - item_ast: ast::ModuleItem, - stream_metadata: TokenStreamMetadata, - ) -> Option { - let stable_ptr = item_ast.clone().stable_ptr().untyped(); - let mut token_stream_builder = TokenStreamBuilder::new(db); - token_stream_builder.add_node(item_ast.as_syntax_node()); - token_stream_builder.with_metadata(stream_metadata.clone()); - let mut aux_data = EmittedAuxData::default(); - let mut all_diagnostics: Vec = Vec::new(); - - // All derives to be applied. - let derives = self.parse_derive(db, item_ast.clone()); - let any_derives = !derives.is_empty(); - - let ctx = AllocationContext::default(); - let mut derived_code = PatchBuilder::new(db, &item_ast); - for derive in derives.iter() { - let token_stream = token_stream_builder.build(&ctx); - let result = self.instance(derive.package_id).generate_code( - derive.expansion.name.clone(), - TokenStream::empty(), - token_stream, - ); - - // Register diagnostics. - all_diagnostics.extend(result.diagnostics); - - // Register aux data. - if let Some(new_aux_data) = result.aux_data { - aux_data.push(ProcMacroAuxData::new( - new_aux_data.into(), - ProcMacroId::new(derive.package_id, derive.expansion.clone()), - )); - } - - if result.token_stream.is_empty() { - // No code has been generated. - // We do not need to do anything. 
- continue; - } - - for token in result.token_stream.tokens { - match token { - TokenTree::Ident(token) => { - derived_code.add_str(token.content.as_ref()); - } - } - } - } - - if any_derives { - let derived_code = derived_code.build().0; - return Some(PluginResult { - code: if derived_code.is_empty() { - None - } else { - let msg = if derives.len() == 1 { - "the derive macro" - } else { - "one of the derive macros" - }; - let derive_names = derives - .iter() - .map(|derive| derive.expansion.name.to_string()) - .join("`, `"); - let note = format!("this error originates in {msg}: `{derive_names}`"); - Some(PluginGeneratedFile { - name: "proc_macro_derive".into(), - code_mappings: Vec::new(), - content: derived_code, - aux_data: if aux_data.is_empty() { - None - } else { - Some(DynGeneratedFileAuxData::new(aux_data)) - }, - diagnostics_note: Some(note), - }) - }, - diagnostics: into_cairo_diagnostics(all_diagnostics, stable_ptr), - // Note that we don't remove the original item here, unlike for attributes. - // We do not add the original code to the generated file either. - remove_original_item: false, - }); - } - - None - } - - fn expand_attribute( - &self, - input: ProcMacroId, - last: bool, - args: TokenStream, - token_stream: TokenStream, - stable_ptr: SyntaxStablePtrId, - ) -> PluginResult { - let original = token_stream.to_string(); - let result = self.instance(input.package_id).generate_code( - input.expansion.name.clone(), - args, - token_stream, - ); - - // Handle token stream. - if result.token_stream.is_empty() { - // Remove original code - return PluginResult { - diagnostics: into_cairo_diagnostics(result.diagnostics, stable_ptr), - code: None, - remove_original_item: true, - }; - } - - // Full path markers require code modification. - self.register_full_path_markers(input.package_id, result.full_path_markers.clone()); - - // This is a minor optimization. - // If the expanded macro attribute is the only one that will be expanded by `ProcMacroHost` - // in this `generate_code` call (i.e. all the other macro attributes has been expanded by - // previous calls), and the expansion did not produce any changes, we can skip rewriting the - // expanded node by simply returning no generated code, and leaving the original item as is. - // However, if we have other macro attributes to expand, we must rewrite the node even if no - // changes have been produced, so that we can parse the attributes once again and expand them. - // In essence, `code: None, remove_original_item: false` means `ProcMacroHost` will not be - // called again for this AST item. - // This optimization limits the number of generated nodes a bit. 
- if last && result.aux_data.is_none() && original == result.token_stream.to_string() { - return PluginResult { - code: None, - remove_original_item: false, - diagnostics: into_cairo_diagnostics(result.diagnostics, stable_ptr), - }; - } - - let file_name = format!("proc_{}", input.expansion.name); - let content = result.token_stream.to_string(); - PluginResult { - code: Some(PluginGeneratedFile { - name: file_name.into(), - code_mappings: Vec::new(), - content, - diagnostics_note: Some(format!( - "this error originates in the attribute macro: `{}`", - input.expansion.name - )), - aux_data: result.aux_data.map(|new_aux_data| { - DynGeneratedFileAuxData::new(EmittedAuxData::new(ProcMacroAuxData::new( - new_aux_data.into(), - input, - ))) - }), - }), - diagnostics: into_cairo_diagnostics(result.diagnostics, stable_ptr), - remove_original_item: true, - } - } - - fn find_expansion(&self, expansion: &Expansion) -> Option { - self.macros - .iter() - .find(|m| m.get_expansions().contains(expansion)) - .map(|m| m.package_id()) - .map(|package_id| ProcMacroId::new(package_id, expansion.clone())) - } - - pub fn build_plugin_suite(macro_host: Arc) -> PluginSuite { - let mut suite = PluginSuite::default(); - // Register inline macro plugins. - for proc_macro in ¯o_host.macros { - let expansions = proc_macro - .get_expansions() - .iter() - .filter(|exp| matches!(exp.kind, ExpansionKind::Inline)); - for expansion in expansions { - let plugin = Arc::new(ProcMacroInlinePlugin::new( - proc_macro.clone(), - expansion.clone(), - )); - suite.add_inline_macro_plugin_ex(expansion.name.as_str(), plugin); - } - } - // Register procedural macro host plugin. - suite.add_plugin_ex(macro_host); - suite - } - - #[tracing::instrument(level = "trace", skip_all)] - pub fn post_process(&self, db: &dyn SemanticGroup) -> Result<()> { - let markers = self.collect_full_path_markers(db); - - let aux_data = self.collect_aux_data(db); - for instance in self.macros.iter() { - let _ = trace_span!( - "post_process_callback", - instance = %instance.package_id() - ) - .entered(); - let instance_markers = self - .full_path_markers - .read() - .unwrap() - .get(&instance.package_id()) - .cloned() - .unwrap_or_default(); - let markers_for_instance = markers - .iter() - .filter(|(key, _)| instance_markers.contains(key)) - .map(|(key, full_path)| FullPathMarker { - key: key.clone(), - full_path: full_path.clone(), - }) - .collect_vec(); - let data = aux_data - .get(&instance.package_id()) - .cloned() - .unwrap_or_default(); - debug!("calling post processing callback with: {data:?}"); - instance.post_process_callback(data.clone(), markers_for_instance); - } - Ok(()) - } - - fn collect_full_path_markers(&self, db: &dyn SemanticGroup) -> HashMap { - let mut markers: HashMap = HashMap::new(); - // FULL_PATH_MARKER_KEY - for crate_id in db.crates() { - let modules = db.crate_modules(crate_id); - for module_id in modules.iter() { - let Ok(module_items) = db.module_items(*module_id) else { - continue; - }; - for item_id in module_items.iter() { - let attr = match item_id { - ModuleItemId::Struct(id) => { - id.query_attr(db, FULL_PATH_MARKER_KEY).to_option() - } - ModuleItemId::Enum(id) => { - id.query_attr(db, FULL_PATH_MARKER_KEY).to_option() - } - ModuleItemId::FreeFunction(id) => { - id.query_attr(db, FULL_PATH_MARKER_KEY).to_option() - } - _ => None, - }; - - let keys = attr - .unwrap_or_default() - .into_iter() - .filter_map(|attr| Self::extract_key(db, attr)) - .collect_vec(); - let full_path = item_id.full_path(db.upcast()); - for key in 
keys { - markers.insert(key, full_path.clone()); - } - } - } - } - markers - } - - fn extract_key(db: &dyn SemanticGroup, attr: Attribute) -> Option { - if attr.id != FULL_PATH_MARKER_KEY { - return None; - } - - for arg in attr.args.clone() { - if let AttributeArgVariant::Unnamed(Expr::String(s)) = arg.variant { - return s.string_value(db.upcast()); - } - } - - None - } - - fn collect_aux_data( - &self, - db: &dyn SemanticGroup, - ) -> HashMap> { - let mut data = Vec::new(); - for crate_id in db.crates() { - let crate_modules = db.crate_modules(crate_id); - for module in crate_modules.iter() { - let file_infos = db.module_generated_file_aux_data(*module); - if let Ok(file_infos) = file_infos { - for file_info in file_infos.iter() { - let aux_data = file_info - .as_ref() - .and_then(|ad| ad.as_any().downcast_ref::()); - if let Some(aux_data) = aux_data { - data.extend(aux_data.clone().into_iter()); - } - } - } - } - } - data.into_iter() - .into_group_map_by(|d| d.macro_id.package_id) - } - - pub fn instance(&self, package_id: PackageId) -> &ProcMacroInstance { - self.macros - .iter() - .find(|m| m.package_id() == package_id) - .expect("procedural macro must be registered in proc macro host") - } - - fn register_full_path_markers(&self, package_id: PackageId, markers: Vec) { - self.full_path_markers - .write() - .unwrap() - .entry(package_id) - .and_modify(|markers| markers.extend(markers.clone())) - .or_insert(markers); - } - - fn calculate_metadata( - db: &dyn SyntaxGroup, - item_ast: ast::ModuleItem, - edition: Edition, - ) -> TokenStreamMetadata { - let stable_ptr = item_ast.clone().stable_ptr().untyped(); - let file_path = stable_ptr.file_id(db).full_path(db.upcast()); - let file_id = short_hash(file_path.clone()); - let edition = edition_variant(edition); - TokenStreamMetadata::new(file_path, file_id, edition) - } -} - -struct InnerAttrExpansionContext<'a> { - host: &'a ProcMacroHostPlugin, - // Metadata returned for expansions. 
- diagnostics: Vec, - aux_data: EmittedAuxData, - any_changed: bool, -} - -impl<'a> InnerAttrExpansionContext<'a> { - pub fn new<'b: 'a>(host: &'b ProcMacroHostPlugin) -> Self { - Self { - diagnostics: Vec::new(), - aux_data: EmittedAuxData::default(), - any_changed: false, - host, - } - } - - pub fn register_result( - &mut self, - original: String, - input: ProcMacroId, - result: ProcMacroResult, - stable_ptr: SyntaxStablePtrId, - ) -> String { - let result_str = result.token_stream.to_string(); - let changed = result_str != original; - - if changed { - self.host - .register_full_path_markers(input.package_id, result.full_path_markers.clone()); - } - - self.diagnostics - .extend(into_cairo_diagnostics(result.diagnostics, stable_ptr)); - - if let Some(new_aux_data) = result.aux_data { - self.aux_data - .push(ProcMacroAuxData::new(new_aux_data.into(), input)); - } - - self.any_changed = self.any_changed || changed; - - result_str - } - pub fn into_result( - self, - expanded: String, - code_mappings: Vec, - attr_names: Vec, - ) -> PluginResult { - let msg = if attr_names.len() == 1 { - "the attribute macro" - } else { - "one of the attribute macros" - }; - let derive_names = attr_names.iter().map(ToString::to_string).join("`, `"); - let note = format!("this error originates in {msg}: `{derive_names}`"); - PluginResult { - code: Some(PluginGeneratedFile { - name: "proc_attr_inner".into(), - content: expanded, - aux_data: if self.aux_data.is_empty() { - None - } else { - Some(DynGeneratedFileAuxData::new(self.aux_data)) - }, - code_mappings, - diagnostics_note: Some(note), - }), - diagnostics: self.diagnostics, - remove_original_item: true, - } - } -} - -enum InnerAttrExpansionResult { - None, - Some(PluginResult), -} - -impl MacroPlugin for ProcMacroHostPlugin { - fn generate_code( - &self, - db: &dyn SyntaxGroup, - item_ast: ast::ModuleItem, - metadata: &MacroPluginMetadata<'_>, - ) -> PluginResult { - let stream_metadata = Self::calculate_metadata(db, item_ast.clone(), metadata.edition); - - // Handle inner functions. - if let InnerAttrExpansionResult::Some(result) = self.expand_inner_attr(db, item_ast.clone()) - { - return result; - } - - // Expand first attribute. - // Note that we only expand the first attribute, as we assume that the rest of the attributes - // will be handled by a subsequent call to this function. - let ctx = AllocationContext::default(); - let (input, body) = self.parse_attribute(db, item_ast.clone(), &ctx); - - if let Some(result) = match input { - AttrExpansionFound::Last { - expansion, - args, - stable_ptr, - } => Some((expansion, args, stable_ptr, true)), - AttrExpansionFound::Some { - expansion, - args, - stable_ptr, - } => Some((expansion, args, stable_ptr, false)), - AttrExpansionFound::None => None, - } - .map(|(expansion, args, stable_ptr, last)| { - let token_stream = body.with_metadata(stream_metadata.clone()); - self.expand_attribute(expansion, last, args, token_stream, stable_ptr) - }) { - return result; - } - - // Expand all derives. - // Note that all proc macro attributes should be already expanded at this point. - if let Some(result) = self.expand_derives(db, item_ast.clone(), stream_metadata.clone()) { - return result; - } - - // No expansions can be applied. 
- PluginResult { - code: None, - diagnostics: Vec::new(), - remove_original_item: false, - } - } - - fn declared_attributes(&self) -> Vec { - self.macros - .iter() - .flat_map(|m| m.declared_attributes_and_executables()) - .chain(vec![FULL_PATH_MARKER_KEY.to_string()]) - .collect() - } - - fn declared_derives(&self) -> Vec { - self.macros - .iter() - .flat_map(|m| m.declared_derives()) - .map(|s| s.to_case(Case::UpperCamel)) - .collect() - } - - fn executable_attributes(&self) -> Vec { - self.macros - .iter() - .flat_map(|m| m.executable_attributes()) - .collect() - } -} - -enum AttrExpansionFound { - Some { - expansion: ProcMacroId, - args: TokenStream, - stable_ptr: SyntaxStablePtrId, - }, - None, - Last { - expansion: ProcMacroId, - args: TokenStream, - stable_ptr: SyntaxStablePtrId, - }, -} -impl AttrExpansionFound { - pub fn as_name(&self) -> Option { - match self { - AttrExpansionFound::Some { expansion, .. } - | AttrExpansionFound::Last { expansion, .. } => Some(expansion.expansion.name.clone()), - AttrExpansionFound::None => None, - } - } -} - -/// A Cairo compiler inline macro plugin controlling the inline procedural macro execution. -/// -/// This plugin represents a single expansion capable of handling inline procedural macros. -/// The plugin triggers code expansion in a corresponding procedural macro instance. -#[derive(Debug)] -pub struct ProcMacroInlinePlugin { - instance: Arc, - expansion: Expansion, - doc: OnceLock>, -} - -impl ProcMacroInlinePlugin { - pub fn new(instance: Arc, expansion: Expansion) -> Self { - assert!(instance.get_expansions().contains(&expansion)); - Self { - instance, - expansion, - doc: Default::default(), - } - } - - pub fn name(&self) -> &str { - self.expansion.name.as_str() - } - - fn instance(&self) -> &ProcMacroInstance { - &self.instance - } -} - -impl InlineMacroExprPlugin for ProcMacroInlinePlugin { - fn generate_code( - &self, - db: &dyn SyntaxGroup, - syntax: &ast::ExprInlineMacro, - _metadata: &MacroPluginMetadata<'_>, - ) -> InlinePluginResult { - let ctx = AllocationContext::default(); - let stable_ptr = syntax.clone().stable_ptr().untyped(); - let arguments = syntax.arguments(db); - let mut token_stream_builder = TokenStreamBuilder::new(db); - token_stream_builder.add_node(arguments.as_syntax_node()); - let token_stream = token_stream_builder.build(&ctx); - let result = self.instance().generate_code( - self.expansion.name.clone(), - TokenStream::empty(), - token_stream, - ); - // Handle diagnostics. 
- let diagnostics = into_cairo_diagnostics(result.diagnostics, stable_ptr); - let token_stream = result.token_stream.clone(); - if token_stream.is_empty() { - // Remove original code - InlinePluginResult { - code: None, - diagnostics, - } - } else { - // Replace - let aux_data = result.aux_data.map(|aux_data| { - let aux_data = ProcMacroAuxData::new( - aux_data.into(), - ProcMacroId::new(self.instance.package_id(), self.expansion.clone()), - ); - let mut emitted = EmittedAuxData::default(); - emitted.push(aux_data); - DynGeneratedFileAuxData::new(emitted) - }); - let content = token_stream.to_string(); - InlinePluginResult { - code: Some(PluginGeneratedFile { - name: "inline_proc_macro".into(), - code_mappings: Vec::new(), - content, - aux_data, - diagnostics_note: Some(format!( - "this error originates in the inline macro: `{}`", - self.expansion.name - )), - }), - diagnostics, - } - } - } - - fn documentation(&self) -> Option { - self.doc - .get_or_init(|| self.instance().doc(self.expansion.name.clone())) - .clone() - } -} - -fn into_cairo_diagnostics( - diagnostics: Vec, - stable_ptr: SyntaxStablePtrId, -) -> Vec { - diagnostics - .into_iter() - .map(|diag| PluginDiagnostic { - stable_ptr, - message: diag.message, - severity: match diag.severity { - Severity::Error => cairo_lang_diagnostics::Severity::Error, - Severity::Warning => cairo_lang_diagnostics::Severity::Warning, - }, - }) - .collect_vec() -} - -/// A global storage for dynamically-loaded procedural macros. -/// Loads dynamic shared libraries and hides them beside [`ProcMacroInstance`]. -/// Guarantees that every library is loaded exactly once, -/// but does not prevent loading multiple versions of the same library. -#[derive(Default)] -pub struct ProcMacroRepository { - /// A mapping between the [`PackageId`] of the package which defines the plugin - /// and the [`ProcMacroInstance`] holding the underlying shared library. - macros: RwLock>>, -} - -impl ProcMacroRepository { - /// Returns the [`ProcMacroInstance`] representing the procedural macros defined in the [`Package`]. - /// Loads the underlying shared library if it has not been loaded yet. 
- pub fn get_or_load(&self, package: Package, config: &Config) -> Result> { - let Ok(macros) = self.macros.read() else { - bail!("could not get a read access to the ProcMacroRepository"); - }; - - if let Some(instance) = macros.get(&package.id) { - return Ok(instance.clone()); - } - - drop(macros); - - let Ok(mut macros) = self.macros.write() else { - bail!("could not get a write access to the ProcMacroRepository"); - }; - - let lib_path = package - .shared_lib_path(config) - .context("could not resolve shared library path")?; - - let instance = Arc::new(ProcMacroInstance::try_new(package.id, lib_path)?); - macros.insert(package.id, instance.clone()); - - Ok(instance) - } -} diff --git a/scarb/src/compiler/plugin/proc_macro/host/attribute.rs b/scarb/src/compiler/plugin/proc_macro/host/attribute.rs new file mode 100644 index 000000000..e452153ce --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/host/attribute.rs @@ -0,0 +1,537 @@ +use crate::compiler::plugin::proc_macro::host::aux_data::{EmittedAuxData, ProcMacroAuxData}; +use crate::compiler::plugin::proc_macro::host::into_cairo_diagnostics; +use crate::compiler::plugin::proc_macro::{ + Expansion, ExpansionKind, ProcMacroHostPlugin, ProcMacroId, TokenStreamBuilder, +}; +use cairo_lang_defs::patcher::{PatchBuilder, RewriteNode}; +use cairo_lang_defs::plugin::PluginDiagnostic; +use cairo_lang_defs::plugin::{DynGeneratedFileAuxData, PluginGeneratedFile, PluginResult}; +use cairo_lang_filesystem::ids::CodeMapping; +use cairo_lang_macro::{AllocationContext, ProcMacroResult, TokenStream}; +use cairo_lang_syntax::attribute::structured::AttributeStructurize; +use cairo_lang_syntax::node::ast::{ImplItem, MaybeImplBody, MaybeTraitBody}; +use cairo_lang_syntax::node::db::SyntaxGroup; +use cairo_lang_syntax::node::ids::SyntaxStablePtrId; +use cairo_lang_syntax::node::{TypedStablePtr, TypedSyntaxNode, ast}; +use itertools::Itertools; +use smol_str::SmolStr; +use std::collections::HashSet; + +impl ProcMacroHostPlugin { + pub(crate) fn expand_inner_attr( + &self, + db: &dyn SyntaxGroup, + item_ast: ast::ModuleItem, + ) -> InnerAttrExpansionResult { + let mut context = InnerAttrExpansionContext::new(self); + let mut item_builder = PatchBuilder::new(db, &item_ast); + let mut used_attr_names: HashSet = Default::default(); + let mut all_none = true; + let ctx = AllocationContext::default(); + + match item_ast.clone() { + ast::ModuleItem::Trait(trait_ast) => { + item_builder.add_node(trait_ast.attributes(db).as_syntax_node()); + item_builder.add_node(trait_ast.visibility(db).as_syntax_node()); + item_builder.add_node(trait_ast.trait_kw(db).as_syntax_node()); + item_builder.add_node(trait_ast.name(db).as_syntax_node()); + item_builder.add_node(trait_ast.generic_params(db).as_syntax_node()); + + // Parser attributes for inner functions. 
+ match trait_ast.body(db) { + MaybeTraitBody::None(terminal) => { + item_builder.add_node(terminal.as_syntax_node()); + InnerAttrExpansionResult::None + } + MaybeTraitBody::Some(body) => { + item_builder.add_node(body.lbrace(db).as_syntax_node()); + + let item_list = body.items(db); + for item in item_list.elements(db).iter() { + let ast::TraitItem::Function(func) = item else { + item_builder.add_node(item.as_syntax_node()); + continue; + }; + + let mut token_stream_builder = TokenStreamBuilder::new(db); + let attrs = func.attributes(db).elements(db); + let found = + self.parse_attrs(db, &mut token_stream_builder, attrs, &ctx); + if let Some(name) = found.as_name() { + used_attr_names.insert(name); + } + token_stream_builder.add_node(func.declaration(db).as_syntax_node()); + token_stream_builder.add_node(func.body(db).as_syntax_node()); + let token_stream = token_stream_builder.build(&ctx); + + all_none = all_none + && self.do_expand_inner_attr( + db, + &mut context, + &mut item_builder, + found, + func, + token_stream, + ); + } + + item_builder.add_node(body.rbrace(db).as_syntax_node()); + + if all_none { + InnerAttrExpansionResult::None + } else { + let (code, mappings) = item_builder.build(); + InnerAttrExpansionResult::Some(context.into_result( + code, + mappings, + used_attr_names.into_iter().collect(), + )) + } + } + } + } + + ast::ModuleItem::Impl(impl_ast) => { + item_builder.add_node(impl_ast.attributes(db).as_syntax_node()); + item_builder.add_node(impl_ast.visibility(db).as_syntax_node()); + item_builder.add_node(impl_ast.impl_kw(db).as_syntax_node()); + item_builder.add_node(impl_ast.name(db).as_syntax_node()); + item_builder.add_node(impl_ast.generic_params(db).as_syntax_node()); + item_builder.add_node(impl_ast.of_kw(db).as_syntax_node()); + item_builder.add_node(impl_ast.trait_path(db).as_syntax_node()); + + match impl_ast.body(db) { + MaybeImplBody::None(terminal) => { + item_builder.add_node(terminal.as_syntax_node()); + InnerAttrExpansionResult::None + } + MaybeImplBody::Some(body) => { + item_builder.add_node(body.lbrace(db).as_syntax_node()); + + let items = body.items(db); + for item in items.elements(db) { + let ImplItem::Function(func) = item else { + item_builder.add_node(item.as_syntax_node()); + continue; + }; + + let mut token_stream_builder = TokenStreamBuilder::new(db); + let attrs = func.attributes(db).elements(db); + let found = + self.parse_attrs(db, &mut token_stream_builder, attrs, &ctx); + if let Some(name) = found.as_name() { + used_attr_names.insert(name); + } + token_stream_builder.add_node(func.visibility(db).as_syntax_node()); + token_stream_builder.add_node(func.declaration(db).as_syntax_node()); + token_stream_builder.add_node(func.body(db).as_syntax_node()); + let token_stream = token_stream_builder.build(&ctx); + all_none = all_none + && self.do_expand_inner_attr( + db, + &mut context, + &mut item_builder, + found, + &func, + token_stream, + ); + } + + item_builder.add_node(body.rbrace(db).as_syntax_node()); + + if all_none { + InnerAttrExpansionResult::None + } else { + let (code, mappings) = item_builder.build(); + InnerAttrExpansionResult::Some(context.into_result( + code, + mappings, + used_attr_names.into_iter().collect(), + )) + } + } + } + } + _ => InnerAttrExpansionResult::None, + } + } + + fn do_expand_inner_attr( + &self, + db: &dyn SyntaxGroup, + context: &mut InnerAttrExpansionContext<'_>, + item_builder: &mut PatchBuilder<'_>, + found: AttrExpansionFound, + func: &impl TypedSyntaxNode, + token_stream: TokenStream, + ) -> bool { + 
let mut all_none = true; + let (input, args, stable_ptr) = match found { + AttrExpansionFound::Last { + expansion, + args, + stable_ptr, + } => { + all_none = false; + (expansion, args, stable_ptr) + } + AttrExpansionFound::Some { + expansion, + args, + stable_ptr, + } => { + all_none = false; + (expansion, args, stable_ptr) + } + AttrExpansionFound::None => { + item_builder.add_node(func.as_syntax_node()); + return all_none; + } + }; + + let result = self.instance(input.package_id).generate_code( + input.expansion.name.clone(), + args, + token_stream.clone(), + ); + + let expanded = context.register_result(token_stream.to_string(), input, result, stable_ptr); + item_builder.add_modified(RewriteNode::Mapped { + origin: func.as_syntax_node().span(db), + node: Box::new(RewriteNode::Text(expanded.to_string())), + }); + + all_none + } + + /// Find first attribute procedural macros that should be expanded. + /// + /// Remove the attribute from the code. + pub(crate) fn parse_attribute( + &self, + db: &dyn SyntaxGroup, + item_ast: ast::ModuleItem, + ctx: &AllocationContext, + ) -> (AttrExpansionFound, TokenStream) { + let mut token_stream_builder = TokenStreamBuilder::new(db); + let input = match item_ast.clone() { + ast::ModuleItem::Trait(trait_ast) => { + let attrs = trait_ast.attributes(db).elements(db); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); + token_stream_builder.add_node(trait_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(trait_ast.trait_kw(db).as_syntax_node()); + token_stream_builder.add_node(trait_ast.name(db).as_syntax_node()); + token_stream_builder.add_node(trait_ast.generic_params(db).as_syntax_node()); + token_stream_builder.add_node(trait_ast.body(db).as_syntax_node()); + expansion + } + ast::ModuleItem::Impl(impl_ast) => { + let attrs = impl_ast.attributes(db).elements(db); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); + token_stream_builder.add_node(impl_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.impl_kw(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.name(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.generic_params(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.of_kw(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.trait_path(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.body(db).as_syntax_node()); + expansion + } + ast::ModuleItem::Module(module_ast) => { + let attrs = module_ast.attributes(db).elements(db); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); + token_stream_builder.add_node(module_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(module_ast.module_kw(db).as_syntax_node()); + token_stream_builder.add_node(module_ast.name(db).as_syntax_node()); + token_stream_builder.add_node(module_ast.body(db).as_syntax_node()); + expansion + } + ast::ModuleItem::FreeFunction(free_func_ast) => { + let attrs = free_func_ast.attributes(db).elements(db); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); + token_stream_builder.add_node(free_func_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(free_func_ast.declaration(db).as_syntax_node()); + token_stream_builder.add_node(free_func_ast.body(db).as_syntax_node()); + expansion + } + ast::ModuleItem::ExternFunction(extern_func_ast) => { + let attrs = extern_func_ast.attributes(db).elements(db); + let expansion = 
self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); + token_stream_builder.add_node(extern_func_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(extern_func_ast.extern_kw(db).as_syntax_node()); + token_stream_builder.add_node(extern_func_ast.declaration(db).as_syntax_node()); + token_stream_builder.add_node(extern_func_ast.semicolon(db).as_syntax_node()); + expansion + } + ast::ModuleItem::ExternType(extern_type_ast) => { + let attrs = extern_type_ast.attributes(db).elements(db); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); + token_stream_builder.add_node(extern_type_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(extern_type_ast.extern_kw(db).as_syntax_node()); + token_stream_builder.add_node(extern_type_ast.type_kw(db).as_syntax_node()); + token_stream_builder.add_node(extern_type_ast.name(db).as_syntax_node()); + token_stream_builder.add_node(extern_type_ast.generic_params(db).as_syntax_node()); + token_stream_builder.add_node(extern_type_ast.semicolon(db).as_syntax_node()); + expansion + } + ast::ModuleItem::Struct(struct_ast) => { + let attrs = struct_ast.attributes(db).elements(db); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); + token_stream_builder.add_node(struct_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(struct_ast.struct_kw(db).as_syntax_node()); + token_stream_builder.add_node(struct_ast.name(db).as_syntax_node()); + token_stream_builder.add_node(struct_ast.generic_params(db).as_syntax_node()); + token_stream_builder.add_node(struct_ast.lbrace(db).as_syntax_node()); + token_stream_builder.add_node(struct_ast.members(db).as_syntax_node()); + token_stream_builder.add_node(struct_ast.rbrace(db).as_syntax_node()); + expansion + } + ast::ModuleItem::Enum(enum_ast) => { + let attrs = enum_ast.attributes(db).elements(db); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); + token_stream_builder.add_node(enum_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(enum_ast.enum_kw(db).as_syntax_node()); + token_stream_builder.add_node(enum_ast.name(db).as_syntax_node()); + token_stream_builder.add_node(enum_ast.generic_params(db).as_syntax_node()); + token_stream_builder.add_node(enum_ast.lbrace(db).as_syntax_node()); + token_stream_builder.add_node(enum_ast.variants(db).as_syntax_node()); + token_stream_builder.add_node(enum_ast.rbrace(db).as_syntax_node()); + expansion + } + _ => AttrExpansionFound::None, + }; + let token_stream = token_stream_builder.build(ctx); + (input, token_stream) + } + + fn parse_attrs( + &self, + db: &dyn SyntaxGroup, + builder: &mut TokenStreamBuilder<'_>, + attrs: Vec, + ctx: &AllocationContext, + ) -> AttrExpansionFound { + // This function parses attributes of the item, + // checking if those attributes correspond to a procedural macro that should be fired. + // The proc macro attribute found is removed from attributes list, + // while other attributes are appended to the `PathBuilder` passed as an argument. + + // Note this function does not affect the executable attributes, + // as it only pulls `ExpansionKind::Attr` from the plugin. + // This means that executable attributes will neither be removed from the item, + // nor will they cause the item to be rewritten. + let mut expansion = None; + let mut last = true; + for attr in attrs { + // We ensure that this flag is changed *after* the expansion is found. 
+ if last { + let structured_attr = attr.clone().structurize(db); + let found = self.find_expansion(&Expansion::new( + structured_attr.id.clone(), + ExpansionKind::Attr, + )); + if let Some(found) = found { + if expansion.is_none() { + let mut args_builder = TokenStreamBuilder::new(db); + args_builder.add_node(attr.arguments(db).as_syntax_node()); + let args = args_builder.build(ctx); + expansion = Some((found, args, attr.stable_ptr().untyped())); + // Do not add the attribute for found expansion. + continue; + } else { + last = false; + } + } + } + builder.add_node(attr.as_syntax_node()); + } + match (expansion, last) { + (Some((expansion, args, stable_ptr)), true) => AttrExpansionFound::Last { + expansion, + args, + stable_ptr, + }, + (Some((expansion, args, stable_ptr)), false) => AttrExpansionFound::Some { + expansion, + args, + stable_ptr, + }, + (None, _) => AttrExpansionFound::None, + } + } + + pub fn expand_attribute( + &self, + input: ProcMacroId, + last: bool, + args: TokenStream, + token_stream: TokenStream, + stable_ptr: SyntaxStablePtrId, + ) -> PluginResult { + let original = token_stream.to_string(); + let result = self.instance(input.package_id).generate_code( + input.expansion.name.clone(), + args, + token_stream, + ); + + // Handle token stream. + if result.token_stream.is_empty() { + // Remove original code + return PluginResult { + diagnostics: into_cairo_diagnostics(result.diagnostics, stable_ptr), + code: None, + remove_original_item: true, + }; + } + + // Full path markers require code modification. + self.register_full_path_markers(input.package_id, result.full_path_markers.clone()); + + // This is a minor optimization. + // If the expanded macro attribute is the only one that will be expanded by `ProcMacroHost` + // in this `generate_code` call (i.e. all the other macro attributes has been expanded by + // previous calls), and the expansion did not produce any changes, we can skip rewriting the + // expanded node by simply returning no generated code, and leaving the original item as is. + // However, if we have other macro attributes to expand, we must rewrite the node even if no + // changes have been produced, so that we can parse the attributes once again and expand them. + // In essence, `code: None, remove_original_item: false` means `ProcMacroHost` will not be + // called again for this AST item. + // This optimization limits the number of generated nodes a bit. 
+ if last && result.aux_data.is_none() && original == result.token_stream.to_string() { + return PluginResult { + code: None, + remove_original_item: false, + diagnostics: into_cairo_diagnostics(result.diagnostics, stable_ptr), + }; + } + + let file_name = format!("proc_{}", input.expansion.name); + let content = result.token_stream.to_string(); + PluginResult { + code: Some(PluginGeneratedFile { + name: file_name.into(), + code_mappings: Vec::new(), + content, + diagnostics_note: Some(format!( + "this error originates in the attribute macro: `{}`", + input.expansion.name + )), + aux_data: result.aux_data.map(|new_aux_data| { + DynGeneratedFileAuxData::new(EmittedAuxData::new(ProcMacroAuxData::new( + new_aux_data.into(), + input, + ))) + }), + }), + diagnostics: into_cairo_diagnostics(result.diagnostics, stable_ptr), + remove_original_item: true, + } + } +} + +pub enum AttrExpansionFound { + Some { + expansion: ProcMacroId, + args: TokenStream, + stable_ptr: SyntaxStablePtrId, + }, + None, + Last { + expansion: ProcMacroId, + args: TokenStream, + stable_ptr: SyntaxStablePtrId, + }, +} + +impl AttrExpansionFound { + pub fn as_name(&self) -> Option { + match self { + AttrExpansionFound::Some { expansion, .. } + | AttrExpansionFound::Last { expansion, .. } => Some(expansion.expansion.name.clone()), + AttrExpansionFound::None => None, + } + } +} + +pub enum InnerAttrExpansionResult { + None, + Some(PluginResult), +} + +pub struct InnerAttrExpansionContext<'a> { + host: &'a ProcMacroHostPlugin, + // Metadata returned for expansions. + diagnostics: Vec, + aux_data: EmittedAuxData, + any_changed: bool, +} + +impl<'a> InnerAttrExpansionContext<'a> { + pub fn new<'b: 'a>(host: &'b ProcMacroHostPlugin) -> Self { + Self { + diagnostics: Vec::new(), + aux_data: EmittedAuxData::default(), + any_changed: false, + host, + } + } + + pub fn register_result( + &mut self, + original: String, + input: ProcMacroId, + result: ProcMacroResult, + stable_ptr: SyntaxStablePtrId, + ) -> String { + let result_str = result.token_stream.to_string(); + let changed = result_str != original; + + if changed { + self.host + .register_full_path_markers(input.package_id, result.full_path_markers.clone()); + } + + self.diagnostics + .extend(into_cairo_diagnostics(result.diagnostics, stable_ptr)); + + if let Some(new_aux_data) = result.aux_data { + self.aux_data + .push(ProcMacroAuxData::new(new_aux_data.into(), input)); + } + + self.any_changed = self.any_changed || changed; + + result_str + } + + pub fn into_result( + self, + expanded: String, + code_mappings: Vec, + attr_names: Vec, + ) -> PluginResult { + let msg = if attr_names.len() == 1 { + "the attribute macro" + } else { + "one of the attribute macros" + }; + let derive_names = attr_names.iter().map(ToString::to_string).join("`, `"); + let note = format!("this error originates in {msg}: `{derive_names}`"); + PluginResult { + code: Some(PluginGeneratedFile { + name: "proc_attr_inner".into(), + content: expanded, + aux_data: if self.aux_data.is_empty() { + None + } else { + Some(DynGeneratedFileAuxData::new(self.aux_data)) + }, + code_mappings, + diagnostics_note: Some(note), + }), + diagnostics: self.diagnostics, + remove_original_item: true, + } + } +} diff --git a/scarb/src/compiler/plugin/proc_macro/host/aux_data.rs b/scarb/src/compiler/plugin/proc_macro/host/aux_data.rs new file mode 100644 index 000000000..a1fa0d54a --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/host/aux_data.rs @@ -0,0 +1,90 @@ +use 
crate::compiler::plugin::proc_macro::{ProcMacroHostPlugin, ProcMacroId}; +use crate::core::PackageId; +use cairo_lang_defs::plugin::GeneratedFileAuxData; +use cairo_lang_macro::AuxData; +use cairo_lang_semantic::db::SemanticGroup; +use itertools::Itertools; +use std::any::Any; +use std::collections::HashMap; +use std::vec::IntoIter; + +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct ProcMacroAuxData { + value: Vec, + macro_id: ProcMacroId, +} + +impl ProcMacroAuxData { + pub fn new(value: Vec, macro_id: ProcMacroId) -> Self { + Self { value, macro_id } + } +} + +impl From for AuxData { + fn from(data: ProcMacroAuxData) -> Self { + Self::new(data.value) + } +} + +#[derive(Debug, Clone, Default)] +pub struct EmittedAuxData(Vec); + +impl GeneratedFileAuxData for EmittedAuxData { + fn as_any(&self) -> &dyn Any { + self + } + + fn eq(&self, other: &dyn GeneratedFileAuxData) -> bool { + self.0 == other.as_any().downcast_ref::().unwrap().0 + } +} + +impl EmittedAuxData { + pub fn new(aux_data: ProcMacroAuxData) -> Self { + Self(vec![aux_data]) + } + + pub fn push(&mut self, aux_data: ProcMacroAuxData) { + self.0.push(aux_data); + } + + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } +} + +impl IntoIterator for EmittedAuxData { + type Item = ProcMacroAuxData; + type IntoIter = IntoIter; + + fn into_iter(self) -> IntoIter { + self.0.into_iter() + } +} + +impl ProcMacroHostPlugin { + pub(crate) fn collect_aux_data( + &self, + db: &dyn SemanticGroup, + ) -> HashMap> { + let mut data = Vec::new(); + for crate_id in db.crates() { + let crate_modules = db.crate_modules(crate_id); + for module in crate_modules.iter() { + let file_infos = db.module_generated_file_aux_data(*module); + if let Ok(file_infos) = file_infos { + for file_info in file_infos.iter() { + let aux_data = file_info + .as_ref() + .and_then(|ad| ad.as_any().downcast_ref::()); + if let Some(aux_data) = aux_data { + data.extend(aux_data.clone().into_iter()); + } + } + } + } + } + data.into_iter() + .into_group_map_by(|d| d.macro_id.package_id) + } +} diff --git a/scarb/src/compiler/plugin/proc_macro/host/derive.rs b/scarb/src/compiler/plugin/proc_macro/host/derive.rs new file mode 100644 index 000000000..ea8557c69 --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/host/derive.rs @@ -0,0 +1,148 @@ +use crate::compiler::plugin::proc_macro::host::aux_data::{EmittedAuxData, ProcMacroAuxData}; +use crate::compiler::plugin::proc_macro::host::{DERIVE_ATTR, into_cairo_diagnostics}; +use crate::compiler::plugin::proc_macro::{ + Expansion, ExpansionKind, ProcMacroHostPlugin, ProcMacroId, TokenStreamBuilder, +}; +use cairo_lang_defs::patcher::PatchBuilder; +use cairo_lang_defs::plugin::{DynGeneratedFileAuxData, PluginGeneratedFile, PluginResult}; +use cairo_lang_macro::{ + AllocationContext, Diagnostic, TokenStream, TokenStreamMetadata, TokenTree, +}; +use cairo_lang_syntax::attribute::structured::{AttributeArgVariant, AttributeStructurize}; +use cairo_lang_syntax::node::ast::{Expr, PathSegment}; +use cairo_lang_syntax::node::db::SyntaxGroup; +use cairo_lang_syntax::node::helpers::QueryAttrs; +use cairo_lang_syntax::node::{Terminal, TypedStablePtr, TypedSyntaxNode, ast}; +use convert_case::{Case, Casing}; +use itertools::Itertools; + +impl ProcMacroHostPlugin { + /// Handle `#[derive(...)]` attribute. + /// + /// Returns a list of expansions that this plugin should apply. 
+ fn parse_derive(&self, db: &dyn SyntaxGroup, item_ast: ast::ModuleItem) -> Vec { + let attrs = match item_ast { + ast::ModuleItem::Struct(struct_ast) => Some(struct_ast.query_attr(db, DERIVE_ATTR)), + ast::ModuleItem::Enum(enum_ast) => Some(enum_ast.query_attr(db, DERIVE_ATTR)), + _ => None, + }; + + attrs + .unwrap_or_default() + .iter() + .map(|attr| attr.clone().structurize(db)) + .flat_map(|attr| attr.args.into_iter()) + .filter_map(|attr| { + let AttributeArgVariant::Unnamed(value) = attr.clone().variant else { + return None; + }; + let Expr::Path(path) = value else { + return None; + }; + let path = path.elements(db); + let path = path.last()?; + let PathSegment::Simple(segment) = path else { + return None; + }; + let ident = segment.ident(db); + let value = ident.text(db).to_string(); + + self.find_expansion(&Expansion::new( + value.to_case(Case::Snake), + ExpansionKind::Derive, + )) + }) + .collect_vec() + } + + pub fn expand_derives( + &self, + db: &dyn SyntaxGroup, + item_ast: ast::ModuleItem, + stream_metadata: TokenStreamMetadata, + ) -> Option { + let stable_ptr = item_ast.clone().stable_ptr().untyped(); + let mut token_stream_builder = TokenStreamBuilder::new(db); + token_stream_builder.add_node(item_ast.as_syntax_node()); + token_stream_builder.with_metadata(stream_metadata.clone()); + let mut aux_data = EmittedAuxData::default(); + let mut all_diagnostics: Vec = Vec::new(); + + // All derives to be applied. + let derives = self.parse_derive(db, item_ast.clone()); + let any_derives = !derives.is_empty(); + + let ctx = AllocationContext::default(); + let mut derived_code = PatchBuilder::new(db, &item_ast); + for derive in derives.iter() { + let token_stream = token_stream_builder.build(&ctx); + let result = self.instance(derive.package_id).generate_code( + derive.expansion.name.clone(), + TokenStream::empty(), + token_stream, + ); + + // Register diagnostics. + all_diagnostics.extend(result.diagnostics); + + // Register aux data. + if let Some(new_aux_data) = result.aux_data { + aux_data.push(ProcMacroAuxData::new( + new_aux_data.into(), + ProcMacroId::new(derive.package_id, derive.expansion.clone()), + )); + } + + if result.token_stream.is_empty() { + // No code has been generated. + // We do not need to do anything. + continue; + } + + for token in result.token_stream.tokens { + match token { + TokenTree::Ident(token) => { + derived_code.add_str(token.content.as_ref()); + } + } + } + } + + if any_derives { + let derived_code = derived_code.build().0; + return Some(PluginResult { + code: if derived_code.is_empty() { + None + } else { + let msg = if derives.len() == 1 { + "the derive macro" + } else { + "one of the derive macros" + }; + let derive_names = derives + .iter() + .map(|derive| derive.expansion.name.to_string()) + .join("`, `"); + let note = format!("this error originates in {msg}: `{derive_names}`"); + Some(PluginGeneratedFile { + name: "proc_macro_derive".into(), + code_mappings: Vec::new(), + content: derived_code, + aux_data: if aux_data.is_empty() { + None + } else { + Some(DynGeneratedFileAuxData::new(aux_data)) + }, + diagnostics_note: Some(note), + }) + }, + diagnostics: into_cairo_diagnostics(all_diagnostics, stable_ptr), + // Note that we don't remove the original item here, unlike for attributes. + // We do not add the original code to the generated file either. 
+ remove_original_item: false, + }); + } + + None + } +} diff --git a/scarb/src/compiler/plugin/proc_macro/host/inline.rs b/scarb/src/compiler/plugin/proc_macro/host/inline.rs new file mode 100644 index 000000000..35a124875 --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/host/inline.rs @@ -0,0 +1,101 @@ +use crate::compiler::plugin::proc_macro::host::aux_data::{EmittedAuxData, ProcMacroAuxData}; +use crate::compiler::plugin::proc_macro::host::into_cairo_diagnostics; +use crate::compiler::plugin::proc_macro::{ + Expansion, ProcMacroId, ProcMacroInstance, TokenStreamBuilder, +}; +use cairo_lang_defs::plugin::{ + DynGeneratedFileAuxData, InlineMacroExprPlugin, InlinePluginResult, MacroPluginMetadata, + PluginGeneratedFile, +}; +use cairo_lang_macro::{AllocationContext, TokenStream}; +use cairo_lang_syntax::node::db::SyntaxGroup; +use cairo_lang_syntax::node::{TypedStablePtr, TypedSyntaxNode, ast}; +use std::sync::{Arc, OnceLock}; + +/// A Cairo compiler inline macro plugin controlling the inline procedural macro execution. +/// +/// This plugin represents a single expansion capable of handling inline procedural macros. +/// The plugin triggers code expansion in a corresponding procedural macro instance. +#[derive(Debug)] +pub struct ProcMacroInlinePlugin { + instance: Arc, + expansion: Expansion, + doc: OnceLock>, +} + +impl ProcMacroInlinePlugin { + pub fn new(instance: Arc, expansion: Expansion) -> Self { + assert!(instance.get_expansions().contains(&expansion)); + Self { + instance, + expansion, + doc: Default::default(), + } + } + + fn instance(&self) -> &ProcMacroInstance { + &self.instance + } +} + +impl InlineMacroExprPlugin for ProcMacroInlinePlugin { + fn generate_code( + &self, + db: &dyn SyntaxGroup, + syntax: &ast::ExprInlineMacro, + _metadata: &MacroPluginMetadata<'_>, + ) -> InlinePluginResult { + let ctx = AllocationContext::default(); + let stable_ptr = syntax.clone().stable_ptr().untyped(); + let arguments = syntax.arguments(db); + let mut token_stream_builder = TokenStreamBuilder::new(db); + token_stream_builder.add_node(arguments.as_syntax_node()); + let token_stream = token_stream_builder.build(&ctx); + let result = self.instance().generate_code( + self.expansion.name.clone(), + TokenStream::empty(), + token_stream, + ); + // Handle diagnostics. 
+ let diagnostics = into_cairo_diagnostics(result.diagnostics, stable_ptr); + let token_stream = result.token_stream.clone(); + if token_stream.is_empty() { + // Remove original code + InlinePluginResult { + code: None, + diagnostics, + } + } else { + // Replace + let aux_data = result.aux_data.map(|aux_data| { + let aux_data = ProcMacroAuxData::new( + aux_data.into(), + ProcMacroId::new(self.instance.package_id(), self.expansion.clone()), + ); + let mut emitted = EmittedAuxData::default(); + emitted.push(aux_data); + DynGeneratedFileAuxData::new(emitted) + }); + let content = token_stream.to_string(); + InlinePluginResult { + code: Some(PluginGeneratedFile { + name: "inline_proc_macro".into(), + code_mappings: Vec::new(), + content, + aux_data, + diagnostics_note: Some(format!( + "this error originates in the inline macro: `{}`", + self.expansion.name + )), + }), + diagnostics, + } + } + } + + fn documentation(&self) -> Option { + self.doc + .get_or_init(|| self.instance().doc(self.expansion.name.clone())) + .clone() + } +} diff --git a/scarb/src/compiler/plugin/proc_macro/host/mod.rs b/scarb/src/compiler/plugin/proc_macro/host/mod.rs new file mode 100644 index 000000000..97b80d660 --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/host/mod.rs @@ -0,0 +1,289 @@ +mod attribute; +mod aux_data; +mod derive; +mod inline; +mod post; + +use attribute::*; +pub use aux_data::ProcMacroAuxData; +use inline::*; + +use crate::compiler::plugin::proc_macro::compilation::SharedLibraryProvider; +use crate::compiler::plugin::proc_macro::{Expansion, ExpansionKind, ProcMacroInstance}; +use crate::core::{Config, Package, PackageId, edition_variant}; +use anyhow::{Context, Result, ensure}; +use cairo_lang_defs::plugin::PluginDiagnostic; +use cairo_lang_defs::plugin::{MacroPlugin, MacroPluginMetadata, PluginResult}; +use cairo_lang_filesystem::db::Edition; +use cairo_lang_macro::{AllocationContext, Diagnostic, Severity, TokenStreamMetadata}; +use cairo_lang_semantic::plugin::PluginSuite; +use cairo_lang_syntax::node::db::SyntaxGroup; +use cairo_lang_syntax::node::ids::SyntaxStablePtrId; +use cairo_lang_syntax::node::{TypedStablePtr, TypedSyntaxNode, ast}; +use convert_case::{Case, Casing}; +use itertools::Itertools; +use scarb_stable_hash::short_hash; +use std::collections::HashMap; +use std::fmt::Debug; +use std::sync::{Arc, RwLock}; + +const FULL_PATH_MARKER_KEY: &str = "macro::full_path_marker"; +const DERIVE_ATTR: &str = "derive"; + +/// A Cairo compiler plugin controlling the procedural macro execution. +/// +/// This plugin decides which macro plugins (if any) should be applied to the processed AST item. +/// It then redirects the item to the appropriate macro plugin for code expansion. +#[derive(Debug)] +pub struct ProcMacroHostPlugin { + macros: Vec>, + full_path_markers: RwLock>>, +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct ProcMacroId { + pub package_id: PackageId, + pub expansion: Expansion, +} + +impl ProcMacroId { + pub fn new(package_id: PackageId, expansion: Expansion) -> Self { + Self { + package_id, + expansion, + } + } +} + +impl ProcMacroHostPlugin { + pub fn try_new(macros: Vec>) -> Result { + // Validate expansions. 
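+        // Illustrative example: if two registered packages both exported an attribute
+        // macro named `some`, construction would fail with an error along the lines of
+        // "duplicate expansions defined for procedural macros: some (<pkg_a> and <pkg_b>)".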
+ let mut expansions = macros + .iter() + .flat_map(|m| { + m.get_expansions() + .iter() + .map(|e| ProcMacroId::new(m.package_id(), e.clone())) + .collect_vec() + }) + .collect::>(); + expansions.sort_unstable_by_key(|e| e.expansion.name.clone()); + ensure!( + expansions + .windows(2) + .all(|w| w[0].expansion.name != w[1].expansion.name), + "duplicate expansions defined for procedural macros: {duplicates}", + duplicates = expansions + .windows(2) + .filter(|w| w[0].expansion.name == w[1].expansion.name) + .map(|w| format!( + "{} ({} and {})", + w[0].expansion.name.as_str(), + w[0].package_id, + w[1].package_id + )) + .collect::>() + .join(", ") + ); + Ok(Self { + macros, + full_path_markers: RwLock::new(Default::default()), + }) + } + + fn find_expansion(&self, expansion: &Expansion) -> Option { + self.macros + .iter() + .find(|m| m.get_expansions().contains(expansion)) + .map(|m| m.package_id()) + .map(|package_id| ProcMacroId::new(package_id, expansion.clone())) + } + + pub fn build_plugin_suite(macro_host: Arc) -> PluginSuite { + let mut suite = PluginSuite::default(); + // Register inline macro plugins. + for proc_macro in ¯o_host.macros { + let expansions = proc_macro + .get_expansions() + .iter() + .filter(|exp| matches!(exp.kind, ExpansionKind::Inline)); + for expansion in expansions { + let plugin = Arc::new(ProcMacroInlinePlugin::new( + proc_macro.clone(), + expansion.clone(), + )); + suite.add_inline_macro_plugin_ex(expansion.name.as_str(), plugin); + } + } + // Register procedural macro host plugin. + suite.add_plugin_ex(macro_host); + suite + } + + pub fn instance(&self, package_id: PackageId) -> &ProcMacroInstance { + self.macros + .iter() + .find(|m| m.package_id() == package_id) + .expect("procedural macro must be registered in proc macro host") + } + + fn calculate_metadata( + db: &dyn SyntaxGroup, + item_ast: ast::ModuleItem, + edition: Edition, + ) -> TokenStreamMetadata { + let stable_ptr = item_ast.clone().stable_ptr().untyped(); + let file_path = stable_ptr.file_id(db).full_path(db.upcast()); + let file_id = short_hash(file_path.clone()); + let edition = edition_variant(edition); + TokenStreamMetadata::new(file_path, file_id, edition) + } + + pub fn macros(&self) -> &[Arc] { + &self.macros + } + + // NOTE: Required for proc macro server. `::declared_attributes` + // returns attributes **and** executables. In PMS, we only need the former because the latter is handled separately. + pub fn declared_attributes_without_executables(&self) -> Vec { + self.macros + .iter() + .flat_map(|instance| instance.declared_attributes()) + .collect() + } + + pub fn declared_inline_macros(&self) -> Vec { + self.macros + .iter() + .flat_map(|instance| instance.inline_macros()) + .collect() + } +} + +impl MacroPlugin for ProcMacroHostPlugin { + fn generate_code( + &self, + db: &dyn SyntaxGroup, + item_ast: ast::ModuleItem, + metadata: &MacroPluginMetadata<'_>, + ) -> PluginResult { + let stream_metadata = Self::calculate_metadata(db, item_ast.clone(), metadata.edition); + + // Handle inner functions. + if let InnerAttrExpansionResult::Some(result) = self.expand_inner_attr(db, item_ast.clone()) + { + return result; + } + + // Expand first attribute. + // Note that we only expand the first attribute, as we assume that the rest of the attributes + // will be handled by a subsequent call to this function. 
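+        // Illustrative example: for `#[first] #[second] fn f() {}` only `#[first]` is
+        // expanded in this pass; `#[second]` is kept on the expanded code, so it will be
+        // picked up when the plugin is invoked again for the generated item.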
+ let ctx = AllocationContext::default(); + let (input, body) = self.parse_attribute(db, item_ast.clone(), &ctx); + + if let Some(result) = match input { + AttrExpansionFound::Last { + expansion, + args, + stable_ptr, + } => Some((expansion, args, stable_ptr, true)), + AttrExpansionFound::Some { + expansion, + args, + stable_ptr, + } => Some((expansion, args, stable_ptr, false)), + AttrExpansionFound::None => None, + } + .map(|(expansion, args, stable_ptr, last)| { + let token_stream = body.with_metadata(stream_metadata.clone()); + self.expand_attribute(expansion, last, args, token_stream, stable_ptr) + }) { + return result; + } + + // Expand all derives. + // Note that all proc macro attributes should be already expanded at this point. + if let Some(result) = self.expand_derives(db, item_ast.clone(), stream_metadata.clone()) { + return result; + } + + // No expansions can be applied. + PluginResult { + code: None, + diagnostics: Vec::new(), + remove_original_item: false, + } + } + + fn declared_attributes(&self) -> Vec { + self.macros + .iter() + .flat_map(|m| m.declared_attributes_and_executables()) + .chain(vec![FULL_PATH_MARKER_KEY.to_string()]) + .collect() + } + + fn declared_derives(&self) -> Vec { + self.macros + .iter() + .flat_map(|m| m.declared_derives()) + .map(|s| s.to_case(Case::UpperCamel)) + .collect() + } + + fn executable_attributes(&self) -> Vec { + self.macros + .iter() + .flat_map(|m| m.executable_attributes()) + .collect() + } +} + +fn into_cairo_diagnostics( + diagnostics: Vec, + stable_ptr: SyntaxStablePtrId, +) -> Vec { + diagnostics + .into_iter() + .map(|diag| PluginDiagnostic { + stable_ptr, + message: diag.message, + severity: match diag.severity { + Severity::Error => cairo_lang_diagnostics::Severity::Error, + Severity::Warning => cairo_lang_diagnostics::Severity::Warning, + }, + }) + .collect_vec() +} + +/// A Scarb wrapper around the `ProcMacroHost` compiler plugin. +/// +/// This struct represent the compiler plugin in terms of Scarb data model. +/// It also builds a plugin suite that enables the compiler plugin. 
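+///
+/// Illustrative usage sketch (assumes an already resolved `package: Package` and a
+/// `config: &Config`; error handling elided):
+///
+/// ```ignore
+/// let mut host = ProcMacroHost::default();
+/// // Load the shared library built for the procedural macro package.
+/// host.register_new(package, config)?;
+/// // Validate the declared expansions and build a compiler plugin suite.
+/// let plugin = Arc::new(host.into_plugin()?);
+/// let suite = ProcMacroHostPlugin::build_plugin_suite(plugin);
+/// ```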
+#[derive(Default)] +pub struct ProcMacroHost { + macros: Vec>, +} + +impl ProcMacroHost { + pub fn register_instance(&mut self, instance: Arc) { + self.macros.push(instance); + } + + pub fn register_new(&mut self, package: Package, config: &Config) -> Result<()> { + let lib_path = package + .shared_lib_path(config) + .context("could not resolve shared library path")?; + let instance = ProcMacroInstance::try_new(package.id, lib_path)?; + self.register_instance(Arc::new(instance)); + Ok(()) + } + + pub fn into_plugin(self) -> Result { + ProcMacroHostPlugin::try_new(self.macros) + } + + pub fn macros(&self) -> &[Arc] { + &self.macros + } +} diff --git a/scarb/src/compiler/plugin/proc_macro/host/post.rs b/scarb/src/compiler/plugin/proc_macro/host/post.rs new file mode 100644 index 000000000..07570b7a6 --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/host/post.rs @@ -0,0 +1,113 @@ +use crate::compiler::plugin::proc_macro::ProcMacroHostPlugin; +use crate::compiler::plugin::proc_macro::host::FULL_PATH_MARKER_KEY; +use crate::core::PackageId; +use anyhow::Result; +use cairo_lang_defs::ids::{ModuleItemId, TopLevelLanguageElementId}; +use cairo_lang_diagnostics::ToOption; +use cairo_lang_macro::FullPathMarker; +use cairo_lang_semantic::db::SemanticGroup; +use cairo_lang_semantic::items::attribute::SemanticQueryAttrs; +use cairo_lang_syntax::attribute::structured::{Attribute, AttributeArgVariant}; +use cairo_lang_syntax::node::ast::Expr; +use itertools::Itertools; +use std::collections::HashMap; +use tracing::{debug, trace_span}; + +impl ProcMacroHostPlugin { + #[tracing::instrument(level = "trace", skip_all)] + pub fn post_process(&self, db: &dyn SemanticGroup) -> Result<()> { + let markers = self.collect_full_path_markers(db); + + let aux_data = self.collect_aux_data(db); + for instance in self.macros.iter() { + let _ = trace_span!( + "post_process_callback", + instance = %instance.package_id() + ) + .entered(); + let instance_markers = self + .full_path_markers + .read() + .unwrap() + .get(&instance.package_id()) + .cloned() + .unwrap_or_default(); + let markers_for_instance = markers + .iter() + .filter(|(key, _)| instance_markers.contains(key)) + .map(|(key, full_path)| FullPathMarker { + key: key.clone(), + full_path: full_path.clone(), + }) + .collect_vec(); + let data = aux_data + .get(&instance.package_id()) + .cloned() + .unwrap_or_default(); + debug!("calling post processing callback with: {data:?}"); + instance.post_process_callback(data.clone(), markers_for_instance); + } + Ok(()) + } + + fn collect_full_path_markers(&self, db: &dyn SemanticGroup) -> HashMap { + let mut markers: HashMap = HashMap::new(); + // FULL_PATH_MARKER_KEY + for crate_id in db.crates() { + let modules = db.crate_modules(crate_id); + for module_id in modules.iter() { + let Ok(module_items) = db.module_items(*module_id) else { + continue; + }; + for item_id in module_items.iter() { + let attr = match item_id { + ModuleItemId::Struct(id) => { + id.query_attr(db, FULL_PATH_MARKER_KEY).to_option() + } + ModuleItemId::Enum(id) => { + id.query_attr(db, FULL_PATH_MARKER_KEY).to_option() + } + ModuleItemId::FreeFunction(id) => { + id.query_attr(db, FULL_PATH_MARKER_KEY).to_option() + } + _ => None, + }; + + let keys = attr + .unwrap_or_default() + .into_iter() + .filter_map(|attr| Self::extract_key(db, attr)) + .collect_vec(); + let full_path = item_id.full_path(db.upcast()); + for key in keys { + markers.insert(key, full_path.clone()); + } + } + } + } + markers + } + + fn extract_key(db: &dyn SemanticGroup, attr: 
Attribute) -> Option { + if attr.id != FULL_PATH_MARKER_KEY { + return None; + } + + for arg in attr.args.clone() { + if let AttributeArgVariant::Unnamed(Expr::String(s)) = arg.variant { + return s.string_value(db.upcast()); + } + } + + None + } + + pub(crate) fn register_full_path_markers(&self, package_id: PackageId, markers: Vec) { + self.full_path_markers + .write() + .unwrap() + .entry(package_id) + .and_modify(|markers| markers.extend(markers.clone())) + .or_insert(markers); + } +} diff --git a/scarb/src/compiler/plugin/proc_macro/mod.rs b/scarb/src/compiler/plugin/proc_macro/mod.rs index 83a4e7822..9b4e375a4 100644 --- a/scarb/src/compiler/plugin/proc_macro/mod.rs +++ b/scarb/src/compiler/plugin/proc_macro/mod.rs @@ -1,9 +1,11 @@ pub mod compilation; mod ffi; mod host; +mod repository; mod types; pub use compilation::{check_unit, compile_unit, fetch_crate}; pub use ffi::*; pub use host::*; +pub use repository::*; pub use types::*; diff --git a/scarb/src/compiler/plugin/proc_macro/repository.rs b/scarb/src/compiler/plugin/proc_macro/repository.rs new file mode 100644 index 000000000..f1f08bc17 --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/repository.rs @@ -0,0 +1,46 @@ +use crate::compiler::plugin::proc_macro::ProcMacroInstance; +use crate::compiler::plugin::proc_macro::compilation::SharedLibraryProvider; +use crate::core::{Config, Package, PackageId}; +use anyhow::{Context, Result, bail}; +use std::collections::HashMap; +use std::sync::{Arc, RwLock}; + +/// A global storage for dynamically-loaded procedural macros. +/// Loads dynamic shared libraries and hides them beside [`ProcMacroInstance`]. +/// Guarantees that every library is loaded exactly once, +/// but does not prevent loading multiple versions of the same library. +#[derive(Default)] +pub struct ProcMacroRepository { + /// A mapping between the [`PackageId`] of the package which defines the plugin + /// and the [`ProcMacroInstance`] holding the underlying shared library. + macros: RwLock>>, +} + +impl ProcMacroRepository { + /// Returns the [`ProcMacroInstance`] representing the procedural macros defined in the [`Package`]. + /// Loads the underlying shared library if it has not been loaded yet. 
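+    ///
+    /// Illustrative sketch (assumes a resolved `package: Package` and a `config: &Config`):
+    ///
+    /// ```ignore
+    /// let repo = ProcMacroRepository::default();
+    /// let first = repo.get_or_load(package.clone(), config)?;
+    /// // The second call returns the instance cached under this package id
+    /// // instead of loading the shared library again.
+    /// let second = repo.get_or_load(package, config)?;
+    /// assert!(Arc::ptr_eq(&first, &second));
+    /// ```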
+ pub fn get_or_load(&self, package: Package, config: &Config) -> Result> { + let Ok(macros) = self.macros.read() else { + bail!("could not get a read access to the ProcMacroRepository"); + }; + + if let Some(instance) = macros.get(&package.id) { + return Ok(instance.clone()); + } + + drop(macros); + + let Ok(mut macros) = self.macros.write() else { + bail!("could not get a write access to the ProcMacroRepository"); + }; + + let lib_path = package + .shared_lib_path(config) + .context("could not resolve shared library path")?; + + let instance = Arc::new(ProcMacroInstance::try_new(package.id, lib_path)?); + macros.insert(package.id, instance.clone()); + + Ok(instance) + } +} diff --git a/scarb/tests/proc_macro_prebuilt.rs b/scarb/tests/proc_macro_prebuilt.rs index 9b76e986c..53d3e8147 100644 --- a/scarb/tests/proc_macro_prebuilt.rs +++ b/scarb/tests/proc_macro_prebuilt.rs @@ -227,7 +227,7 @@ fn load_prebuilt_proc_macros() { context: ProcMacroScope { component }, name: "some".to_string(), args: TokenStream::new(vec![TokenTree::Ident(Token::new( - "42".to_string(), + "42", TextSpan::default(), ))]), }) @@ -237,7 +237,7 @@ fn load_prebuilt_proc_macros() { assert_eq!( response.token_stream, TokenStream::new(vec![TokenTree::Ident(Token::new( - "42".to_string(), + "42", TextSpan::default(), ))]) ); From fb7c61d9723598f09d7edbde694207d51898f869 Mon Sep 17 00:00:00 2001 From: maciektr Date: Mon, 25 Nov 2024 17:41:32 +0100 Subject: [PATCH 07/38] Skip whitespace prefix when creating the span (#1761) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit commit-id:f0ce5a05 --- **Stack**: - #1749 - #1748 - #1745 - #1761 ⬅ ⚠️ *Part of a stack created by [spr](https://github.com/ejoffe/spr). Do not merge manually using the UI - doing so may have unexpected results.* --- scarb/src/compiler/plugin/proc_macro/types.rs | 50 ++++++++++++++++++- 1 file changed, 48 insertions(+), 2 deletions(-) diff --git a/scarb/src/compiler/plugin/proc_macro/types.rs b/scarb/src/compiler/plugin/proc_macro/types.rs index 51f34faff..8c5514cf6 100644 --- a/scarb/src/compiler/plugin/proc_macro/types.rs +++ b/scarb/src/compiler/plugin/proc_macro/types.rs @@ -2,6 +2,7 @@ use cairo_lang_macro::{ AllocationContext, TextSpan, Token, TokenStream, TokenStreamMetadata, TokenTree, }; use cairo_lang_syntax::node::{SyntaxNode, db::SyntaxGroup}; +use std::ops::Add; /// Helps creating TokenStream based on multiple SyntaxNodes, /// which aren't descendants or ascendants of each other inside the SyntaxTree. @@ -46,10 +47,55 @@ impl<'a> TokenStreamBuilder<'a> { pub fn token_from_syntax_node(&self, node: SyntaxNode, ctx: &AllocationContext) -> Token { let span = node.span(self.db).to_str_range(); + let text = node.get_text(self.db); let span = TextSpan { - start: span.start, + // We skip the whitespace prefix, so that diagnostics start where the actual token contents is. 
+ start: span.start.add(whitespace_prefix_len(&text)), end: span.end, }; - Token::new_in(node.get_text(self.db), span, ctx) + Token::new_in(text, span, ctx) + } +} + +fn whitespace_prefix_len(s: &str) -> usize { + s.chars().take_while(|c| c.is_whitespace()).count() +} + +#[cfg(test)] +mod tests { + use crate::compiler::plugin::proc_macro::TokenStreamBuilder; + use cairo_lang_macro::{AllocationContext, TextSpan, TokenStream, TokenTree}; + use cairo_lang_parser::utils::SimpleParserDatabase; + use indoc::indoc; + + #[test] + fn whitespace_skipped() { + let db = SimpleParserDatabase::default(); + let mut builder = TokenStreamBuilder::new(&db); + let content = indoc! {r#" + fn main() { + let x = 42; + } + "#}; + let parsed = db.parse_virtual(content).unwrap(); + builder.add_node(parsed); + let ctx = AllocationContext::default(); + let token_stream = builder.build(&ctx); + let token_at = |token_stream: &TokenStream, idx: usize| { + let token: TokenTree = token_stream.tokens[idx].clone(); + match token { + TokenTree::Ident(token) => token, + } + }; + let token = token_at(&token_stream, 4); + assert_eq!(token.content.as_ref(), "{\n"); + assert_eq!(token.span, TextSpan { start: 10, end: 12 }); + let token = token_at(&token_stream, 5); + assert_eq!(token.content.as_ref(), " let "); + // Note we skip 4 whitespaces characters in the span. + assert_eq!(token.span, TextSpan { start: 16, end: 20 }); + let token = token_at(&token_stream, 6); + assert_eq!(token.content.as_ref(), "x "); + assert_eq!(token.span, TextSpan { start: 20, end: 22 }); } } From c450e54f752d88a3a547f8bd69a85b34ba2d8206 Mon Sep 17 00:00:00 2001 From: maciektr Date: Tue, 26 Nov 2024 09:37:04 +0100 Subject: [PATCH 08/38] Remove confusing default from token stream types (#1745) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit commit-id:e6e81399 --- **Stack**: - #1749 - #1748 - #1745 ⬅ ⚠️ *Part of a stack created by [spr](https://github.com/ejoffe/spr). Do not merge manually using the UI - doing so may have unexpected results.* --- plugins/cairo-lang-macro/src/types/token.rs | 23 ++++--------------- scarb/tests/proc_macro_server.rs | 6 ++--- .../src/methods/mod.rs | 11 ++++++++- 3 files changed, 17 insertions(+), 23 deletions(-) diff --git a/plugins/cairo-lang-macro/src/types/token.rs b/plugins/cairo-lang-macro/src/types/token.rs index 6d1bf5adc..24a4fe7ec 100644 --- a/plugins/cairo-lang-macro/src/types/token.rs +++ b/plugins/cairo-lang-macro/src/types/token.rs @@ -10,7 +10,7 @@ use std::rc::Rc; /// This is both input and part of an output of a procedural macro. #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] #[cfg_attr(feature = "serde", serde(try_from = "deserializer::TokenStream"))] -#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TokenStream { pub tokens: Vec, pub metadata: TokenStreamMetadata, @@ -85,12 +85,6 @@ pub enum TokenTree { Ident(Token), } -impl Default for TokenTree { - fn default() -> Self { - Self::Ident(Default::default()) - } -} - impl TokenTree { /// Get the size hint for the [`TokenTree`]. /// This can be used to estimate size of a buffer needed for allocating this [`TokenTree`]. @@ -103,7 +97,7 @@ impl TokenTree { /// A range of text offsets that form a span (like text selection). 
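/// For example (illustrative): in the source `let x = 42;`, the token `42` covers the
/// half-open offset range 8..10, i.e. `TextSpan { start: 8, end: 10 }`.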
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TextSpan { pub start: usize, pub end: usize, @@ -113,7 +107,7 @@ pub struct TextSpan { /// /// The most atomic item of Cairo code representation. #[cfg_attr(feature = "serde", derive(serde::Serialize))] -#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Token { pub content: InternedStr, pub span: TextSpan, @@ -158,15 +152,6 @@ impl InternedStr { } } -impl Default for InternedStr { - fn default() -> Self { - Self { - ptr: "" as *const str, - _bump: Rc::default(), - } - } -} - impl AsRef for InternedStr { fn as_ref(&self) -> &str { self.deref() @@ -203,7 +188,7 @@ impl Hash for InternedStr { } /// This wrapper de-allocates the underlying buffer on drop. -#[derive(Debug, Default)] +#[derive(Debug)] pub(crate) struct BumpWrap(pub Bump); impl Drop for BumpWrap { diff --git a/scarb/tests/proc_macro_server.rs b/scarb/tests/proc_macro_server.rs index 7287a8de6..616ed169a 100644 --- a/scarb/tests/proc_macro_server.rs +++ b/scarb/tests/proc_macro_server.rs @@ -96,7 +96,7 @@ fn expand_attribute() { args: TokenStream::empty(), item: TokenStream::new(vec![TokenTree::Ident(Token::new( "fn some_test_fn(){}", - TextSpan::default(), + TextSpan::new(0, 0), ))]), }) .unwrap(); @@ -134,7 +134,7 @@ fn expand_derive() { let item = TokenStream::new(vec![TokenTree::Ident(Token::new( "fn some_test_fn(){}", - TextSpan::default(), + TextSpan::new(0, 0), ))]); let response = proc_macro_client @@ -197,7 +197,7 @@ fn expand_inline() { name: "replace_all_15_with_25".to_string(), args: TokenStream::new(vec![TokenTree::Ident(Token::new( "struct A { field: 15 , other_field: macro_call!(12)}", - TextSpan::default(), + TextSpan::new(0, 0), ))]), }) .unwrap(); diff --git a/utils/scarb-proc-macro-server-types/src/methods/mod.rs b/utils/scarb-proc-macro-server-types/src/methods/mod.rs index 1eb1326f8..0101df5c7 100644 --- a/utils/scarb-proc-macro-server-types/src/methods/mod.rs +++ b/utils/scarb-proc-macro-server-types/src/methods/mod.rs @@ -15,10 +15,19 @@ pub trait Method { /// /// This struct encapsulates both the resulting token stream from macro expansion /// and any diagnostic messages (e.g., errors or warnings) that were generated during processing. -#[derive(Debug, Clone, Default, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] pub struct ProcMacroResult { /// The resultant token stream produced after the macro expansion. pub token_stream: TokenStream, /// A list of diagnostics produced during the macro execution. pub diagnostics: Vec, } + +impl Default for ProcMacroResult { + fn default() -> Self { + Self { + token_stream: TokenStream::empty(), + diagnostics: Vec::new(), + } + } +} From e39308e137879977841f8b6548252d30f4072e81 Mon Sep 17 00:00:00 2001 From: maciektr Date: Wed, 4 Dec 2024 11:50:13 +0100 Subject: [PATCH 09/38] Make TextSpan u32 to match the compiler one (#1748) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit commit-id:107b5e17 --- **Stack**: - #1749 - #1748 ⬅ ⚠️ *Part of a stack created by [spr](https://github.com/ejoffe/spr). 
Do not merge manually using the UI - doing so may have unexpected results.* --- plugins/cairo-lang-macro-stable/src/lib.rs | 4 +-- plugins/cairo-lang-macro/src/types/mod.rs | 2 +- plugins/cairo-lang-macro/src/types/token.rs | 8 ++++-- scarb/src/compiler/plugin/proc_macro/types.rs | 11 ++++---- scarb/tests/build_cairo_plugin.rs | 28 +++++++++---------- scarb/tests/proc_macro_prebuilt.rs | 4 +-- scarb/tests/proc_macro_server.rs | 4 +-- .../src/proc_macro_server.rs | 2 +- 8 files changed, 32 insertions(+), 31 deletions(-) diff --git a/plugins/cairo-lang-macro-stable/src/lib.rs b/plugins/cairo-lang-macro-stable/src/lib.rs index c836ed850..f9458a3a5 100644 --- a/plugins/cairo-lang-macro-stable/src/lib.rs +++ b/plugins/cairo-lang-macro-stable/src/lib.rs @@ -16,8 +16,8 @@ pub struct StableToken { #[repr(C)] #[derive(Debug)] pub struct StableTextSpan { - pub start: usize, - pub end: usize, + pub start: u32, + pub end: u32, } #[repr(C)] diff --git a/plugins/cairo-lang-macro/src/types/mod.rs b/plugins/cairo-lang-macro/src/types/mod.rs index 45ab5fca2..a679ed3f9 100644 --- a/plugins/cairo-lang-macro/src/types/mod.rs +++ b/plugins/cairo-lang-macro/src/types/mod.rs @@ -45,7 +45,7 @@ pub struct ProcMacroResult { /// TokenTree::Ident( /// Token::new( /// &code, -/// TextSpan::new(0, code.len()) +/// TextSpan::new(0, code.len() as u32) /// ) /// ) /// ]); diff --git a/plugins/cairo-lang-macro/src/types/token.rs b/plugins/cairo-lang-macro/src/types/token.rs index 24a4fe7ec..f6cfc2cea 100644 --- a/plugins/cairo-lang-macro/src/types/token.rs +++ b/plugins/cairo-lang-macro/src/types/token.rs @@ -95,12 +95,14 @@ impl TokenTree { } } +pub type TextOffset = u32; + /// A range of text offsets that form a span (like text selection). #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TextSpan { - pub start: usize, - pub end: usize, + pub start: TextOffset, + pub end: TextOffset, } /// A single Cairo token. @@ -311,7 +313,7 @@ impl TokenTree { impl TextSpan { /// Create a new [`TextSpan`]. - pub fn new(start: usize, end: usize) -> TextSpan { + pub fn new(start: TextOffset, end: TextOffset) -> TextSpan { TextSpan { start, end } } } diff --git a/scarb/src/compiler/plugin/proc_macro/types.rs b/scarb/src/compiler/plugin/proc_macro/types.rs index 8c5514cf6..7599f69e0 100644 --- a/scarb/src/compiler/plugin/proc_macro/types.rs +++ b/scarb/src/compiler/plugin/proc_macro/types.rs @@ -2,7 +2,6 @@ use cairo_lang_macro::{ AllocationContext, TextSpan, Token, TokenStream, TokenStreamMetadata, TokenTree, }; use cairo_lang_syntax::node::{SyntaxNode, db::SyntaxGroup}; -use std::ops::Add; /// Helps creating TokenStream based on multiple SyntaxNodes, /// which aren't descendants or ascendants of each other inside the SyntaxTree. @@ -46,19 +45,19 @@ impl<'a> TokenStreamBuilder<'a> { } pub fn token_from_syntax_node(&self, node: SyntaxNode, ctx: &AllocationContext) -> Token { - let span = node.span(self.db).to_str_range(); + let span = node.span(self.db); let text = node.get_text(self.db); let span = TextSpan { // We skip the whitespace prefix, so that diagnostics start where the actual token contents is. 
- start: span.start.add(whitespace_prefix_len(&text)), - end: span.end, + start: span.start.as_u32() + whitespace_prefix_len(&text), + end: span.end.as_u32(), }; Token::new_in(text, span, ctx) } } -fn whitespace_prefix_len(s: &str) -> usize { - s.chars().take_while(|c| c.is_whitespace()).count() +fn whitespace_prefix_len(s: &str) -> u32 { + s.chars().take_while(|c| c.is_whitespace()).count() as u32 } #[cfg(test)] diff --git a/scarb/tests/build_cairo_plugin.rs b/scarb/tests/build_cairo_plugin.rs index 70810a49b..67329a9a3 100644 --- a/scarb/tests/build_cairo_plugin.rs +++ b/scarb/tests/build_cairo_plugin.rs @@ -414,7 +414,7 @@ fn can_replace_original_node() { let new_token_string = token_stream.to_string().replace("12", "34"); let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( new_token_string.clone(), - TextSpan { start: 0, end: new_token_string.len() }, + TextSpan { start: 0, end: new_token_string.len() as u32 }, ))]); ProcMacroResult::new(token_stream) } @@ -585,7 +585,7 @@ fn can_define_multiple_macros() { let new_token_string = token_stream.to_string().replace("12", "34"); let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( new_token_string.clone(), - TextSpan { start: 0, end: new_token_string.len() }, + TextSpan { start: 0, end: new_token_string.len() as u32 }, ))]); let aux_data = AuxData::new(Vec::new()); ProcMacroResult::new(token_stream).with_aux_data(aux_data) @@ -596,7 +596,7 @@ fn can_define_multiple_macros() { let new_token_string = token_stream.to_string().replace("56", "78"); let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( new_token_string.clone(), - TextSpan { start: 0, end: new_token_string.len() }, + TextSpan { start: 0, end: new_token_string.len() as u32 }, ))]); let aux_data = AuxData::new(Vec::new()); ProcMacroResult::new(token_stream).with_aux_data(aux_data) @@ -620,7 +620,7 @@ fn can_define_multiple_macros() { let new_token_string = token_stream.to_string().replace("90", "09"); let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( new_token_string.clone(), - TextSpan { start: 0, end: new_token_string.len() }, + TextSpan { start: 0, end: new_token_string.len() as u32 }, ))]); let aux_data = AuxData::new(Vec::new()); ProcMacroResult::new(token_stream).with_aux_data(aux_data) @@ -840,7 +840,7 @@ fn can_resolve_full_path_markers() { code.clone(), TextSpan { start: 0, - end: code.len(), + end: code.len() as u32, }, ))]) ).with_full_path_markers(full_path_markers) @@ -1013,7 +1013,7 @@ fn can_implement_derive_macro() { code.clone(), TextSpan { start: 0, - end: code.len(), + end: code.len() as u32, }, ))]); @@ -1076,7 +1076,7 @@ fn can_use_both_derive_and_attr() { new_token_string.clone(), TextSpan { start: 0, - end: new_token_string.len(), + end: new_token_string.len() as u32, }, ))])) } @@ -1088,7 +1088,7 @@ fn can_use_both_derive_and_attr() { code.clone(), TextSpan { start: 0, - end: code.len(), + end: code.len() as u32, }, ))]); @@ -1097,7 +1097,7 @@ fn can_use_both_derive_and_attr() { result_string.clone(), TextSpan { start: 0, - end: result_string.len(), + end: result_string.len() as u32, }, ))])) } @@ -1116,7 +1116,7 @@ fn can_use_both_derive_and_attr() { code.clone(), TextSpan { start: 0, - end: code.len(), + end: code.len() as u32, }, ))])) } @@ -1327,7 +1327,7 @@ fn can_be_expanded() { let new_token_string = token_stream.to_string().replace("12", "34"); let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( new_token_string.clone(), - TextSpan { start: 0, end: 
new_token_string.len() }, + TextSpan { start: 0, end: new_token_string.len() as u32 }, ))]); ProcMacroResult::new(token_stream) } @@ -1357,7 +1357,7 @@ fn can_be_expanded() { let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( code.clone(), - TextSpan { start: 0, end: code.len() }, + TextSpan { start: 0, end: code.len() as u32 }, ))]); ProcMacroResult::new(token_stream) @@ -1443,7 +1443,7 @@ fn can_expand_trait_inner_func_attrr() { .replace("12", "34"); ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new( new_token_string.clone(), - TextSpan { start: 0, end: new_token_string.len() }, + TextSpan { start: 0, end: new_token_string.len() as u32 }, ))])) } "##}) @@ -1505,7 +1505,7 @@ fn can_expand_impl_inner_func_attrr() { let new_token_string = token_stream.to_string().replace("1", "2"); ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new( new_token_string.clone(), - TextSpan { start: 0, end: new_token_string.len() }, + TextSpan { start: 0, end: new_token_string.len() as u32 }, ))])) } "##}) diff --git a/scarb/tests/proc_macro_prebuilt.rs b/scarb/tests/proc_macro_prebuilt.rs index 53d3e8147..ddae735da 100644 --- a/scarb/tests/proc_macro_prebuilt.rs +++ b/scarb/tests/proc_macro_prebuilt.rs @@ -228,7 +228,7 @@ fn load_prebuilt_proc_macros() { name: "some".to_string(), args: TokenStream::new(vec![TokenTree::Ident(Token::new( "42", - TextSpan::default(), + TextSpan::new(0, 0), ))]), }) .unwrap(); @@ -238,7 +238,7 @@ fn load_prebuilt_proc_macros() { response.token_stream, TokenStream::new(vec![TokenTree::Ident(Token::new( "42", - TextSpan::default(), + TextSpan::new(0, 0), ))]) ); } diff --git a/scarb/tests/proc_macro_server.rs b/scarb/tests/proc_macro_server.rs index 616ed169a..46370d292 100644 --- a/scarb/tests/proc_macro_server.rs +++ b/scarb/tests/proc_macro_server.rs @@ -58,7 +58,7 @@ fn expand_attribute() { let output = input.replace(name, "very_new_name"); - let span = TextSpan { start: 0, end: output.len() }; + let span = TextSpan { start: 0, end: output.len() as u32 }; ProcMacroResult::new( TokenStream::new(vec![ TokenTree::Ident( @@ -161,7 +161,7 @@ fn expand_inline() { #[inline_macro] pub fn replace_all_15_with_25(token_stream: TokenStream) -> ProcMacroResult { let content = token_stream.to_string().replace("15", "25"); - let span = TextSpan { start: 0, end: content.len() }; + let span = TextSpan { start: 0, end: content.len() as u32 }; ProcMacroResult::new( TokenStream::new(vec![ TokenTree::Ident( diff --git a/utils/scarb-test-support/src/proc_macro_server.rs b/utils/scarb-test-support/src/proc_macro_server.rs index 49e811d5a..ee9c74f06 100644 --- a/utils/scarb-test-support/src/proc_macro_server.rs +++ b/utils/scarb-test-support/src/proc_macro_server.rs @@ -44,7 +44,7 @@ pub fn inline_some(token_stream: TokenStream) -> ProcMacroResult { #[derive_macro] fn some_derive(_token_stream: TokenStream)-> ProcMacroResult { let content = "impl SomeImpl of SomeTrait {}".to_string(); - let span = TextSpan { start: 0, end: content.len() }; + let span = TextSpan { start: 0, end: content.len() as u32 }; ProcMacroResult::new( TokenStream::new(vec![ TokenTree::Ident( From 37d23b06385271442876f0b4cd57ba8356696775 Mon Sep 17 00:00:00 2001 From: Maksim Zdobnikau <43750648+DelevoXDG@users.noreply.github.com> Date: Wed, 4 Dec 2024 11:50:48 +0100 Subject: [PATCH 10/38] Implement code mappings (#1756) Closes #1647 --------- Co-authored-by: maciektr --- .../plugin/proc_macro/host/attribute.rs | 5 +- .../compiler/plugin/proc_macro/host/derive.rs | 44 ++-- 
.../compiler/plugin/proc_macro/host/inline.rs | 5 +- .../compiler/plugin/proc_macro/host/mod.rs | 33 ++- scarb/tests/build_cairo_plugin.rs | 198 +++++++++++++++++- 5 files changed, 264 insertions(+), 21 deletions(-) diff --git a/scarb/src/compiler/plugin/proc_macro/host/attribute.rs b/scarb/src/compiler/plugin/proc_macro/host/attribute.rs index e452153ce..95827f968 100644 --- a/scarb/src/compiler/plugin/proc_macro/host/attribute.rs +++ b/scarb/src/compiler/plugin/proc_macro/host/attribute.rs @@ -1,5 +1,5 @@ use crate::compiler::plugin::proc_macro::host::aux_data::{EmittedAuxData, ProcMacroAuxData}; -use crate::compiler::plugin::proc_macro::host::into_cairo_diagnostics; +use crate::compiler::plugin::proc_macro::host::{generate_code_mappings, into_cairo_diagnostics}; use crate::compiler::plugin::proc_macro::{ Expansion, ExpansionKind, ProcMacroHostPlugin, ProcMacroId, TokenStreamBuilder, }; @@ -407,11 +407,12 @@ impl ProcMacroHostPlugin { } let file_name = format!("proc_{}", input.expansion.name); + let code_mappings = generate_code_mappings(&result.token_stream); let content = result.token_stream.to_string(); PluginResult { code: Some(PluginGeneratedFile { name: file_name.into(), - code_mappings: Vec::new(), + code_mappings, content, diagnostics_note: Some(format!( "this error originates in the attribute macro: `{}`", diff --git a/scarb/src/compiler/plugin/proc_macro/host/derive.rs b/scarb/src/compiler/plugin/proc_macro/host/derive.rs index ea8557c69..93295bd8f 100644 --- a/scarb/src/compiler/plugin/proc_macro/host/derive.rs +++ b/scarb/src/compiler/plugin/proc_macro/host/derive.rs @@ -1,13 +1,14 @@ use crate::compiler::plugin::proc_macro::host::aux_data::{EmittedAuxData, ProcMacroAuxData}; -use crate::compiler::plugin::proc_macro::host::{DERIVE_ATTR, into_cairo_diagnostics}; +use crate::compiler::plugin::proc_macro::host::{ + DERIVE_ATTR, generate_code_mappings, into_cairo_diagnostics, +}; use crate::compiler::plugin::proc_macro::{ Expansion, ExpansionKind, ProcMacroHostPlugin, ProcMacroId, TokenStreamBuilder, }; -use cairo_lang_defs::patcher::PatchBuilder; use cairo_lang_defs::plugin::{DynGeneratedFileAuxData, PluginGeneratedFile, PluginResult}; -use cairo_lang_macro::{ - AllocationContext, Diagnostic, TokenStream, TokenStreamMetadata, TokenTree, -}; +use cairo_lang_filesystem::ids::CodeMapping; +use cairo_lang_filesystem::span::TextWidth; +use cairo_lang_macro::{AllocationContext, Diagnostic, TokenStream, TokenStreamMetadata}; use cairo_lang_syntax::attribute::structured::{AttributeArgVariant, AttributeStructurize}; use cairo_lang_syntax::node::ast::{Expr, PathSegment}; use cairo_lang_syntax::node::db::SyntaxGroup; @@ -73,7 +74,10 @@ impl ProcMacroHostPlugin { let any_derives = !derives.is_empty(); let ctx = AllocationContext::default(); - let mut derived_code = PatchBuilder::new(db, &item_ast); + let mut derived_code = String::new(); + let mut code_mappings = Vec::new(); + let mut current_width = TextWidth::default(); + for derive in derives.iter() { let token_stream = token_stream_builder.build(&ctx); let result = self.instance(derive.package_id).generate_code( @@ -99,17 +103,15 @@ impl ProcMacroHostPlugin { continue; } - for token in result.token_stream.tokens { - match token { - TokenTree::Ident(token) => { - derived_code.add_str(token.content.as_ref()); - } - } - } + code_mappings.extend(generate_code_mappings_with_offset( + &result.token_stream, + current_width, + )); + current_width = current_width + TextWidth::from_str(&result.token_stream.to_string()); + 
derived_code.push_str(&result.token_stream.to_string()); } if any_derives { - let derived_code = derived_code.build().0; return Some(PluginResult { code: if derived_code.is_empty() { None @@ -126,7 +128,7 @@ impl ProcMacroHostPlugin { let note = format!("this error originates in {msg}: `{derive_names}`"); Some(PluginGeneratedFile { name: "proc_macro_derive".into(), - code_mappings: Vec::new(), + code_mappings, content: derived_code, aux_data: if aux_data.is_empty() { None @@ -146,3 +148,15 @@ impl ProcMacroHostPlugin { None } } + +fn generate_code_mappings_with_offset( + token_stream: &TokenStream, + offset: TextWidth, +) -> Vec { + let mut mappings = generate_code_mappings(token_stream); + for mapping in &mut mappings { + mapping.span.start = mapping.span.start.add_width(offset); + mapping.span.end = mapping.span.end.add_width(offset); + } + mappings +} diff --git a/scarb/src/compiler/plugin/proc_macro/host/inline.rs b/scarb/src/compiler/plugin/proc_macro/host/inline.rs index 35a124875..aeb1db7dd 100644 --- a/scarb/src/compiler/plugin/proc_macro/host/inline.rs +++ b/scarb/src/compiler/plugin/proc_macro/host/inline.rs @@ -1,5 +1,5 @@ use crate::compiler::plugin::proc_macro::host::aux_data::{EmittedAuxData, ProcMacroAuxData}; -use crate::compiler::plugin::proc_macro::host::into_cairo_diagnostics; +use crate::compiler::plugin::proc_macro::host::{generate_code_mappings, into_cairo_diagnostics}; use crate::compiler::plugin::proc_macro::{ Expansion, ProcMacroId, ProcMacroInstance, TokenStreamBuilder, }; @@ -77,10 +77,11 @@ impl InlineMacroExprPlugin for ProcMacroInlinePlugin { DynGeneratedFileAuxData::new(emitted) }); let content = token_stream.to_string(); + let code_mappings = generate_code_mappings(&token_stream); InlinePluginResult { code: Some(PluginGeneratedFile { name: "inline_proc_macro".into(), - code_mappings: Vec::new(), + code_mappings, content, aux_data, diagnostics_note: Some(format!( diff --git a/scarb/src/compiler/plugin/proc_macro/host/mod.rs b/scarb/src/compiler/plugin/proc_macro/host/mod.rs index 97b80d660..e828e035b 100644 --- a/scarb/src/compiler/plugin/proc_macro/host/mod.rs +++ b/scarb/src/compiler/plugin/proc_macro/host/mod.rs @@ -15,7 +15,11 @@ use anyhow::{Context, Result, ensure}; use cairo_lang_defs::plugin::PluginDiagnostic; use cairo_lang_defs::plugin::{MacroPlugin, MacroPluginMetadata, PluginResult}; use cairo_lang_filesystem::db::Edition; -use cairo_lang_macro::{AllocationContext, Diagnostic, Severity, TokenStreamMetadata}; +use cairo_lang_filesystem::ids::{CodeMapping, CodeOrigin}; +use cairo_lang_filesystem::span::{TextOffset, TextSpan, TextWidth}; +use cairo_lang_macro::{ + AllocationContext, Diagnostic, Severity, TokenStream, TokenStreamMetadata, TokenTree, +}; use cairo_lang_semantic::plugin::PluginSuite; use cairo_lang_syntax::node::db::SyntaxGroup; use cairo_lang_syntax::node::ids::SyntaxStablePtrId; @@ -287,3 +291,30 @@ impl ProcMacroHost { &self.macros } } + +fn generate_code_mappings(token_stream: &TokenStream) -> Vec { + token_stream + .tokens + .iter() + .scan(TextOffset::default(), |current_pos, token| { + let TokenTree::Ident(token) = token; + let token_width = TextWidth::from_str(token.content.as_ref()); + + let mapping = CodeMapping { + span: TextSpan { + start: *current_pos, + end: current_pos.add_width(token_width), + }, + origin: CodeOrigin::Span(TextSpan { + start: TextOffset::default() + .add_width(TextWidth::new_for_testing(token.span.start)), + end: TextOffset::default() + .add_width(TextWidth::new_for_testing(token.span.end)), + }), + }; + 
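+            // Illustrative example: a token "foo" emitted at generated offset 0 that
+            // carries the source span 10..13 yields a mapping from the generated range
+            // 0..3 to `CodeOrigin::Span(10..13)`, so diagnostics raised against the
+            // generated file can be reported at the original source location.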
+ *current_pos = current_pos.add_width(token_width); + Some(mapping) + }) + .collect() +} diff --git a/scarb/tests/build_cairo_plugin.rs b/scarb/tests/build_cairo_plugin.rs index 67329a9a3..855126359 100644 --- a/scarb/tests/build_cairo_plugin.rs +++ b/scarb/tests/build_cairo_plugin.rs @@ -1007,7 +1007,7 @@ fn can_implement_derive_macro() { 32 }} }} - "#}; + "#}; let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( code.clone(), @@ -1671,6 +1671,202 @@ fn can_be_used_through_re_export() { ); } +#[test] +fn code_mappings_preserve_attribute_error_locations() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .lib_rs(indoc! {r#" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, TokenTree, Token, TextSpan}; + + #[attribute_macro] + pub fn some(_attr: TokenStream, mut token_stream: TokenStream) -> ProcMacroResult { + let token_stream_length = token_stream.to_string().len() as u32; + token_stream.tokens.push(TokenTree::Ident(Token::new(" ", TextSpan { start: token_stream_length + 1, end: token_stream_length + 5 }))); + ProcMacroResult::new(token_stream) + } + "#}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[some] + fn f() -> felt252 { + let x = 1; + x = 2; + x + } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .failure() + .stdout_matches(indoc! {r#" + [..] Compiling some v1.0.0 ([..]Scarb.toml) + [..] Compiling hello v1.0.0 ([..]Scarb.toml) + error: Cannot assign to an immutable variable. + --> [..]lib.cairo[proc_some]:3:5 + x = 2; + ^***^ + note: this error originates in the attribute macro: `some` + + error: could not compile `hello` due to previous error + "#}); +} + +#[test] +fn code_mappings_preserve_inline_macro_error_locations() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{inline_macro, ProcMacroResult, TokenStream, TokenTree, Token, TextSpan}; + + #[inline_macro] + pub fn some(_token_stream: TokenStream) -> ProcMacroResult { + let mut tokens = Vec::new(); + tokens.push(TokenTree::Ident(Token::new( + "undefined".to_string(), + TextSpan::new(0, 9), + ))); + + ProcMacroResult::new(TokenStream::new(tokens)) + } + "##}) + .build(&t); + + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + fn main() -> felt252 { + let _x = some!(); + 12 + } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .failure() + .stdout_matches(indoc! {r#" + [..] Compiling some v1.0.0 ([..]Scarb.toml) + [..] Compiling hello v1.0.0 ([..]Scarb.toml) + error: Identifier not found. + --> [..]lib.cairo:1:1 + fn main() -> felt252 { + ^*******^ + + error: could not compile `hello` due to previous error + "#}); +} + +#[test] +fn code_mappings_preserve_derive_error_locations() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .lib_rs(indoc! 
{r##" + use cairo_lang_macro::{derive_macro, ProcMacroResult, TokenStream, TokenTree, Token, TextSpan}; + + #[derive_macro] + pub fn custom_derive(token_stream: TokenStream) -> ProcMacroResult { + let name = token_stream + .clone() + .to_string() + .lines() + .find(|l| l.starts_with("struct")) + .unwrap() + .to_string() + .replace("struct", "") + .replace("}", "") + .replace("{", "") + .trim() + .to_string(); + + let code = indoc::formatdoc!{r#" + impl SomeImpl{name} of Hello<{name}> {{ + fn world(self: @{name}) -> u8 {{ + 256 + }} + }} + "#}; + + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + code.clone(), + TextSpan { + start: 0, + end: code.len() as u32, + }, + ))]); + + ProcMacroResult::new(token_stream) + } + "##}) + .add_dep(r#"indoc = "*""#) + .build(&t); + + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + trait Hello { + fn world(self: @T) -> u8; + } + + #[derive(CustomDerive, Drop)] + struct SomeType {} + + #[derive(CustomDerive, Drop)] + struct AnotherType {} + + fn main() -> u8 { + let a = SomeType {}; + a.world() + } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .failure() + .stdout_matches(indoc! {r#" + [..] Compiling some v1.0.0 ([..]Scarb.toml) + [..] Compiling hello v1.0.0 ([..]Scarb.toml) + error: The value does not fit within the range of type core::integer::u8. + --> [..]lib.cairo:1:1 + trait Hello { + ^**************^ + note: this error originates in the derive macro: `custom_derive` + + error: The value does not fit within the range of type core::integer::u8. + --> [..]lib.cairo:1:1 + trait Hello { + ^**************^ + note: this error originates in the derive macro: `custom_derive` + + error: could not compile `hello` due to previous error + "#}); +} + #[test] fn only_compiles_needed_macros() { let t = TempDir::new().unwrap(); From 156bc5db126a4e96f3b0b4f67fd47b935f22ed25 Mon Sep 17 00:00:00 2001 From: maciektr Date: Thu, 5 Dec 2024 10:56:10 +0100 Subject: [PATCH 11/38] Pass call site to macro expansion (#1749) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit commit-id:206643a1 --- **Stack**: - #1811 - #1810 - #1749 ⬅ ⚠️ *Part of a stack created by [spr](https://github.com/ejoffe/spr). 
Do not merge manually using the UI - doing so may have unexpected results.* --- plugins/cairo-lang-macro/Cargo.toml | 2 +- plugins/cairo-lang-macro/src/lib.rs | 7 +- plugins/cairo-lang-macro/src/types/token.rs | 22 ++++- scarb/src/compiler/plugin/proc_macro/ffi.rs | 16 +++- .../plugin/proc_macro/host/attribute.rs | 90 +++++++++--------- .../plugin/proc_macro/host/conversion.rs | 49 ++++++++++ .../compiler/plugin/proc_macro/host/derive.rs | 94 +++++++++++-------- .../compiler/plugin/proc_macro/host/inline.rs | 12 ++- .../compiler/plugin/proc_macro/host/mod.rs | 40 ++------ .../methods/expand_attribute.rs | 3 +- .../methods/expand_derive.rs | 9 +- .../methods/expand_inline.rs | 3 +- scarb/tests/proc_macro_prebuilt.rs | 1 + scarb/tests/proc_macro_server.rs | 3 + .../src/methods/expand.rs | 8 +- 15 files changed, 223 insertions(+), 136 deletions(-) create mode 100644 scarb/src/compiler/plugin/proc_macro/host/conversion.rs diff --git a/plugins/cairo-lang-macro/Cargo.toml b/plugins/cairo-lang-macro/Cargo.toml index 36af1a0ce..ad3991855 100644 --- a/plugins/cairo-lang-macro/Cargo.toml +++ b/plugins/cairo-lang-macro/Cargo.toml @@ -2,7 +2,7 @@ name = "cairo-lang-macro" version = "0.1.1" edition = "2021" -rust-version = "1.64" +rust-version = "1.73" authors.workspace = true categories = ["development-tools"] diff --git a/plugins/cairo-lang-macro/src/lib.rs b/plugins/cairo-lang-macro/src/lib.rs index 27296dc43..697c51d55 100644 --- a/plugins/cairo-lang-macro/src/lib.rs +++ b/plugins/cairo-lang-macro/src/lib.rs @@ -22,7 +22,7 @@ use std::cell::RefCell; use cairo_lang_macro_stable::ffi::StableSlice; use cairo_lang_macro_stable::{ - StableExpansionsList, StablePostProcessContext, StableProcMacroResult, + StableExpansionsList, StablePostProcessContext, StableProcMacroResult, StableTextSpan, }; use std::ffi::{CStr, CString, c_char}; use std::ops::Deref; @@ -34,6 +34,8 @@ pub use types::*; // A thread-local allocation context for allocating tokens on proc macro side. thread_local!(static CONTEXT: RefCell = RefCell::default() ); +thread_local!(static CALL_SITE: RefCell<(u32, u32)> = RefCell::default()); + #[doc(hidden)] #[derive(Clone)] pub struct ExpansionDefinition { @@ -99,6 +101,7 @@ pub unsafe extern "C" fn free_expansions_list(list: StableExpansionsList) { #[no_mangle] pub unsafe extern "C" fn expand( item_name: *const c_char, + call_site: StableTextSpan, stable_attr: cairo_lang_macro_stable::StableTokenStream, stable_token_stream: cairo_lang_macro_stable::StableTokenStream, ) -> cairo_lang_macro_stable::StableResultWrapper { @@ -111,6 +114,8 @@ pub unsafe extern "C" fn expand( ctx_cell.replace(AllocationContext::with_capacity(size_hint)); let ctx_borrow = ctx_cell.borrow(); let ctx: &AllocationContext = ctx_borrow.deref(); + // Set the call site for the current expand call. + CALL_SITE.replace((call_site.start, call_site.end)); // Copy the stable token stream into current context. 
let token_stream = TokenStream::from_stable_in(&stable_token_stream, ctx); let attr_token_stream = TokenStream::from_stable_in(&stable_attr, ctx); diff --git a/plugins/cairo-lang-macro/src/types/token.rs b/plugins/cairo-lang-macro/src/types/token.rs index f6cfc2cea..36cc5645b 100644 --- a/plugins/cairo-lang-macro/src/types/token.rs +++ b/plugins/cairo-lang-macro/src/types/token.rs @@ -1,4 +1,4 @@ -use crate::CONTEXT; +use crate::{CALL_SITE, CONTEXT}; use bumpalo::Bump; use std::fmt::{Debug, Display, Write}; use std::hash::{Hash, Hasher}; @@ -316,6 +316,26 @@ impl TextSpan { pub fn new(start: TextOffset, end: TextOffset) -> TextSpan { TextSpan { start, end } } + + /// Create a new [`TextSpan`], located at the invocation of the current procedural macro. + /// Identifiers created with this span will be resolved as if they were written directly at + /// the macro call location (call-site hygiene). + pub fn call_site() -> Self { + CALL_SITE.with(|call_site| { + let call_site = call_site.borrow(); + Self::new(call_site.0, call_site.1) + }) + } + + /// Create a new [`TextSpan`], with width `0`, located right before this span. + pub fn start(self) -> Self { + Self::new(self.start, self.start) + } + + /// Create a new [`TextSpan`], with width `0`, located right after this span. + pub fn end(self) -> Self { + Self::new(self.end, self.end) + } } impl Token { diff --git a/scarb/src/compiler/plugin/proc_macro/ffi.rs b/scarb/src/compiler/plugin/proc_macro/ffi.rs index ae1089737..a32c2243e 100644 --- a/scarb/src/compiler/plugin/proc_macro/ffi.rs +++ b/scarb/src/compiler/plugin/proc_macro/ffi.rs @@ -3,11 +3,11 @@ use crate::core::{Package, PackageId}; use anyhow::{Context, Result, ensure}; use cairo_lang_macro::{ ExpansionKind as SharedExpansionKind, FullPathMarker, PostProcessContext, ProcMacroResult, - TokenStream, + TextSpan, TokenStream, }; use cairo_lang_macro_stable::{ StableExpansion, StableExpansionsList, StablePostProcessContext, StableProcMacroResult, - StableResultWrapper, StableTokenStream, + StableResultWrapper, StableTextSpan, StableTokenStream, }; use camino::Utf8PathBuf; use itertools::Itertools; @@ -159,6 +159,7 @@ impl ProcMacroInstance { pub(crate) fn generate_code( &self, item_name: SmolStr, + call_site: TextSpan, attr: TokenStream, token_stream: TokenStream, ) -> ProcMacroResult { @@ -169,8 +170,9 @@ impl ProcMacroInstance { let item_name = CString::new(item_name.to_string()).unwrap().into_raw(); // Call FFI interface for code expansion. // Note that `stable_result` has been allocated by the dynamic library. + let call_site: StableTextSpan = call_site.into_stable(); let stable_result = - (self.plugin.vtable.expand)(item_name, stable_attr, stable_token_stream); + (self.plugin.vtable.expand)(item_name, call_site, stable_attr, stable_token_stream); // Free proc macro name. let _ = unsafe { CString::from_raw(item_name) }; // Free the memory allocated by the `stable_token_stream`. 
@@ -282,8 +284,12 @@ impl Expansion { type ListExpansions = extern "C" fn() -> StableExpansionsList; type FreeExpansionsList = extern "C" fn(StableExpansionsList); -type ExpandCode = - extern "C" fn(*const c_char, StableTokenStream, StableTokenStream) -> StableResultWrapper; +type ExpandCode = extern "C" fn( + *const c_char, + StableTextSpan, + StableTokenStream, + StableTokenStream, +) -> StableResultWrapper; type FreeResult = extern "C" fn(StableProcMacroResult); type PostProcessCallback = extern "C" fn(StablePostProcessContext) -> StablePostProcessContext; type DocExpansion = extern "C" fn(*const c_char) -> *mut c_char; diff --git a/scarb/src/compiler/plugin/proc_macro/host/attribute.rs b/scarb/src/compiler/plugin/proc_macro/host/attribute.rs index 95827f968..8360e76ef 100644 --- a/scarb/src/compiler/plugin/proc_macro/host/attribute.rs +++ b/scarb/src/compiler/plugin/proc_macro/host/attribute.rs @@ -1,5 +1,8 @@ use crate::compiler::plugin::proc_macro::host::aux_data::{EmittedAuxData, ProcMacroAuxData}; -use crate::compiler::plugin::proc_macro::host::{generate_code_mappings, into_cairo_diagnostics}; +use crate::compiler::plugin::proc_macro::host::conversion::{ + CallSiteLocation, into_cairo_diagnostics, +}; +use crate::compiler::plugin::proc_macro::host::generate_code_mappings; use crate::compiler::plugin::proc_macro::{ Expansion, ExpansionKind, ProcMacroHostPlugin, ProcMacroId, TokenStreamBuilder, }; @@ -12,7 +15,7 @@ use cairo_lang_syntax::attribute::structured::AttributeStructurize; use cairo_lang_syntax::node::ast::{ImplItem, MaybeImplBody, MaybeTraitBody}; use cairo_lang_syntax::node::db::SyntaxGroup; use cairo_lang_syntax::node::ids::SyntaxStablePtrId; -use cairo_lang_syntax::node::{TypedStablePtr, TypedSyntaxNode, ast}; +use cairo_lang_syntax::node::{TypedSyntaxNode, ast}; use itertools::Itertools; use smol_str::SmolStr; use std::collections::HashSet; @@ -166,22 +169,14 @@ impl ProcMacroHostPlugin { token_stream: TokenStream, ) -> bool { let mut all_none = true; - let (input, args, stable_ptr) = match found { - AttrExpansionFound::Last { - expansion, - args, - stable_ptr, - } => { + let input = match found { + AttrExpansionFound::Last(input) => { all_none = false; - (expansion, args, stable_ptr) + input } - AttrExpansionFound::Some { - expansion, - args, - stable_ptr, - } => { + AttrExpansionFound::Some(input) => { all_none = false; - (expansion, args, stable_ptr) + input } AttrExpansionFound::None => { item_builder.add_node(func.as_syntax_node()); @@ -189,13 +184,20 @@ impl ProcMacroHostPlugin { } }; - let result = self.instance(input.package_id).generate_code( - input.expansion.name.clone(), - args, + let result = self.instance(input.id.package_id).generate_code( + input.id.expansion.name.clone(), + input.call_site.span, + input.args, token_stream.clone(), ); - let expanded = context.register_result(token_stream.to_string(), input, result, stable_ptr); + let expanded = context.register_result( + token_stream.to_string(), + input.id, + result, + input.call_site.stable_ptr, + ); + item_builder.add_modified(RewriteNode::Mapped { origin: func.as_syntax_node().span(db), node: Box::new(RewriteNode::Text(expanded.to_string())), @@ -335,7 +337,11 @@ impl ProcMacroHostPlugin { let mut args_builder = TokenStreamBuilder::new(db); args_builder.add_node(attr.arguments(db).as_syntax_node()); let args = args_builder.build(ctx); - expansion = Some((found, args, attr.stable_ptr().untyped())); + expansion = Some(AttrExpansionArgs { + id: found, + args, + call_site: CallSiteLocation::new(&attr, 
db), + }); // Do not add the attribute for found expansion. continue; } else { @@ -346,16 +352,8 @@ impl ProcMacroHostPlugin { builder.add_node(attr.as_syntax_node()); } match (expansion, last) { - (Some((expansion, args, stable_ptr)), true) => AttrExpansionFound::Last { - expansion, - args, - stable_ptr, - }, - (Some((expansion, args, stable_ptr)), false) => AttrExpansionFound::Some { - expansion, - args, - stable_ptr, - }, + (Some(args), true) => AttrExpansionFound::Last(args), + (Some(args), false) => AttrExpansionFound::Some(args), (None, _) => AttrExpansionFound::None, } } @@ -366,11 +364,12 @@ impl ProcMacroHostPlugin { last: bool, args: TokenStream, token_stream: TokenStream, - stable_ptr: SyntaxStablePtrId, + call_site: CallSiteLocation, ) -> PluginResult { let original = token_stream.to_string(); let result = self.instance(input.package_id).generate_code( input.expansion.name.clone(), + call_site.span, args, token_stream, ); @@ -379,7 +378,7 @@ impl ProcMacroHostPlugin { if result.token_stream.is_empty() { // Remove original code return PluginResult { - diagnostics: into_cairo_diagnostics(result.diagnostics, stable_ptr), + diagnostics: into_cairo_diagnostics(result.diagnostics, call_site.stable_ptr), code: None, remove_original_item: true, }; @@ -402,7 +401,7 @@ impl ProcMacroHostPlugin { return PluginResult { code: None, remove_original_item: false, - diagnostics: into_cairo_diagnostics(result.diagnostics, stable_ptr), + diagnostics: into_cairo_diagnostics(result.diagnostics, call_site.stable_ptr), }; } @@ -425,31 +424,30 @@ impl ProcMacroHostPlugin { ))) }), }), - diagnostics: into_cairo_diagnostics(result.diagnostics, stable_ptr), + diagnostics: into_cairo_diagnostics(result.diagnostics, call_site.stable_ptr), remove_original_item: true, } } } pub enum AttrExpansionFound { - Some { - expansion: ProcMacroId, - args: TokenStream, - stable_ptr: SyntaxStablePtrId, - }, + Some(AttrExpansionArgs), + Last(AttrExpansionArgs), None, - Last { - expansion: ProcMacroId, - args: TokenStream, - stable_ptr: SyntaxStablePtrId, - }, +} + +pub struct AttrExpansionArgs { + pub id: ProcMacroId, + pub args: TokenStream, + pub call_site: CallSiteLocation, } impl AttrExpansionFound { pub fn as_name(&self) -> Option { match self { - AttrExpansionFound::Some { expansion, .. } - | AttrExpansionFound::Last { expansion, .. 
} => Some(expansion.expansion.name.clone()), + AttrExpansionFound::Some(args) | AttrExpansionFound::Last(args) => { + Some(args.id.expansion.name.clone()) + } AttrExpansionFound::None => None, } } diff --git a/scarb/src/compiler/plugin/proc_macro/host/conversion.rs b/scarb/src/compiler/plugin/proc_macro/host/conversion.rs new file mode 100644 index 000000000..aef4ac5f6 --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/host/conversion.rs @@ -0,0 +1,49 @@ +use cairo_lang_defs::plugin::PluginDiagnostic; +use cairo_lang_macro::{Diagnostic, Severity, TextSpan}; +use cairo_lang_syntax::node::db::SyntaxGroup; +use cairo_lang_syntax::node::ids::SyntaxStablePtrId; +use cairo_lang_syntax::node::{TypedStablePtr, TypedSyntaxNode}; +use itertools::Itertools; + +pub trait SpanSource { + fn text_span(&self, db: &dyn SyntaxGroup) -> TextSpan; +} + +impl SpanSource for T { + fn text_span(&self, db: &dyn SyntaxGroup) -> TextSpan { + let node = self.as_syntax_node(); + let span = node.span(db); + TextSpan::new(span.start.as_u32(), span.end.as_u32()) + } +} + +pub struct CallSiteLocation { + pub stable_ptr: SyntaxStablePtrId, + pub span: TextSpan, +} + +impl CallSiteLocation { + pub fn new(node: &T, db: &dyn SyntaxGroup) -> Self { + Self { + stable_ptr: node.stable_ptr().untyped(), + span: node.text_span(db), + } + } +} + +pub fn into_cairo_diagnostics( + diagnostics: Vec, + stable_ptr: SyntaxStablePtrId, +) -> Vec { + diagnostics + .into_iter() + .map(|diag| PluginDiagnostic { + stable_ptr, + message: diag.message, + severity: match diag.severity { + Severity::Error => cairo_lang_diagnostics::Severity::Error, + Severity::Warning => cairo_lang_diagnostics::Severity::Warning, + }, + }) + .collect_vec() +} diff --git a/scarb/src/compiler/plugin/proc_macro/host/derive.rs b/scarb/src/compiler/plugin/proc_macro/host/derive.rs index 93295bd8f..1f4cdb237 100644 --- a/scarb/src/compiler/plugin/proc_macro/host/derive.rs +++ b/scarb/src/compiler/plugin/proc_macro/host/derive.rs @@ -1,7 +1,8 @@ use crate::compiler::plugin::proc_macro::host::aux_data::{EmittedAuxData, ProcMacroAuxData}; -use crate::compiler::plugin::proc_macro::host::{ - DERIVE_ATTR, generate_code_mappings, into_cairo_diagnostics, +use crate::compiler::plugin::proc_macro::host::conversion::{ + CallSiteLocation, into_cairo_diagnostics, }; +use crate::compiler::plugin::proc_macro::host::{DERIVE_ATTR, generate_code_mappings}; use crate::compiler::plugin::proc_macro::{ Expansion, ExpansionKind, ProcMacroHostPlugin, ProcMacroId, TokenStreamBuilder, }; @@ -13,7 +14,7 @@ use cairo_lang_syntax::attribute::structured::{AttributeArgVariant, AttributeStr use cairo_lang_syntax::node::ast::{Expr, PathSegment}; use cairo_lang_syntax::node::db::SyntaxGroup; use cairo_lang_syntax::node::helpers::QueryAttrs; -use cairo_lang_syntax::node::{Terminal, TypedStablePtr, TypedSyntaxNode, ast}; +use cairo_lang_syntax::node::{Terminal, TypedSyntaxNode, ast}; use convert_case::{Case, Casing}; use itertools::Itertools; @@ -21,7 +22,7 @@ impl ProcMacroHostPlugin { /// Handle `#[derive(...)]` attribute. /// /// Returns a list of expansions that this plugin should apply. 
- fn parse_derive(&self, db: &dyn SyntaxGroup, item_ast: ast::ModuleItem) -> Vec { + fn parse_derive(&self, db: &dyn SyntaxGroup, item_ast: ast::ModuleItem) -> Vec { let attrs = match item_ast { ast::ModuleItem::Struct(struct_ast) => Some(struct_ast.query_attr(db, DERIVE_ATTR)), ast::ModuleItem::Enum(enum_ast) => Some(enum_ast.query_attr(db, DERIVE_ATTR)), @@ -52,6 +53,10 @@ impl ProcMacroHostPlugin { value.to_case(Case::Snake), ExpansionKind::Derive, )) + .map(|id| DeriveFound { + id, + call_site: CallSiteLocation::new(segment, db), + }) }) .collect_vec() } @@ -62,7 +67,6 @@ impl ProcMacroHostPlugin { item_ast: ast::ModuleItem, stream_metadata: TokenStreamMetadata, ) -> Option { - let stable_ptr = item_ast.clone().stable_ptr().untyped(); let mut token_stream_builder = TokenStreamBuilder::new(db); token_stream_builder.add_node(item_ast.as_syntax_node()); token_stream_builder.with_metadata(stream_metadata.clone()); @@ -71,7 +75,14 @@ impl ProcMacroHostPlugin { // All derives to be applied. let derives = self.parse_derive(db, item_ast.clone()); - let any_derives = !derives.is_empty(); + + if derives.is_empty() { + // No derives found - returning early. + return None; + } + + // We use call site of first derive found. + let stable_ptr = derives[0].call_site.stable_ptr; let ctx = AllocationContext::default(); let mut derived_code = String::new(); @@ -79,9 +90,12 @@ impl ProcMacroHostPlugin { let mut current_width = TextWidth::default(); for derive in derives.iter() { + let call_site = &derive.call_site; + let derive = &derive.id; let token_stream = token_stream_builder.build(&ctx); let result = self.instance(derive.package_id).generate_code( derive.expansion.name.clone(), + call_site.span.clone(), TokenStream::empty(), token_stream, ); @@ -111,44 +125,46 @@ impl ProcMacroHostPlugin { derived_code.push_str(&result.token_stream.to_string()); } - if any_derives { - return Some(PluginResult { - code: if derived_code.is_empty() { - None + Some(PluginResult { + code: if derived_code.is_empty() { + None + } else { + let msg = if derives.len() == 1 { + "the derive macro" } else { - let msg = if derives.len() == 1 { - "the derive macro" - } else { - "one of the derive macros" - }; - let derive_names = derives - .iter() - .map(|derive| derive.expansion.name.to_string()) - .join("`, `"); - let note = format!("this error originates in {msg}: `{derive_names}`"); - Some(PluginGeneratedFile { - name: "proc_macro_derive".into(), - code_mappings, - content: derived_code, - aux_data: if aux_data.is_empty() { - None - } else { - Some(DynGeneratedFileAuxData::new(aux_data)) - }, - diagnostics_note: Some(note), - }) - }, - diagnostics: into_cairo_diagnostics(all_diagnostics, stable_ptr), - // Note that we don't remove the original item here, unlike for attributes. - // We do not add the original code to the generated file either. - remove_original_item: false, - }); - } + "one of the derive macros" + }; + let derive_names = derives + .iter() + .map(|derive| derive.id.expansion.name.to_string()) + .join("`, `"); + let note = format!("this error originates in {msg}: `{derive_names}`"); - None + Some(PluginGeneratedFile { + name: "proc_macro_derive".into(), + code_mappings, + content: derived_code, + diagnostics_note: Some(note), + aux_data: if aux_data.is_empty() { + None + } else { + Some(DynGeneratedFileAuxData::new(aux_data)) + }, + }) + }, + diagnostics: into_cairo_diagnostics(all_diagnostics, stable_ptr), + // Note that we don't remove the original item here, unlike for attributes. 
+ // We do not add the original code to the generated file either. + remove_original_item: false, + }) } } +struct DeriveFound { + id: ProcMacroId, + call_site: CallSiteLocation, +} + fn generate_code_mappings_with_offset( token_stream: &TokenStream, offset: TextWidth, diff --git a/scarb/src/compiler/plugin/proc_macro/host/inline.rs b/scarb/src/compiler/plugin/proc_macro/host/inline.rs index aeb1db7dd..13c9843c4 100644 --- a/scarb/src/compiler/plugin/proc_macro/host/inline.rs +++ b/scarb/src/compiler/plugin/proc_macro/host/inline.rs @@ -1,5 +1,8 @@ use crate::compiler::plugin::proc_macro::host::aux_data::{EmittedAuxData, ProcMacroAuxData}; -use crate::compiler::plugin::proc_macro::host::{generate_code_mappings, into_cairo_diagnostics}; +use crate::compiler::plugin::proc_macro::host::conversion::{ + CallSiteLocation, into_cairo_diagnostics, +}; +use crate::compiler::plugin::proc_macro::host::generate_code_mappings; use crate::compiler::plugin::proc_macro::{ Expansion, ProcMacroId, ProcMacroInstance, TokenStreamBuilder, }; @@ -9,7 +12,7 @@ use cairo_lang_defs::plugin::{ }; use cairo_lang_macro::{AllocationContext, TokenStream}; use cairo_lang_syntax::node::db::SyntaxGroup; -use cairo_lang_syntax::node::{TypedStablePtr, TypedSyntaxNode, ast}; +use cairo_lang_syntax::node::{TypedSyntaxNode, ast}; use std::sync::{Arc, OnceLock}; /// A Cairo compiler inline macro plugin controlling the inline procedural macro execution. @@ -45,19 +48,20 @@ impl InlineMacroExprPlugin for ProcMacroInlinePlugin { syntax: &ast::ExprInlineMacro, _metadata: &MacroPluginMetadata<'_>, ) -> InlinePluginResult { + let call_site = CallSiteLocation::new(syntax, db); let ctx = AllocationContext::default(); - let stable_ptr = syntax.clone().stable_ptr().untyped(); let arguments = syntax.arguments(db); let mut token_stream_builder = TokenStreamBuilder::new(db); token_stream_builder.add_node(arguments.as_syntax_node()); let token_stream = token_stream_builder.build(&ctx); let result = self.instance().generate_code( self.expansion.name.clone(), + call_site.span, TokenStream::empty(), token_stream, ); // Handle diagnostics. 
- let diagnostics = into_cairo_diagnostics(result.diagnostics, stable_ptr); + let diagnostics = into_cairo_diagnostics(result.diagnostics, call_site.stable_ptr); let token_stream = result.token_stream.clone(); if token_stream.is_empty() { // Remove original code diff --git a/scarb/src/compiler/plugin/proc_macro/host/mod.rs b/scarb/src/compiler/plugin/proc_macro/host/mod.rs index e828e035b..6d2ebf2d2 100644 --- a/scarb/src/compiler/plugin/proc_macro/host/mod.rs +++ b/scarb/src/compiler/plugin/proc_macro/host/mod.rs @@ -1,5 +1,6 @@ mod attribute; mod aux_data; +mod conversion; mod derive; mod inline; mod post; @@ -12,17 +13,13 @@ use crate::compiler::plugin::proc_macro::compilation::SharedLibraryProvider; use crate::compiler::plugin::proc_macro::{Expansion, ExpansionKind, ProcMacroInstance}; use crate::core::{Config, Package, PackageId, edition_variant}; use anyhow::{Context, Result, ensure}; -use cairo_lang_defs::plugin::PluginDiagnostic; use cairo_lang_defs::plugin::{MacroPlugin, MacroPluginMetadata, PluginResult}; use cairo_lang_filesystem::db::Edition; use cairo_lang_filesystem::ids::{CodeMapping, CodeOrigin}; use cairo_lang_filesystem::span::{TextOffset, TextSpan, TextWidth}; -use cairo_lang_macro::{ - AllocationContext, Diagnostic, Severity, TokenStream, TokenStreamMetadata, TokenTree, -}; +use cairo_lang_macro::{AllocationContext, TokenStream, TokenStreamMetadata, TokenTree}; use cairo_lang_semantic::plugin::PluginSuite; use cairo_lang_syntax::node::db::SyntaxGroup; -use cairo_lang_syntax::node::ids::SyntaxStablePtrId; use cairo_lang_syntax::node::{TypedStablePtr, TypedSyntaxNode, ast}; use convert_case::{Case, Casing}; use itertools::Itertools; @@ -186,21 +183,13 @@ impl MacroPlugin for ProcMacroHostPlugin { let (input, body) = self.parse_attribute(db, item_ast.clone(), &ctx); if let Some(result) = match input { - AttrExpansionFound::Last { - expansion, - args, - stable_ptr, - } => Some((expansion, args, stable_ptr, true)), - AttrExpansionFound::Some { - expansion, - args, - stable_ptr, - } => Some((expansion, args, stable_ptr, false)), + AttrExpansionFound::Last(input) => Some((input, true)), + AttrExpansionFound::Some(input) => Some((input, false)), AttrExpansionFound::None => None, } - .map(|(expansion, args, stable_ptr, last)| { + .map(|(input, last)| { let token_stream = body.with_metadata(stream_metadata.clone()); - self.expand_attribute(expansion, last, args, token_stream, stable_ptr) + self.expand_attribute(input.id, last, input.args, token_stream, input.call_site) }) { return result; } @@ -243,23 +232,6 @@ impl MacroPlugin for ProcMacroHostPlugin { } } -fn into_cairo_diagnostics( - diagnostics: Vec, - stable_ptr: SyntaxStablePtrId, -) -> Vec { - diagnostics - .into_iter() - .map(|diag| PluginDiagnostic { - stable_ptr, - message: diag.message, - severity: match diag.severity { - Severity::Error => cairo_lang_diagnostics::Severity::Error, - Severity::Warning => cairo_lang_diagnostics::Severity::Warning, - }, - }) - .collect_vec() -} - /// A Scarb wrapper around the `ProcMacroHost` compiler plugin. /// /// This struct represent the compiler plugin in terms of Scarb data model. 
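---

The proc macro server handlers below start consuming a `call_site` span from the request parameters. As a rough illustration (not part of the patch), a client of the server could populate the new field as sketched here, assuming the `ExpandAttributeParams` field set shown in this series; the attribute name `rename` and the item text are made up.

```rust
use cairo_lang_macro::{TextSpan, Token, TokenStream, TokenTree};
use scarb_proc_macro_server_types::methods::expand::ExpandAttributeParams;
use scarb_proc_macro_server_types::scope::ProcMacroScope;

// Builds an attribute-expansion request. `attr_span` should be the span of the
// `#[rename]` attribute in the client's source; spans created on the macro
// side with `TextSpan::call_site()` will then map back to that location.
fn rename_request(context: ProcMacroScope, attr_span: TextSpan) -> ExpandAttributeParams {
    let item_text = "fn old_name() {}".to_string();
    ExpandAttributeParams {
        context,
        attr: "rename".to_string(),
        args: TokenStream::empty(),
        item: TokenStream::new(vec![TokenTree::Ident(Token::new(
            item_text.clone(),
            TextSpan::new(0, item_text.len() as u32),
        ))]),
        call_site: attr_span,
    }
}
```

---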
diff --git a/scarb/src/ops/proc_macro_server/methods/expand_attribute.rs b/scarb/src/ops/proc_macro_server/methods/expand_attribute.rs index 1add4f682..1746a6e43 100644 --- a/scarb/src/ops/proc_macro_server/methods/expand_attribute.rs +++ b/scarb/src/ops/proc_macro_server/methods/expand_attribute.rs @@ -16,6 +16,7 @@ impl Handler for ExpandAttribute { attr, args, item, + call_site, } = params; let plugin = workspace_macros @@ -34,7 +35,7 @@ impl Handler for ExpandAttribute { }) .with_context(|| format!("Unsupported attribute: {attr}"))?; - let result = instance.generate_code(attr.into(), args, item); + let result = instance.generate_code(attr.into(), call_site, args, item); Ok(ProcMacroResult { token_stream: result.token_stream, diff --git a/scarb/src/ops/proc_macro_server/methods/expand_derive.rs b/scarb/src/ops/proc_macro_server/methods/expand_derive.rs index 7a26f8594..705698eea 100644 --- a/scarb/src/ops/proc_macro_server/methods/expand_derive.rs +++ b/scarb/src/ops/proc_macro_server/methods/expand_derive.rs @@ -20,6 +20,7 @@ impl Handler for ExpandDerive { context, derives, item, + call_site, } = params; let mut derived_code = TokenStream::empty(); @@ -38,8 +39,12 @@ impl Handler for ExpandDerive { .find(|instance| instance.get_expansions().contains(&expansion)) .with_context(|| format!("Unsupported derive macro: {derive}"))?; - let result = - instance.generate_code(expansion.name.clone(), TokenStream::empty(), item.clone()); + let result = instance.generate_code( + expansion.name.clone(), + call_site.clone(), + TokenStream::empty(), + item.clone(), + ); // Register diagnostics. all_diagnostics.extend(result.diagnostics); diff --git a/scarb/src/ops/proc_macro_server/methods/expand_inline.rs b/scarb/src/ops/proc_macro_server/methods/expand_inline.rs index bbf4f819a..14298b49e 100644 --- a/scarb/src/ops/proc_macro_server/methods/expand_inline.rs +++ b/scarb/src/ops/proc_macro_server/methods/expand_inline.rs @@ -16,6 +16,7 @@ impl Handler for ExpandInline { context, name, args, + call_site, } = params; let plugin = workspace_macros @@ -34,7 +35,7 @@ impl Handler for ExpandInline { }) .with_context(|| format!("Unsupported inline macro: {name}"))?; - let result = instance.generate_code(name.into(), TokenStream::empty(), args); + let result = instance.generate_code(name.into(), call_site, TokenStream::empty(), args); Ok(ProcMacroResult { token_stream: result.token_stream, diff --git a/scarb/tests/proc_macro_prebuilt.rs b/scarb/tests/proc_macro_prebuilt.rs index ddae735da..f14185e4c 100644 --- a/scarb/tests/proc_macro_prebuilt.rs +++ b/scarb/tests/proc_macro_prebuilt.rs @@ -230,6 +230,7 @@ fn load_prebuilt_proc_macros() { "42", TextSpan::new(0, 0), ))]), + call_site: TextSpan::new(0, 0), }) .unwrap(); diff --git a/scarb/tests/proc_macro_server.rs b/scarb/tests/proc_macro_server.rs index 46370d292..f31bd97cb 100644 --- a/scarb/tests/proc_macro_server.rs +++ b/scarb/tests/proc_macro_server.rs @@ -94,6 +94,7 @@ fn expand_attribute() { context: ProcMacroScope { component }, attr: "rename_to_very_new_name".to_string(), args: TokenStream::empty(), + call_site: TextSpan::new(0, 0), item: TokenStream::new(vec![TokenTree::Ident(Token::new( "fn some_test_fn(){}", TextSpan::new(0, 0), @@ -141,6 +142,7 @@ fn expand_derive() { .request_and_wait::(ExpandDeriveParams { context: ProcMacroScope { component }, derives: vec!["some_derive".to_string()], + call_site: TextSpan::new(0, 0), item, }) .unwrap(); @@ -195,6 +197,7 @@ fn expand_inline() { .request_and_wait::(ExpandInlineMacroParams { context: 
ProcMacroScope { component }, name: "replace_all_15_with_25".to_string(), + call_site: TextSpan::new(0, 0), args: TokenStream::new(vec![TokenTree::Ident(Token::new( "struct A { field: 15 , other_field: macro_call!(12)}", TextSpan::new(0, 0), diff --git a/utils/scarb-proc-macro-server-types/src/methods/expand.rs b/utils/scarb-proc-macro-server-types/src/methods/expand.rs index faa60fd1b..f33af1562 100644 --- a/utils/scarb-proc-macro-server-types/src/methods/expand.rs +++ b/utils/scarb-proc-macro-server-types/src/methods/expand.rs @@ -2,7 +2,7 @@ use crate::scope::ProcMacroScope; use super::Method; use super::ProcMacroResult; -use cairo_lang_macro::TokenStream; +use cairo_lang_macro::{TextSpan, TokenStream}; use serde::{Deserialize, Serialize}; /// Parameters for expanding a specific attribute macro. @@ -19,6 +19,8 @@ pub struct ExpandAttributeParams { pub args: TokenStream, /// The token stream representing the item on which the macro is applied. pub item: TokenStream, + // Call site span. + pub call_site: TextSpan, } /// Represents a request to expand a single attribute macro. @@ -42,6 +44,8 @@ pub struct ExpandDeriveParams { pub derives: Vec, /// The token stream of the item to which the derive macros are applied. pub item: TokenStream, + // Call site span. + pub call_site: TextSpan, } /// Represents a request to expand derive macros. @@ -65,6 +69,8 @@ pub struct ExpandInlineMacroParams { pub name: String, /// The token stream representing arguments passed to the macro. pub args: TokenStream, + // Call site span. + pub call_site: TextSpan, } /// Represents a request to expand a single inline macro. From b1489fa56f02098077e0bd4b5956c31d1fd42f04 Mon Sep 17 00:00:00 2001 From: Mateusz Kowalski Date: Thu, 5 Dec 2024 10:57:05 +0100 Subject: [PATCH 12/38] Quote macro (#1808) (#1810) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: dependabot[bot] Co-authored-by: Mateusz Kowalski Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Maksim Zdobnikau <43750648+DelevoXDG@users.noreply.github.com> commit-id:face0a80 --- **Stack**: - #1811 - #1810 ⬅ ⚠️ *Part of a stack created by [spr](https://github.com/ejoffe/spr). 
Do not merge manually using the UI - doing so may have unexpected results.* --- Cargo.lock | 10 + Cargo.toml | 1 + plugins/cairo-lang-macro/Cargo.toml | 2 + plugins/cairo-lang-macro/src/lib.rs | 4 +- plugins/cairo-lang-macro/src/types/token.rs | 76 ++++ plugins/cairo-lang-quote/Cargo.toml | 19 + plugins/cairo-lang-quote/src/lib.rs | 115 ++++++ scarb/tests/proc_macro_quote.rs | 370 ++++++++++++++++++ .../src/cairo_plugin_project_builder.rs | 4 + 9 files changed, 600 insertions(+), 1 deletion(-) create mode 100644 plugins/cairo-lang-quote/Cargo.toml create mode 100644 plugins/cairo-lang-quote/src/lib.rs create mode 100644 scarb/tests/proc_macro_quote.rs diff --git a/Cargo.lock b/Cargo.lock index 05d1a8db7..26178132d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -949,6 +949,8 @@ dependencies = [ "bumpalo", "cairo-lang-macro-attributes 0.1.0", "cairo-lang-macro-stable 1.0.0", + "cairo-lang-primitive-token 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cairo-lang-quote", "linkme", "serde", "serde_json", @@ -1062,6 +1064,14 @@ dependencies = [ "toml", ] +[[package]] +name = "cairo-lang-quote" +version = "0.1.0" +dependencies = [ + "proc-macro2", + "quote", +] + [[package]] name = "cairo-lang-runnable-utils" version = "2.11.2" diff --git a/Cargo.toml b/Cargo.toml index 074dc4909..75ace73c3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,6 +12,7 @@ members = [ "plugins/cairo-lang-macro", "plugins/cairo-lang-macro-attributes", "plugins/cairo-lang-macro-stable", + "plugins/cairo-lang-quote", "utils/create-output-dir", "utils/once-map", "utils/scarb-proc-macro-server-types", diff --git a/plugins/cairo-lang-macro/Cargo.toml b/plugins/cairo-lang-macro/Cargo.toml index ad3991855..4fe886e96 100644 --- a/plugins/cairo-lang-macro/Cargo.toml +++ b/plugins/cairo-lang-macro/Cargo.toml @@ -17,6 +17,8 @@ repository.workspace = true bumpalo.workspace = true cairo-lang-macro-attributes = { path = "../cairo-lang-macro-attributes" } cairo-lang-macro-stable = { path = "../cairo-lang-macro-stable" } +cairo-lang-primitive-token = "1.0.0" +cairo-lang-quote = { path = "../cairo-lang-quote", version = "0.1.0" } linkme.workspace = true serde = { workspace = true, optional = true } diff --git a/plugins/cairo-lang-macro/src/lib.rs b/plugins/cairo-lang-macro/src/lib.rs index 697c51d55..f1c34da98 100644 --- a/plugins/cairo-lang-macro/src/lib.rs +++ b/plugins/cairo-lang-macro/src/lib.rs @@ -16,8 +16,11 @@ //! pub use cairo_lang_macro_attributes::*; +pub use cairo_lang_quote::*; + #[doc(hidden)] pub use linkme; + use std::cell::RefCell; use cairo_lang_macro_stable::ffi::StableSlice; @@ -28,7 +31,6 @@ use std::ffi::{CStr, CString, c_char}; use std::ops::Deref; mod types; - pub use types::*; // A thread-local allocation context for allocating tokens on proc macro side. diff --git a/plugins/cairo-lang-macro/src/types/token.rs b/plugins/cairo-lang-macro/src/types/token.rs index 36cc5645b..b9d0ca4b8 100644 --- a/plugins/cairo-lang-macro/src/types/token.rs +++ b/plugins/cairo-lang-macro/src/types/token.rs @@ -1,9 +1,12 @@ use crate::{CALL_SITE, CONTEXT}; use bumpalo::Bump; +use cairo_lang_primitive_token::{PrimitiveSpan, PrimitiveToken, ToPrimitiveTokenStream}; use std::fmt::{Debug, Display, Write}; use std::hash::{Hash, Hasher}; +use std::iter::{once, Map, Once}; use std::ops::Deref; use std::rc::Rc; +use std::vec::IntoIter; /// An abstract stream of Cairo tokens. 
/// @@ -278,6 +281,46 @@ impl TokenStream { pub fn is_empty(&self) -> bool { self.tokens.is_empty() } + + pub fn from_primitive_token_stream( + stable_token_stream: impl Iterator, + ) -> Self { + Self::new( + stable_token_stream + .map(|stable_token| { + TokenTree::Ident(Token::new( + stable_token.content, + stable_token + .span + .map(|stable_span| TextSpan { + start: stable_span.start as u32, + end: stable_span.end as u32, + }) + .unwrap_or(TextSpan::call_site()), + )) + }) + .collect(), + ) + } + + pub fn push_token(&mut self, token_tree: TokenTree) { + self.tokens.push(token_tree); + } +} + +impl IntoIterator for TokenStream { + type Item = TokenTree; + type IntoIter = IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.tokens.into_iter() + } +} + +impl Extend for TokenStream { + fn extend>(&mut self, iter: T) { + self.tokens.extend(iter); + } } impl Display for TokenStream { @@ -355,6 +398,39 @@ impl Token { } } +impl ToPrimitiveTokenStream for TokenStream { + type Iter = Map, fn(TokenTree) -> PrimitiveToken>; + fn to_primitive_token_stream(&self) -> Self::Iter { + self.tokens + .clone() + .into_iter() + .map(|token_tree| match token_tree { + TokenTree::Ident(token) => PrimitiveToken::new( + token.content.to_string(), + Some(PrimitiveSpan { + start: token.span.start as usize, + end: token.span.end as usize, + }), + ), + }) + } +} + +impl ToPrimitiveTokenStream for TokenTree { + type Iter = Once; + fn to_primitive_token_stream(&self) -> Self::Iter { + once(match self { + TokenTree::Ident(token) => PrimitiveToken::new( + token.content.to_string(), + Some(PrimitiveSpan { + start: token.span.start as usize, + end: token.span.end as usize, + }), + ), + }) + } +} + #[cfg(test)] mod test { use crate::{AllocationContext, TextSpan, Token, TokenStream, TokenTree}; diff --git a/plugins/cairo-lang-quote/Cargo.toml b/plugins/cairo-lang-quote/Cargo.toml new file mode 100644 index 000000000..7249b2a57 --- /dev/null +++ b/plugins/cairo-lang-quote/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "cairo-lang-quote" +version = "0.1.0" +edition.workspace = true + +authors.workspace = true +categories = ["development-tools"] +description = "Cairo procedural macro helper for constructing procedural macro results." 
+homepage.workspace = true +keywords = ["scarb"] +license.workspace = true +repository.workspace = true + +[lib] +proc-macro = true + +[dependencies] +proc-macro2.workspace = true +quote.workspace = true diff --git a/plugins/cairo-lang-quote/src/lib.rs b/plugins/cairo-lang-quote/src/lib.rs new file mode 100644 index 000000000..332bcb98d --- /dev/null +++ b/plugins/cairo-lang-quote/src/lib.rs @@ -0,0 +1,115 @@ +use std::iter::Peekable; + +use proc_macro::{Delimiter, TokenStream as RustTokenStream, TokenTree as RustTokenTree}; +use proc_macro2::{Ident, Span}; + +extern crate proc_macro; +use quote::quote as rust_quote; + +#[derive(Debug)] +enum QuoteToken { + Var(Ident), + Content(String), + Whitespace, +} + +enum DelimiterVariant { + Open, + Close, +} + +impl QuoteToken { + pub fn from_delimiter(delimiter: Delimiter, variant: DelimiterVariant) -> Self { + match (delimiter, variant) { + (Delimiter::Brace, DelimiterVariant::Open) => Self::Content("{".to_string()), + (Delimiter::Brace, DelimiterVariant::Close) => Self::Content("}".to_string()), + (Delimiter::Bracket, DelimiterVariant::Open) => Self::Content("[".to_string()), + (Delimiter::Bracket, DelimiterVariant::Close) => Self::Content("]".to_string()), + (Delimiter::Parenthesis, DelimiterVariant::Open) => Self::Content("(".to_string()), + (Delimiter::Parenthesis, DelimiterVariant::Close) => Self::Content(")".to_string()), + (Delimiter::None, _) => Self::Content(String::default()), + } + } +} + +fn process_token_stream( + mut token_stream: Peekable>, + output: &mut Vec, +) { + // Rust proc macro parser to TokenStream gets rid of all whitespaces. + // Here we just make sure no two identifiers are without a space between them. + let mut was_previous_ident: bool = false; + while let Some(token_tree) = token_stream.next() { + match token_tree { + RustTokenTree::Group(group) => { + let token_iter = group.stream().into_iter().peekable(); + let delimiter = group.delimiter(); + output.push(QuoteToken::from_delimiter( + delimiter, + DelimiterVariant::Open, + )); + process_token_stream(token_iter, output); + output.push(QuoteToken::from_delimiter( + delimiter, + DelimiterVariant::Close, + )); + was_previous_ident = false; + } + RustTokenTree::Punct(punct) => { + if punct.as_char() == '#' { + if let Some(RustTokenTree::Ident(ident)) = token_stream.next() { + let var_ident = Ident::new(&ident.to_string(), Span::call_site()); + output.push(QuoteToken::Var(var_ident)) + } + } else { + output.push(QuoteToken::Content(punct.to_string())); + } + was_previous_ident = false; + } + RustTokenTree::Ident(ident) => { + if was_previous_ident { + output.push(QuoteToken::Whitespace); + } + output.push(QuoteToken::Content(ident.to_string())); + was_previous_ident = true; + } + RustTokenTree::Literal(literal) => { + output.push(QuoteToken::Content(literal.to_string())); + was_previous_ident = false; + } + } + } +} + +#[proc_macro] +pub fn quote(input: RustTokenStream) -> RustTokenStream { + let mut parsed_input: Vec = Vec::new(); + let mut output_token_stream = rust_quote! { + let mut quote_macro_result = ::cairo_lang_macro::TokenStream::empty(); + }; + + let token_iter = input.into_iter().peekable(); + process_token_stream(token_iter, &mut parsed_input); + + for quote_token in parsed_input.iter() { + match quote_token { + QuoteToken::Content(content) => { + output_token_stream.extend(rust_quote! 
{ + quote_macro_result.push_token(::cairo_lang_macro::TokenTree::Ident(::cairo_lang_macro::Token::new(::std::string::ToString::to_string(#content), ::cairo_lang_macro::TextSpan::call_site()))); + }); + } + QuoteToken::Var(ident) => { + output_token_stream.extend(rust_quote! { + quote_macro_result.extend(::cairo_lang_macro::TokenStream::from_primitive_token_stream(::cairo_lang_primitive_token::ToPrimitiveTokenStream::to_primitive_token_stream(&#ident)).into_iter()); + }); + } + QuoteToken::Whitespace => output_token_stream.extend(rust_quote! { + quote_macro_result.push_token(::cairo_lang_macro::TokenTree::Ident(::cairo_lang_macro::Token::new(" ".to_string(), ::cairo_lang_macro::TextSpan::call_site()))); + }), + } + } + RustTokenStream::from(rust_quote!({ + #output_token_stream + quote_macro_result + })) +} diff --git a/scarb/tests/proc_macro_quote.rs b/scarb/tests/proc_macro_quote.rs new file mode 100644 index 000000000..f20ac4133 --- /dev/null +++ b/scarb/tests/proc_macro_quote.rs @@ -0,0 +1,370 @@ +use assert_fs::fixture::PathChild; +use assert_fs::TempDir; +use indoc::indoc; +use scarb_test_support::cairo_plugin_project_builder::CairoPluginProjectBuilder; +use scarb_test_support::command::Scarb; +use scarb_test_support::fsx::ChildPathEx; +use scarb_test_support::project_builder::ProjectBuilder; + +#[test] +fn can_use_quote() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .add_primitive_token_dep() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, inline_macro, quote}; + #[inline_macro] + pub fn some(_token_stream: TokenStream) -> ProcMacroResult { + let tokens = quote! { + 5 + }; + ProcMacroResult::new(tokens) + } + "##}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + fn main() -> felt252 { some!() } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("cairo-run") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .stdout_matches(indoc! {r#" + [..] Compiling some v1.0.0 [..] + [..] Compiling hello v1.0.0 [..] + [..] Finished `dev` profile [..] + [..] Running hello + Run completed successfully, returning [5] + "#}) + .success(); +} + +#[test] +fn can_use_quote_with_token_tree() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .add_primitive_token_dep() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, inline_macro, TokenTree, Token, TextSpan, quote}; + #[inline_macro] + pub fn some(_token_stream: TokenStream) -> ProcMacroResult { + let token = TokenTree::Ident(Token::new("5".to_string(), TextSpan::call_site())); + let tokens = quote! { + #token + }; + ProcMacroResult::new(tokens) + } + "##}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + fn main() -> felt252 { + some!() + } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("cairo-run") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .stdout_matches(indoc! {r#" + [..] Compiling some v1.0.0 [..] + [..] Compiling hello v1.0.0 [..] + [..] Finished `dev` profile [..] + [..] 
Running hello + Run completed successfully, returning [5] + "#}) + .success(); +} + +#[test] +fn can_use_quote_with_token_stream() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .add_primitive_token_dep() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, inline_macro, TokenTree, Token, TextSpan, quote}; + #[inline_macro] + pub fn some(_token_stream: TokenStream) -> ProcMacroResult { + let token = TokenStream::new(vec![TokenTree::Ident(Token::new("5".to_string(), TextSpan::call_site()))]); + let tokens = quote! { + #token + }; + ProcMacroResult::new(tokens) + } + "##}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + fn main() -> felt252 { + some!() + } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("cairo-run") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .stdout_matches(indoc! {r#" + [..] Compiling some v1.0.0 [..] + [..] Compiling hello v1.0.0 [..] + [..] Finished `dev` profile [..] + [..] Running hello + Run completed successfully, returning [5] + "#}) + .success(); +} + +#[test] +fn can_use_quote_with_syntax_node() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .add_primitive_token_dep() + .add_dep(r#"cairo-lang-syntax = "2.9.1""#) + .add_dep(r#"cairo-lang-parser = "2.9.1""#) + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, quote}; + use cairo_lang_parser::utils::SimpleParserDatabase; + use cairo_lang_syntax::node::with_db::SyntaxNodeWithDb; + #[attribute_macro] + pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let db_val = SimpleParserDatabase::default(); + let db = &db_val; + let code = r#" + fn main() -> felt252 { + 5 + } + "#; + let syntax_node = db.parse_virtual(code).unwrap(); + let syntax_node_with_db = SyntaxNodeWithDb::new(&syntax_node, db); + let tokens = quote! { + #syntax_node_with_db + }; + ProcMacroResult::new(tokens) + } + "##}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[some] + fn main() -> u32 { + // completly wrong type + true + } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("expand") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success(); + + assert_eq!( + project.child("target/dev").files(), + vec!["hello.expanded.cairo"] + ); + + let expanded = project + .child("target/dev/hello.expanded.cairo") + .read_to_string(); + + snapbox::assert_eq( + indoc! {r#" + mod hello { + fn main() -> felt252 { + 5 + } + } + "#}, + expanded, + ); +} + +#[test] +fn can_use_quote_with_cairo_specific_syntax() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default().add_primitive_token_dep() + .add_dep(r#"cairo-lang-syntax = "2.9.1""#) + .add_dep(r#"cairo-lang-parser = "2.9.1""#) + .lib_rs(indoc! 
{r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, quote}; + use cairo_lang_parser::utils::SimpleParserDatabase; + use cairo_lang_syntax::node::with_db::SyntaxNodeWithDb; + #[attribute_macro] + pub fn some(_attr: TokenStream, _token_stream: TokenStream) -> ProcMacroResult { + let db_val = SimpleParserDatabase::default(); + let db = &db_val; + let code = r#" + #[derive(Drop)] + struct Rectangle { + width: u64, + height: u64, + } + #[derive(Drop, PartialEq)] + struct Square { + side_length: u64, + } + impl RectangleIntoSquare of TryInto { + fn try_into(self: Rectangle) -> Option { + if self.height == self.width { + Option::Some(Square { side_length: self.height }) + } else { + Option::None + } + } + } + fn main() { + let rectangle = Rectangle { width: 8, height: 8 }; + let result: Square = rectangle.try_into().unwrap(); + let expected = Square { side_length: 8 }; + assert!( + result == expected, + "Rectangle with equal width and height should be convertible to a square." + ); + let rectangle = Rectangle { width: 5, height: 8 }; + let result: Option = rectangle.try_into(); + assert!( + result.is_none(), + "Rectangle with different width and height should not be convertible to a square." + ); + } + "#; + let syntax_node = db.parse_virtual(code).unwrap(); + let syntax_node_with_db = SyntaxNodeWithDb::new(&syntax_node, db); + let tokens = quote! { + #syntax_node_with_db + trait Circle { + fn print() -> (); + } + impl CircleImpl of Circle { + fn print() -> () { + println!("This is a circle!"); + } + } + }; + ProcMacroResult::new(tokens) + } + "##}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[some] + fn main() -> u32 { + // completly wrong type + true + } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("expand") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success(); + + assert_eq!( + project.child("target/dev").files(), + vec!["hello.expanded.cairo"] + ); + + let expanded = project + .child("target/dev/hello.expanded.cairo") + .read_to_string(); + + snapbox::assert_eq( + indoc! 
{r#" + mod hello { + #[derive(Drop)] + struct Rectangle { + width: u64, + height: u64, + } + #[derive(Drop, PartialEq)] + struct Square { + side_length: u64, + } + impl RectangleIntoSquare of TryInto { + fn try_into(self: Rectangle) -> Option { + if self.height == self.width { + Option::Some(Square { side_length: self.height }) + } else { + Option::None + } + } + } + fn main() { + let rectangle = Rectangle { width: 8, height: 8 }; + let result: Square = rectangle.try_into().unwrap(); + let expected = Square { side_length: 8 }; + assert!( + result == expected, + "Rectangle with equal width and height should be convertible to a square.", + ); + let rectangle = Rectangle { width: 5, height: 8 }; + let result: Option = rectangle.try_into(); + assert!( + result.is_none(), + "Rectangle with different width and height should not be convertible to a square.", + ); + } + trait Circle { + fn print() -> (); + } + impl CircleImpl of Circle { + fn print() -> () { + println!("This is a circle!"); + } + } + impl RectangleDrop<> of core::traits::Drop; + impl SquareDrop<> of core::traits::Drop; + impl SquarePartialEq<> of core::traits::PartialEq { + fn eq(lhs: @Square, rhs: @Square) -> bool { + core::traits::PartialEq::::eq(lhs.side_length, rhs.side_length) + } + } + } + "#}, + expanded, + ); +} diff --git a/utils/scarb-test-support/src/cairo_plugin_project_builder.rs b/utils/scarb-test-support/src/cairo_plugin_project_builder.rs index f869b5ff0..bec8bddf2 100644 --- a/utils/scarb-test-support/src/cairo_plugin_project_builder.rs +++ b/utils/scarb-test-support/src/cairo_plugin_project_builder.rs @@ -97,6 +97,10 @@ impl CairoPluginProjectBuilder { self.project.just_manifest(t); self.just_code(t); } + + pub fn add_primitive_token_dep(self) -> Self { + self.add_dep(r#"cairo-lang-primitive-token = "1.0.0""#) + } } impl Default for CairoPluginProjectBuilder { From 618cc0c61b256cf8d63972cf7fd28fd3797e6768 Mon Sep 17 00:00:00 2001 From: maciektr Date: Fri, 6 Dec 2024 00:09:23 +0100 Subject: [PATCH 13/38] Refactor: split proc macro tests into multiple files (#1811) commit-id:b0254722 --- Cargo.lock | 2 +- plugins/cairo-lang-macro/src/types/token.rs | 2 +- .../compiler/plugin/proc_macro/host/mod.rs | 2 +- scarb/tests/proc_macro_build.rs | 452 ++++++++++++++++++ scarb/tests/proc_macro_executable.rs | 109 +++++ ...d_cairo_plugin.rs => proc_macro_expand.rs} | 235 +++------ scarb/tests/proc_macro_metadata.rs | 190 ++++++++ scarb/tests/proc_macro_prebuilt.rs | 4 +- scarb/tests/proc_macro_quote.rs | 18 +- 9 files changed, 830 insertions(+), 184 deletions(-) create mode 100644 scarb/tests/proc_macro_build.rs create mode 100644 scarb/tests/proc_macro_executable.rs rename scarb/tests/{build_cairo_plugin.rs => proc_macro_expand.rs} (93%) create mode 100644 scarb/tests/proc_macro_metadata.rs diff --git a/Cargo.lock b/Cargo.lock index 26178132d..bef70fb23 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -949,7 +949,7 @@ dependencies = [ "bumpalo", "cairo-lang-macro-attributes 0.1.0", "cairo-lang-macro-stable 1.0.0", - "cairo-lang-primitive-token 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cairo-lang-primitive-token", "cairo-lang-quote", "linkme", "serde", diff --git a/plugins/cairo-lang-macro/src/types/token.rs b/plugins/cairo-lang-macro/src/types/token.rs index b9d0ca4b8..e1f15bcd1 100644 --- a/plugins/cairo-lang-macro/src/types/token.rs +++ b/plugins/cairo-lang-macro/src/types/token.rs @@ -3,7 +3,7 @@ use bumpalo::Bump; use cairo_lang_primitive_token::{PrimitiveSpan, PrimitiveToken, ToPrimitiveTokenStream}; 
use std::fmt::{Debug, Display, Write}; use std::hash::{Hash, Hasher}; -use std::iter::{once, Map, Once}; +use std::iter::{Map, Once, once}; use std::ops::Deref; use std::rc::Rc; use std::vec::IntoIter; diff --git a/scarb/src/compiler/plugin/proc_macro/host/mod.rs b/scarb/src/compiler/plugin/proc_macro/host/mod.rs index 6d2ebf2d2..e2d48749d 100644 --- a/scarb/src/compiler/plugin/proc_macro/host/mod.rs +++ b/scarb/src/compiler/plugin/proc_macro/host/mod.rs @@ -68,7 +68,7 @@ impl ProcMacroHostPlugin { .collect_vec() }) .collect::>(); - expansions.sort_unstable_by_key(|e| e.expansion.name.clone()); + expansions.sort_unstable_by_key(|e| (e.expansion.name.clone(), e.package_id)); ensure!( expansions .windows(2) diff --git a/scarb/tests/proc_macro_build.rs b/scarb/tests/proc_macro_build.rs new file mode 100644 index 000000000..763b154d1 --- /dev/null +++ b/scarb/tests/proc_macro_build.rs @@ -0,0 +1,452 @@ +use assert_fs::TempDir; +use assert_fs::fixture::PathChild; +use indoc::indoc; +use scarb_test_support::cairo_plugin_project_builder::CairoPluginProjectBuilder; +use scarb_test_support::command::Scarb; +use scarb_test_support::project_builder::ProjectBuilder; +use scarb_test_support::workspace_builder::WorkspaceBuilder; +use snapbox::assert_matches; + +#[test] +fn compile_cairo_plugin() { + let t = TempDir::new().unwrap(); + CairoPluginProjectBuilder::default().build(&t); + let output = Scarb::quick_snapbox() + .arg("build") + // Disable colors in Cargo output. + .env("CARGO_TERM_COLOR", "never") + .current_dir(&t) + .output() + .unwrap(); + assert!( + output.status.success(), + "stdout={}\n stderr={}", + String::from_utf8_lossy(&output.stdout), + String::from_utf8_lossy(&output.stderr), + ); + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); + assert!(stdout.contains("Compiling some v1.0.0")); + let lines = stdout.lines().map(ToString::to_string).collect::>(); + let (last, lines) = lines.split_last().unwrap(); + assert_matches(r#"[..] Finished `dev` profile target(s) in [..]"#, last); + let (last, _lines) = lines.split_last().unwrap(); + // Line from Cargo output + assert_matches( + r#"[..]Finished `release` profile [optimized] target(s) in[..]"#, + last, + ); +} + +#[test] +fn check_cairo_plugin() { + let t = TempDir::new().unwrap(); + CairoPluginProjectBuilder::default().build(&t); + let output = Scarb::quick_snapbox() + .arg("check") + // Disable colors in Cargo output. + .env("CARGO_TERM_COLOR", "never") + .current_dir(&t) + .output() + .unwrap(); + assert!( + output.status.success(), + "{}", + String::from_utf8_lossy(&output.stderr) + ); + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); + assert!(stdout.contains("Checking some v1.0.0")); + let lines = stdout.lines().map(ToString::to_string).collect::>(); + let (last, lines) = lines.split_last().unwrap(); + assert_matches( + r#"[..] 
Finished checking `dev` profile target(s) in [..]"#, + last, + ); + let (last, _lines) = lines.split_last().unwrap(); + // Line from Cargo output + assert_matches( + r#"[..]Finished `release` profile [optimized] target(s) in[..]"#, + last, + ); +} + +#[test] +fn can_check_cairo_project_with_plugins() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default().build(&t); + let project = temp.child("hello"); + let y = project.child("other"); + CairoPluginProjectBuilder::default().name("other").build(&y); + WorkspaceBuilder::start() + .add_member("other") + .package( + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t), + ) + .build(&project); + Scarb::quick_snapbox() + .arg("check") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success() + .stdout_matches(indoc! {r#" + [..]Checking other v1.0.0 ([..]Scarb.toml) + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Checking hello v1.0.0 ([..]Scarb.toml) + [..]Finished checking `dev` profile target(s) in [..] + "#}); +} + +#[test] +fn resolve_fetched_plugins() { + let t = TempDir::new().unwrap(); + CairoPluginProjectBuilder::default().build(&t); + assert!(!t.child("Cargo.lock").exists()); + let output = Scarb::quick_snapbox() + .arg("fetch") + // Disable colors in Cargo output. + .env("CARGO_TERM_COLOR", "never") + .current_dir(&t) + .output() + .unwrap(); + assert!( + output.status.success(), + "{}", + String::from_utf8_lossy(&output.stderr) + ); + assert!(t.child("Cargo.lock").exists()) +} + +#[test] +fn can_use_json_output() { + let t = TempDir::new().unwrap(); + CairoPluginProjectBuilder::default().build(&t); + let output = Scarb::quick_snapbox() + .arg("--json") + .arg("check") + // Disable colors in Cargo output. + .env("CARGO_TERM_COLOR", "never") + .current_dir(&t) + .output() + .unwrap(); + assert!( + output.status.success(), + "{}", + String::from_utf8_lossy(&output.stderr) + ); + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); + let lines = stdout.lines().map(ToString::to_string).collect::>(); + let (first, lines) = lines.split_first().unwrap(); + assert_matches( + r#"{"status":"checking","message":"some v1.0.0 ([..]Scarb.toml)"}"#, + first, + ); + let (last, lines) = lines.split_last().unwrap(); + assert_matches( + r#"{"status":"finished","message":"checking `dev` profile target(s) in [..]"}"#, + last, + ); + // Line from Cargo. + let (last, _lines) = lines.split_last().unwrap(); + assert_matches(r#"{"reason":"build-finished","success":true}"#, last); +} + +#[test] +fn compile_cairo_plugin_with_lib_target() { + let t = TempDir::new().unwrap(); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .manifest_extra(indoc! {r#" + [lib] + [cairo-plugin] + "#}) + .build(&t); + + Scarb::quick_snapbox() + .arg("build") + .current_dir(&t) + .assert() + .failure() + .stdout_matches(indoc! {r#" + error: failed to parse manifest at: [..]/Scarb.toml + + Caused by: + target `cairo-plugin` cannot be mixed with other targets + "#}); +} + +#[test] +fn compile_cairo_plugin_with_other_target() { + let t = TempDir::new().unwrap(); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .manifest_extra(indoc! {r#" + [cairo-plugin] + [[target.starknet-contract]] + "#}) + .build(&t); + + Scarb::quick_snapbox() + .arg("build") + .current_dir(&t) + .assert() + .failure() + .stdout_matches(indoc! 
{r#" + error: failed to parse manifest at: [..]/Scarb.toml + + Caused by: + target `cairo-plugin` cannot be mixed with other targets + "#}); +} + +#[test] +fn can_define_multiple_macros() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, AuxData, PostProcessContext, post_process, TokenTree, Token, TextSpan}; + + #[attribute_macro] + pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let new_token_string = token_stream.to_string().replace("12", "34"); + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + new_token_string.clone(), + TextSpan { start: 0, end: new_token_string.len() as u32 }, + ))]); + let aux_data = AuxData::new(Vec::new()); + ProcMacroResult::new(token_stream).with_aux_data(aux_data) + } + + #[attribute_macro] + pub fn world(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let new_token_string = token_stream.to_string().replace("56", "78"); + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + new_token_string.clone(), + TextSpan { start: 0, end: new_token_string.len() as u32 }, + ))]); + let aux_data = AuxData::new(Vec::new()); + ProcMacroResult::new(token_stream).with_aux_data(aux_data) + } + + #[post_process] + pub fn callback(context: PostProcessContext) { + assert_eq!(context.aux_data.len(), 2); + } + "##}) + .build(&t); + + let w = temp.child("other"); + CairoPluginProjectBuilder::default() + .name("other") + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, AuxData, PostProcessContext, post_process, TokenTree, Token, TextSpan}; + + #[attribute_macro] + pub fn beautiful(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let new_token_string = token_stream.to_string().replace("90", "09"); + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + new_token_string.clone(), + TextSpan { start: 0, end: new_token_string.len() as u32 }, + ))]); + let aux_data = AuxData::new(Vec::new()); + ProcMacroResult::new(token_stream).with_aux_data(aux_data) + } + + #[post_process] + pub fn callback(context: PostProcessContext) { + assert_eq!(context.aux_data.len(), 1); + } + "##}) + .build(&w); + + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .dep("other", &w) + .lib_cairo(indoc! {r#" + #[hello] + #[beautiful] + #[world] + fn main() -> felt252 { 12 + 56 + 90 } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("cairo-run") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success() + .stdout_matches(indoc! {r#" + warn: `scarb cairo-run` will be deprecated soon + help: use `scarb execute` instead + [..]Compiling other v1.0.0 ([..]Scarb.toml) + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + [..]Finished `dev` profile target(s) in [..] + [..]Running hello + Run completed successfully, returning [121] + "#}); +} + +#[test] +fn cannot_duplicate_macros() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .lib_rs(indoc! 
{r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + + #[attribute_macro] + pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(token_stream) + } + + #[attribute_macro] + pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(token_stream) + } + "##}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[hello] + fn main() -> felt252 { 12 + 56 + 90 } + "#}) + .build(&project); + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + // Fails with Cargo compile error. + .failure(); +} + +#[test] +fn cannot_duplicate_macros_across_packages() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .lib_rs(indoc! {r#" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + + #[attribute_macro] + pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(token_stream) + } + + #[attribute_macro] + pub fn world(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(token_stream) + } + "#}) + .build(&t); + + let w = temp.child("other"); + CairoPluginProjectBuilder::default() + .name("other") + .lib_rs(indoc! {r#" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + + #[attribute_macro] + pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(token_stream) + } + "#}) + .build(&w); + + let p = temp.child("pkg"); + CairoPluginProjectBuilder::default() + .name("pkg") + .lib_rs(indoc! {r#" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + + #[attribute_macro] + pub fn foo(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(token_stream) + } + "#}) + .build(&p); + + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .dep("other", &w) + .dep("pkg", &p) + .lib_cairo(indoc! {r#" + #[hello] + #[world] + fn main() -> felt252 { 12 + 56 + 90 } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .failure() + .stdout_matches(indoc! {r#" + [..]Compiling other v1.0.0 ([..]Scarb.toml) + [..]Compiling pkg v1.0.0 ([..]Scarb.toml) + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + error: duplicate expansions defined for procedural macros: hello (other v1.0.0 ([..]Scarb.toml) and some v1.0.0 ([..]Scarb.toml)) + "#}); +} +#[test] +fn cannot_use_undefined_macro() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default().build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[world] + fn main() -> felt252 { 12 + 56 + 90 } + "#}) + .build(&project); + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .failure() + .stdout_matches(indoc! 
{r#" + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + error: Plugin diagnostic: Unsupported attribute. + --> [..]lib.cairo:1:1 + #[world] + ^^^^^^^^ + + error: could not compile `hello` due to previous error + "#}); +} diff --git a/scarb/tests/proc_macro_executable.rs b/scarb/tests/proc_macro_executable.rs new file mode 100644 index 000000000..0193dd8e6 --- /dev/null +++ b/scarb/tests/proc_macro_executable.rs @@ -0,0 +1,109 @@ +use assert_fs::TempDir; +use assert_fs::fixture::PathChild; +use cairo_lang_sierra::program::VersionedProgram; +use indoc::indoc; +use scarb_test_support::cairo_plugin_project_builder::CairoPluginProjectBuilder; +use scarb_test_support::command::Scarb; +use scarb_test_support::fsx::ChildPathEx; +use scarb_test_support::project_builder::ProjectBuilder; + +#[test] +fn can_create_executable_attribute() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .lib_rs(indoc! {r##" + use cairo_lang_macro::executable_attribute; + + executable_attribute!("some"); + "##}) + .build(&t); + + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[some] + fn main() -> felt252 { 12 } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success() + .stdout_matches(indoc! {r#" + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + [..]Finished `dev` profile target(s) in [..] + "#}); + let sierra = project + .child("target") + .child("dev") + .child("hello.sierra.json") + .read_to_string(); + let sierra = serde_json::from_str::(&sierra).unwrap(); + let sierra = sierra.into_v1().unwrap(); + let executables = sierra.debug_info.unwrap().executables; + assert_eq!(executables.len(), 1); + let executables = executables.get("some").unwrap(); + assert_eq!(executables.len(), 1); + let fid = executables.first().unwrap().clone(); + assert_eq!(fid.clone().debug_name.unwrap(), "hello::main"); + assert!( + sierra + .program + .funcs + .iter() + .any(|f| f.id.clone() == fid.clone()) + ); +} + +#[test] +fn executable_name_cannot_clash_attr() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{executable_attribute, attribute_macro, TokenStream, ProcMacroResult}; + + executable_attribute!("some"); + + #[attribute_macro] + fn some(_args: TokenStream, input: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(input) + } + "##}) + .build(&t); + + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[some] + fn main() -> felt252 { 12 } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .failure() + .stdout_matches(indoc! 
{r#" + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + error: duplicate expansions defined for procedural macro some v1.0.0 ([..]Scarb.toml): some + "#}); +} diff --git a/scarb/tests/build_cairo_plugin.rs b/scarb/tests/proc_macro_expand.rs similarity index 93% rename from scarb/tests/build_cairo_plugin.rs rename to scarb/tests/proc_macro_expand.rs index 855126359..fec108d6c 100644 --- a/scarb/tests/build_cairo_plugin.rs +++ b/scarb/tests/proc_macro_expand.rs @@ -253,52 +253,6 @@ fn can_emit_plugin_warning() { "#}); } -#[test] -fn can_emit_plugin_error() { - let temp = TempDir::new().unwrap(); - let t = temp.child("some"); - CairoPluginProjectBuilder::default() - .lib_rs(indoc! {r#" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, Diagnostic}; - - #[attribute_macro] - pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - let diag = Diagnostic::error("Some error from macro."); - ProcMacroResult::new(token_stream) - .with_diagnostics(diag.into()) - } - "#}) - .build(&t); - let project = temp.child("hello"); - ProjectBuilder::start() - .name("hello") - .version("1.0.0") - .dep("some", &t) - .lib_cairo(indoc! {r#" - #[some] - fn f() -> felt252 { 12 } - "#}) - .build(&project); - - Scarb::quick_snapbox() - .arg("build") - // Disable output from Cargo. - .env("CARGO_TERM_QUIET", "true") - .current_dir(&project) - .assert() - .failure() - .stdout_matches(indoc! {r#" - [..] Compiling some v1.0.0 ([..]Scarb.toml) - [..] Compiling hello v1.0.0 ([..]Scarb.toml) - error: Plugin diagnostic: Some error from macro. - --> [..]lib.cairo:1:1 - #[some] - ^^^^^^^ - - error: could not compile `hello` due to previous error - "#}); -} - #[test] fn diags_from_generated_code_mapped_correctly() { let temp = TempDir::new().unwrap(); @@ -667,123 +621,6 @@ fn can_define_multiple_macros() { "#}); } -#[test] -fn cannot_duplicate_macros() { - let temp = TempDir::new().unwrap(); - let t = temp.child("some"); - CairoPluginProjectBuilder::default() - .lib_rs(indoc! {r##" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; - - #[attribute_macro] - pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - ProcMacroResult::new(token_stream) - } - - #[attribute_macro] - pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - ProcMacroResult::new(token_stream) - } - "##}) - .build(&t); - let project = temp.child("hello"); - ProjectBuilder::start() - .name("hello") - .version("1.0.0") - .dep_starknet() - .dep("some", &t) - .lib_cairo(indoc! {r#" - #[hello] - fn main() -> felt252 { 12 + 56 + 90 } - "#}) - .build(&project); - Scarb::quick_snapbox() - .arg("build") - // Disable output from Cargo. - .env("CARGO_TERM_QUIET", "true") - .current_dir(&project) - .assert() - // Fails with Cargo compile error. - .failure(); -} - -#[test] -fn cannot_duplicate_macros_across_packages() { - let temp = TempDir::new().unwrap(); - let t = temp.child("some"); - CairoPluginProjectBuilder::default() - .lib_rs(indoc! 
{r#" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; - - #[attribute_macro] - pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - ProcMacroResult::new(token_stream) - } - - #[attribute_macro] - pub fn world(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - ProcMacroResult::new(token_stream) - } - "#}) - .build(&t); - - let w = temp.child("other"); - CairoPluginProjectBuilder::default() - .name("other") - .lib_rs(indoc! {r#" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; - - #[attribute_macro] - pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - ProcMacroResult::new(token_stream) - } - "#}) - .build(&w); - - let p = temp.child("pkg"); - CairoPluginProjectBuilder::default() - .name("pkg") - .lib_rs(indoc! {r#" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; - - #[attribute_macro] - pub fn foo(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - ProcMacroResult::new(token_stream) - } - "#}) - .build(&p); - - let project = temp.child("hello"); - ProjectBuilder::start() - .name("hello") - .version("1.0.0") - .dep_starknet() - .dep("some", &t) - .dep("other", &w) - .dep("pkg", &p) - .lib_cairo(indoc! {r#" - #[hello] - #[world] - fn main() -> felt252 { 12 + 56 + 90 } - "#}) - .build(&project); - - Scarb::quick_snapbox() - .arg("build") - // Disable output from Cargo. - .env("CARGO_TERM_QUIET", "true") - .current_dir(&project) - .assert() - .failure() - .stdout_matches(indoc! {r#" - [..]Compiling other v1.0.0 ([..]Scarb.toml) - [..]Compiling pkg v1.0.0 ([..]Scarb.toml) - [..]Compiling some v1.0.0 ([..]Scarb.toml) - [..]Compiling hello v1.0.0 ([..]Scarb.toml) - error: duplicate expansions defined for procedural macros: hello (other v1.0.0 ([..]Scarb.toml) and some v1.0.0 ([..]Scarb.toml)) - "#}); -} - #[test] fn cannot_use_undefined_macro() { let temp = TempDir::new().unwrap(); @@ -1671,6 +1508,52 @@ fn can_be_used_through_re_export() { ); } +#[test] +fn can_emit_plugin_error() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .lib_rs(indoc! {r#" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, Diagnostic}; + + #[attribute_macro] + pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let diag = Diagnostic::error("Some error from macro."); + ProcMacroResult::new(token_stream) + .with_diagnostics(diag.into()) + } + "#}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[some] + fn f() -> felt252 { 12 } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .failure() + .stdout_matches(indoc! {r#" + [..] Compiling some v1.0.0 ([..]Scarb.toml) + [..] Compiling hello v1.0.0 ([..]Scarb.toml) + error: Plugin diagnostic: Some error from macro. + --> [..]lib.cairo:1:1 + #[some] + ^^^^^^^ + + error: could not compile `hello` due to previous error + "#}); +} + #[test] fn code_mappings_preserve_attribute_error_locations() { let temp = TempDir::new().unwrap(); @@ -1715,7 +1598,7 @@ fn code_mappings_preserve_attribute_error_locations() { error: Cannot assign to an immutable variable. 
--> [..]lib.cairo[proc_some]:3:5 x = 2; - ^***^ + ^^^^^ note: this error originates in the attribute macro: `some` error: could not compile `hello` due to previous error @@ -1768,7 +1651,7 @@ fn code_mappings_preserve_inline_macro_error_locations() { error: Identifier not found. --> [..]lib.cairo:1:1 fn main() -> felt252 { - ^*******^ + ^^^^^^^^^ error: could not compile `hello` due to previous error "#}); @@ -1849,18 +1732,24 @@ fn code_mappings_preserve_derive_error_locations() { .assert() .failure() .stdout_matches(indoc! {r#" - [..] Compiling some v1.0.0 ([..]Scarb.toml) - [..] Compiling hello v1.0.0 ([..]Scarb.toml) + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) error: The value does not fit within the range of type core::integer::u8. - --> [..]lib.cairo:1:1 - trait Hello { - ^**************^ + --> [..]lib.cairo:1:1-8:1 + trait Hello { + _^ + | ... + | #[derive(CustomDerive, Drop)] + |_^ note: this error originates in the derive macro: `custom_derive` error: The value does not fit within the range of type core::integer::u8. - --> [..]lib.cairo:1:1 - trait Hello { - ^**************^ + --> [..]lib.cairo:1:1-8:10 + trait Hello { + _^ + | ... + | #[derive(CustomDerive, Drop)] + |__________^ note: this error originates in the derive macro: `custom_derive` error: could not compile `hello` due to previous error diff --git a/scarb/tests/proc_macro_metadata.rs b/scarb/tests/proc_macro_metadata.rs new file mode 100644 index 000000000..438593601 --- /dev/null +++ b/scarb/tests/proc_macro_metadata.rs @@ -0,0 +1,190 @@ +use assert_fs::TempDir; +use assert_fs::fixture::PathChild; +use indoc::indoc; +use scarb_test_support::cairo_plugin_project_builder::CairoPluginProjectBuilder; +use scarb_test_support::command::Scarb; +use scarb_test_support::project_builder::ProjectBuilder; + +#[test] +fn can_return_aux_data_from_plugin() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, AuxData, PostProcessContext, post_process}; + use serde::{Serialize, Deserialize}; + + #[derive(Debug, Serialize, Deserialize)] + struct SomeMacroDataFormat { + msg: String + } + + #[attribute_macro] + pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let value = SomeMacroDataFormat { msg: "Hello from some macro!".to_string() }; + let value = serde_json::to_string(&value).unwrap(); + let value: Vec = value.into_bytes(); + let aux_data = AuxData::new(value); + ProcMacroResult::new(token_stream).with_aux_data(aux_data) + } + + #[post_process] + pub fn callback(context: PostProcessContext) { + let aux_data = context.aux_data.into_iter() + .map(|aux_data| { + let value: Vec = aux_data.into(); + let aux_data: SomeMacroDataFormat = serde_json::from_slice(&value).unwrap(); + aux_data + }) + .collect::>(); + println!("{:?}", aux_data); + } + + #[post_process] + pub fn some_no_op_callback(context: PostProcessContext) { + drop(context.aux_data); + } + "##}) + .add_dep(r#"serde = { version = "*", features = ["derive"] }"#) + .add_dep(r#"serde_json = "*""#) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[some] + fn main() -> felt252 { 12 } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. 
+ .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success() + .stdout_matches(indoc! {r#" + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + [SomeMacroDataFormat { msg: "Hello from some macro!" }] + [..]Finished `dev` profile target(s) in [..] + "#}); +} + +#[test] +fn can_read_token_stream_metadata() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + + #[attribute_macro] + pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + println!("{:#?}", token_stream.metadata()); + ProcMacroResult::new(token_stream) + } + "##}) + .build(&t); + + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[some] + fn main() -> felt252 { 12 } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success() + .stdout_matches(indoc! {r#" + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + TokenStreamMetadata { + original_file_path: Some( + "[..]lib.cairo", + ), + file_id: Some( + "[..]", + ), + edition: Some( + "[..]", + ), + } + [..]Finished `dev` profile target(s) in [..] + "#}); +} + +#[test] +fn can_resolve_full_path_markers() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, post_process, PostProcessContext, TokenTree, Token, TextSpan}; + + #[attribute_macro] + pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let full_path_markers = vec!["some-key".to_string()]; + + let code = format!( + r#"#[macro::full_path_marker("some-key")] {}"#, + token_stream.to_string().replace("12", "34") + ); + + ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new( + code.clone(), + TextSpan { + start: 0, + end: code.len() as u32, + }, + ))]) + ).with_full_path_markers(full_path_markers) + } + + #[post_process] + pub fn callback(context: PostProcessContext) { + println!("{:?}", context.full_path_markers); + } + "##}) + .build(&t); + + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[some] + fn main() -> felt252 { 12 } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success() + .stdout_matches(indoc! {r#" + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + [FullPathMarker { key: "some-key", full_path: "hello::main" }] + [..]Finished `dev` profile target(s) in [..] 
+ "#}); +} diff --git a/scarb/tests/proc_macro_prebuilt.rs b/scarb/tests/proc_macro_prebuilt.rs index f14185e4c..6586cd601 100644 --- a/scarb/tests/proc_macro_prebuilt.rs +++ b/scarb/tests/proc_macro_prebuilt.rs @@ -228,7 +228,7 @@ fn load_prebuilt_proc_macros() { name: "some".to_string(), args: TokenStream::new(vec![TokenTree::Ident(Token::new( "42", - TextSpan::new(0, 0), + TextSpan::call_site(), ))]), call_site: TextSpan::new(0, 0), }) @@ -239,7 +239,7 @@ fn load_prebuilt_proc_macros() { response.token_stream, TokenStream::new(vec![TokenTree::Ident(Token::new( "42", - TextSpan::new(0, 0), + TextSpan::call_site(), ))]) ); } diff --git a/scarb/tests/proc_macro_quote.rs b/scarb/tests/proc_macro_quote.rs index f20ac4133..1b3fade7f 100644 --- a/scarb/tests/proc_macro_quote.rs +++ b/scarb/tests/proc_macro_quote.rs @@ -1,5 +1,5 @@ -use assert_fs::fixture::PathChild; use assert_fs::TempDir; +use assert_fs::fixture::PathChild; use indoc::indoc; use scarb_test_support::cairo_plugin_project_builder::CairoPluginProjectBuilder; use scarb_test_support::command::Scarb; @@ -40,6 +40,8 @@ fn can_use_quote() { .current_dir(&project) .assert() .stdout_matches(indoc! {r#" + warn: `scarb cairo-run` will be deprecated soon + help: use `scarb execute` instead [..] Compiling some v1.0.0 [..] [..] Compiling hello v1.0.0 [..] [..] Finished `dev` profile [..] @@ -86,6 +88,8 @@ fn can_use_quote_with_token_tree() { .current_dir(&project) .assert() .stdout_matches(indoc! {r#" + warn: `scarb cairo-run` will be deprecated soon + help: use `scarb execute` instead [..] Compiling some v1.0.0 [..] [..] Compiling hello v1.0.0 [..] [..] Finished `dev` profile [..] @@ -132,11 +136,13 @@ fn can_use_quote_with_token_stream() { .current_dir(&project) .assert() .stdout_matches(indoc! {r#" - [..] Compiling some v1.0.0 [..] - [..] Compiling hello v1.0.0 [..] - [..] Finished `dev` profile [..] - [..] Running hello - Run completed successfully, returning [5] + warn: `scarb cairo-run` will be deprecated soon + help: use `scarb execute` instead + Compiling some v1.0.0 ([..]Scarb.toml) + Compiling hello v1.0.0 ([..]Scarb.toml) + Finished `dev` profile target(s) in [..] + Running hello + Run completed successfully, returning [5] "#}) .success(); } From 90fa72036060af2cfc49800bd66dd07c6325d9d2 Mon Sep 17 00:00:00 2001 From: maciektr Date: Wed, 15 Jan 2025 12:56:18 +0100 Subject: [PATCH 14/38] Proc macro diagnostics Cairo update (#1881) --- scarb/src/compiler/plugin/proc_macro/types.rs | 22 ++++++++++++------- scarb/tests/proc_macro_expand.rs | 4 ++-- 2 files changed, 16 insertions(+), 10 deletions(-) diff --git a/scarb/src/compiler/plugin/proc_macro/types.rs b/scarb/src/compiler/plugin/proc_macro/types.rs index 7599f69e0..304a8bc14 100644 --- a/scarb/src/compiler/plugin/proc_macro/types.rs +++ b/scarb/src/compiler/plugin/proc_macro/types.rs @@ -47,11 +47,13 @@ impl<'a> TokenStreamBuilder<'a> { pub fn token_from_syntax_node(&self, node: SyntaxNode, ctx: &AllocationContext) -> Token { let span = node.span(self.db); let text = node.get_text(self.db); - let span = TextSpan { - // We skip the whitespace prefix, so that diagnostics start where the actual token contents is. - start: span.start.as_u32() + whitespace_prefix_len(&text), - end: span.end.as_u32(), - }; + // We skip the whitespace prefix, so that diagnostics start where the actual token contents is. + let start = span.start.as_u32() + whitespace_prefix_len(&text); + // Then we also skip the whitespace suffix, for the same reason. 
+ let end = span.end.as_u32() - whitespace_suffix_len(&text); + // This handles the case of a whitespace only string. + let end = if end < start { start } else { end }; + let span = TextSpan { start, end }; Token::new_in(text, span, ctx) } } @@ -60,6 +62,10 @@ fn whitespace_prefix_len(s: &str) -> u32 { s.chars().take_while(|c| c.is_whitespace()).count() as u32 } +fn whitespace_suffix_len(s: &str) -> u32 { + s.chars().rev().take_while(|c| c.is_whitespace()).count() as u32 +} + #[cfg(test)] mod tests { use crate::compiler::plugin::proc_macro::TokenStreamBuilder; @@ -88,13 +94,13 @@ mod tests { }; let token = token_at(&token_stream, 4); assert_eq!(token.content.as_ref(), "{\n"); - assert_eq!(token.span, TextSpan { start: 10, end: 12 }); + assert_eq!(token.span, TextSpan { start: 10, end: 11 }); let token = token_at(&token_stream, 5); assert_eq!(token.content.as_ref(), " let "); // Note we skip 4 whitespaces characters in the span. - assert_eq!(token.span, TextSpan { start: 16, end: 20 }); + assert_eq!(token.span, TextSpan { start: 16, end: 19 }); let token = token_at(&token_stream, 6); assert_eq!(token.content.as_ref(), "x "); - assert_eq!(token.span, TextSpan { start: 20, end: 22 }); + assert_eq!(token.span, TextSpan { start: 20, end: 21 }); } } diff --git a/scarb/tests/proc_macro_expand.rs b/scarb/tests/proc_macro_expand.rs index fec108d6c..b0885d62f 100644 --- a/scarb/tests/proc_macro_expand.rs +++ b/scarb/tests/proc_macro_expand.rs @@ -1596,9 +1596,9 @@ fn code_mappings_preserve_attribute_error_locations() { [..] Compiling some v1.0.0 ([..]Scarb.toml) [..] Compiling hello v1.0.0 ([..]Scarb.toml) error: Cannot assign to an immutable variable. - --> [..]lib.cairo[proc_some]:3:5 + --> [..]lib.cairo:4:5 x = 2; - ^^^^^ + ^^^^^^ note: this error originates in the attribute macro: `some` error: could not compile `hello` due to previous error From 5de19b4e0e6e60d12637bc0ea677e27e31237d78 Mon Sep 17 00:00:00 2001 From: maciektr Date: Fri, 17 Jan 2025 17:07:48 +0100 Subject: [PATCH 15/38] Allow renaming macro attributes (#1891) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: maciektr Co-authored-by: Rémy Baranx Co-authored-by: Maksim Zdobnikau <43750648+DelevoXDG@users.noreply.github.com> --- .github/workflows/ci.yml | 23 +++- Cargo.lock | 25 +++++ Cargo.toml | 1 + .../cairo-lang-macro-attributes/src/lib.rs | 101 ++++++++++++++++-- plugins/cairo-lang-macro/Cargo.toml | 1 + .../cairo-lang-macro/tests/args/args_01.rs | 8 ++ .../tests/args/args_01.stderr | 13 +++ .../cairo-lang-macro/tests/args/args_02.rs | 8 ++ .../tests/args/args_02.stderr | 13 +++ .../cairo-lang-macro/tests/args/args_03.rs | 8 ++ .../tests/args/args_03.stderr | 13 +++ .../tests/arguments_parsing.rs | 40 +++++++ 12 files changed, 242 insertions(+), 12 deletions(-) create mode 100644 plugins/cairo-lang-macro/tests/args/args_01.rs create mode 100644 plugins/cairo-lang-macro/tests/args/args_01.stderr create mode 100644 plugins/cairo-lang-macro/tests/args/args_02.rs create mode 100644 plugins/cairo-lang-macro/tests/args/args_02.stderr create mode 100644 plugins/cairo-lang-macro/tests/args/args_03.rs create mode 100644 plugins/cairo-lang-macro/tests/args/args_03.stderr create mode 100644 plugins/cairo-lang-macro/tests/arguments_parsing.rs diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index eace39212..23f23c810 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -33,7 +33,7 @@ jobs: - uses: Swatinem/rust-cache@v2 - uses: 
taiki-e/install-action@nextest - name: nextest archive - run: cargo nextest archive --workspace --all-features --cargo-profile ci --archive-file 'nextest-archive-${{ matrix.platform.os }}.tar.zst' --exclude scarb-prove --exclude scarb-verify + run: cargo nextest archive --workspace --all-features --cargo-profile ci --archive-file 'nextest-archive-${{ matrix.platform.os }}.tar.zst' --exclude scarb-prove --exclude scarb-verify --exclude cairo-lang-macro - uses: actions/upload-artifact@v4 with: name: nextest-archive-${{ matrix.platform.os }} @@ -151,6 +151,27 @@ jobs: - name: nextest partition run: cargo nextest run --archive-file 'nextest-nightly-archive-${{ matrix.platform.os }}.tar.zst' --extract-to ./ + test-cairo-lang-macro: + name: test cairo-lang-macro ${{ matrix.platform.name }} + runs-on: ${{ matrix.platform.os }} + strategy: + fail-fast: false + matrix: + platform: + - name: linux x86-64 + os: ubuntu-latest + - name: windows x86-64 + os: windows-latest + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@stable + - uses: Swatinem/rust-cache@v2 + - name: Run tests + # Note tests depending on trybuild crate cannot be run with nextest, + # as they require access to cargo build cache of the package, + # which is not archived with nextest-archive. + run: cargo test -p cairo-lang-macro --all-features + check-rust: runs-on: ubuntu-latest steps: diff --git a/Cargo.lock b/Cargo.lock index bef70fb23..b169a18fd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -954,6 +954,7 @@ dependencies = [ "linkme", "serde", "serde_json", + "trybuild", ] [[package]] @@ -6930,6 +6931,15 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "termcolor" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" +dependencies = [ + "winapi-util", +] + [[package]] name = "termtree" version = "0.5.1" @@ -7356,6 +7366,21 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" +[[package]] +name = "trybuild" +version = "1.0.101" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8dcd332a5496c026f1e14b7f3d2b7bd98e509660c04239c58b0ba38a12daded4" +dependencies = [ + "glob", + "serde", + "serde_derive", + "serde_json", + "target-triple", + "termcolor", + "toml", +] + [[package]] name = "typed-builder" version = "0.21.0" diff --git a/Cargo.toml b/Cargo.toml index 75ace73c3..b97bd8f99 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -139,6 +139,7 @@ tower-http = { version = "0.4", features = ["fs"] } tracing = "0.1" tracing-core = "0.1" tracing-subscriber = { version = "0.3", features = ["env-filter"] } +trybuild = "1.0.101" typed-builder = ">=0.17" url = { version = "2", features = ["serde"] } walkdir = "2" diff --git a/plugins/cairo-lang-macro-attributes/src/lib.rs b/plugins/cairo-lang-macro-attributes/src/lib.rs index bc00960ff..ee0c51427 100644 --- a/plugins/cairo-lang-macro-attributes/src/lib.rs +++ b/plugins/cairo-lang-macro-attributes/src/lib.rs @@ -2,7 +2,10 @@ use proc_macro::TokenStream; use quote::{ToTokens, quote}; use scarb_stable_hash::short_hash; use syn::spanned::Spanned; -use syn::{Expr, ItemFn, LitStr, Meta, parse_macro_input}; +use syn::{ + Expr, Ident, ItemFn, LitStr, Meta, Result, Token, parse::Parse, parse::ParseStream, + parse_macro_input, +}; /// Constructs the attribute macro implementation. 
/// @@ -10,9 +13,10 @@ use syn::{Expr, ItemFn, LitStr, Meta, parse_macro_input}; /// /// Note, that this macro can be used multiple times, to define multiple independent attribute macros. #[proc_macro_attribute] -pub fn attribute_macro(_args: TokenStream, input: TokenStream) -> TokenStream { +pub fn attribute_macro(args: TokenStream, input: TokenStream) -> TokenStream { macro_helper( input, + parse_macro_input!(args as AttributeArgs), quote!(::cairo_lang_macro::ExpansionKind::Attr), quote!(::cairo_lang_macro::ExpansionFunc::Attr), ) @@ -24,9 +28,18 @@ pub fn attribute_macro(_args: TokenStream, input: TokenStream) -> TokenStream { /// /// Note, that this macro can be used multiple times, to define multiple independent attribute macros. #[proc_macro_attribute] -pub fn inline_macro(_args: TokenStream, input: TokenStream) -> TokenStream { +pub fn inline_macro(args: TokenStream, input: TokenStream) -> TokenStream { + // Emit compilation error if `parent` argument is used. + let attribute_args = parse_macro_input!(args as AttributeArgs); + if let Some(path) = attribute_args.parent_module_path { + return syn::Error::new(path.span(), "inline macro cannot use `parent` argument") + .to_compile_error() + .into(); + } + // Otherwise, proceed with the macro expansion. macro_helper( input, + Default::default(), quote!(::cairo_lang_macro::ExpansionKind::Inline), quote!(::cairo_lang_macro::ExpansionFunc::Other), ) @@ -38,18 +51,35 @@ pub fn inline_macro(_args: TokenStream, input: TokenStream) -> TokenStream { /// /// Note, that this macro can be used multiple times, to define multiple independent attribute macros. #[proc_macro_attribute] -pub fn derive_macro(_args: TokenStream, input: TokenStream) -> TokenStream { +pub fn derive_macro(args: TokenStream, input: TokenStream) -> TokenStream { macro_helper( input, + parse_macro_input!(args as AttributeArgs), quote!(::cairo_lang_macro::ExpansionKind::Derive), quote!(::cairo_lang_macro::ExpansionFunc::Other), ) } -fn macro_helper(input: TokenStream, kind: impl ToTokens, func: impl ToTokens) -> TokenStream { +fn macro_helper( + input: TokenStream, + args: AttributeArgs, + kind: impl ToTokens, + func: impl ToTokens, +) -> TokenStream { let item: ItemFn = parse_macro_input!(input as ItemFn); let original_item_name = item.sig.ident.to_string(); + let expansion_name = if let Some(path) = args.parent_module_path { + let value = path.value(); + if !is_valid_path(&value) { + return syn::Error::new(path.span(), "`parent` argument is not a valid path") + .to_compile_error() + .into(); + } + format!("{}::{}", value, original_item_name) + } else { + original_item_name + }; let doc = item .attrs .iter() @@ -74,7 +104,7 @@ fn macro_helper(input: TokenStream, kind: impl ToTokens, func: impl ToTokens) -> item_name.to_string().to_uppercase() ); - let callback_link = syn::Ident::new(callback_link.as_str(), item.span()); + let callback_link = Ident::new(callback_link.as_str(), item.span()); let expanded = quote! 
{ #item @@ -83,7 +113,7 @@ fn macro_helper(input: TokenStream, kind: impl ToTokens, func: impl ToTokens) -> #[linkme(crate = ::cairo_lang_macro::linkme)] static #callback_link: ::cairo_lang_macro::ExpansionDefinition = ::cairo_lang_macro::ExpansionDefinition{ - name: #original_item_name, + name: #expansion_name, doc: #doc, kind: #kind, fun: #func(#item_name), @@ -92,6 +122,55 @@ fn macro_helper(input: TokenStream, kind: impl ToTokens, func: impl ToTokens) -> TokenStream::from(expanded) } +#[derive(Default)] +struct AttributeArgs { + parent_module_path: Option, +} + +impl Parse for AttributeArgs { + fn parse(input: ParseStream) -> Result { + if input.is_empty() { + return Ok(Self { + parent_module_path: None, + }); + } + let parent_identifier: Ident = input.parse()?; + if parent_identifier != "parent" { + return Err(input.error("only `parent` argument is supported")); + } + let _eq_token: Token![=] = input.parse()?; + let parent_module_path: LitStr = input.parse()?; + Ok(Self { + parent_module_path: Some(parent_module_path), + }) + } +} + +fn is_valid_path(path: &str) -> bool { + let mut chars = path.chars().peekable(); + let mut last_was_colon = false; + while let Some(c) = chars.next() { + if c.is_alphanumeric() || c == '_' { + last_was_colon = false; + } else if c == ':' { + if last_was_colon { + // If the last character was also a colon, continue + last_was_colon = false; + } else { + // If the next character is not a colon, it's an error + if chars.peek() != Some(&':') { + return false; + } + last_was_colon = true; + } + } else { + return false; + } + } + // If the loop ends with a colon flag still true, it means the string ended with a single colon. + !last_was_colon +} + /// Constructs the post-processing callback. /// /// This callback will be called after the source code compilation (and thus after all the procedural @@ -123,7 +202,7 @@ pub fn post_process(_args: TokenStream, input: TokenStream) -> TokenStream { "POST_PROCESS_DESERIALIZE_{}", item_name.to_string().to_uppercase() ); - let callback_link = syn::Ident::new(callback_link.as_str(), item.span()); + let callback_link = Ident::new(callback_link.as_str(), item.span()); let expanded = quote! { #item @@ -140,7 +219,7 @@ pub fn post_process(_args: TokenStream, input: TokenStream) -> TokenStream { fn hide_name(mut item: ItemFn) -> ItemFn { let id = short_hash(item.sig.ident.to_string()); let item_name = format!("{}_{}", item.sig.ident, id); - item.sig.ident = syn::Ident::new(item_name.as_str(), item.sig.ident.span()); + item.sig.ident = Ident::new(item_name.as_str(), item.sig.ident.span()); item } @@ -150,9 +229,9 @@ const EXEC_ATTR_PREFIX: &str = "__exec_attr_"; pub fn executable_attribute(input: TokenStream) -> TokenStream { let input: LitStr = parse_macro_input!(input as LitStr); let callback_link = format!("EXEC_ATTR_DESERIALIZE{}", input.value().to_uppercase()); - let callback_link = syn::Ident::new(callback_link.as_str(), input.span()); + let callback_link = Ident::new(callback_link.as_str(), input.span()); let item_name = format!("{EXEC_ATTR_PREFIX}{}", input.value()); - let org_name = syn::Ident::new(item_name.as_str(), input.span()); + let org_name = Ident::new(item_name.as_str(), input.span()); let expanded = quote! { fn #org_name() { // No op to ensure no function with the same name is created. 
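For orientation, a minimal usage sketch of the new `parent` argument on the macro-author side; the package path below is purely illustrative, and the pattern mirrors the `arguments_parsing` test added further down in this patch:

use cairo_lang_macro::{attribute_macro, ProcMacroResult, TokenStream};

// With `parent` set, the expansion is registered as
// `my_package::utils::passthrough` rather than the bare `passthrough`.
#[attribute_macro(parent = "my_package::utils")]
fn passthrough(_attr: TokenStream, body: TokenStream) -> ProcMacroResult {
    // No-op: return the annotated item unchanged.
    ProcMacroResult::new(body)
}

As the `inline_macro` branch above enforces, the `parent` argument is rejected for inline macros, and the supplied path must be a valid `::`-separated module path.
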
diff --git a/plugins/cairo-lang-macro/Cargo.toml b/plugins/cairo-lang-macro/Cargo.toml index 4fe886e96..021c43d84 100644 --- a/plugins/cairo-lang-macro/Cargo.toml +++ b/plugins/cairo-lang-macro/Cargo.toml @@ -25,6 +25,7 @@ serde = { workspace = true, optional = true } [dev-dependencies] serde.workspace = true serde_json.workspace = true +trybuild.workspace = true [features] serde = ["dep:serde"] diff --git a/plugins/cairo-lang-macro/tests/args/args_01.rs b/plugins/cairo-lang-macro/tests/args/args_01.rs new file mode 100644 index 000000000..8c981619d --- /dev/null +++ b/plugins/cairo-lang-macro/tests/args/args_01.rs @@ -0,0 +1,8 @@ +use cairo_lang_macro::{attribute_macro, ProcMacroResult, TokenStream}; + +#[attribute_macro(unsupported_key = "some::path")] +fn t1(_a: TokenStream, _b: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(TokenStream::empty()) +} + +fn main() {} diff --git a/plugins/cairo-lang-macro/tests/args/args_01.stderr b/plugins/cairo-lang-macro/tests/args/args_01.stderr new file mode 100644 index 000000000..1edf93be5 --- /dev/null +++ b/plugins/cairo-lang-macro/tests/args/args_01.stderr @@ -0,0 +1,13 @@ +error: only `parent` argument is supported + --> tests/args/args_01.rs:3:35 + | +3 | #[attribute_macro(unsupported_key = "some::path")] + | ^ + +warning: unused imports: `ProcMacroResult` and `TokenStream` + --> tests/args/args_01.rs:1:41 + | +1 | use cairo_lang_macro::{attribute_macro, ProcMacroResult, TokenStream}; + | ^^^^^^^^^^^^^^^ ^^^^^^^^^^^ + | + = note: `#[warn(unused_imports)]` on by default diff --git a/plugins/cairo-lang-macro/tests/args/args_02.rs b/plugins/cairo-lang-macro/tests/args/args_02.rs new file mode 100644 index 000000000..79736a53d --- /dev/null +++ b/plugins/cairo-lang-macro/tests/args/args_02.rs @@ -0,0 +1,8 @@ +use cairo_lang_macro::{attribute_macro, ProcMacroResult, TokenStream}; + +#[attribute_macro(parent = "a-b")] +fn t1(_a: TokenStream, _b: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(TokenStream::empty()) +} + +fn main() {} diff --git a/plugins/cairo-lang-macro/tests/args/args_02.stderr b/plugins/cairo-lang-macro/tests/args/args_02.stderr new file mode 100644 index 000000000..8d14a2803 --- /dev/null +++ b/plugins/cairo-lang-macro/tests/args/args_02.stderr @@ -0,0 +1,13 @@ +error: `parent` argument is not a valid path + --> tests/args/args_02.rs:3:28 + | +3 | #[attribute_macro(parent = "a-b")] + | ^^^^^ + +warning: unused imports: `ProcMacroResult` and `TokenStream` + --> tests/args/args_02.rs:1:41 + | +1 | use cairo_lang_macro::{attribute_macro, ProcMacroResult, TokenStream}; + | ^^^^^^^^^^^^^^^ ^^^^^^^^^^^ + | + = note: `#[warn(unused_imports)]` on by default diff --git a/plugins/cairo-lang-macro/tests/args/args_03.rs b/plugins/cairo-lang-macro/tests/args/args_03.rs new file mode 100644 index 000000000..51b54a92e --- /dev/null +++ b/plugins/cairo-lang-macro/tests/args/args_03.rs @@ -0,0 +1,8 @@ +use cairo_lang_macro::{inline_macro, ProcMacroResult, TokenStream, MACRO_DEFINITIONS_SLICE}; + +#[inline_macro(parent = "parent")] +fn t1(_a: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(TokenStream::empty()) +} + +fn main() {} diff --git a/plugins/cairo-lang-macro/tests/args/args_03.stderr b/plugins/cairo-lang-macro/tests/args/args_03.stderr new file mode 100644 index 000000000..9d7bda0e1 --- /dev/null +++ b/plugins/cairo-lang-macro/tests/args/args_03.stderr @@ -0,0 +1,13 @@ +error: inline macro cannot use `parent` argument + --> tests/args/args_03.rs:3:25 + | +3 | #[inline_macro(parent = "parent")] + | ^^^^^^^^ + 
+warning: unused imports: `MACRO_DEFINITIONS_SLICE`, `ProcMacroResult`, and `TokenStream` + --> tests/args/args_03.rs:1:38 + | +1 | use cairo_lang_macro::{inline_macro, ProcMacroResult, TokenStream, MACRO_DEFINITIONS_SLICE}; + | ^^^^^^^^^^^^^^^ ^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: `#[warn(unused_imports)]` on by default diff --git a/plugins/cairo-lang-macro/tests/arguments_parsing.rs b/plugins/cairo-lang-macro/tests/arguments_parsing.rs new file mode 100644 index 000000000..087d9a1a1 --- /dev/null +++ b/plugins/cairo-lang-macro/tests/arguments_parsing.rs @@ -0,0 +1,40 @@ +use cairo_lang_macro::{MACRO_DEFINITIONS_SLICE, ProcMacroResult, TokenStream, attribute_macro}; +use cairo_lang_macro_attributes::derive_macro; + +#[attribute_macro] +fn t1(_a: TokenStream, _b: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(TokenStream::empty()) +} + +#[attribute_macro(parent = "parent_1::module")] +fn t2(_a: TokenStream, _b: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(TokenStream::empty()) +} + +#[attribute_macro(parent = "::parent")] +fn t3(_a: TokenStream, _b: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(TokenStream::empty()) +} + +#[derive_macro(parent = "parent")] +fn t4(_a: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(TokenStream::empty()) +} + +#[test] +fn happy_path() { + let list: Vec = MACRO_DEFINITIONS_SLICE + .iter() + .map(|m| m.name.to_string()) + .collect(); + assert_eq!( + list, + vec!["t1", "parent_1::module::t2", "::parent::t3", "parent::t4"] + ); +} + +#[test] +fn test_parsing_errors() { + let t = trybuild::TestCases::new(); + t.compile_fail("tests/args/args_*.rs"); +} From 831d28ecc843c41ba8cb06392efcb73bec656bdf Mon Sep 17 00:00:00 2001 From: maciektr Date: Wed, 29 Jan 2025 15:32:47 +0100 Subject: [PATCH 16/38] Fix quote! parsing for puncts not followed by ident & emit a whitespace preceding a quote var (#1930) --- plugins/cairo-lang-quote/src/lib.rs | 111 +++++++++++++++++++++++++--- 1 file changed, 99 insertions(+), 12 deletions(-) diff --git a/plugins/cairo-lang-quote/src/lib.rs b/plugins/cairo-lang-quote/src/lib.rs index 332bcb98d..e3627445b 100644 --- a/plugins/cairo-lang-quote/src/lib.rs +++ b/plugins/cairo-lang-quote/src/lib.rs @@ -1,12 +1,12 @@ use std::iter::Peekable; -use proc_macro::{Delimiter, TokenStream as RustTokenStream, TokenTree as RustTokenTree}; -use proc_macro2::{Ident, Span}; +use proc_macro2::{Delimiter, Ident, Span, TokenTree}; extern crate proc_macro; use quote::quote as rust_quote; #[derive(Debug)] +#[cfg_attr(test, derive(PartialEq))] enum QuoteToken { Var(Ident), Content(String), @@ -33,7 +33,7 @@ impl QuoteToken { } fn process_token_stream( - mut token_stream: Peekable>, + mut token_stream: Peekable>, output: &mut Vec, ) { // Rust proc macro parser to TokenStream gets rid of all whitespaces. @@ -41,7 +41,7 @@ fn process_token_stream( let mut was_previous_ident: bool = false; while let Some(token_tree) = token_stream.next() { match token_tree { - RustTokenTree::Group(group) => { + TokenTree::Group(group) => { let token_iter = group.stream().into_iter().peekable(); let delimiter = group.delimiter(); output.push(QuoteToken::from_delimiter( @@ -55,25 +55,34 @@ fn process_token_stream( )); was_previous_ident = false; } - RustTokenTree::Punct(punct) => { + TokenTree::Punct(punct) => { if punct.as_char() == '#' { - if let Some(RustTokenTree::Ident(ident)) = token_stream.next() { + // Only peek, so items precessed with punct can be handled in next iteration. 
+ if let Some(TokenTree::Ident(ident)) = token_stream.peek() { + if was_previous_ident { + output.push(QuoteToken::Whitespace); + } let var_ident = Ident::new(&ident.to_string(), Span::call_site()); - output.push(QuoteToken::Var(var_ident)) + output.push(QuoteToken::Var(var_ident)); + // Move iterator, as we only did peek before. + let _ = token_stream.next(); + } else { + // E.g. to support Cairo attributes (i.e. punct followed by non-ident `#[`). + output.push(QuoteToken::Content(punct.to_string())); } } else { output.push(QuoteToken::Content(punct.to_string())); } was_previous_ident = false; } - RustTokenTree::Ident(ident) => { + TokenTree::Ident(ident) => { if was_previous_ident { output.push(QuoteToken::Whitespace); } output.push(QuoteToken::Content(ident.to_string())); was_previous_ident = true; } - RustTokenTree::Literal(literal) => { + TokenTree::Literal(literal) => { output.push(QuoteToken::Content(literal.to_string())); was_previous_ident = false; } @@ -82,13 +91,15 @@ fn process_token_stream( } #[proc_macro] -pub fn quote(input: RustTokenStream) -> RustTokenStream { - let mut parsed_input: Vec = Vec::new(); +pub fn quote(input: proc_macro::TokenStream) -> proc_macro::TokenStream { let mut output_token_stream = rust_quote! { let mut quote_macro_result = ::cairo_lang_macro::TokenStream::empty(); }; + let input: proc_macro2::TokenStream = input.into(); let token_iter = input.into_iter().peekable(); + let (size_hint_lower, _) = token_iter.size_hint(); + let mut parsed_input: Vec = Vec::with_capacity(size_hint_lower); process_token_stream(token_iter, &mut parsed_input); for quote_token in parsed_input.iter() { @@ -108,8 +119,84 @@ pub fn quote(input: RustTokenStream) -> RustTokenStream { }), } } - RustTokenStream::from(rust_quote!({ + proc_macro::TokenStream::from(rust_quote!({ #output_token_stream quote_macro_result })) } + +#[cfg(test)] +mod tests { + use super::{process_token_stream, QuoteToken}; + use proc_macro2::{Ident, Span}; + use quote::{quote as rust_quote, TokenStreamExt}; + + #[test] + fn parse_cairo_attr() { + let input: proc_macro2::TokenStream = rust_quote! { + #[some_attr] + fn some_fun() { + + } + }; + let mut output = Vec::new(); + process_token_stream(input.into_iter().peekable(), &mut output); + assert_eq!( + output, + vec![ + QuoteToken::Content("#".to_string()), + QuoteToken::Content("[".to_string()), + QuoteToken::Content("some_attr".to_string()), + QuoteToken::Content("]".to_string()), + QuoteToken::Content("fn".to_string()), + QuoteToken::Whitespace, + QuoteToken::Content("some_fun".to_string()), + QuoteToken::Content("(".to_string()), + QuoteToken::Content(")".to_string()), + QuoteToken::Content("{".to_string()), + QuoteToken::Content("}".to_string()), + ] + ); + } + + #[test] + fn quote_var_whitespace() { + /* + Construct program input, equivalent to following: + input = rust_quote! { + #[some_attr] + mod #name { + } + } + In a way that avoids `#name` being parsed as `rust_quote` var. + */ + let mut input: proc_macro2::TokenStream = rust_quote! { + #[some_attr] + mod + }; + input.append(proc_macro2::TokenTree::Punct(proc_macro2::Punct::new( + '#', + proc_macro2::Spacing::Joint, + ))); + input.extend(rust_quote! 
{ + name { + } + }); + let mut output = Vec::new(); + process_token_stream(input.into_iter().peekable(), &mut output); + assert_eq!( + output, + vec![ + QuoteToken::Content("#".to_string()), + QuoteToken::Content("[".to_string()), + QuoteToken::Content("some_attr".to_string()), + QuoteToken::Content("]".to_string()), + QuoteToken::Content("mod".to_string()), + QuoteToken::Whitespace, + QuoteToken::Var(Ident::new("name", Span::call_site())), + QuoteToken::Content("{".to_string()), + QuoteToken::Content("}".to_string()), + ] + ); + } +} From a879ad84d01ca38dc913409ae7a1f88283114ef6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maciej=20Tr=C4=85tnowiecki?= Date: Wed, 5 Feb 2025 21:40:25 +0100 Subject: [PATCH 17/38] Pin cairo-lang-primitive-token to cairo git commit instead of registry dep --- Cargo.lock | 15 ++++++++++----- Cargo.toml | 1 + plugins/cairo-lang-macro/Cargo.toml | 2 +- 3 files changed, 12 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b169a18fd..0eb9d1cdc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -949,7 +949,7 @@ dependencies = [ "bumpalo", "cairo-lang-macro-attributes 0.1.0", "cairo-lang-macro-stable 1.0.0", - "cairo-lang-primitive-token", + "cairo-lang-primitive-token 1.0.0 (git+https://github.com/starkware-libs/cairo?rev=1e7992b5fd6e8a0d921613dc3b49c1162dc955ea)", "cairo-lang-quote", "linkme", "serde", @@ -1006,7 +1006,7 @@ source = "git+https://github.com/starkware-libs/cairo?rev=76e8c943895c904da121fb dependencies = [ "cairo-lang-diagnostics", "cairo-lang-filesystem", - "cairo-lang-primitive-token", + "cairo-lang-primitive-token 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "cairo-lang-syntax", "cairo-lang-syntax-codegen", "cairo-lang-utils", @@ -1043,6 +1043,11 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "123ac0ecadf31bacae77436d72b88fa9caef2b8e92c89ce63a125ae911a12fae" +[[package]] +name = "cairo-lang-primitive-token" +version = "1.0.0" +source = "git+https://github.com/starkware-libs/cairo?rev=1e7992b5fd6e8a0d921613dc3b49c1162dc955ea#1e7992b5fd6e8a0d921613dc3b49c1162dc955ea" + [[package]] name = "cairo-lang-proc-macros" version = "2.11.2" @@ -1311,7 +1316,7 @@ source = "git+https://github.com/starkware-libs/cairo?rev=76e8c943895c904da121fb dependencies = [ "cairo-lang-debug", "cairo-lang-filesystem", - "cairo-lang-primitive-token", + "cairo-lang-primitive-token 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "cairo-lang-utils", "num-bigint", "num-traits", @@ -7368,9 +7373,9 @@ checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "trybuild" -version = "1.0.101" +version = "1.0.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8dcd332a5496c026f1e14b7f3d2b7bd98e509660c04239c58b0ba38a12daded4" +checksum = "6ae08be68c056db96f0e6c6dd820727cca756ced9e1f4cc7fdd20e2a55e23898" dependencies = [ "glob", "serde", diff --git a/Cargo.toml b/Cargo.toml index b97bd8f99..7a5f128ce 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -51,6 +51,7 @@ cairo-lang-filesystem = "*" cairo-lang-formatter = "*" cairo-lang-lowering = "*" cairo-lang-parser = "*" +cairo-lang-primitive-token = { git = "https://github.com/starkware-libs/cairo", rev = "1e7992b5fd6e8a0d921613dc3b49c1162dc955ea" } cairo-lang-runner = "*" cairo-lang-semantic = "*" cairo-lang-sierra = "*" diff --git a/plugins/cairo-lang-macro/Cargo.toml b/plugins/cairo-lang-macro/Cargo.toml index 021c43d84..e6e08912c 100644 --- a/plugins/cairo-lang-macro/Cargo.toml 
+++ b/plugins/cairo-lang-macro/Cargo.toml @@ -17,7 +17,7 @@ repository.workspace = true bumpalo.workspace = true cairo-lang-macro-attributes = { path = "../cairo-lang-macro-attributes" } cairo-lang-macro-stable = { path = "../cairo-lang-macro-stable" } -cairo-lang-primitive-token = "1.0.0" +cairo-lang-primitive-token.workspace = true cairo-lang-quote = { path = "../cairo-lang-quote", version = "0.1.0" } linkme.workspace = true serde = { workspace = true, optional = true } From 310c4e35de41fb5e2977b0bfa2ac60720afd1282 Mon Sep 17 00:00:00 2001 From: maciektr Date: Fri, 14 Feb 2025 17:14:36 +0100 Subject: [PATCH 18/38] Make token stream empty and new public (#1964) --- Cargo.lock | 11 +-- Cargo.toml | 2 +- plugins/cairo-lang-macro/src/types/token.rs | 56 ++++++++++- scarb/tests/proc_macro_quote.rs | 93 +++++++++++++++++++ .../src/cairo_plugin_project_builder.rs | 2 +- 5 files changed, 152 insertions(+), 12 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0eb9d1cdc..602ef6ecb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -949,7 +949,7 @@ dependencies = [ "bumpalo", "cairo-lang-macro-attributes 0.1.0", "cairo-lang-macro-stable 1.0.0", - "cairo-lang-primitive-token 1.0.0 (git+https://github.com/starkware-libs/cairo?rev=1e7992b5fd6e8a0d921613dc3b49c1162dc955ea)", + "cairo-lang-primitive-token", "cairo-lang-quote", "linkme", "serde", @@ -1006,7 +1006,7 @@ source = "git+https://github.com/starkware-libs/cairo?rev=76e8c943895c904da121fb dependencies = [ "cairo-lang-diagnostics", "cairo-lang-filesystem", - "cairo-lang-primitive-token 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cairo-lang-primitive-token", "cairo-lang-syntax", "cairo-lang-syntax-codegen", "cairo-lang-utils", @@ -1043,11 +1043,6 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "123ac0ecadf31bacae77436d72b88fa9caef2b8e92c89ce63a125ae911a12fae" -[[package]] -name = "cairo-lang-primitive-token" -version = "1.0.0" -source = "git+https://github.com/starkware-libs/cairo?rev=1e7992b5fd6e8a0d921613dc3b49c1162dc955ea#1e7992b5fd6e8a0d921613dc3b49c1162dc955ea" - [[package]] name = "cairo-lang-proc-macros" version = "2.11.2" @@ -1316,7 +1311,7 @@ source = "git+https://github.com/starkware-libs/cairo?rev=76e8c943895c904da121fb dependencies = [ "cairo-lang-debug", "cairo-lang-filesystem", - "cairo-lang-primitive-token 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cairo-lang-primitive-token", "cairo-lang-utils", "num-bigint", "num-traits", diff --git a/Cargo.toml b/Cargo.toml index 7a5f128ce..60c12df0c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -51,7 +51,7 @@ cairo-lang-filesystem = "*" cairo-lang-formatter = "*" cairo-lang-lowering = "*" cairo-lang-parser = "*" -cairo-lang-primitive-token = { git = "https://github.com/starkware-libs/cairo", rev = "1e7992b5fd6e8a0d921613dc3b49c1162dc955ea" } +cairo-lang-primitive-token = "1" cairo-lang-runner = "*" cairo-lang-semantic = "*" cairo-lang-sierra = "*" diff --git a/plugins/cairo-lang-macro/src/types/token.rs b/plugins/cairo-lang-macro/src/types/token.rs index e1f15bcd1..e5b015970 100644 --- a/plugins/cairo-lang-macro/src/types/token.rs +++ b/plugins/cairo-lang-macro/src/types/token.rs @@ -251,7 +251,7 @@ pub struct TokenStreamMetadata { } impl TokenStream { - #[doc(hidden)] + /// Create a new [`TokenStream`] from a vector of [`TokenTree`]s. pub fn new(tokens: Vec) -> Self { Self { tokens, @@ -259,7 +259,7 @@ impl TokenStream { } } - #[doc(hidden)] + /// Create a new empty [`TokenStream`]. 
pub fn empty() -> Self { Self::new(Vec::default()) } @@ -323,6 +323,13 @@ impl Extend for TokenStream { } } +impl Extend for TokenStream { + fn extend>(&mut self, iter: T) { + iter.into_iter() + .for_each(|token_stream| self.extend(token_stream)); + } +} + impl Display for TokenStream { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { for token in &self.tokens { @@ -485,4 +492,49 @@ mod test { }) ); } + + #[test] + pub fn token_stream_can_be_extended_with_token_stream() { + let mut first = TokenStream::new(vec![TokenTree::Ident(Token::new( + "first", + TextSpan::new(0, 1), + ))]); + let second = TokenStream::new(vec![TokenTree::Ident(Token::new( + "second", + TextSpan::new(2, 3), + ))]); + first.extend(second); + assert_eq!( + first.tokens, + vec![ + TokenTree::Ident(Token::new("first", TextSpan::new(0, 1))), + TokenTree::Ident(Token::new("second", TextSpan::new(2, 3))), + ] + ); + } + + #[test] + pub fn token_stream_can_be_extended_with_vec_of_token_sterams() { + let mut first = TokenStream::new(vec![TokenTree::Ident(Token::new( + "first", + TextSpan::new(0, 1), + ))]); + let second = TokenStream::new(vec![TokenTree::Ident(Token::new( + "second", + TextSpan::new(2, 3), + ))]); + let third = TokenStream::new(vec![TokenTree::Ident(Token::new( + "third", + TextSpan::new(4, 5), + ))]); + first.extend(vec![second, third]); + assert_eq!( + first.tokens, + vec![ + TokenTree::Ident(Token::new("first", TextSpan::new(0, 1))), + TokenTree::Ident(Token::new("second", TextSpan::new(2, 3))), + TokenTree::Ident(Token::new("third", TextSpan::new(4, 5))), + ] + ); + } } diff --git a/scarb/tests/proc_macro_quote.rs b/scarb/tests/proc_macro_quote.rs index 1b3fade7f..8e6f486c9 100644 --- a/scarb/tests/proc_macro_quote.rs +++ b/scarb/tests/proc_macro_quote.rs @@ -374,3 +374,96 @@ fn can_use_quote_with_cairo_specific_syntax() { expanded, ); } + +#[test] +fn can_parse_incoming_token_stream() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .add_primitive_token_dep() + .add_dep(r#"cairo-lang-syntax = { git = "https://github.com/starkware-libs/cairo", rev = "b5fdf14a8bd2e4973e2adcec17abf1ae5c1ddfdc" }"#) + .add_dep(r#"cairo-lang-parser = { git = "https://github.com/starkware-libs/cairo", rev = "b5fdf14a8bd2e4973e2adcec17abf1ae5c1ddfdc" }"#) + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, quote}; + use cairo_lang_macro::{TokenTree, Token, TextSpan}; + use cairo_lang_parser::utils::SimpleParserDatabase; + use cairo_lang_syntax::node::with_db::SyntaxNodeWithDb; + #[attribute_macro] + pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let db_val = SimpleParserDatabase::default(); + let db = &db_val; + let (body, _diagnostics) = db.parse_token_stream(&token_stream); + let name = TokenTree::Ident(Token::new("new_module", TextSpan::call_site())); + let body = SyntaxNodeWithDb::new(&body, db); + let tokens = quote! { + mod #name { + #body + } + }; + ProcMacroResult::new(tokens) + } + "##}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[some] + fn main() -> u32 { + // completly wrong type + true + } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("expand") + // Disable output from Cargo. 
+ .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success(); + + assert_eq!( + project.child("target/dev").files(), + vec!["hello.expanded.cairo"] + ); + + let expanded = project + .child("target/dev/hello.expanded.cairo") + .read_to_string(); + + snapbox::assert_eq( + indoc! {r#" + mod hello { + mod new_module { + fn main() -> u32 { + // completly wrong type + true + } + } + } + "#}, + expanded, + ); + + Scarb::quick_snapbox() + .arg("check") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .failure() + .stdout_matches(indoc! {r#" + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Checking hello v1.0.0 ([..]Scarb.toml) + error: Unexpected return type. Expected: "core::integer::u32", found: "core::bool". + --> [..]lib.cairo:2:14 + fn main() -> u32 { + ^^^^ + + error: could not check `hello` due to previous error + "#}); +} diff --git a/utils/scarb-test-support/src/cairo_plugin_project_builder.rs b/utils/scarb-test-support/src/cairo_plugin_project_builder.rs index bec8bddf2..07e70d086 100644 --- a/utils/scarb-test-support/src/cairo_plugin_project_builder.rs +++ b/utils/scarb-test-support/src/cairo_plugin_project_builder.rs @@ -99,7 +99,7 @@ impl CairoPluginProjectBuilder { } pub fn add_primitive_token_dep(self) -> Self { - self.add_dep(r#"cairo-lang-primitive-token = "1.0.0""#) + self.add_dep(r#"cairo-lang-primitive-token = "1""#) } } From 7c78c3c09fec57fe91937ac760b14aca469eb1e4 Mon Sep 17 00:00:00 2001 From: Maksim Zdobnikau <43750648+DelevoXDG@users.noreply.github.com> Date: Tue, 25 Feb 2025 18:17:47 +0300 Subject: [PATCH 19/38] Fix `#token` interpolation in quote macro (#2013) Closes #1992 --- plugins/cairo-lang-quote/src/lib.rs | 48 +++++++++++++++++- scarb/tests/proc_macro_quote.rs | 79 +++++++++++++++++++++++++++++ 2 files changed, 126 insertions(+), 1 deletion(-) diff --git a/plugins/cairo-lang-quote/src/lib.rs b/plugins/cairo-lang-quote/src/lib.rs index e3627445b..4808ded8e 100644 --- a/plugins/cairo-lang-quote/src/lib.rs +++ b/plugins/cairo-lang-quote/src/lib.rs @@ -64,16 +64,18 @@ fn process_token_stream( } let var_ident = Ident::new(&ident.to_string(), Span::call_site()); output.push(QuoteToken::Var(var_ident)); + was_previous_ident = true; // Move iterator, as we only did peek before. let _ = token_stream.next(); } else { // E.g. to support Cairo attributes (i.e. punct followed by non-ident `#[`). output.push(QuoteToken::Content(punct.to_string())); + was_previous_ident = false; } } else { output.push(QuoteToken::Content(punct.to_string())); + was_previous_ident = false; } - was_previous_ident = false; } TokenTree::Ident(ident) => { if was_previous_ident { @@ -199,4 +201,48 @@ mod tests { ] ); } + + #[test] + fn interpolate_tokens() { + use super::{process_token_stream, QuoteToken}; + use proc_macro2::{Ident, Punct, Spacing, Span, TokenTree}; + use quote::{quote as rust_quote, TokenStreamExt}; + + // impl #impl_token of NameTrait<#name_token> {} + + let mut input: proc_macro2::TokenStream = rust_quote! { + impl + }; + input.append(TokenTree::Punct(Punct::new('#', Spacing::Joint))); + input.extend(rust_quote! { + impl_token + }); + input.extend(rust_quote! { + of NameTrait< + }); + input.append(TokenTree::Punct(Punct::new('#', Spacing::Joint))); + input.extend(rust_quote! 
{ + name_token> {} + }); + + let mut output = Vec::new(); + process_token_stream(input.into_iter().peekable(), &mut output); + assert_eq!( + output, + vec![ + QuoteToken::Content("impl".to_string()), + QuoteToken::Whitespace, + QuoteToken::Var(Ident::new("impl_token", Span::call_site())), + QuoteToken::Whitespace, + QuoteToken::Content("of".to_string()), + QuoteToken::Whitespace, + QuoteToken::Content("NameTrait".to_string()), + QuoteToken::Content("<".to_string()), + QuoteToken::Var(Ident::new("name_token", Span::call_site())), + QuoteToken::Content(">".to_string()), + QuoteToken::Content("{".to_string()), + QuoteToken::Content("}".to_string()), + ] + ); + } } diff --git a/scarb/tests/proc_macro_quote.rs b/scarb/tests/proc_macro_quote.rs index 8e6f486c9..8ed6a146b 100644 --- a/scarb/tests/proc_macro_quote.rs +++ b/scarb/tests/proc_macro_quote.rs @@ -467,3 +467,82 @@ fn can_parse_incoming_token_stream() { error: could not check `hello` due to previous error "#}); } + +#[test] +fn can_parse_with_token_interpolation() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .add_primitive_token_dep() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, TokenTree, Token, TextSpan, quote}; + #[attribute_macro] + pub fn some(_attr: TokenStream, _token_stream: TokenStream) -> ProcMacroResult { + let name_string = "MyStruct".to_string(); + let name_token = TokenTree::Ident(Token::new(name_string.clone(), TextSpan::call_site())); + let impl_string = format!("{}NameImpl", name_string); + let impl_token = TokenTree::Ident(Token::new(impl_string, TextSpan::call_site())); + let res_string = format!("\"{}\"", name_string); + let res_token = TokenTree::Ident(Token::new(res_string, TextSpan::call_site())); + let tokens = quote! { + impl #impl_token of NameTrait<#name_token> { + fn name(self: @#name_token) -> ByteArray { + #res_token + } + } + }; + ProcMacroResult::new(tokens) + } + "##}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + pub trait NameTrait { + fn name(self: @NameTrait) -> ByteArray; + } + pub struct MyStruct {} + + #[some] + fn main() -> u32 { + true + } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("expand") + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success(); + + assert_eq!( + project.child("target/dev").files(), + vec!["hello.expanded.cairo"] + ); + + let expanded = project + .child("target/dev/hello.expanded.cairo") + .read_to_string(); + + snapbox::assert_eq( + indoc! 
{r#" + mod hello { + pub trait NameTrait { + fn name(self: @NameTrait) -> ByteArray; + } + pub struct MyStruct {} + impl MyStructNameImpl of NameTrait { + fn name(self: @MyStruct) -> ByteArray { + "MyStruct" + } + } + } + "#}, + expanded, + ); +} From d71e118878843786336f8b2fc3ae0b97d66c141f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maciej=20Tr=C4=85tnowiecki?= Date: Thu, 27 Feb 2025 17:54:33 +0100 Subject: [PATCH 20/38] Update test cases after dev rebase --- scarb/tests/proc_macro_expand.rs | 22 ++++++++++++------- scarb/tests/proc_macro_metadata.rs | 35 +++++++++++++++++------------- 2 files changed, 34 insertions(+), 23 deletions(-) diff --git a/scarb/tests/proc_macro_expand.rs b/scarb/tests/proc_macro_expand.rs index b0885d62f..0d7ac11c8 100644 --- a/scarb/tests/proc_macro_expand.rs +++ b/scarb/tests/proc_macro_expand.rs @@ -1448,16 +1448,22 @@ fn can_be_used_through_re_export() { let t = temp.child("some"); CairoPluginProjectBuilder::default() .lib_rs(indoc! {r##" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + use cairo_lang_macro::{ProcMacroResult, TokenStream, TokenTree, Token, TextSpan, attribute_macro}; #[attribute_macro] pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - let token_stream = TokenStream::new( + ProcMacroResult::new(TokenStream::new( token_stream - .to_string() - .replace("12", "34") - ); - ProcMacroResult::new(token_stream) + .into_iter() + .map(|TokenTree::Ident(token)| { + if token.content.to_string() == "12" { + TokenTree::Ident(Token::new("34", TextSpan::call_site())) + } else { + TokenTree::Ident(token) + } + }) + .collect(), + )) } "##}) .build(&t); @@ -1477,7 +1483,7 @@ fn can_be_used_through_re_export() { .dep("dep", &dep) .lib_cairo(indoc! {r#" #[some] - fn main() -> felt252 { 12 } + fn main() -> felt252 {12} "#}) .build(&project); @@ -1648,7 +1654,7 @@ fn code_mappings_preserve_inline_macro_error_locations() { .stdout_matches(indoc! {r#" [..] Compiling some v1.0.0 ([..]Scarb.toml) [..] Compiling hello v1.0.0 ([..]Scarb.toml) - error: Identifier not found. + error[E0006]: Identifier not found. --> [..]lib.cairo:1:1 fn main() -> felt252 { ^^^^^^^^^ diff --git a/scarb/tests/proc_macro_metadata.rs b/scarb/tests/proc_macro_metadata.rs index 438593601..568c1e892 100644 --- a/scarb/tests/proc_macro_metadata.rs +++ b/scarb/tests/proc_macro_metadata.rs @@ -133,26 +133,31 @@ fn can_resolve_full_path_markers() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); CairoPluginProjectBuilder::default() + .add_primitive_token_dep() .lib_rs(indoc! 
{r##" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, post_process, PostProcessContext, TokenTree, Token, TextSpan}; + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, post_process, PostProcessContext, TokenTree, Token, TextSpan, quote}; #[attribute_macro] pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { let full_path_markers = vec!["some-key".to_string()]; - - let code = format!( - r#"#[macro::full_path_marker("some-key")] {}"#, - token_stream.to_string().replace("12", "34") + let token_stream = TokenStream::new( + token_stream + .into_iter() + .map(|TokenTree::Ident(token)| { + if token.content.to_string() == "12" { + TokenTree::Ident(Token::new("34", TextSpan::call_site())) + } else { + TokenTree::Ident(token) + } + }) + .collect(), ); - - ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new( - code.clone(), - TextSpan { - start: 0, - end: code.len() as u32, - }, - ))]) - ).with_full_path_markers(full_path_markers) + let code = quote!( + #[macro::full_path_marker("some-key")] + #token_stream + ); + ProcMacroResult::new(code) + .with_full_path_markers(full_path_markers) } #[post_process] @@ -170,7 +175,7 @@ fn can_resolve_full_path_markers() { .dep("some", &t) .lib_cairo(indoc! {r#" #[some] - fn main() -> felt252 { 12 } + fn main() -> felt252 {12} "#}) .build(&project); From 183fe0adfe296d7934e8fc054b4d47011da0665d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maciej=20Tr=C4=85tnowiecki?= Date: Thu, 27 Feb 2025 21:11:14 +0100 Subject: [PATCH 21/38] Remove unused ProcMacroHost --- plugins/cairo-lang-quote/src/lib.rs | 8 ++-- .../compiler/plugin/proc_macro/host/mod.rs | 37 +------------------ 2 files changed, 6 insertions(+), 39 deletions(-) diff --git a/plugins/cairo-lang-quote/src/lib.rs b/plugins/cairo-lang-quote/src/lib.rs index 4808ded8e..31d128992 100644 --- a/plugins/cairo-lang-quote/src/lib.rs +++ b/plugins/cairo-lang-quote/src/lib.rs @@ -129,9 +129,9 @@ pub fn quote(input: proc_macro::TokenStream) -> proc_macro::TokenStream { #[cfg(test)] mod tests { - use super::{process_token_stream, QuoteToken}; + use super::{QuoteToken, process_token_stream}; use proc_macro2::{Ident, Span}; - use quote::{quote as rust_quote, TokenStreamExt}; + use quote::{TokenStreamExt, quote as rust_quote}; #[test] fn parse_cairo_attr() { @@ -204,9 +204,9 @@ mod tests { #[test] fn interpolate_tokens() { - use super::{process_token_stream, QuoteToken}; + use super::{QuoteToken, process_token_stream}; use proc_macro2::{Ident, Punct, Spacing, Span, TokenTree}; - use quote::{quote as rust_quote, TokenStreamExt}; + use quote::{TokenStreamExt, quote as rust_quote}; // impl #impl_token of NameTrait<#name_token> {} diff --git a/scarb/src/compiler/plugin/proc_macro/host/mod.rs b/scarb/src/compiler/plugin/proc_macro/host/mod.rs index e2d48749d..9c6bd3ca5 100644 --- a/scarb/src/compiler/plugin/proc_macro/host/mod.rs +++ b/scarb/src/compiler/plugin/proc_macro/host/mod.rs @@ -9,10 +9,9 @@ use attribute::*; pub use aux_data::ProcMacroAuxData; use inline::*; -use crate::compiler::plugin::proc_macro::compilation::SharedLibraryProvider; use crate::compiler::plugin::proc_macro::{Expansion, ExpansionKind, ProcMacroInstance}; -use crate::core::{Config, Package, PackageId, edition_variant}; -use anyhow::{Context, Result, ensure}; +use crate::core::{PackageId, edition_variant}; +use anyhow::{Result, ensure}; use cairo_lang_defs::plugin::{MacroPlugin, MacroPluginMetadata, PluginResult}; use cairo_lang_filesystem::db::Edition; use 
cairo_lang_filesystem::ids::{CodeMapping, CodeOrigin}; @@ -232,38 +231,6 @@ impl MacroPlugin for ProcMacroHostPlugin { } } -/// A Scarb wrapper around the `ProcMacroHost` compiler plugin. -/// -/// This struct represent the compiler plugin in terms of Scarb data model. -/// It also builds a plugin suite that enables the compiler plugin. -#[derive(Default)] -pub struct ProcMacroHost { - macros: Vec>, -} - -impl ProcMacroHost { - pub fn register_instance(&mut self, instance: Arc) { - self.macros.push(instance); - } - - pub fn register_new(&mut self, package: Package, config: &Config) -> Result<()> { - let lib_path = package - .shared_lib_path(config) - .context("could not resolve shared library path")?; - let instance = ProcMacroInstance::try_new(package.id, lib_path)?; - self.register_instance(Arc::new(instance)); - Ok(()) - } - - pub fn into_plugin(self) -> Result { - ProcMacroHostPlugin::try_new(self.macros) - } - - pub fn macros(&self) -> &[Arc] { - &self.macros - } -} - fn generate_code_mappings(token_stream: &TokenStream) -> Vec { token_stream .tokens From 47434fff420e7d3e47b4238f09371429e2b27620 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maciej=20Tr=C4=85tnowiecki?= Date: Fri, 28 Feb 2025 14:16:05 +0100 Subject: [PATCH 22/38] Add test for token stream with null characters --- plugins/cairo-lang-macro/src/types/mod.rs | 27 +++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/plugins/cairo-lang-macro/src/types/mod.rs b/plugins/cairo-lang-macro/src/types/mod.rs index a679ed3f9..2e13e52cd 100644 --- a/plugins/cairo-lang-macro/src/types/mod.rs +++ b/plugins/cairo-lang-macro/src/types/mod.rs @@ -255,6 +255,7 @@ pub struct FullPathMarker { #[cfg(test)] mod tests { use crate::types::TokenStream; + use crate::{AllocationContext, TextSpan, Token, TokenTree}; #[test] fn new_token_stream_metadata_empty() { @@ -262,4 +263,30 @@ mod tests { assert!(token_stream.metadata.file_id.is_none()); assert!(token_stream.metadata.original_file_path.is_none()); } + + #[test] + fn can_convert_to_stable() { + let token_stream = TokenStream::new(vec![ + TokenTree::Ident(Token::new("test", TextSpan::new(0, 4))), + TokenTree::Ident(Token::new(";", TextSpan::new(4, 5))), + ]); + let stable = token_stream.as_stable(); + let ctx = AllocationContext::default(); + let token_stream = unsafe { TokenStream::from_stable_in(&stable, &ctx) }; + assert_eq!(token_stream.tokens.len(), 2); + assert_eq!(token_stream.to_string(), "test;"); + } + + #[test] + fn can_store_null_character() { + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + "te\0st", + TextSpan::new(0, 4), + ))]); + let stable = token_stream.as_stable(); + let ctx = AllocationContext::default(); + let token_stream = unsafe { TokenStream::from_stable_in(&stable, &ctx) }; + assert_eq!(token_stream.tokens.len(), 1); + assert_eq!(token_stream.to_string(), "te\0st"); + } } From 40bcdcc6ce70b64f81944f1a83b9c6a24ad68f74 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maciej=20Tr=C4=85tnowiecki?= Date: Wed, 19 Mar 2025 23:25:33 +0100 Subject: [PATCH 23/38] remove duplicated test cases from proc_macro_expand --- scarb/tests/proc_macro_expand.rs | 615 ------------------------------- 1 file changed, 615 deletions(-) diff --git a/scarb/tests/proc_macro_expand.rs b/scarb/tests/proc_macro_expand.rs index 0d7ac11c8..f75fced01 100644 --- a/scarb/tests/proc_macro_expand.rs +++ b/scarb/tests/proc_macro_expand.rs @@ -1,211 +1,11 @@ use assert_fs::TempDir; use assert_fs::fixture::PathChild; -use cairo_lang_sierra::program::VersionedProgram; use 
indoc::indoc; use scarb_test_support::cairo_plugin_project_builder::CairoPluginProjectBuilder; use scarb_test_support::command::Scarb; use scarb_test_support::fsx::ChildPathEx; use scarb_test_support::project_builder::ProjectBuilder; use scarb_test_support::workspace_builder::WorkspaceBuilder; -use snapbox::assert_matches; - -#[test] -fn compile_cairo_plugin() { - let t = TempDir::new().unwrap(); - CairoPluginProjectBuilder::default().build(&t); - let output = Scarb::quick_snapbox() - .arg("build") - // Disable colors in Cargo output. - .env("CARGO_TERM_COLOR", "never") - .current_dir(&t) - .output() - .unwrap(); - assert!( - output.status.success(), - "stdout={}\n stderr={}", - String::from_utf8_lossy(&output.stdout), - String::from_utf8_lossy(&output.stderr), - ); - let stdout = String::from_utf8_lossy(&output.stdout).to_string(); - assert!(stdout.contains("Compiling some v1.0.0")); - let lines = stdout.lines().map(ToString::to_string).collect::>(); - let (last, lines) = lines.split_last().unwrap(); - assert_matches(r#"[..] Finished `dev` profile target(s) in [..]"#, last); - let (last, _lines) = lines.split_last().unwrap(); - // Line from Cargo output - assert_matches( - r#"[..]Finished `release` profile [optimized] target(s) in[..]"#, - last, - ); -} - -#[test] -fn check_cairo_plugin() { - let t = TempDir::new().unwrap(); - CairoPluginProjectBuilder::default().build(&t); - let output = Scarb::quick_snapbox() - .arg("check") - // Disable colors in Cargo output. - .env("CARGO_TERM_COLOR", "never") - .current_dir(&t) - .output() - .unwrap(); - assert!( - output.status.success(), - "{}", - String::from_utf8_lossy(&output.stderr) - ); - let stdout = String::from_utf8_lossy(&output.stdout).to_string(); - assert!(stdout.contains("Checking some v1.0.0")); - let lines = stdout.lines().map(ToString::to_string).collect::>(); - let (last, lines) = lines.split_last().unwrap(); - assert_matches( - r#"[..] Finished checking `dev` profile target(s) in [..]"#, - last, - ); - let (last, _lines) = lines.split_last().unwrap(); - // Line from Cargo output - assert_matches( - r#"[..]Finished `release` profile [optimized] target(s) in[..]"#, - last, - ); -} - -#[test] -fn can_check_cairo_project_with_plugins() { - let temp = TempDir::new().unwrap(); - let t = temp.child("some"); - CairoPluginProjectBuilder::default().build(&t); - let project = temp.child("hello"); - let y = project.child("other"); - CairoPluginProjectBuilder::default().name("other").build(&y); - WorkspaceBuilder::start() - .add_member("other") - .package( - ProjectBuilder::start() - .name("hello") - .version("1.0.0") - .dep("some", &t), - ) - .build(&project); - Scarb::quick_snapbox() - .arg("check") - // Disable output from Cargo. - .env("CARGO_TERM_QUIET", "true") - .current_dir(&project) - .assert() - .success() - .stdout_matches(indoc! {r#" - [..]Checking other v1.0.0 ([..]Scarb.toml) - [..]Compiling some v1.0.0 ([..]Scarb.toml) - [..]Checking hello v1.0.0 ([..]Scarb.toml) - [..]Finished checking `dev` profile target(s) in [..] - "#}); -} - -#[test] -fn resolve_fetched_plugins() { - let t = TempDir::new().unwrap(); - CairoPluginProjectBuilder::default().build(&t); - assert!(!t.child("Cargo.lock").exists()); - let output = Scarb::quick_snapbox() - .arg("fetch") - // Disable colors in Cargo output. 
- .env("CARGO_TERM_COLOR", "never") - .current_dir(&t) - .output() - .unwrap(); - assert!( - output.status.success(), - "{}", - String::from_utf8_lossy(&output.stderr) - ); - assert!(t.child("Cargo.lock").exists()) -} - -#[test] -fn can_use_json_output() { - let t = TempDir::new().unwrap(); - CairoPluginProjectBuilder::default().build(&t); - let output = Scarb::quick_snapbox() - .arg("--json") - .arg("check") - // Disable colors in Cargo output. - .env("CARGO_TERM_COLOR", "never") - .current_dir(&t) - .output() - .unwrap(); - assert!( - output.status.success(), - "{}", - String::from_utf8_lossy(&output.stderr) - ); - let stdout = String::from_utf8_lossy(&output.stdout).to_string(); - let lines = stdout.lines().map(ToString::to_string).collect::>(); - let (first, lines) = lines.split_first().unwrap(); - assert_matches( - r#"{"status":"checking","message":"some v1.0.0 ([..]Scarb.toml)"}"#, - first, - ); - let (last, lines) = lines.split_last().unwrap(); - assert_matches( - r#"{"status":"finished","message":"checking `dev` profile target(s) in [..]"}"#, - last, - ); - // Line from Cargo. - let (last, _lines) = lines.split_last().unwrap(); - assert_matches(r#"{"reason":"build-finished","success":true}"#, last); -} - -#[test] -fn compile_cairo_plugin_with_lib_target() { - let t = TempDir::new().unwrap(); - ProjectBuilder::start() - .name("hello") - .version("1.0.0") - .manifest_extra(indoc! {r#" - [lib] - [cairo-plugin] - "#}) - .build(&t); - - Scarb::quick_snapbox() - .arg("build") - .current_dir(&t) - .assert() - .failure() - .stdout_matches(indoc! {r#" - error: failed to parse manifest at: [..]/Scarb.toml - - Caused by: - target `cairo-plugin` cannot be mixed with other targets - "#}); -} - -#[test] -fn compile_cairo_plugin_with_other_target() { - let t = TempDir::new().unwrap(); - ProjectBuilder::start() - .name("hello") - .version("1.0.0") - .manifest_extra(indoc! {r#" - [cairo-plugin] - [[target.starknet-contract]] - "#}) - .build(&t); - - Scarb::quick_snapbox() - .arg("build") - .current_dir(&t) - .assert() - .failure() - .stdout_matches(indoc! {r#" - error: failed to parse manifest at: [..]/Scarb.toml - - Caused by: - target `cairo-plugin` cannot be mixed with other targets - "#}); -} #[test] fn can_emit_plugin_warning() { @@ -403,320 +203,6 @@ fn can_replace_original_node() { "#}); } -#[test] -fn can_return_aux_data_from_plugin() { - let temp = TempDir::new().unwrap(); - let t = temp.child("some"); - CairoPluginProjectBuilder::default() - .lib_rs(indoc! 
{r##" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, AuxData, PostProcessContext, post_process}; - use serde::{Serialize, Deserialize}; - - #[derive(Debug, Serialize, Deserialize)] - struct SomeMacroDataFormat { - msg: String - } - - #[attribute_macro] - pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - let value = SomeMacroDataFormat { msg: "Hello from some macro!".to_string() }; - let value = serde_json::to_string(&value).unwrap(); - let value: Vec = value.into_bytes(); - let aux_data = AuxData::new(value); - ProcMacroResult::new(token_stream).with_aux_data(aux_data) - } - - #[post_process] - pub fn callback(context: PostProcessContext) { - let aux_data = context.aux_data.into_iter() - .map(|aux_data| { - let value: Vec = aux_data.into(); - let aux_data: SomeMacroDataFormat = serde_json::from_slice(&value).unwrap(); - aux_data - }) - .collect::>(); - println!("{:?}", aux_data); - } - - #[post_process] - pub fn some_no_op_callback(context: PostProcessContext) { - drop(context.aux_data); - } - "##}) - .add_dep(r#"serde = { version = "*", features = ["derive"] }"#) - .add_dep(r#"serde_json = "*""#) - .build(&t); - let project = temp.child("hello"); - ProjectBuilder::start() - .name("hello") - .version("1.0.0") - .dep_starknet() - .dep("some", &t) - .lib_cairo(indoc! {r#" - #[some] - fn main() -> felt252 { 12 } - "#}) - .build(&project); - - Scarb::quick_snapbox() - .arg("build") - // Disable output from Cargo. - .env("CARGO_TERM_QUIET", "true") - .current_dir(&project) - .assert() - .success() - .stdout_matches(indoc! {r#" - [..]Compiling some v1.0.0 ([..]Scarb.toml) - [..]Compiling hello v1.0.0 ([..]Scarb.toml) - [SomeMacroDataFormat { msg: "Hello from some macro!" }] - [..]Finished `dev` profile target(s) in [..] - "#}); -} - -#[test] -fn can_read_token_stream_metadata() { - let temp = TempDir::new().unwrap(); - let t = temp.child("some"); - CairoPluginProjectBuilder::default() - .lib_rs(indoc! {r##" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; - - #[attribute_macro] - pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - println!("{:#?}", token_stream.metadata()); - ProcMacroResult::new(token_stream) - } - "##}) - .build(&t); - - let project = temp.child("hello"); - ProjectBuilder::start() - .name("hello") - .version("1.0.0") - .dep_starknet() - .dep("some", &t) - .lib_cairo(indoc! {r#" - #[some] - fn main() -> felt252 { 12 } - "#}) - .build(&project); - - Scarb::quick_snapbox() - .arg("build") - // Disable output from Cargo. - .env("CARGO_TERM_QUIET", "true") - .current_dir(&project) - .assert() - .success() - .stdout_matches(indoc! {r#" - [..]Compiling some v1.0.0 ([..]Scarb.toml) - [..]Compiling hello v1.0.0 ([..]Scarb.toml) - TokenStreamMetadata { - original_file_path: Some( - "[..]lib.cairo", - ), - file_id: Some( - "[..]", - ), - edition: Some( - "[..]", - ), - } - [..]Finished `dev` profile target(s) in [..] - "#}); -} - -#[test] -fn can_define_multiple_macros() { - let temp = TempDir::new().unwrap(); - let t = temp.child("some"); - CairoPluginProjectBuilder::default() - .lib_rs(indoc! 
{r##" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, AuxData, PostProcessContext, post_process, TokenTree, Token, TextSpan}; - - #[attribute_macro] - pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - let new_token_string = token_stream.to_string().replace("12", "34"); - let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( - new_token_string.clone(), - TextSpan { start: 0, end: new_token_string.len() as u32 }, - ))]); - let aux_data = AuxData::new(Vec::new()); - ProcMacroResult::new(token_stream).with_aux_data(aux_data) - } - - #[attribute_macro] - pub fn world(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - let new_token_string = token_stream.to_string().replace("56", "78"); - let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( - new_token_string.clone(), - TextSpan { start: 0, end: new_token_string.len() as u32 }, - ))]); - let aux_data = AuxData::new(Vec::new()); - ProcMacroResult::new(token_stream).with_aux_data(aux_data) - } - - #[post_process] - pub fn callback(context: PostProcessContext) { - assert_eq!(context.aux_data.len(), 2); - } - "##}) - .build(&t); - - let w = temp.child("other"); - CairoPluginProjectBuilder::default() - .name("other") - .lib_rs(indoc! {r##" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, AuxData, PostProcessContext, post_process, TokenTree, Token, TextSpan}; - - #[attribute_macro] - pub fn beautiful(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - let new_token_string = token_stream.to_string().replace("90", "09"); - let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( - new_token_string.clone(), - TextSpan { start: 0, end: new_token_string.len() as u32 }, - ))]); - let aux_data = AuxData::new(Vec::new()); - ProcMacroResult::new(token_stream).with_aux_data(aux_data) - } - - #[post_process] - pub fn callback(context: PostProcessContext) { - assert_eq!(context.aux_data.len(), 1); - } - "##}) - .build(&w); - - let project = temp.child("hello"); - ProjectBuilder::start() - .name("hello") - .version("1.0.0") - .dep_starknet() - .dep("some", &t) - .dep("other", &w) - .lib_cairo(indoc! {r#" - #[hello] - #[beautiful] - #[world] - fn main() -> felt252 { 12 + 56 + 90 } - "#}) - .build(&project); - - Scarb::quick_snapbox() - .arg("cairo-run") - // Disable output from Cargo. - .env("CARGO_TERM_QUIET", "true") - .current_dir(&project) - .assert() - .success() - .stdout_matches(indoc! {r#" - warn: `scarb cairo-run` will be deprecated soon - help: use `scarb execute` instead - [..]Compiling other v1.0.0 ([..]Scarb.toml) - [..]Compiling some v1.0.0 ([..]Scarb.toml) - [..]Compiling hello v1.0.0 ([..]Scarb.toml) - [..]Finished `dev` profile target(s) in [..] - [..]Running hello - Run completed successfully, returning [121] - "#}); -} - -#[test] -fn cannot_use_undefined_macro() { - let temp = TempDir::new().unwrap(); - let t = temp.child("some"); - CairoPluginProjectBuilder::default().build(&t); - let project = temp.child("hello"); - ProjectBuilder::start() - .name("hello") - .version("1.0.0") - .dep_starknet() - .dep("some", &t) - .lib_cairo(indoc! {r#" - #[world] - fn main() -> felt252 { 12 + 56 + 90 } - "#}) - .build(&project); - Scarb::quick_snapbox() - .arg("build") - // Disable output from Cargo. - .env("CARGO_TERM_QUIET", "true") - .current_dir(&project) - .assert() - .failure() - .stdout_matches(indoc! 
{r#" - [..]Compiling some v1.0.0 ([..]Scarb.toml) - [..]Compiling hello v1.0.0 ([..]Scarb.toml) - error: Plugin diagnostic: Unsupported attribute. - --> [..]lib.cairo:1:1 - #[world] - ^^^^^^^^ - - error: could not compile `hello` due to previous error - "#}); -} - -#[test] -fn can_resolve_full_path_markers() { - let temp = TempDir::new().unwrap(); - let t = temp.child("some"); - CairoPluginProjectBuilder::default() - .lib_rs(indoc! {r##" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, post_process, PostProcessContext, TokenTree, Token, TextSpan}; - - #[attribute_macro] - pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - let full_path_markers = vec!["some-key".to_string()]; - - let code = format!( - r#"#[macro::full_path_marker("some-key")] {}"#, - token_stream.to_string().replace("12", "34") - ); - - ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new( - code.clone(), - TextSpan { - start: 0, - end: code.len() as u32, - }, - ))]) - ).with_full_path_markers(full_path_markers) - } - - #[post_process] - pub fn callback(context: PostProcessContext) { - println!("{:?}", context.full_path_markers); - } - "##}) - .build(&t); - - let project = temp.child("hello"); - ProjectBuilder::start() - .name("hello") - .version("1.0.0") - .dep_starknet() - .dep("some", &t) - .lib_cairo(indoc! {r#" - #[some] - fn main() -> felt252 { 12 } - "#}) - .build(&project); - - Scarb::quick_snapbox() - .arg("build") - // Disable output from Cargo. - .env("CARGO_TERM_QUIET", "true") - .current_dir(&project) - .assert() - .success() - .stdout_matches(indoc! {r#" - [..]Compiling some v1.0.0 ([..]Scarb.toml) - [..]Compiling hello v1.0.0 ([..]Scarb.toml) - [FullPathMarker { key: "some-key", full_path: "hello::main" }] - [..]Finished `dev` profile target(s) in [..] - "#}); -} - #[test] fn can_implement_inline_macro() { let temp = TempDir::new().unwrap(); @@ -1050,107 +536,6 @@ fn can_read_attribute_args() { "#}); } -#[test] -fn can_create_executable_attribute() { - let temp = TempDir::new().unwrap(); - let t = temp.child("some"); - CairoPluginProjectBuilder::default() - .lib_rs(indoc! {r##" - use cairo_lang_macro::executable_attribute; - - executable_attribute!("some"); - "##}) - .build(&t); - - let project = temp.child("hello"); - ProjectBuilder::start() - .name("hello") - .version("1.0.0") - .dep_starknet() - .dep("some", &t) - .lib_cairo(indoc! {r#" - #[some] - fn main() -> felt252 { 12 } - "#}) - .build(&project); - - Scarb::quick_snapbox() - .arg("build") - // Disable output from Cargo. - .env("CARGO_TERM_QUIET", "true") - .current_dir(&project) - .assert() - .success() - .stdout_matches(indoc! {r#" - [..]Compiling some v1.0.0 ([..]Scarb.toml) - [..]Compiling hello v1.0.0 ([..]Scarb.toml) - [..]Finished `dev` profile target(s) in [..] 
- "#}); - let sierra = project - .child("target") - .child("dev") - .child("hello.sierra.json") - .read_to_string(); - let sierra = serde_json::from_str::(&sierra).unwrap(); - let sierra = sierra.into_v1().unwrap(); - let executables = sierra.debug_info.unwrap().executables; - assert_eq!(executables.len(), 1); - let executables = executables.get("some").unwrap(); - assert_eq!(executables.len(), 1); - let fid = executables.first().unwrap().clone(); - assert_eq!(fid.clone().debug_name.unwrap(), "hello::main"); - assert!( - sierra - .program - .funcs - .iter() - .any(|f| f.id.clone() == fid.clone()) - ); -} - -#[test] -fn executable_name_cannot_clash_attr() { - let temp = TempDir::new().unwrap(); - let t = temp.child("some"); - CairoPluginProjectBuilder::default() - .lib_rs(indoc! {r##" - use cairo_lang_macro::{executable_attribute, attribute_macro, TokenStream, ProcMacroResult}; - - executable_attribute!("some"); - - #[attribute_macro] - fn some(_args: TokenStream, input: TokenStream) -> ProcMacroResult { - ProcMacroResult::new(input) - } - "##}) - .build(&t); - - let project = temp.child("hello"); - ProjectBuilder::start() - .name("hello") - .version("1.0.0") - .dep_starknet() - .dep("some", &t) - .lib_cairo(indoc! {r#" - #[some] - fn main() -> felt252 { 12 } - "#}) - .build(&project); - - Scarb::quick_snapbox() - .arg("build") - // Disable output from Cargo. - .env("CARGO_TERM_QUIET", "true") - .current_dir(&project) - .assert() - .failure() - .stdout_matches(indoc! {r#" - [..]Compiling some v1.0.0 ([..]Scarb.toml) - [..]Compiling hello v1.0.0 ([..]Scarb.toml) - error: duplicate expansions defined for procedural macro some v1.0.0 ([..]Scarb.toml): some - "#}); -} - #[test] fn can_be_expanded() { let temp = TempDir::new().unwrap(); From 6d6373643e9afa78f5be87e769b9eb2a432665c3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maciej=20Tr=C4=85tnowiecki?= Date: Wed, 12 Mar 2025 15:55:12 +0100 Subject: [PATCH 24/38] Rename proc macro tests to proc_macro_v2_ --- scarb/tests/{proc_macro_build.rs => proc_macro_v2_build.rs} | 0 .../{proc_macro_executable.rs => proc_macro_v2_executable.rs} | 0 scarb/tests/{proc_macro_expand.rs => proc_macro_v2_expand.rs} | 0 scarb/tests/{proc_macro_metadata.rs => proc_macro_v2_metadata.rs} | 0 scarb/tests/{proc_macro_prebuilt.rs => proc_macro_v2_prebuilt.rs} | 0 scarb/tests/{proc_macro_quote.rs => proc_macro_v2_quote.rs} | 0 scarb/tests/{proc_macro_server.rs => proc_macro_v2_server.rs} | 0 7 files changed, 0 insertions(+), 0 deletions(-) rename scarb/tests/{proc_macro_build.rs => proc_macro_v2_build.rs} (100%) rename scarb/tests/{proc_macro_executable.rs => proc_macro_v2_executable.rs} (100%) rename scarb/tests/{proc_macro_expand.rs => proc_macro_v2_expand.rs} (100%) rename scarb/tests/{proc_macro_metadata.rs => proc_macro_v2_metadata.rs} (100%) rename scarb/tests/{proc_macro_prebuilt.rs => proc_macro_v2_prebuilt.rs} (100%) rename scarb/tests/{proc_macro_quote.rs => proc_macro_v2_quote.rs} (100%) rename scarb/tests/{proc_macro_server.rs => proc_macro_v2_server.rs} (100%) diff --git a/scarb/tests/proc_macro_build.rs b/scarb/tests/proc_macro_v2_build.rs similarity index 100% rename from scarb/tests/proc_macro_build.rs rename to scarb/tests/proc_macro_v2_build.rs diff --git a/scarb/tests/proc_macro_executable.rs b/scarb/tests/proc_macro_v2_executable.rs similarity index 100% rename from scarb/tests/proc_macro_executable.rs rename to scarb/tests/proc_macro_v2_executable.rs diff --git a/scarb/tests/proc_macro_expand.rs b/scarb/tests/proc_macro_v2_expand.rs similarity index 100% 
rename from scarb/tests/proc_macro_expand.rs rename to scarb/tests/proc_macro_v2_expand.rs diff --git a/scarb/tests/proc_macro_metadata.rs b/scarb/tests/proc_macro_v2_metadata.rs similarity index 100% rename from scarb/tests/proc_macro_metadata.rs rename to scarb/tests/proc_macro_v2_metadata.rs diff --git a/scarb/tests/proc_macro_prebuilt.rs b/scarb/tests/proc_macro_v2_prebuilt.rs similarity index 100% rename from scarb/tests/proc_macro_prebuilt.rs rename to scarb/tests/proc_macro_v2_prebuilt.rs diff --git a/scarb/tests/proc_macro_quote.rs b/scarb/tests/proc_macro_v2_quote.rs similarity index 100% rename from scarb/tests/proc_macro_quote.rs rename to scarb/tests/proc_macro_v2_quote.rs diff --git a/scarb/tests/proc_macro_server.rs b/scarb/tests/proc_macro_v2_server.rs similarity index 100% rename from scarb/tests/proc_macro_server.rs rename to scarb/tests/proc_macro_v2_server.rs From 72009931b7bae9653bc5ef26fc2fffde9067eb13 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maciej=20Tr=C4=85tnowiecki?= Date: Wed, 12 Mar 2025 23:53:43 +0100 Subject: [PATCH 25/38] Rename not mangled symbols with _v2 suffix --- plugins/cairo-lang-macro/src/lib.rs | 14 +++++++------- scarb/src/compiler/plugin/proc_macro/ffi.rs | 14 +++++++------- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/plugins/cairo-lang-macro/src/lib.rs b/plugins/cairo-lang-macro/src/lib.rs index f1c34da98..723188755 100644 --- a/plugins/cairo-lang-macro/src/lib.rs +++ b/plugins/cairo-lang-macro/src/lib.rs @@ -68,7 +68,7 @@ pub static MACRO_DEFINITIONS_SLICE: [ExpansionDefinition]; /// # Safety #[doc(hidden)] #[no_mangle] -pub unsafe extern "C" fn list_expansions() -> StableExpansionsList { +pub unsafe extern "C" fn list_expansions_v2() -> StableExpansionsList { let list = MACRO_DEFINITIONS_SLICE .iter() .map(|m| m.clone().into_stable()) @@ -84,7 +84,7 @@ pub unsafe extern "C" fn list_expansions() -> StableExpansionsList { /// # Safety #[doc(hidden)] #[no_mangle] -pub unsafe extern "C" fn free_expansions_list(list: StableExpansionsList) { +pub unsafe extern "C" fn free_expansions_list_v2(list: StableExpansionsList) { let v = list.into_owned(); v.into_iter().for_each(|v| { ExpansionDefinition::free_owned(v); @@ -101,7 +101,7 @@ pub unsafe extern "C" fn free_expansions_list(list: StableExpansionsList) { /// # Safety #[doc(hidden)] #[no_mangle] -pub unsafe extern "C" fn expand( +pub unsafe extern "C" fn expand_v2( item_name: *const c_char, call_site: StableTextSpan, stable_attr: cairo_lang_macro_stable::StableTokenStream, @@ -158,7 +158,7 @@ pub unsafe extern "C" fn expand( /// # Safety #[doc(hidden)] #[no_mangle] -pub unsafe extern "C" fn free_result(result: StableProcMacroResult) { +pub unsafe extern "C" fn free_result_v2(result: StableProcMacroResult) { ProcMacroResult::free_owned_stable(result); } @@ -178,7 +178,7 @@ pub static AUX_DATA_CALLBACKS: [fn(PostProcessContext)]; /// # Safety #[doc(hidden)] #[no_mangle] -pub unsafe extern "C" fn post_process_callback( +pub unsafe extern "C" fn post_process_callback_v2( context: StablePostProcessContext, ) -> StablePostProcessContext { if !AUX_DATA_CALLBACKS.is_empty() { @@ -197,7 +197,7 @@ pub unsafe extern "C" fn post_process_callback( /// #[doc(hidden)] #[no_mangle] -pub unsafe extern "C" fn doc(item_name: *mut c_char) -> *mut c_char { +pub unsafe extern "C" fn doc_v2(item_name: *mut c_char) -> *mut c_char { let item_name = CStr::from_ptr(item_name).to_string_lossy().to_string(); let doc = MACRO_DEFINITIONS_SLICE .iter() @@ -218,7 +218,7 @@ pub unsafe extern "C" fn doc(item_name: 
*mut c_char) -> *mut c_char { /// #[doc(hidden)] #[no_mangle] -pub unsafe extern "C" fn free_doc(doc: *mut c_char) { +pub unsafe extern "C" fn free_doc_v2(doc: *mut c_char) { if !doc.is_null() { let _ = CString::from_raw(doc); } diff --git a/scarb/src/compiler/plugin/proc_macro/ffi.rs b/scarb/src/compiler/plugin/proc_macro/ffi.rs index a32c2243e..2b63ab904 100644 --- a/scarb/src/compiler/plugin/proc_macro/ffi.rs +++ b/scarb/src/compiler/plugin/proc_macro/ffi.rs @@ -319,21 +319,21 @@ impl VTableV0 { unsafe fn try_new(library: &Library) -> Result { unsafe { Ok(VTableV0 { - list_expansions: get_symbol!(library, b"list_expansions\0", ListExpansions), + list_expansions: get_symbol!(library, b"list_expansions_v2\0", ListExpansions), free_expansions_list: get_symbol!( library, - b"free_expansions_list\0", + b"free_expansions_list_v2\0", FreeExpansionsList ), - expand: get_symbol!(library, b"expand\0", ExpandCode), - free_result: get_symbol!(library, b"free_result\0", FreeResult), + expand: get_symbol!(library, b"expand_v2\0", ExpandCode), + free_result: get_symbol!(library, b"free_result_v2\0", FreeResult), post_process_callback: get_symbol!( library, - b"post_process_callback\0", + b"post_process_callback_v2\0", PostProcessCallback ), - doc: get_symbol!(library, b"doc\0", DocExpansion), - free_doc: get_symbol!(library, b"free_doc\0", FreeExpansionDoc), + doc: get_symbol!(library, b"doc_v2\0", DocExpansion), + free_doc: get_symbol!(library, b"free_doc_v2\0", FreeExpansionDoc), }) } } From accc296f9a7de4c26c85db2f98c25f9be3a37733 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maciej=20Tr=C4=85tnowiecki?= Date: Thu, 13 Mar 2025 18:58:55 +0100 Subject: [PATCH 26/38] Bump macro api versions, depend on both macro apis --- Cargo.lock | 40 ++++++++++--------- .../cairo-lang-macro-attributes/Cargo.toml | 2 +- plugins/cairo-lang-macro-stable/Cargo.toml | 2 +- plugins/cairo-lang-macro/Cargo.toml | 2 +- scarb/Cargo.toml | 2 + .../scarb-proc-macro-server-types/Cargo.toml | 2 +- 6 files changed, 27 insertions(+), 23 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 602ef6ecb..dfb8c0db3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -945,33 +945,35 @@ dependencies = [ [[package]] name = "cairo-lang-macro" version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dda77fe9404e438edaa80c9acaf0d72260aa883ba433812d0a745f5a72f31881" dependencies = [ - "bumpalo", "cairo-lang-macro-attributes 0.1.0", "cairo-lang-macro-stable 1.0.0", - "cairo-lang-primitive-token", - "cairo-lang-quote", "linkme", "serde", - "serde_json", - "trybuild", ] [[package]] name = "cairo-lang-macro" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dda77fe9404e438edaa80c9acaf0d72260aa883ba433812d0a745f5a72f31881" +version = "0.2.0" dependencies = [ - "cairo-lang-macro-attributes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "cairo-lang-macro-stable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bumpalo", + "cairo-lang-macro-attributes 0.2.0", + "cairo-lang-macro-stable 2.0.0", + "cairo-lang-primitive-token", + "cairo-lang-quote", "linkme", "serde", + "serde_json", + "trybuild", ] [[package]] name = "cairo-lang-macro-attributes" version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e32e958decd95ae122ee64daa26721da2f76e83231047f947fd9cdc5d3c90cc6" dependencies = [ "quote", "scarb-stable-hash 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -980,9 +982,7 @@ 
dependencies = [ [[package]] name = "cairo-lang-macro-attributes" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e32e958decd95ae122ee64daa26721da2f76e83231047f947fd9cdc5d3c90cc6" +version = "0.2.0" dependencies = [ "quote", "scarb-stable-hash 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -992,12 +992,12 @@ dependencies = [ [[package]] name = "cairo-lang-macro-stable" version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c49906d6b1c215e5814be7c5c65ecf2328898b335bee8c2409ec07cfb5530daf" [[package]] name = "cairo-lang-macro-stable" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c49906d6b1c215e5814be7c5c65ecf2328898b335bee8c2409ec07cfb5530daf" +version = "2.0.0" [[package]] name = "cairo-lang-parser" @@ -1422,7 +1422,7 @@ dependencies = [ "cairo-lang-filesystem", "cairo-lang-formatter", "cairo-lang-lowering", - "cairo-lang-macro 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "cairo-lang-macro 0.1.1", "cairo-lang-parser", "cairo-lang-project", "cairo-lang-semantic", @@ -5719,7 +5719,9 @@ dependencies = [ "cairo-lang-formatter", "cairo-lang-lowering", "cairo-lang-macro 0.1.1", + "cairo-lang-macro 0.2.0", "cairo-lang-macro-stable 1.0.0", + "cairo-lang-macro-stable 2.0.0", "cairo-lang-parser", "cairo-lang-semantic", "cairo-lang-sierra", @@ -5960,7 +5962,7 @@ dependencies = [ name = "scarb-proc-macro-server-types" version = "0.2.0" dependencies = [ - "cairo-lang-macro 0.1.1", + "cairo-lang-macro 0.2.0", "serde", "serde_json", ] @@ -5970,7 +5972,7 @@ name = "scarb-proc-macro-server-types" version = "0.2.0" source = "git+https://github.com/software-mansion/scarb?rev=8caf288a0634e8f90323529bde3eac79c47b1fa4#8caf288a0634e8f90323529bde3eac79c47b1fa4" dependencies = [ - "cairo-lang-macro 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "cairo-lang-macro 0.1.1", "serde", "serde_json", ] diff --git a/plugins/cairo-lang-macro-attributes/Cargo.toml b/plugins/cairo-lang-macro-attributes/Cargo.toml index b0f70f856..5978717ac 100644 --- a/plugins/cairo-lang-macro-attributes/Cargo.toml +++ b/plugins/cairo-lang-macro-attributes/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "cairo-lang-macro-attributes" -version = "0.1.0" +version = "0.2.0" edition = "2021" authors.workspace = true categories = ["development-tools"] diff --git a/plugins/cairo-lang-macro-stable/Cargo.toml b/plugins/cairo-lang-macro-stable/Cargo.toml index ac7dc3529..d8181e96f 100644 --- a/plugins/cairo-lang-macro-stable/Cargo.toml +++ b/plugins/cairo-lang-macro-stable/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "cairo-lang-macro-stable" -version = "1.0.0" +version = "2.0.0" edition = "2021" authors.workspace = true diff --git a/plugins/cairo-lang-macro/Cargo.toml b/plugins/cairo-lang-macro/Cargo.toml index e6e08912c..bad8d07f1 100644 --- a/plugins/cairo-lang-macro/Cargo.toml +++ b/plugins/cairo-lang-macro/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "cairo-lang-macro" -version = "0.1.1" +version = "0.2.0" edition = "2021" rust-version = "1.73" diff --git a/scarb/Cargo.toml b/scarb/Cargo.toml index 9f69ba41a..94d4968ac 100644 --- a/scarb/Cargo.toml +++ b/scarb/Cargo.toml @@ -25,6 +25,8 @@ cairo-lang-formatter.workspace = true cairo-lang-lowering.workspace = true cairo-lang-macro = { path = "../plugins/cairo-lang-macro" } cairo-lang-macro-stable = { path = "../plugins/cairo-lang-macro-stable" } +cairo-lang-macro-v1 = { version = "0.1", package = 
"cairo-lang-macro" } +cairo-lang-macro-stable-v1 = { version = "1", package = "cairo-lang-macro-stable" } cairo-lang-parser.workspace = true cairo-lang-semantic.workspace = true cairo-lang-sierra-generator.workspace = true diff --git a/utils/scarb-proc-macro-server-types/Cargo.toml b/utils/scarb-proc-macro-server-types/Cargo.toml index 5408b789d..309030ec2 100644 --- a/utils/scarb-proc-macro-server-types/Cargo.toml +++ b/utils/scarb-proc-macro-server-types/Cargo.toml @@ -10,6 +10,6 @@ license.workspace = true repository.workspace = true [dependencies] -cairo-lang-macro = { path = "../../plugins/cairo-lang-macro", version = "0.1", features = ["serde"] } +cairo-lang-macro = { path = "../../plugins/cairo-lang-macro", version = "0.2", features = ["serde"] } serde.workspace = true serde_json.workspace = true From 7d47741a11a07d01f446626eb06c6203b379a798 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maciej=20Tr=C4=85tnowiecki?= Date: Thu, 13 Mar 2025 19:57:25 +0100 Subject: [PATCH 27/38] Proc macro: restore v1 api tests --- scarb/tests/package.rs | 8 +- scarb/tests/proc_macro_v1.rs | 1660 +++++++++++++++++ scarb/tests/proc_macro_v1_and_v2.rs | 159 ++ scarb/tests/proc_macro_v2_server.rs | 10 +- .../src/cairo_plugin_project_builder.rs | 55 +- .../src/proc_macro_server.rs | 2 +- 6 files changed, 1880 insertions(+), 14 deletions(-) create mode 100644 scarb/tests/proc_macro_v1.rs create mode 100644 scarb/tests/proc_macro_v1_and_v2.rs diff --git a/scarb/tests/package.rs b/scarb/tests/package.rs index e5d4c31e9..99fccfac4 100644 --- a/scarb/tests/package.rs +++ b/scarb/tests/package.rs @@ -496,7 +496,7 @@ fn workspace() { fn cairo_plugin() { let t = TempDir::new().unwrap(); // Note this will be packaged with `cairo-lang-macro` from crates, not the local one. - CairoPluginProjectBuilder::default().build(&t); + CairoPluginProjectBuilder::default_v1().build(&t); Scarb::quick_snapbox() .arg("package") @@ -571,7 +571,7 @@ fn cairo_plugin() { crate-type = ["cdylib"] [dependencies] - cairo-lang-macro = { path = "[..]cairo-lang-macro", version = "0.1.0" } + cairo-lang-macro = "0.1" "#}, ) .file_matches( @@ -1480,7 +1480,7 @@ fn package_without_verification() { #[test] fn package_cairo_plugin_without_verification() { let t = TempDir::new().unwrap(); - CairoPluginProjectBuilder::default().build(&t); + CairoPluginProjectBuilder::default_v1().build(&t); Scarb::quick_snapbox() .arg("package") @@ -1742,7 +1742,7 @@ fn package_proc_macro_with_package_script() { "cargo build --release && cp target/release/{dll_filename} target/scarb/cairo-plugin" ); - CairoPluginProjectBuilder::start() + CairoPluginProjectBuilder::start_v1() .name("foo") .scarb_project(|b| { b.name("foo") diff --git a/scarb/tests/proc_macro_v1.rs b/scarb/tests/proc_macro_v1.rs new file mode 100644 index 000000000..5c10c7ebf --- /dev/null +++ b/scarb/tests/proc_macro_v1.rs @@ -0,0 +1,1660 @@ +use assert_fs::TempDir; +use assert_fs::fixture::PathChild; +use cairo_lang_sierra::program::VersionedProgram; +use indoc::indoc; +use scarb_test_support::cairo_plugin_project_builder::CairoPluginProjectBuilder; +use scarb_test_support::command::Scarb; +use scarb_test_support::fsx::ChildPathEx; +use scarb_test_support::project_builder::ProjectBuilder; +use scarb_test_support::workspace_builder::WorkspaceBuilder; +use snapbox::assert_matches; + +#[test] +fn compile_cairo_plugin() { + let t = TempDir::new().unwrap(); + CairoPluginProjectBuilder::default_v1().build(&t); + let output = Scarb::quick_snapbox() + .arg("build") + // Disable colors in Cargo output. 
+ .env("CARGO_TERM_COLOR", "never") + .current_dir(&t) + .output() + .unwrap(); + assert!( + output.status.success(), + "stdout={}\n stderr={}", + String::from_utf8_lossy(&output.stdout), + String::from_utf8_lossy(&output.stderr), + ); + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); + assert!(stdout.contains("Compiling some v1.0.0")); + let lines = stdout.lines().map(ToString::to_string).collect::>(); + let (last, lines) = lines.split_last().unwrap(); + assert_matches(r#"[..] Finished `dev` profile target(s) in [..]"#, last); + let (last, _lines) = lines.split_last().unwrap(); + // Line from Cargo output + assert_matches( + r#"[..]Finished `release` profile [optimized] target(s) in[..]"#, + last, + ); +} + +#[test] +fn check_cairo_plugin() { + let t = TempDir::new().unwrap(); + CairoPluginProjectBuilder::default_v1().build(&t); + let output = Scarb::quick_snapbox() + .arg("check") + // Disable colors in Cargo output. + .env("CARGO_TERM_COLOR", "never") + .current_dir(&t) + .output() + .unwrap(); + assert!( + output.status.success(), + "{}", + String::from_utf8_lossy(&output.stderr) + ); + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); + assert!(stdout.contains("Checking some v1.0.0")); + let lines = stdout.lines().map(ToString::to_string).collect::>(); + let (last, lines) = lines.split_last().unwrap(); + assert_matches( + r#"[..] Finished checking `dev` profile target(s) in [..]"#, + last, + ); + let (last, _lines) = lines.split_last().unwrap(); + // Line from Cargo output + assert_matches( + r#"[..]Finished `release` profile [optimized] target(s) in[..]"#, + last, + ); +} + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn can_check_cairo_project_with_plugins() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default_v1().build(&t); + let project = temp.child("hello"); + let y = project.child("other"); + CairoPluginProjectBuilder::default_v1() + .name("other") + .build(&y); + WorkspaceBuilder::start() + .add_member("other") + .package( + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t), + ) + .build(&project); + Scarb::quick_snapbox() + .arg("check") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success() + .stdout_matches(indoc! {r#" + [..]Checking other v1.0.0 ([..]Scarb.toml) + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Checking hello v1.0.0 ([..]Scarb.toml) + [..]Finished checking `dev` profile target(s) in [..] + "#}); +} + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn resolve_fetched_plugins() { + let t = TempDir::new().unwrap(); + CairoPluginProjectBuilder::default_v1().build(&t); + assert!(!t.child("Cargo.lock").exists()); + let output = Scarb::quick_snapbox() + .arg("fetch") + // Disable colors in Cargo output. + .env("CARGO_TERM_COLOR", "never") + .current_dir(&t) + .output() + .unwrap(); + assert!( + output.status.success(), + "{}", + String::from_utf8_lossy(&output.stderr) + ); + assert!(t.child("Cargo.lock").exists()) +} + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn can_use_json_output() { + let t = TempDir::new().unwrap(); + CairoPluginProjectBuilder::default_v1().build(&t); + let output = Scarb::quick_snapbox() + .arg("--json") + .arg("check") + // Disable colors in Cargo output. 
+ .env("CARGO_TERM_COLOR", "never") + .current_dir(&t) + .output() + .unwrap(); + assert!( + output.status.success(), + "{}", + String::from_utf8_lossy(&output.stderr) + ); + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); + let lines = stdout.lines().map(ToString::to_string).collect::>(); + let (first, lines) = lines.split_first().unwrap(); + assert_matches( + r#"{"status":"checking","message":"some v1.0.0 ([..]Scarb.toml)"}"#, + first, + ); + let (last, lines) = lines.split_last().unwrap(); + assert_matches( + r#"{"status":"finished","message":"checking `dev` profile target(s) in [..]"}"#, + last, + ); + // Line from Cargo. + let (last, _lines) = lines.split_last().unwrap(); + assert_matches(r#"{"reason":"build-finished","success":true}"#, last); +} + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn compile_cairo_plugin_with_lib_target() { + let t = TempDir::new().unwrap(); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .manifest_extra(indoc! {r#" + [lib] + [cairo-plugin] + "#}) + .build(&t); + + Scarb::quick_snapbox() + .arg("build") + .current_dir(&t) + .assert() + .failure() + .stdout_matches(indoc! {r#" + error: failed to parse manifest at: [..]/Scarb.toml + + Caused by: + target `cairo-plugin` cannot be mixed with other targets + "#}); +} + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn compile_cairo_plugin_with_other_target() { + let t = TempDir::new().unwrap(); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .manifest_extra(indoc! {r#" + [cairo-plugin] + [[target.starknet-contract]] + "#}) + .build(&t); + + Scarb::quick_snapbox() + .arg("build") + .current_dir(&t) + .assert() + .failure() + .stdout_matches(indoc! {r#" + error: failed to parse manifest at: [..]/Scarb.toml + + Caused by: + target `cairo-plugin` cannot be mixed with other targets + "#}); +} + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn can_emit_plugin_warning() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default_v1() + .lib_rs(indoc! {r#" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, Diagnostic}; + + #[attribute_macro] + pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let diag = Diagnostic::warn("Some warning from macro."); + ProcMacroResult::new(token_stream) + .with_diagnostics(diag.into()) + } + "#}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[some] + fn f() -> felt252 { 12 } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success() + .stdout_matches(indoc! {r#" + [..] Compiling some v1.0.0 ([..]Scarb.toml) + [..] Compiling hello v1.0.0 ([..]Scarb.toml) + warn: Plugin diagnostic: Some warning from macro. + --> [..]lib.cairo:1:1 + #[some] + ^^^^^^^ + + [..]Finished `dev` profile target(s) in [..] + "#}); +} + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn can_emit_plugin_error() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default_v1() + .lib_rs(indoc! 
{r#" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, Diagnostic}; + + #[attribute_macro] + pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let diag = Diagnostic::error("Some error from macro."); + ProcMacroResult::new(token_stream) + .with_diagnostics(diag.into()) + } + "#}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[some] + fn f() -> felt252 { 12 } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .failure() + .stdout_matches(indoc! {r#" + [..] Compiling some v1.0.0 ([..]Scarb.toml) + [..] Compiling hello v1.0.0 ([..]Scarb.toml) + error: Plugin diagnostic: Some error from macro. + --> [..]lib.cairo:1:1 + #[some] + ^^^^^^^ + + error: could not compile `hello` due to previous error + "#}); +} + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn diags_from_generated_code_mapped_correctly() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default_v1() + .lib_rs(indoc! {r#" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, Diagnostic}; + + #[attribute_macro] + pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let diag = Diagnostic::error("Some error from macro."); + ProcMacroResult::new(token_stream) + .with_diagnostics(diag.into()) + } + "#}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[cfg(target: 'lib')] + #[some] + fn test_increase_balance() { + i_don_exist(); + } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .failure() + .stdout_matches(indoc! {r#" + [..] Compiling some v1.0.0 ([..]Scarb.toml) + [..] Compiling hello v1.0.0 ([..]Scarb.toml) + error: Plugin diagnostic: Some error from macro. + --> [..]lib.cairo:2:1 + #[some] + ^^^^^^^ + + error[E0006]: Function not found. + --> [..]lib.cairo:4:5 + i_don_exist(); + ^^^^^^^^^^^ + + error: could not compile `hello` due to previous error + "#}); +} + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn can_remove_original_node() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default_v1() + .lib_rs(indoc! {r#" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + + #[attribute_macro] + pub fn some(_attr: TokenStream, _: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(TokenStream::empty()) + } + "#}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[some] + fn main() -> felt252 { 12 } + + fn main() -> felt252 { 34 } + + #[some] + fn main() -> felt252 { 56 } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("cairo-run") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success() + .stdout_matches(indoc! {r#" + warn: `scarb cairo-run` will be deprecated soon + help: use `scarb execute` instead + [..] Compiling some v1.0.0 ([..]Scarb.toml) + [..] 
Compiling hello v1.0.0 ([..]Scarb.toml) + [..]Finished `dev` profile target(s) in [..] + [..]Running hello + Run completed successfully, returning [34] + "#}); +} + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn can_replace_original_node() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default_v1() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + + #[attribute_macro] + pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let token_stream = TokenStream::new( + token_stream + .to_string() + .replace("12", "34") + ); + ProcMacroResult::new(token_stream) + } + "##}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[some] + fn main() -> felt252 { 12 } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("cairo-run") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success() + .stdout_matches(indoc! {r#" + warn: `scarb cairo-run` will be deprecated soon + help: use `scarb execute` instead + [..] Compiling some v1.0.0 ([..]Scarb.toml) + [..] Compiling hello v1.0.0 ([..]Scarb.toml) + [..]Finished `dev` profile target(s) in [..] + [..]Running hello + Run completed successfully, returning [34] + "#}); +} + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn can_return_aux_data_from_plugin() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default_v1() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, AuxData, PostProcessContext, post_process}; + use serde::{Serialize, Deserialize}; + + #[derive(Debug, Serialize, Deserialize)] + struct SomeMacroDataFormat { + msg: String + } + + #[attribute_macro] + pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let value = SomeMacroDataFormat { msg: "Hello from some macro!".to_string() }; + let value = serde_json::to_string(&value).unwrap(); + let value: Vec = value.into_bytes(); + let aux_data = AuxData::new(value); + ProcMacroResult::new(token_stream).with_aux_data(aux_data) + } + + #[post_process] + pub fn callback(context: PostProcessContext) { + let aux_data = context.aux_data.into_iter() + .map(|aux_data| { + let value: Vec = aux_data.into(); + let aux_data: SomeMacroDataFormat = serde_json::from_slice(&value).unwrap(); + aux_data + }) + .collect::>(); + println!("{:?}", aux_data); + } + + #[post_process] + pub fn some_no_op_callback(context: PostProcessContext) { + drop(context.aux_data); + } + "##}) + .add_dep(r#"serde = { version = "*", features = ["derive"] }"#) + .add_dep(r#"serde_json = "*""#) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[some] + fn main() -> felt252 { 12 } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success() + .stdout_matches(indoc! {r#" + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + [SomeMacroDataFormat { msg: "Hello from some macro!" }] + [..]Finished `dev` profile target(s) in [..] 
+ "#}); +} + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn can_read_token_stream_metadata() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default_v1() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + + #[attribute_macro] + pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + println!("{:#?}", token_stream.metadata()); + ProcMacroResult::new(token_stream) + } + "##}) + .build(&t); + + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[some] + fn main() -> felt252 { 12 } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success() + .stdout_matches(indoc! {r#" + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + TokenStreamMetadata { + original_file_path: Some( + "[..]lib.cairo", + ), + file_id: Some( + "[..]", + ), + } + [..]Finished `dev` profile target(s) in [..] + "#}); +} + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn can_define_multiple_macros() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default_v1() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, AuxData, PostProcessContext, post_process}; + + #[attribute_macro] + pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let token_stream = TokenStream::new( + token_stream + .to_string() + .replace("12", "34") + ); + let aux_data = AuxData::new(Vec::new()); + ProcMacroResult::new(token_stream).with_aux_data(aux_data) + } + + #[attribute_macro] + pub fn world(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let token_stream = TokenStream::new( + token_stream + .to_string() + .replace("56", "78") + ); + let aux_data = AuxData::new(Vec::new()); + ProcMacroResult::new(token_stream).with_aux_data(aux_data) + } + + #[post_process] + pub fn callback(context: PostProcessContext) { + assert_eq!(context.aux_data.len(), 2); + } + "##}) + .build(&t); + + let w = temp.child("other"); + CairoPluginProjectBuilder::default_v1() + .name("other") + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, AuxData, PostProcessContext, post_process}; + + #[attribute_macro] + pub fn beautiful(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let token_stream = TokenStream::new( + token_stream + .to_string() + .replace("90", "09") + ); + let aux_data = AuxData::new(Vec::new()); + ProcMacroResult::new(token_stream).with_aux_data(aux_data) + } + + #[post_process] + pub fn callback(context: PostProcessContext) { + assert_eq!(context.aux_data.len(), 1); + } + "##}) + .build(&w); + + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .dep("other", &w) + .lib_cairo(indoc! {r#" + #[hello] + #[beautiful] + #[world] + fn main() -> felt252 { 12 + 56 + 90 } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("cairo-run") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success() + .stdout_matches(indoc! 
{r#" + warn: `scarb cairo-run` will be deprecated soon + help: use `scarb execute` instead + [..]Compiling other v1.0.0 ([..]Scarb.toml) + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + [..]Finished `dev` profile target(s) in [..] + [..]Running hello + Run completed successfully, returning [121] + "#}); +} + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn cannot_duplicate_macros() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default_v1() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + + #[attribute_macro] + pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(token_stream) + } + + #[attribute_macro] + pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(token_stream) + } + "##}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[hello] + fn main() -> felt252 { 12 + 56 + 90 } + "#}) + .build(&project); + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + // Fails with Cargo compile error. + .failure(); +} + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn cannot_duplicate_macros_across_packages() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default_v1() + .lib_rs(indoc! {r#" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + + #[attribute_macro] + pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(token_stream) + } + + #[attribute_macro] + pub fn world(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(token_stream) + } + "#}) + .build(&t); + + let w = temp.child("other"); + CairoPluginProjectBuilder::default_v1() + .name("other") + .lib_rs(indoc! {r#" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + + #[attribute_macro] + pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(token_stream) + } + "#}) + .build(&w); + + let p = temp.child("pkg"); + CairoPluginProjectBuilder::default_v1() + .name("pkg") + .lib_rs(indoc! {r#" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + + #[attribute_macro] + pub fn foo(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(token_stream) + } + "#}) + .build(&p); + + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .dep("other", &w) + .dep("pkg", &p) + .lib_cairo(indoc! {r#" + #[hello] + #[world] + fn main() -> felt252 { 12 + 56 + 90 } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .failure() + .stdout_matches(indoc! 
{r#" + [..]Compiling other v1.0.0 ([..]Scarb.toml) + [..]Compiling pkg v1.0.0 ([..]Scarb.toml) + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + error: duplicate expansions defined for procedural macros: hello (other v1.0.0 ([..]Scarb.toml) and some v1.0.0 ([..]Scarb.toml)) + "#}); +} + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn cannot_use_undefined_macro() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default_v1() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + + #[attribute_macro] + pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(token_stream) + } + "##}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[world] + fn main() -> felt252 { 12 + 56 + 90 } + "#}) + .build(&project); + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .failure() + .stdout_matches(indoc! {r#" + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + error: Plugin diagnostic: Unsupported attribute. + --> [..]lib.cairo:1:1 + #[world] + ^^^^^^^^ + + error: could not compile `hello` due to previous error + "#}); +} + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn can_resolve_full_path_markers() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default_v1() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, post_process, PostProcessContext}; + + #[attribute_macro] + pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let full_path_markers = vec!["some-key".to_string()]; + + let code = format!( + r#"#[macro::full_path_marker("some-key")] {}"#, + token_stream.to_string().replace("12", "34") + ); + + ProcMacroResult::new(TokenStream::new(code)) + .with_full_path_markers(full_path_markers) + } + + #[post_process] + pub fn callback(context: PostProcessContext) { + println!("{:?}", context.full_path_markers); + } + "##}) + .build(&t); + + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[some] + fn main() -> felt252 { 12 } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success() + .stdout_matches(indoc! {r#" + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + [FullPathMarker { key: "some-key", full_path: "hello::main" }] + [..]Finished `dev` profile target(s) in [..] + "#}); +} + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn can_implement_inline_macro() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default_v1() + .lib_rs(indoc! 
{r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, inline_macro}; + + #[inline_macro] + pub fn some(token_stream: TokenStream) -> ProcMacroResult { + assert_eq!(token_stream.to_string(), "()"); + ProcMacroResult::new(TokenStream::new("34".to_string())) + } + "##}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + fn main() -> felt252 { + let x = some!(); + x + } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("cairo-run") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success() + .stdout_matches(indoc! {r#" + warn: `scarb cairo-run` will be deprecated soon + help: use `scarb execute` instead + [..] Compiling some v1.0.0 ([..]Scarb.toml) + [..] Compiling hello v1.0.0 ([..]Scarb.toml) + [..]Finished `dev` profile target(s) in [..] + [..]Running hello + Run completed successfully, returning [34] + "#}); +} + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn empty_inline_macro_result() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default_v1() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, inline_macro}; + + #[inline_macro] + pub fn some(_token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(TokenStream::empty()) + } + "##}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + fn main() -> felt252 { + let _x = some!(); + 12 + } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .failure() + .stdout_matches(indoc! {r#" + [..] Compiling some v1.0.0 ([..]Scarb.toml) + [..] Compiling hello v1.0.0 ([..]Scarb.toml) + error: Inline macro `some` failed. + --> [..]lib.cairo:2:14 + let _x = some!(); + ^^^^^^^ + + error: could not compile `hello` due to previous error + "#}); +} + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn can_implement_derive_macro() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default_v1() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{derive_macro, ProcMacroResult, TokenStream}; + + #[derive_macro] + pub fn custom_derive(token_stream: TokenStream) -> ProcMacroResult { + let name = token_stream + .clone() + .to_string() + .lines() + .find(|l| l.starts_with("struct")) + .unwrap() + .to_string() + .replace("struct", "") + .replace("}", "") + .replace("{", "") + .trim() + .to_string(); + + let token_stream = TokenStream::new(indoc::formatdoc!{r#" + impl SomeImpl of Hello<{name}> {{ + fn world(self: @{name}) -> u32 {{ + 32 + }} + }} + "#}); + + ProcMacroResult::new(token_stream) + } + "##}) + .add_dep(r#"indoc = "*""#) + .build(&t); + + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + trait Hello { + fn world(self: @T) -> u32; + } + + #[derive(CustomDerive, Drop)] + struct SomeType {} + + fn main() -> u32 { + let a = SomeType {}; + a.world() + } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("cairo-run") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success() + .stdout_matches(indoc! 
{r#" + warn: `scarb cairo-run` will be deprecated soon + help: use `scarb execute` instead + [..] Compiling some v1.0.0 ([..]Scarb.toml) + [..] Compiling hello v1.0.0 ([..]Scarb.toml) + [..]Finished `dev` profile target(s) in [..] + [..]Running hello + Run completed successfully, returning [32] + "#}); +} + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn can_use_both_derive_and_attr() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default_v1() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{derive_macro, attribute_macro, ProcMacroResult, TokenStream}; + + #[attribute_macro] + pub fn first_attribute(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(TokenStream::new( + token_stream.to_string() + .replace("SomeType", "OtherType") + )) + } + + #[attribute_macro] + pub fn second_attribute(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let token_stream = TokenStream::new( + token_stream.to_string().replace("OtherType", "RenamedStruct") + ); + ProcMacroResult::new(TokenStream::new( + format!("#[derive(Drop)]\n{token_stream}") + )) + } + + #[derive_macro] + pub fn custom_derive(_token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(TokenStream::new( + indoc::formatdoc!{r#" + impl SomeImpl of Hello {{ + fn world(self: @RenamedStruct) -> u32 {{ + 32 + }} + }} + "#} + )) + } + "##}) + .add_dep(r#"indoc = "*""#) + .build(&t); + + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + trait Hello { + fn world(self: @T) -> u32; + } + + #[first_attribute] + #[derive(CustomDerive)] + #[second_attribute] + struct SomeType {} + + fn main() -> u32 { + let a = RenamedStruct {}; + a.world() + } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("cairo-run") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success() + .stdout_matches(indoc! {r#" + warn: `scarb cairo-run` will be deprecated soon + help: use `scarb execute` instead + [..] Compiling some v1.0.0 ([..]Scarb.toml) + [..] Compiling hello v1.0.0 ([..]Scarb.toml) + [..]Finished `dev` profile target(s) in [..] + [..]Running hello + Run completed successfully, returning [32] + "#}); +} + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn can_read_attribute_args() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default_v1() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + + #[attribute_macro] + pub fn some(attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + println!("{}", attr); + ProcMacroResult::new(token_stream) + } + "##}) + .build(&t); + + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[some( + first: "aaa", + second: "bbb", + )] + fn main() -> felt252 { 12 } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success() + .stdout_matches(indoc! {r#" + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + ( + first: "aaa", + second: "bbb", + ) + [..]Finished `dev` profile target(s) in [..] 
+ "#}); +} + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn can_create_executable_attribute() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default_v1() + .lib_rs(indoc! {r##" + use cairo_lang_macro::executable_attribute; + + executable_attribute!("some"); + "##}) + .build(&t); + + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[some] + fn main() -> felt252 { 12 } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success() + .stdout_matches(indoc! {r#" + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + [..]Finished `dev` profile target(s) in [..] + "#}); + let sierra = project + .child("target") + .child("dev") + .child("hello.sierra.json") + .read_to_string(); + let sierra = serde_json::from_str::(&sierra).unwrap(); + let sierra = sierra.into_v1().unwrap(); + let executables = sierra.debug_info.unwrap().executables; + assert_eq!(executables.len(), 1); + let executables = executables.get("some").unwrap(); + assert_eq!(executables.len(), 1); + let fid = executables.first().unwrap().clone(); + assert_eq!(fid.clone().debug_name.unwrap(), "hello::main"); + assert!( + sierra + .program + .funcs + .iter() + .any(|f| f.id.clone() == fid.clone()) + ); +} + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn executable_name_cannot_clash_attr() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default_v1() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{executable_attribute, attribute_macro, TokenStream, ProcMacroResult}; + + executable_attribute!("some"); + + #[attribute_macro] + fn some(_args: TokenStream, input: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(input) + } + "##}) + .build(&t); + + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[some] + fn main() -> felt252 { 12 } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .failure() + .stdout_matches(indoc! {r#" + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + error: duplicate expansions defined for procedural macro some v1.0.0 ([..]Scarb.toml): some + "#}); +} + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn can_be_expanded() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default_v1() + .lib_rs(indoc! 
{r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, derive_macro}; + + #[attribute_macro] + pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let token_stream = TokenStream::new( + token_stream + .to_string() + .replace("12", "34") + ); + ProcMacroResult::new(token_stream) + } + + #[derive_macro] + pub fn custom_derive(token_stream: TokenStream) -> ProcMacroResult { + let name = token_stream + .clone() + .to_string() + .lines() + .find(|l| l.starts_with("struct")) + .unwrap() + .to_string() + .replace("struct", "") + .replace("}", "") + .replace("{", "") + .trim() + .to_string(); + + let token_stream = TokenStream::new(indoc::formatdoc!{r#" + impl SomeImpl of Hello<{name}> {{ + fn world(self: @{name}) -> u32 {{ + 32 + }} + }} + "#}); + + ProcMacroResult::new(token_stream) + } + "##}) + .add_dep(r#"indoc = "*""#) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + trait Hello { + fn world(self: @T) -> u32; + } + + #[derive(CustomDerive, Drop)] + struct SomeType {} + + #[some] + fn main() -> u32 { + let x = 12; + let a = SomeType {}; + a.world() + x + } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("expand") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success(); + + assert_eq!( + project.child("target/dev").files(), + vec!["hello.expanded.cairo"] + ); + let expanded = project + .child("target/dev/hello.expanded.cairo") + .read_to_string(); + snapbox::assert_eq( + indoc! {r#" + mod hello { + trait Hello { + fn world(self: @T) -> u32; + } + + #[derive(CustomDerive, Drop)] + struct SomeType {} + impl SomeTypeDrop<> of core::traits::Drop; + impl SomeImpl of Hello { + fn world(self: @SomeType) -> u32 { + 32 + } + } + fn main() -> u32 { + let x = 34; + let a = SomeType {}; + a.world() + x + } + } + "#}, + expanded, + ); +} + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn can_expand_trait_inner_func_attrr() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default_v1() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{attribute_macro, ProcMacroResult, TokenStream}; + + #[attribute_macro] + pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(TokenStream::new( + token_stream.to_string() + .replace("hello", "world") + .replace("12", "34") + )) + } + "##}) + .build(&t); + + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + trait Hello { + #[some] + fn hello(self: @T) -> u32 { + 12 + } + } + + #[derive(Drop)] + struct SomeStruct {} + + impl SomeImpl of Hello {} + + fn main() -> u32 { + let a = SomeStruct {}; + a.world() + } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("cairo-run") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success() + .stdout_matches(indoc! {r#" + warn: `scarb cairo-run` will be deprecated soon + help: use `scarb execute` instead + [..] Compiling some v1.0.0 ([..]Scarb.toml) + [..] Compiling hello v1.0.0 ([..]Scarb.toml) + [..]Finished `dev` profile target(s) in [..] 
+ [..]Running hello + Run completed successfully, returning [34] + "#}); +} + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn can_expand_impl_inner_func_attrr() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default_v1() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{attribute_macro, ProcMacroResult, TokenStream}; + + #[attribute_macro] + pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(TokenStream::new( + token_stream.to_string() + .replace("1", "2") + )) + } + "##}) + .build(&t); + + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .edition("2023_01") + .version("1.0.0") + .dep_starknet() + .dep_cairo_test() + .dep("some", &t) + .manifest_extra(indoc! {r#" + [[target.starknet-contract]] + "#}) + .lib_cairo(indoc! {r#" + #[starknet::interface] + trait IHello { + fn get(self: @T) -> u128; + fn increase(ref self: T); + } + + #[starknet::contract] + mod Hello { + use starknet::storage::{StoragePointerReadAccess, StoragePointerWriteAccess}; + use starknet::get_contract_address; + use super::IHello; + + #[storage] + struct Storage { + counter: u128 + } + + #[constructor] + fn constructor(ref self: ContractState, value_: u128) { + self.counter.write(value_); + } + + #[abi(embed_v0)] + impl IncImpl of IHello { + fn get(self: @ContractState) -> u128 { + self.counter.read() + } + + #[some] + fn increase(ref self: ContractState) { + self.counter.write( self.counter.read() + 1 ); + } + } + } + + #[cfg(test)] + mod tests { + use array::ArrayTrait; + use core::result::ResultTrait; + use core::traits::Into; + use option::OptionTrait; + use starknet::syscalls::deploy_syscall; + use traits::TryInto; + + use super::{IHello, Hello, IHelloDispatcher, IHelloDispatcherTrait}; + + #[test] +#[ignore = "TODO(maciektr): support old macro api"] + fn test_flow() { + let calldata = array![100]; + let (address0, _) = deploy_syscall( + Hello::TEST_CLASS_HASH.try_into().unwrap(), 0, calldata.span(), false + ).unwrap(); + + let mut contract0 = IHelloDispatcher { contract_address: address0 }; + + assert_eq!(@contract0.get(), @100, "contract0.get() == 100"); + @contract0.increase(); + assert_eq!(@contract0.get(), @102, "contract0.get() == 102"); + } + } + + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("cairo-test") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success() + .stdout_matches(indoc! {r#" + [..] Compiling some v1.0.0 ([..]Scarb.toml) + [..] Compiling test(hello_unittest) hello v1.0.0 ([..]Scarb.toml) + [..]Finished `dev` profile target(s) in [..] + [..]Testing hello + running 1 test + test hello::tests::test_flow ... ok (gas usage est.: [..]) + test result: ok. 1 passed; 0 failed; 0 ignored; 0 filtered out; + "#}); +} + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn can_be_used_through_re_export() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default_v1() + .lib_rs(indoc! 
{r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + + #[attribute_macro] + pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let token_stream = TokenStream::new( + token_stream + .to_string() + .replace("12", "34") + ); + ProcMacroResult::new(token_stream) + } + "##}) + .build(&t); + let dep = temp.child("dep"); + ProjectBuilder::start() + .name("dep") + .version("1.0.0") + .dep("some", &t) + .manifest_package_extra(indoc! {r#" + re-export-cairo-plugins = ["some"] + "#}) + .build(&dep); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("dep", &dep) + .lib_cairo(indoc! {r#" + #[some] + fn main() -> felt252 { 12 } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("expand") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success(); + + assert_eq!( + project.child("target/dev").files(), + vec!["hello.expanded.cairo"] + ); + let expanded = project + .child("target/dev/hello.expanded.cairo") + .read_to_string(); + snapbox::assert_eq( + indoc! {r#" + mod hello { + fn main() -> felt252 { + 34 + } + } + "#}, + expanded, + ); +} diff --git a/scarb/tests/proc_macro_v1_and_v2.rs b/scarb/tests/proc_macro_v1_and_v2.rs new file mode 100644 index 000000000..6bc57c442 --- /dev/null +++ b/scarb/tests/proc_macro_v1_and_v2.rs @@ -0,0 +1,159 @@ +use assert_fs::TempDir; +use assert_fs::fixture::PathChild; +use indoc::indoc; +use scarb_test_support::cairo_plugin_project_builder::CairoPluginProjectBuilder; +use scarb_test_support::command::Scarb; +use scarb_test_support::project_builder::ProjectBuilder; + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn can_use_both_v1_and_v2_proc_macros() { + let temp = TempDir::new().unwrap(); + let foo = temp.child("foo"); + CairoPluginProjectBuilder::default_v1() + .name("foo") + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + + #[attribute_macro] + pub fn foo(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let token_stream = TokenStream::new( + token_stream + .to_string() + .replace("12", "34") + ); + ProcMacroResult::new(token_stream) + } + "##}) + .build(&foo); + let bar = temp.child("bar"); + CairoPluginProjectBuilder::default() + .name("bar") + .lib_rs(indoc! {r##" + use cairo_lang_macro_v2::{ProcMacroResult, TokenStream, attribute_macro, TokenTree, Token, TextSpan}; + + #[attribute_macro] + pub fn bar(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let token_stream = TokenStream::new( + token_stream + .into_iter() + .map(|TokenTree::Ident(token)| { + if token.content.to_string() == "12" { + TokenTree::Ident(Token::new("34", TextSpan::call_site())) + } else { + TokenTree::Ident(token) + } + }) + .collect(), + ); + ProcMacroResult::new(token_stream) + } + "##}) + .build(&bar); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("foo", &foo) + .dep("bar", &bar) + .lib_cairo(indoc! {r#" + #[foo] + fn first() -> felt252 {12} + + #[bar] + fn second() -> felt252 {12} + + fn main() -> felt252 { first() + second() } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("cairo-run") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success() + .stdout_matches(indoc! 
{r#" + warn: `scarb cairo-run` will be deprecated soon + help: use `scarb execute` instead + [..]Compiling bar v1.0.0 ([..]Scarb.toml) + [..]Compiling foo v1.0.0 ([..]Scarb.toml) + [..] Compiling hello v1.0.0 ([..]Scarb.toml) + [..]Finished `dev` profile target(s) in [..] + [..]Running hello + Run completed successfully, returning [68] + "#}); +} + +#[test] +#[ignore = "TODO(maciektr): support old macro api"] +fn v1_and_v2_macros_cannot_duplicate_expansions() { + let temp = TempDir::new().unwrap(); + let foo = temp.child("foo"); + CairoPluginProjectBuilder::default_v1() + .name("foo") + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + + #[attribute_macro] + pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let token_stream = TokenStream::new( + token_stream + .to_string() + .replace("12", "34") + ); + ProcMacroResult::new(token_stream) + } + "##}) + .build(&foo); + let bar = temp.child("bar"); + CairoPluginProjectBuilder::default() + .name("bar") + .lib_rs(indoc! {r##" + use cairo_lang_macro_v2::{ProcMacroResult, TokenStream, attribute_macro, TokenTree, Token, TextSpan}; + + #[attribute_macro] + pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let token_stream = TokenStream::new( + token_stream + .into_iter() + .map(|TokenTree::Ident(token)| { + if token.content.to_string() == "12" { + TokenTree::Ident(Token::new("34", TextSpan::call_site())) + } else { + TokenTree::Ident(token) + } + }) + .collect(), + ); + ProcMacroResult::new(token_stream) + } + "##}) + .build(&bar); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("foo", &foo) + .dep("bar", &bar) + .lib_cairo(indoc! {r#" + #[some] + fn main() -> felt252 {12} + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .failure() + .stdout_matches(indoc! 
{r#" + [..]Compiling bar v1.0.0 ([..]Scarb.toml) + [..]Compiling foo v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + error: duplicate expansions defined for procedural macros: some (bar v1.0.0 ([..]Scarb.toml) and foo v1.0.0 ([..]Scarb.toml)) + "#}); +} diff --git a/scarb/tests/proc_macro_v2_server.rs b/scarb/tests/proc_macro_v2_server.rs index f31bd97cb..e1112df60 100644 --- a/scarb/tests/proc_macro_v2_server.rs +++ b/scarb/tests/proc_macro_v2_server.rs @@ -10,7 +10,7 @@ use scarb_proc_macro_server_types::methods::expand::ExpandInlineMacroParams; use scarb_proc_macro_server_types::scope::ProcMacroScope; use scarb_test_support::cairo_plugin_project_builder::CairoPluginProjectBuilder; use scarb_test_support::proc_macro_server::ProcMacroClient; -use scarb_test_support::proc_macro_server::SIMPLE_MACROS; +use scarb_test_support::proc_macro_server::SIMPLE_MACROS_V2; use scarb_test_support::project_builder::ProjectBuilder; #[test] @@ -19,7 +19,7 @@ fn defined_macros() { let plugin_package = t.child("some"); CairoPluginProjectBuilder::default() - .lib_rs(SIMPLE_MACROS) + .lib_rs(SIMPLE_MACROS_V2) .build(&plugin_package); let project = t.child("test_package"); @@ -70,7 +70,7 @@ fn expand_attribute() { "##; CairoPluginProjectBuilder::default() - .lib_rs(format!("{SIMPLE_MACROS}\n{rename_to_very_new_name}")) + .lib_rs(format!("{SIMPLE_MACROS_V2}\n{rename_to_very_new_name}")) .add_dep(r#"regex = "1.11.1""#) .build(&plugin_package); @@ -115,7 +115,7 @@ fn expand_derive() { let plugin_package = t.child("some"); CairoPluginProjectBuilder::default() - .lib_rs(SIMPLE_MACROS) + .lib_rs(SIMPLE_MACROS_V2) .build(&plugin_package); let project = t.child("test_package"); @@ -175,7 +175,7 @@ fn expand_inline() { "#; CairoPluginProjectBuilder::default() - .lib_rs(format!("{SIMPLE_MACROS}\n{replace_all_15_with_25}")) + .lib_rs(format!("{SIMPLE_MACROS_V2}\n{replace_all_15_with_25}")) .build(&plugin_package); let project = t.child("test_package"); diff --git a/utils/scarb-test-support/src/cairo_plugin_project_builder.rs b/utils/scarb-test-support/src/cairo_plugin_project_builder.rs index 07e70d086..e40e2dc74 100644 --- a/utils/scarb-test-support/src/cairo_plugin_project_builder.rs +++ b/utils/scarb-test-support/src/cairo_plugin_project_builder.rs @@ -7,7 +7,7 @@ use std::collections::HashMap; use std::path::PathBuf; use std::sync::LazyLock; -static CAIRO_LANG_MACRO_PATH: LazyLock = LazyLock::new(|| { +static CAIRO_LANG_MACRO_PATH_V2: LazyLock = LazyLock::new(|| { let path = fsx::canonicalize( PathBuf::from(env!("CARGO_MANIFEST_DIR")) .join("../../plugins/") @@ -17,11 +17,19 @@ static CAIRO_LANG_MACRO_PATH: LazyLock = LazyLock::new(|| { serde_json::to_string(&path).unwrap() }); +#[derive(Debug, Clone, Default, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum CairoPluginProjectVersion { + V1, + #[default] + V2, +} + pub struct CairoPluginProjectBuilder { project: ProjectBuilder, name: String, src: HashMap, deps: Vec, + macro_version: CairoPluginProjectVersion, } impl CairoPluginProjectBuilder { @@ -31,6 +39,17 @@ impl CairoPluginProjectBuilder { name: Default::default(), src: Default::default(), deps: Default::default(), + macro_version: CairoPluginProjectVersion::default(), + } + } + + pub fn start_v1() -> Self { + Self { + project: ProjectBuilder::start(), + name: Default::default(), + src: Default::default(), + deps: Default::default(), + macro_version: CairoPluginProjectVersion::V1, } } @@ -65,9 +84,15 @@ impl CairoPluginProjectBuilder { } fn render_manifest(&self) -> String { - let 
macro_lib_path = CAIRO_LANG_MACRO_PATH.to_string(); + let macro_lib_path = CAIRO_LANG_MACRO_PATH_V2.to_string(); let deps = self.deps.join("\n"); let name = self.name.clone(); + let macro_lib_version_req = match self.macro_version { + CairoPluginProjectVersion::V1 => "\"0.1\"".to_string(), + CairoPluginProjectVersion::V2 => { + format!("{{ path = {macro_lib_path}, version = \"0.2.0\" }}") + } + }; formatdoc! {r#" [package] name = "{name}" @@ -79,7 +104,7 @@ impl CairoPluginProjectBuilder { crate-type = ["cdylib"] [dependencies] - cairo-lang-macro = {{ path = {macro_lib_path}, version = "0.1.0" }} + cairo-lang-macro = {macro_lib_version_req} {deps} "#} } @@ -101,6 +126,25 @@ impl CairoPluginProjectBuilder { pub fn add_primitive_token_dep(self) -> Self { self.add_dep(r#"cairo-lang-primitive-token = "1""#) } + + pub fn default_v1() -> Self { + let default_name = "some"; + let default_code = indoc! {r#" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + #[attribute_macro] + pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(token_stream) + } + "#}; + Self::start_v1() + .name(default_name) + .scarb_project(|b| { + b.name(default_name) + .version("1.0.0") + .manifest_extra("[cairo-plugin]") + }) + .lib_rs(default_code) + } } impl Default for CairoPluginProjectBuilder { @@ -119,7 +163,10 @@ impl Default for CairoPluginProjectBuilder { .scarb_project(|b| { b.name(default_name) .version("1.0.0") - .manifest_extra(r#"[cairo-plugin]"#) + .manifest_extra(indoc! {r#" + [cairo-plugin] + api = "v2" + "#}) }) .lib_rs(default_code) } diff --git a/utils/scarb-test-support/src/proc_macro_server.rs b/utils/scarb-test-support/src/proc_macro_server.rs index ee9c74f06..0fe661b3c 100644 --- a/utils/scarb-test-support/src/proc_macro_server.rs +++ b/utils/scarb-test-support/src/proc_macro_server.rs @@ -19,7 +19,7 @@ use std::process::ChildStdin; use std::process::ChildStdout; use std::process::Stdio; -pub const SIMPLE_MACROS: &str = r#" +pub const SIMPLE_MACROS_V2: &str = r#" use cairo_lang_macro::{ ProcMacroResult, TokenStream, TokenTree, Token, TextSpan, From 9b12179ff2f6406c20b902499e3db6498f5b5cfe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maciej=20Tr=C4=85tnowiecki?= Date: Thu, 13 Mar 2025 20:20:55 +0100 Subject: [PATCH 28/38] Proc macro: move v2 impl to a submodule --- scarb/src/compiler/plugin/proc_macro/mod.rs | 13 +++---------- scarb/src/compiler/plugin/proc_macro/v1/mod.rs | 1 + .../plugin/proc_macro/{ => v2}/compilation.rs | 0 .../src/compiler/plugin/proc_macro/{ => v2}/ffi.rs | 4 ++-- .../plugin/proc_macro/{ => v2}/host/attribute.rs | 8 ++++---- .../plugin/proc_macro/{ => v2}/host/aux_data.rs | 2 +- .../plugin/proc_macro/{ => v2}/host/conversion.rs | 0 .../plugin/proc_macro/{ => v2}/host/derive.rs | 8 ++++---- .../plugin/proc_macro/{ => v2}/host/inline.rs | 8 ++++---- .../compiler/plugin/proc_macro/{ => v2}/host/mod.rs | 2 +- .../plugin/proc_macro/{ => v2}/host/post.rs | 4 ++-- scarb/src/compiler/plugin/proc_macro/v2/mod.rs | 11 +++++++++++ .../plugin/proc_macro/{ => v2}/repository.rs | 4 ++-- .../compiler/plugin/proc_macro/{ => v2}/types.rs | 2 +- 14 files changed, 36 insertions(+), 31 deletions(-) create mode 100644 scarb/src/compiler/plugin/proc_macro/v1/mod.rs rename scarb/src/compiler/plugin/proc_macro/{ => v2}/compilation.rs (100%) rename scarb/src/compiler/plugin/proc_macro/{ => v2}/ffi.rs (98%) rename scarb/src/compiler/plugin/proc_macro/{ => v2}/host/attribute.rs (98%) rename scarb/src/compiler/plugin/proc_macro/{ => 
v2}/host/aux_data.rs (96%) rename scarb/src/compiler/plugin/proc_macro/{ => v2}/host/conversion.rs (100%) rename scarb/src/compiler/plugin/proc_macro/{ => v2}/host/derive.rs (95%) rename scarb/src/compiler/plugin/proc_macro/{ => v2}/host/inline.rs (92%) rename scarb/src/compiler/plugin/proc_macro/{ => v2}/host/mod.rs (98%) rename scarb/src/compiler/plugin/proc_macro/{ => v2}/host/post.rs (96%) create mode 100644 scarb/src/compiler/plugin/proc_macro/v2/mod.rs rename scarb/src/compiler/plugin/proc_macro/{ => v2}/repository.rs (92%) rename scarb/src/compiler/plugin/proc_macro/{ => v2}/types.rs (98%) diff --git a/scarb/src/compiler/plugin/proc_macro/mod.rs b/scarb/src/compiler/plugin/proc_macro/mod.rs index 9b4e375a4..f19b618f9 100644 --- a/scarb/src/compiler/plugin/proc_macro/mod.rs +++ b/scarb/src/compiler/plugin/proc_macro/mod.rs @@ -1,11 +1,4 @@ -pub mod compilation; -mod ffi; -mod host; -mod repository; -mod types; +pub mod v1; +pub mod v2; -pub use compilation::{check_unit, compile_unit, fetch_crate}; -pub use ffi::*; -pub use host::*; -pub use repository::*; -pub use types::*; +pub use v2::*; diff --git a/scarb/src/compiler/plugin/proc_macro/v1/mod.rs b/scarb/src/compiler/plugin/proc_macro/v1/mod.rs new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/v1/mod.rs @@ -0,0 +1 @@ + diff --git a/scarb/src/compiler/plugin/proc_macro/compilation.rs b/scarb/src/compiler/plugin/proc_macro/v2/compilation.rs similarity index 100% rename from scarb/src/compiler/plugin/proc_macro/compilation.rs rename to scarb/src/compiler/plugin/proc_macro/v2/compilation.rs diff --git a/scarb/src/compiler/plugin/proc_macro/ffi.rs b/scarb/src/compiler/plugin/proc_macro/v2/ffi.rs similarity index 98% rename from scarb/src/compiler/plugin/proc_macro/ffi.rs rename to scarb/src/compiler/plugin/proc_macro/v2/ffi.rs index 2b63ab904..80e4fc0d4 100644 --- a/scarb/src/compiler/plugin/proc_macro/ffi.rs +++ b/scarb/src/compiler/plugin/proc_macro/v2/ffi.rs @@ -1,4 +1,4 @@ -use crate::compiler::plugin::proc_macro::compilation::SharedLibraryProvider; +use crate::compiler::plugin::proc_macro::v2::compilation::SharedLibraryProvider; use crate::core::{Package, PackageId}; use anyhow::{Context, Result, ensure}; use cairo_lang_macro::{ @@ -16,7 +16,7 @@ use std::ffi::{CStr, CString, c_char}; use std::fmt::Debug; use std::slice; -use crate::compiler::plugin::proc_macro::ProcMacroAuxData; +use crate::compiler::plugin::proc_macro::v2::ProcMacroAuxData; #[cfg(not(windows))] use libloading::os::unix::Symbol as RawSymbol; #[cfg(windows)] diff --git a/scarb/src/compiler/plugin/proc_macro/host/attribute.rs b/scarb/src/compiler/plugin/proc_macro/v2/host/attribute.rs similarity index 98% rename from scarb/src/compiler/plugin/proc_macro/host/attribute.rs rename to scarb/src/compiler/plugin/proc_macro/v2/host/attribute.rs index 8360e76ef..902913b5f 100644 --- a/scarb/src/compiler/plugin/proc_macro/host/attribute.rs +++ b/scarb/src/compiler/plugin/proc_macro/v2/host/attribute.rs @@ -1,9 +1,9 @@ -use crate::compiler::plugin::proc_macro::host::aux_data::{EmittedAuxData, ProcMacroAuxData}; -use crate::compiler::plugin::proc_macro::host::conversion::{ +use crate::compiler::plugin::proc_macro::v2::host::aux_data::{EmittedAuxData, ProcMacroAuxData}; +use crate::compiler::plugin::proc_macro::v2::host::conversion::{ CallSiteLocation, into_cairo_diagnostics, }; -use crate::compiler::plugin::proc_macro::host::generate_code_mappings; -use crate::compiler::plugin::proc_macro::{ +use 
crate::compiler::plugin::proc_macro::v2::host::generate_code_mappings; +use crate::compiler::plugin::proc_macro::v2::{ Expansion, ExpansionKind, ProcMacroHostPlugin, ProcMacroId, TokenStreamBuilder, }; use cairo_lang_defs::patcher::{PatchBuilder, RewriteNode}; diff --git a/scarb/src/compiler/plugin/proc_macro/host/aux_data.rs b/scarb/src/compiler/plugin/proc_macro/v2/host/aux_data.rs similarity index 96% rename from scarb/src/compiler/plugin/proc_macro/host/aux_data.rs rename to scarb/src/compiler/plugin/proc_macro/v2/host/aux_data.rs index a1fa0d54a..0f805aea9 100644 --- a/scarb/src/compiler/plugin/proc_macro/host/aux_data.rs +++ b/scarb/src/compiler/plugin/proc_macro/v2/host/aux_data.rs @@ -1,4 +1,4 @@ -use crate::compiler::plugin::proc_macro::{ProcMacroHostPlugin, ProcMacroId}; +use crate::compiler::plugin::proc_macro::v2::{ProcMacroHostPlugin, ProcMacroId}; use crate::core::PackageId; use cairo_lang_defs::plugin::GeneratedFileAuxData; use cairo_lang_macro::AuxData; diff --git a/scarb/src/compiler/plugin/proc_macro/host/conversion.rs b/scarb/src/compiler/plugin/proc_macro/v2/host/conversion.rs similarity index 100% rename from scarb/src/compiler/plugin/proc_macro/host/conversion.rs rename to scarb/src/compiler/plugin/proc_macro/v2/host/conversion.rs diff --git a/scarb/src/compiler/plugin/proc_macro/host/derive.rs b/scarb/src/compiler/plugin/proc_macro/v2/host/derive.rs similarity index 95% rename from scarb/src/compiler/plugin/proc_macro/host/derive.rs rename to scarb/src/compiler/plugin/proc_macro/v2/host/derive.rs index 1f4cdb237..86a3b33d6 100644 --- a/scarb/src/compiler/plugin/proc_macro/host/derive.rs +++ b/scarb/src/compiler/plugin/proc_macro/v2/host/derive.rs @@ -1,9 +1,9 @@ -use crate::compiler::plugin::proc_macro::host::aux_data::{EmittedAuxData, ProcMacroAuxData}; -use crate::compiler::plugin::proc_macro::host::conversion::{ +use crate::compiler::plugin::proc_macro::v2::host::aux_data::{EmittedAuxData, ProcMacroAuxData}; +use crate::compiler::plugin::proc_macro::v2::host::conversion::{ CallSiteLocation, into_cairo_diagnostics, }; -use crate::compiler::plugin::proc_macro::host::{DERIVE_ATTR, generate_code_mappings}; -use crate::compiler::plugin::proc_macro::{ +use crate::compiler::plugin::proc_macro::v2::host::{DERIVE_ATTR, generate_code_mappings}; +use crate::compiler::plugin::proc_macro::v2::{ Expansion, ExpansionKind, ProcMacroHostPlugin, ProcMacroId, TokenStreamBuilder, }; use cairo_lang_defs::plugin::{DynGeneratedFileAuxData, PluginGeneratedFile, PluginResult}; diff --git a/scarb/src/compiler/plugin/proc_macro/host/inline.rs b/scarb/src/compiler/plugin/proc_macro/v2/host/inline.rs similarity index 92% rename from scarb/src/compiler/plugin/proc_macro/host/inline.rs rename to scarb/src/compiler/plugin/proc_macro/v2/host/inline.rs index 13c9843c4..6491eff3b 100644 --- a/scarb/src/compiler/plugin/proc_macro/host/inline.rs +++ b/scarb/src/compiler/plugin/proc_macro/v2/host/inline.rs @@ -1,9 +1,9 @@ -use crate::compiler::plugin::proc_macro::host::aux_data::{EmittedAuxData, ProcMacroAuxData}; -use crate::compiler::plugin::proc_macro::host::conversion::{ +use crate::compiler::plugin::proc_macro::v2::host::aux_data::{EmittedAuxData, ProcMacroAuxData}; +use crate::compiler::plugin::proc_macro::v2::host::conversion::{ CallSiteLocation, into_cairo_diagnostics, }; -use crate::compiler::plugin::proc_macro::host::generate_code_mappings; -use crate::compiler::plugin::proc_macro::{ +use crate::compiler::plugin::proc_macro::v2::host::generate_code_mappings; +use 
crate::compiler::plugin::proc_macro::v2::{ Expansion, ProcMacroId, ProcMacroInstance, TokenStreamBuilder, }; use cairo_lang_defs::plugin::{ diff --git a/scarb/src/compiler/plugin/proc_macro/host/mod.rs b/scarb/src/compiler/plugin/proc_macro/v2/host/mod.rs similarity index 98% rename from scarb/src/compiler/plugin/proc_macro/host/mod.rs rename to scarb/src/compiler/plugin/proc_macro/v2/host/mod.rs index 9c6bd3ca5..17c626549 100644 --- a/scarb/src/compiler/plugin/proc_macro/host/mod.rs +++ b/scarb/src/compiler/plugin/proc_macro/v2/host/mod.rs @@ -9,7 +9,7 @@ use attribute::*; pub use aux_data::ProcMacroAuxData; use inline::*; -use crate::compiler::plugin::proc_macro::{Expansion, ExpansionKind, ProcMacroInstance}; +use crate::compiler::plugin::proc_macro::v2::{Expansion, ExpansionKind, ProcMacroInstance}; use crate::core::{PackageId, edition_variant}; use anyhow::{Result, ensure}; use cairo_lang_defs::plugin::{MacroPlugin, MacroPluginMetadata, PluginResult}; diff --git a/scarb/src/compiler/plugin/proc_macro/host/post.rs b/scarb/src/compiler/plugin/proc_macro/v2/host/post.rs similarity index 96% rename from scarb/src/compiler/plugin/proc_macro/host/post.rs rename to scarb/src/compiler/plugin/proc_macro/v2/host/post.rs index 07570b7a6..fd35e3545 100644 --- a/scarb/src/compiler/plugin/proc_macro/host/post.rs +++ b/scarb/src/compiler/plugin/proc_macro/v2/host/post.rs @@ -1,5 +1,5 @@ -use crate::compiler::plugin::proc_macro::ProcMacroHostPlugin; -use crate::compiler::plugin::proc_macro::host::FULL_PATH_MARKER_KEY; +use crate::compiler::plugin::proc_macro::v2::ProcMacroHostPlugin; +use crate::compiler::plugin::proc_macro::v2::host::FULL_PATH_MARKER_KEY; use crate::core::PackageId; use anyhow::Result; use cairo_lang_defs::ids::{ModuleItemId, TopLevelLanguageElementId}; diff --git a/scarb/src/compiler/plugin/proc_macro/v2/mod.rs b/scarb/src/compiler/plugin/proc_macro/v2/mod.rs new file mode 100644 index 000000000..9b4e375a4 --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/v2/mod.rs @@ -0,0 +1,11 @@ +pub mod compilation; +mod ffi; +mod host; +mod repository; +mod types; + +pub use compilation::{check_unit, compile_unit, fetch_crate}; +pub use ffi::*; +pub use host::*; +pub use repository::*; +pub use types::*; diff --git a/scarb/src/compiler/plugin/proc_macro/repository.rs b/scarb/src/compiler/plugin/proc_macro/v2/repository.rs similarity index 92% rename from scarb/src/compiler/plugin/proc_macro/repository.rs rename to scarb/src/compiler/plugin/proc_macro/v2/repository.rs index f1f08bc17..5c3a2069d 100644 --- a/scarb/src/compiler/plugin/proc_macro/repository.rs +++ b/scarb/src/compiler/plugin/proc_macro/v2/repository.rs @@ -1,5 +1,5 @@ -use crate::compiler::plugin::proc_macro::ProcMacroInstance; -use crate::compiler::plugin::proc_macro::compilation::SharedLibraryProvider; +use crate::compiler::plugin::proc_macro::v2::ProcMacroInstance; +use crate::compiler::plugin::proc_macro::v2::compilation::SharedLibraryProvider; use crate::core::{Config, Package, PackageId}; use anyhow::{Context, Result, bail}; use std::collections::HashMap; diff --git a/scarb/src/compiler/plugin/proc_macro/types.rs b/scarb/src/compiler/plugin/proc_macro/v2/types.rs similarity index 98% rename from scarb/src/compiler/plugin/proc_macro/types.rs rename to scarb/src/compiler/plugin/proc_macro/v2/types.rs index 304a8bc14..b6e7603ee 100644 --- a/scarb/src/compiler/plugin/proc_macro/types.rs +++ b/scarb/src/compiler/plugin/proc_macro/v2/types.rs @@ -68,7 +68,7 @@ fn whitespace_suffix_len(s: &str) -> u32 { #[cfg(test)] mod tests 
{ - use crate::compiler::plugin::proc_macro::TokenStreamBuilder; + use crate::compiler::plugin::proc_macro::v2::TokenStreamBuilder; use cairo_lang_macro::{AllocationContext, TextSpan, TokenStream, TokenTree}; use cairo_lang_parser::utils::SimpleParserDatabase; use indoc::indoc; From 5cd9ff9485e52a62824405fae8ab29725ed6f980 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maciej=20Tr=C4=85tnowiecki?= Date: Thu, 13 Mar 2025 20:25:17 +0100 Subject: [PATCH 29/38] Proc macro: restore v1 macro compiler impl --- .../plugin/proc_macro/v1/compilation.rs | 347 +++++ .../src/compiler/plugin/proc_macro/v1/ffi.rs | 365 +++++ .../src/compiler/plugin/proc_macro/v1/host.rs | 1204 +++++++++++++++++ .../src/compiler/plugin/proc_macro/v1/mod.rs | 6 + 4 files changed, 1922 insertions(+) create mode 100644 scarb/src/compiler/plugin/proc_macro/v1/compilation.rs create mode 100644 scarb/src/compiler/plugin/proc_macro/v1/ffi.rs create mode 100644 scarb/src/compiler/plugin/proc_macro/v1/host.rs diff --git a/scarb/src/compiler/plugin/proc_macro/v1/compilation.rs b/scarb/src/compiler/plugin/proc_macro/v1/compilation.rs new file mode 100644 index 000000000..4ab259af8 --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/v1/compilation.rs @@ -0,0 +1,347 @@ +use crate::CARGO_MANIFEST_FILE_NAME; +use crate::compiler::ProcMacroCompilationUnit; +use crate::core::{Config, Package, Workspace}; +use crate::flock::Filesystem; +use crate::internal::fsx; +use crate::ops::PackageOpts; +use crate::process::exec_piping; +use anyhow::{Context, Result, anyhow}; +use camino::Utf8PathBuf; +use cargo_metadata::MetadataCommand; +use flate2::read::GzDecoder; +use indoc::formatdoc; +use libloading::library_filename; +use ra_ap_toolchain::Tool; +use scarb_ui::{Message, OutputFormat}; +use serde::{Serialize, Serializer}; +use serde_json::value::RawValue; +use std::env::consts::DLL_SUFFIX; +use std::fmt::Display; +use std::fs; +use std::io::{Seek, SeekFrom}; +use std::ops::Deref; +use std::process::Command; +use tar::Archive; +use target_triple::target; +use tracing::trace_span; + +pub const PROC_MACRO_BUILD_PROFILE: &str = "release"; + +/// This trait is used to define the shared library path for a package. +pub trait SharedLibraryProvider { + /// Location of Cargo `target` directory. + fn target_path(&self, config: &Config) -> Filesystem; + /// Location of the shared library for the package. + fn shared_lib_path(&self, config: &Config) -> Result; + /// Location of the prebuilt binary for the package, if defined. 
+    fn prebuilt_lib_path(&self) -> Option<Utf8PathBuf>;
+}
+
+impl SharedLibraryProvider for Package {
+    fn target_path(&self, config: &Config) -> Filesystem {
+        let ident = format!("{}-{}", self.id.name, self.id.source_id.ident());
+        // Defines the Cargo target directory in cache, as:
+        // `/(..)/SCARB_CACHE/plugins/proc_macro/<package_name>-<source_id_ident>/v<version>/target/`
+        config
+            .dirs()
+            .procedural_macros_dir()
+            .into_child(ident)
+            .into_child(format!("v{}", self.id.version))
+            .into_child("target")
+    }
+
+    fn shared_lib_path(&self, config: &Config) -> Result<Utf8PathBuf> {
+        let lib_name =
+            get_cargo_library_name(self, config).context("could not resolve library name")?;
+        let lib_name = library_filename(lib_name);
+        let lib_name = lib_name
+            .into_string()
+            .expect("library name must be valid UTF-8");
+        // Defines the shared library path inside the target directory, as:
+        // `/(..)/target/release/[lib]<package_name>.[so|dll|dylib]`
+        Ok(self
+            .target_path(config)
+            .into_child(PROC_MACRO_BUILD_PROFILE)
+            .path_unchecked()
+            .join(lib_name))
+    }
+
+    fn prebuilt_lib_path(&self) -> Option<Utf8PathBuf> {
+        let target_triple = target!();
+
+        let prebuilt_name = format!(
+            "{name}_v{version}_{target}{suffix}",
+            name = self.id.name,
+            version = self.id.version,
+            target = target_triple,
+            suffix = DLL_SUFFIX
+        );
+
+        let prebuilt_path = self
+            .root()
+            .join("target")
+            .join("scarb")
+            .join("cairo-plugin")
+            .join(prebuilt_name);
+
+        prebuilt_path.exists().then_some(prebuilt_path)
+    }
+}
+
+pub fn compile_unit(unit: ProcMacroCompilationUnit, ws: &Workspace<'_>) -> Result<()> {
+    let package = unit.components.first().unwrap().package.clone();
+    run_cargo(CargoAction::Build, &package, ws)
+}
+
+pub fn check_unit(unit: ProcMacroCompilationUnit, ws: &Workspace<'_>) -> Result<()> {
+    let package = unit.components.first().unwrap().package.clone();
+    run_cargo(CargoAction::Check, &package, ws)
+}
+
+fn get_cargo_package_name(package: &Package) -> Result<String> {
+    let cargo_toml_path = package.root().join(CARGO_MANIFEST_FILE_NAME);
+
+    let cargo_toml: toml::Value = toml::from_str(
+        &fs::read_to_string(cargo_toml_path).context("could not read `Cargo.toml`")?,
+    )
+    .context("could not convert `Cargo.toml` to toml")?;
+
+    let package_section = cargo_toml
+        .get("package")
+        .ok_or_else(|| anyhow!("could not get `package` section from Cargo.toml"))?;
+
+    let package_name = package_section
+        .get("name")
+        .ok_or_else(|| anyhow!("could not get `name` field from Cargo.toml"))?
+        .as_str()
+        .ok_or_else(|| anyhow!("could not convert package name to string"))?;
+
+    Ok(package_name.to_string())
+}
+
+fn get_cargo_library_name(package: &Package, config: &Config) -> Result<String> {
+    let metadata = MetadataCommand::new()
+        .cargo_path(Tool::Cargo.path())
+        .current_dir(package.root())
+        .exec()
+        .context("could not get Cargo metadata")?;
+
+    let cargo_package_name = get_cargo_package_name(package)?;
+
+    if cargo_package_name != package.id.name.to_string() {
+        config.ui().warn(formatdoc!(
+            r#"
+            package name differs between Cargo and Scarb manifest
+            cargo: `{cargo_name}`, scarb: `{scarb_name}`
+            this might become an error in future Scarb releases
+            "#,
+            cargo_name = cargo_package_name,
+            scarb_name = package.id.name,
+        ));
+    }
+
+    let package = metadata
+        .packages
+        .iter()
+        .find(|pkg| pkg.name == cargo_package_name)
+        .ok_or_else(|| anyhow!("could not get `{cargo_package_name}` package from metadata"))?;
+
+    let cdylib_target = package
+        .targets
+        .iter()
+        .find(|target| target.kind.contains(&"cdylib".into()))
+        .ok_or_else(|| anyhow!("no target of `cdylib` kind found in package"))?;
+
+    Ok(cdylib_target.name.clone())
+}
+
+fn get_cargo_package_version(package: &Package) -> Result<String> {
+    let metadata = MetadataCommand::new()
+        .cargo_path(Tool::Cargo.path())
+        .current_dir(package.root())
+        .exec()
+        .context("could not get Cargo metadata")?;
+
+    let cargo_package_name = get_cargo_package_name(package)?;
+
+    let package = metadata
+        .packages
+        .iter()
+        .find(|pkg| pkg.name == cargo_package_name)
+        .ok_or_else(|| anyhow!("could not get `{cargo_package_name}` package from metadata"))?;
+
+    Ok(package.version.to_string())
+}
+
+pub fn get_crate_archive_basename(package: &Package) -> Result<String> {
+    let package_name = get_cargo_package_name(package)?;
+    let package_version = get_cargo_package_version(package)?;
+
+    Ok(format!("{}-{}", package_name, package_version))
+}
+
+pub fn unpack_crate(package: &Package, config: &Config) -> Result<()> {
+    let archive_basename = get_crate_archive_basename(package)?;
+    let archive_name = format!("{}.crate", archive_basename);
+
+    let tar = package
+        .target_path(config)
+        .into_child("package")
+        .open_ro_exclusive(&archive_name, &archive_name, config)?;
+
+    // The following implementation has been copied from the `Cargo` codebase with slight modifications only.
+    // The original implementation can be found here:
+    // https://github.com/rust-lang/cargo/blob/a4600184b8d6619ed0b5a0a19946dbbe97e1d739/src/cargo/ops/cargo_package.rs#L1110
+
+    tar.deref().seek(SeekFrom::Start(0))?;
+    let f = GzDecoder::new(tar.deref());
+    let dst = tar.parent().unwrap().join(&archive_basename);
+    if dst.exists() {
+        fsx::remove_dir_all(&dst)?;
+    }
+    let mut archive = Archive::new(f);
+    archive.set_preserve_mtime(false); // Don't set modified time to avoid filesystem errors
+    archive.unpack(dst.parent().unwrap())?;
+
+    Ok(())
+}
+
+pub fn fetch_crate(package: &Package, ws: &Workspace<'_>) -> Result<()> {
+    run_cargo(CargoAction::Fetch, package, ws)
+}
+
+pub fn package_crate(package: &Package, opts: &PackageOpts, ws: &Workspace<'_>) -> Result<()> {
+    run_cargo(CargoAction::Package(opts.clone()), package, ws)
+}
+
+fn run_cargo(action: CargoAction, package: &Package, ws: &Workspace<'_>) -> Result<()> {
+    let cmd = CargoCommand {
+        action,
+        current_dir: package.root().to_path_buf(),
+        output_format: ws.config().ui().output_format(),
+        target_dir: package
+            .target_path(ws.config())
+            .path_unchecked()
+            .to_path_buf(),
+        config: ws.config(),
+    };
+    let span = trace_span!("proc_macro");
+    {
+        let _guard = span.enter();
+        exec(&mut cmd.into(), ws.config())?;
+    }
+    Ok(())
+}
+
+#[derive(Clone)]
+enum CargoAction {
+    Build,
+    Check,
+    Fetch,
+    Package(PackageOpts),
+}
+
+struct CargoCommand<'c> {
+    current_dir: Utf8PathBuf,
+    target_dir: Utf8PathBuf,
+    output_format: OutputFormat,
+    action: CargoAction,
+    config: &'c Config,
+}
+
+enum CargoOutputFormat {
+    Human,
+    Json,
+}
+
+impl Display for CargoOutputFormat {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            CargoOutputFormat::Human => write!(f, "human"),
+            CargoOutputFormat::Json => write!(f, "json"),
+        }
+    }
+}
+
+impl From<OutputFormat> for CargoOutputFormat {
+    fn from(format: OutputFormat) -> Self {
+        match format {
+            OutputFormat::Text => CargoOutputFormat::Human,
+            OutputFormat::Json => CargoOutputFormat::Json,
+        }
+    }
+}
+
+impl<'c> From<CargoCommand<'c>> for Command {
+    fn from(args: CargoCommand<'c>) -> Self {
+        let mut cmd = Command::new(Tool::Cargo.path());
+        cmd.current_dir(args.current_dir);
+        match args.action {
+            CargoAction::Fetch => cmd.arg("fetch"),
+            CargoAction::Build => cmd.arg("build"),
+            CargoAction::Check => cmd.arg("check"),
+            CargoAction::Package(_) => cmd.arg("package"),
+        };
+        if args.config.offline() {
+            cmd.arg("--offline");
+        }
+        match args.action {
+            CargoAction::Fetch => (),
+            CargoAction::Package(ref opts) => {
+                cmd.arg("--target-dir");
+                cmd.arg(args.target_dir);
+                cmd.arg("--no-verify");
+                if !opts.check_metadata {
+                    cmd.arg("--no-metadata");
+                }
+                if opts.allow_dirty {
+                    cmd.arg("--allow-dirty");
+                }
+            }
+            _ => {
+                cmd.arg("--release");
+                cmd.arg("--message-format");
+                let output_format: CargoOutputFormat = args.output_format.into();
+                cmd.arg(output_format.to_string());
+                cmd.arg("--target-dir");
+                cmd.arg(args.target_dir);
+            }
+        }
+        cmd
+    }
+}
+
+fn exec(cmd: &mut Command, config: &Config) -> Result<()> {
+    exec_piping(
+        cmd,
+        config,
+        |line: &str| config.ui().print(PipedText::new(line)),
+        |line: &str| config.ui().print(PipedText::new(line)),
+    )
+}
+
+/// This message can be used for piped text from subprocesses.
+///
+/// It accepts either a string or a JSON string.
+/// If the input is a JSON string, it can be serialized as a structured message.
+/// Otherwise, the structured message will be skipped.
+pub struct PipedText(String); + +impl PipedText { + pub fn new(text: impl Into) -> Self { + Self(text.into()) + } +} + +impl Message for PipedText { + fn text(self) -> String { + self.0 + } + + fn structured(self, ser: S) -> Result { + match serde_json::from_str::<&RawValue>(self.0.as_str()) { + Ok(value) => value.serialize(ser), + Err(_e) => Self::skip_structured(ser), + } + } +} diff --git a/scarb/src/compiler/plugin/proc_macro/v1/ffi.rs b/scarb/src/compiler/plugin/proc_macro/v1/ffi.rs new file mode 100644 index 000000000..a969b6465 --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/v1/ffi.rs @@ -0,0 +1,365 @@ +use crate::core::{Package, PackageId}; +use anyhow::{Context, Result, ensure}; +use cairo_lang_defs::patcher::PatchBuilder; +use cairo_lang_macro_stable_v1::{ + StableExpansion, StableExpansionsList, StablePostProcessContext, StableProcMacroResult, + StableResultWrapper, StableTokenStream, +}; +use cairo_lang_macro_v1::{ + ExpansionKind as SharedExpansionKind, FullPathMarker, PostProcessContext, ProcMacroResult, + TokenStream, +}; +use cairo_lang_syntax::node::TypedSyntaxNode; +use cairo_lang_syntax::node::db::SyntaxGroup; +use camino::Utf8PathBuf; +use itertools::Itertools; +use libloading::{Library, Symbol}; +use std::ffi::{CStr, CString, c_char}; +use std::fmt::Debug; +use std::slice; + +use crate::compiler::plugin::proc_macro::v1::ProcMacroAuxData; +use crate::compiler::plugin::proc_macro::v1::compilation::SharedLibraryProvider; + +#[cfg(not(windows))] +use libloading::os::unix::Symbol as RawSymbol; +#[cfg(windows)] +use libloading::os::windows::Symbol as RawSymbol; +use smol_str::SmolStr; +use tracing::trace; + +pub trait FromSyntaxNode { + fn from_syntax_node(db: &dyn SyntaxGroup, node: &impl TypedSyntaxNode) -> Self; +} + +impl FromSyntaxNode for TokenStream { + fn from_syntax_node(db: &dyn SyntaxGroup, node: &impl TypedSyntaxNode) -> Self { + let mut builder = PatchBuilder::new(db, node); + builder.add_node(node.as_syntax_node()); + Self::new(builder.build().0) + } +} + +const EXEC_ATTR_PREFIX: &str = "__exec_attr_"; + +/// Representation of a single procedural macro. +/// +/// This struct is a wrapper around a shared library containing the procedural macro implementation. +/// It is responsible for loading the shared library and providing a safe interface for code expansion. +pub struct ProcMacroInstance { + package_id: PackageId, + plugin: Plugin, + expansions: Vec, +} + +impl Debug for ProcMacroInstance { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("ProcMacroInstance") + .field("package_id", &self.package_id) + .finish() + } +} + +impl ProcMacroInstance { + /// Load shared library + pub fn try_new(package_id: PackageId, lib_path: Utf8PathBuf) -> Result { + trace!("loading compiled macro for `{}` package", package_id); + let plugin = unsafe { Plugin::try_new(lib_path)? }; + Ok(Self { + expansions: unsafe { Self::load_expansions(&plugin, package_id)? }, + package_id, + plugin, + }) + } + + pub fn try_load_prebuilt(package: Package) -> Result { + trace!("loading prebuilt macro for `{}` package", package.id); + let prebuilt_path = package + .prebuilt_lib_path() + .context("could not resolve prebuilt library path")?; + let plugin = unsafe { Plugin::try_new(prebuilt_path)? }; + Ok(Self { + expansions: unsafe { Self::load_expansions(&plugin, package.id)? 
}, + package_id: package.id, + plugin, + }) + } + + unsafe fn load_expansions(plugin: &Plugin, package_id: PackageId) -> Result> { + // Make a call to the FFI interface to list declared expansions. + let stable_expansions = (plugin.vtable.list_expansions)(); + let (ptr, n) = stable_expansions.raw_parts(); + let expansions = unsafe { slice::from_raw_parts(ptr, n) }; + let mut expansions: Vec = expansions + .iter() + .map(|e| unsafe { Expansion::from_stable(e) }) + .collect(); + // Free the memory allocated by the `stable_expansions`. + (plugin.vtable.free_expansions_list)(stable_expansions); + // Validate expansions. + expansions.sort_unstable_by_key(|e| e.name.clone()); + ensure!( + expansions.windows(2).all(|w| w[0].name != w[1].name), + "duplicate expansions defined for procedural macro {package_id}: {duplicates}", + duplicates = expansions + .windows(2) + .filter(|w| w[0].name == w[1].name) + .map(|w| w[0].name.as_str()) + .collect::>() + .join(", ") + ); + Ok(expansions) + } + + pub fn get_expansions(&self) -> &[Expansion] { + &self.expansions + } + + pub fn package_id(&self) -> PackageId { + self.package_id + } + + pub fn declared_attributes_and_executables(&self) -> Vec { + self.get_expansions() + .iter() + .filter(|e| e.kind == ExpansionKind::Attr || e.kind == ExpansionKind::Executable) + .map(|e| e.name.clone()) + .map(Into::into) + .collect() + } + + pub fn declared_attributes(&self) -> Vec { + self.get_expansions() + .iter() + .filter(|e| e.kind == ExpansionKind::Attr) + .map(|e| e.name.clone()) + .map(Into::into) + .collect() + } + + pub fn declared_derives(&self) -> Vec { + self.get_expansions() + .iter() + .filter(|e| e.kind == ExpansionKind::Derive) + .map(|e| e.name.clone()) + .map(Into::into) + .collect() + } + + pub fn executable_attributes(&self) -> Vec { + self.get_expansions() + .iter() + .filter(|e| e.kind == ExpansionKind::Executable) + .map(|e| e.name.clone()) + .map(Into::into) + .collect() + } + + pub fn inline_macros(&self) -> Vec { + self.get_expansions() + .iter() + .filter(|e| e.kind == ExpansionKind::Inline) + .map(|e| e.name.clone()) + .map(Into::into) + .collect() + } + + /// Apply expansion to token stream. + /// + /// This function implements the actual calls to functions from the dynamic library. + /// + /// All values passing the FFI-barrier must implement a stable ABI. + /// + /// Please be aware that the memory management of values passing the FFI-barrier is tricky. + /// The memory must be freed on the same side of the barrier, where the allocation was made. + pub(crate) fn generate_code( + &self, + item_name: SmolStr, + attr: TokenStream, + token_stream: TokenStream, + ) -> ProcMacroResult { + // This must be manually freed with call to from_owned_stable. + let stable_token_stream = token_stream.into_stable(); + let stable_attr = attr.into_stable(); + // Allocate proc macro name. + let item_name = CString::new(item_name.to_string()).unwrap().into_raw(); + // Call FFI interface for code expansion. + // Note that `stable_result` has been allocated by the dynamic library. + let stable_result = + (self.plugin.vtable.expand)(item_name, stable_attr, stable_token_stream); + // Free proc macro name. + let _ = unsafe { CString::from_raw(item_name) }; + // Free the memory allocated by the `stable_token_stream`. + // This will call `CString::from_raw` under the hood, to take ownership. + unsafe { + TokenStream::from_owned_stable(stable_result.input); + TokenStream::from_owned_stable(stable_result.input_attr); + }; + // Create Rust representation of the result. 
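// Editor's note: an illustrative, self-contained sketch (not part of this patch) of the
// ownership rule described in the comments above — a string handed across the FFI
// barrier via `CString::into_raw` must be reclaimed with `CString::from_raw` on the
// side that allocated it, exactly once.
#[allow(dead_code)]
fn _editor_sketch_cstring_round_trip() {
    use std::ffi::{CStr, CString, c_char};
    // Allocate on this side and leak the buffer into a raw pointer for the callee.
    let raw: *mut c_char = CString::new("hello").unwrap().into_raw();
    // The other side of the barrier may only read through the pointer.
    let seen = unsafe { CStr::from_ptr(raw) }.to_string_lossy().into_owned();
    assert_eq!(seen, "hello");
    // Ownership must return here, so the same allocator frees the memory.
    let _reclaimed = unsafe { CString::from_raw(raw) };
}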
+ // Note, that the memory still needs to be freed on the allocator side! + let result = unsafe { ProcMacroResult::from_stable(&stable_result.output) }; + // Call FFI interface to free the `stable_result` that has been allocated by previous call. + (self.plugin.vtable.free_result)(stable_result.output); + // Return obtained result. + result + } + + pub(crate) fn post_process_callback( + &self, + aux_data: Vec, + full_path_markers: Vec, + ) { + // Create stable representation of the context. + let context = PostProcessContext { + aux_data: aux_data.into_iter().map(Into::into).collect_vec(), + full_path_markers, + } + .into_stable(); + // Actual call to FFI interface for aux data callback. + let context = (self.plugin.vtable.post_process_callback)(context); + // Free the allocated memory. + let _ = unsafe { PostProcessContext::from_owned_stable(context) }; + } + + pub fn doc(&self, item_name: SmolStr) -> Option { + // Allocate proc macro name. + let item_name = CString::new(item_name.to_string()).unwrap().into_raw(); + // Call FFI interface for expansion doc. + // Note that `stable_result` has been allocated by the dynamic library. + let stable_result = (self.plugin.vtable.doc)(item_name); + let doc = if stable_result.is_null() { + None + } else { + let cstr = unsafe { CStr::from_ptr(stable_result) }; + Some(cstr.to_string_lossy().to_string()) + }; + // Free proc macro name. + let _ = unsafe { CString::from_raw(item_name) }; + // Call FFI interface to free the `stable_result` that has been allocated by previous call. + (self.plugin.vtable.free_doc)(stable_result); + doc + } +} + +#[derive(Clone, Debug, Eq, PartialEq)] +pub enum ExpansionKind { + Attr, + Derive, + Inline, + Executable, +} + +impl From for ExpansionKind { + fn from(kind: SharedExpansionKind) -> Self { + match kind { + SharedExpansionKind::Attr => Self::Attr, + SharedExpansionKind::Derive => Self::Derive, + SharedExpansionKind::Inline => Self::Inline, + } + } +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct Expansion { + pub name: SmolStr, + pub kind: ExpansionKind, +} + +impl Expansion { + pub fn new(name: impl ToString, kind: ExpansionKind) -> Self { + Self { + name: SmolStr::new(name.to_string()), + kind, + } + } + + unsafe fn from_stable(stable_expansion: &StableExpansion) -> Self { + // Note this does not take ownership of underlying memory. + let name = if stable_expansion.name.is_null() { + String::default() + } else { + let cstr = unsafe { CStr::from_ptr(stable_expansion.name) }; + cstr.to_string_lossy().to_string() + }; + // Handle special case for executable attributes. 
+ if name.starts_with(EXEC_ATTR_PREFIX) { + let name = name.strip_prefix(EXEC_ATTR_PREFIX).unwrap(); + return Self { + name: SmolStr::new(name), + kind: ExpansionKind::Executable, + }; + } + Self { + name: SmolStr::new(name), + kind: unsafe { SharedExpansionKind::from_stable(&stable_expansion.kind).into() }, + } + } +} + +type ListExpansions = extern "C" fn() -> StableExpansionsList; +type FreeExpansionsList = extern "C" fn(StableExpansionsList); +type ExpandCode = + extern "C" fn(*const c_char, StableTokenStream, StableTokenStream) -> StableResultWrapper; +type FreeResult = extern "C" fn(StableProcMacroResult); +type PostProcessCallback = extern "C" fn(StablePostProcessContext) -> StablePostProcessContext; +type DocExpansion = extern "C" fn(*const c_char) -> *mut c_char; +type FreeExpansionDoc = extern "C" fn(*mut c_char); + +struct VTableV0 { + list_expansions: RawSymbol, + free_expansions_list: RawSymbol, + expand: RawSymbol, + free_result: RawSymbol, + post_process_callback: RawSymbol, + doc: RawSymbol, + free_doc: RawSymbol, +} + +macro_rules! get_symbol { + ($library:ident, $name:literal, $type:ty) => {{ + let symbol: Symbol<'_, $type> = $library.get($name).context(format!( + "failed to load {} symbol for procedural macro", + stringify!($name) + ))?; + symbol.into_raw() + }}; +} + +impl VTableV0 { + unsafe fn try_new(library: &Library) -> Result { + unsafe { + Ok(VTableV0 { + list_expansions: get_symbol!(library, b"list_expansions\0", ListExpansions), + free_expansions_list: get_symbol!( + library, + b"free_expansions_list\0", + FreeExpansionsList + ), + expand: get_symbol!(library, b"expand\0", ExpandCode), + free_result: get_symbol!(library, b"free_result\0", FreeResult), + post_process_callback: get_symbol!( + library, + b"post_process_callback\0", + PostProcessCallback + ), + doc: get_symbol!(library, b"doc\0", DocExpansion), + free_doc: get_symbol!(library, b"free_doc\0", FreeExpansionDoc), + }) + } + } +} + +struct Plugin { + #[allow(dead_code)] + library: Library, + vtable: VTableV0, +} + +impl Plugin { + unsafe fn try_new(library_path: Utf8PathBuf) -> Result { + let library = unsafe { Library::new(library_path)? }; + let vtable = unsafe { VTableV0::try_new(&library)? 
}; + + Ok(Plugin { library, vtable }) + } +} diff --git a/scarb/src/compiler/plugin/proc_macro/v1/host.rs b/scarb/src/compiler/plugin/proc_macro/v1/host.rs new file mode 100644 index 000000000..6433032c3 --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/v1/host.rs @@ -0,0 +1,1204 @@ +use crate::compiler::plugin::proc_macro::v1::compilation::SharedLibraryProvider; +use crate::compiler::plugin::proc_macro::v1::{ + Expansion, ExpansionKind, FromSyntaxNode, ProcMacroInstance, +}; +use crate::core::{Config, Package, PackageId}; +use anyhow::{Context, Result, bail, ensure}; +use cairo_lang_defs::ids::{ModuleItemId, TopLevelLanguageElementId}; +use cairo_lang_defs::patcher::{PatchBuilder, RewriteNode}; +use cairo_lang_defs::plugin::{ + DynGeneratedFileAuxData, GeneratedFileAuxData, MacroPlugin, MacroPluginMetadata, + PluginGeneratedFile, PluginResult, +}; +use cairo_lang_defs::plugin::{InlineMacroExprPlugin, InlinePluginResult, PluginDiagnostic}; +use cairo_lang_diagnostics::ToOption; +use cairo_lang_filesystem::ids::CodeMapping; +use cairo_lang_macro_v1::{ + AuxData, Diagnostic, FullPathMarker, ProcMacroResult, Severity, TokenStream, + TokenStreamMetadata, +}; +use cairo_lang_semantic::db::SemanticGroup; +use cairo_lang_semantic::items::attribute::SemanticQueryAttrs; +use cairo_lang_semantic::plugin::PluginSuite; +use cairo_lang_syntax::attribute::structured::{ + Attribute, AttributeArgVariant, AttributeStructurize, +}; +use cairo_lang_syntax::node::ast::{Expr, ImplItem, MaybeImplBody, MaybeTraitBody, PathSegment}; +use cairo_lang_syntax::node::db::SyntaxGroup; +use cairo_lang_syntax::node::helpers::QueryAttrs; +use cairo_lang_syntax::node::ids::SyntaxStablePtrId; +use cairo_lang_syntax::node::{Terminal, TypedStablePtr, TypedSyntaxNode, ast}; +use convert_case::{Case, Casing}; +use itertools::Itertools; +use scarb_stable_hash::short_hash; +use smol_str::SmolStr; +use std::any::Any; +use std::collections::{HashMap, HashSet}; +use std::fmt::Debug; +use std::sync::{Arc, OnceLock, RwLock}; +use std::vec::IntoIter; +use tracing::{debug, trace_span}; + +const FULL_PATH_MARKER_KEY: &str = "macro::full_path_marker"; +const DERIVE_ATTR: &str = "derive"; + +/// A Cairo compiler plugin controlling the procedural macro execution. +/// +/// This plugin decides which macro plugins (if any) should be applied to the processed AST item. +/// It then redirects the item to the appropriate macro plugin for code expansion. +#[derive(Debug)] +pub struct ProcMacroHostPlugin { + macros: Vec>, + full_path_markers: RwLock>>, +} + +impl ProcMacroHostPlugin { + pub fn macros(&self) -> &[Arc] { + &self.macros + } + + // NOTE: Required for proc macro server. `::declared_attributes` + // returns attributes **and** executables. In PMS, we only need the former because the latter is handled separately. 
+ pub fn declared_attributes_without_executables(&self) -> Vec { + self.macros + .iter() + .flat_map(|instance| instance.declared_attributes()) + .collect() + } + + pub fn declared_inline_macros(&self) -> Vec { + self.macros + .iter() + .flat_map(|instance| instance.inline_macros()) + .collect() + } +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct ProcMacroId { + pub package_id: PackageId, + pub expansion: Expansion, +} + +impl ProcMacroId { + pub fn new(package_id: PackageId, expansion: Expansion) -> Self { + Self { + package_id, + expansion, + } + } +} + +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct ProcMacroAuxData { + value: Vec, + macro_id: ProcMacroId, +} + +impl ProcMacroAuxData { + pub fn new(value: Vec, macro_id: ProcMacroId) -> Self { + Self { value, macro_id } + } +} + +impl From for AuxData { + fn from(data: ProcMacroAuxData) -> Self { + Self::new(data.value) + } +} + +#[derive(Debug, Clone, Default)] +pub struct EmittedAuxData(Vec); + +impl GeneratedFileAuxData for EmittedAuxData { + fn as_any(&self) -> &dyn Any { + self + } + + fn eq(&self, other: &dyn GeneratedFileAuxData) -> bool { + self.0 == other.as_any().downcast_ref::().unwrap().0 + } +} + +impl EmittedAuxData { + pub fn new(aux_data: ProcMacroAuxData) -> Self { + Self(vec![aux_data]) + } + + pub fn push(&mut self, aux_data: ProcMacroAuxData) { + self.0.push(aux_data); + } + + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } +} + +impl IntoIterator for EmittedAuxData { + type Item = ProcMacroAuxData; + type IntoIter = IntoIter; + + fn into_iter(self) -> IntoIter { + self.0.into_iter() + } +} + +impl ProcMacroHostPlugin { + pub fn try_new(macros: Vec>) -> Result { + // Validate expansions. + let mut expansions = macros + .iter() + .flat_map(|m| { + m.get_expansions() + .iter() + .map(|e| ProcMacroId::new(m.package_id(), e.clone())) + .collect_vec() + }) + .collect::>(); + expansions.sort_unstable_by_key(|e| (e.expansion.name.clone(), e.package_id)); + ensure!( + expansions + .windows(2) + .all(|w| w[0].expansion.name != w[1].expansion.name), + "duplicate expansions defined for procedural macros: {duplicates}", + duplicates = expansions + .windows(2) + .filter(|w| w[0].expansion.name == w[1].expansion.name) + .map(|w| format!( + "{} ({} and {})", + w[0].expansion.name.as_str(), + w[0].package_id, + w[1].package_id + )) + .collect::>() + .join(", ") + ); + Ok(Self { + macros, + full_path_markers: RwLock::new(Default::default()), + }) + } + + fn expand_inner_attr( + &self, + db: &dyn SyntaxGroup, + item_ast: ast::ModuleItem, + ) -> InnerAttrExpansionResult { + let mut context = InnerAttrExpansionContext::new(self); + let mut item_builder = PatchBuilder::new(db, &item_ast); + let mut used_attr_names: HashSet = Default::default(); + let mut all_none = true; + + match item_ast.clone() { + ast::ModuleItem::Trait(trait_ast) => { + item_builder.add_node(trait_ast.attributes(db).as_syntax_node()); + item_builder.add_node(trait_ast.visibility(db).as_syntax_node()); + item_builder.add_node(trait_ast.trait_kw(db).as_syntax_node()); + item_builder.add_node(trait_ast.name(db).as_syntax_node()); + item_builder.add_node(trait_ast.generic_params(db).as_syntax_node()); + + // Parser attributes for inner functions. 
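// Editor's note: an illustrative, self-contained sketch (not part of this patch) of the
// duplicate-name validation used in `try_new` above — sorting first makes equal names
// adjacent, so `windows(2)` can both detect and list the duplicates.
#[allow(dead_code)]
fn _editor_sketch_list_duplicates(mut names: Vec<&str>) -> Vec<&str> {
    names.sort_unstable();
    names
        .windows(2)
        .filter(|w| w[0] == w[1])
        .map(|w| w[0])
        .collect()
}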
+ match trait_ast.body(db) { + MaybeTraitBody::None(terminal) => { + item_builder.add_node(terminal.as_syntax_node()); + InnerAttrExpansionResult::None + } + MaybeTraitBody::Some(body) => { + item_builder.add_node(body.lbrace(db).as_syntax_node()); + + let item_list = body.items(db); + for item in item_list.elements(db).iter() { + let ast::TraitItem::Function(func) = item else { + item_builder.add_node(item.as_syntax_node()); + continue; + }; + + let mut func_builder = PatchBuilder::new(db, func); + let attrs = func.attributes(db).elements(db); + let found = self.parse_attrs(db, &mut func_builder, attrs, func); + if let Some(name) = found.as_name() { + used_attr_names.insert(name); + } + func_builder.add_node(func.declaration(db).as_syntax_node()); + func_builder.add_node(func.body(db).as_syntax_node()); + let token_stream = TokenStream::new(func_builder.build().0); + + all_none = all_none + && self.do_expand_inner_attr( + db, + &mut context, + &mut item_builder, + found, + func, + token_stream, + ); + } + + item_builder.add_node(body.rbrace(db).as_syntax_node()); + + if all_none { + InnerAttrExpansionResult::None + } else { + let (code, mappings) = item_builder.build(); + InnerAttrExpansionResult::Some(context.into_result( + code, + mappings, + used_attr_names.into_iter().collect(), + )) + } + } + } + } + + ast::ModuleItem::Impl(impl_ast) => { + item_builder.add_node(impl_ast.attributes(db).as_syntax_node()); + item_builder.add_node(impl_ast.visibility(db).as_syntax_node()); + item_builder.add_node(impl_ast.impl_kw(db).as_syntax_node()); + item_builder.add_node(impl_ast.name(db).as_syntax_node()); + item_builder.add_node(impl_ast.generic_params(db).as_syntax_node()); + item_builder.add_node(impl_ast.of_kw(db).as_syntax_node()); + item_builder.add_node(impl_ast.trait_path(db).as_syntax_node()); + + match impl_ast.body(db) { + MaybeImplBody::None(terminal) => { + item_builder.add_node(terminal.as_syntax_node()); + InnerAttrExpansionResult::None + } + MaybeImplBody::Some(body) => { + item_builder.add_node(body.lbrace(db).as_syntax_node()); + + let items = body.items(db); + for item in items.elements(db) { + let ImplItem::Function(func) = item else { + item_builder.add_node(item.as_syntax_node()); + continue; + }; + + let mut func_builder = PatchBuilder::new(db, &func); + let attrs = func.attributes(db).elements(db); + let found = self.parse_attrs(db, &mut func_builder, attrs, &func); + if let Some(name) = found.as_name() { + used_attr_names.insert(name); + } + func_builder.add_node(func.visibility(db).as_syntax_node()); + func_builder.add_node(func.declaration(db).as_syntax_node()); + func_builder.add_node(func.body(db).as_syntax_node()); + let token_stream = TokenStream::new(func_builder.build().0); + all_none = all_none + && self.do_expand_inner_attr( + db, + &mut context, + &mut item_builder, + found, + &func, + token_stream, + ); + } + + item_builder.add_node(body.rbrace(db).as_syntax_node()); + + if all_none { + InnerAttrExpansionResult::None + } else { + let (code, mappings) = item_builder.build(); + InnerAttrExpansionResult::Some(context.into_result( + code, + mappings, + used_attr_names.into_iter().collect(), + )) + } + } + } + } + _ => InnerAttrExpansionResult::None, + } + } + + fn do_expand_inner_attr( + &self, + db: &dyn SyntaxGroup, + context: &mut InnerAttrExpansionContext<'_>, + item_builder: &mut PatchBuilder<'_>, + found: AttrExpansionFound, + func: &impl TypedSyntaxNode, + token_stream: TokenStream, + ) -> bool { + let mut all_none = true; + let (input, args, stable_ptr) = 
match found { + AttrExpansionFound::Last { + expansion, + args, + stable_ptr, + } => { + all_none = false; + (expansion, args, stable_ptr) + } + AttrExpansionFound::Some { + expansion, + args, + stable_ptr, + } => { + all_none = false; + (expansion, args, stable_ptr) + } + AttrExpansionFound::None => { + item_builder.add_node(func.as_syntax_node()); + return all_none; + } + }; + + let result = self.instance(input.package_id).generate_code( + input.expansion.name.clone(), + args.clone(), + token_stream.clone(), + ); + + let expanded = context.register_result(token_stream.to_string(), input, result, stable_ptr); + item_builder.add_modified(RewriteNode::Mapped { + origin: func.as_syntax_node().span(db), + node: Box::new(RewriteNode::Text(expanded.to_string())), + }); + + all_none + } + + /// Find first attribute procedural macros that should be expanded. + /// + /// Remove the attribute from the code. + fn parse_attribute( + &self, + db: &dyn SyntaxGroup, + item_ast: ast::ModuleItem, + ) -> (AttrExpansionFound, TokenStream) { + let mut item_builder = PatchBuilder::new(db, &item_ast); + let input = match item_ast.clone() { + ast::ModuleItem::Trait(trait_ast) => { + let attrs = trait_ast.attributes(db).elements(db); + let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); + item_builder.add_node(trait_ast.visibility(db).as_syntax_node()); + item_builder.add_node(trait_ast.trait_kw(db).as_syntax_node()); + item_builder.add_node(trait_ast.name(db).as_syntax_node()); + item_builder.add_node(trait_ast.generic_params(db).as_syntax_node()); + item_builder.add_node(trait_ast.body(db).as_syntax_node()); + expansion + } + ast::ModuleItem::Impl(impl_ast) => { + let attrs = impl_ast.attributes(db).elements(db); + let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); + item_builder.add_node(impl_ast.visibility(db).as_syntax_node()); + item_builder.add_node(impl_ast.impl_kw(db).as_syntax_node()); + item_builder.add_node(impl_ast.name(db).as_syntax_node()); + item_builder.add_node(impl_ast.generic_params(db).as_syntax_node()); + item_builder.add_node(impl_ast.of_kw(db).as_syntax_node()); + item_builder.add_node(impl_ast.trait_path(db).as_syntax_node()); + item_builder.add_node(impl_ast.body(db).as_syntax_node()); + expansion + } + ast::ModuleItem::Module(module_ast) => { + let attrs = module_ast.attributes(db).elements(db); + let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); + item_builder.add_node(module_ast.visibility(db).as_syntax_node()); + item_builder.add_node(module_ast.module_kw(db).as_syntax_node()); + item_builder.add_node(module_ast.name(db).as_syntax_node()); + item_builder.add_node(module_ast.body(db).as_syntax_node()); + expansion + } + ast::ModuleItem::FreeFunction(free_func_ast) => { + let attrs = free_func_ast.attributes(db).elements(db); + let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); + item_builder.add_node(free_func_ast.visibility(db).as_syntax_node()); + item_builder.add_node(free_func_ast.declaration(db).as_syntax_node()); + item_builder.add_node(free_func_ast.body(db).as_syntax_node()); + expansion + } + ast::ModuleItem::ExternFunction(extern_func_ast) => { + let attrs = extern_func_ast.attributes(db).elements(db); + let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); + item_builder.add_node(extern_func_ast.visibility(db).as_syntax_node()); + item_builder.add_node(extern_func_ast.extern_kw(db).as_syntax_node()); + 
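// Editor's note: an illustrative, self-contained sketch (not part of this patch) of the
// behaviour described for `parse_attribute`/`parse_attrs`: the first attribute that maps
// to a declared `Attr` expansion is taken off the item to drive this round of expansion,
// while every other attribute is written back unchanged. The string-based attribute
// representation below is hypothetical.
#[allow(dead_code)]
fn _editor_sketch_take_first_macro_attr(
    attrs: Vec<&str>,
    is_macro_attr: impl Fn(&str) -> bool,
) -> (Option<String>, Vec<String>) {
    let mut taken = None;
    let mut kept = Vec::new();
    for attr in attrs {
        if taken.is_none() && is_macro_attr(attr) {
            // Removed from the rebuilt item; it selects the expansion to run.
            taken = Some(attr.to_string());
        } else {
            // Later macro attributes, executable attributes and unknown attributes stay on the item.
            kept.push(attr.to_string());
        }
    }
    (taken, kept)
}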
item_builder.add_node(extern_func_ast.declaration(db).as_syntax_node()); + item_builder.add_node(extern_func_ast.semicolon(db).as_syntax_node()); + expansion + } + ast::ModuleItem::ExternType(extern_type_ast) => { + let attrs = extern_type_ast.attributes(db).elements(db); + let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); + item_builder.add_node(extern_type_ast.visibility(db).as_syntax_node()); + item_builder.add_node(extern_type_ast.extern_kw(db).as_syntax_node()); + item_builder.add_node(extern_type_ast.type_kw(db).as_syntax_node()); + item_builder.add_node(extern_type_ast.name(db).as_syntax_node()); + item_builder.add_node(extern_type_ast.generic_params(db).as_syntax_node()); + item_builder.add_node(extern_type_ast.semicolon(db).as_syntax_node()); + expansion + } + ast::ModuleItem::Struct(struct_ast) => { + let attrs = struct_ast.attributes(db).elements(db); + let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); + item_builder.add_node(struct_ast.visibility(db).as_syntax_node()); + item_builder.add_node(struct_ast.struct_kw(db).as_syntax_node()); + item_builder.add_node(struct_ast.name(db).as_syntax_node()); + item_builder.add_node(struct_ast.generic_params(db).as_syntax_node()); + item_builder.add_node(struct_ast.lbrace(db).as_syntax_node()); + item_builder.add_node(struct_ast.members(db).as_syntax_node()); + item_builder.add_node(struct_ast.rbrace(db).as_syntax_node()); + expansion + } + ast::ModuleItem::Enum(enum_ast) => { + let attrs = enum_ast.attributes(db).elements(db); + let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); + item_builder.add_node(enum_ast.visibility(db).as_syntax_node()); + item_builder.add_node(enum_ast.enum_kw(db).as_syntax_node()); + item_builder.add_node(enum_ast.name(db).as_syntax_node()); + item_builder.add_node(enum_ast.generic_params(db).as_syntax_node()); + item_builder.add_node(enum_ast.lbrace(db).as_syntax_node()); + item_builder.add_node(enum_ast.variants(db).as_syntax_node()); + item_builder.add_node(enum_ast.rbrace(db).as_syntax_node()); + expansion + } + _ => AttrExpansionFound::None, + }; + let token_stream = TokenStream::new(item_builder.build().0); + (input, token_stream) + } + + fn parse_attrs( + &self, + db: &dyn SyntaxGroup, + builder: &mut PatchBuilder<'_>, + attrs: Vec, + origin: &impl TypedSyntaxNode, + ) -> AttrExpansionFound { + // This function parses attributes of the item, + // checking if those attributes correspond to a procedural macro that should be fired. + // The proc macro attribute found is removed from attributes list, + // while other attributes are appended to the `PathBuilder` passed as an argument. + + // Note this function does not affect the executable attributes, + // as it only pulls `ExpansionKind::Attr` from the plugin. + // This means that executable attributes will neither be removed from the item, + // nor will they cause the item to be rewritten. + let mut expansion = None; + let mut last = true; + for attr in attrs { + // We ensure that this flag is changed *after* the expansion is found. 
+ if last { + let structured_attr = attr.clone().structurize(db); + let found = self.find_expansion(&Expansion::new( + structured_attr.id.clone(), + ExpansionKind::Attr, + )); + if let Some(found) = found { + if expansion.is_none() { + let mut args_builder = PatchBuilder::new(db, origin); + args_builder.add_node(attr.arguments(db).as_syntax_node()); + let args = TokenStream::new(args_builder.build().0); + expansion = Some((found, args, attr.stable_ptr().untyped())); + // Do not add the attribute for found expansion. + continue; + } else { + last = false; + } + } + } + builder.add_node(attr.as_syntax_node()); + } + match (expansion, last) { + (Some((expansion, args, stable_ptr)), true) => AttrExpansionFound::Last { + expansion, + args, + stable_ptr, + }, + (Some((expansion, args, stable_ptr)), false) => AttrExpansionFound::Some { + expansion, + args, + stable_ptr, + }, + (None, _) => AttrExpansionFound::None, + } + } + + /// Handle `#[derive(...)]` attribute. + /// + /// Returns a list of expansions that this plugin should apply. + fn parse_derive(&self, db: &dyn SyntaxGroup, item_ast: ast::ModuleItem) -> Vec { + let attrs = match item_ast { + ast::ModuleItem::Struct(struct_ast) => Some(struct_ast.query_attr(db, DERIVE_ATTR)), + ast::ModuleItem::Enum(enum_ast) => Some(enum_ast.query_attr(db, DERIVE_ATTR)), + _ => None, + }; + + attrs + .unwrap_or_default() + .iter() + .map(|attr| attr.clone().structurize(db)) + .flat_map(|attr| attr.args.into_iter()) + .filter_map(|attr| { + let AttributeArgVariant::Unnamed(value) = attr.clone().variant else { + return None; + }; + let Expr::Path(path) = value else { + return None; + }; + let path = path.elements(db); + let path = path.last()?; + let PathSegment::Simple(segment) = path else { + return None; + }; + let ident = segment.ident(db); + let value = ident.text(db).to_string(); + + self.find_expansion(&Expansion::new( + value.to_case(Case::Snake), + ExpansionKind::Derive, + )) + }) + .collect_vec() + } + + fn expand_derives( + &self, + db: &dyn SyntaxGroup, + item_ast: ast::ModuleItem, + stream_metadata: TokenStreamMetadata, + ) -> Option { + let stable_ptr = item_ast.clone().stable_ptr().untyped(); + let token_stream = + TokenStream::from_syntax_node(db, &item_ast).with_metadata(stream_metadata.clone()); + + let mut aux_data = EmittedAuxData::default(); + let mut all_diagnostics: Vec = Vec::new(); + + // All derives to be applied. + let derives = self.parse_derive(db, item_ast.clone()); + let any_derives = !derives.is_empty(); + + let mut derived_code = PatchBuilder::new(db, &item_ast); + for derive in derives.iter() { + let result = self.instance(derive.package_id).generate_code( + derive.expansion.name.clone(), + TokenStream::empty(), + token_stream.clone(), + ); + + // Register diagnostics. + all_diagnostics.extend(result.diagnostics); + + // Register aux data. + if let Some(new_aux_data) = result.aux_data { + aux_data.push(ProcMacroAuxData::new( + new_aux_data.into(), + ProcMacroId::new(derive.package_id, derive.expansion.clone()), + )); + } + + if result.token_stream.is_empty() { + // No code has been generated. + // We do not need to do anything. 
+ continue; + } + + derived_code.add_str(result.token_stream.to_string().as_str()); + } + + if any_derives { + let derived_code = derived_code.build().0; + return Some(PluginResult { + code: if derived_code.is_empty() { + None + } else { + let msg = if derives.len() == 1 { + "the derive macro" + } else { + "one of the derive macros" + }; + let derive_names = derives + .iter() + .map(|derive| derive.expansion.name.to_string()) + .join("`, `"); + let note = format!("this error originates in {msg}: `{derive_names}`"); + Some(PluginGeneratedFile { + name: "proc_macro_derive".into(), + code_mappings: Vec::new(), + content: derived_code, + aux_data: if aux_data.is_empty() { + None + } else { + Some(DynGeneratedFileAuxData::new(aux_data)) + }, + diagnostics_note: Some(note), + }) + }, + diagnostics: into_cairo_diagnostics(all_diagnostics, stable_ptr), + // Note that we don't remove the original item here, unlike for attributes. + // We do not add the original code to the generated file either. + remove_original_item: false, + }); + } + + None + } + + fn expand_attribute( + &self, + input: ProcMacroId, + last: bool, + args: TokenStream, + token_stream: TokenStream, + stable_ptr: SyntaxStablePtrId, + ) -> PluginResult { + let result = self.instance(input.package_id).generate_code( + input.expansion.name.clone(), + args.clone(), + token_stream.clone(), + ); + + // Handle token stream. + if result.token_stream.is_empty() { + // Remove original code + return PluginResult { + diagnostics: into_cairo_diagnostics(result.diagnostics, stable_ptr), + code: None, + remove_original_item: true, + }; + } + + // Full path markers require code modification. + self.register_full_path_markers(input.package_id, result.full_path_markers.clone()); + + // This is a minor optimization. + // If the expanded macro attribute is the only one that will be expanded by `ProcMacroHost` + // in this `generate_code` call (i.e. all the other macro attributes has been expanded by + // previous calls), and the expansion did not produce any changes, we can skip rewriting the + // expanded node by simply returning no generated code, and leaving the original item as is. + // However, if we have other macro attributes to expand, we must rewrite the node even if no + // changes have been produced, so that we can parse the attributes once again and expand them. + // In essence, `code: None, remove_original_item: false` means `ProcMacroHost` will not be + // called again for this AST item. + // This optimization limits the number of generated nodes a bit. 
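// Editor's note: an illustrative sketch (not part of this patch) distilling the
// optimization described above into a single predicate — the rewrite may only be
// skipped when this was the last pending macro attribute, no aux data was emitted,
// and the expansion returned byte-identical code.
#[allow(dead_code)]
fn _editor_sketch_can_skip_rewrite(last_attr: bool, emitted_aux_data: bool, changed: bool) -> bool {
    last_attr && !emitted_aux_data && !changed
}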
+ if last + && result.aux_data.is_none() + && token_stream.to_string() == result.token_stream.to_string() + { + return PluginResult { + code: None, + remove_original_item: false, + diagnostics: into_cairo_diagnostics(result.diagnostics, stable_ptr), + }; + } + + let file_name = format!("proc_{}", input.expansion.name); + let content = result.token_stream.to_string(); + PluginResult { + code: Some(PluginGeneratedFile { + name: file_name.into(), + code_mappings: Vec::new(), + content, + diagnostics_note: Some(format!( + "this error originates in the attribute macro: `{}`", + input.expansion.name + )), + aux_data: result.aux_data.map(|new_aux_data| { + DynGeneratedFileAuxData::new(EmittedAuxData::new(ProcMacroAuxData::new( + new_aux_data.into(), + input, + ))) + }), + }), + diagnostics: into_cairo_diagnostics(result.diagnostics, stable_ptr), + remove_original_item: true, + } + } + + fn find_expansion(&self, expansion: &Expansion) -> Option { + self.macros + .iter() + .find(|m| m.get_expansions().contains(expansion)) + .map(|m| m.package_id()) + .map(|package_id| ProcMacroId::new(package_id, expansion.clone())) + } + + pub fn build_plugin_suite(macro_host: Arc) -> PluginSuite { + let mut suite = PluginSuite::default(); + // Register inline macro plugins. + for proc_macro in ¯o_host.macros { + let expansions = proc_macro + .get_expansions() + .iter() + .filter(|exp| matches!(exp.kind, ExpansionKind::Inline)); + for expansion in expansions { + let plugin = Arc::new(ProcMacroInlinePlugin::new( + proc_macro.clone(), + expansion.clone(), + )); + suite.add_inline_macro_plugin_ex(expansion.name.as_str(), plugin); + } + } + // Register procedural macro host plugin. + suite.add_plugin_ex(macro_host); + suite + } + + #[tracing::instrument(level = "trace", skip_all)] + pub fn post_process(&self, db: &dyn SemanticGroup) -> Result<()> { + let markers = self.collect_full_path_markers(db); + + let aux_data = self.collect_aux_data(db); + for instance in self.macros.iter() { + let _ = trace_span!( + "post_process_callback", + instance = %instance.package_id() + ) + .entered(); + let instance_markers = self + .full_path_markers + .read() + .unwrap() + .get(&instance.package_id()) + .cloned() + .unwrap_or_default(); + let markers_for_instance = markers + .iter() + .filter(|(key, _)| instance_markers.contains(key)) + .map(|(key, full_path)| FullPathMarker { + key: key.clone(), + full_path: full_path.clone(), + }) + .collect_vec(); + let data = aux_data + .get(&instance.package_id()) + .cloned() + .unwrap_or_default(); + debug!("calling post processing callback with: {data:?}"); + instance.post_process_callback(data.clone(), markers_for_instance); + } + Ok(()) + } + + fn collect_full_path_markers(&self, db: &dyn SemanticGroup) -> HashMap { + let mut markers: HashMap = HashMap::new(); + // FULL_PATH_MARKER_KEY + for crate_id in db.crates() { + let modules = db.crate_modules(crate_id); + for module_id in modules.iter() { + let Ok(module_items) = db.module_items(*module_id) else { + continue; + }; + for item_id in module_items.iter() { + let attr = match item_id { + ModuleItemId::Struct(id) => { + id.query_attr(db, FULL_PATH_MARKER_KEY).to_option() + } + ModuleItemId::Enum(id) => { + id.query_attr(db, FULL_PATH_MARKER_KEY).to_option() + } + ModuleItemId::FreeFunction(id) => { + id.query_attr(db, FULL_PATH_MARKER_KEY).to_option() + } + _ => None, + }; + + let keys = attr + .unwrap_or_default() + .into_iter() + .filter_map(|attr| Self::extract_key(db, attr)) + .collect_vec(); + let full_path = 
item_id.full_path(db.upcast()); + for key in keys { + markers.insert(key, full_path.clone()); + } + } + } + } + markers + } + + fn extract_key(db: &dyn SemanticGroup, attr: Attribute) -> Option { + if attr.id != FULL_PATH_MARKER_KEY { + return None; + } + + for arg in attr.args.clone() { + if let AttributeArgVariant::Unnamed(Expr::String(s)) = arg.variant { + return s.string_value(db.upcast()); + } + } + + None + } + + fn collect_aux_data( + &self, + db: &dyn SemanticGroup, + ) -> HashMap> { + let mut data = Vec::new(); + for crate_id in db.crates() { + let crate_modules = db.crate_modules(crate_id); + for module in crate_modules.iter() { + let file_infos = db.module_generated_file_aux_data(*module); + if let Ok(file_infos) = file_infos { + for file_info in file_infos.iter() { + let aux_data = file_info + .as_ref() + .and_then(|ad| ad.as_any().downcast_ref::()); + if let Some(aux_data) = aux_data { + data.extend(aux_data.clone().into_iter()); + } + } + } + } + } + data.into_iter() + .into_group_map_by(|d| d.macro_id.package_id) + } + + pub fn instance(&self, package_id: PackageId) -> &ProcMacroInstance { + self.macros + .iter() + .find(|m| m.package_id() == package_id) + .expect("procedural macro must be registered in proc macro host") + } + + fn register_full_path_markers(&self, package_id: PackageId, markers: Vec) { + self.full_path_markers + .write() + .unwrap() + .entry(package_id) + .and_modify(|markers| markers.extend(markers.clone())) + .or_insert(markers); + } + + fn calculate_metadata(db: &dyn SyntaxGroup, item_ast: ast::ModuleItem) -> TokenStreamMetadata { + let stable_ptr = item_ast.clone().stable_ptr().untyped(); + let file_path = stable_ptr.file_id(db).full_path(db.upcast()); + let file_id = short_hash(file_path.clone()); + TokenStreamMetadata::new(file_path, file_id) + } +} + +struct InnerAttrExpansionContext<'a> { + host: &'a ProcMacroHostPlugin, + // Metadata returned for expansions. 
+ diagnostics: Vec, + aux_data: EmittedAuxData, + any_changed: bool, +} + +impl<'a> InnerAttrExpansionContext<'a> { + pub fn new<'b: 'a>(host: &'b ProcMacroHostPlugin) -> Self { + Self { + diagnostics: Vec::new(), + aux_data: EmittedAuxData::default(), + any_changed: false, + host, + } + } + + pub fn register_result( + &mut self, + original: String, + input: ProcMacroId, + result: ProcMacroResult, + stable_ptr: SyntaxStablePtrId, + ) -> String { + let expanded = result.token_stream.to_string(); + let changed = expanded.as_str() != original; + + if changed { + self.host + .register_full_path_markers(input.package_id, result.full_path_markers.clone()); + } + + self.diagnostics + .extend(into_cairo_diagnostics(result.diagnostics, stable_ptr)); + + if let Some(new_aux_data) = result.aux_data { + self.aux_data + .push(ProcMacroAuxData::new(new_aux_data.into(), input)); + } + + self.any_changed = self.any_changed || changed; + + expanded + } + pub fn into_result( + self, + expanded: String, + code_mappings: Vec, + attr_names: Vec, + ) -> PluginResult { + let msg = if attr_names.len() == 1 { + "the attribute macro" + } else { + "one of the attribute macros" + }; + let derive_names = attr_names.iter().map(ToString::to_string).join("`, `"); + let note = format!("this error originates in {msg}: `{derive_names}`"); + PluginResult { + code: Some(PluginGeneratedFile { + name: "proc_attr_inner".into(), + content: expanded, + aux_data: if self.aux_data.is_empty() { + None + } else { + Some(DynGeneratedFileAuxData::new(self.aux_data)) + }, + code_mappings, + diagnostics_note: Some(note), + }), + diagnostics: self.diagnostics, + remove_original_item: true, + } + } +} + +enum InnerAttrExpansionResult { + None, + Some(PluginResult), +} + +impl MacroPlugin for ProcMacroHostPlugin { + fn generate_code( + &self, + db: &dyn SyntaxGroup, + item_ast: ast::ModuleItem, + _metadata: &MacroPluginMetadata<'_>, + ) -> PluginResult { + let stream_metadata = Self::calculate_metadata(db, item_ast.clone()); + + // Handle inner functions. + if let InnerAttrExpansionResult::Some(result) = self.expand_inner_attr(db, item_ast.clone()) + { + return result; + } + + // Expand first attribute. + // Note that we only expand the first attribute, as we assume that the rest of the attributes + // will be handled by a subsequent call to this function. + let (input, body) = self.parse_attribute(db, item_ast.clone()); + + if let Some(result) = match input { + AttrExpansionFound::Last { + expansion, + args, + stable_ptr, + } => Some((expansion, args, stable_ptr, true)), + AttrExpansionFound::Some { + expansion, + args, + stable_ptr, + } => Some((expansion, args, stable_ptr, false)), + AttrExpansionFound::None => None, + } + .map(|(expansion, args, stable_ptr, last)| { + let token_stream = body.with_metadata(stream_metadata.clone()); + self.expand_attribute(expansion, last, args, token_stream, stable_ptr) + }) { + return result; + } + + // Expand all derives. + // Note that all proc macro attributes should be already expanded at this point. + if let Some(result) = self.expand_derives(db, item_ast.clone(), stream_metadata.clone()) { + return result; + } + + // No expansions can be applied. 
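// Editor's note: an illustrative, self-contained sketch (not part of this patch) of the
// overall driver behaviour described above — each `generate_code` call expands at most
// one macro attribute and rewrites the item, and the compiler keeps re-running the
// plugin on the rewritten item until no known attribute remains. The names and the
// string-based item model below are hypothetical.
#[allow(dead_code)]
fn _editor_sketch_expand_until_done(mut attrs: Vec<&str>, mut body: String) -> String {
    while let Some(attr) = attrs.first().copied() {
        // One pass: strip the first attribute, expand, leave the rest for the next pass.
        attrs.remove(0);
        body = format!("/* expanded by {attr} */ {body}");
    }
    body
}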
+ PluginResult { + code: None, + diagnostics: Vec::new(), + remove_original_item: false, + } + } + + fn declared_attributes(&self) -> Vec { + self.macros + .iter() + .flat_map(|m| m.declared_attributes_and_executables()) + .chain(vec![FULL_PATH_MARKER_KEY.to_string()]) + .collect() + } + + fn declared_derives(&self) -> Vec { + self.macros + .iter() + .flat_map(|m| m.declared_derives()) + .map(|s| s.to_case(Case::UpperCamel)) + .collect() + } + + fn executable_attributes(&self) -> Vec { + self.macros + .iter() + .flat_map(|m| m.executable_attributes()) + .collect() + } +} + +enum AttrExpansionFound { + Some { + expansion: ProcMacroId, + args: TokenStream, + stable_ptr: SyntaxStablePtrId, + }, + None, + Last { + expansion: ProcMacroId, + args: TokenStream, + stable_ptr: SyntaxStablePtrId, + }, +} +impl AttrExpansionFound { + pub fn as_name(&self) -> Option { + match self { + AttrExpansionFound::Some { expansion, .. } + | AttrExpansionFound::Last { expansion, .. } => Some(expansion.expansion.name.clone()), + AttrExpansionFound::None => None, + } + } +} + +/// A Cairo compiler inline macro plugin controlling the inline procedural macro execution. +/// +/// This plugin represents a single expansion capable of handling inline procedural macros. +/// The plugin triggers code expansion in a corresponding procedural macro instance. +#[derive(Debug)] +pub struct ProcMacroInlinePlugin { + instance: Arc, + expansion: Expansion, + doc: OnceLock>, +} + +impl ProcMacroInlinePlugin { + pub fn new(instance: Arc, expansion: Expansion) -> Self { + assert!(instance.get_expansions().contains(&expansion)); + Self { + instance, + expansion, + doc: Default::default(), + } + } + + pub fn name(&self) -> &str { + self.expansion.name.as_str() + } + + fn instance(&self) -> &ProcMacroInstance { + &self.instance + } +} + +impl InlineMacroExprPlugin for ProcMacroInlinePlugin { + fn generate_code( + &self, + db: &dyn SyntaxGroup, + syntax: &ast::ExprInlineMacro, + _metadata: &MacroPluginMetadata<'_>, + ) -> InlinePluginResult { + let stable_ptr = syntax.clone().stable_ptr().untyped(); + let arguments = syntax.arguments(db); + let token_stream = TokenStream::from_syntax_node(db, &arguments); + let result = self.instance().generate_code( + self.expansion.name.clone(), + TokenStream::empty(), + token_stream, + ); + // Handle diagnostics. 
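// Editor's note: an illustrative, self-contained sketch (not part of this patch) of the
// caching strategy behind the `doc` field (a `OnceLock`) declared on
// `ProcMacroInlinePlugin` above — the first `documentation()` call performs the FFI
// lookup, and later calls reuse the stored value.
#[allow(dead_code)]
fn _editor_sketch_oncelock_doc_cache() {
    use std::sync::OnceLock;
    let cache: OnceLock<String> = OnceLock::new();
    let mut lookups = 0;
    let first = cache
        .get_or_init(|| {
            lookups += 1;
            "expensive doc lookup".to_string()
        })
        .clone();
    let second = cache
        .get_or_init(|| {
            lookups += 1;
            "never recomputed".to_string()
        })
        .clone();
    assert_eq!(first, second);
    assert_eq!(lookups, 1);
}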
+ let diagnostics = into_cairo_diagnostics(result.diagnostics, stable_ptr); + let token_stream = result.token_stream.clone(); + if token_stream.is_empty() { + // Remove original code + InlinePluginResult { + code: None, + diagnostics, + } + } else { + // Replace + let aux_data = result.aux_data.map(|aux_data| { + let aux_data = ProcMacroAuxData::new( + aux_data.into(), + ProcMacroId::new(self.instance.package_id(), self.expansion.clone()), + ); + let mut emitted = EmittedAuxData::default(); + emitted.push(aux_data); + DynGeneratedFileAuxData::new(emitted) + }); + let content = token_stream.to_string(); + InlinePluginResult { + code: Some(PluginGeneratedFile { + name: "inline_proc_macro".into(), + code_mappings: Vec::new(), + content, + aux_data, + diagnostics_note: Some(format!( + "this error originates in the inline macro: `{}`", + self.expansion.name + )), + }), + diagnostics, + } + } + } + + fn documentation(&self) -> Option { + self.doc + .get_or_init(|| self.instance().doc(self.expansion.name.clone())) + .clone() + } +} + +fn into_cairo_diagnostics( + diagnostics: Vec, + stable_ptr: SyntaxStablePtrId, +) -> Vec { + diagnostics + .into_iter() + .map(|diag| PluginDiagnostic { + stable_ptr, + message: diag.message, + severity: match diag.severity { + Severity::Error => cairo_lang_diagnostics::Severity::Error, + Severity::Warning => cairo_lang_diagnostics::Severity::Warning, + }, + }) + .collect_vec() +} + +/// A global storage for dynamically-loaded procedural macros. +/// Loads dynamic shared libraries and hides them beside [`ProcMacroInstance`]. +/// Guarantees that every library is loaded exactly once, +/// but does not prevent loading multiple versions of the same library. +#[derive(Default)] +pub struct ProcMacroRepository { + /// A mapping between the [`PackageId`] of the package which defines the plugin + /// and the [`ProcMacroInstance`] holding the underlying shared library. + macros: RwLock>>, +} + +impl ProcMacroRepository { + /// Returns the [`ProcMacroInstance`] representing the procedural macros defined in the [`Package`]. + /// Loads the underlying shared library if it has not been loaded yet. 
+ pub fn get_or_load(&self, package: Package, config: &Config) -> Result<Arc<ProcMacroInstance>> {
+ let Ok(macros) = self.macros.read() else {
+ bail!("could not get a read access to the ProcMacroRepository");
+ };
+
+ if let Some(instance) = macros.get(&package.id) {
+ return Ok(instance.clone());
+ }
+
+ drop(macros);
+
+ let Ok(mut macros) = self.macros.write() else {
+ bail!("could not get a write access to the ProcMacroRepository");
+ };
+
+ let lib_path = package
+ .shared_lib_path(config)
+ .context("could not resolve shared library path")?;
+
+ let instance = Arc::new(ProcMacroInstance::try_new(package.id, lib_path)?);
+ macros.insert(package.id, instance.clone());
+
+ Ok(instance)
+ }
+}
diff --git a/scarb/src/compiler/plugin/proc_macro/v1/mod.rs b/scarb/src/compiler/plugin/proc_macro/v1/mod.rs
index 8b1378917..888c012fc 100644
--- a/scarb/src/compiler/plugin/proc_macro/v1/mod.rs
+++ b/scarb/src/compiler/plugin/proc_macro/v1/mod.rs
@@ -1 +1,7 @@
+pub mod compilation;
+mod ffi;
+mod host;
+pub use compilation::{check_unit, compile_unit, fetch_crate};
+pub use ffi::*;
+pub use host::*;

From 09b67fb54f4a0f5945ac6714b2d5235982ea5a31 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Maciej=20Tr=C4=85tnowiecki?=
Date: Thu, 13 Mar 2025 20:32:30 +0100
Subject: [PATCH 30/38] Proc macro: read api version from toml file

---
 scarb/src/compiler/plugin/mod.rs | 3 +++
 .../compiler/plugin/proc_macro/instance.rs | 27 +++++++++++++++++++
 scarb/src/compiler/plugin/proc_macro/mod.rs | 2 ++
 scarb/src/core/manifest/toml_manifest.rs | 1 +
 .../core/publishing/manifest_normalization.rs | 1 +
 5 files changed, 34 insertions(+)
 create mode 100644 scarb/src/compiler/plugin/proc_macro/instance.rs

diff --git a/scarb/src/compiler/plugin/mod.rs b/scarb/src/compiler/plugin/mod.rs
index ca7debfae..efb5bd2a5 100644
--- a/scarb/src/compiler/plugin/mod.rs
+++ b/scarb/src/compiler/plugin/mod.rs
@@ -9,6 +9,7 @@ use itertools::Itertools;
 use serde::{Deserialize, Serialize};

 use crate::compiler::plugin::builtin::BuiltinCairoRunPlugin;
+use crate::compiler::plugin::proc_macro::ProcMacroApiVersion;
 use crate::compiler::plugin::proc_macro::compilation::SharedLibraryProvider;
 use crate::core::{Package, PackageId, TargetKind, Workspace};

@@ -25,6 +26,8 @@ pub struct CairoPluginProps {
 /// Mark this macro plugin as builtin.
 /// Builtin plugins are assumed to be available in `CairoPluginRepository` for the whole Scarb execution.
 pub builtin: bool,
+ /// Version of the API used by the plugin.
+ pub api: ProcMacroApiVersion, } pub fn fetch_cairo_plugin(package: &Package, ws: &Workspace<'_>) -> Result<()> { diff --git a/scarb/src/compiler/plugin/proc_macro/instance.rs b/scarb/src/compiler/plugin/proc_macro/instance.rs new file mode 100644 index 000000000..5571e9fa4 --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/instance.rs @@ -0,0 +1,27 @@ +use crate::compiler::plugin::CairoPluginProps; +use crate::core::{Package, TargetKind}; +use anyhow::Result; +use serde::{Deserialize, Serialize}; + +#[derive( + Debug, Clone, Copy, Serialize, Deserialize, Default, PartialEq, Eq, PartialOrd, Ord, Hash, +)] +#[serde(rename_all = "lowercase")] +pub enum ProcMacroApiVersion { + #[default] + V1, + V2, +} + +pub trait ProcMacroApiVersionReader { + fn api_version(&self) -> Result; +} + +impl ProcMacroApiVersionReader for Package { + fn api_version(&self) -> Result { + assert!(self.is_cairo_plugin()); + let target = self.fetch_target(&TargetKind::CAIRO_PLUGIN)?; + let props: CairoPluginProps = target.props()?; + Ok(props.api) + } +} diff --git a/scarb/src/compiler/plugin/proc_macro/mod.rs b/scarb/src/compiler/plugin/proc_macro/mod.rs index f19b618f9..415253463 100644 --- a/scarb/src/compiler/plugin/proc_macro/mod.rs +++ b/scarb/src/compiler/plugin/proc_macro/mod.rs @@ -1,4 +1,6 @@ +mod instance; pub mod v1; pub mod v2; +pub use instance::*; pub use v2::*; diff --git a/scarb/src/core/manifest/toml_manifest.rs b/scarb/src/core/manifest/toml_manifest.rs index 34bbb3077..ac32469c0 100644 --- a/scarb/src/core/manifest/toml_manifest.rs +++ b/scarb/src/core/manifest/toml_manifest.rs @@ -311,6 +311,7 @@ pub struct TomlExecutableTargetParams { #[serde(rename_all = "kebab-case")] pub struct TomlCairoPluginTargetParams { pub builtin: Option, + pub api: Option, } pub type TomlExternalTargetParams = BTreeMap; diff --git a/scarb/src/core/publishing/manifest_normalization.rs b/scarb/src/core/publishing/manifest_normalization.rs index 86b864648..9fe963901 100644 --- a/scarb/src/core/publishing/manifest_normalization.rs +++ b/scarb/src/core/publishing/manifest_normalization.rs @@ -160,6 +160,7 @@ fn generate_cairo_plugin(pkg: &Package) -> Option Date: Thu, 13 Mar 2025 20:43:57 +0100 Subject: [PATCH 31/38] Proc macro: move common struct to top-level proc macro module --- scarb/src/compiler/plugin/mod.rs | 2 +- .../plugin/proc_macro/{v1 => }/compilation.rs | 105 +----- scarb/src/compiler/plugin/proc_macro/mod.rs | 6 + .../plugin/proc_macro/{v2 => }/repository.rs | 2 +- .../proc_macro/shared_library_provider.rs | 110 ++++++ .../src/compiler/plugin/proc_macro/v1/ffi.rs | 2 +- .../src/compiler/plugin/proc_macro/v1/host.rs | 45 +-- .../src/compiler/plugin/proc_macro/v1/mod.rs | 2 - .../plugin/proc_macro/v2/compilation.rs | 347 ------------------ .../src/compiler/plugin/proc_macro/v2/ffi.rs | 2 +- .../src/compiler/plugin/proc_macro/v2/mod.rs | 4 - scarb/src/ops/package.rs | 3 +- scarb/src/ops/resolve.rs | 2 +- 13 files changed, 127 insertions(+), 505 deletions(-) rename scarb/src/compiler/plugin/proc_macro/{v1 => }/compilation.rs (67%) rename scarb/src/compiler/plugin/proc_macro/{v2 => }/repository.rs (95%) create mode 100644 scarb/src/compiler/plugin/proc_macro/shared_library_provider.rs delete mode 100644 scarb/src/compiler/plugin/proc_macro/v2/compilation.rs diff --git a/scarb/src/compiler/plugin/mod.rs b/scarb/src/compiler/plugin/mod.rs index efb5bd2a5..d53ae3d35 100644 --- a/scarb/src/compiler/plugin/mod.rs +++ b/scarb/src/compiler/plugin/mod.rs @@ -10,7 +10,7 @@ use serde::{Deserialize, Serialize}; use 
crate::compiler::plugin::builtin::BuiltinCairoRunPlugin; use crate::compiler::plugin::proc_macro::ProcMacroApiVersion; -use crate::compiler::plugin::proc_macro::compilation::SharedLibraryProvider; +use crate::compiler::plugin::proc_macro::SharedLibraryProvider; use crate::core::{Package, PackageId, TargetKind, Workspace}; use self::builtin::{BuiltinStarknetPlugin, BuiltinTestPlugin}; diff --git a/scarb/src/compiler/plugin/proc_macro/v1/compilation.rs b/scarb/src/compiler/plugin/proc_macro/compilation.rs similarity index 67% rename from scarb/src/compiler/plugin/proc_macro/v1/compilation.rs rename to scarb/src/compiler/plugin/proc_macro/compilation.rs index 4ab259af8..6f08fb1e0 100644 --- a/scarb/src/compiler/plugin/proc_macro/v1/compilation.rs +++ b/scarb/src/compiler/plugin/proc_macro/compilation.rs @@ -1,7 +1,7 @@ use crate::CARGO_MANIFEST_FILE_NAME; use crate::compiler::ProcMacroCompilationUnit; +use crate::compiler::plugin::proc_macro::SharedLibraryProvider; use crate::core::{Config, Package, Workspace}; -use crate::flock::Filesystem; use crate::internal::fsx; use crate::ops::PackageOpts; use crate::process::exec_piping; @@ -9,85 +9,20 @@ use anyhow::{Context, Result, anyhow}; use camino::Utf8PathBuf; use cargo_metadata::MetadataCommand; use flate2::read::GzDecoder; -use indoc::formatdoc; -use libloading::library_filename; use ra_ap_toolchain::Tool; use scarb_ui::{Message, OutputFormat}; use serde::{Serialize, Serializer}; use serde_json::value::RawValue; -use std::env::consts::DLL_SUFFIX; use std::fmt::Display; use std::fs; use std::io::{Seek, SeekFrom}; use std::ops::Deref; use std::process::Command; use tar::Archive; -use target_triple::target; use tracing::trace_span; pub const PROC_MACRO_BUILD_PROFILE: &str = "release"; -/// This trait is used to define the shared library path for a package. -pub trait SharedLibraryProvider { - /// Location of Cargo `target` directory. - fn target_path(&self, config: &Config) -> Filesystem; - /// Location of the shared library for the package. - fn shared_lib_path(&self, config: &Config) -> Result; - /// Location of the prebuilt binary for the package, if defined. 
- fn prebuilt_lib_path(&self) -> Option; -} - -impl SharedLibraryProvider for Package { - fn target_path(&self, config: &Config) -> Filesystem { - let ident = format!("{}-{}", self.id.name, self.id.source_id.ident()); - // Defines the Cargo target directory in cache, as: - // `/(..)/SCARB_CACHE/plugins/proc_macro/-/v/target/` - config - .dirs() - .procedural_macros_dir() - .into_child(ident) - .into_child(format!("v{}", self.id.version)) - .into_child("target") - } - - fn shared_lib_path(&self, config: &Config) -> Result { - let lib_name = - get_cargo_library_name(self, config).context("could not resolve library name")?; - let lib_name = library_filename(lib_name); - let lib_name = lib_name - .into_string() - .expect("library name must be valid UTF-8"); - // Defines the shared library path inside the target directory, as: - // `/(..)/target/release/[lib].[so|dll|dylib]` - Ok(self - .target_path(config) - .into_child(PROC_MACRO_BUILD_PROFILE) - .path_unchecked() - .join(lib_name)) - } - - fn prebuilt_lib_path(&self) -> Option { - let target_triple = target!(); - - let prebuilt_name = format!( - "{name}_v{version}_{target}{suffix}", - name = self.id.name, - version = self.id.version, - target = target_triple, - suffix = DLL_SUFFIX - ); - - let prebuilt_path = self - .root() - .join("target") - .join("scarb") - .join("cairo-plugin") - .join(prebuilt_name); - - prebuilt_path.exists().then_some(prebuilt_path) - } -} - pub fn compile_unit(unit: ProcMacroCompilationUnit, ws: &Workspace<'_>) -> Result<()> { let package = unit.components.first().unwrap().package.clone(); run_cargo(CargoAction::Build, &package, ws) @@ -98,7 +33,7 @@ pub fn check_unit(unit: ProcMacroCompilationUnit, ws: &Workspace<'_>) -> Result< run_cargo(CargoAction::Check, &package, ws) } -fn get_cargo_package_name(package: &Package) -> Result { +pub fn get_cargo_package_name(package: &Package) -> Result { let cargo_toml_path = package.root().join(CARGO_MANIFEST_FILE_NAME); let cargo_toml: toml::Value = toml::from_str( @@ -119,42 +54,6 @@ fn get_cargo_package_name(package: &Package) -> Result { Ok(package_name.to_string()) } -fn get_cargo_library_name(package: &Package, config: &Config) -> Result { - let metadata = MetadataCommand::new() - .cargo_path(Tool::Cargo.path()) - .current_dir(package.root()) - .exec() - .context("could not get Cargo metadata")?; - - let cargo_package_name = get_cargo_package_name(package)?; - - if cargo_package_name != package.id.name.to_string() { - config.ui().warn(formatdoc!( - r#" - package name differs between Cargo and Scarb manifest - cargo: `{cargo_name}`, scarb: `{scarb_name}` - this might become an error in future Scarb releases - "#, - cargo_name = cargo_package_name, - scarb_name = package.id.name, - )); - } - - let package = metadata - .packages - .iter() - .find(|pkg| pkg.name == cargo_package_name) - .ok_or_else(|| anyhow!("could not get `{cargo_package_name}` package from metadata"))?; - - let cdylib_target = package - .targets - .iter() - .find(|target| target.kind.contains(&"cdylib".into())) - .ok_or_else(|| anyhow!("no target of `cdylib` kind found in package"))?; - - Ok(cdylib_target.name.clone()) -} - fn get_cargo_package_version(package: &Package) -> Result { let metadata = MetadataCommand::new() .cargo_path(Tool::Cargo.path()) diff --git a/scarb/src/compiler/plugin/proc_macro/mod.rs b/scarb/src/compiler/plugin/proc_macro/mod.rs index 415253463..2b92b5216 100644 --- a/scarb/src/compiler/plugin/proc_macro/mod.rs +++ b/scarb/src/compiler/plugin/proc_macro/mod.rs @@ -1,6 +1,12 @@ +pub 
mod compilation; mod instance; +mod repository; +mod shared_library_provider; pub mod v1; pub mod v2; +pub use compilation::{check_unit, compile_unit, fetch_crate}; pub use instance::*; +pub use repository::*; +pub use shared_library_provider::SharedLibraryProvider; pub use v2::*; diff --git a/scarb/src/compiler/plugin/proc_macro/v2/repository.rs b/scarb/src/compiler/plugin/proc_macro/repository.rs similarity index 95% rename from scarb/src/compiler/plugin/proc_macro/v2/repository.rs rename to scarb/src/compiler/plugin/proc_macro/repository.rs index 5c3a2069d..62eea57e1 100644 --- a/scarb/src/compiler/plugin/proc_macro/v2/repository.rs +++ b/scarb/src/compiler/plugin/proc_macro/repository.rs @@ -1,5 +1,5 @@ +use crate::compiler::plugin::proc_macro::SharedLibraryProvider; use crate::compiler::plugin::proc_macro::v2::ProcMacroInstance; -use crate::compiler::plugin::proc_macro::v2::compilation::SharedLibraryProvider; use crate::core::{Config, Package, PackageId}; use anyhow::{Context, Result, bail}; use std::collections::HashMap; diff --git a/scarb/src/compiler/plugin/proc_macro/shared_library_provider.rs b/scarb/src/compiler/plugin/proc_macro/shared_library_provider.rs new file mode 100644 index 000000000..31beac2e5 --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/shared_library_provider.rs @@ -0,0 +1,110 @@ +use crate::compiler::plugin::proc_macro::compilation::{ + PROC_MACRO_BUILD_PROFILE, get_cargo_package_name, +}; +use crate::core::{Config, Package}; +use crate::flock::Filesystem; +use anyhow::{Context, anyhow}; +use camino::Utf8PathBuf; +use cargo_metadata::MetadataCommand; +use indoc::formatdoc; +use libloading::library_filename; +use ra_ap_toolchain::Tool; +use std::env::consts::DLL_SUFFIX; +use target_triple::target; + +/// This trait is used to define the shared library path for a package. +pub trait SharedLibraryProvider { + /// Location of Cargo `target` directory. + fn target_path(&self, config: &Config) -> Filesystem; + /// Location of the shared library for the package. + fn shared_lib_path(&self, config: &Config) -> anyhow::Result; + /// Location of the prebuilt binary for the package, if defined. 
+ fn prebuilt_lib_path(&self) -> Option; +} + +impl SharedLibraryProvider for Package { + fn target_path(&self, config: &Config) -> Filesystem { + let ident = format!("{}-{}", self.id.name, self.id.source_id.ident()); + // Defines the Cargo target directory in cache, as: + // `/(..)/SCARB_CACHE/plugins/proc_macro/-/v/target/` + config + .dirs() + .procedural_macros_dir() + .into_child(ident) + .into_child(format!("v{}", self.id.version)) + .into_child("target") + } + + fn shared_lib_path(&self, config: &Config) -> anyhow::Result { + let lib_name = + get_cargo_library_name(self, config).context("could not resolve library name")?; + let lib_name = library_filename(lib_name); + let lib_name = lib_name + .into_string() + .expect("library name must be valid UTF-8"); + // Defines the shared library path inside the target directory, as: + // `/(..)/target/release/[lib].[so|dll|dylib]` + Ok(self + .target_path(config) + .into_child(PROC_MACRO_BUILD_PROFILE) + .path_unchecked() + .join(lib_name)) + } + + fn prebuilt_lib_path(&self) -> Option { + let target_triple = target!(); + + let prebuilt_name = format!( + "{name}_v{version}_{target}{suffix}", + name = self.id.name, + version = self.id.version, + target = target_triple, + suffix = DLL_SUFFIX + ); + + let prebuilt_path = self + .root() + .join("target") + .join("scarb") + .join("cairo-plugin") + .join(prebuilt_name); + + prebuilt_path.exists().then_some(prebuilt_path) + } +} + +pub fn get_cargo_library_name(package: &Package, config: &Config) -> anyhow::Result { + let metadata = MetadataCommand::new() + .cargo_path(Tool::Cargo.path()) + .current_dir(package.root()) + .exec() + .context("could not get Cargo metadata")?; + + let cargo_package_name = get_cargo_package_name(package)?; + + if cargo_package_name != package.id.name.to_string() { + config.ui().warn(formatdoc!( + r#" + package name differs between Cargo and Scarb manifest + cargo: `{cargo_name}`, scarb: `{scarb_name}` + this might become an error in future Scarb releases + "#, + cargo_name = cargo_package_name, + scarb_name = package.id.name, + )); + } + + let package = metadata + .packages + .iter() + .find(|pkg| pkg.name == cargo_package_name) + .ok_or_else(|| anyhow!("could not get `{cargo_package_name}` package from metadata"))?; + + let cdylib_target = package + .targets + .iter() + .find(|target| target.kind.contains(&"cdylib".into())) + .ok_or_else(|| anyhow!("no target of `cdylib` kind found in package"))?; + + Ok(cdylib_target.name.clone()) +} diff --git a/scarb/src/compiler/plugin/proc_macro/v1/ffi.rs b/scarb/src/compiler/plugin/proc_macro/v1/ffi.rs index a969b6465..d407372c7 100644 --- a/scarb/src/compiler/plugin/proc_macro/v1/ffi.rs +++ b/scarb/src/compiler/plugin/proc_macro/v1/ffi.rs @@ -18,8 +18,8 @@ use std::ffi::{CStr, CString, c_char}; use std::fmt::Debug; use std::slice; +use crate::compiler::plugin::proc_macro::SharedLibraryProvider; use crate::compiler::plugin::proc_macro::v1::ProcMacroAuxData; -use crate::compiler::plugin::proc_macro::v1::compilation::SharedLibraryProvider; #[cfg(not(windows))] use libloading::os::unix::Symbol as RawSymbol; diff --git a/scarb/src/compiler/plugin/proc_macro/v1/host.rs b/scarb/src/compiler/plugin/proc_macro/v1/host.rs index 6433032c3..38f3e21ee 100644 --- a/scarb/src/compiler/plugin/proc_macro/v1/host.rs +++ b/scarb/src/compiler/plugin/proc_macro/v1/host.rs @@ -1,9 +1,8 @@ -use crate::compiler::plugin::proc_macro::v1::compilation::SharedLibraryProvider; use crate::compiler::plugin::proc_macro::v1::{ Expansion, ExpansionKind, 
FromSyntaxNode, ProcMacroInstance, }; -use crate::core::{Config, Package, PackageId}; -use anyhow::{Context, Result, bail, ensure}; +use crate::core::PackageId; +use anyhow::{Result, ensure}; use cairo_lang_defs::ids::{ModuleItemId, TopLevelLanguageElementId}; use cairo_lang_defs::patcher::{PatchBuilder, RewriteNode}; use cairo_lang_defs::plugin::{ @@ -1162,43 +1161,3 @@ fn into_cairo_diagnostics( }) .collect_vec() } - -/// A global storage for dynamically-loaded procedural macros. -/// Loads dynamic shared libraries and hides them beside [`ProcMacroInstance`]. -/// Guarantees that every library is loaded exactly once, -/// but does not prevent loading multiple versions of the same library. -#[derive(Default)] -pub struct ProcMacroRepository { - /// A mapping between the [`PackageId`] of the package which defines the plugin - /// and the [`ProcMacroInstance`] holding the underlying shared library. - macros: RwLock>>, -} - -impl ProcMacroRepository { - /// Returns the [`ProcMacroInstance`] representing the procedural macros defined in the [`Package`]. - /// Loads the underlying shared library if it has not been loaded yet. - pub fn get_or_load(&self, package: Package, config: &Config) -> Result> { - let Ok(macros) = self.macros.read() else { - bail!("could not get a read access to the ProcMacroRepository"); - }; - - if let Some(instance) = macros.get(&package.id) { - return Ok(instance.clone()); - } - - drop(macros); - - let Ok(mut macros) = self.macros.write() else { - bail!("could not get a write access to the ProcMacroRepository"); - }; - - let lib_path = package - .shared_lib_path(config) - .context("could not resolve shared library path")?; - - let instance = Arc::new(ProcMacroInstance::try_new(package.id, lib_path)?); - macros.insert(package.id, instance.clone()); - - Ok(instance) - } -} diff --git a/scarb/src/compiler/plugin/proc_macro/v1/mod.rs b/scarb/src/compiler/plugin/proc_macro/v1/mod.rs index 888c012fc..7e563afe8 100644 --- a/scarb/src/compiler/plugin/proc_macro/v1/mod.rs +++ b/scarb/src/compiler/plugin/proc_macro/v1/mod.rs @@ -1,7 +1,5 @@ -pub mod compilation; mod ffi; mod host; -pub use compilation::{check_unit, compile_unit, fetch_crate}; pub use ffi::*; pub use host::*; diff --git a/scarb/src/compiler/plugin/proc_macro/v2/compilation.rs b/scarb/src/compiler/plugin/proc_macro/v2/compilation.rs deleted file mode 100644 index 4ab259af8..000000000 --- a/scarb/src/compiler/plugin/proc_macro/v2/compilation.rs +++ /dev/null @@ -1,347 +0,0 @@ -use crate::CARGO_MANIFEST_FILE_NAME; -use crate::compiler::ProcMacroCompilationUnit; -use crate::core::{Config, Package, Workspace}; -use crate::flock::Filesystem; -use crate::internal::fsx; -use crate::ops::PackageOpts; -use crate::process::exec_piping; -use anyhow::{Context, Result, anyhow}; -use camino::Utf8PathBuf; -use cargo_metadata::MetadataCommand; -use flate2::read::GzDecoder; -use indoc::formatdoc; -use libloading::library_filename; -use ra_ap_toolchain::Tool; -use scarb_ui::{Message, OutputFormat}; -use serde::{Serialize, Serializer}; -use serde_json::value::RawValue; -use std::env::consts::DLL_SUFFIX; -use std::fmt::Display; -use std::fs; -use std::io::{Seek, SeekFrom}; -use std::ops::Deref; -use std::process::Command; -use tar::Archive; -use target_triple::target; -use tracing::trace_span; - -pub const PROC_MACRO_BUILD_PROFILE: &str = "release"; - -/// This trait is used to define the shared library path for a package. -pub trait SharedLibraryProvider { - /// Location of Cargo `target` directory. 
- fn target_path(&self, config: &Config) -> Filesystem; - /// Location of the shared library for the package. - fn shared_lib_path(&self, config: &Config) -> Result; - /// Location of the prebuilt binary for the package, if defined. - fn prebuilt_lib_path(&self) -> Option; -} - -impl SharedLibraryProvider for Package { - fn target_path(&self, config: &Config) -> Filesystem { - let ident = format!("{}-{}", self.id.name, self.id.source_id.ident()); - // Defines the Cargo target directory in cache, as: - // `/(..)/SCARB_CACHE/plugins/proc_macro/-/v/target/` - config - .dirs() - .procedural_macros_dir() - .into_child(ident) - .into_child(format!("v{}", self.id.version)) - .into_child("target") - } - - fn shared_lib_path(&self, config: &Config) -> Result { - let lib_name = - get_cargo_library_name(self, config).context("could not resolve library name")?; - let lib_name = library_filename(lib_name); - let lib_name = lib_name - .into_string() - .expect("library name must be valid UTF-8"); - // Defines the shared library path inside the target directory, as: - // `/(..)/target/release/[lib].[so|dll|dylib]` - Ok(self - .target_path(config) - .into_child(PROC_MACRO_BUILD_PROFILE) - .path_unchecked() - .join(lib_name)) - } - - fn prebuilt_lib_path(&self) -> Option { - let target_triple = target!(); - - let prebuilt_name = format!( - "{name}_v{version}_{target}{suffix}", - name = self.id.name, - version = self.id.version, - target = target_triple, - suffix = DLL_SUFFIX - ); - - let prebuilt_path = self - .root() - .join("target") - .join("scarb") - .join("cairo-plugin") - .join(prebuilt_name); - - prebuilt_path.exists().then_some(prebuilt_path) - } -} - -pub fn compile_unit(unit: ProcMacroCompilationUnit, ws: &Workspace<'_>) -> Result<()> { - let package = unit.components.first().unwrap().package.clone(); - run_cargo(CargoAction::Build, &package, ws) -} - -pub fn check_unit(unit: ProcMacroCompilationUnit, ws: &Workspace<'_>) -> Result<()> { - let package = unit.components.first().unwrap().package.clone(); - run_cargo(CargoAction::Check, &package, ws) -} - -fn get_cargo_package_name(package: &Package) -> Result { - let cargo_toml_path = package.root().join(CARGO_MANIFEST_FILE_NAME); - - let cargo_toml: toml::Value = toml::from_str( - &fs::read_to_string(cargo_toml_path).context("could not read `Cargo.toml`")?, - ) - .context("could not convert `Cargo.toml` to toml")?; - - let package_section = cargo_toml - .get("package") - .ok_or_else(|| anyhow!("could not get `package` section from Cargo.toml"))?; - - let package_name = package_section - .get("name") - .ok_or_else(|| anyhow!("could not get `name` field from Cargo.toml"))? 
- .as_str() - .ok_or_else(|| anyhow!("could not convert package name to string"))?; - - Ok(package_name.to_string()) -} - -fn get_cargo_library_name(package: &Package, config: &Config) -> Result { - let metadata = MetadataCommand::new() - .cargo_path(Tool::Cargo.path()) - .current_dir(package.root()) - .exec() - .context("could not get Cargo metadata")?; - - let cargo_package_name = get_cargo_package_name(package)?; - - if cargo_package_name != package.id.name.to_string() { - config.ui().warn(formatdoc!( - r#" - package name differs between Cargo and Scarb manifest - cargo: `{cargo_name}`, scarb: `{scarb_name}` - this might become an error in future Scarb releases - "#, - cargo_name = cargo_package_name, - scarb_name = package.id.name, - )); - } - - let package = metadata - .packages - .iter() - .find(|pkg| pkg.name == cargo_package_name) - .ok_or_else(|| anyhow!("could not get `{cargo_package_name}` package from metadata"))?; - - let cdylib_target = package - .targets - .iter() - .find(|target| target.kind.contains(&"cdylib".into())) - .ok_or_else(|| anyhow!("no target of `cdylib` kind found in package"))?; - - Ok(cdylib_target.name.clone()) -} - -fn get_cargo_package_version(package: &Package) -> Result { - let metadata = MetadataCommand::new() - .cargo_path(Tool::Cargo.path()) - .current_dir(package.root()) - .exec() - .context("could not get Cargo metadata")?; - - let cargo_package_name = get_cargo_package_name(package)?; - - let package = metadata - .packages - .iter() - .find(|pkg| pkg.name == cargo_package_name) - .ok_or_else(|| anyhow!("could not get `{cargo_package_name}` package from metadata"))?; - - Ok(package.version.to_string()) -} - -pub fn get_crate_archive_basename(package: &Package) -> Result { - let package_name = get_cargo_package_name(package)?; - let package_version = get_cargo_package_version(package)?; - - Ok(format!("{}-{}", package_name, package_version)) -} - -pub fn unpack_crate(package: &Package, config: &Config) -> Result<()> { - let archive_basename = get_crate_archive_basename(package)?; - let archive_name = format!("{}.crate", archive_basename); - - let tar = package - .target_path(config) - .into_child("package") - .open_ro_exclusive(&archive_name, &archive_name, config)?; - - // The following implementation has been copied from the `Cargo` codebase with slight modifications only. 
- // The original implementation can be found here: - // https://github.com/rust-lang/cargo/blob/a4600184b8d6619ed0b5a0a19946dbbe97e1d739/src/cargo/ops/cargo_package.rs#L1110 - - tar.deref().seek(SeekFrom::Start(0))?; - let f = GzDecoder::new(tar.deref()); - let dst = tar.parent().unwrap().join(&archive_basename); - if dst.exists() { - fsx::remove_dir_all(&dst)?; - } - let mut archive = Archive::new(f); - archive.set_preserve_mtime(false); // Don't set modified time to avoid filesystem errors - archive.unpack(dst.parent().unwrap())?; - - Ok(()) -} - -pub fn fetch_crate(package: &Package, ws: &Workspace<'_>) -> Result<()> { - run_cargo(CargoAction::Fetch, package, ws) -} - -pub fn package_crate(package: &Package, opts: &PackageOpts, ws: &Workspace<'_>) -> Result<()> { - run_cargo(CargoAction::Package(opts.clone()), package, ws) -} - -fn run_cargo(action: CargoAction, package: &Package, ws: &Workspace<'_>) -> Result<()> { - let cmd = CargoCommand { - action, - current_dir: package.root().to_path_buf(), - output_format: ws.config().ui().output_format(), - target_dir: package - .target_path(ws.config()) - .path_unchecked() - .to_path_buf(), - config: ws.config(), - }; - let span = trace_span!("proc_macro"); - { - let _guard = span.enter(); - exec(&mut cmd.into(), ws.config())?; - } - Ok(()) -} - -#[derive(Clone)] -enum CargoAction { - Build, - Check, - Fetch, - Package(PackageOpts), -} - -struct CargoCommand<'c> { - current_dir: Utf8PathBuf, - target_dir: Utf8PathBuf, - output_format: OutputFormat, - action: CargoAction, - config: &'c Config, -} - -enum CargoOutputFormat { - Human, - Json, -} - -impl Display for CargoOutputFormat { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - CargoOutputFormat::Human => write!(f, "human"), - CargoOutputFormat::Json => write!(f, "json"), - } - } -} - -impl From for CargoOutputFormat { - fn from(format: OutputFormat) -> Self { - match format { - OutputFormat::Text => CargoOutputFormat::Human, - OutputFormat::Json => CargoOutputFormat::Json, - } - } -} - -impl<'c> From> for Command { - fn from(args: CargoCommand<'c>) -> Self { - let mut cmd = Command::new(Tool::Cargo.path()); - cmd.current_dir(args.current_dir); - match args.action { - CargoAction::Fetch => cmd.arg("fetch"), - CargoAction::Build => cmd.arg("build"), - CargoAction::Check => cmd.arg("check"), - CargoAction::Package(_) => cmd.arg("package"), - }; - if args.config.offline() { - cmd.arg("--offline"); - } - match args.action { - CargoAction::Fetch => (), - CargoAction::Package(ref opts) => { - cmd.arg("--target-dir"); - cmd.arg(args.target_dir); - cmd.arg("--no-verify"); - if !opts.check_metadata { - cmd.arg("--no-metadata"); - } - if opts.allow_dirty { - cmd.arg("--allow-dirty"); - } - } - _ => { - cmd.arg("--release"); - cmd.arg("--message-format"); - let output_format: CargoOutputFormat = args.output_format.into(); - cmd.arg(output_format.to_string()); - cmd.arg("--target-dir"); - cmd.arg(args.target_dir); - } - } - cmd - } -} - -fn exec(cmd: &mut Command, config: &Config) -> Result<()> { - exec_piping( - cmd, - config, - |line: &str| config.ui().print(PipedText::new(line)), - |line: &str| config.ui().print(PipedText::new(line)), - ) -} - -/// This message can be used for piped text from subprocesses. -/// -/// It accepts either a string or a JSON string. -/// If the input is a JSON string, it can be serialized as a structured message. -/// Otherwise, the structured message will be skipped. 
-pub struct PipedText(String); - -impl PipedText { - pub fn new(text: impl Into) -> Self { - Self(text.into()) - } -} - -impl Message for PipedText { - fn text(self) -> String { - self.0 - } - - fn structured(self, ser: S) -> Result { - match serde_json::from_str::<&RawValue>(self.0.as_str()) { - Ok(value) => value.serialize(ser), - Err(_e) => Self::skip_structured(ser), - } - } -} diff --git a/scarb/src/compiler/plugin/proc_macro/v2/ffi.rs b/scarb/src/compiler/plugin/proc_macro/v2/ffi.rs index 80e4fc0d4..1bb40d524 100644 --- a/scarb/src/compiler/plugin/proc_macro/v2/ffi.rs +++ b/scarb/src/compiler/plugin/proc_macro/v2/ffi.rs @@ -1,4 +1,4 @@ -use crate::compiler::plugin::proc_macro::v2::compilation::SharedLibraryProvider; +use crate::compiler::plugin::proc_macro::SharedLibraryProvider; use crate::core::{Package, PackageId}; use anyhow::{Context, Result, ensure}; use cairo_lang_macro::{ diff --git a/scarb/src/compiler/plugin/proc_macro/v2/mod.rs b/scarb/src/compiler/plugin/proc_macro/v2/mod.rs index 9b4e375a4..5a5bd3ce2 100644 --- a/scarb/src/compiler/plugin/proc_macro/v2/mod.rs +++ b/scarb/src/compiler/plugin/proc_macro/v2/mod.rs @@ -1,11 +1,7 @@ -pub mod compilation; mod ffi; mod host; -mod repository; mod types; -pub use compilation::{check_unit, compile_unit, fetch_crate}; pub use ffi::*; pub use host::*; -pub use repository::*; pub use types::*; diff --git a/scarb/src/ops/package.rs b/scarb/src/ops/package.rs index a2cf4f6af..8fb4ffd04 100644 --- a/scarb/src/ops/package.rs +++ b/scarb/src/ops/package.rs @@ -13,8 +13,9 @@ use scarb_ui::components::Status; use scarb_ui::{HumanBytes, HumanCount}; use serde::Serialize; +use crate::compiler::plugin::proc_macro::SharedLibraryProvider; use crate::compiler::plugin::proc_macro::compilation::{ - SharedLibraryProvider, get_crate_archive_basename, package_crate, unpack_crate, + get_crate_archive_basename, package_crate, unpack_crate, }; use crate::core::publishing::manifest_normalization::prepare_manifest_for_publish; use crate::core::publishing::source::list_source_files; diff --git a/scarb/src/ops/resolve.rs b/scarb/src/ops/resolve.rs index 587c5bd97..96a5f645c 100644 --- a/scarb/src/ops/resolve.rs +++ b/scarb/src/ops/resolve.rs @@ -1,5 +1,5 @@ use crate::compiler::plugin::proc_macro::ProcMacroInstance; -use crate::compiler::plugin::proc_macro::compilation::SharedLibraryProvider; +use crate::compiler::plugin::proc_macro::SharedLibraryProvider; use crate::compiler::plugin::{CairoPluginProps, fetch_cairo_plugin}; use crate::compiler::{ CairoCompilationUnit, CompilationUnit, CompilationUnitAttributes, CompilationUnitCairoPlugin, From ccbeada0d3c59e298af946572ed4adf584a72d82 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maciej=20Tr=C4=85tnowiecki?= Date: Thu, 13 Mar 2025 22:44:06 +0100 Subject: [PATCH 32/38] Proc macro: create versioned macro instance struct --- .../compiler/plugin/proc_macro/expansion.rs | 50 +++ .../compiler/plugin/proc_macro/instance.rs | 191 ++++++++++- scarb/src/compiler/plugin/proc_macro/mod.rs | 4 +- .../compiler/plugin/proc_macro/repository.rs | 4 +- .../src/compiler/plugin/proc_macro/v1/ffi.rs | 311 ++++++----------- .../src/compiler/plugin/proc_macro/v1/host.rs | 67 ++-- .../src/compiler/plugin/proc_macro/v2/ffi.rs | 318 ++++++------------ .../plugin/proc_macro/v2/host/attribute.rs | 35 +- .../plugin/proc_macro/v2/host/derive.rs | 19 +- .../plugin/proc_macro/v2/host/inline.rs | 22 +- .../compiler/plugin/proc_macro/v2/host/mod.rs | 3 +- .../plugin/proc_macro/v2/host/post.rs | 5 +- .../methods/expand_attribute.rs | 8 +- 
.../methods/expand_derive.rs | 15 +- .../methods/expand_inline.rs | 5 +- 15 files changed, 571 insertions(+), 486 deletions(-) create mode 100644 scarb/src/compiler/plugin/proc_macro/expansion.rs diff --git a/scarb/src/compiler/plugin/proc_macro/expansion.rs b/scarb/src/compiler/plugin/proc_macro/expansion.rs new file mode 100644 index 000000000..9a88f55d4 --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/expansion.rs @@ -0,0 +1,50 @@ +use cairo_lang_macro::ExpansionKind as ExpansionKindV1; +use cairo_lang_macro_v1::ExpansionKind as ExpansionKindV2; +use smol_str::SmolStr; + +#[derive(Clone, Debug, Eq, PartialEq)] +pub enum ExpansionKind { + Attr, + Derive, + Inline, + Executable, +} + +// Implement conversions from expansion kind enums exposed by the proc macro implementation api. +// Note that `executable` kind is not represented on the macro side and executable attributes are +// inferred from the attribute expansion by separate logic. +// See `EXEC_ATTR_PREFIX` for implementation details. + +impl From for ExpansionKind { + fn from(kind: ExpansionKindV1) -> Self { + match kind { + ExpansionKindV1::Attr => Self::Attr, + ExpansionKindV1::Derive => Self::Derive, + ExpansionKindV1::Inline => Self::Inline, + } + } +} +impl From for ExpansionKind { + fn from(kind: ExpansionKindV2) -> Self { + match kind { + ExpansionKindV2::Attr => Self::Attr, + ExpansionKindV2::Derive => Self::Derive, + ExpansionKindV2::Inline => Self::Inline, + } + } +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct Expansion { + pub name: SmolStr, + pub kind: ExpansionKind, +} + +impl Expansion { + pub fn new(name: impl ToString, kind: ExpansionKind) -> Self { + Self { + name: SmolStr::new(name.to_string()), + kind, + } + } +} diff --git a/scarb/src/compiler/plugin/proc_macro/instance.rs b/scarb/src/compiler/plugin/proc_macro/instance.rs index 5571e9fa4..dc2c16b8c 100644 --- a/scarb/src/compiler/plugin/proc_macro/instance.rs +++ b/scarb/src/compiler/plugin/proc_macro/instance.rs @@ -1,7 +1,14 @@ use crate::compiler::plugin::CairoPluginProps; -use crate::core::{Package, TargetKind}; -use anyhow::Result; +use crate::compiler::plugin::proc_macro; +use crate::compiler::plugin::proc_macro::SharedLibraryProvider; +use crate::compiler::plugin::proc_macro::expansion::{Expansion, ExpansionKind}; +use crate::core::{Package, PackageId, TargetKind}; +use anyhow::{Context, Result, anyhow}; +use camino::{Utf8Path, Utf8PathBuf}; use serde::{Deserialize, Serialize}; +use smol_str::SmolStr; +use std::fmt::Debug; +use tracing::trace; #[derive( Debug, Clone, Copy, Serialize, Deserialize, Default, PartialEq, Eq, PartialOrd, Ord, Hash, @@ -25,3 +32,183 @@ impl ProcMacroApiVersionReader for Package { Ok(props.api) } } + +/// Representation of a single, loaded procedural macro package. +/// +/// This struct holds Scarb metadata of a proc macro package (package id, available expansions) +/// and a loaded plugin instance. 
+pub struct ProcMacroInstance { + package_id: PackageId, + plugin: VersionedPlugin, + expansions: Vec, +} + +impl Debug for ProcMacroInstance { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("ProcMacroInstance") + .field("package_id", &self.package_id) + .finish() + } +} + +impl ProcMacroInstance { + /// Load shared library + pub fn try_new(package: &Package, lib_path: Utf8PathBuf) -> Result { + trace!("loading compiled macro for `{}` package", package.id); + let plugin = VersionedPlugin::try_new(package, &lib_path)?; + Ok(Self { + expansions: unsafe { plugin.load_expansions(package.id)? }, + package_id: package.id, + plugin, + }) + } + + pub fn try_load_prebuilt(package: Package) -> Result { + trace!("loading prebuilt macro for `{}` package", package.id); + let prebuilt_path = package + .prebuilt_lib_path() + .context("could not resolve prebuilt library path")?; + let plugin = VersionedPlugin::try_new(&package, &prebuilt_path)?; + Ok(Self { + expansions: unsafe { plugin.load_expansions(package.id)? }, + package_id: package.id, + plugin, + }) + } + + pub fn package_id(&self) -> PackageId { + self.package_id + } + + pub fn get_expansions(&self) -> &[Expansion] { + &self.expansions + } + + fn plugin(&self) -> &VersionedPlugin { + &self.plugin + } + + pub fn declared_attributes_and_executables(&self) -> Vec { + self.get_expansions() + .iter() + .filter(|e| e.kind == ExpansionKind::Attr || e.kind == ExpansionKind::Executable) + .map(|e| e.name.clone()) + .map(Into::into) + .collect() + } + + pub fn declared_attributes(&self) -> Vec { + self.get_expansions() + .iter() + .filter(|e| e.kind == ExpansionKind::Attr) + .map(|e| e.name.clone()) + .map(Into::into) + .collect() + } + + pub fn declared_derives(&self) -> Vec { + self.get_expansions() + .iter() + .filter(|e| e.kind == ExpansionKind::Derive) + .map(|e| e.name.clone()) + .map(Into::into) + .collect() + } + + pub fn executable_attributes(&self) -> Vec { + self.get_expansions() + .iter() + .filter(|e| e.kind == ExpansionKind::Executable) + .map(|e| e.name.clone()) + .map(Into::into) + .collect() + } + + pub fn inline_macros(&self) -> Vec { + self.get_expansions() + .iter() + .filter(|e| e.kind == ExpansionKind::Inline) + .map(|e| e.name.clone()) + .map(Into::into) + .collect() + } + + pub fn doc(&self, item_name: SmolStr) -> Option { + self.plugin().doc(item_name) + } + + pub fn try_v1(&self) -> Result<&proc_macro::v1::Plugin> { + self.plugin().as_v1().ok_or_else(|| { + anyhow!( + "procedural macro `{}` using v2 api used in a context expecting v1 api", + self.package_id() + ) + }) + } + + pub fn try_v2(&self) -> Result<&proc_macro::v2::Plugin> { + self.plugin().as_v2().ok_or_else(|| { + anyhow!( + "procedural macro `{}` using v1 api used in a context expecting v2 api", + self.package_id() + ) + }) + } +} + +/// This struct provides a unified interface for both v1 and v2 proc macro plugins. +/// +/// It provides utilities for loading the macro implementation and invoking exposed interface, +/// but it should not implement any logic outside of macro api versioning. +pub enum VersionedPlugin { + V1(proc_macro::v1::Plugin), + V2(proc_macro::v2::Plugin), +} + +impl VersionedPlugin { + /// Load the shared library under the given path, and store versioned plugin instance. + /// + /// # Safety + /// This function is unsafe because it calls the FFI interface of procedural macro package. + pub fn try_new(package: &Package, lib_path: &Utf8Path) -> Result { + match package.api_version()? 
{ + ProcMacroApiVersion::V1 => Ok(VersionedPlugin::V1(unsafe { + proc_macro::v1::Plugin::try_new(lib_path)? + })), + ProcMacroApiVersion::V2 => Ok(VersionedPlugin::V2(unsafe { + proc_macro::v2::Plugin::try_new(lib_path)? + })), + } + } + + /// Obtain metadata of available expansions from the procedural macro. + /// + /// # Safety + /// This function is unsafe because it calls the FFI interface of procedural macro package. + pub unsafe fn load_expansions(&self, package_id: PackageId) -> Result> { + match self { + VersionedPlugin::V1(plugin) => unsafe { plugin.load_expansions(package_id) }, + VersionedPlugin::V2(plugin) => unsafe { plugin.load_expansions(package_id) }, + } + } + + pub fn doc(&self, item_name: SmolStr) -> Option { + match self { + VersionedPlugin::V1(plugin) => plugin.doc(item_name), + VersionedPlugin::V2(plugin) => plugin.doc(item_name), + } + } + + pub fn as_v1(&self) -> Option<&proc_macro::v1::Plugin> { + match self { + VersionedPlugin::V1(plugin) => Some(plugin), + _ => None, + } + } + pub fn as_v2(&self) -> Option<&proc_macro::v2::Plugin> { + match self { + VersionedPlugin::V2(plugin) => Some(plugin), + _ => None, + } + } +} diff --git a/scarb/src/compiler/plugin/proc_macro/mod.rs b/scarb/src/compiler/plugin/proc_macro/mod.rs index 2b92b5216..40c55c413 100644 --- a/scarb/src/compiler/plugin/proc_macro/mod.rs +++ b/scarb/src/compiler/plugin/proc_macro/mod.rs @@ -1,4 +1,5 @@ pub mod compilation; +mod expansion; mod instance; mod repository; mod shared_library_provider; @@ -6,7 +7,8 @@ pub mod v1; pub mod v2; pub use compilation::{check_unit, compile_unit, fetch_crate}; +pub use expansion::*; pub use instance::*; pub use repository::*; pub use shared_library_provider::SharedLibraryProvider; -pub use v2::*; +pub use v2::ProcMacroHostPlugin; diff --git a/scarb/src/compiler/plugin/proc_macro/repository.rs b/scarb/src/compiler/plugin/proc_macro/repository.rs index 62eea57e1..84c4aada1 100644 --- a/scarb/src/compiler/plugin/proc_macro/repository.rs +++ b/scarb/src/compiler/plugin/proc_macro/repository.rs @@ -1,5 +1,5 @@ +use crate::compiler::plugin::proc_macro::ProcMacroInstance; use crate::compiler::plugin::proc_macro::SharedLibraryProvider; -use crate::compiler::plugin::proc_macro::v2::ProcMacroInstance; use crate::core::{Config, Package, PackageId}; use anyhow::{Context, Result, bail}; use std::collections::HashMap; @@ -38,7 +38,7 @@ impl ProcMacroRepository { .shared_lib_path(config) .context("could not resolve shared library path")?; - let instance = Arc::new(ProcMacroInstance::try_new(package.id, lib_path)?); + let instance = Arc::new(ProcMacroInstance::try_new(&package, lib_path)?); macros.insert(package.id, instance.clone()); Ok(instance) diff --git a/scarb/src/compiler/plugin/proc_macro/v1/ffi.rs b/scarb/src/compiler/plugin/proc_macro/v1/ffi.rs index d407372c7..4cc9f5f80 100644 --- a/scarb/src/compiler/plugin/proc_macro/v1/ffi.rs +++ b/scarb/src/compiler/plugin/proc_macro/v1/ffi.rs @@ -1,4 +1,4 @@ -use crate::core::{Package, PackageId}; +use crate::core::PackageId; use anyhow::{Context, Result, ensure}; use cairo_lang_defs::patcher::PatchBuilder; use cairo_lang_macro_stable_v1::{ @@ -6,27 +6,25 @@ use cairo_lang_macro_stable_v1::{ StableResultWrapper, StableTokenStream, }; use cairo_lang_macro_v1::{ - ExpansionKind as SharedExpansionKind, FullPathMarker, PostProcessContext, ProcMacroResult, + ExpansionKind as ExpansionKindV1, FullPathMarker, PostProcessContext, ProcMacroResult, TokenStream, }; use cairo_lang_syntax::node::TypedSyntaxNode; use 
cairo_lang_syntax::node::db::SyntaxGroup; -use camino::Utf8PathBuf; +use camino::Utf8Path; use itertools::Itertools; use libloading::{Library, Symbol}; use std::ffi::{CStr, CString, c_char}; -use std::fmt::Debug; use std::slice; -use crate::compiler::plugin::proc_macro::SharedLibraryProvider; use crate::compiler::plugin::proc_macro::v1::ProcMacroAuxData; +use crate::compiler::plugin::proc_macro::expansion::{Expansion, ExpansionKind}; #[cfg(not(windows))] use libloading::os::unix::Symbol as RawSymbol; #[cfg(windows)] use libloading::os::windows::Symbol as RawSymbol; use smol_str::SmolStr; -use tracing::trace; pub trait FromSyntaxNode { fn from_syntax_node(db: &dyn SyntaxGroup, node: &impl TypedSyntaxNode) -> Self; @@ -40,62 +38,102 @@ impl FromSyntaxNode for TokenStream { } } +/// This constant is used to identify executable attributes. +/// +/// An attribute is considered executable if it starts with this prefix. +/// No other metadata is stored for executable attributes. +/// This means, that this constant is part of the stable contract between Scarb and procedural macro. +/// Warning: Changing this would be breaking to existing macros! const EXEC_ATTR_PREFIX: &str = "__exec_attr_"; -/// Representation of a single procedural macro. -/// -/// This struct is a wrapper around a shared library containing the procedural macro implementation. -/// It is responsible for loading the shared library and providing a safe interface for code expansion. -pub struct ProcMacroInstance { - package_id: PackageId, - plugin: Plugin, - expansions: Vec, +type ListExpansions = extern "C" fn() -> StableExpansionsList; +type FreeExpansionsList = extern "C" fn(StableExpansionsList); +type ExpandCode = + extern "C" fn(*const c_char, StableTokenStream, StableTokenStream) -> StableResultWrapper; +type FreeResult = extern "C" fn(StableProcMacroResult); +type PostProcessCallback = extern "C" fn(StablePostProcessContext) -> StablePostProcessContext; +type DocExpansion = extern "C" fn(*const c_char) -> *mut c_char; +type FreeExpansionDoc = extern "C" fn(*mut c_char); + +struct VTableV0 { + list_expansions: RawSymbol, + free_expansions_list: RawSymbol, + expand: RawSymbol, + free_result: RawSymbol, + post_process_callback: RawSymbol, + doc: RawSymbol, + free_doc: RawSymbol, } -impl Debug for ProcMacroInstance { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("ProcMacroInstance") - .field("package_id", &self.package_id) - .finish() - } +macro_rules! get_symbol { + ($library:ident, $name:literal, $type:ty) => {{ + let symbol: Symbol<'_, $type> = $library.get($name).context(format!( + "failed to load {} symbol for procedural macro", + stringify!($name) + ))?; + symbol.into_raw() + }}; } -impl ProcMacroInstance { - /// Load shared library - pub fn try_new(package_id: PackageId, lib_path: Utf8PathBuf) -> Result { - trace!("loading compiled macro for `{}` package", package_id); - let plugin = unsafe { Plugin::try_new(lib_path)? }; - Ok(Self { - expansions: unsafe { Self::load_expansions(&plugin, package_id)? 
}, - package_id, - plugin, - }) +impl VTableV0 { + unsafe fn try_new(library: &Library) -> Result { + unsafe { + Ok(VTableV0 { + list_expansions: get_symbol!(library, b"list_expansions\0", ListExpansions), + free_expansions_list: get_symbol!( + library, + b"free_expansions_list\0", + FreeExpansionsList + ), + expand: get_symbol!(library, b"expand\0", ExpandCode), + free_result: get_symbol!(library, b"free_result\0", FreeResult), + post_process_callback: get_symbol!( + library, + b"post_process_callback\0", + PostProcessCallback + ), + doc: get_symbol!(library, b"doc\0", DocExpansion), + free_doc: get_symbol!(library, b"free_doc\0", FreeExpansionDoc), + }) + } } +} + +/// This struct is a wrapper around a shared library containing the procedural macro implementation. +/// It is responsible for loading the shared library and providing a safe interface for its access. +pub struct Plugin { + #[allow(dead_code)] + library: Library, + vtable: VTableV0, +} + +impl Plugin { + /// Load the shared library under the given path and store pointers to its public symbols. + /// + /// # Safety + /// This function is unsafe because it calls the FFI interface of procedural macro package. + pub unsafe fn try_new(library_path: &Utf8Path) -> Result { + let library = unsafe { Library::new(library_path)? }; + let vtable = unsafe { VTableV0::try_new(&library)? }; - pub fn try_load_prebuilt(package: Package) -> Result { - trace!("loading prebuilt macro for `{}` package", package.id); - let prebuilt_path = package - .prebuilt_lib_path() - .context("could not resolve prebuilt library path")?; - let plugin = unsafe { Plugin::try_new(prebuilt_path)? }; - Ok(Self { - expansions: unsafe { Self::load_expansions(&plugin, package.id)? }, - package_id: package.id, - plugin, - }) + Ok(Plugin { library, vtable }) } - unsafe fn load_expansions(plugin: &Plugin, package_id: PackageId) -> Result> { + /// Obtain metadata of available expansions from the procedural macro. + /// + /// # Safety + /// This function is unsafe because it calls the FFI interface of procedural macro package. + pub unsafe fn load_expansions(&self, package_id: PackageId) -> Result> { // Make a call to the FFI interface to list declared expansions. - let stable_expansions = (plugin.vtable.list_expansions)(); + let stable_expansions = (self.vtable.list_expansions)(); let (ptr, n) = stable_expansions.raw_parts(); let expansions = unsafe { slice::from_raw_parts(ptr, n) }; let mut expansions: Vec = expansions .iter() - .map(|e| unsafe { Expansion::from_stable(e) }) + .map(|stable_expansion| stable_expansion.into()) .collect(); // Free the memory allocated by the `stable_expansions`. - (plugin.vtable.free_expansions_list)(stable_expansions); + (self.vtable.free_expansions_list)(stable_expansions); // Validate expansions. 
expansions.sort_unstable_by_key(|e| e.name.clone()); ensure!( @@ -111,59 +149,6 @@ impl ProcMacroInstance { Ok(expansions) } - pub fn get_expansions(&self) -> &[Expansion] { - &self.expansions - } - - pub fn package_id(&self) -> PackageId { - self.package_id - } - - pub fn declared_attributes_and_executables(&self) -> Vec { - self.get_expansions() - .iter() - .filter(|e| e.kind == ExpansionKind::Attr || e.kind == ExpansionKind::Executable) - .map(|e| e.name.clone()) - .map(Into::into) - .collect() - } - - pub fn declared_attributes(&self) -> Vec { - self.get_expansions() - .iter() - .filter(|e| e.kind == ExpansionKind::Attr) - .map(|e| e.name.clone()) - .map(Into::into) - .collect() - } - - pub fn declared_derives(&self) -> Vec { - self.get_expansions() - .iter() - .filter(|e| e.kind == ExpansionKind::Derive) - .map(|e| e.name.clone()) - .map(Into::into) - .collect() - } - - pub fn executable_attributes(&self) -> Vec { - self.get_expansions() - .iter() - .filter(|e| e.kind == ExpansionKind::Executable) - .map(|e| e.name.clone()) - .map(Into::into) - .collect() - } - - pub fn inline_macros(&self) -> Vec { - self.get_expansions() - .iter() - .filter(|e| e.kind == ExpansionKind::Inline) - .map(|e| e.name.clone()) - .map(Into::into) - .collect() - } - /// Apply expansion to token stream. /// /// This function implements the actual calls to functions from the dynamic library. @@ -172,6 +157,9 @@ impl ProcMacroInstance { /// /// Please be aware that the memory management of values passing the FFI-barrier is tricky. /// The memory must be freed on the same side of the barrier, where the allocation was made. + /// + /// # Safety + /// This function is unsafe because it calls the FFI interface of procedural macro package. pub(crate) fn generate_code( &self, item_name: SmolStr, @@ -185,8 +173,7 @@ impl ProcMacroInstance { let item_name = CString::new(item_name.to_string()).unwrap().into_raw(); // Call FFI interface for code expansion. // Note that `stable_result` has been allocated by the dynamic library. - let stable_result = - (self.plugin.vtable.expand)(item_name, stable_attr, stable_token_stream); + let stable_result = (self.vtable.expand)(item_name, stable_attr, stable_token_stream); // Free proc macro name. let _ = unsafe { CString::from_raw(item_name) }; // Free the memory allocated by the `stable_token_stream`. @@ -199,11 +186,15 @@ impl ProcMacroInstance { // Note, that the memory still needs to be freed on the allocator side! let result = unsafe { ProcMacroResult::from_stable(&stable_result.output) }; // Call FFI interface to free the `stable_result` that has been allocated by previous call. - (self.plugin.vtable.free_result)(stable_result.output); + (self.vtable.free_result)(stable_result.output); // Return obtained result. result } + /// Call post process callbacks defined in the procedural macro. + /// + /// # Safety + /// This function is unsafe because it calls the FFI interface of procedural macro package. pub(crate) fn post_process_callback( &self, aux_data: Vec, @@ -216,17 +207,21 @@ impl ProcMacroInstance { } .into_stable(); // Actual call to FFI interface for aux data callback. - let context = (self.plugin.vtable.post_process_callback)(context); + let context = (self.vtable.post_process_callback)(context); // Free the allocated memory. let _ = unsafe { PostProcessContext::from_owned_stable(context) }; } - pub fn doc(&self, item_name: SmolStr) -> Option { + /// Obtain expansion doc string. 
+ /// + /// # Safety + /// This function is unsafe because it calls the FFI interface of procedural macro package. + pub fn doc(&self, item_name: impl ToString) -> Option { // Allocate proc macro name. let item_name = CString::new(item_name.to_string()).unwrap().into_raw(); // Call FFI interface for expansion doc. // Note that `stable_result` has been allocated by the dynamic library. - let stable_result = (self.plugin.vtable.doc)(item_name); + let stable_result = (self.vtable.doc)(item_name); let doc = if stable_result.is_null() { None } else { @@ -236,44 +231,13 @@ impl ProcMacroInstance { // Free proc macro name. let _ = unsafe { CString::from_raw(item_name) }; // Call FFI interface to free the `stable_result` that has been allocated by previous call. - (self.plugin.vtable.free_doc)(stable_result); + (self.vtable.free_doc)(stable_result); doc } } -#[derive(Clone, Debug, Eq, PartialEq)] -pub enum ExpansionKind { - Attr, - Derive, - Inline, - Executable, -} - -impl From for ExpansionKind { - fn from(kind: SharedExpansionKind) -> Self { - match kind { - SharedExpansionKind::Attr => Self::Attr, - SharedExpansionKind::Derive => Self::Derive, - SharedExpansionKind::Inline => Self::Inline, - } - } -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct Expansion { - pub name: SmolStr, - pub kind: ExpansionKind, -} - -impl Expansion { - pub fn new(name: impl ToString, kind: ExpansionKind) -> Self { - Self { - name: SmolStr::new(name.to_string()), - kind, - } - } - - unsafe fn from_stable(stable_expansion: &StableExpansion) -> Self { +impl From<&StableExpansion> for Expansion { + fn from(stable_expansion: &StableExpansion) -> Self { // Note this does not take ownership of underlying memory. let name = if stable_expansion.name.is_null() { String::default() @@ -289,77 +253,10 @@ impl Expansion { kind: ExpansionKind::Executable, }; } + let expansion_kind = unsafe { ExpansionKindV1::from_stable(&stable_expansion.kind) }.into(); Self { name: SmolStr::new(name), - kind: unsafe { SharedExpansionKind::from_stable(&stable_expansion.kind).into() }, - } - } -} - -type ListExpansions = extern "C" fn() -> StableExpansionsList; -type FreeExpansionsList = extern "C" fn(StableExpansionsList); -type ExpandCode = - extern "C" fn(*const c_char, StableTokenStream, StableTokenStream) -> StableResultWrapper; -type FreeResult = extern "C" fn(StableProcMacroResult); -type PostProcessCallback = extern "C" fn(StablePostProcessContext) -> StablePostProcessContext; -type DocExpansion = extern "C" fn(*const c_char) -> *mut c_char; -type FreeExpansionDoc = extern "C" fn(*mut c_char); - -struct VTableV0 { - list_expansions: RawSymbol, - free_expansions_list: RawSymbol, - expand: RawSymbol, - free_result: RawSymbol, - post_process_callback: RawSymbol, - doc: RawSymbol, - free_doc: RawSymbol, -} - -macro_rules! 
get_symbol { - ($library:ident, $name:literal, $type:ty) => {{ - let symbol: Symbol<'_, $type> = $library.get($name).context(format!( - "failed to load {} symbol for procedural macro", - stringify!($name) - ))?; - symbol.into_raw() - }}; -} - -impl VTableV0 { - unsafe fn try_new(library: &Library) -> Result { - unsafe { - Ok(VTableV0 { - list_expansions: get_symbol!(library, b"list_expansions\0", ListExpansions), - free_expansions_list: get_symbol!( - library, - b"free_expansions_list\0", - FreeExpansionsList - ), - expand: get_symbol!(library, b"expand\0", ExpandCode), - free_result: get_symbol!(library, b"free_result\0", FreeResult), - post_process_callback: get_symbol!( - library, - b"post_process_callback\0", - PostProcessCallback - ), - doc: get_symbol!(library, b"doc\0", DocExpansion), - free_doc: get_symbol!(library, b"free_doc\0", FreeExpansionDoc), - }) + kind: expansion_kind, } } } - -struct Plugin { - #[allow(dead_code)] - library: Library, - vtable: VTableV0, -} - -impl Plugin { - unsafe fn try_new(library_path: Utf8PathBuf) -> Result { - let library = unsafe { Library::new(library_path)? }; - let vtable = unsafe { VTableV0::try_new(&library)? }; - - Ok(Plugin { library, vtable }) - } -} diff --git a/scarb/src/compiler/plugin/proc_macro/v1/host.rs b/scarb/src/compiler/plugin/proc_macro/v1/host.rs index 38f3e21ee..47023a8f7 100644 --- a/scarb/src/compiler/plugin/proc_macro/v1/host.rs +++ b/scarb/src/compiler/plugin/proc_macro/v1/host.rs @@ -1,6 +1,6 @@ -use crate::compiler::plugin::proc_macro::v1::{ - Expansion, ExpansionKind, FromSyntaxNode, ProcMacroInstance, -}; +use crate::compiler::plugin::proc_macro::ProcMacroInstance; +use crate::compiler::plugin::proc_macro::expansion::{Expansion, ExpansionKind}; +use crate::compiler::plugin::proc_macro::v1::FromSyntaxNode; use crate::core::PackageId; use anyhow::{Result, ensure}; use cairo_lang_defs::ids::{ModuleItemId, TopLevelLanguageElementId}; @@ -346,11 +346,15 @@ impl ProcMacroHostPlugin { } }; - let result = self.instance(input.package_id).generate_code( - input.expansion.name.clone(), - args.clone(), - token_stream.clone(), - ); + let result = self + .instance(input.package_id) + .try_v1() + .expect("procedural macro using v2 api used in a context expecting v1 api") + .generate_code( + input.expansion.name.clone(), + args.clone(), + token_stream.clone(), + ); let expanded = context.register_result(token_stream.to_string(), input, result, stable_ptr); item_builder.add_modified(RewriteNode::Mapped { @@ -573,11 +577,15 @@ impl ProcMacroHostPlugin { let mut derived_code = PatchBuilder::new(db, &item_ast); for derive in derives.iter() { - let result = self.instance(derive.package_id).generate_code( - derive.expansion.name.clone(), - TokenStream::empty(), - token_stream.clone(), - ); + let result = self + .instance(derive.package_id) + .try_v1() + .expect("procedural macro using v2 api used in a context expecting v1 api") + .generate_code( + derive.expansion.name.clone(), + TokenStream::empty(), + token_stream.clone(), + ); // Register diagnostics. 
all_diagnostics.extend(result.diagnostics); @@ -645,11 +653,15 @@ impl ProcMacroHostPlugin { token_stream: TokenStream, stable_ptr: SyntaxStablePtrId, ) -> PluginResult { - let result = self.instance(input.package_id).generate_code( - input.expansion.name.clone(), - args.clone(), - token_stream.clone(), - ); + let result = self + .instance(input.package_id) + .try_v1() + .expect("procedural macro using v2 api used in a context expecting v1 api") + .generate_code( + input.expansion.name.clone(), + args.clone(), + token_stream.clone(), + ); // Handle token stream. if result.token_stream.is_empty() { @@ -768,7 +780,10 @@ impl ProcMacroHostPlugin { .cloned() .unwrap_or_default(); debug!("calling post processing callback with: {data:?}"); - instance.post_process_callback(data.clone(), markers_for_instance); + instance + .try_v1() + .expect("procedural macro using v2 api used in a context expecting v1 api") + .post_process_callback(data.clone(), markers_for_instance); } Ok(()) } @@ -1096,11 +1111,15 @@ impl InlineMacroExprPlugin for ProcMacroInlinePlugin { let stable_ptr = syntax.clone().stable_ptr().untyped(); let arguments = syntax.arguments(db); let token_stream = TokenStream::from_syntax_node(db, &arguments); - let result = self.instance().generate_code( - self.expansion.name.clone(), - TokenStream::empty(), - token_stream, - ); + let result = self + .instance() + .try_v1() + .expect("procedural macro using v2 api used in a context expecting v1 api") + .generate_code( + self.expansion.name.clone(), + TokenStream::empty(), + token_stream, + ); // Handle diagnostics. let diagnostics = into_cairo_diagnostics(result.diagnostics, stable_ptr); let token_stream = result.token_stream.clone(); diff --git a/scarb/src/compiler/plugin/proc_macro/v2/ffi.rs b/scarb/src/compiler/plugin/proc_macro/v2/ffi.rs index 1bb40d524..1f4087a8b 100644 --- a/scarb/src/compiler/plugin/proc_macro/v2/ffi.rs +++ b/scarb/src/compiler/plugin/proc_macro/v2/ffi.rs @@ -1,85 +1,127 @@ -use crate::compiler::plugin::proc_macro::SharedLibraryProvider; -use crate::core::{Package, PackageId}; +use crate::core::PackageId; use anyhow::{Context, Result, ensure}; use cairo_lang_macro::{ - ExpansionKind as SharedExpansionKind, FullPathMarker, PostProcessContext, ProcMacroResult, + ExpansionKind as ExpansionKindV2, FullPathMarker, PostProcessContext, ProcMacroResult, TextSpan, TokenStream, }; use cairo_lang_macro_stable::{ StableExpansion, StableExpansionsList, StablePostProcessContext, StableProcMacroResult, StableResultWrapper, StableTextSpan, StableTokenStream, }; -use camino::Utf8PathBuf; +use camino::Utf8Path; use itertools::Itertools; use libloading::{Library, Symbol}; use std::ffi::{CStr, CString, c_char}; -use std::fmt::Debug; use std::slice; +use crate::compiler::plugin::proc_macro::expansion::{Expansion, ExpansionKind}; use crate::compiler::plugin::proc_macro::v2::ProcMacroAuxData; #[cfg(not(windows))] use libloading::os::unix::Symbol as RawSymbol; #[cfg(windows)] use libloading::os::windows::Symbol as RawSymbol; use smol_str::SmolStr; -use tracing::trace; +/// This constant is used to identify executable attributes. +/// +/// An attribute is considered executable if it starts with this prefix. +/// No other metadata is stored for executable attributes. +/// This means, that this constant is part of the stable contract between Scarb and procedural macro. +/// Warning: Changing this would be breaking to existing macros! const EXEC_ATTR_PREFIX: &str = "__exec_attr_"; -/// Representation of a single procedural macro. 
-/// -/// This struct is a wrapper around a shared library containing the procedural macro implementation. -/// It is responsible for loading the shared library and providing a safe interface for code expansion. -pub struct ProcMacroInstance { - package_id: PackageId, - plugin: Plugin, - expansions: Vec, +type ListExpansions = extern "C" fn() -> StableExpansionsList; +type FreeExpansionsList = extern "C" fn(StableExpansionsList); +type ExpandCode = extern "C" fn( + *const c_char, + StableTextSpan, + StableTokenStream, + StableTokenStream, +) -> StableResultWrapper; +type FreeResult = extern "C" fn(StableProcMacroResult); +type PostProcessCallback = extern "C" fn(StablePostProcessContext) -> StablePostProcessContext; +type DocExpansion = extern "C" fn(*const c_char) -> *mut c_char; +type FreeExpansionDoc = extern "C" fn(*mut c_char); + +struct VTableV0 { + list_expansions: RawSymbol, + free_expansions_list: RawSymbol, + expand: RawSymbol, + free_result: RawSymbol, + post_process_callback: RawSymbol, + doc: RawSymbol, + free_doc: RawSymbol, } -impl Debug for ProcMacroInstance { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("ProcMacroInstance") - .field("package_id", &self.package_id) - .finish() - } +macro_rules! get_symbol { + ($library:ident, $name:literal, $type:ty) => {{ + let symbol: Symbol<'_, $type> = $library.get($name).context(format!( + "failed to load {} symbol for procedural macro", + stringify!($name) + ))?; + symbol.into_raw() + }}; } -impl ProcMacroInstance { - /// Load shared library - pub fn try_new(package_id: PackageId, lib_path: Utf8PathBuf) -> Result { - trace!("loading compiled macro for `{}` package", package_id); - let plugin = unsafe { Plugin::try_new(lib_path)? }; - Ok(Self { - expansions: unsafe { Self::load_expansions(&plugin, package_id)? }, - package_id, - plugin, - }) +impl VTableV0 { + unsafe fn try_new(library: &Library) -> Result { + unsafe { + Ok(VTableV0 { + list_expansions: get_symbol!(library, b"list_expansions_v2\0", ListExpansions), + free_expansions_list: get_symbol!( + library, + b"free_expansions_list_v2\0", + FreeExpansionsList + ), + expand: get_symbol!(library, b"expand_v2\0", ExpandCode), + free_result: get_symbol!(library, b"free_result_v2\0", FreeResult), + post_process_callback: get_symbol!( + library, + b"post_process_callback_v2\0", + PostProcessCallback + ), + doc: get_symbol!(library, b"doc_v2\0", DocExpansion), + free_doc: get_symbol!(library, b"free_doc_v2\0", FreeExpansionDoc), + }) + } } +} - pub fn try_load_prebuilt(package: Package) -> Result { - trace!("loading prebuilt macro for `{}` package", package.id); - let prebuilt_path = package - .prebuilt_lib_path() - .context("could not resolve prebuilt library path")?; - let plugin = unsafe { Plugin::try_new(prebuilt_path)? }; - Ok(Self { - expansions: unsafe { Self::load_expansions(&plugin, package.id)? }, - package_id: package.id, - plugin, - }) +/// This struct is a wrapper around a shared library containing the procedural macro implementation. +/// It is responsible for loading the shared library and providing a safe interface for its access. +pub struct Plugin { + #[allow(dead_code)] + library: Library, + vtable: VTableV0, +} + +impl Plugin { + /// Load the shared library under the given path and store pointers to its public symbols. + /// + /// # Safety + /// This function is unsafe because it calls the FFI interface of procedural macro package. 
+ pub unsafe fn try_new(library_path: &Utf8Path) -> Result { + let library = unsafe { Library::new(library_path)? }; + let vtable = unsafe { VTableV0::try_new(&library)? }; + + Ok(Plugin { library, vtable }) } - unsafe fn load_expansions(plugin: &Plugin, package_id: PackageId) -> Result> { + /// Obtain metadata of available expansions from the procedural macro. + /// + /// # Safety + /// This function is unsafe because it calls the FFI interface of procedural macro package. + pub unsafe fn load_expansions(&self, package_id: PackageId) -> Result> { // Make a call to the FFI interface to list declared expansions. - let stable_expansions = (plugin.vtable.list_expansions)(); + let stable_expansions = (self.vtable.list_expansions)(); let (ptr, n) = stable_expansions.raw_parts(); let expansions = unsafe { slice::from_raw_parts(ptr, n) }; let mut expansions: Vec = expansions .iter() - .map(|e| unsafe { Expansion::from_stable(e) }) + .map(|stable_expansion| stable_expansion.into()) .collect(); // Free the memory allocated by the `stable_expansions`. - (plugin.vtable.free_expansions_list)(stable_expansions); + (self.vtable.free_expansions_list)(stable_expansions); // Validate expansions. expansions.sort_unstable_by_key(|e| e.name.clone()); ensure!( @@ -95,59 +137,6 @@ impl ProcMacroInstance { Ok(expansions) } - pub fn get_expansions(&self) -> &[Expansion] { - &self.expansions - } - - pub fn package_id(&self) -> PackageId { - self.package_id - } - - pub fn declared_attributes_and_executables(&self) -> Vec { - self.get_expansions() - .iter() - .filter(|e| e.kind == ExpansionKind::Attr || e.kind == ExpansionKind::Executable) - .map(|e| e.name.clone()) - .map(Into::into) - .collect() - } - - pub fn declared_attributes(&self) -> Vec { - self.get_expansions() - .iter() - .filter(|e| e.kind == ExpansionKind::Attr) - .map(|e| e.name.clone()) - .map(Into::into) - .collect() - } - - pub fn declared_derives(&self) -> Vec { - self.get_expansions() - .iter() - .filter(|e| e.kind == ExpansionKind::Derive) - .map(|e| e.name.clone()) - .map(Into::into) - .collect() - } - - pub fn executable_attributes(&self) -> Vec { - self.get_expansions() - .iter() - .filter(|e| e.kind == ExpansionKind::Executable) - .map(|e| e.name.clone()) - .map(Into::into) - .collect() - } - - pub fn inline_macros(&self) -> Vec { - self.get_expansions() - .iter() - .filter(|e| e.kind == ExpansionKind::Inline) - .map(|e| e.name.clone()) - .map(Into::into) - .collect() - } - /// Apply expansion to token stream. /// /// This function implements the actual calls to functions from the dynamic library. @@ -156,6 +145,9 @@ impl ProcMacroInstance { /// /// Please be aware that the memory management of values passing the FFI-barrier is tricky. /// The memory must be freed on the same side of the barrier, where the allocation was made. + /// + /// # Safety + /// This function is unsafe because it calls the FFI interface of procedural macro package. pub(crate) fn generate_code( &self, item_name: SmolStr, @@ -172,7 +164,7 @@ impl ProcMacroInstance { // Note that `stable_result` has been allocated by the dynamic library. let call_site: StableTextSpan = call_site.into_stable(); let stable_result = - (self.plugin.vtable.expand)(item_name, call_site, stable_attr, stable_token_stream); + (self.vtable.expand)(item_name, call_site, stable_attr, stable_token_stream); // Free proc macro name. let _ = unsafe { CString::from_raw(item_name) }; // Free the memory allocated by the `stable_token_stream`. 
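---

Editor's note: the `generate_code` and `doc` wrappers in these `ffi.rs` hunks repeatedly allocate a `CString`, hand the raw pointer across the FFI boundary, and reclaim it afterwards with `CString::from_raw`, because the memory must be freed on the same side of the barrier where it was allocated. A minimal, self-contained sketch of that ownership round trip is shown below; `fake_expand` is a hypothetical stand-in for a symbol that the real plugin would load via `libloading`, not part of the Scarb API.

```rust
use std::ffi::{CStr, CString};
use std::os::raw::c_char;

// Stand-in for an `extern "C"` symbol loaded from the macro's shared library.
// The callee may read the string, but must not free it.
extern "C" fn fake_expand(item_name: *const c_char) -> usize {
    unsafe { CStr::from_ptr(item_name) }.to_bytes().len()
}

fn call_across_ffi(item_name: &str) -> usize {
    // Allocate on the host side and leak the pointer for the duration of the call.
    let raw = CString::new(item_name).unwrap().into_raw();
    let result = fake_expand(raw);
    // Reclaim ownership on the same side that allocated it, so the string is
    // released by the allocator that produced it.
    let _ = unsafe { CString::from_raw(raw) };
    result
}

fn main() {
    assert_eq!(call_across_ffi("some_macro"), "some_macro".len());
}
```

---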
@@ -185,11 +177,15 @@ impl ProcMacroInstance { // Note, that the memory still needs to be freed on the allocator side! let result = unsafe { ProcMacroResult::from_stable(&stable_result.output) }; // Call FFI interface to free the `stable_result` that has been allocated by previous call. - (self.plugin.vtable.free_result)(stable_result.output); + (self.vtable.free_result)(stable_result.output); // Return obtained result. result } + /// Call post process callbacks defined in the procedural macro. + /// + /// # Safety + /// This function is unsafe because it calls the FFI interface of procedural macro package. pub(crate) fn post_process_callback( &self, aux_data: Vec, @@ -202,17 +198,21 @@ impl ProcMacroInstance { } .into_stable(); // Actual call to FFI interface for aux data callback. - let context = (self.plugin.vtable.post_process_callback)(context); + let context = (self.vtable.post_process_callback)(context); // Free the allocated memory. unsafe { PostProcessContext::free_owned_stable(context) }; } - pub fn doc(&self, item_name: SmolStr) -> Option { + /// Obtain expansion doc string. + /// + /// # Safety + /// This function is unsafe because it calls the FFI interface of procedural macro package. + pub fn doc(&self, item_name: impl ToString) -> Option { // Allocate proc macro name. let item_name = CString::new(item_name.to_string()).unwrap().into_raw(); // Call FFI interface for expansion doc. // Note that `stable_result` has been allocated by the dynamic library. - let stable_result = (self.plugin.vtable.doc)(item_name); + let stable_result = (self.vtable.doc)(item_name); let doc = if stable_result.is_null() { None } else { @@ -222,44 +222,13 @@ impl ProcMacroInstance { // Free proc macro name. let _ = unsafe { CString::from_raw(item_name) }; // Call FFI interface to free the `stable_result` that has been allocated by previous call. - (self.plugin.vtable.free_doc)(stable_result); + (self.vtable.free_doc)(stable_result); doc } } -#[derive(Clone, Debug, Eq, PartialEq)] -pub enum ExpansionKind { - Attr, - Derive, - Inline, - Executable, -} - -impl From for ExpansionKind { - fn from(kind: SharedExpansionKind) -> Self { - match kind { - SharedExpansionKind::Attr => Self::Attr, - SharedExpansionKind::Derive => Self::Derive, - SharedExpansionKind::Inline => Self::Inline, - } - } -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct Expansion { - pub name: SmolStr, - pub kind: ExpansionKind, -} - -impl Expansion { - pub fn new(name: impl ToString, kind: ExpansionKind) -> Self { - Self { - name: SmolStr::new(name.to_string()), - kind, - } - } - - unsafe fn from_stable(stable_expansion: &StableExpansion) -> Self { +impl From<&StableExpansion> for Expansion { + fn from(stable_expansion: &StableExpansion) -> Self { // Note this does not take ownership of underlying memory. 
let name = if stable_expansion.name.is_null() { String::default() @@ -275,81 +244,10 @@ impl Expansion { kind: ExpansionKind::Executable, }; } + let expansion_kind = unsafe { ExpansionKindV2::from_stable(&stable_expansion.kind) }.into(); Self { name: SmolStr::new(name), - kind: unsafe { SharedExpansionKind::from_stable(&stable_expansion.kind).into() }, - } - } -} - -type ListExpansions = extern "C" fn() -> StableExpansionsList; -type FreeExpansionsList = extern "C" fn(StableExpansionsList); -type ExpandCode = extern "C" fn( - *const c_char, - StableTextSpan, - StableTokenStream, - StableTokenStream, -) -> StableResultWrapper; -type FreeResult = extern "C" fn(StableProcMacroResult); -type PostProcessCallback = extern "C" fn(StablePostProcessContext) -> StablePostProcessContext; -type DocExpansion = extern "C" fn(*const c_char) -> *mut c_char; -type FreeExpansionDoc = extern "C" fn(*mut c_char); - -struct VTableV0 { - list_expansions: RawSymbol, - free_expansions_list: RawSymbol, - expand: RawSymbol, - free_result: RawSymbol, - post_process_callback: RawSymbol, - doc: RawSymbol, - free_doc: RawSymbol, -} - -macro_rules! get_symbol { - ($library:ident, $name:literal, $type:ty) => {{ - let symbol: Symbol<'_, $type> = $library.get($name).context(format!( - "failed to load {} symbol for procedural macro", - stringify!($name) - ))?; - symbol.into_raw() - }}; -} - -impl VTableV0 { - unsafe fn try_new(library: &Library) -> Result { - unsafe { - Ok(VTableV0 { - list_expansions: get_symbol!(library, b"list_expansions_v2\0", ListExpansions), - free_expansions_list: get_symbol!( - library, - b"free_expansions_list_v2\0", - FreeExpansionsList - ), - expand: get_symbol!(library, b"expand_v2\0", ExpandCode), - free_result: get_symbol!(library, b"free_result_v2\0", FreeResult), - post_process_callback: get_symbol!( - library, - b"post_process_callback_v2\0", - PostProcessCallback - ), - doc: get_symbol!(library, b"doc_v2\0", DocExpansion), - free_doc: get_symbol!(library, b"free_doc_v2\0", FreeExpansionDoc), - }) + kind: expansion_kind, } } } - -struct Plugin { - #[allow(dead_code)] - library: Library, - vtable: VTableV0, -} - -impl Plugin { - unsafe fn try_new(library_path: Utf8PathBuf) -> Result { - let library = unsafe { Library::new(library_path)? }; - let vtable = unsafe { VTableV0::try_new(&library)? 
}; - - Ok(Plugin { library, vtable }) - } -} diff --git a/scarb/src/compiler/plugin/proc_macro/v2/host/attribute.rs b/scarb/src/compiler/plugin/proc_macro/v2/host/attribute.rs index 902913b5f..370166ad7 100644 --- a/scarb/src/compiler/plugin/proc_macro/v2/host/attribute.rs +++ b/scarb/src/compiler/plugin/proc_macro/v2/host/attribute.rs @@ -1,10 +1,11 @@ +use crate::compiler::plugin::proc_macro::expansion::{Expansion, ExpansionKind}; use crate::compiler::plugin::proc_macro::v2::host::aux_data::{EmittedAuxData, ProcMacroAuxData}; use crate::compiler::plugin::proc_macro::v2::host::conversion::{ CallSiteLocation, into_cairo_diagnostics, }; use crate::compiler::plugin::proc_macro::v2::host::generate_code_mappings; use crate::compiler::plugin::proc_macro::v2::{ - Expansion, ExpansionKind, ProcMacroHostPlugin, ProcMacroId, TokenStreamBuilder, + ProcMacroHostPlugin, ProcMacroId, TokenStreamBuilder, }; use cairo_lang_defs::patcher::{PatchBuilder, RewriteNode}; use cairo_lang_defs::plugin::PluginDiagnostic; @@ -184,12 +185,16 @@ impl ProcMacroHostPlugin { } }; - let result = self.instance(input.id.package_id).generate_code( - input.id.expansion.name.clone(), - input.call_site.span, - input.args, - token_stream.clone(), - ); + let result = self + .instance(input.id.package_id) + .try_v2() + .expect("procedural macro using v1 api used in a context expecting v2 api") + .generate_code( + input.id.expansion.name.clone(), + input.call_site.span, + input.args, + token_stream.clone(), + ); let expanded = context.register_result( token_stream.to_string(), @@ -367,12 +372,16 @@ impl ProcMacroHostPlugin { call_site: CallSiteLocation, ) -> PluginResult { let original = token_stream.to_string(); - let result = self.instance(input.package_id).generate_code( - input.expansion.name.clone(), - call_site.span, - args, - token_stream, - ); + let result = self + .instance(input.package_id) + .try_v2() + .expect("procedural macro using v1 api used in a context expecting v2 api") + .generate_code( + input.expansion.name.clone(), + call_site.span, + args, + token_stream, + ); // Handle token stream. 
if result.token_stream.is_empty() { diff --git a/scarb/src/compiler/plugin/proc_macro/v2/host/derive.rs b/scarb/src/compiler/plugin/proc_macro/v2/host/derive.rs index 86a3b33d6..9daf545e8 100644 --- a/scarb/src/compiler/plugin/proc_macro/v2/host/derive.rs +++ b/scarb/src/compiler/plugin/proc_macro/v2/host/derive.rs @@ -1,10 +1,11 @@ +use crate::compiler::plugin::proc_macro::expansion::{Expansion, ExpansionKind}; use crate::compiler::plugin::proc_macro::v2::host::aux_data::{EmittedAuxData, ProcMacroAuxData}; use crate::compiler::plugin::proc_macro::v2::host::conversion::{ CallSiteLocation, into_cairo_diagnostics, }; use crate::compiler::plugin::proc_macro::v2::host::{DERIVE_ATTR, generate_code_mappings}; use crate::compiler::plugin::proc_macro::v2::{ - Expansion, ExpansionKind, ProcMacroHostPlugin, ProcMacroId, TokenStreamBuilder, + ProcMacroHostPlugin, ProcMacroId, TokenStreamBuilder, }; use cairo_lang_defs::plugin::{DynGeneratedFileAuxData, PluginGeneratedFile, PluginResult}; use cairo_lang_filesystem::ids::CodeMapping; @@ -93,12 +94,16 @@ impl ProcMacroHostPlugin { let call_site = &derive.call_site; let derive = &derive.id; let token_stream = token_stream_builder.build(&ctx); - let result = self.instance(derive.package_id).generate_code( - derive.expansion.name.clone(), - call_site.span.clone(), - TokenStream::empty(), - token_stream, - ); + let result = self + .instance(derive.package_id) + .try_v2() + .expect("procedural macro using v1 api used in a context expecting v2 api") + .generate_code( + derive.expansion.name.clone(), + call_site.span.clone(), + TokenStream::empty(), + token_stream, + ); // Register diagnostics. all_diagnostics.extend(result.diagnostics); diff --git a/scarb/src/compiler/plugin/proc_macro/v2/host/inline.rs b/scarb/src/compiler/plugin/proc_macro/v2/host/inline.rs index 6491eff3b..028fe3d8f 100644 --- a/scarb/src/compiler/plugin/proc_macro/v2/host/inline.rs +++ b/scarb/src/compiler/plugin/proc_macro/v2/host/inline.rs @@ -1,11 +1,11 @@ +use crate::compiler::plugin::proc_macro::ProcMacroInstance; +use crate::compiler::plugin::proc_macro::expansion::Expansion; use crate::compiler::plugin::proc_macro::v2::host::aux_data::{EmittedAuxData, ProcMacroAuxData}; use crate::compiler::plugin::proc_macro::v2::host::conversion::{ CallSiteLocation, into_cairo_diagnostics, }; use crate::compiler::plugin::proc_macro::v2::host::generate_code_mappings; -use crate::compiler::plugin::proc_macro::v2::{ - Expansion, ProcMacroId, ProcMacroInstance, TokenStreamBuilder, -}; +use crate::compiler::plugin::proc_macro::v2::{ProcMacroId, TokenStreamBuilder}; use cairo_lang_defs::plugin::{ DynGeneratedFileAuxData, InlineMacroExprPlugin, InlinePluginResult, MacroPluginMetadata, PluginGeneratedFile, @@ -54,12 +54,16 @@ impl InlineMacroExprPlugin for ProcMacroInlinePlugin { let mut token_stream_builder = TokenStreamBuilder::new(db); token_stream_builder.add_node(arguments.as_syntax_node()); let token_stream = token_stream_builder.build(&ctx); - let result = self.instance().generate_code( - self.expansion.name.clone(), - call_site.span, - TokenStream::empty(), - token_stream, - ); + let result = self + .instance() + .try_v2() + .expect("procedural macro using v1 api used in a context expecting v2 api") + .generate_code( + self.expansion.name.clone(), + call_site.span, + TokenStream::empty(), + token_stream, + ); // Handle diagnostics. 
let diagnostics = into_cairo_diagnostics(result.diagnostics, call_site.stable_ptr); let token_stream = result.token_stream.clone(); diff --git a/scarb/src/compiler/plugin/proc_macro/v2/host/mod.rs b/scarb/src/compiler/plugin/proc_macro/v2/host/mod.rs index 17c626549..fcea12c68 100644 --- a/scarb/src/compiler/plugin/proc_macro/v2/host/mod.rs +++ b/scarb/src/compiler/plugin/proc_macro/v2/host/mod.rs @@ -9,7 +9,8 @@ use attribute::*; pub use aux_data::ProcMacroAuxData; use inline::*; -use crate::compiler::plugin::proc_macro::v2::{Expansion, ExpansionKind, ProcMacroInstance}; +use crate::compiler::plugin::proc_macro::ProcMacroInstance; +use crate::compiler::plugin::proc_macro::expansion::{Expansion, ExpansionKind}; use crate::core::{PackageId, edition_variant}; use anyhow::{Result, ensure}; use cairo_lang_defs::plugin::{MacroPlugin, MacroPluginMetadata, PluginResult}; diff --git a/scarb/src/compiler/plugin/proc_macro/v2/host/post.rs b/scarb/src/compiler/plugin/proc_macro/v2/host/post.rs index fd35e3545..6678ca798 100644 --- a/scarb/src/compiler/plugin/proc_macro/v2/host/post.rs +++ b/scarb/src/compiler/plugin/proc_macro/v2/host/post.rs @@ -45,7 +45,10 @@ impl ProcMacroHostPlugin { .cloned() .unwrap_or_default(); debug!("calling post processing callback with: {data:?}"); - instance.post_process_callback(data.clone(), markers_for_instance); + instance + .try_v2() + .expect("procedural macro using v1 api used in a context expecting v2 api") + .post_process_callback(data.clone(), markers_for_instance); } Ok(()) } diff --git a/scarb/src/ops/proc_macro_server/methods/expand_attribute.rs b/scarb/src/ops/proc_macro_server/methods/expand_attribute.rs index 1746a6e43..bbeb17c6a 100644 --- a/scarb/src/ops/proc_macro_server/methods/expand_attribute.rs +++ b/scarb/src/ops/proc_macro_server/methods/expand_attribute.rs @@ -4,7 +4,8 @@ use anyhow::{Context, Result}; use scarb_proc_macro_server_types::methods::{ProcMacroResult, expand::ExpandAttribute}; use super::Handler; -use crate::compiler::plugin::{collection::WorkspaceProcMacros, proc_macro::ExpansionKind}; +use crate::compiler::plugin::collection::WorkspaceProcMacros; +use crate::compiler::plugin::proc_macro::ExpansionKind; impl Handler for ExpandAttribute { fn handle( @@ -35,7 +36,10 @@ impl Handler for ExpandAttribute { }) .with_context(|| format!("Unsupported attribute: {attr}"))?; - let result = instance.generate_code(attr.into(), call_site, args, item); + let result = instance + .try_v2() + .expect("procedural macro using v1 api used in a context expecting v2 api") + .generate_code(attr.into(), call_site, args, item); Ok(ProcMacroResult { token_stream: result.token_stream, diff --git a/scarb/src/ops/proc_macro_server/methods/expand_derive.rs b/scarb/src/ops/proc_macro_server/methods/expand_derive.rs index 705698eea..b3f33a430 100644 --- a/scarb/src/ops/proc_macro_server/methods/expand_derive.rs +++ b/scarb/src/ops/proc_macro_server/methods/expand_derive.rs @@ -39,12 +39,15 @@ impl Handler for ExpandDerive { .find(|instance| instance.get_expansions().contains(&expansion)) .with_context(|| format!("Unsupported derive macro: {derive}"))?; - let result = instance.generate_code( - expansion.name.clone(), - call_site.clone(), - TokenStream::empty(), - item.clone(), - ); + let result = instance + .try_v2() + .expect("procedural macro using v1 api used in a context expecting v2 api") + .generate_code( + expansion.name.clone(), + call_site.clone(), + TokenStream::empty(), + item.clone(), + ); // Register diagnostics. 
all_diagnostics.extend(result.diagnostics); diff --git a/scarb/src/ops/proc_macro_server/methods/expand_inline.rs b/scarb/src/ops/proc_macro_server/methods/expand_inline.rs index 14298b49e..9466e3898 100644 --- a/scarb/src/ops/proc_macro_server/methods/expand_inline.rs +++ b/scarb/src/ops/proc_macro_server/methods/expand_inline.rs @@ -35,7 +35,10 @@ impl Handler for ExpandInline { }) .with_context(|| format!("Unsupported inline macro: {name}"))?; - let result = instance.generate_code(name.into(), call_site, TokenStream::empty(), args); + let result = instance + .try_v2() + .expect("procedural macro using v1 api used in a context expecting v2 api") + .generate_code(name.into(), call_site, TokenStream::empty(), args); Ok(ProcMacroResult { token_stream: result.token_stream, From 06c7d837a585429664a94b6fc9857f50f426d7b4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maciej=20Tr=C4=85tnowiecki?= Date: Fri, 14 Mar 2025 00:33:30 +0100 Subject: [PATCH 33/38] Proc macro: use versioned proc macro host --- scarb/src/compiler/db.rs | 7 +- scarb/src/compiler/plugin/collection.rs | 127 +++++++++++++++--- scarb/src/compiler/plugin/proc_macro/host.rs | 114 ++++++++++++++++ .../compiler/plugin/proc_macro/instance.rs | 11 ++ scarb/src/compiler/plugin/proc_macro/mod.rs | 3 +- .../src/compiler/plugin/proc_macro/v1/host.rs | 56 +++----- .../compiler/plugin/proc_macro/v2/host/mod.rs | 57 +++----- .../plugin/proc_macro/v2/host/post.rs | 4 +- .../methods/defined_macros.rs | 40 +++--- .../methods/expand_attribute.rs | 15 ++- .../methods/expand_derive.rs | 14 +- .../methods/expand_inline.rs | 12 +- 12 files changed, 325 insertions(+), 135 deletions(-) create mode 100644 scarb/src/compiler/plugin/proc_macro/host.rs diff --git a/scarb/src/compiler/db.rs b/scarb/src/compiler/db.rs index 274138f29..788b5a99d 100644 --- a/scarb/src/compiler/db.rs +++ b/scarb/src/compiler/db.rs @@ -28,7 +28,7 @@ use tracing::trace; pub struct ScarbDatabase { pub db: RootDatabase, - pub proc_macros: Vec>, + pub proc_macros: Vec, } pub(crate) fn build_scarb_root_database( @@ -64,7 +64,10 @@ pub(crate) fn build_scarb_root_database( apply_plugins(&mut db, plugins); inject_virtual_wrapper_lib(&mut db, unit)?; - let proc_macros = proc_macros.into_values().collect(); + let proc_macros = proc_macros + .into_values() + .flat_map(|hosts| hosts.into_iter()) + .collect(); Ok(ScarbDatabase { db, proc_macros }) } diff --git a/scarb/src/compiler/plugin/collection.rs b/scarb/src/compiler/plugin/collection.rs index f206fecbb..876e74b64 100644 --- a/scarb/src/compiler/plugin/collection.rs +++ b/scarb/src/compiler/plugin/collection.rs @@ -1,23 +1,22 @@ use std::{collections::HashMap, sync::Arc}; -use anyhow::Result; +use anyhow::{Result, ensure}; use cairo_lang_semantic::{inline_macros::get_default_plugin_suite, plugin::PluginSuite}; use itertools::Itertools; use scarb_proc_macro_server_types::scope::CompilationUnitComponent; +use smol_str::SmolStr; +use std::vec::IntoIter; +use super::proc_macro::{DeclaredProcMacroInstances, ProcMacroHostPlugin, ProcMacroInstance}; +use crate::core::PackageId; use crate::{ compiler::{CairoCompilationUnit, CompilationUnitComponentId, CompilationUnitDependency}, core::Workspace, }; -#[cfg(doc)] -use crate::core::PackageId; - -use super::proc_macro::{ProcMacroHostPlugin, ProcMacroInstance}; - pub struct PluginsForComponents { pub plugins: HashMap, - pub proc_macros: HashMap>, + pub proc_macros: HashMap, } impl PluginsForComponents { @@ -28,14 +27,16 @@ impl PluginsForComponents { let proc_macros = collect_proc_macros(workspace, 
unit)? .into_iter() .map(|(component_id, instances)| { - let plugin = Arc::new(ProcMacroHostPlugin::try_new(instances)?); - Ok((component_id, plugin)) + Ok(( + component_id, + ComponentProcMacroHost::try_from_instances(instances)?, + )) }) .collect::>>()?; for (component_id, suite) in plugins.iter_mut() { if let Some(proc_macro) = proc_macros.get(component_id) { - suite.add(ProcMacroHostPlugin::build_plugin_suite(proc_macro.clone())); + suite.add(proc_macro.build_plugin_suite()); } } @@ -54,7 +55,7 @@ pub struct WorkspaceProcMacros { /// Contains IDs of all components of all compilation units from the workspace, /// each mapped to a [`ProcMacroHostPlugin`] which contains /// **all proc macro dependencies of the package** collected from **all compilation units it appears in**. - pub macros_for_components: HashMap>, + pub macros_for_components: HashMap>>, } impl WorkspaceProcMacros { @@ -84,14 +85,14 @@ impl WorkspaceProcMacros { let macros_for_components = macros_for_components .into_iter() .map(|(component, macro_instances)| { - let deduplicated_instances = macro_instances + let deduplicated_instances: Vec> = macro_instances .into_iter() .unique_by(|instance| instance.package_id()) .collect(); - - let plugin = Arc::new(ProcMacroHostPlugin::try_new(deduplicated_instances)?); - - Ok((component, plugin)) + let proc_macros = + ComponentProcMacroHost::try_from_instances(deduplicated_instances)?; + let proc_macros: Vec = proc_macros.into(); + Ok((component, Arc::new(proc_macros))) }) .collect::>>()?; @@ -100,8 +101,13 @@ impl WorkspaceProcMacros { }) } - /// Returns a [`ProcMacroHostPlugin`] assigned to the [`CompilationUnitComponent`]. - pub fn get(&self, component: &CompilationUnitComponent) -> Option> { + /// Returns a vector of [`ProcMacroHostPlugin`]s assigned to the [`CompilationUnitComponent`]. + /// + /// Proc macro instances should be grouped into separate plugins by macro api version used. + pub fn get( + &self, + component: &CompilationUnitComponent, + ) -> Option>> { self.macros_for_components.get(component).cloned() } } @@ -172,7 +178,7 @@ fn collect_proc_macros( continue; } - let proc_macro = plugin.prebuilt.clone().map(Result::Ok).unwrap_or_else(|| { + let proc_macro = plugin.prebuilt.clone().map(Ok).unwrap_or_else(|| { proc_macro_repository.get_or_load(plugin.package.clone(), workspace.config()) })?; @@ -184,3 +190,86 @@ fn collect_proc_macros( Ok(proc_macros_for_components) } + +pub struct ComponentProcMacroHost(Vec); + +impl ComponentProcMacroHost { + pub fn try_new(hosts: Vec) -> Result { + struct MacroId { + package_id: PackageId, + expansion_name: SmolStr, + } + + // Validate expansions across hosts. 
+ let mut expansions = hosts + .iter() + .flat_map(|host| host.instances()) + .flat_map(|m| { + m.get_expansions() + .iter() + .map(|e| MacroId { + package_id: m.package_id(), + expansion_name: e.name.clone(), + }) + .collect_vec() + }) + .collect::>(); + expansions.sort_unstable_by_key(|e| (e.expansion_name.clone(), e.package_id)); + ensure!( + expansions + .windows(2) + .all(|w| w[0].expansion_name != w[1].expansion_name), + "duplicate expansions defined for procedural macros: {duplicates}", + duplicates = expansions + .windows(2) + .filter(|w| w[0].expansion_name == w[1].expansion_name) + .map(|w| format!( + "{} ({} and {})", + w[0].expansion_name.as_str(), + w[0].package_id, + w[1].package_id + )) + .collect::>() + .join(", ") + ); + Ok(Self(hosts)) + } + + pub fn try_from_instances(instances: Vec>) -> Result { + let instances = instances + .into_iter() + .sorted_by_key(|instance| instance.api_version()) + .chunk_by(|instance| instance.api_version()); + let plugins = instances + .into_iter() + .map(|(api_version, instances)| { + let instances: Vec> = instances.collect_vec(); + ProcMacroHostPlugin::try_new(instances, api_version) + }) + .collect::>>()?; + Self::try_new(plugins) + } + + pub fn build_plugin_suite(&self) -> PluginSuite { + let mut suite = PluginSuite::default(); + for host in self.0.iter() { + suite.add(host.build_plugin_suite()); + } + suite + } +} + +impl IntoIterator for ComponentProcMacroHost { + type Item = ProcMacroHostPlugin; + type IntoIter = IntoIter; + + fn into_iter(self) -> IntoIter { + self.0.into_iter() + } +} + +impl From for Vec { + fn from(host: ComponentProcMacroHost) -> Self { + host.0 + } +} diff --git a/scarb/src/compiler/plugin/proc_macro/host.rs b/scarb/src/compiler/plugin/proc_macro/host.rs new file mode 100644 index 000000000..36d869714 --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/host.rs @@ -0,0 +1,114 @@ +use crate::compiler::plugin::proc_macro::ProcMacroInstance; +use crate::compiler::plugin::{ProcMacroApiVersion, proc_macro}; +use anyhow::Result; +use cairo_lang_semantic::db::SemanticGroup; +use cairo_lang_semantic::plugin::PluginSuite; +use convert_case::{Case, Casing}; +use itertools::Itertools; +use std::sync::Arc; + +pub const FULL_PATH_MARKER_KEY: &str = "macro::full_path_marker"; + +pub enum ProcMacroHostPlugin { + V1(Arc), + V2(Arc), +} + +impl ProcMacroHostPlugin { + pub fn try_new( + instances: Vec>, + api_version: ProcMacroApiVersion, + ) -> Result { + assert!( + instances + .iter() + .map(|instance| instance.api_version()) + .all_equal(), + "all proc macro instances in a single host must use the same API version" + ); + Ok(match api_version { + ProcMacroApiVersion::V1 => Self::V1(Arc::new( + proc_macro::v1::ProcMacroHostPlugin::try_new(instances)?, + )), + ProcMacroApiVersion::V2 => Self::V2(Arc::new( + proc_macro::v2::ProcMacroHostPlugin::try_new(instances)?, + )), + }) + } + + pub fn post_process(&self, db: &dyn SemanticGroup) -> Result<()> { + match self { + ProcMacroHostPlugin::V1(host) => host.post_process(db), + ProcMacroHostPlugin::V2(host) => host.post_process(db), + } + } + + pub fn build_plugin_suite(&self) -> PluginSuite { + match self { + ProcMacroHostPlugin::V1(host) => { + proc_macro::v1::ProcMacroHostPlugin::build_plugin_suite(host.clone()) + } + ProcMacroHostPlugin::V2(host) => { + proc_macro::v2::ProcMacroHostPlugin::build_plugin_suite(host.clone()) + } + } + } + + pub fn api_version(&self) -> ProcMacroApiVersion { + match self { + ProcMacroHostPlugin::V1(_) => ProcMacroApiVersion::V1, + 
ProcMacroHostPlugin::V2(_) => ProcMacroApiVersion::V2, + } + } +} + +pub trait DeclaredProcMacroInstances { + fn instances(&self) -> &[Arc]; + + // NOTE: Required for proc macro server. `::declared_attributes` + // returns attributes **and** executables. In PMS, we only need the former because the latter is handled separately. + fn declared_attributes_without_executables(&self) -> Vec { + self.instances() + .iter() + .flat_map(|instance| instance.declared_attributes()) + .collect() + } + + fn declared_inline_macros(&self) -> Vec { + self.instances() + .iter() + .flat_map(|instance| instance.inline_macros()) + .collect() + } + + fn declared_derives(&self) -> Vec { + self.instances() + .iter() + .flat_map(|m| m.declared_derives()) + .map(|s| s.to_case(Case::UpperCamel)) + .collect() + } + + fn executable_attributes(&self) -> Vec { + self.instances() + .iter() + .flat_map(|m| m.executable_attributes()) + .collect() + } + fn declared_attributes(&self) -> Vec { + self.instances() + .iter() + .flat_map(|m| m.declared_attributes_and_executables()) + .chain(vec![FULL_PATH_MARKER_KEY.to_string()]) + .collect() + } +} + +impl DeclaredProcMacroInstances for ProcMacroHostPlugin { + fn instances(&self) -> &[Arc] { + match self { + ProcMacroHostPlugin::V1(host) => host.instances(), + ProcMacroHostPlugin::V2(host) => host.instances(), + } + } +} diff --git a/scarb/src/compiler/plugin/proc_macro/instance.rs b/scarb/src/compiler/plugin/proc_macro/instance.rs index dc2c16b8c..758f301be 100644 --- a/scarb/src/compiler/plugin/proc_macro/instance.rs +++ b/scarb/src/compiler/plugin/proc_macro/instance.rs @@ -154,6 +154,10 @@ impl ProcMacroInstance { ) }) } + + pub fn api_version(&self) -> ProcMacroApiVersion { + self.plugin().api_version() + } } /// This struct provides a unified interface for both v1 and v2 proc macro plugins. 
@@ -211,4 +215,11 @@ impl VersionedPlugin { _ => None, } } + + pub fn api_version(&self) -> ProcMacroApiVersion { + match self { + VersionedPlugin::V1(_) => ProcMacroApiVersion::V1, + VersionedPlugin::V2(_) => ProcMacroApiVersion::V2, + } + } } diff --git a/scarb/src/compiler/plugin/proc_macro/mod.rs b/scarb/src/compiler/plugin/proc_macro/mod.rs index 40c55c413..dc00aea19 100644 --- a/scarb/src/compiler/plugin/proc_macro/mod.rs +++ b/scarb/src/compiler/plugin/proc_macro/mod.rs @@ -1,5 +1,6 @@ pub mod compilation; mod expansion; +mod host; mod instance; mod repository; mod shared_library_provider; @@ -8,7 +9,7 @@ pub mod v2; pub use compilation::{check_unit, compile_unit, fetch_crate}; pub use expansion::*; +pub use host::*; pub use instance::*; pub use repository::*; pub use shared_library_provider::SharedLibraryProvider; -pub use v2::ProcMacroHostPlugin; diff --git a/scarb/src/compiler/plugin/proc_macro/v1/host.rs b/scarb/src/compiler/plugin/proc_macro/v1/host.rs index 47023a8f7..d49e4652d 100644 --- a/scarb/src/compiler/plugin/proc_macro/v1/host.rs +++ b/scarb/src/compiler/plugin/proc_macro/v1/host.rs @@ -1,6 +1,8 @@ -use crate::compiler::plugin::proc_macro::ProcMacroInstance; use crate::compiler::plugin::proc_macro::expansion::{Expansion, ExpansionKind}; use crate::compiler::plugin::proc_macro::v1::FromSyntaxNode; +use crate::compiler::plugin::proc_macro::{ + DeclaredProcMacroInstances, FULL_PATH_MARKER_KEY, ProcMacroInstance, +}; use crate::core::PackageId; use anyhow::{Result, ensure}; use cairo_lang_defs::ids::{ModuleItemId, TopLevelLanguageElementId}; @@ -38,7 +40,6 @@ use std::sync::{Arc, OnceLock, RwLock}; use std::vec::IntoIter; use tracing::{debug, trace_span}; -const FULL_PATH_MARKER_KEY: &str = "macro::full_path_marker"; const DERIVE_ATTR: &str = "derive"; /// A Cairo compiler plugin controlling the procedural macro execution. @@ -47,29 +48,13 @@ const DERIVE_ATTR: &str = "derive"; /// It then redirects the item to the appropriate macro plugin for code expansion. #[derive(Debug)] pub struct ProcMacroHostPlugin { - macros: Vec>, + instances: Vec>, full_path_markers: RwLock>>, } -impl ProcMacroHostPlugin { - pub fn macros(&self) -> &[Arc] { - &self.macros - } - - // NOTE: Required for proc macro server. `::declared_attributes` - // returns attributes **and** executables. In PMS, we only need the former because the latter is handled separately. - pub fn declared_attributes_without_executables(&self) -> Vec { - self.macros - .iter() - .flat_map(|instance| instance.declared_attributes()) - .collect() - } - - pub fn declared_inline_macros(&self) -> Vec { - self.macros - .iter() - .flat_map(|instance| instance.inline_macros()) - .collect() +impl DeclaredProcMacroInstances for ProcMacroHostPlugin { + fn instances(&self) -> &[Arc] { + &self.instances } } @@ -173,7 +158,7 @@ impl ProcMacroHostPlugin { .join(", ") ); Ok(Self { - macros, + instances: macros, full_path_markers: RwLock::new(Default::default()), }) } @@ -721,7 +706,7 @@ impl ProcMacroHostPlugin { } fn find_expansion(&self, expansion: &Expansion) -> Option { - self.macros + self.instances .iter() .find(|m| m.get_expansions().contains(expansion)) .map(|m| m.package_id()) @@ -731,7 +716,7 @@ impl ProcMacroHostPlugin { pub fn build_plugin_suite(macro_host: Arc) -> PluginSuite { let mut suite = PluginSuite::default(); // Register inline macro plugins. 
- for proc_macro in ¯o_host.macros { + for proc_macro in ¯o_host.instances { let expansions = proc_macro .get_expansions() .iter() @@ -754,7 +739,7 @@ impl ProcMacroHostPlugin { let markers = self.collect_full_path_markers(db); let aux_data = self.collect_aux_data(db); - for instance in self.macros.iter() { + for instance in self.instances.iter() { let _ = trace_span!( "post_process_callback", instance = %instance.package_id() @@ -866,7 +851,7 @@ impl ProcMacroHostPlugin { } pub fn instance(&self, package_id: PackageId) -> &ProcMacroInstance { - self.macros + self.instances .iter() .find(|m| m.package_id() == package_id) .expect("procedural macro must be registered in proc macro host") @@ -1025,26 +1010,15 @@ impl MacroPlugin for ProcMacroHostPlugin { } fn declared_attributes(&self) -> Vec { - self.macros - .iter() - .flat_map(|m| m.declared_attributes_and_executables()) - .chain(vec![FULL_PATH_MARKER_KEY.to_string()]) - .collect() + DeclaredProcMacroInstances::declared_attributes(self) } fn declared_derives(&self) -> Vec { - self.macros - .iter() - .flat_map(|m| m.declared_derives()) - .map(|s| s.to_case(Case::UpperCamel)) - .collect() + DeclaredProcMacroInstances::declared_derives(self) } fn executable_attributes(&self) -> Vec { - self.macros - .iter() - .flat_map(|m| m.executable_attributes()) - .collect() + DeclaredProcMacroInstances::executable_attributes(self) } } diff --git a/scarb/src/compiler/plugin/proc_macro/v2/host/mod.rs b/scarb/src/compiler/plugin/proc_macro/v2/host/mod.rs index fcea12c68..23cfc442a 100644 --- a/scarb/src/compiler/plugin/proc_macro/v2/host/mod.rs +++ b/scarb/src/compiler/plugin/proc_macro/v2/host/mod.rs @@ -9,8 +9,8 @@ use attribute::*; pub use aux_data::ProcMacroAuxData; use inline::*; -use crate::compiler::plugin::proc_macro::ProcMacroInstance; use crate::compiler::plugin::proc_macro::expansion::{Expansion, ExpansionKind}; +use crate::compiler::plugin::proc_macro::{DeclaredProcMacroInstances, ProcMacroInstance}; use crate::core::{PackageId, edition_variant}; use anyhow::{Result, ensure}; use cairo_lang_defs::plugin::{MacroPlugin, MacroPluginMetadata, PluginResult}; @@ -21,14 +21,12 @@ use cairo_lang_macro::{AllocationContext, TokenStream, TokenStreamMetadata, Toke use cairo_lang_semantic::plugin::PluginSuite; use cairo_lang_syntax::node::db::SyntaxGroup; use cairo_lang_syntax::node::{TypedStablePtr, TypedSyntaxNode, ast}; -use convert_case::{Case, Casing}; use itertools::Itertools; use scarb_stable_hash::short_hash; use std::collections::HashMap; use std::fmt::Debug; use std::sync::{Arc, RwLock}; -const FULL_PATH_MARKER_KEY: &str = "macro::full_path_marker"; const DERIVE_ATTR: &str = "derive"; /// A Cairo compiler plugin controlling the procedural macro execution. @@ -37,10 +35,16 @@ const DERIVE_ATTR: &str = "derive"; /// It then redirects the item to the appropriate macro plugin for code expansion. 
#[derive(Debug)] pub struct ProcMacroHostPlugin { - macros: Vec>, + instances: Vec>, full_path_markers: RwLock>>, } +impl DeclaredProcMacroInstances for ProcMacroHostPlugin { + fn instances(&self) -> &[Arc] { + &self.instances + } +} + #[derive(Debug, Clone, Eq, PartialEq)] pub struct ProcMacroId { pub package_id: PackageId, @@ -87,13 +91,13 @@ impl ProcMacroHostPlugin { .join(", ") ); Ok(Self { - macros, + instances: macros, full_path_markers: RwLock::new(Default::default()), }) } fn find_expansion(&self, expansion: &Expansion) -> Option { - self.macros + self.instances() .iter() .find(|m| m.get_expansions().contains(expansion)) .map(|m| m.package_id()) @@ -103,7 +107,7 @@ impl ProcMacroHostPlugin { pub fn build_plugin_suite(macro_host: Arc) -> PluginSuite { let mut suite = PluginSuite::default(); // Register inline macro plugins. - for proc_macro in ¯o_host.macros { + for proc_macro in ¯o_host.instances { let expansions = proc_macro .get_expansions() .iter() @@ -122,7 +126,7 @@ impl ProcMacroHostPlugin { } pub fn instance(&self, package_id: PackageId) -> &ProcMacroInstance { - self.macros + self.instances .iter() .find(|m| m.package_id() == package_id) .expect("procedural macro must be registered in proc macro host") @@ -139,26 +143,6 @@ impl ProcMacroHostPlugin { let edition = edition_variant(edition); TokenStreamMetadata::new(file_path, file_id, edition) } - - pub fn macros(&self) -> &[Arc] { - &self.macros - } - - // NOTE: Required for proc macro server. `::declared_attributes` - // returns attributes **and** executables. In PMS, we only need the former because the latter is handled separately. - pub fn declared_attributes_without_executables(&self) -> Vec { - self.macros - .iter() - .flat_map(|instance| instance.declared_attributes()) - .collect() - } - - pub fn declared_inline_macros(&self) -> Vec { - self.macros - .iter() - .flat_map(|instance| instance.inline_macros()) - .collect() - } } impl MacroPlugin for ProcMacroHostPlugin { @@ -209,26 +193,15 @@ impl MacroPlugin for ProcMacroHostPlugin { } fn declared_attributes(&self) -> Vec { - self.macros - .iter() - .flat_map(|m| m.declared_attributes_and_executables()) - .chain(vec![FULL_PATH_MARKER_KEY.to_string()]) - .collect() + DeclaredProcMacroInstances::declared_attributes(self) } fn declared_derives(&self) -> Vec { - self.macros - .iter() - .flat_map(|m| m.declared_derives()) - .map(|s| s.to_case(Case::UpperCamel)) - .collect() + DeclaredProcMacroInstances::declared_derives(self) } fn executable_attributes(&self) -> Vec { - self.macros - .iter() - .flat_map(|m| m.executable_attributes()) - .collect() + DeclaredProcMacroInstances::executable_attributes(self) } } diff --git a/scarb/src/compiler/plugin/proc_macro/v2/host/post.rs b/scarb/src/compiler/plugin/proc_macro/v2/host/post.rs index 6678ca798..da296a3e1 100644 --- a/scarb/src/compiler/plugin/proc_macro/v2/host/post.rs +++ b/scarb/src/compiler/plugin/proc_macro/v2/host/post.rs @@ -1,5 +1,5 @@ +use crate::compiler::plugin::proc_macro::FULL_PATH_MARKER_KEY; use crate::compiler::plugin::proc_macro::v2::ProcMacroHostPlugin; -use crate::compiler::plugin::proc_macro::v2::host::FULL_PATH_MARKER_KEY; use crate::core::PackageId; use anyhow::Result; use cairo_lang_defs::ids::{ModuleItemId, TopLevelLanguageElementId}; @@ -19,7 +19,7 @@ impl ProcMacroHostPlugin { let markers = self.collect_full_path_markers(db); let aux_data = self.collect_aux_data(db); - for instance in self.macros.iter() { + for instance in self.instances.iter() { let _ = trace_span!( "post_process_callback", instance = 
%instance.package_id() diff --git a/scarb/src/ops/proc_macro_server/methods/defined_macros.rs b/scarb/src/ops/proc_macro_server/methods/defined_macros.rs index 15fc5815b..efa64ebe1 100644 --- a/scarb/src/ops/proc_macro_server/methods/defined_macros.rs +++ b/scarb/src/ops/proc_macro_server/methods/defined_macros.rs @@ -1,14 +1,15 @@ use std::sync::Arc; use anyhow::Result; -use cairo_lang_defs::plugin::MacroPlugin; use convert_case::{Case, Casing}; +use itertools::Itertools; use scarb_proc_macro_server_types::methods::defined_macros::{ CompilationUnitComponentMacros, DefinedMacros, DefinedMacrosResponse, }; use super::Handler; use crate::compiler::plugin::collection::WorkspaceProcMacros; +use crate::compiler::plugin::proc_macro::DeclaredProcMacroInstances; impl Handler for DefinedMacros { fn handle( @@ -18,23 +19,28 @@ impl Handler for DefinedMacros { let macros_for_cu_components = workspace_macros .macros_for_components .iter() - .map(|(component, plugin)| { - let attributes = plugin.declared_attributes_without_executables(); - let inline_macros = plugin.declared_inline_macros(); - let derives = plugin - .declared_derives() - .into_iter() - .map(|name| name.to_case(Case::Snake)) - .collect(); - let executables = plugin.executable_attributes(); + .flat_map(|(component, plugin)| { + plugin + .iter() + .map(|plugin| { + let attributes = plugin.declared_attributes_without_executables(); + let inline_macros = plugin.declared_inline_macros(); + let derives = plugin + .declared_derives() + .into_iter() + .map(|name| name.to_case(Case::Snake)) + .collect(); + let executables = plugin.executable_attributes(); - CompilationUnitComponentMacros { - component: component.to_owned(), - attributes, - inline_macros, - derives, - executables, - } + CompilationUnitComponentMacros { + component: component.to_owned(), + attributes, + inline_macros, + derives, + executables, + } + }) + .collect_vec() }) .collect(); diff --git a/scarb/src/ops/proc_macro_server/methods/expand_attribute.rs b/scarb/src/ops/proc_macro_server/methods/expand_attribute.rs index bbeb17c6a..a8879accd 100644 --- a/scarb/src/ops/proc_macro_server/methods/expand_attribute.rs +++ b/scarb/src/ops/proc_macro_server/methods/expand_attribute.rs @@ -5,7 +5,9 @@ use scarb_proc_macro_server_types::methods::{ProcMacroResult, expand::ExpandAttr use super::Handler; use crate::compiler::plugin::collection::WorkspaceProcMacros; -use crate::compiler::plugin::proc_macro::ExpansionKind; +use crate::compiler::plugin::proc_macro::{ + DeclaredProcMacroInstances, ExpansionKind, ProcMacroApiVersion, +}; impl Handler for ExpandAttribute { fn handle( @@ -20,12 +22,17 @@ impl Handler for ExpandAttribute { call_site, } = params; - let plugin = workspace_macros - .get(&context.component) + let plugin = workspace_macros.get(&context.component); + let plugin = plugin + .as_ref() + .and_then(|v| { + v.iter() + .find(|a| a.api_version() == ProcMacroApiVersion::V2) + }) .with_context(|| format!("No macros found in scope: {context:?}"))?; let instance = plugin - .macros() + .instances() .iter() .find(|instance| { instance diff --git a/scarb/src/ops/proc_macro_server/methods/expand_derive.rs b/scarb/src/ops/proc_macro_server/methods/expand_derive.rs index b3f33a430..b160febca 100644 --- a/scarb/src/ops/proc_macro_server/methods/expand_derive.rs +++ b/scarb/src/ops/proc_macro_server/methods/expand_derive.rs @@ -6,6 +6,7 @@ use convert_case::{Case, Casing}; use scarb_proc_macro_server_types::methods::{ProcMacroResult, expand::ExpandDerive}; use super::Handler; +use 
crate::compiler::plugin::proc_macro::{DeclaredProcMacroInstances, ProcMacroApiVersion}; use crate::compiler::plugin::{ collection::WorkspaceProcMacros, proc_macro::{Expansion, ExpansionKind}, @@ -29,12 +30,17 @@ impl Handler for ExpandDerive { for derive in derives { let expansion = Expansion::new(derive.to_case(Case::Snake), ExpansionKind::Derive); - let plugin = workspace_macros - .get(&context.component) - .with_context(|| format!("No macros found in scope {context:?}"))?; + let plugin = workspace_macros.get(&context.component); + let plugin = plugin + .as_ref() + .and_then(|v| { + v.iter() + .find(|a| a.api_version() == ProcMacroApiVersion::V2) + }) + .with_context(|| format!("No macros found in scope: {context:?}"))?; let instance = plugin - .macros() + .instances() .iter() .find(|instance| instance.get_expansions().contains(&expansion)) .with_context(|| format!("Unsupported derive macro: {derive}"))?; diff --git a/scarb/src/ops/proc_macro_server/methods/expand_inline.rs b/scarb/src/ops/proc_macro_server/methods/expand_inline.rs index 9466e3898..9220b9a48 100644 --- a/scarb/src/ops/proc_macro_server/methods/expand_inline.rs +++ b/scarb/src/ops/proc_macro_server/methods/expand_inline.rs @@ -5,6 +5,7 @@ use cairo_lang_macro::TokenStream; use scarb_proc_macro_server_types::methods::{ProcMacroResult, expand::ExpandInline}; use super::Handler; +use crate::compiler::plugin::proc_macro::{DeclaredProcMacroInstances, ProcMacroApiVersion}; use crate::compiler::plugin::{collection::WorkspaceProcMacros, proc_macro::ExpansionKind}; impl Handler for ExpandInline { @@ -19,12 +20,17 @@ impl Handler for ExpandInline { call_site, } = params; - let plugin = workspace_macros - .get(&context.component) + let plugin = workspace_macros.get(&context.component); + let plugin = plugin + .as_ref() + .and_then(|v| { + v.iter() + .find(|a| a.api_version() == ProcMacroApiVersion::V2) + }) .with_context(|| format!("No macros found in scope: {context:?}"))?; let instance = plugin - .macros() + .instances() .iter() .find(|instance| { instance From f17aac2152496215b594843cd89bb3391ee5efa7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maciej=20Tr=C4=85tnowiecki?= Date: Fri, 14 Mar 2025 01:10:35 +0100 Subject: [PATCH 34/38] Proc macro: Enable v1 tests --- scarb/tests/proc_macro_v1.rs | 29 ---- scarb/tests/proc_macro_v1_and_v2.rs | 6 +- scarb/tests/proc_macro_v1_prebuilt.rs | 195 ++++++++++++++++++++++++++ 3 files changed, 197 insertions(+), 33 deletions(-) create mode 100644 scarb/tests/proc_macro_v1_prebuilt.rs diff --git a/scarb/tests/proc_macro_v1.rs b/scarb/tests/proc_macro_v1.rs index 5c10c7ebf..6147b68b0 100644 --- a/scarb/tests/proc_macro_v1.rs +++ b/scarb/tests/proc_macro_v1.rs @@ -72,7 +72,6 @@ fn check_cairo_plugin() { } #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn can_check_cairo_project_with_plugins() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); @@ -107,7 +106,6 @@ fn can_check_cairo_project_with_plugins() { } #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn resolve_fetched_plugins() { let t = TempDir::new().unwrap(); CairoPluginProjectBuilder::default_v1().build(&t); @@ -128,7 +126,6 @@ fn resolve_fetched_plugins() { } #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn can_use_json_output() { let t = TempDir::new().unwrap(); CairoPluginProjectBuilder::default_v1().build(&t); @@ -163,7 +160,6 @@ fn can_use_json_output() { } #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn compile_cairo_plugin_with_lib_target() { let t = 
TempDir::new().unwrap(); ProjectBuilder::start() @@ -189,7 +185,6 @@ fn compile_cairo_plugin_with_lib_target() { } #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn compile_cairo_plugin_with_other_target() { let t = TempDir::new().unwrap(); ProjectBuilder::start() @@ -215,7 +210,6 @@ fn compile_cairo_plugin_with_other_target() { } #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn can_emit_plugin_warning() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); @@ -262,7 +256,6 @@ fn can_emit_plugin_warning() { } #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn can_emit_plugin_error() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); @@ -309,7 +302,6 @@ fn can_emit_plugin_error() { } #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn diags_from_generated_code_mapped_correctly() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); @@ -364,7 +356,6 @@ fn diags_from_generated_code_mapped_correctly() { } #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn can_remove_original_node() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); @@ -413,7 +404,6 @@ fn can_remove_original_node() { } #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn can_replace_original_node() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); @@ -462,7 +452,6 @@ fn can_replace_original_node() { } #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn can_return_aux_data_from_plugin() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); @@ -533,7 +522,6 @@ fn can_return_aux_data_from_plugin() { } #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn can_read_token_stream_metadata() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); @@ -584,7 +572,6 @@ fn can_read_token_stream_metadata() { } #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn can_define_multiple_macros() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); @@ -680,7 +667,6 @@ fn can_define_multiple_macros() { } #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn cannot_duplicate_macros() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); @@ -721,7 +707,6 @@ fn cannot_duplicate_macros() { } #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn cannot_duplicate_macros_across_packages() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); @@ -799,7 +784,6 @@ fn cannot_duplicate_macros_across_packages() { } #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn cannot_use_undefined_macro() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); @@ -844,7 +828,6 @@ fn cannot_use_undefined_macro() { } #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn can_resolve_full_path_markers() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); @@ -900,7 +883,6 @@ fn can_resolve_full_path_markers() { } #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn can_implement_inline_macro() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); @@ -947,7 +929,6 @@ fn can_implement_inline_macro() { } #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn empty_inline_macro_result() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); @@ -994,7 +975,6 @@ fn empty_inline_macro_result() { } #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn can_implement_derive_macro() { let temp = TempDir::new().unwrap(); let t = 
temp.child("some"); @@ -1070,7 +1050,6 @@ fn can_implement_derive_macro() { } #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn can_use_both_derive_and_attr() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); @@ -1153,7 +1132,6 @@ fn can_use_both_derive_and_attr() { } #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn can_read_attribute_args() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); @@ -1203,7 +1181,6 @@ fn can_read_attribute_args() { } #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn can_create_executable_attribute() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); @@ -1262,7 +1239,6 @@ fn can_create_executable_attribute() { } #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn executable_name_cannot_clash_attr() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); @@ -1306,7 +1282,6 @@ fn executable_name_cannot_clash_attr() { } #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn can_be_expanded() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); @@ -1416,7 +1391,6 @@ fn can_be_expanded() { } #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn can_expand_trait_inner_func_attrr() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); @@ -1479,7 +1453,6 @@ fn can_expand_trait_inner_func_attrr() { } #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn can_expand_impl_inner_func_attrr() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); @@ -1556,7 +1529,6 @@ fn can_expand_impl_inner_func_attrr() { use super::{IHello, Hello, IHelloDispatcher, IHelloDispatcherTrait}; #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn test_flow() { let calldata = array![100]; let (address0, _) = deploy_syscall( @@ -1593,7 +1565,6 @@ fn can_expand_impl_inner_func_attrr() { } #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn can_be_used_through_re_export() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); diff --git a/scarb/tests/proc_macro_v1_and_v2.rs b/scarb/tests/proc_macro_v1_and_v2.rs index 6bc57c442..899b24eae 100644 --- a/scarb/tests/proc_macro_v1_and_v2.rs +++ b/scarb/tests/proc_macro_v1_and_v2.rs @@ -6,7 +6,6 @@ use scarb_test_support::command::Scarb; use scarb_test_support::project_builder::ProjectBuilder; #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn can_use_both_v1_and_v2_proc_macros() { let temp = TempDir::new().unwrap(); let foo = temp.child("foo"); @@ -30,7 +29,7 @@ fn can_use_both_v1_and_v2_proc_macros() { CairoPluginProjectBuilder::default() .name("bar") .lib_rs(indoc! {r##" - use cairo_lang_macro_v2::{ProcMacroResult, TokenStream, attribute_macro, TokenTree, Token, TextSpan}; + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, TokenTree, Token, TextSpan}; #[attribute_macro] pub fn bar(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { @@ -87,7 +86,6 @@ fn can_use_both_v1_and_v2_proc_macros() { } #[test] -#[ignore = "TODO(maciektr): support old macro api"] fn v1_and_v2_macros_cannot_duplicate_expansions() { let temp = TempDir::new().unwrap(); let foo = temp.child("foo"); @@ -111,7 +109,7 @@ fn v1_and_v2_macros_cannot_duplicate_expansions() { CairoPluginProjectBuilder::default() .name("bar") .lib_rs(indoc! 
{r##" - use cairo_lang_macro_v2::{ProcMacroResult, TokenStream, attribute_macro, TokenTree, Token, TextSpan}; + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, TokenTree, Token, TextSpan}; #[attribute_macro] pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { diff --git a/scarb/tests/proc_macro_v1_prebuilt.rs b/scarb/tests/proc_macro_v1_prebuilt.rs new file mode 100644 index 000000000..e13790799 --- /dev/null +++ b/scarb/tests/proc_macro_v1_prebuilt.rs @@ -0,0 +1,195 @@ +use assert_fs::TempDir; +use assert_fs::fixture::{ChildPath, FileWriteStr, PathCreateDir}; +use assert_fs::prelude::PathChild; +use indoc::indoc; +use libloading::library_filename; +use scarb_test_support::cairo_plugin_project_builder::CairoPluginProjectBuilder; +use scarb_test_support::command::Scarb; +use scarb_test_support::project_builder::ProjectBuilder; +use scarb_test_support::workspace_builder::WorkspaceBuilder; +use snapbox::cmd::Command; +use std::fs; + +static TRIPLETS: [(&str, &str); 4] = [ + ("aarch64-apple-darwin", ".dylib"), + ("x86_64-apple-darwin", ".dylib"), + ("x86_64-unknown-linux-gnu", ".so"), + ("x86_64-pc-windows-msvc", ".dll"), +]; + +fn proc_macro_example(t: &ChildPath) { + let name = "proc_macro_example"; + let version = "0.1.0"; + CairoPluginProjectBuilder::default_v1() + .name(name) + .version(version) + .lib_rs(indoc! {r#" + use cairo_lang_macro::{ProcMacroResult, TokenStream, inline_macro}; + #[inline_macro] + pub fn some(token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(token_stream) + } + "#}) + .build(t); + let dll_filename = library_filename(name); + let dll_filename = dll_filename.to_string_lossy().to_string(); + let build_dir = t.child("cargo_build_dir"); + Command::new("cargo") + .arg("build") + .arg("--release") + .env("CARGO_TARGET_DIR", build_dir.path()) + .current_dir(t) + .assert() + .success(); + t.child("target/scarb/cairo-plugin") + .create_dir_all() + .unwrap(); + for (target, extension) in TRIPLETS { + let target_name = format!("{name}_v{version}_{target}{extension}"); + fs::copy( + build_dir.child("release").child(dll_filename.clone()), + t.child("target/scarb/cairo-plugin/").child(target_name), + ) + .unwrap(); + } +} + +#[test] +fn compile_with_prebuilt_plugins() { + let t = TempDir::new().unwrap(); + proc_macro_example(&t.child("dep")); + let builder = |name: &str| { + ProjectBuilder::start() + .name(name) + .lib_cairo(indoc! {r#" + fn main() -> u32 { + let x = some!(42); + x + } + "#}) + .dep("proc_macro_example", t.child("dep")) + .manifest_extra(indoc! {r#" + [tool.scarb] + allow-prebuilt-plugins = ["proc_macro_example"] + "#}) + }; + builder("a").build(&t.child("a")); + builder("b").build(&t.child("b")); + WorkspaceBuilder::start() + .add_member("a") + .add_member("b") + .build(&t); + Scarb::quick_snapbox() + .arg("build") + // Disable Cargo and Rust compiler. + .env("CARGO", "/bin/false") + .env("RUSTC", "/bin/false") + .current_dir(&t) + .assert() + .success() + .stdout_matches(indoc! {r#" + [..]Compiling a v1.0.0 ([..]Scarb.toml) + [..]Compiling b v1.0.0 ([..]Scarb.toml) + [..] Finished `dev` profile target(s) in [..] + "#}); +} + +#[test] +fn compile_with_prebuilt_plugins_only_one_allows() { + let t = TempDir::new().unwrap(); + proc_macro_example(&t.child("dep")); + let builder = |name: &str, allow: bool| { + let b = ProjectBuilder::start() + .name(name) + .lib_cairo(indoc! 
{r#" + fn main() -> u32 { + let x = some!(42); + x + } + "#}) + .dep("proc_macro_example", t.child("dep")); + if allow { + b.manifest_extra(indoc! {r#" + [tool.scarb] + allow-prebuilt-plugins = ["proc_macro_example"] + "#}) + } else { + b + } + }; + builder("a", true).build(&t.child("a")); + builder("b", false).build(&t.child("b")); + WorkspaceBuilder::start() + .add_member("a") + .add_member("b") + .build(&t); + Scarb::quick_snapbox() + .arg("build") + .current_dir(&t) + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .assert() + .success() + .stdout_matches(indoc! {r#" + [..]Compiling proc_macro_example v0.1.0 ([..]) + [..]Compiling a v1.0.0 ([..]Scarb.toml) + [..]Compiling b v1.0.0 ([..]Scarb.toml) + [..] Finished `dev` profile target(s) in [..] + "#}); +} + +fn invalid_prebuilt_project(t: &ChildPath) { + let name = "invalid_prebuilt_example"; + let version = "0.1.0"; + CairoPluginProjectBuilder::default_v1() + .name(name) + .version(version) + .lib_rs(indoc! {r#" + use cairo_lang_macro::{ProcMacroResult, TokenStream, inline_macro}; + #[inline_macro] + pub fn some(token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(token_stream) + } + "#}) + .build(t); + let target = t.child("target/scarb/cairo-plugin"); + for (triplet, extension) in TRIPLETS { + let path = format!("{name}_v{version}_{triplet}{extension}"); + target + .child(path) + .write_str("this is not a valid lib") + .unwrap(); + } +} + +#[test] +fn compile_with_invalid_prebuilt_plugins() { + let t = TempDir::new().unwrap(); + invalid_prebuilt_project(&t.child("dep")); + ProjectBuilder::start() + .name("hello") + .lib_cairo(indoc! {r#" + fn main() -> u32 { + let x = some!(42); + x + } + "#}) + .dep("invalid_prebuilt_example", t.child("dep")) + .manifest_extra(indoc! {r#" + [tool.scarb] + allow-prebuilt-plugins = ["invalid_prebuilt_example"] + "#}) + .build(&t); + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&t) + .assert() + .success() + .stdout_matches(indoc! {r#" + [..]Compiling invalid_prebuilt_example v0.1.0 ([..]) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + [..] Finished `dev` profile target(s) in [..] 
+ "#}); +} From f827348ed0a2e710686e274e215e88e9c7984085 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maciej=20Tr=C4=85tnowiecki?= Date: Mon, 17 Mar 2025 16:42:27 +0100 Subject: [PATCH 35/38] Set edition to 2021 in cairo-lang-macro-quote --- plugins/cairo-lang-quote/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/cairo-lang-quote/Cargo.toml b/plugins/cairo-lang-quote/Cargo.toml index 7249b2a57..e294241b7 100644 --- a/plugins/cairo-lang-quote/Cargo.toml +++ b/plugins/cairo-lang-quote/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "cairo-lang-quote" version = "0.1.0" -edition.workspace = true +edition = "2021" authors.workspace = true categories = ["development-tools"] From a3cd995ede32be33c209982062f4c521cf4f27a0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maciej=20Tr=C4=85tnowiecki?= Date: Fri, 21 Mar 2025 09:50:09 +0100 Subject: [PATCH 36/38] Infer proc macro version from shared library --- plugins/cairo-lang-macro/src/lib.rs | 4 ++ scarb/src/compiler/plugin/proc_macro/ffi.rs | 63 +++++++++++++++++++ .../compiler/plugin/proc_macro/instance.rs | 38 +++++------ scarb/src/compiler/plugin/proc_macro/mod.rs | 1 + .../src/compiler/plugin/proc_macro/v1/ffi.rs | 5 +- .../src/compiler/plugin/proc_macro/v2/ffi.rs | 5 +- scarb/src/core/manifest/toml_manifest.rs | 1 - .../core/publishing/manifest_normalization.rs | 1 - .../src/cairo_plugin_project_builder.rs | 4 +- 9 files changed, 89 insertions(+), 33 deletions(-) create mode 100644 scarb/src/compiler/plugin/proc_macro/ffi.rs diff --git a/plugins/cairo-lang-macro/src/lib.rs b/plugins/cairo-lang-macro/src/lib.rs index 723188755..fa3adf472 100644 --- a/plugins/cairo-lang-macro/src/lib.rs +++ b/plugins/cairo-lang-macro/src/lib.rs @@ -28,11 +28,15 @@ use cairo_lang_macro_stable::{ StableExpansionsList, StablePostProcessContext, StableProcMacroResult, StableTextSpan, }; use std::ffi::{CStr, CString, c_char}; +use std::num::NonZeroU8; use std::ops::Deref; mod types; pub use types::*; +#[no_mangle] +pub static CAIRO_LANG_MACRO_API_VERSION: NonZeroU8 = unsafe { NonZeroU8::new_unchecked(2) }; + // A thread-local allocation context for allocating tokens on proc macro side. thread_local!(static CONTEXT: RefCell = RefCell::default() ); diff --git a/scarb/src/compiler/plugin/proc_macro/ffi.rs b/scarb/src/compiler/plugin/proc_macro/ffi.rs new file mode 100644 index 000000000..8a73c4bdb --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/ffi.rs @@ -0,0 +1,63 @@ +use crate::compiler::plugin::proc_macro::ProcMacroApiVersion; +use anyhow::{Result, anyhow}; +use camino::Utf8Path; +use libloading::Library; +use std::num::NonZeroU8; +use tracing::debug; + +pub struct SharedPluginLibrary { + api_version: ProcMacroApiVersion, + library: Library, +} + +impl SharedPluginLibrary { + /// Load the shared library under the given path, and store its version. + /// + /// # Safety + /// This function is unsafe because it calls the FFI interface of procedural macro package. + pub unsafe fn try_new(lib_path: &Utf8Path) -> Result { + let library = unsafe { Library::new(lib_path)? }; + let api_version: ProcMacroApiVersion = if let Ok(symbol) = + unsafe { library.get::<*mut NonZeroU8>(b"CAIRO_LANG_MACRO_API_VERSION\0") } + { + let api_version: NonZeroU8 = unsafe { **symbol }; + let api_version: u8 = api_version.get(); + api_version.try_into()? 
+ } else { + debug!( + "CAIRO_LANG_MACRO_API_VERSION symbol for `{}` proc macro not found, defaulting to V1 API version", + lib_path + ); + ProcMacroApiVersion::V1 + }; + Ok(Self { + library, + api_version, + }) + } + + pub fn api_version(&self) -> ProcMacroApiVersion { + self.api_version + } +} + +impl TryFrom for ProcMacroApiVersion { + type Error = anyhow::Error; + + fn try_from(value: u8) -> Result { + match value { + 1 => Ok(ProcMacroApiVersion::V1), + 2 => Ok(ProcMacroApiVersion::V2), + _ => Err(anyhow!( + "unsupported proc macro api version `{}`, expected `1` or `2`", + value + )), + } + } +} + +impl From for Library { + fn from(plugin: SharedPluginLibrary) -> Self { + plugin.library + } +} diff --git a/scarb/src/compiler/plugin/proc_macro/instance.rs b/scarb/src/compiler/plugin/proc_macro/instance.rs index 758f301be..841fcc638 100644 --- a/scarb/src/compiler/plugin/proc_macro/instance.rs +++ b/scarb/src/compiler/plugin/proc_macro/instance.rs @@ -1,8 +1,8 @@ -use crate::compiler::plugin::CairoPluginProps; use crate::compiler::plugin::proc_macro; use crate::compiler::plugin::proc_macro::SharedLibraryProvider; use crate::compiler::plugin::proc_macro::expansion::{Expansion, ExpansionKind}; -use crate::core::{Package, PackageId, TargetKind}; +use crate::compiler::plugin::proc_macro::ffi::SharedPluginLibrary; +use crate::core::{Package, PackageId}; use anyhow::{Context, Result, anyhow}; use camino::{Utf8Path, Utf8PathBuf}; use serde::{Deserialize, Serialize}; @@ -20,19 +20,6 @@ pub enum ProcMacroApiVersion { V2, } -pub trait ProcMacroApiVersionReader { - fn api_version(&self) -> Result; -} - -impl ProcMacroApiVersionReader for Package { - fn api_version(&self) -> Result { - assert!(self.is_cairo_plugin()); - let target = self.fetch_target(&TargetKind::CAIRO_PLUGIN)?; - let props: CairoPluginProps = target.props()?; - Ok(props.api) - } -} - /// Representation of a single, loaded procedural macro package. /// /// This struct holds Scarb metadata of a proc macro package (package id, available expansions) @@ -55,7 +42,7 @@ impl ProcMacroInstance { /// Load shared library pub fn try_new(package: &Package, lib_path: Utf8PathBuf) -> Result { trace!("loading compiled macro for `{}` package", package.id); - let plugin = VersionedPlugin::try_new(package, &lib_path)?; + let plugin = unsafe { VersionedPlugin::try_new(package, &lib_path)? }; Ok(Self { expansions: unsafe { plugin.load_expansions(package.id)? }, package_id: package.id, @@ -68,7 +55,7 @@ impl ProcMacroInstance { let prebuilt_path = package .prebuilt_lib_path() .context("could not resolve prebuilt library path")?; - let plugin = VersionedPlugin::try_new(&package, &prebuilt_path)?; + let plugin = unsafe { VersionedPlugin::try_new(&package, &prebuilt_path)? }; Ok(Self { expansions: unsafe { plugin.load_expansions(package.id)? }, package_id: package.id, @@ -174,13 +161,22 @@ impl VersionedPlugin { /// /// # Safety /// This function is unsafe because it calls the FFI interface of procedural macro package. - pub fn try_new(package: &Package, lib_path: &Utf8Path) -> Result { - match package.api_version()? { + pub unsafe fn try_new(package: &Package, lib_path: &Utf8Path) -> Result { + let library = unsafe { + SharedPluginLibrary::try_new(lib_path).with_context(|| { + format!( + "failed to open dynamic library for `{}` proc macro", + package.id + ) + })? + }; + + match library.api_version() { ProcMacroApiVersion::V1 => Ok(VersionedPlugin::V1(unsafe { - proc_macro::v1::Plugin::try_new(lib_path)? + proc_macro::v1::Plugin::try_new(library.into())? 
})), ProcMacroApiVersion::V2 => Ok(VersionedPlugin::V2(unsafe { - proc_macro::v2::Plugin::try_new(lib_path)? + proc_macro::v2::Plugin::try_new(library.into())? })), } } diff --git a/scarb/src/compiler/plugin/proc_macro/mod.rs b/scarb/src/compiler/plugin/proc_macro/mod.rs index dc00aea19..71117b074 100644 --- a/scarb/src/compiler/plugin/proc_macro/mod.rs +++ b/scarb/src/compiler/plugin/proc_macro/mod.rs @@ -1,5 +1,6 @@ pub mod compilation; mod expansion; +mod ffi; mod host; mod instance; mod repository; diff --git a/scarb/src/compiler/plugin/proc_macro/v1/ffi.rs b/scarb/src/compiler/plugin/proc_macro/v1/ffi.rs index 4cc9f5f80..e7d7cc4da 100644 --- a/scarb/src/compiler/plugin/proc_macro/v1/ffi.rs +++ b/scarb/src/compiler/plugin/proc_macro/v1/ffi.rs @@ -11,7 +11,6 @@ use cairo_lang_macro_v1::{ }; use cairo_lang_syntax::node::TypedSyntaxNode; use cairo_lang_syntax::node::db::SyntaxGroup; -use camino::Utf8Path; use itertools::Itertools; use libloading::{Library, Symbol}; use std::ffi::{CStr, CString, c_char}; @@ -112,10 +111,8 @@ impl Plugin { /// /// # Safety /// This function is unsafe because it calls the FFI interface of procedural macro package. - pub unsafe fn try_new(library_path: &Utf8Path) -> Result { - let library = unsafe { Library::new(library_path)? }; + pub unsafe fn try_new(library: Library) -> Result { let vtable = unsafe { VTableV0::try_new(&library)? }; - Ok(Plugin { library, vtable }) } diff --git a/scarb/src/compiler/plugin/proc_macro/v2/ffi.rs b/scarb/src/compiler/plugin/proc_macro/v2/ffi.rs index 1f4087a8b..c1f2d7a00 100644 --- a/scarb/src/compiler/plugin/proc_macro/v2/ffi.rs +++ b/scarb/src/compiler/plugin/proc_macro/v2/ffi.rs @@ -8,7 +8,6 @@ use cairo_lang_macro_stable::{ StableExpansion, StableExpansionsList, StablePostProcessContext, StableProcMacroResult, StableResultWrapper, StableTextSpan, StableTokenStream, }; -use camino::Utf8Path; use itertools::Itertools; use libloading::{Library, Symbol}; use std::ffi::{CStr, CString, c_char}; @@ -100,10 +99,8 @@ impl Plugin { /// /// # Safety /// This function is unsafe because it calls the FFI interface of procedural macro package. - pub unsafe fn try_new(library_path: &Utf8Path) -> Result { - let library = unsafe { Library::new(library_path)? }; + pub unsafe fn try_new(library: Library) -> Result { let vtable = unsafe { VTableV0::try_new(&library)? }; - Ok(Plugin { library, vtable }) } diff --git a/scarb/src/core/manifest/toml_manifest.rs b/scarb/src/core/manifest/toml_manifest.rs index ac32469c0..34bbb3077 100644 --- a/scarb/src/core/manifest/toml_manifest.rs +++ b/scarb/src/core/manifest/toml_manifest.rs @@ -311,7 +311,6 @@ pub struct TomlExecutableTargetParams { #[serde(rename_all = "kebab-case")] pub struct TomlCairoPluginTargetParams { pub builtin: Option, - pub api: Option, } pub type TomlExternalTargetParams = BTreeMap; diff --git a/scarb/src/core/publishing/manifest_normalization.rs b/scarb/src/core/publishing/manifest_normalization.rs index 9fe963901..86b864648 100644 --- a/scarb/src/core/publishing/manifest_normalization.rs +++ b/scarb/src/core/publishing/manifest_normalization.rs @@ -160,7 +160,6 @@ fn generate_cairo_plugin(pkg: &Package) -> Option Self { let default_name = "some"; let default_code = indoc! 
{r#" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + use cairo_lang_macro::{ProcMacroResult, TokenStream, CAIRO_LANG_MACRO_API_VERSION, attribute_macro}; #[attribute_macro] pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + assert!(CAIRO_LANG_MACRO_API_VERSION == unsafe { std::num::NonZeroU8::new_unchecked(2)} ); ProcMacroResult::new(token_stream) } "#}; @@ -165,7 +166,6 @@ impl Default for CairoPluginProjectBuilder { .version("1.0.0") .manifest_extra(indoc! {r#" [cairo-plugin] - api = "v2" "#}) }) .lib_rs(default_code) From 54fe6ce3ba27162c7163ec02e7230af9497fb7f5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maciej=20Tr=C4=85tnowiecki?= Date: Fri, 21 Mar 2025 12:29:22 +0100 Subject: [PATCH 37/38] Add v1 macro server tests --- scarb/tests/proc_macro_v1_prebuilt.rs | 40 +++++++++++++ ...o_v2_server.rs => proc_macro_v1_server.rs} | 57 ++++++------------- scarb/tests/proc_macro_v2_prebuilt.rs | 50 ---------------- .../src/proc_macro_server.rs | 28 +++++++++ 4 files changed, 85 insertions(+), 90 deletions(-) rename scarb/tests/{proc_macro_v2_server.rs => proc_macro_v1_server.rs} (77%) diff --git a/scarb/tests/proc_macro_v1_prebuilt.rs b/scarb/tests/proc_macro_v1_prebuilt.rs index e13790799..85c14414e 100644 --- a/scarb/tests/proc_macro_v1_prebuilt.rs +++ b/scarb/tests/proc_macro_v1_prebuilt.rs @@ -1,10 +1,14 @@ use assert_fs::TempDir; use assert_fs::fixture::{ChildPath, FileWriteStr, PathCreateDir}; use assert_fs::prelude::PathChild; +use cairo_lang_macro_v1::TokenStream; use indoc::indoc; use libloading::library_filename; +use scarb_proc_macro_server_types::methods::expand::{ExpandInline, ExpandInlineMacroParams}; +use scarb_proc_macro_server_types::scope::ProcMacroScope; use scarb_test_support::cairo_plugin_project_builder::CairoPluginProjectBuilder; use scarb_test_support::command::Scarb; +use scarb_test_support::proc_macro_server::ProcMacroClient; use scarb_test_support::project_builder::ProjectBuilder; use scarb_test_support::workspace_builder::WorkspaceBuilder; use snapbox::cmd::Command; @@ -193,3 +197,39 @@ fn compile_with_invalid_prebuilt_plugins() { [..] Finished `dev` profile target(s) in [..] "#}); } + +#[test] +fn load_prebuilt_proc_macros() { + let t = TempDir::new().unwrap(); + proc_macro_example(&t.child("dep")); + + let project = t.child("test_package"); + + ProjectBuilder::start() + .name("test_package") + .version("1.0.0") + .lib_cairo("") + .dep("proc_macro_example", t.child("dep")) + .manifest_extra(indoc! 
{r#" + [tool.scarb] + allow-prebuilt-plugins = ["proc_macro_example"] + "#}) + .build(&project); + + let mut proc_macro_client = ProcMacroClient::new_without_cargo(&project); + + let component = proc_macro_client + .defined_macros_for_package("test_package") + .component; + + let response = proc_macro_client + .request_and_wait::(ExpandInlineMacroParams { + context: ProcMacroScope { component }, + name: "some".to_string(), + args: TokenStream::new("42".to_string()), + }) + .unwrap(); + + assert_eq!(response.diagnostics, vec![]); + assert_eq!(response.token_stream, TokenStream::new("42".to_string())); +} diff --git a/scarb/tests/proc_macro_v2_server.rs b/scarb/tests/proc_macro_v1_server.rs similarity index 77% rename from scarb/tests/proc_macro_v2_server.rs rename to scarb/tests/proc_macro_v1_server.rs index e1112df60..4b357e85e 100644 --- a/scarb/tests/proc_macro_v2_server.rs +++ b/scarb/tests/proc_macro_v1_server.rs @@ -1,6 +1,6 @@ use assert_fs::TempDir; use assert_fs::prelude::PathChild; -use cairo_lang_macro::{TextSpan, Token, TokenStream, TokenTree}; +use cairo_lang_macro_v1::TokenStream; use scarb_proc_macro_server_types::methods::expand::ExpandAttribute; use scarb_proc_macro_server_types::methods::expand::ExpandAttributeParams; use scarb_proc_macro_server_types::methods::expand::ExpandDerive; @@ -10,7 +10,7 @@ use scarb_proc_macro_server_types::methods::expand::ExpandInlineMacroParams; use scarb_proc_macro_server_types::scope::ProcMacroScope; use scarb_test_support::cairo_plugin_project_builder::CairoPluginProjectBuilder; use scarb_test_support::proc_macro_server::ProcMacroClient; -use scarb_test_support::proc_macro_server::SIMPLE_MACROS_V2; +use scarb_test_support::proc_macro_server::SIMPLE_MACROS_V1; use scarb_test_support::project_builder::ProjectBuilder; #[test] @@ -18,8 +18,8 @@ fn defined_macros() { let t = TempDir::new().unwrap(); let plugin_package = t.child("some"); - CairoPluginProjectBuilder::default() - .lib_rs(SIMPLE_MACROS_V2) + CairoPluginProjectBuilder::default_v1() + .lib_rs(SIMPLE_MACROS_V1) .build(&plugin_package); let project = t.child("test_package"); @@ -58,19 +58,12 @@ fn expand_attribute() { let output = input.replace(name, "very_new_name"); - let span = TextSpan { start: 0, end: output.len() as u32 }; - ProcMacroResult::new( - TokenStream::new(vec![ - TokenTree::Ident( - Token::new(output, span) - ) - ]) - ) + ProcMacroResult::new(TokenStream::new(output)) }} "##; - CairoPluginProjectBuilder::default() - .lib_rs(format!("{SIMPLE_MACROS_V2}\n{rename_to_very_new_name}")) + CairoPluginProjectBuilder::default_v1() + .lib_rs(format!("{SIMPLE_MACROS_V1}\n{rename_to_very_new_name}")) .add_dep(r#"regex = "1.11.1""#) .build(&plugin_package); @@ -94,11 +87,7 @@ fn expand_attribute() { context: ProcMacroScope { component }, attr: "rename_to_very_new_name".to_string(), args: TokenStream::empty(), - call_site: TextSpan::new(0, 0), - item: TokenStream::new(vec![TokenTree::Ident(Token::new( - "fn some_test_fn(){}", - TextSpan::new(0, 0), - ))]), + item: TokenStream::new("fn some_test_fn(){}".to_string()), }) .unwrap(); @@ -114,8 +103,8 @@ fn expand_derive() { let t = TempDir::new().unwrap(); let plugin_package = t.child("some"); - CairoPluginProjectBuilder::default() - .lib_rs(SIMPLE_MACROS_V2) + CairoPluginProjectBuilder::default_v1() + .lib_rs(SIMPLE_MACROS_V1) .build(&plugin_package); let project = t.child("test_package"); @@ -133,16 +122,12 @@ fn expand_derive() { .defined_macros_for_package("test_package") .component; - let item = 
TokenStream::new(vec![TokenTree::Ident(Token::new( - "fn some_test_fn(){}", - TextSpan::new(0, 0), - ))]); + let item = TokenStream::new("fn some_test_fn(){}".to_string()); let response = proc_macro_client .request_and_wait::(ExpandDeriveParams { context: ProcMacroScope { component }, derives: vec!["some_derive".to_string()], - call_site: TextSpan::new(0, 0), item, }) .unwrap(); @@ -164,18 +149,12 @@ fn expand_inline() { pub fn replace_all_15_with_25(token_stream: TokenStream) -> ProcMacroResult { let content = token_stream.to_string().replace("15", "25"); let span = TextSpan { start: 0, end: content.len() as u32 }; - ProcMacroResult::new( - TokenStream::new(vec![ - TokenTree::Ident( - Token::new(content, span) - ) - ]) - ) + ProcMacroResult::new(TokenStream::new(content)) } "#; - CairoPluginProjectBuilder::default() - .lib_rs(format!("{SIMPLE_MACROS_V2}\n{replace_all_15_with_25}")) + CairoPluginProjectBuilder::default_v1() + .lib_rs(format!("{SIMPLE_MACROS_V1}\n{replace_all_15_with_25}")) .build(&plugin_package); let project = t.child("test_package"); @@ -197,11 +176,9 @@ fn expand_inline() { .request_and_wait::(ExpandInlineMacroParams { context: ProcMacroScope { component }, name: "replace_all_15_with_25".to_string(), - call_site: TextSpan::new(0, 0), - args: TokenStream::new(vec![TokenTree::Ident(Token::new( - "struct A { field: 15 , other_field: macro_call!(12)}", - TextSpan::new(0, 0), - ))]), + args: TokenStream::new( + "struct A { field: 15 , other_field: macro_call!(12)}".to_string(), + ), }) .unwrap(); diff --git a/scarb/tests/proc_macro_v2_prebuilt.rs b/scarb/tests/proc_macro_v2_prebuilt.rs index 6586cd601..26a4d73ec 100644 --- a/scarb/tests/proc_macro_v2_prebuilt.rs +++ b/scarb/tests/proc_macro_v2_prebuilt.rs @@ -1,14 +1,10 @@ use assert_fs::TempDir; use assert_fs::fixture::{ChildPath, FileWriteStr, PathCreateDir}; use assert_fs::prelude::PathChild; -use cairo_lang_macro::{TextSpan, Token, TokenStream, TokenTree}; use indoc::indoc; use libloading::library_filename; -use scarb_proc_macro_server_types::methods::expand::{ExpandInline, ExpandInlineMacroParams}; -use scarb_proc_macro_server_types::scope::ProcMacroScope; use scarb_test_support::cairo_plugin_project_builder::CairoPluginProjectBuilder; use scarb_test_support::command::Scarb; -use scarb_test_support::proc_macro_server::ProcMacroClient; use scarb_test_support::project_builder::ProjectBuilder; use scarb_test_support::workspace_builder::WorkspaceBuilder; use snapbox::cmd::Command; @@ -197,49 +193,3 @@ fn compile_with_invalid_prebuilt_plugins() { [..] Finished `dev` profile target(s) in [..] "#}); } - -#[test] -fn load_prebuilt_proc_macros() { - let t = TempDir::new().unwrap(); - proc_macro_example(&t.child("dep")); - - let project = t.child("test_package"); - - ProjectBuilder::start() - .name("test_package") - .version("1.0.0") - .lib_cairo("") - .dep("proc_macro_example", t.child("dep")) - .manifest_extra(indoc! 
{r#" - [tool.scarb] - allow-prebuilt-plugins = ["proc_macro_example"] - "#}) - .build(&project); - - let mut proc_macro_client = ProcMacroClient::new_without_cargo(&project); - - let component = proc_macro_client - .defined_macros_for_package("test_package") - .component; - - let response = proc_macro_client - .request_and_wait::(ExpandInlineMacroParams { - context: ProcMacroScope { component }, - name: "some".to_string(), - args: TokenStream::new(vec![TokenTree::Ident(Token::new( - "42", - TextSpan::call_site(), - ))]), - call_site: TextSpan::new(0, 0), - }) - .unwrap(); - - assert_eq!(response.diagnostics, vec![]); - assert_eq!( - response.token_stream, - TokenStream::new(vec![TokenTree::Ident(Token::new( - "42", - TextSpan::call_site(), - ))]) - ); -} diff --git a/utils/scarb-test-support/src/proc_macro_server.rs b/utils/scarb-test-support/src/proc_macro_server.rs index 0fe661b3c..2881f9ee0 100644 --- a/utils/scarb-test-support/src/proc_macro_server.rs +++ b/utils/scarb-test-support/src/proc_macro_server.rs @@ -19,6 +19,34 @@ use std::process::ChildStdin; use std::process::ChildStdout; use std::process::Stdio; +pub const SIMPLE_MACROS_V1: &str = r#" +use cairo_lang_macro::{ + ProcMacroResult, + TokenStream, + attribute_macro, + inline_macro, + derive_macro, + executable_attribute +}; + +executable_attribute!("some_executable"); + +#[attribute_macro] +pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(token_stream) +} + +#[inline_macro] +pub fn inline_some(token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(token_stream) +} + +#[derive_macro] +fn some_derive(_token_stream: TokenStream)-> ProcMacroResult { + ProcMacroResult::new(TokenStream::new("impl SomeImpl of SomeTrait {}".to_string())) +} +"#; + pub const SIMPLE_MACROS_V2: &str = r#" use cairo_lang_macro::{ ProcMacroResult, From 565475ff218385ef904412a2262a3fe5a4c149fd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maciej=20Tr=C4=85tnowiecki?= Date: Fri, 21 Mar 2025 12:50:58 +0100 Subject: [PATCH 38/38] Revert proc macro server to support only v1 macros --- Cargo.lock | 2 +- .../methods/defined_macros.rs | 3 ++- .../methods/expand_attribute.rs | 15 +++++------- .../methods/expand_derive.rs | 24 +++++++------------ .../methods/expand_inline.rs | 11 ++++----- scarb/tests/proc_macro_v1_server.rs | 1 - .../scarb-proc-macro-server-types/Cargo.toml | 2 +- .../src/methods/expand.rs | 8 +------ 8 files changed, 25 insertions(+), 41 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index dfb8c0db3..df11b4885 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5962,7 +5962,7 @@ dependencies = [ name = "scarb-proc-macro-server-types" version = "0.2.0" dependencies = [ - "cairo-lang-macro 0.2.0", + "cairo-lang-macro 0.1.1", "serde", "serde_json", ] diff --git a/scarb/src/ops/proc_macro_server/methods/defined_macros.rs b/scarb/src/ops/proc_macro_server/methods/defined_macros.rs index efa64ebe1..af5471b71 100644 --- a/scarb/src/ops/proc_macro_server/methods/defined_macros.rs +++ b/scarb/src/ops/proc_macro_server/methods/defined_macros.rs @@ -9,7 +9,7 @@ use scarb_proc_macro_server_types::methods::defined_macros::{ use super::Handler; use crate::compiler::plugin::collection::WorkspaceProcMacros; -use crate::compiler::plugin::proc_macro::DeclaredProcMacroInstances; +use crate::compiler::plugin::proc_macro::{DeclaredProcMacroInstances, ProcMacroApiVersion}; impl Handler for DefinedMacros { fn handle( @@ -22,6 +22,7 @@ impl Handler for DefinedMacros { .flat_map(|(component, plugin)| { plugin 
.iter() + .filter(|p| p.api_version() == ProcMacroApiVersion::V1) .map(|plugin| { let attributes = plugin.declared_attributes_without_executables(); let inline_macros = plugin.declared_inline_macros(); diff --git a/scarb/src/ops/proc_macro_server/methods/expand_attribute.rs b/scarb/src/ops/proc_macro_server/methods/expand_attribute.rs index a8879accd..5e0aed03a 100644 --- a/scarb/src/ops/proc_macro_server/methods/expand_attribute.rs +++ b/scarb/src/ops/proc_macro_server/methods/expand_attribute.rs @@ -4,10 +4,8 @@ use anyhow::{Context, Result}; use scarb_proc_macro_server_types::methods::{ProcMacroResult, expand::ExpandAttribute}; use super::Handler; -use crate::compiler::plugin::collection::WorkspaceProcMacros; -use crate::compiler::plugin::proc_macro::{ - DeclaredProcMacroInstances, ExpansionKind, ProcMacroApiVersion, -}; +use crate::compiler::plugin::proc_macro::{DeclaredProcMacroInstances, ProcMacroApiVersion}; +use crate::compiler::plugin::{collection::WorkspaceProcMacros, proc_macro::ExpansionKind}; impl Handler for ExpandAttribute { fn handle( @@ -19,7 +17,6 @@ impl Handler for ExpandAttribute { attr, args, item, - call_site, } = params; let plugin = workspace_macros.get(&context.component); @@ -27,7 +24,7 @@ impl Handler for ExpandAttribute { .as_ref() .and_then(|v| { v.iter() - .find(|a| a.api_version() == ProcMacroApiVersion::V2) + .find(|a| a.api_version() == ProcMacroApiVersion::V1) }) .with_context(|| format!("No macros found in scope: {context:?}"))?; @@ -44,9 +41,9 @@ impl Handler for ExpandAttribute { .with_context(|| format!("Unsupported attribute: {attr}"))?; let result = instance - .try_v2() - .expect("procedural macro using v1 api used in a context expecting v2 api") - .generate_code(attr.into(), call_site, args, item); + .try_v1() + .expect("procedural macro using v2 api used in a context expecting v1 api") + .generate_code(attr.into(), args, item); Ok(ProcMacroResult { token_stream: result.token_stream, diff --git a/scarb/src/ops/proc_macro_server/methods/expand_derive.rs b/scarb/src/ops/proc_macro_server/methods/expand_derive.rs index b160febca..97b014fed 100644 --- a/scarb/src/ops/proc_macro_server/methods/expand_derive.rs +++ b/scarb/src/ops/proc_macro_server/methods/expand_derive.rs @@ -1,7 +1,7 @@ use std::sync::Arc; use anyhow::{Context, Result}; -use cairo_lang_macro::TokenStream; +use cairo_lang_macro_v1::TokenStream; use convert_case::{Case, Casing}; use scarb_proc_macro_server_types::methods::{ProcMacroResult, expand::ExpandDerive}; @@ -21,10 +21,9 @@ impl Handler for ExpandDerive { context, derives, item, - call_site, } = params; - let mut derived_code = TokenStream::empty(); + let mut derived_code = String::new(); let mut all_diagnostics = vec![]; for derive in derives { @@ -35,9 +34,9 @@ impl Handler for ExpandDerive { .as_ref() .and_then(|v| { v.iter() - .find(|a| a.api_version() == ProcMacroApiVersion::V2) + .find(|a| a.api_version() == ProcMacroApiVersion::V1) }) - .with_context(|| format!("No macros found in scope: {context:?}"))?; + .with_context(|| format!("No macros found in scope {context:?}"))?; let instance = plugin .instances() @@ -46,23 +45,18 @@ impl Handler for ExpandDerive { .with_context(|| format!("Unsupported derive macro: {derive}"))?; let result = instance - .try_v2() - .expect("procedural macro using v1 api used in a context expecting v2 api") - .generate_code( - expansion.name.clone(), - call_site.clone(), - TokenStream::empty(), - item.clone(), - ); + .try_v1() + .expect("procedural macro using v2 api used in a context expecting v1 
api") + .generate_code(expansion.name.clone(), TokenStream::empty(), item.clone()); // Register diagnostics. all_diagnostics.extend(result.diagnostics); // Add generated code. - derived_code.tokens.extend(result.token_stream.tokens); + derived_code.push_str(&result.token_stream.to_string()); } Ok(ProcMacroResult { - token_stream: derived_code, + token_stream: TokenStream::new(derived_code), diagnostics: all_diagnostics, }) } diff --git a/scarb/src/ops/proc_macro_server/methods/expand_inline.rs b/scarb/src/ops/proc_macro_server/methods/expand_inline.rs index 9220b9a48..c6c244b3e 100644 --- a/scarb/src/ops/proc_macro_server/methods/expand_inline.rs +++ b/scarb/src/ops/proc_macro_server/methods/expand_inline.rs @@ -1,7 +1,7 @@ use std::sync::Arc; use anyhow::{Context, Result}; -use cairo_lang_macro::TokenStream; +use cairo_lang_macro_v1::TokenStream; use scarb_proc_macro_server_types::methods::{ProcMacroResult, expand::ExpandInline}; use super::Handler; @@ -17,7 +17,6 @@ impl Handler for ExpandInline { context, name, args, - call_site, } = params; let plugin = workspace_macros.get(&context.component); @@ -25,7 +24,7 @@ impl Handler for ExpandInline { .as_ref() .and_then(|v| { v.iter() - .find(|a| a.api_version() == ProcMacroApiVersion::V2) + .find(|a| a.api_version() == ProcMacroApiVersion::V1) }) .with_context(|| format!("No macros found in scope: {context:?}"))?; @@ -42,9 +41,9 @@ impl Handler for ExpandInline { .with_context(|| format!("Unsupported inline macro: {name}"))?; let result = instance - .try_v2() - .expect("procedural macro using v1 api used in a context expecting v2 api") - .generate_code(name.into(), call_site, TokenStream::empty(), args); + .try_v1() + .expect("procedural macro using v2 api used in a context expecting v1 api") + .generate_code(name.into(), TokenStream::empty(), args); Ok(ProcMacroResult { token_stream: result.token_stream, diff --git a/scarb/tests/proc_macro_v1_server.rs b/scarb/tests/proc_macro_v1_server.rs index 4b357e85e..81685b8c4 100644 --- a/scarb/tests/proc_macro_v1_server.rs +++ b/scarb/tests/proc_macro_v1_server.rs @@ -148,7 +148,6 @@ fn expand_inline() { #[inline_macro] pub fn replace_all_15_with_25(token_stream: TokenStream) -> ProcMacroResult { let content = token_stream.to_string().replace("15", "25"); - let span = TextSpan { start: 0, end: content.len() as u32 }; ProcMacroResult::new(TokenStream::new(content)) } "#; diff --git a/utils/scarb-proc-macro-server-types/Cargo.toml b/utils/scarb-proc-macro-server-types/Cargo.toml index 309030ec2..9592d8150 100644 --- a/utils/scarb-proc-macro-server-types/Cargo.toml +++ b/utils/scarb-proc-macro-server-types/Cargo.toml @@ -10,6 +10,6 @@ license.workspace = true repository.workspace = true [dependencies] -cairo-lang-macro = { path = "../../plugins/cairo-lang-macro", version = "0.2", features = ["serde"] } +cairo-lang-macro = { version = "0.1", features = ["serde"] } serde.workspace = true serde_json.workspace = true diff --git a/utils/scarb-proc-macro-server-types/src/methods/expand.rs b/utils/scarb-proc-macro-server-types/src/methods/expand.rs index f33af1562..faa60fd1b 100644 --- a/utils/scarb-proc-macro-server-types/src/methods/expand.rs +++ b/utils/scarb-proc-macro-server-types/src/methods/expand.rs @@ -2,7 +2,7 @@ use crate::scope::ProcMacroScope; use super::Method; use super::ProcMacroResult; -use cairo_lang_macro::{TextSpan, TokenStream}; +use cairo_lang_macro::TokenStream; use serde::{Deserialize, Serialize}; /// Parameters for expanding a specific attribute macro. 
@@ -19,8 +19,6 @@ pub struct ExpandAttributeParams {
     pub args: TokenStream,
     /// The token stream representing the item on which the macro is applied.
     pub item: TokenStream,
-    // Call site span.
-    pub call_site: TextSpan,
 }
 
 /// Represents a request to expand a single attribute macro.
@@ -44,8 +42,6 @@ pub struct ExpandDeriveParams {
     pub derives: Vec<String>,
     /// The token stream of the item to which the derive macros are applied.
     pub item: TokenStream,
-    // Call site span.
-    pub call_site: TextSpan,
 }
 
 /// Represents a request to expand derive macros.
@@ -69,8 +65,6 @@ pub struct ExpandInlineMacroParams {
     pub name: String,
     /// The token stream representing arguments passed to the macro.
     pub args: TokenStream,
-    // Call site span.
-    pub call_site: TextSpan,
 }
 
 /// Represents a request to expand a single inline macro.
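
For reference, a minimal sketch comparing the two token stream representations touched by this stack after the revert. It is grounded in the tests above and assumes the crate renames used inside Scarb's own test crate (`cairo_lang_macro_v1` for the string-based 0.1 API kept by the proc macro server, `cairo_lang_macro` for the token-based 0.2 API that macro packages can still target); it is illustrative only and not part of the patch.

    // Illustrative only: both constructors are copied from the tests in this stack.
    use cairo_lang_macro::{TextSpan, Token, TokenStream as TokenStreamV2, TokenTree};
    use cairo_lang_macro_v1::TokenStream as TokenStreamV1;

    fn main() {
        // v1 API (what the proc macro server exchanges after this revert):
        // the payload is a plain string of Cairo source.
        let v1 = TokenStreamV1::new("struct A { field: 15 }".to_string());

        // v2 API (still available to macro packages via VersionedPlugin::V2):
        // the same payload carries explicit tokens with text spans.
        let v2 = TokenStreamV2::new(vec![TokenTree::Ident(Token::new(
            "struct A { field: 15 }",
            TextSpan::call_site(),
        ))]);

        // Both stringify to the same Cairo source text.
        assert_eq!(v1.to_string(), v2.to_string());
    }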