diff --git a/Cargo.lock b/Cargo.lock
index bc970fbeb..7ca210f55 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -894,8 +894,11 @@ dependencies = [
 name = "cairo-lang-macro"
 version = "0.1.1"
 dependencies = [
- "cairo-lang-macro-attributes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "cairo-lang-macro-stable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bumpalo",
+ "cairo-lang-macro-attributes 0.1.0",
+ "cairo-lang-macro-stable 1.0.0",
+ "cairo-lang-primitive-token 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cairo-lang-quote",
  "linkme",
  "serde",
  "serde_json",
@@ -1076,6 +1079,14 @@ dependencies = [
  "toml",
 ]
 
+[[package]]
+name = "cairo-lang-quote"
+version = "0.1.0"
+dependencies = [
+ "proc-macro2",
+ "quote",
+]
+
 [[package]]
 name = "cairo-lang-runnable-utils"
 version = "2.9.1"
@@ -5425,8 +5436,8 @@ dependencies = [
  "cairo-lang-filesystem 2.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "cairo-lang-formatter 2.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "cairo-lang-lowering 2.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "cairo-lang-macro 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "cairo-lang-macro-stable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cairo-lang-macro 0.1.1",
+ "cairo-lang-macro-stable 1.0.0",
  "cairo-lang-parser 2.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "cairo-lang-semantic 2.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "cairo-lang-sierra 2.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -5634,7 +5645,7 @@ dependencies = [
 name = "scarb-proc-macro-server-types"
 version = "0.1.0"
 dependencies = [
- "cairo-lang-macro 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cairo-lang-macro 0.1.1",
  "serde",
  "serde_json",
 ]
diff --git a/Cargo.toml b/Cargo.toml
index 841e2e00f..ab325142d 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -9,6 +9,7 @@ members = [
   "plugins/cairo-lang-macro",
   "plugins/cairo-lang-macro-attributes",
   "plugins/cairo-lang-macro-stable",
+  "plugins/cairo-lang-quote",
   "utils/create-output-dir",
   "utils/scarb-proc-macro-server-types",
   "utils/scarb-build-metadata",
@@ -35,6 +36,7 @@ anyhow = "1"
 assert_fs = "1"
 async-trait = "0.1"
 axum = { version = "0.6", features = ["http2"] }
+bumpalo = "3"
 cairo-lang-compiler = "2.9.1"
 cairo-lang-defs = "2.9.1"
 cairo-lang-diagnostics = "2.9.1"
diff --git a/plugins/cairo-lang-macro-stable/src/lib.rs b/plugins/cairo-lang-macro-stable/src/lib.rs
index dd91b21a8..f9458a3a5 100644
--- a/plugins/cairo-lang-macro-stable/src/lib.rs
+++ b/plugins/cairo-lang-macro-stable/src/lib.rs
@@ -1,11 +1,31 @@
 use crate::ffi::{StableOption, StableSlice};
-use std::ffi::CStr;
 use std::num::NonZeroU8;
 use std::os::raw::c_char;
 use std::ptr::NonNull;
 
 pub mod ffi;
 
+#[repr(C)]
+#[derive(Debug)]
+pub struct StableToken {
+    pub span: StableTextSpan,
+    pub ptr: *const u8,
+    pub len: usize,
+}
+
+#[repr(C)]
+#[derive(Debug)]
+pub struct StableTextSpan {
+    pub start: u32,
+    pub end: u32,
+}
+
+#[repr(C)]
+#[derive(Debug)]
+pub enum StableTokenTree {
+    Ident(StableToken),
+}
+
 #[repr(C)]
 #[derive(Debug)]
 pub struct StableExpansion {
@@ -23,8 +43,9 @@ pub type StableExpansionsList = StableSlice<StableExpansion>;
 
 #[repr(C)]
 #[derive(Debug)]
 pub struct StableTokenStream {
-    pub value: *mut c_char,
+    pub tokens: StableSlice<StableTokenTree>,
     pub metadata: StableTokenStreamMetadata,
+    pub size_hint: usize,
 }
 
 /// Token stream metadata.
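To make the new layout concrete: a token now crosses the FFI barrier as a borrowed pointer/length view plus its span, instead of one flattened C string. A minimal sketch (not part of the patch; the helper name is hypothetical) of building such a view on the sending side:

```rust
use cairo_lang_macro_stable::{StableTextSpan, StableToken, StableTokenTree};

/// Hypothetical helper: view an identifier as its stable FFI form.
/// No copy is made here; the struct merely borrows the bytes, so `content`
/// must outlive the view until the receiver interns it into its own arena.
fn as_stable_ident(content: &str, start: u32, end: u32) -> StableTokenTree {
    StableTokenTree::Ident(StableToken {
        span: StableTextSpan { start, end },
        ptr: content.as_ptr(),
        len: content.len(),
    })
}
```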
@@ -35,6 +56,7 @@ pub struct StableTokenStream {
 pub struct StableTokenStreamMetadata {
     pub original_file_path: Option<NonNull<c_char>>,
     pub file_id: Option<NonNull<c_char>>,
+    pub edition: Option<NonNull<c_char>>,
 }
 
 /// Auxiliary data returned by the procedural macro.
@@ -76,17 +98,6 @@ pub struct StableResultWrapper {
     pub output: StableProcMacroResult,
 }
 
-impl StableTokenStream {
-    /// Convert to String.
-    ///
-    /// # Safety
-    pub unsafe fn to_string(&self) -> String {
-        // Note that this does not deallocate the c-string.
-        // The memory must still be freed with `CString::from_raw`.
-        CStr::from_ptr(self.value).to_string_lossy().to_string()
-    }
-}
-
 #[repr(C)]
 pub struct StablePostProcessContext {
     pub aux_data: StableSlice<StableAuxData>,
diff --git a/plugins/cairo-lang-macro/Cargo.toml b/plugins/cairo-lang-macro/Cargo.toml
index bec89f24c..40ac4da49 100644
--- a/plugins/cairo-lang-macro/Cargo.toml
+++ b/plugins/cairo-lang-macro/Cargo.toml
@@ -2,7 +2,7 @@
 name = "cairo-lang-macro"
 version = "0.1.1"
 edition.workspace = true
-rust-version = "1.64"
+rust-version = "1.73"
 authors.workspace = true
 categories = ["development-tools"]
@@ -14,8 +14,11 @@ readme = "README.md"
 repository.workspace = true
 
 [dependencies]
-cairo-lang-macro-attributes = "0.1"
-cairo-lang-macro-stable = "1"
+bumpalo.workspace = true
+cairo-lang-macro-attributes = { path = "../cairo-lang-macro-attributes" }
+cairo-lang-macro-stable = { path = "../cairo-lang-macro-stable" }
+cairo-lang-primitive-token = "1.0.0"
+cairo-lang-quote = { path = "../cairo-lang-quote", version = "0.1.0" }
 linkme.workspace = true
 serde = { workspace = true, optional = true }
diff --git a/plugins/cairo-lang-macro/src/lib.rs b/plugins/cairo-lang-macro/src/lib.rs
index 382f0c66a..48ae98171 100644
--- a/plugins/cairo-lang-macro/src/lib.rs
+++ b/plugins/cairo-lang-macro/src/lib.rs
@@ -16,19 +16,28 @@
 //!
 pub use cairo_lang_macro_attributes::*;
+pub use cairo_lang_quote::*;
+
 #[doc(hidden)]
 pub use linkme;
 
+use std::cell::RefCell;
+
 use cairo_lang_macro_stable::ffi::StableSlice;
 use cairo_lang_macro_stable::{
-    StableExpansionsList, StablePostProcessContext, StableProcMacroResult,
+    StableExpansionsList, StablePostProcessContext, StableProcMacroResult, StableTextSpan,
 };
 use std::ffi::{c_char, CStr, CString};
+use std::ops::Deref;
 
 mod types;
-
 pub use types::*;
 
+// A thread-local allocation context for allocating tokens on the proc macro side.
+thread_local!(static CONTEXT: RefCell<AllocationContext> = RefCell::default());
+
+thread_local!(static CALL_SITE: RefCell<(u32, u32)> = RefCell::default());
+
 #[doc(hidden)]
 #[derive(Clone)]
 pub struct ExpansionDefinition {
@@ -94,32 +103,48 @@ pub unsafe extern "C" fn free_expansions_list(list: StableExpansionsList) {
 #[no_mangle]
 pub unsafe extern "C" fn expand(
     item_name: *const c_char,
+    call_site: StableTextSpan,
     stable_attr: cairo_lang_macro_stable::StableTokenStream,
     stable_token_stream: cairo_lang_macro_stable::StableTokenStream,
 ) -> cairo_lang_macro_stable::StableResultWrapper {
-    let token_stream = TokenStream::from_stable(&stable_token_stream);
-    let attr_token_stream = TokenStream::from_stable(&stable_attr);
-    let item_name = CStr::from_ptr(item_name).to_string_lossy().to_string();
-    let fun = MACRO_DEFINITIONS_SLICE
-        .iter()
-        .find_map(|m| {
-            if m.name == item_name.as_str() {
-                Some(m.fun.clone())
-            } else {
-                None
-            }
-        })
-        .expect("procedural macro not found");
-    let result = match fun {
-        ExpansionFunc::Attr(fun) => fun(attr_token_stream, token_stream),
-        ExpansionFunc::Other(fun) => fun(token_stream),
-    };
-    let result: StableProcMacroResult = result.into_stable();
-    cairo_lang_macro_stable::StableResultWrapper {
-        input: stable_token_stream,
-        input_attr: stable_attr,
-        output: result,
-    }
+    CONTEXT.with(|ctx_cell| {
+        // Read the size hint from the stable token stream. It will be used to create a
+        // sufficiently large bump allocation buffer.
+        let size_hint: usize = stable_token_stream.size_hint + stable_attr.size_hint;
+        // Replace the allocation context with a new one.
+        // If there are no interned string guards, the old context will be deallocated.
+        ctx_cell.replace(AllocationContext::with_capacity(size_hint));
+        let ctx_borrow = ctx_cell.borrow();
+        let ctx: &AllocationContext = ctx_borrow.deref();
+        // Set the call site for the current expand call.
+        CALL_SITE.replace((call_site.start, call_site.end));
+        // Copy the stable token stream into the current context.
+        let token_stream = TokenStream::from_stable_in(&stable_token_stream, ctx);
+        let attr_token_stream = TokenStream::from_stable_in(&stable_attr, ctx);
+        let item_name = CStr::from_ptr(item_name)
+            .to_str()
+            .expect("item name must be a valid string");
+        let fun = MACRO_DEFINITIONS_SLICE
+            .iter()
+            .find_map(|m| {
+                if m.name == item_name {
+                    Some(m.fun.clone())
+                } else {
+                    None
+                }
+            })
+            .expect("procedural macro not found");
+        let result = match fun {
+            ExpansionFunc::Attr(fun) => fun(attr_token_stream, token_stream),
+            ExpansionFunc::Other(fun) => fun(token_stream),
+        };
+        let result: StableProcMacroResult = result.into_stable();
+        cairo_lang_macro_stable::StableResultWrapper {
+            input: stable_token_stream,
+            input_attr: stable_attr,
+            output: result,
+        }
+    })
 }
 
 /// Free the memory allocated for the [`StableProcMacroResult`].
@@ -134,7 +159,7 @@ pub unsafe extern "C" fn expand(
 #[doc(hidden)]
 #[no_mangle]
 pub unsafe extern "C" fn free_result(result: StableProcMacroResult) {
-    ProcMacroResult::from_owned_stable(result);
+    ProcMacroResult::free_owned_stable(result);
 }
 
 /// Distributed slice for storing auxiliary data collection callback pointers.
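A hedged sketch of the allocation flow wired up above, using only types this patch adds: `expand` swaps a fresh arena (sized by the incoming `size_hint`) into the thread-local `CONTEXT`, and every token created during expansion interns its string there. Outside of an `expand` call, the same API works against an explicit arena:

```rust
use cairo_lang_macro::{AllocationContext, TextSpan, Token, TokenStream, TokenTree};

fn main() {
    // An explicit arena; inside a macro, `Token::new` would instead draw
    // from the thread-local CONTEXT that `expand` replaces per invocation.
    let ctx = AllocationContext::with_capacity(64);
    // `new_in` copies the string into the arena; the token holds an `Rc`
    // guard to the buffer, so no lifetime parameters leak into the API.
    let token = Token::new_in("fn main() {}", TextSpan::new(0, 12), &ctx);
    let stream = TokenStream::new(vec![TokenTree::from_ident(token)]);
    assert_eq!(stream.to_string(), "fn main() {}");
}
```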
diff --git a/plugins/cairo-lang-macro/src/types/conversion.rs b/plugins/cairo-lang-macro/src/types/conversion.rs
index 2e5516d0a..2a9e6338d 100644
--- a/plugins/cairo-lang-macro/src/types/conversion.rs
+++ b/plugins/cairo-lang-macro/src/types/conversion.rs
@@ -1,12 +1,13 @@
 use crate::{
-    AuxData, Diagnostic, ExpansionDefinition, FullPathMarker, PostProcessContext, ProcMacroResult,
-    Severity, TokenStream, TokenStreamMetadata,
+    AllocationContext, AuxData, Diagnostic, ExpansionDefinition, FullPathMarker,
+    PostProcessContext, ProcMacroResult, Severity, TextSpan, Token, TokenStream,
+    TokenStreamMetadata, TokenTree,
 };
 use cairo_lang_macro_stable::ffi::StableSlice;
 use cairo_lang_macro_stable::{
     StableAuxData, StableDiagnostic, StableExpansion, StableFullPathMarker,
-    StablePostProcessContext, StableProcMacroResult, StableSeverity, StableTokenStream,
-    StableTokenStreamMetadata,
+    StablePostProcessContext, StableProcMacroResult, StableSeverity, StableTextSpan, StableToken,
+    StableTokenStream, StableTokenStreamMetadata, StableTokenTree,
 };
 use std::ffi::{c_char, CStr, CString};
 use std::num::NonZeroU8;
@@ -30,7 +31,7 @@ impl ProcMacroResult {
             .map(|m| CString::new(m).unwrap().into_raw())
             .collect::<Vec<_>>();
         StableProcMacroResult {
-            token_stream: self.token_stream.into_stable(),
+            token_stream: self.token_stream.as_stable(),
             aux_data: AuxData::maybe_into_stable(self.aux_data),
             diagnostics: StableSlice::new(diagnostics),
             full_path_markers: StableSlice::new(full_path_markers),
@@ -39,11 +40,12 @@ impl ProcMacroResult {
 
     /// Convert to native Rust representation, without taking the ownership of the string.
     ///
-    /// Note that you still need to free the memory by calling `from_owned_stable`.
+    /// Note that you still need to free the memory by calling `free_owned_stable`.
     ///
     /// # Safety
     #[doc(hidden)]
     pub unsafe fn from_stable(result: &StableProcMacroResult) -> Self {
+        let ctx = AllocationContext::with_capacity(result.token_stream.size_hint);
         let (ptr, n) = result.diagnostics.raw_parts();
         let diagnostics = slice::from_raw_parts(ptr, n)
             .iter()
             .map(|d| Diagnostic::from_stable(d))
             .collect::<Vec<_>>();
@@ -55,37 +57,130 @@ impl ProcMacroResult {
             .map(|m| from_raw_cstr(*m))
             .collect::<Vec<_>>();
         ProcMacroResult {
-            token_stream: TokenStream::from_stable(&result.token_stream),
+            token_stream: TokenStream::from_stable_in(&result.token_stream, &ctx),
             diagnostics,
             full_path_markers,
             aux_data: AuxData::from_stable(&result.aux_data),
         }
     }
 
-    /// Convert to native Rust representation, with taking the ownership of the string.
+    /// Take the ownership of memory under the pointer and drop it.
     ///
     /// Useful when you need to free the allocated memory.
     /// Only use on the same side of FFI-barrier, where the memory has been allocated.
/// /// # Safety #[doc(hidden)] - pub unsafe fn from_owned_stable(result: StableProcMacroResult) -> Self { - let diagnostics = result.diagnostics.into_owned(); - let diagnostics = diagnostics - .into_iter() - .map(|d| Diagnostic::from_owned_stable(d)) - .collect::>(); - let full_path_markers = result - .full_path_markers - .into_owned() - .iter() - .map(|m| from_raw_cstring(*m)) - .collect::>(); - ProcMacroResult { - token_stream: TokenStream::from_owned_stable(result.token_stream), - aux_data: AuxData::from_owned_stable(result.aux_data), - diagnostics, - full_path_markers, + pub unsafe fn free_owned_stable(result: StableProcMacroResult) { + for diagnostic in result.diagnostics.into_owned() { + Diagnostic::free_owned_stable(diagnostic); + } + for marker in result.full_path_markers.into_owned() { + free_raw_cstring(marker) + } + TokenStream::free_owned_stable(result.token_stream); + AuxData::free_owned_stable(result.aux_data); + } +} + +impl TextSpan { + /// Convert to FFI-safe representation. + #[doc(hidden)] + pub fn into_stable(self) -> StableTextSpan { + StableTextSpan { + start: self.start, + end: self.end, + } + } + + /// Convert to native Rust representation, without taking the ownership. + #[doc(hidden)] + pub fn from_stable(span: &StableTextSpan) -> Self { + Self { + start: span.start, + end: span.end, + } + } + + #[doc(hidden)] + pub fn free_owned_stable(span: StableTextSpan) { + let _ = span; + } +} + +impl Token { + /// Convert to FFI-safe representation. + #[doc(hidden)] + pub fn as_stable(&self) -> StableToken { + let ptr = self.content.as_ptr(); + let len = self.content.len(); + StableToken { + span: self.span.clone().into_stable(), + ptr, + len, + } + } + + /// Convert to native Rust representation, without taking the ownership of the string. + /// + /// Note that you still need to free the memory by calling `free_owned_stable`. + /// + /// # Safety + #[doc(hidden)] + pub unsafe fn from_stable_in(token: &StableToken, ctx: &AllocationContext) -> Self { + let content = slice::from_raw_parts(token.ptr, token.len); + let content = ctx.intern(std::str::from_utf8(content).unwrap()); + Self { + content, + span: TextSpan::from_stable(&token.span), + } + } + + /// Take the ownership of memory under the pointer and drop it. + /// + /// Useful when you need to free the allocated memory. + /// Only use on the same side of FFI-barrier, where the memory has been allocated. + /// + /// # Safety + #[doc(hidden)] + pub unsafe fn free_owned_stable(token: StableToken) { + TextSpan::free_owned_stable(token.span); + } +} + +impl TokenTree { + /// Convert to FFI-safe representation. + #[doc(hidden)] + pub fn as_stable(&self) -> StableTokenTree { + match self { + Self::Ident(token) => StableTokenTree::Ident(token.as_stable()), + } + } + + /// Convert to native Rust representation, without taking the ownership of the string. + /// + /// Note that you still need to free the memory by calling `free_owned_stable`. + /// + /// # Safety + #[doc(hidden)] + pub unsafe fn from_stable_in(token_tree: &StableTokenTree, ctx: &AllocationContext) -> Self { + match token_tree { + StableTokenTree::Ident(token) => Self::Ident(Token::from_stable_in(token, ctx)), + } + } + + /// Take the ownership of memory under the pointer and drop it. + /// + /// Useful when you need to free the allocated memory. + /// Only use on the same side of FFI-barrier, where the memory has been allocated. 
+    ///
+    /// # Safety
+    #[doc(hidden)]
+    pub unsafe fn free_owned_stable(token_tree: StableTokenTree) {
+        match token_tree {
+            StableTokenTree::Ident(token) => {
+                Token::free_owned_stable(token);
+            }
        }
    }
}
@@ -95,39 +190,56 @@ impl TokenStream {
     ///
     /// # Safety
     #[doc(hidden)]
-    pub fn into_stable(self) -> StableTokenStream {
-        let cstr = CString::new(self.value).unwrap();
+    pub fn as_stable(&self) -> StableTokenStream {
+        let mut size_hint: usize = 0;
+        let tokens = self
+            .tokens
+            .iter()
+            .map(|token| {
+                size_hint += token.size_hint();
+                token.as_stable()
+            })
+            .collect::<Vec<_>>();
         StableTokenStream {
-            value: cstr.into_raw(),
-            metadata: self.metadata.into_stable(),
+            tokens: StableSlice::new(tokens),
+            metadata: self.metadata.clone().into_stable(),
+            size_hint,
         }
     }
 
     /// Convert to native Rust representation, without taking the ownership of the string.
     ///
-    /// Note that you still need to free the memory by calling `from_owned_stable`.
+    /// Note that you still need to free the memory by calling `free_owned_stable`.
     ///
     /// # Safety
     #[doc(hidden)]
-    pub unsafe fn from_stable(token_stream: &StableTokenStream) -> Self {
+    pub unsafe fn from_stable_in(
+        token_stream: &StableTokenStream,
+        ctx: &AllocationContext,
+    ) -> Self {
+        let (ptr, n) = token_stream.tokens.raw_parts();
+        let tokens = slice::from_raw_parts(ptr, n)
+            .iter()
+            .map(|token_tree| TokenTree::from_stable_in(token_tree, ctx))
+            .collect::<Vec<_>>();
         Self {
-            value: from_raw_cstr(token_stream.value),
+            tokens,
             metadata: TokenStreamMetadata::from_stable(&token_stream.metadata),
         }
     }
 
-    /// Convert to native Rust representation, with taking the ownership of the string.
+    /// Take the ownership of memory under the pointer and drop it.
     ///
     /// Useful when you need to free the allocated memory.
     /// Only use on the same side of FFI-barrier, where the memory has been allocated.
     ///
     /// # Safety
     #[doc(hidden)]
-    pub unsafe fn from_owned_stable(token_stream: StableTokenStream) -> Self {
-        Self {
-            value: from_raw_cstring(token_stream.value),
-            metadata: TokenStreamMetadata::from_owned_stable(token_stream.metadata),
+    pub unsafe fn free_owned_stable(token_stream: StableTokenStream) {
+        for token_tree in token_stream.tokens.into_owned() {
+            TokenTree::free_owned_stable(token_tree);
         }
+        TokenStreamMetadata::free_owned_stable(token_stream.metadata);
     }
 }
 
@@ -139,19 +251,23 @@ impl TokenStreamMetadata {
     pub fn into_stable(self) -> StableTokenStreamMetadata {
         let original_file_path = self
             .original_file_path
-            .and_then(|path| NonNull::new(CString::new(path).unwrap().into_raw()));
+            .and_then(|value| NonNull::new(CString::new(value).unwrap().into_raw()));
         let file_id = self
             .file_id
-            .and_then(|path| NonNull::new(CString::new(path).unwrap().into_raw()));
+            .and_then(|value| NonNull::new(CString::new(value).unwrap().into_raw()));
+        let edition = self
+            .edition
+            .and_then(|value| NonNull::new(CString::new(value).unwrap().into_raw()));
         StableTokenStreamMetadata {
             original_file_path,
             file_id,
+            edition,
         }
     }
 
     /// Convert to native Rust representation, without taking the ownership of the string.
     ///
-    /// Note that you still need to free the memory by calling `from_owned_stable`.
+    /// Note that you still need to free the memory by calling `free_owned_stable`.
/// /// # Safety #[doc(hidden)] @@ -160,27 +276,30 @@ impl TokenStreamMetadata { .original_file_path .map(|raw| from_raw_cstr(raw.as_ptr())); let file_id = metadata.file_id.map(|raw| from_raw_cstr(raw.as_ptr())); + let edition = metadata.edition.map(|raw| from_raw_cstr(raw.as_ptr())); Self { original_file_path, file_id, + edition, } } - /// Convert to native Rust representation, with taking the ownership of the string. + /// Take the ownership of memory under the pointer and drop it. /// /// Useful when you need to free the allocated memory. /// Only use on the same side of FFI-barrier, where the memory has been allocated. /// /// # Safety #[doc(hidden)] - pub unsafe fn from_owned_stable(metadata: StableTokenStreamMetadata) -> Self { - let original_file_path = metadata - .original_file_path - .map(|raw| from_raw_cstring(raw.as_ptr())); - let file_id = metadata.file_id.map(|raw| from_raw_cstring(raw.as_ptr())); - Self { - original_file_path, - file_id, + pub unsafe fn free_owned_stable(metadata: StableTokenStreamMetadata) { + if let Some(raw) = metadata.original_file_path { + free_raw_cstring(raw.as_ptr()); + } + if let Some(raw) = metadata.file_id { + free_raw_cstring(raw.as_ptr()); + } + if let Some(raw) = metadata.edition { + free_raw_cstring(raw.as_ptr()); } } } @@ -209,7 +328,7 @@ impl AuxData { /// Convert to native Rust representation, without taking the ownership of the string. /// - /// Note that you still need to free the memory by calling `from_owned_stable`. + /// Note that you still need to free the memory by calling `free_owned_stable`. /// /// # Safety #[doc(hidden)] @@ -224,18 +343,20 @@ impl AuxData { } } - /// Convert to native Rust representation, with taking the ownership of the string. + /// Take the ownership of memory under the pointer and drop it. /// /// Useful when you need to free the allocated memory. /// Only use on the same side of FFI-barrier, where the memory has been allocated. /// /// # Safety #[doc(hidden)] - pub unsafe fn from_owned_stable(aux_data: StableAuxData) -> Option { + pub unsafe fn free_owned_stable(aux_data: StableAuxData) { match aux_data { - StableAuxData::None => None, - StableAuxData::Some(raw) => Some(Self::new(raw.into_owned())), - } + StableAuxData::None => {} + StableAuxData::Some(raw) => { + let _ = raw.into_owned(); + } + }; } } @@ -253,7 +374,7 @@ impl Diagnostic { /// Convert to native Rust representation, without taking the ownership of the string. /// - /// Note that you still need to free the memory by calling `from_owned_stable`. + /// Note that you still need to free the memory by calling `free_owned_stable`. /// /// # Safety #[doc(hidden)] @@ -264,18 +385,15 @@ impl Diagnostic { } } - /// Convert to native Rust representation, with taking the ownership of the string. + /// Take the ownership of memory under the pointer and drop it. /// /// Useful when you need to free the allocated memory. /// Only use on the same side of FFI-barrier, where the memory has been allocated. /// /// # Safety #[doc(hidden)] - pub unsafe fn from_owned_stable(diagnostic: StableDiagnostic) -> Self { - Self { - message: from_raw_cstring(diagnostic.message), - severity: Severity::from_stable(&diagnostic.severity), - } + pub unsafe fn free_owned_stable(diagnostic: StableDiagnostic) { + free_raw_cstring(diagnostic.message); } } @@ -314,7 +432,7 @@ impl ExpansionDefinition { } } - /// Take the ownership of the string. + /// Take the ownership of memory under the pointer and drop it. /// /// Useful when you need to free the allocated memory. 
/// Only use on the same side of FFI-barrier, where the memory has been allocated. @@ -322,7 +440,7 @@ impl ExpansionDefinition { /// # Safety #[doc(hidden)] pub unsafe fn free_owned(expansion: StableExpansion) { - let _ = from_raw_cstring(expansion.name); + free_raw_cstring(expansion.name); } } @@ -340,7 +458,7 @@ impl FullPathMarker { /// Convert to native Rust representation, without taking the ownership of the string. /// - /// Note that you still need to free the memory by calling `from_owned_stable`. + /// Note that you still need to free the memory by calling `free_owned_stable`. /// /// # Safety #[doc(hidden)] @@ -351,18 +469,16 @@ impl FullPathMarker { } } - /// Convert to native Rust representation, with taking the ownership of the string. + /// Take the ownership of memory under the pointer and drop it. /// /// Useful when you need to free the allocated memory. /// Only use on the same side of FFI-barrier, where the memory has been allocated. /// /// # Safety #[doc(hidden)] - pub unsafe fn from_owned_stable(marker: StableFullPathMarker) -> Self { - Self { - key: from_raw_cstring(marker.key), - full_path: from_raw_cstring(marker.full_path), - } + pub unsafe fn free_owned_stable(marker: StableFullPathMarker) { + free_raw_cstring(marker.key); + free_raw_cstring(marker.full_path); } } @@ -392,7 +508,7 @@ impl PostProcessContext { /// Convert to native Rust representation, without taking the ownership of the string. /// - /// Note that you still need to free the memory by calling `from_owned_stable`. + /// Note that you still need to free the memory by calling `free_owned_stable`. /// /// # Safety #[doc(hidden)] @@ -413,41 +529,28 @@ impl PostProcessContext { } } - /// Convert to native Rust representation, with taking the ownership of the string. + /// Take the ownership of memory under the pointer and drop it. /// /// Useful when you need to free the allocated memory. /// Only use on the same side of FFI-barrier, where the memory has been allocated. /// /// # Safety #[doc(hidden)] - pub unsafe fn from_owned_stable(diagnostic: StablePostProcessContext) -> Self { - let aux_data = diagnostic - .aux_data - .into_owned() - .into_iter() - .filter_map(|a| AuxData::from_owned_stable(a)) - .collect::>(); - let full_path_markers = diagnostic - .full_path_markers - .into_owned() - .into_iter() - .map(|m| FullPathMarker::from_owned_stable(m)) - .collect::>(); - Self { - aux_data, - full_path_markers, + pub unsafe fn free_owned_stable(diagnostic: StablePostProcessContext) { + for aux_data in diagnostic.aux_data.into_owned() { + AuxData::free_owned_stable(aux_data) + } + for marker in diagnostic.full_path_markers.into_owned() { + FullPathMarker::free_owned_stable(marker); } } } -// Create a string from a raw pointer to a c_char. +// Create a c-string from a raw pointer to a c_char, and drop it immediately. // Note that this will free the underlying memory. 
-unsafe fn from_raw_cstring(raw: *mut c_char) -> String {
-    if raw.is_null() {
-        String::default()
-    } else {
-        let cstr = CString::from_raw(raw);
-        cstr.to_string_lossy().to_string()
+unsafe fn free_raw_cstring(raw: *mut c_char) {
+    if !raw.is_null() {
+        let _ = CString::from_raw(raw);
     }
 }
diff --git a/plugins/cairo-lang-macro/src/types/mod.rs b/plugins/cairo-lang-macro/src/types/mod.rs
index a811d7558..a679ed3f9 100644
--- a/plugins/cairo-lang-macro/src/types/mod.rs
+++ b/plugins/cairo-lang-macro/src/types/mod.rs
@@ -1,10 +1,11 @@
-use std::fmt::Display;
 use std::vec::IntoIter;
 
 mod conversion;
 mod expansions;
+mod token;
 
 pub use expansions::*;
+pub use token::*;
 
 /// Result of procedural macro code generation.
 #[derive(Debug, Clone)]
@@ -15,78 +16,6 @@ pub struct ProcMacroResult {
     pub full_path_markers: Vec<String>,
 }
 
-/// An abstract stream of Cairo tokens.
-///
-/// This is both input and part of an output of a procedural macro.
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)]
-pub struct TokenStream {
-    value: String,
-    metadata: TokenStreamMetadata,
-}
-
-/// Metadata of [`TokenStream`].
-///
-/// This struct can be used to describe the origin of the [`TokenStream`].
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)]
-pub struct TokenStreamMetadata {
-    /// The path to the file from which the [`TokenStream`] has been created.
-    pub original_file_path: Option<String>,
-    /// ID of the file from which the [`TokenStream`] has been created.
-    ///
-    /// It is guaranteed, that the `file_id` will be unique for each file.
-    pub file_id: Option<String>,
-}
-
-impl TokenStream {
-    #[doc(hidden)]
-    pub fn new(value: String) -> Self {
-        Self {
-            value,
-            metadata: TokenStreamMetadata::default(),
-        }
-    }
-
-    #[doc(hidden)]
-    pub fn empty() -> Self {
-        Self::new("".to_string())
-    }
-
-    #[doc(hidden)]
-    pub fn with_metadata(mut self, metadata: TokenStreamMetadata) -> Self {
-        self.metadata = metadata;
-        self
-    }
-
-    /// Get [`TokenStreamMetadata`] associated with this [`TokenStream`].
-    ///
-    /// The metadata struct can be used to describe the [`TokenStream`] origin.
-    pub fn metadata(&self) -> &TokenStreamMetadata {
-        &self.metadata
-    }
-
-    pub fn is_empty(&self) -> bool {
-        self.to_string().is_empty()
-    }
-}
-
-impl Display for TokenStream {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{}", self.value)
-    }
-}
-
-impl TokenStreamMetadata {
-    #[doc(hidden)]
-    pub fn new(file_path: impl ToString, file_id: impl ToString) -> Self {
-        Self {
-            original_file_path: Some(file_path.to_string()),
-            file_id: Some(file_id.to_string()),
-        }
-    }
-}
-
 /// **Auxiliary data** returned by procedural macro code generation.
 ///
 /// This struct can be used to collect additional information from the Cairo source code of
 /// your project.
@@ -101,7 +30,7 @@ impl TokenStreamMetadata {
 /// For instance, auxiliary data can be serialized as JSON.
 ///
 /// ```
-/// use cairo_lang_macro::{AuxData, ProcMacroResult, TokenStream, attribute_macro, post_process, PostProcessContext};
+/// use cairo_lang_macro::{AuxData, ProcMacroResult, TokenStream, TokenTree, Token, TextSpan, attribute_macro, post_process, PostProcessContext};
 /// use serde::{Serialize, Deserialize};
 /// #[derive(Debug, Serialize, Deserialize)]
 /// struct SomeAuxDataFormat {
@@ -110,11 +39,16 @@ impl TokenStreamMetadata {
 ///
 /// #[attribute_macro]
 /// pub fn some_macro(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult {
-///     let token_stream = TokenStream::new(
-///         token_stream.to_string()
-///             // Remove macro call to avoid infinite loop.
-///             .replace("#[some]", "")
-///     );
+///     // Remove macro call to avoid infinite loop.
+///     let code = token_stream.to_string().replace("#[some]", "");
+///     let token_stream = TokenStream::new(vec![
+///         TokenTree::Ident(
+///             Token::new(
+///                 &code,
+///                 TextSpan::new(0, code.len() as u32)
+///             )
+///         )
+///     ]);
 ///     let value = SomeAuxDataFormat { some_message: "Hello from some macro!".to_string() };
 ///     let value = serde_json::to_string(&value).unwrap();
 ///     let value: Vec<u8> = value.into_bytes();
diff --git a/plugins/cairo-lang-macro/src/types/token.rs b/plugins/cairo-lang-macro/src/types/token.rs
new file mode 100644
index 000000000..b9d0ca4b8
--- /dev/null
+++ b/plugins/cairo-lang-macro/src/types/token.rs
@@ -0,0 +1,488 @@
+use crate::{CALL_SITE, CONTEXT};
+use bumpalo::Bump;
+use cairo_lang_primitive_token::{PrimitiveSpan, PrimitiveToken, ToPrimitiveTokenStream};
+use std::fmt::{Debug, Display, Write};
+use std::hash::{Hash, Hasher};
+use std::iter::{once, Map, Once};
+use std::ops::Deref;
+use std::rc::Rc;
+use std::vec::IntoIter;
+
+/// An abstract stream of Cairo tokens.
+///
+/// This is both input and part of an output of a procedural macro.
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+#[cfg_attr(feature = "serde", serde(try_from = "deserializer::TokenStream"))]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TokenStream {
+    pub tokens: Vec<TokenTree>,
+    pub metadata: TokenStreamMetadata,
+}
+
+/// This module implements deserialization of the token stream, for the serde feature.
+/// This intermediate representation is needed, as the real [`Token`] only contains a reference to the
+/// represented string, which needs to be allocated outside the [`Token`] struct.
+/// Here we deserialize each token into an owned `String` with serde and then copy its content into the context.
+#[cfg(feature = "serde")]
+#[doc(hidden)]
+mod deserializer {
+    use crate::{AllocationContext, TextSpan, TokenStreamMetadata};
+    use std::fmt::{Display, Formatter};
+
+    #[derive(serde::Serialize, serde::Deserialize)]
+    pub struct TokenStream {
+        pub tokens: Vec<TokenTree>,
+        pub metadata: TokenStreamMetadata,
+    }
+
+    #[derive(serde::Serialize, serde::Deserialize)]
+    pub enum TokenTree {
+        Ident(Token),
+    }
+
+    #[derive(serde::Serialize, serde::Deserialize)]
+    pub struct Token {
+        pub content: String,
+        pub span: TextSpan,
+    }
+
+    pub struct Error {}
+
+    impl Display for Error {
+        fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+            f.write_str("TokenStream deserialization error")
+        }
+    }
+
+    impl TryFrom<TokenStream> for crate::TokenStream {
+        type Error = Error;
+
+        fn try_from(value: TokenStream) -> Result<Self, Self::Error> {
+            let ctx = AllocationContext::default();
+            let tokens = value
+                .tokens
+                .into_iter()
+                .map(|token| match token {
+                    TokenTree::Ident(token) => {
+                        let content = ctx.intern(token.content.as_str());
+                        let token = crate::Token {
+                            content,
+                            span: token.span,
+                        };
+                        crate::TokenTree::Ident(token)
+                    }
+                })
+                .collect::<Vec<_>>();
+            Ok(Self {
+                tokens,
+                metadata: value.metadata,
+            })
+        }
+    }
+}
+
+/// A single token or a delimited sequence of token trees.
+#[cfg_attr(feature = "serde", derive(serde::Serialize))]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum TokenTree {
+    Ident(Token),
+}
+
+impl TokenTree {
+    /// Get the size hint for the [`TokenTree`].
+    /// This can be used to estimate the size of a buffer needed for allocating this [`TokenTree`].
+    pub(crate) fn size_hint(&self) -> usize {
+        match self {
+            Self::Ident(token) => token.size_hint(),
+        }
+    }
+}
+
+pub type TextOffset = u32;
+
+/// A range of text offsets that form a span (like text selection).
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TextSpan {
+    pub start: TextOffset,
+    pub end: TextOffset,
+}
+
+/// A single Cairo token.
+///
+/// The most atomic item of Cairo code representation.
+#[cfg_attr(feature = "serde", derive(serde::Serialize))]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Token {
+    pub content: InternedStr,
+    pub span: TextSpan,
+}
+
+impl Token {
+    /// Get the size hint for the [`Token`].
+    /// This can be used to estimate the size of a buffer needed for allocating this [`Token`].
+    pub(crate) fn size_hint(&self) -> usize {
+        self.content.deref().len()
+    }
+}
+
+/// A wrapper over a string pointer.
+/// This contains a pointer to a string allocated in a bump allocator
+/// and a guard which keeps the buffer alive.
+/// This way we do not need to allocate a new string,
+/// but also do not need to worry about the lifetime of the string.
+#[derive(Clone)]
+pub struct InternedStr {
+    ptr: *const str,
+    // Holds an `Rc` to the underlying buffer, so that `ptr` will always point to valid memory.
+    _bump: Rc<BumpWrap>,
+}
+
+impl Debug for InternedStr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        f.write_char('"')?;
+        f.write_str(self.as_ref())?;
+        f.write_char('"')
+    }
+}
+
+impl InternedStr {
+    #[allow(unknown_lints)]
+    #[allow(private_interfaces)]
+    #[doc(hidden)]
+    pub(crate) fn new_in(s: &str, bump: Rc<BumpWrap>) -> Self {
+        let allocated = bump.0.alloc_str(s);
+        let ptr = allocated as *const str;
+        Self { ptr, _bump: bump }
+    }
+}
+
+impl AsRef<str> for InternedStr {
+    fn as_ref(&self) -> &str {
+        self.deref()
+    }
+}
+
+impl Deref for InternedStr {
+    type Target = str;
+
+    fn deref(&self) -> &Self::Target {
+        unsafe { &*self.ptr }
+    }
+}
+
+#[cfg(feature = "serde")]
+impl serde::Serialize for InternedStr {
+    fn serialize<S: serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
+        s.serialize_str(self.as_ref())
+    }
+}
+
+impl PartialEq for InternedStr {
+    fn eq(&self, other: &Self) -> bool {
+        self.as_ref().eq(other.as_ref())
+    }
+}
+
+impl Eq for InternedStr {}
+
+impl Hash for InternedStr {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        self.as_ref().hash(state);
+    }
+}
+
+/// This wrapper de-allocates the underlying buffer on drop.
+#[derive(Debug)]
+pub(crate) struct BumpWrap(pub Bump);
+
+impl Drop for BumpWrap {
+    fn drop(&mut self) {
+        self.0.reset();
+    }
+}
+
+/// A context for allocating Cairo tokens.
+/// This wrapper contains a bump allocator, which is used to allocate strings for tokens.
+#[derive(Clone)]
+pub struct AllocationContext {
+    bump: Rc<BumpWrap>,
+}
+
+impl AllocationContext {
+    /// Allocate a new context with a pre-determined buffer size.
+    pub fn with_capacity(size_hint: usize) -> Self {
+        Self {
+            bump: Rc::new(BumpWrap(Bump::with_capacity(size_hint))),
+        }
+    }
+
+    /// Allocate a string in the context.
+    /// This returns a string pointer, guarded by a reference counter to the buffer.
+    /// The buffer will be deallocated when the last reference to the buffer is dropped.
+    /// No special handling or lifetimes are needed for the string.
+    pub(crate) fn intern(&self, value: &str) -> InternedStr {
+        InternedStr::new_in(value, self.bump.clone())
+    }
+}
+
+impl Default for AllocationContext {
+    fn default() -> Self {
+        Self {
+            bump: Rc::new(BumpWrap(Bump::new())),
+        }
+    }
+}
+
+/// Metadata of [`TokenStream`].
+///
+/// This struct describes the origin of the [`TokenStream`].
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)]
+pub struct TokenStreamMetadata {
+    /// The path to the file from which the [`TokenStream`] has been created.
+    pub original_file_path: Option<String>,
+    /// ID of the file from which the [`TokenStream`] has been created.
+    ///
+    /// It is guaranteed, that the `file_id` will be unique for each file.
+    pub file_id: Option<String>,
+    /// Cairo edition defined for the token stream.
+    pub edition: Option<String>,
+}
+
+impl TokenStream {
+    #[doc(hidden)]
+    pub fn new(tokens: Vec<TokenTree>) -> Self {
+        Self {
+            tokens,
+            metadata: TokenStreamMetadata::default(),
+        }
+    }
+
+    #[doc(hidden)]
+    pub fn empty() -> Self {
+        Self::new(Vec::default())
+    }
+
+    #[doc(hidden)]
+    pub fn with_metadata(mut self, metadata: TokenStreamMetadata) -> Self {
+        self.metadata = metadata;
+        self
+    }
+
+    /// Get [`TokenStreamMetadata`] associated with this [`TokenStream`].
+    ///
+    /// The metadata struct can be used to describe the [`TokenStream`] origin.
+    pub fn metadata(&self) -> &TokenStreamMetadata {
+        &self.metadata
+    }
+
+    /// Check if the [`TokenStream`] is empty.
+    pub fn is_empty(&self) -> bool {
+        self.tokens.is_empty()
+    }
+
+    pub fn from_primitive_token_stream(
+        stable_token_stream: impl Iterator<Item = PrimitiveToken>,
+    ) -> Self {
+        Self::new(
+            stable_token_stream
+                .map(|stable_token| {
+                    TokenTree::Ident(Token::new(
+                        stable_token.content,
+                        stable_token
+                            .span
+                            .map(|stable_span| TextSpan {
+                                start: stable_span.start as u32,
+                                end: stable_span.end as u32,
+                            })
+                            .unwrap_or(TextSpan::call_site()),
+                    ))
+                })
+                .collect(),
+        )
+    }
+
+    pub fn push_token(&mut self, token_tree: TokenTree) {
+        self.tokens.push(token_tree);
+    }
+}
+
+impl IntoIterator for TokenStream {
+    type Item = TokenTree;
+    type IntoIter = IntoIter<TokenTree>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.tokens.into_iter()
+    }
+}
+
+impl Extend<TokenTree> for TokenStream {
+    fn extend<T: IntoIterator<Item = TokenTree>>(&mut self, iter: T) {
+        self.tokens.extend(iter);
+    }
+}
+
+impl Display for TokenStream {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        for token in &self.tokens {
+            match token {
+                TokenTree::Ident(token) => {
+                    write!(f, "{}", token.content.as_ref())?;
+                }
+            }
+        }
+        Ok(())
+    }
+}
+
+impl TokenStreamMetadata {
+    #[doc(hidden)]
+    pub fn new(file_path: impl ToString, file_id: impl ToString, edition: impl ToString) -> Self {
+        Self {
+            original_file_path: Some(file_path.to_string()),
+            file_id: Some(file_id.to_string()),
+            edition: Some(edition.to_string()),
+        }
+    }
+}
+
+impl TokenTree {
+    /// Create a new [`TokenTree`] from an identifier [`Token`].
+    pub fn from_ident(token: Token) -> Self {
+        Self::Ident(token)
+    }
+}
+
+impl TextSpan {
+    /// Create a new [`TextSpan`].
+    pub fn new(start: TextOffset, end: TextOffset) -> TextSpan {
+        TextSpan { start, end }
+    }
+
+    /// Create a new [`TextSpan`], located at the invocation of the current procedural macro.
+    /// Identifiers created with this span will be resolved as if they were written directly at
+    /// the macro call location (call-site hygiene).
+    pub fn call_site() -> Self {
+        CALL_SITE.with(|call_site| {
+            let call_site = call_site.borrow();
+            Self::new(call_site.0, call_site.1)
+        })
+    }
+
+    /// Create a new [`TextSpan`], with width `0`, located right before this span.
+    pub fn start(self) -> Self {
+        Self::new(self.start, self.start)
+    }
+
+    /// Create a new [`TextSpan`], with width `0`, located right after this span.
+    pub fn end(self) -> Self {
+        Self::new(self.end, self.end)
+    }
+}
+
+impl Token {
+    /// Create [`Token`] in thread-local context.
+    pub fn new(content: impl AsRef<str>, span: TextSpan) -> Self {
+        CONTEXT.with(|ctx| {
+            let ctx_borrow = ctx.borrow();
+            let ctx: &AllocationContext = ctx_borrow.deref();
+            Self::new_in(content, span, ctx)
+        })
+    }
+
+    /// Create [`Token`] in specified context.
+    pub fn new_in(content: impl AsRef<str>, span: TextSpan, ctx: &AllocationContext) -> Self {
+        let content = ctx.intern(content.as_ref());
+        Self { content, span }
+    }
+}
+
+impl ToPrimitiveTokenStream for TokenStream {
+    type Iter = Map<IntoIter<TokenTree>, fn(TokenTree) -> PrimitiveToken>;
+    fn to_primitive_token_stream(&self) -> Self::Iter {
+        self.tokens
+            .clone()
+            .into_iter()
+            .map(|token_tree| match token_tree {
+                TokenTree::Ident(token) => PrimitiveToken::new(
+                    token.content.to_string(),
+                    Some(PrimitiveSpan {
+                        start: token.span.start as usize,
+                        end: token.span.end as usize,
+                    }),
+                ),
+            })
+    }
+}
+
+impl ToPrimitiveTokenStream for TokenTree {
+    type Iter = Once<PrimitiveToken>;
+    fn to_primitive_token_stream(&self) -> Self::Iter {
+        once(match self {
+            TokenTree::Ident(token) => PrimitiveToken::new(
+                token.content.to_string(),
+                Some(PrimitiveSpan {
+                    start: token.span.start as usize,
+                    end: token.span.end as usize,
+                }),
+            ),
+        })
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use crate::{AllocationContext, TextSpan, Token, TokenStream, TokenTree};
+
+    #[test]
+    pub fn can_serde_empty_token_stream() {
+        let original = TokenStream::empty();
+        let serialized = serde_json::to_string(&original).unwrap();
+        let derived: TokenStream = serde_json::from_str(serialized.as_str()).unwrap();
+        assert_eq!(original, derived);
+        let val: serde_json::Value = serde_json::from_str(serialized.as_str()).unwrap();
+        assert_eq!(
+            val,
+            serde_json::json!({
+                "tokens": [],
+                "metadata": {
+                    "original_file_path": null,
+                    "file_id": null,
+                    "edition": null
+                }
+            })
+        );
+    }
+
+    #[test]
+    pub fn can_serde_token_stream() {
+        let ctx = AllocationContext::default();
+        let original = TokenStream::new(vec![
+            TokenTree::Ident(Token::new_in("first", TextSpan::new(0, 1), &ctx)),
+            TokenTree::Ident(Token::new_in("second", TextSpan::new(2, 3), &ctx)),
+            TokenTree::Ident(Token::new_in("third", TextSpan::new(4, 5), &ctx)),
+            TokenTree::Ident(Token::new_in("fourth", TextSpan::new(6, 7), &ctx)),
+        ]);
+        let serialized = serde_json::to_string(&original).unwrap();
+        let derived: TokenStream = serde_json::from_str(serialized.as_str()).unwrap();
+        assert_eq!(original, derived);
+        let val: serde_json::Value = serde_json::from_str(serialized.as_str()).unwrap();
+        assert_eq!(
+            val,
+            serde_json::json!({
+                "tokens": [
+                    {"Ident": {"content": "first", "span": {"start": 0, "end": 1}}},
+                    {"Ident": {"content": "second", "span": {"start": 2, "end": 3}}},
+                    {"Ident": {"content": "third", "span": {"start": 4, "end": 5}}},
+                    {"Ident": {"content": "fourth", "span": {"start": 6, "end": 7}}},
+                ],
+                "metadata": {
+                    "original_file_path": null,
+                    "file_id": null,
+                    "edition": null
+                }
+            })
+        );
+    }
+}
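Before the new `cairo-lang-quote` crate below, a short hedged sketch of the interop path the `ToPrimitiveTokenStream` impls above enable; this lowering to `cairo-lang-primitive-token` and back is what `quote!` relies on for `#var` interpolation:

```rust
use cairo_lang_macro::{TextSpan, Token, TokenStream, TokenTree};
use cairo_lang_primitive_token::ToPrimitiveTokenStream;

fn main() {
    let original = TokenStream::new(vec![TokenTree::Ident(Token::new(
        "felt252",
        TextSpan::new(0, 7),
    ))]);
    // Lower to the shared primitive representation, then rebuild.
    let rebuilt =
        TokenStream::from_primitive_token_stream(original.to_primitive_token_stream());
    assert_eq!(original.to_string(), rebuilt.to_string());
}
```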
diff --git a/plugins/cairo-lang-quote/Cargo.toml b/plugins/cairo-lang-quote/Cargo.toml
new file mode 100644
index 000000000..7249b2a57
--- /dev/null
+++ b/plugins/cairo-lang-quote/Cargo.toml
@@ -0,0 +1,19 @@
+[package]
+name = "cairo-lang-quote"
+version = "0.1.0"
+edition.workspace = true
+
+authors.workspace = true
+categories = ["development-tools"]
+description = "Cairo procedural macro helper for constructing procedural macro results."
+homepage.workspace = true
+keywords = ["scarb"]
+license.workspace = true
+repository.workspace = true
+
+[lib]
+proc-macro = true
+
+[dependencies]
+proc-macro2.workspace = true
+quote.workspace = true
diff --git a/plugins/cairo-lang-quote/src/lib.rs b/plugins/cairo-lang-quote/src/lib.rs
new file mode 100644
index 000000000..332bcb98d
--- /dev/null
+++ b/plugins/cairo-lang-quote/src/lib.rs
@@ -0,0 +1,115 @@
+use std::iter::Peekable;
+
+use proc_macro::{Delimiter, TokenStream as RustTokenStream, TokenTree as RustTokenTree};
+use proc_macro2::{Ident, Span};
+
+extern crate proc_macro;
+use quote::quote as rust_quote;
+
+#[derive(Debug)]
+enum QuoteToken {
+    Var(Ident),
+    Content(String),
+    Whitespace,
+}
+
+enum DelimiterVariant {
+    Open,
+    Close,
+}
+
+impl QuoteToken {
+    pub fn from_delimiter(delimiter: Delimiter, variant: DelimiterVariant) -> Self {
+        match (delimiter, variant) {
+            (Delimiter::Brace, DelimiterVariant::Open) => Self::Content("{".to_string()),
+            (Delimiter::Brace, DelimiterVariant::Close) => Self::Content("}".to_string()),
+            (Delimiter::Bracket, DelimiterVariant::Open) => Self::Content("[".to_string()),
+            (Delimiter::Bracket, DelimiterVariant::Close) => Self::Content("]".to_string()),
+            (Delimiter::Parenthesis, DelimiterVariant::Open) => Self::Content("(".to_string()),
+            (Delimiter::Parenthesis, DelimiterVariant::Close) => Self::Content(")".to_string()),
+            (Delimiter::None, _) => Self::Content(String::default()),
+        }
+    }
+}
+
+fn process_token_stream(
+    mut token_stream: Peekable<impl Iterator<Item = RustTokenTree>>,
+    output: &mut Vec<QuoteToken>,
+) {
+    // The Rust proc-macro parser removes all whitespace when building a TokenStream.
+    // Here we just make sure no two identifiers are emitted without a space between them.
+    let mut was_previous_ident: bool = false;
+    while let Some(token_tree) = token_stream.next() {
+        match token_tree {
+            RustTokenTree::Group(group) => {
+                let token_iter = group.stream().into_iter().peekable();
+                let delimiter = group.delimiter();
+                output.push(QuoteToken::from_delimiter(
+                    delimiter,
+                    DelimiterVariant::Open,
+                ));
+                process_token_stream(token_iter, output);
+                output.push(QuoteToken::from_delimiter(
+                    delimiter,
+                    DelimiterVariant::Close,
+                ));
+                was_previous_ident = false;
+            }
+            RustTokenTree::Punct(punct) => {
+                if punct.as_char() == '#' {
+                    if let Some(RustTokenTree::Ident(ident)) = token_stream.next() {
+                        let var_ident = Ident::new(&ident.to_string(), Span::call_site());
+                        output.push(QuoteToken::Var(var_ident))
+                    }
+                } else {
+                    output.push(QuoteToken::Content(punct.to_string()));
+                }
+                was_previous_ident = false;
+            }
+            RustTokenTree::Ident(ident) => {
+                if was_previous_ident {
+                    output.push(QuoteToken::Whitespace);
+                }
+                output.push(QuoteToken::Content(ident.to_string()));
+                was_previous_ident = true;
+            }
+            RustTokenTree::Literal(literal) => {
+                output.push(QuoteToken::Content(literal.to_string()));
+                was_previous_ident = false;
+            }
+        }
+    }
+}
+
+#[proc_macro]
+pub fn quote(input: RustTokenStream) -> RustTokenStream {
+    let mut parsed_input: Vec<QuoteToken> = Vec::new();
+    let mut output_token_stream = rust_quote! {
+        let mut quote_macro_result = ::cairo_lang_macro::TokenStream::empty();
+    };
+
+    let token_iter = input.into_iter().peekable();
+    process_token_stream(token_iter, &mut parsed_input);
+
+    for quote_token in parsed_input.iter() {
+        match quote_token {
+            QuoteToken::Content(content) => {
+                output_token_stream.extend(rust_quote!
{ + quote_macro_result.push_token(::cairo_lang_macro::TokenTree::Ident(::cairo_lang_macro::Token::new(::std::string::ToString::to_string(#content), ::cairo_lang_macro::TextSpan::call_site()))); + }); + } + QuoteToken::Var(ident) => { + output_token_stream.extend(rust_quote! { + quote_macro_result.extend(::cairo_lang_macro::TokenStream::from_primitive_token_stream(::cairo_lang_primitive_token::ToPrimitiveTokenStream::to_primitive_token_stream(&#ident)).into_iter()); + }); + } + QuoteToken::Whitespace => output_token_stream.extend(rust_quote! { + quote_macro_result.push_token(::cairo_lang_macro::TokenTree::Ident(::cairo_lang_macro::Token::new(" ".to_string(), ::cairo_lang_macro::TextSpan::call_site()))); + }), + } + } + RustTokenStream::from(rust_quote!({ + #output_token_stream + quote_macro_result + })) +} diff --git a/scarb/Cargo.toml b/scarb/Cargo.toml index d1d093ab7..6ccc50698 100644 --- a/scarb/Cargo.toml +++ b/scarb/Cargo.toml @@ -22,8 +22,8 @@ cairo-lang-diagnostics.workspace = true cairo-lang-filesystem.workspace = true cairo-lang-formatter.workspace = true cairo-lang-lowering.workspace = true -cairo-lang-macro = "0.1" -cairo-lang-macro-stable = "1" +cairo-lang-macro = { path = "../plugins/cairo-lang-macro" } +cairo-lang-macro-stable = { path = "../plugins/cairo-lang-macro-stable" } cairo-lang-parser.workspace = true cairo-lang-semantic.workspace = true cairo-lang-sierra-to-casm.workspace = true diff --git a/scarb/src/compiler/plugin/proc_macro/ffi.rs b/scarb/src/compiler/plugin/proc_macro/ffi.rs index 4d42bab6c..9c4d18baf 100644 --- a/scarb/src/compiler/plugin/proc_macro/ffi.rs +++ b/scarb/src/compiler/plugin/proc_macro/ffi.rs @@ -1,16 +1,14 @@ +use crate::compiler::plugin::proc_macro::compilation::SharedLibraryProvider; use crate::core::{Config, Package, PackageId}; use anyhow::{ensure, Context, Result}; -use cairo_lang_defs::patcher::PatchBuilder; use cairo_lang_macro::{ ExpansionKind as SharedExpansionKind, FullPathMarker, PostProcessContext, ProcMacroResult, - TokenStream, + TextSpan, TokenStream, }; use cairo_lang_macro_stable::{ StableExpansion, StableExpansionsList, StablePostProcessContext, StableProcMacroResult, - StableResultWrapper, StableTokenStream, + StableResultWrapper, StableTextSpan, StableTokenStream, }; -use cairo_lang_syntax::node::db::SyntaxGroup; -use cairo_lang_syntax::node::TypedSyntaxNode; use camino::Utf8PathBuf; use itertools::Itertools; use libloading::{Library, Symbol}; @@ -18,27 +16,13 @@ use std::ffi::{c_char, CStr, CString}; use std::fmt::Debug; use std::slice; -use crate::compiler::plugin::proc_macro::compilation::SharedLibraryProvider; use crate::compiler::plugin::proc_macro::ProcMacroAuxData; - #[cfg(not(windows))] use libloading::os::unix::Symbol as RawSymbol; #[cfg(windows)] use libloading::os::windows::Symbol as RawSymbol; use smol_str::SmolStr; -pub trait FromSyntaxNode { - fn from_syntax_node(db: &dyn SyntaxGroup, node: &impl TypedSyntaxNode) -> Self; -} - -impl FromSyntaxNode for TokenStream { - fn from_syntax_node(db: &dyn SyntaxGroup, node: &impl TypedSyntaxNode) -> Self { - let mut builder = PatchBuilder::new(db, node); - builder.add_node(node.as_syntax_node()); - Self::new(builder.build().0) - } -} - const EXEC_ATTR_PREFIX: &str = "__exec_attr_"; /// Representation of a single procedural macro. 
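For orientation, a hedged usage sketch of the `quote!` helper introduced above, as a macro author might write it. It assumes the `ProcMacroResult::new` constructor from the existing `cairo-lang-macro` API, and a direct dependency on `cairo-lang-primitive-token`, since the generated code refers to that crate by path:

```rust
use cairo_lang_macro::{attribute_macro, quote, ProcMacroResult, TokenStream};

// Wrap the annotated item in a module. `#body` splices any value
// implementing `ToPrimitiveTokenStream` (here the input `TokenStream`),
// while literal tokens are re-emitted with call-site spans.
#[attribute_macro]
pub fn wrap_in_mod(_attr: TokenStream, body: TokenStream) -> ProcMacroResult {
    let output = quote! {
        mod generated {
            #body
        }
    };
    ProcMacroResult::new(output)
}
```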
@@ -163,25 +147,27 @@ impl ProcMacroInstance { pub(crate) fn generate_code( &self, item_name: SmolStr, + call_site: TextSpan, attr: TokenStream, token_stream: TokenStream, ) -> ProcMacroResult { - // This must be manually freed with call to from_owned_stable. - let stable_token_stream = token_stream.into_stable(); - let stable_attr = attr.into_stable(); + // This must be manually freed with call to `free_owned_stable`. + let stable_token_stream = token_stream.as_stable(); + let stable_attr = attr.as_stable(); // Allocate proc macro name. let item_name = CString::new(item_name.to_string()).unwrap().into_raw(); // Call FFI interface for code expansion. // Note that `stable_result` has been allocated by the dynamic library. + let call_site: StableTextSpan = call_site.into_stable(); let stable_result = - (self.plugin.vtable.expand)(item_name, stable_attr, stable_token_stream); + (self.plugin.vtable.expand)(item_name, call_site, stable_attr, stable_token_stream); // Free proc macro name. let _ = unsafe { CString::from_raw(item_name) }; // Free the memory allocated by the `stable_token_stream`. // This will call `CString::from_raw` under the hood, to take ownership. unsafe { - TokenStream::from_owned_stable(stable_result.input); - TokenStream::from_owned_stable(stable_result.input_attr); + TokenStream::free_owned_stable(stable_result.input); + TokenStream::free_owned_stable(stable_result.input_attr); }; // Create Rust representation of the result. // Note, that the memory still needs to be freed on the allocator side! @@ -206,7 +192,7 @@ impl ProcMacroInstance { // Actual call to FFI interface for aux data callback. let context = (self.plugin.vtable.post_process_callback)(context); // Free the allocated memory. - let _ = unsafe { PostProcessContext::from_owned_stable(context) }; + unsafe { PostProcessContext::free_owned_stable(context) }; } pub fn doc(&self, item_name: SmolStr) -> Option { @@ -286,8 +272,12 @@ impl Expansion { type ListExpansions = extern "C" fn() -> StableExpansionsList; type FreeExpansionsList = extern "C" fn(StableExpansionsList); -type ExpandCode = - extern "C" fn(*const c_char, StableTokenStream, StableTokenStream) -> StableResultWrapper; +type ExpandCode = extern "C" fn( + *const c_char, + StableTextSpan, + StableTokenStream, + StableTokenStream, +) -> StableResultWrapper; type FreeResult = extern "C" fn(StableProcMacroResult); type PostProcessCallback = extern "C" fn(StablePostProcessContext) -> StablePostProcessContext; type DocExpansion = extern "C" fn(*const c_char) -> *mut c_char; diff --git a/scarb/src/compiler/plugin/proc_macro/host.rs b/scarb/src/compiler/plugin/proc_macro/host.rs deleted file mode 100644 index 0886abb81..000000000 --- a/scarb/src/compiler/plugin/proc_macro/host.rs +++ /dev/null @@ -1,1166 +0,0 @@ -use crate::compiler::plugin::proc_macro::{ - Expansion, ExpansionKind, FromSyntaxNode, ProcMacroInstance, -}; -use crate::core::{Config, Package, PackageId}; -use anyhow::{ensure, Result}; -use cairo_lang_defs::ids::{ModuleItemId, TopLevelLanguageElementId}; -use cairo_lang_defs::patcher::{PatchBuilder, RewriteNode}; -use cairo_lang_defs::plugin::{ - DynGeneratedFileAuxData, GeneratedFileAuxData, MacroPlugin, MacroPluginMetadata, - PluginGeneratedFile, PluginResult, -}; -use cairo_lang_defs::plugin::{InlineMacroExprPlugin, InlinePluginResult, PluginDiagnostic}; -use cairo_lang_diagnostics::ToOption; -use cairo_lang_filesystem::ids::CodeMapping; -use cairo_lang_macro::{ - AuxData, Diagnostic, FullPathMarker, ProcMacroResult, Severity, TokenStream, - 
TokenStreamMetadata, -}; -use cairo_lang_semantic::db::SemanticGroup; -use cairo_lang_semantic::items::attribute::SemanticQueryAttrs; -use cairo_lang_semantic::plugin::PluginSuite; -use cairo_lang_syntax::attribute::structured::{ - Attribute, AttributeArgVariant, AttributeStructurize, -}; -use cairo_lang_syntax::node::ast::{Expr, ImplItem, MaybeImplBody, MaybeTraitBody, PathSegment}; -use cairo_lang_syntax::node::db::SyntaxGroup; -use cairo_lang_syntax::node::helpers::QueryAttrs; -use cairo_lang_syntax::node::ids::SyntaxStablePtrId; -use cairo_lang_syntax::node::{ast, Terminal, TypedStablePtr, TypedSyntaxNode}; -use convert_case::{Case, Casing}; -use itertools::Itertools; -use scarb_stable_hash::short_hash; -use smol_str::SmolStr; -use std::any::Any; -use std::collections::{HashMap, HashSet}; -use std::fmt::Debug; -use std::sync::{Arc, OnceLock, RwLock}; -use std::vec::IntoIter; -use tracing::{debug, trace_span}; - -const FULL_PATH_MARKER_KEY: &str = "macro::full_path_marker"; -const DERIVE_ATTR: &str = "derive"; - -/// A Cairo compiler plugin controlling the procedural macro execution. -/// -/// This plugin decides which macro plugins (if any) should be applied to the processed AST item. -/// It then redirects the item to the appropriate macro plugin for code expansion. -#[derive(Debug)] -pub struct ProcMacroHostPlugin { - macros: Vec>, - full_path_markers: RwLock>>, -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct ProcMacroId { - pub package_id: PackageId, - pub expansion: Expansion, -} - -impl ProcMacroId { - pub fn new(package_id: PackageId, expansion: Expansion) -> Self { - Self { - package_id, - expansion, - } - } -} - -#[derive(Clone, Debug, Eq, PartialEq)] -pub struct ProcMacroAuxData { - value: Vec, - macro_id: ProcMacroId, -} - -impl ProcMacroAuxData { - pub fn new(value: Vec, macro_id: ProcMacroId) -> Self { - Self { value, macro_id } - } -} - -impl From for AuxData { - fn from(data: ProcMacroAuxData) -> Self { - Self::new(data.value) - } -} - -#[derive(Debug, Clone, Default)] -pub struct EmittedAuxData(Vec); - -impl GeneratedFileAuxData for EmittedAuxData { - fn as_any(&self) -> &dyn Any { - self - } - - fn eq(&self, other: &dyn GeneratedFileAuxData) -> bool { - self.0 == other.as_any().downcast_ref::().unwrap().0 - } -} - -impl EmittedAuxData { - pub fn new(aux_data: ProcMacroAuxData) -> Self { - Self(vec![aux_data]) - } - - pub fn push(&mut self, aux_data: ProcMacroAuxData) { - self.0.push(aux_data); - } - - pub fn is_empty(&self) -> bool { - self.0.is_empty() - } -} - -impl IntoIterator for EmittedAuxData { - type Item = ProcMacroAuxData; - type IntoIter = IntoIter; - - fn into_iter(self) -> IntoIter { - self.0.into_iter() - } -} - -impl ProcMacroHostPlugin { - pub fn try_new(macros: Vec>) -> Result { - // Validate expansions. 
- let mut expansions = macros - .iter() - .flat_map(|m| { - m.get_expansions() - .iter() - .map(|e| ProcMacroId::new(m.package_id(), e.clone())) - .collect_vec() - }) - .collect::>(); - expansions.sort_unstable_by_key(|e| e.expansion.name.clone()); - ensure!( - expansions - .windows(2) - .all(|w| w[0].expansion.name != w[1].expansion.name), - "duplicate expansions defined for procedural macros: {duplicates}", - duplicates = expansions - .windows(2) - .filter(|w| w[0].expansion.name == w[1].expansion.name) - .map(|w| format!( - "{} ({} and {})", - w[0].expansion.name.as_str(), - w[0].package_id, - w[1].package_id - )) - .collect::>() - .join(", ") - ); - Ok(Self { - macros, - full_path_markers: RwLock::new(Default::default()), - }) - } - - fn expand_inner_attr( - &self, - db: &dyn SyntaxGroup, - item_ast: ast::ModuleItem, - ) -> InnerAttrExpansionResult { - let mut context = InnerAttrExpansionContext::new(self); - let mut item_builder = PatchBuilder::new(db, &item_ast); - let mut used_attr_names: HashSet = Default::default(); - let mut all_none = true; - - match item_ast.clone() { - ast::ModuleItem::Trait(trait_ast) => { - item_builder.add_node(trait_ast.attributes(db).as_syntax_node()); - item_builder.add_node(trait_ast.visibility(db).as_syntax_node()); - item_builder.add_node(trait_ast.trait_kw(db).as_syntax_node()); - item_builder.add_node(trait_ast.name(db).as_syntax_node()); - item_builder.add_node(trait_ast.generic_params(db).as_syntax_node()); - - // Parser attributes for inner functions. - match trait_ast.body(db) { - MaybeTraitBody::None(terminal) => { - item_builder.add_node(terminal.as_syntax_node()); - InnerAttrExpansionResult::None - } - MaybeTraitBody::Some(body) => { - item_builder.add_node(body.lbrace(db).as_syntax_node()); - - let item_list = body.items(db); - for item in item_list.elements(db).iter() { - let ast::TraitItem::Function(func) = item else { - item_builder.add_node(item.as_syntax_node()); - continue; - }; - - let mut func_builder = PatchBuilder::new(db, func); - let attrs = func.attributes(db).elements(db); - let found = self.parse_attrs(db, &mut func_builder, attrs, func); - if let Some(name) = found.as_name() { - used_attr_names.insert(name); - } - func_builder.add_node(func.declaration(db).as_syntax_node()); - func_builder.add_node(func.body(db).as_syntax_node()); - let token_stream = TokenStream::new(func_builder.build().0); - - all_none = all_none - && self.do_expand_inner_attr( - db, - &mut context, - &mut item_builder, - found, - func, - token_stream, - ); - } - - item_builder.add_node(body.rbrace(db).as_syntax_node()); - - if all_none { - InnerAttrExpansionResult::None - } else { - let (code, mappings) = item_builder.build(); - InnerAttrExpansionResult::Some(context.into_result( - code, - mappings, - used_attr_names.into_iter().collect(), - )) - } - } - } - } - - ast::ModuleItem::Impl(impl_ast) => { - item_builder.add_node(impl_ast.attributes(db).as_syntax_node()); - item_builder.add_node(impl_ast.visibility(db).as_syntax_node()); - item_builder.add_node(impl_ast.impl_kw(db).as_syntax_node()); - item_builder.add_node(impl_ast.name(db).as_syntax_node()); - item_builder.add_node(impl_ast.generic_params(db).as_syntax_node()); - item_builder.add_node(impl_ast.of_kw(db).as_syntax_node()); - item_builder.add_node(impl_ast.trait_path(db).as_syntax_node()); - - match impl_ast.body(db) { - MaybeImplBody::None(terminal) => { - item_builder.add_node(terminal.as_syntax_node()); - InnerAttrExpansionResult::None - } - MaybeImplBody::Some(body) => { - 
item_builder.add_node(body.lbrace(db).as_syntax_node()); - - let items = body.items(db); - for item in items.elements(db) { - let ImplItem::Function(func) = item else { - item_builder.add_node(item.as_syntax_node()); - continue; - }; - - let mut func_builder = PatchBuilder::new(db, &func); - let attrs = func.attributes(db).elements(db); - let found = self.parse_attrs(db, &mut func_builder, attrs, &func); - if let Some(name) = found.as_name() { - used_attr_names.insert(name); - } - func_builder.add_node(func.visibility(db).as_syntax_node()); - func_builder.add_node(func.declaration(db).as_syntax_node()); - func_builder.add_node(func.body(db).as_syntax_node()); - let token_stream = TokenStream::new(func_builder.build().0); - all_none = all_none - && self.do_expand_inner_attr( - db, - &mut context, - &mut item_builder, - found, - &func, - token_stream, - ); - } - - item_builder.add_node(body.rbrace(db).as_syntax_node()); - - if all_none { - InnerAttrExpansionResult::None - } else { - let (code, mappings) = item_builder.build(); - InnerAttrExpansionResult::Some(context.into_result( - code, - mappings, - used_attr_names.into_iter().collect(), - )) - } - } - } - } - _ => InnerAttrExpansionResult::None, - } - } - - fn do_expand_inner_attr( - &self, - db: &dyn SyntaxGroup, - context: &mut InnerAttrExpansionContext<'_>, - item_builder: &mut PatchBuilder<'_>, - found: AttrExpansionFound, - func: &impl TypedSyntaxNode, - token_stream: TokenStream, - ) -> bool { - let mut all_none = true; - let (input, args, stable_ptr) = match found { - AttrExpansionFound::Last { - expansion, - args, - stable_ptr, - } => { - all_none = false; - (expansion, args, stable_ptr) - } - AttrExpansionFound::Some { - expansion, - args, - stable_ptr, - } => { - all_none = false; - (expansion, args, stable_ptr) - } - AttrExpansionFound::None => { - item_builder.add_node(func.as_syntax_node()); - return all_none; - } - }; - - let result = self.instance(input.package_id).generate_code( - input.expansion.name.clone(), - args.clone(), - token_stream.clone(), - ); - - let expanded = context.register_result(token_stream.to_string(), input, result, stable_ptr); - item_builder.add_modified(RewriteNode::Mapped { - origin: func.as_syntax_node().span(db), - node: Box::new(RewriteNode::Text(expanded.to_string())), - }); - - all_none - } - - /// Find first attribute procedural macros that should be expanded. - /// - /// Remove the attribute from the code. 
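// Worked example (hypothetical attribute names): for `#[a] #[b] fn f() {}` where both
// `a` and `b` are attribute macros, the first pass consumes `#[a]` and reports
// `AttrExpansionFound::Some`, because another candidate remains and the item must be
// rewritten; the follow-up pass over the rewritten item sees only `#[b]` and reports
// `AttrExpansionFound::Last`, after which no further rewrite is forced.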
- fn parse_attribute( - &self, - db: &dyn SyntaxGroup, - item_ast: ast::ModuleItem, - ) -> (AttrExpansionFound, TokenStream) { - let mut item_builder = PatchBuilder::new(db, &item_ast); - let input = match item_ast.clone() { - ast::ModuleItem::Trait(trait_ast) => { - let attrs = trait_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); - item_builder.add_node(trait_ast.visibility(db).as_syntax_node()); - item_builder.add_node(trait_ast.trait_kw(db).as_syntax_node()); - item_builder.add_node(trait_ast.name(db).as_syntax_node()); - item_builder.add_node(trait_ast.generic_params(db).as_syntax_node()); - item_builder.add_node(trait_ast.body(db).as_syntax_node()); - expansion - } - ast::ModuleItem::Impl(impl_ast) => { - let attrs = impl_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); - item_builder.add_node(impl_ast.visibility(db).as_syntax_node()); - item_builder.add_node(impl_ast.impl_kw(db).as_syntax_node()); - item_builder.add_node(impl_ast.name(db).as_syntax_node()); - item_builder.add_node(impl_ast.generic_params(db).as_syntax_node()); - item_builder.add_node(impl_ast.of_kw(db).as_syntax_node()); - item_builder.add_node(impl_ast.trait_path(db).as_syntax_node()); - item_builder.add_node(impl_ast.body(db).as_syntax_node()); - expansion - } - ast::ModuleItem::Module(module_ast) => { - let attrs = module_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); - item_builder.add_node(module_ast.visibility(db).as_syntax_node()); - item_builder.add_node(module_ast.module_kw(db).as_syntax_node()); - item_builder.add_node(module_ast.name(db).as_syntax_node()); - item_builder.add_node(module_ast.body(db).as_syntax_node()); - expansion - } - ast::ModuleItem::FreeFunction(free_func_ast) => { - let attrs = free_func_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); - item_builder.add_node(free_func_ast.visibility(db).as_syntax_node()); - item_builder.add_node(free_func_ast.declaration(db).as_syntax_node()); - item_builder.add_node(free_func_ast.body(db).as_syntax_node()); - expansion - } - ast::ModuleItem::ExternFunction(extern_func_ast) => { - let attrs = extern_func_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); - item_builder.add_node(extern_func_ast.visibility(db).as_syntax_node()); - item_builder.add_node(extern_func_ast.extern_kw(db).as_syntax_node()); - item_builder.add_node(extern_func_ast.declaration(db).as_syntax_node()); - item_builder.add_node(extern_func_ast.semicolon(db).as_syntax_node()); - expansion - } - ast::ModuleItem::ExternType(extern_type_ast) => { - let attrs = extern_type_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); - item_builder.add_node(extern_type_ast.visibility(db).as_syntax_node()); - item_builder.add_node(extern_type_ast.extern_kw(db).as_syntax_node()); - item_builder.add_node(extern_type_ast.type_kw(db).as_syntax_node()); - item_builder.add_node(extern_type_ast.name(db).as_syntax_node()); - item_builder.add_node(extern_type_ast.generic_params(db).as_syntax_node()); - item_builder.add_node(extern_type_ast.semicolon(db).as_syntax_node()); - expansion - } - ast::ModuleItem::Struct(struct_ast) => { - let attrs = struct_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut item_builder, attrs, 
&item_ast); - item_builder.add_node(struct_ast.visibility(db).as_syntax_node()); - item_builder.add_node(struct_ast.struct_kw(db).as_syntax_node()); - item_builder.add_node(struct_ast.name(db).as_syntax_node()); - item_builder.add_node(struct_ast.generic_params(db).as_syntax_node()); - item_builder.add_node(struct_ast.lbrace(db).as_syntax_node()); - item_builder.add_node(struct_ast.members(db).as_syntax_node()); - item_builder.add_node(struct_ast.rbrace(db).as_syntax_node()); - expansion - } - ast::ModuleItem::Enum(enum_ast) => { - let attrs = enum_ast.attributes(db).elements(db); - let expansion = self.parse_attrs(db, &mut item_builder, attrs, &item_ast); - item_builder.add_node(enum_ast.visibility(db).as_syntax_node()); - item_builder.add_node(enum_ast.enum_kw(db).as_syntax_node()); - item_builder.add_node(enum_ast.name(db).as_syntax_node()); - item_builder.add_node(enum_ast.generic_params(db).as_syntax_node()); - item_builder.add_node(enum_ast.lbrace(db).as_syntax_node()); - item_builder.add_node(enum_ast.variants(db).as_syntax_node()); - item_builder.add_node(enum_ast.rbrace(db).as_syntax_node()); - expansion - } - _ => AttrExpansionFound::None, - }; - let token_stream = TokenStream::new(item_builder.build().0); - (input, token_stream) - } - - fn parse_attrs( - &self, - db: &dyn SyntaxGroup, - builder: &mut PatchBuilder<'_>, - attrs: Vec, - origin: &impl TypedSyntaxNode, - ) -> AttrExpansionFound { - // This function parses attributes of the item, - // checking if those attributes correspond to a procedural macro that should be fired. - // The proc macro attribute found is removed from attributes list, - // while other attributes are appended to the `PathBuilder` passed as an argument. - - // Note this function does not affect the executable attributes, - // as it only pulls `ExpansionKind::Attr` from the plugin. - // This means that executable attributes will neither be removed from the item, - // nor will they cause the item to be rewritten. - let mut expansion = None; - let mut last = true; - for attr in attrs { - // We ensure that this flag is changed *after* the expansion is found. - if last { - let structured_attr = attr.clone().structurize(db); - let found = self.find_expansion(&Expansion::new( - structured_attr.id.clone(), - ExpansionKind::Attr, - )); - if let Some(found) = found { - if expansion.is_none() { - let mut args_builder = PatchBuilder::new(db, origin); - args_builder.add_node(attr.arguments(db).as_syntax_node()); - let args = TokenStream::new(args_builder.build().0); - expansion = Some((found, args, attr.stable_ptr().untyped())); - // Do not add the attribute for found expansion. - continue; - } else { - last = false; - } - } - } - builder.add_node(attr.as_syntax_node()); - } - match (expansion, last) { - (Some((expansion, args, stable_ptr)), true) => AttrExpansionFound::Last { - expansion, - args, - stable_ptr, - }, - (Some((expansion, args, stable_ptr)), false) => AttrExpansionFound::Some { - expansion, - args, - stable_ptr, - }, - (None, _) => AttrExpansionFound::None, - } - } - - /// Handle `#[derive(...)]` attribute. - /// - /// Returns a list of expansions that this plugin should apply. 
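// Worked example (hypothetical derive names): `#[derive(Debug, starknet::Store)]`
// yields the candidate idents `Debug` and `Store`; only the trailing path segment of
// each argument is kept before it is matched against the registered derive expansions.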
- fn parse_derive(&self, db: &dyn SyntaxGroup, item_ast: ast::ModuleItem) -> Vec { - let attrs = match item_ast { - ast::ModuleItem::Struct(struct_ast) => Some(struct_ast.query_attr(db, DERIVE_ATTR)), - ast::ModuleItem::Enum(enum_ast) => Some(enum_ast.query_attr(db, DERIVE_ATTR)), - _ => None, - }; - - attrs - .unwrap_or_default() - .iter() - .map(|attr| attr.clone().structurize(db)) - .flat_map(|attr| attr.args.into_iter()) - .filter_map(|attr| { - let AttributeArgVariant::Unnamed(value) = attr.clone().variant else { - return None; - }; - let Expr::Path(path) = value else { - return None; - }; - let path = path.elements(db); - let path = path.last()?; - let PathSegment::Simple(segment) = path else { - return None; - }; - let ident = segment.ident(db); - let value = ident.text(db).to_string(); - - self.find_expansion(&Expansion::new( - value.to_case(Case::Snake), - ExpansionKind::Derive, - )) - }) - .collect_vec() - } - - fn expand_derives( - &self, - db: &dyn SyntaxGroup, - item_ast: ast::ModuleItem, - stream_metadata: TokenStreamMetadata, - ) -> Option { - let stable_ptr = item_ast.clone().stable_ptr().untyped(); - let token_stream = - TokenStream::from_syntax_node(db, &item_ast).with_metadata(stream_metadata.clone()); - - let mut aux_data = EmittedAuxData::default(); - let mut all_diagnostics: Vec = Vec::new(); - - // All derives to be applied. - let derives = self.parse_derive(db, item_ast.clone()); - let any_derives = !derives.is_empty(); - - let mut derived_code = PatchBuilder::new(db, &item_ast); - for derive in derives.iter() { - let result = self.instance(derive.package_id).generate_code( - derive.expansion.name.clone(), - TokenStream::empty(), - token_stream.clone(), - ); - - // Register diagnostics. - all_diagnostics.extend(result.diagnostics); - - // Register aux data. - if let Some(new_aux_data) = result.aux_data { - aux_data.push(ProcMacroAuxData::new( - new_aux_data.into(), - ProcMacroId::new(derive.package_id, derive.expansion.clone()), - )); - } - - if result.token_stream.is_empty() { - // No code has been generated. - // We do not need to do anything. - continue; - } - - derived_code.add_str(result.token_stream.to_string().as_str()); - } - - if any_derives { - let derived_code = derived_code.build().0; - return Some(PluginResult { - code: if derived_code.is_empty() { - None - } else { - let msg = if derives.len() == 1 { - "the derive macro" - } else { - "one of the derive macros" - }; - let derive_names = derives - .iter() - .map(|derive| derive.expansion.name.to_string()) - .join("`, `"); - let note = format!("this error originates in {msg}: `{derive_names}`"); - Some(PluginGeneratedFile { - name: "proc_macro_derive".into(), - code_mappings: Vec::new(), - content: derived_code, - aux_data: if aux_data.is_empty() { - None - } else { - Some(DynGeneratedFileAuxData::new(aux_data)) - }, - diagnostics_note: Some(note), - }) - }, - diagnostics: into_cairo_diagnostics(all_diagnostics, stable_ptr), - // Note that we don't remove the original item here, unlike for attributes. - // We do not add the original code to the generated file either. - remove_original_item: false, - }); - } - - None - } - - fn expand_attribute( - &self, - input: ProcMacroId, - last: bool, - args: TokenStream, - token_stream: TokenStream, - stable_ptr: SyntaxStablePtrId, - ) -> PluginResult { - let result = self.instance(input.package_id).generate_code( - input.expansion.name.clone(), - args.clone(), - token_stream.clone(), - ); - - // Handle token stream. 
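// By convention, an empty token stream returned by the macro means "drop this item":
// no file is generated and the original code is removed.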
- if result.token_stream.is_empty() { - // Remove original code - return PluginResult { - diagnostics: into_cairo_diagnostics(result.diagnostics, stable_ptr), - code: None, - remove_original_item: true, - }; - } - - // Full path markers require code modification. - self.register_full_path_markers(input.package_id, result.full_path_markers.clone()); - - // This is a minor optimization. - // If the expanded macro attribute is the only one that will be expanded by `ProcMacroHost` - // in this `generate_code` call (i.e. all the other macro attributes has been expanded by - // previous calls), and the expansion did not produce any changes, we can skip rewriting the - // expanded node by simply returning no generated code, and leaving the original item as is. - // However, if we have other macro attributes to expand, we must rewrite the node even if no - // changes have been produced, so that we can parse the attributes once again and expand them. - // In essence, `code: None, remove_original_item: false` means `ProcMacroHost` will not be - // called again for this AST item. - // This optimization limits the number of generated nodes a bit. - if last - && result.aux_data.is_none() - && token_stream.to_string() == result.token_stream.to_string() - { - return PluginResult { - code: None, - remove_original_item: false, - diagnostics: into_cairo_diagnostics(result.diagnostics, stable_ptr), - }; - } - - let file_name = format!("proc_{}", input.expansion.name); - let content = result.token_stream.to_string(); - PluginResult { - code: Some(PluginGeneratedFile { - name: file_name.into(), - code_mappings: Vec::new(), - content, - diagnostics_note: Some(format!( - "this error originates in the attribute macro: `{}`", - input.expansion.name - )), - aux_data: result.aux_data.map(|new_aux_data| { - DynGeneratedFileAuxData::new(EmittedAuxData::new(ProcMacroAuxData::new( - new_aux_data.into(), - input, - ))) - }), - }), - diagnostics: into_cairo_diagnostics(result.diagnostics, stable_ptr), - remove_original_item: true, - } - } - - fn find_expansion(&self, expansion: &Expansion) -> Option { - self.macros - .iter() - .find(|m| m.get_expansions().contains(expansion)) - .map(|m| m.package_id()) - .map(|package_id| ProcMacroId::new(package_id, expansion.clone())) - } - - pub fn build_plugin_suite(macro_host: Arc) -> PluginSuite { - let mut suite = PluginSuite::default(); - // Register inline macro plugins. - for proc_macro in ¯o_host.macros { - let expansions = proc_macro - .get_expansions() - .iter() - .filter(|exp| matches!(exp.kind, ExpansionKind::Inline)); - for expansion in expansions { - let plugin = Arc::new(ProcMacroInlinePlugin::new( - proc_macro.clone(), - expansion.clone(), - )); - suite.add_inline_macro_plugin_ex(expansion.name.as_str(), plugin); - } - } - // Register procedural macro host plugin. 
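// The resulting suite thus contains one inline plugin per inline expansion, plus this
// single host plugin, which serves all attribute and derive expansions.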
- suite.add_plugin_ex(macro_host); - suite - } - - #[tracing::instrument(level = "trace", skip_all)] - pub fn post_process(&self, db: &dyn SemanticGroup) -> Result<()> { - let markers = self.collect_full_path_markers(db); - - let aux_data = self.collect_aux_data(db); - for instance in self.macros.iter() { - let _ = trace_span!( - "post_process_callback", - instance = %instance.package_id() - ) - .entered(); - let instance_markers = self - .full_path_markers - .read() - .unwrap() - .get(&instance.package_id()) - .cloned() - .unwrap_or_default(); - let markers_for_instance = markers - .iter() - .filter(|(key, _)| instance_markers.contains(key)) - .map(|(key, full_path)| FullPathMarker { - key: key.clone(), - full_path: full_path.clone(), - }) - .collect_vec(); - let data = aux_data - .get(&instance.package_id()) - .cloned() - .unwrap_or_default(); - debug!("calling post processing callback with: {data:?}"); - instance.post_process_callback(data.clone(), markers_for_instance); - } - Ok(()) - } - - fn collect_full_path_markers(&self, db: &dyn SemanticGroup) -> HashMap { - let mut markers: HashMap = HashMap::new(); - // FULL_PATH_MARKER_KEY - for crate_id in db.crates() { - let modules = db.crate_modules(crate_id); - for module_id in modules.iter() { - let Ok(module_items) = db.module_items(*module_id) else { - continue; - }; - for item_id in module_items.iter() { - let attr = match item_id { - ModuleItemId::Struct(id) => { - id.query_attr(db, FULL_PATH_MARKER_KEY).to_option() - } - ModuleItemId::Enum(id) => { - id.query_attr(db, FULL_PATH_MARKER_KEY).to_option() - } - ModuleItemId::FreeFunction(id) => { - id.query_attr(db, FULL_PATH_MARKER_KEY).to_option() - } - _ => None, - }; - - let keys = attr - .unwrap_or_default() - .into_iter() - .filter_map(|attr| Self::extract_key(db, attr)) - .collect_vec(); - let full_path = item_id.full_path(db.upcast()); - for key in keys { - markers.insert(key, full_path.clone()); - } - } - } - } - markers - } - - fn extract_key(db: &dyn SemanticGroup, attr: Attribute) -> Option { - if attr.id != FULL_PATH_MARKER_KEY { - return None; - } - - for arg in attr.args.clone() { - if let AttributeArgVariant::Unnamed(Expr::String(s)) = arg.variant { - return s.string_value(db.upcast()); - } - } - - None - } - - fn collect_aux_data( - &self, - db: &dyn SemanticGroup, - ) -> HashMap> { - let mut data = Vec::new(); - for crate_id in db.crates() { - let crate_modules = db.crate_modules(crate_id); - for module in crate_modules.iter() { - let file_infos = db.module_generated_file_aux_data(*module); - if let Ok(file_infos) = file_infos { - for file_info in file_infos.iter() { - let aux_data = file_info - .as_ref() - .and_then(|ad| ad.as_any().downcast_ref::()); - if let Some(aux_data) = aux_data { - data.extend(aux_data.clone().into_iter()); - } - } - } - } - } - data.into_iter() - .into_group_map_by(|d| d.macro_id.package_id) - } - - pub fn instance(&self, package_id: PackageId) -> &ProcMacroInstance { - self.macros - .iter() - .find(|m| m.package_id() == package_id) - .expect("procedural macro must be registered in proc macro host") - } - - fn register_full_path_markers(&self, package_id: PackageId, markers: Vec) { - self.full_path_markers - .write() - .unwrap() - .entry(package_id) - .and_modify(|markers| markers.extend(markers.clone())) - .or_insert(markers); - } - - fn calculate_metadata(db: &dyn SyntaxGroup, item_ast: ast::ModuleItem) -> TokenStreamMetadata { - let stable_ptr = item_ast.clone().stable_ptr().untyped(); - let file_path = 
stable_ptr.file_id(db).full_path(db.upcast()); - let file_id = short_hash(file_path.clone()); - TokenStreamMetadata::new(file_path, file_id) - } -} - -struct InnerAttrExpansionContext<'a> { - host: &'a ProcMacroHostPlugin, - // Metadata returned for expansions. - diagnostics: Vec, - aux_data: EmittedAuxData, - any_changed: bool, -} - -impl<'a> InnerAttrExpansionContext<'a> { - pub fn new<'b: 'a>(host: &'b ProcMacroHostPlugin) -> Self { - Self { - diagnostics: Vec::new(), - aux_data: EmittedAuxData::default(), - any_changed: false, - host, - } - } - - pub fn register_result( - &mut self, - original: String, - input: ProcMacroId, - result: ProcMacroResult, - stable_ptr: SyntaxStablePtrId, - ) -> String { - let expanded = result.token_stream.to_string(); - let changed = expanded.as_str() != original; - - if changed { - self.host - .register_full_path_markers(input.package_id, result.full_path_markers.clone()); - } - - self.diagnostics - .extend(into_cairo_diagnostics(result.diagnostics, stable_ptr)); - - if let Some(new_aux_data) = result.aux_data { - self.aux_data - .push(ProcMacroAuxData::new(new_aux_data.into(), input)); - } - - self.any_changed = self.any_changed || changed; - - expanded - } - pub fn into_result( - self, - expanded: String, - code_mappings: Vec, - attr_names: Vec, - ) -> PluginResult { - let msg = if attr_names.len() == 1 { - "the attribute macro" - } else { - "one of the attribute macros" - }; - let derive_names = attr_names.iter().map(ToString::to_string).join("`, `"); - let note = format!("this error originates in {msg}: `{derive_names}`"); - PluginResult { - code: Some(PluginGeneratedFile { - name: "proc_attr_inner".into(), - content: expanded, - aux_data: if self.aux_data.is_empty() { - None - } else { - Some(DynGeneratedFileAuxData::new(self.aux_data)) - }, - code_mappings, - diagnostics_note: Some(note), - }), - diagnostics: self.diagnostics, - remove_original_item: true, - } - } -} - -enum InnerAttrExpansionResult { - None, - Some(PluginResult), -} - -impl MacroPlugin for ProcMacroHostPlugin { - fn generate_code( - &self, - db: &dyn SyntaxGroup, - item_ast: ast::ModuleItem, - _metadata: &MacroPluginMetadata<'_>, - ) -> PluginResult { - let stream_metadata = Self::calculate_metadata(db, item_ast.clone()); - - // Handle inner functions. - if let InnerAttrExpansionResult::Some(result) = self.expand_inner_attr(db, item_ast.clone()) - { - return result; - } - - // Expand first attribute. - // Note that we only expand the first attribute, as we assume that the rest of the attributes - // will be handled by a subsequent call to this function. - let (input, body) = self.parse_attribute(db, item_ast.clone()); - - if let Some(result) = match input { - AttrExpansionFound::Last { - expansion, - args, - stable_ptr, - } => Some((expansion, args, stable_ptr, true)), - AttrExpansionFound::Some { - expansion, - args, - stable_ptr, - } => Some((expansion, args, stable_ptr, false)), - AttrExpansionFound::None => None, - } - .map(|(expansion, args, stable_ptr, last)| { - let token_stream = body.with_metadata(stream_metadata.clone()); - self.expand_attribute(expansion, last, args, token_stream, stable_ptr) - }) { - return result; - } - - // Expand all derives. - // Note that all proc macro attributes should be already expanded at this point. - if let Some(result) = self.expand_derives(db, item_ast.clone(), stream_metadata.clone()) { - return result; - } - - // No expansions can be applied. 
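// The all-`None` result below is the plugin's no-op: nothing is generated and nothing
// is removed, so the compiler keeps the item exactly as written.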
- PluginResult { - code: None, - diagnostics: Vec::new(), - remove_original_item: false, - } - } - - fn declared_attributes(&self) -> Vec { - self.macros - .iter() - .flat_map(|m| m.declared_attributes_and_executables()) - .chain(vec![FULL_PATH_MARKER_KEY.to_string()]) - .collect() - } - - fn declared_derives(&self) -> Vec { - self.macros - .iter() - .flat_map(|m| m.declared_derives()) - .map(|s| s.to_case(Case::UpperCamel)) - .collect() - } - - fn executable_attributes(&self) -> Vec { - self.macros - .iter() - .flat_map(|m| m.executable_attributes()) - .collect() - } -} - -enum AttrExpansionFound { - Some { - expansion: ProcMacroId, - args: TokenStream, - stable_ptr: SyntaxStablePtrId, - }, - None, - Last { - expansion: ProcMacroId, - args: TokenStream, - stable_ptr: SyntaxStablePtrId, - }, -} -impl AttrExpansionFound { - pub fn as_name(&self) -> Option { - match self { - AttrExpansionFound::Some { expansion, .. } - | AttrExpansionFound::Last { expansion, .. } => Some(expansion.expansion.name.clone()), - AttrExpansionFound::None => None, - } - } -} - -/// A Cairo compiler inline macro plugin controlling the inline procedural macro execution. -/// -/// This plugin represents a single expansion capable of handling inline procedural macros. -/// The plugin triggers code expansion in a corresponding procedural macro instance. -#[derive(Debug)] -pub struct ProcMacroInlinePlugin { - instance: Arc, - expansion: Expansion, - doc: OnceLock>, -} - -impl ProcMacroInlinePlugin { - pub fn new(instance: Arc, expansion: Expansion) -> Self { - assert!(instance.get_expansions().contains(&expansion)); - Self { - instance, - expansion, - doc: Default::default(), - } - } - - pub fn name(&self) -> &str { - self.expansion.name.as_str() - } - - fn instance(&self) -> &ProcMacroInstance { - &self.instance - } -} - -impl InlineMacroExprPlugin for ProcMacroInlinePlugin { - fn generate_code( - &self, - db: &dyn SyntaxGroup, - syntax: &ast::ExprInlineMacro, - _metadata: &MacroPluginMetadata<'_>, - ) -> InlinePluginResult { - let stable_ptr = syntax.clone().stable_ptr().untyped(); - let arguments = syntax.arguments(db); - let token_stream = TokenStream::from_syntax_node(db, &arguments); - let result = self.instance().generate_code( - self.expansion.name.clone(), - TokenStream::empty(), - token_stream, - ); - // Handle diagnostics. 
- let diagnostics = into_cairo_diagnostics(result.diagnostics, stable_ptr); - let token_stream = result.token_stream.clone(); - if token_stream.is_empty() { - // Remove original code - InlinePluginResult { - code: None, - diagnostics, - } - } else { - // Replace - let aux_data = result.aux_data.map(|aux_data| { - let aux_data = ProcMacroAuxData::new( - aux_data.into(), - ProcMacroId::new(self.instance.package_id(), self.expansion.clone()), - ); - let mut emitted = EmittedAuxData::default(); - emitted.push(aux_data); - DynGeneratedFileAuxData::new(emitted) - }); - let content = token_stream.to_string(); - InlinePluginResult { - code: Some(PluginGeneratedFile { - name: "inline_proc_macro".into(), - code_mappings: Vec::new(), - content, - aux_data, - diagnostics_note: Some(format!( - "this error originates in the inline macro: `{}`", - self.expansion.name - )), - }), - diagnostics, - } - } - } - - fn documentation(&self) -> Option { - self.doc - .get_or_init(|| self.instance().doc(self.expansion.name.clone())) - .clone() - } -} - -fn into_cairo_diagnostics( - diagnostics: Vec, - stable_ptr: SyntaxStablePtrId, -) -> Vec { - diagnostics - .into_iter() - .map(|diag| PluginDiagnostic { - stable_ptr, - message: diag.message, - severity: match diag.severity { - Severity::Error => cairo_lang_diagnostics::Severity::Error, - Severity::Warning => cairo_lang_diagnostics::Severity::Warning, - }, - }) - .collect_vec() -} - -/// A Scarb wrapper around the `ProcMacroHost` compiler plugin. -/// -/// This struct represent the compiler plugin in terms of Scarb data model. -/// It also builds a plugin suite that enables the compiler plugin. -#[derive(Default)] -pub struct ProcMacroHost { - macros: Vec>, -} - -impl ProcMacroHost { - pub fn register(&mut self, package: Package, config: &Config) -> Result<()> { - let instance = ProcMacroInstance::try_new(package, config)?; - self.macros.push(Arc::new(instance)); - Ok(()) - } - - pub fn into_plugin(self) -> Result { - ProcMacroHostPlugin::try_new(self.macros) - } - - pub fn macros(&self) -> &[Arc] { - &self.macros - } -} diff --git a/scarb/src/compiler/plugin/proc_macro/host/attribute.rs b/scarb/src/compiler/plugin/proc_macro/host/attribute.rs new file mode 100644 index 000000000..1174eea28 --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/host/attribute.rs @@ -0,0 +1,536 @@ +use crate::compiler::plugin::proc_macro::host::aux_data::{EmittedAuxData, ProcMacroAuxData}; +use crate::compiler::plugin::proc_macro::host::conversion::{ + into_cairo_diagnostics, CallSiteLocation, +}; +use crate::compiler::plugin::proc_macro::host::generate_code_mappings; +use crate::compiler::plugin::proc_macro::{ + Expansion, ExpansionKind, ProcMacroHostPlugin, ProcMacroId, TokenStreamBuilder, +}; +use cairo_lang_defs::patcher::{PatchBuilder, RewriteNode}; +use cairo_lang_defs::plugin::PluginDiagnostic; +use cairo_lang_defs::plugin::{DynGeneratedFileAuxData, PluginGeneratedFile, PluginResult}; +use cairo_lang_filesystem::ids::CodeMapping; +use cairo_lang_macro::{AllocationContext, ProcMacroResult, TokenStream}; +use cairo_lang_syntax::attribute::structured::AttributeStructurize; +use cairo_lang_syntax::node::ast::{ImplItem, MaybeImplBody, MaybeTraitBody}; +use cairo_lang_syntax::node::db::SyntaxGroup; +use cairo_lang_syntax::node::ids::SyntaxStablePtrId; +use cairo_lang_syntax::node::{ast, TypedSyntaxNode}; +use itertools::Itertools; +use smol_str::SmolStr; +use std::collections::HashSet; + +impl ProcMacroHostPlugin { + pub(crate) fn expand_inner_attr( + &self, + db: &dyn 
SyntaxGroup, + item_ast: ast::ModuleItem, + ) -> InnerAttrExpansionResult { + let mut context = InnerAttrExpansionContext::new(self); + let mut item_builder = PatchBuilder::new(db, &item_ast); + let mut used_attr_names: HashSet<SmolStr> = Default::default(); + let mut all_none = true; + let ctx = AllocationContext::default(); + + match item_ast.clone() { + ast::ModuleItem::Trait(trait_ast) => { + item_builder.add_node(trait_ast.attributes(db).as_syntax_node()); + item_builder.add_node(trait_ast.visibility(db).as_syntax_node()); + item_builder.add_node(trait_ast.trait_kw(db).as_syntax_node()); + item_builder.add_node(trait_ast.name(db).as_syntax_node()); + item_builder.add_node(trait_ast.generic_params(db).as_syntax_node()); + + // Parse attributes of inner functions. + match trait_ast.body(db) { + MaybeTraitBody::None(terminal) => { + item_builder.add_node(terminal.as_syntax_node()); + InnerAttrExpansionResult::None + } + MaybeTraitBody::Some(body) => { + item_builder.add_node(body.lbrace(db).as_syntax_node()); + + let item_list = body.items(db); + for item in item_list.elements(db).iter() { + let ast::TraitItem::Function(func) = item else { + item_builder.add_node(item.as_syntax_node()); + continue; + }; + + let mut token_stream_builder = TokenStreamBuilder::new(db); + let attrs = func.attributes(db).elements(db); + let found = + self.parse_attrs(db, &mut token_stream_builder, attrs, &ctx); + if let Some(name) = found.as_name() { + used_attr_names.insert(name); + } + token_stream_builder.add_node(func.declaration(db).as_syntax_node()); + token_stream_builder.add_node(func.body(db).as_syntax_node()); + let token_stream = token_stream_builder.build(&ctx); + + all_none = all_none + && self.do_expand_inner_attr( + db, + &mut context, + &mut item_builder, + found, + func, + token_stream, + ); + } + + item_builder.add_node(body.rbrace(db).as_syntax_node()); + + if all_none { + InnerAttrExpansionResult::None + } else { + let (code, mappings) = item_builder.build(); + InnerAttrExpansionResult::Some(context.into_result( + code, + mappings, + used_attr_names.into_iter().collect(), + )) + } + } + } + } + + ast::ModuleItem::Impl(impl_ast) => { + item_builder.add_node(impl_ast.attributes(db).as_syntax_node()); + item_builder.add_node(impl_ast.visibility(db).as_syntax_node()); + item_builder.add_node(impl_ast.impl_kw(db).as_syntax_node()); + item_builder.add_node(impl_ast.name(db).as_syntax_node()); + item_builder.add_node(impl_ast.generic_params(db).as_syntax_node()); + item_builder.add_node(impl_ast.of_kw(db).as_syntax_node()); + item_builder.add_node(impl_ast.trait_path(db).as_syntax_node()); + + match impl_ast.body(db) { + MaybeImplBody::None(terminal) => { + item_builder.add_node(terminal.as_syntax_node()); + InnerAttrExpansionResult::None + } + MaybeImplBody::Some(body) => { + item_builder.add_node(body.lbrace(db).as_syntax_node()); + + let items = body.items(db); + for item in items.elements(db) { + let ImplItem::Function(func) = item else { + item_builder.add_node(item.as_syntax_node()); + continue; + }; + + let mut token_stream_builder = TokenStreamBuilder::new(db); + let attrs = func.attributes(db).elements(db); + let found = + self.parse_attrs(db, &mut token_stream_builder, attrs, &ctx); + if let Some(name) = found.as_name() { + used_attr_names.insert(name); + } + token_stream_builder.add_node(func.visibility(db).as_syntax_node()); + token_stream_builder.add_node(func.declaration(db).as_syntax_node()); + token_stream_builder.add_node(func.body(db).as_syntax_node()); + let token_stream = 
token_stream_builder.build(&ctx); + all_none = all_none + && self.do_expand_inner_attr( + db, + &mut context, + &mut item_builder, + found, + &func, + token_stream, + ); + } + + item_builder.add_node(body.rbrace(db).as_syntax_node()); + + if all_none { + InnerAttrExpansionResult::None + } else { + let (code, mappings) = item_builder.build(); + InnerAttrExpansionResult::Some(context.into_result( + code, + mappings, + used_attr_names.into_iter().collect(), + )) + } + } + } + } + _ => InnerAttrExpansionResult::None, + } + } + + fn do_expand_inner_attr( + &self, + db: &dyn SyntaxGroup, + context: &mut InnerAttrExpansionContext<'_>, + item_builder: &mut PatchBuilder<'_>, + found: AttrExpansionFound, + func: &impl TypedSyntaxNode, + token_stream: TokenStream, + ) -> bool { + let mut all_none = true; + let input = match found { + AttrExpansionFound::Last(input) => { + all_none = false; + input + } + AttrExpansionFound::Some(input) => { + all_none = false; + input + } + AttrExpansionFound::None => { + item_builder.add_node(func.as_syntax_node()); + return all_none; + } + }; + + let result = self.instance(input.id.package_id).generate_code( + input.id.expansion.name.clone(), + input.call_site.span, + input.args, + token_stream.clone(), + ); + + let expanded = context.register_result( + token_stream.to_string(), + input.id, + result, + input.call_site.stable_ptr, + ); + + item_builder.add_modified(RewriteNode::Mapped { + origin: func.as_syntax_node().span(db), + node: Box::new(RewriteNode::Text(expanded.to_string())), + }); + + all_none + } + + /// Find the first attribute procedural macro that should be expanded. + /// + /// Remove the attribute from the code. + pub(crate) fn parse_attribute( + &self, + db: &dyn SyntaxGroup, + item_ast: ast::ModuleItem, + ctx: &AllocationContext, + ) -> (AttrExpansionFound, TokenStream) { + let mut token_stream_builder = TokenStreamBuilder::new(db); + let input = match item_ast.clone() { + ast::ModuleItem::Trait(trait_ast) => { + let attrs = trait_ast.attributes(db).elements(db); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); + token_stream_builder.add_node(trait_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(trait_ast.trait_kw(db).as_syntax_node()); + token_stream_builder.add_node(trait_ast.name(db).as_syntax_node()); + token_stream_builder.add_node(trait_ast.generic_params(db).as_syntax_node()); + token_stream_builder.add_node(trait_ast.body(db).as_syntax_node()); + expansion + } + ast::ModuleItem::Impl(impl_ast) => { + let attrs = impl_ast.attributes(db).elements(db); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); + token_stream_builder.add_node(impl_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.impl_kw(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.name(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.generic_params(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.of_kw(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.trait_path(db).as_syntax_node()); + token_stream_builder.add_node(impl_ast.body(db).as_syntax_node()); + expansion + } + ast::ModuleItem::Module(module_ast) => { + let attrs = module_ast.attributes(db).elements(db); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); + token_stream_builder.add_node(module_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(module_ast.module_kw(db).as_syntax_node()); + 
token_stream_builder.add_node(module_ast.name(db).as_syntax_node()); + token_stream_builder.add_node(module_ast.body(db).as_syntax_node()); + expansion + } + ast::ModuleItem::FreeFunction(free_func_ast) => { + let attrs = free_func_ast.attributes(db).elements(db); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); + token_stream_builder.add_node(free_func_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(free_func_ast.declaration(db).as_syntax_node()); + token_stream_builder.add_node(free_func_ast.body(db).as_syntax_node()); + expansion + } + ast::ModuleItem::ExternFunction(extern_func_ast) => { + let attrs = extern_func_ast.attributes(db).elements(db); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); + token_stream_builder.add_node(extern_func_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(extern_func_ast.extern_kw(db).as_syntax_node()); + token_stream_builder.add_node(extern_func_ast.declaration(db).as_syntax_node()); + token_stream_builder.add_node(extern_func_ast.semicolon(db).as_syntax_node()); + expansion + } + ast::ModuleItem::ExternType(extern_type_ast) => { + let attrs = extern_type_ast.attributes(db).elements(db); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); + token_stream_builder.add_node(extern_type_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(extern_type_ast.extern_kw(db).as_syntax_node()); + token_stream_builder.add_node(extern_type_ast.type_kw(db).as_syntax_node()); + token_stream_builder.add_node(extern_type_ast.name(db).as_syntax_node()); + token_stream_builder.add_node(extern_type_ast.generic_params(db).as_syntax_node()); + token_stream_builder.add_node(extern_type_ast.semicolon(db).as_syntax_node()); + expansion + } + ast::ModuleItem::Struct(struct_ast) => { + let attrs = struct_ast.attributes(db).elements(db); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); + token_stream_builder.add_node(struct_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(struct_ast.struct_kw(db).as_syntax_node()); + token_stream_builder.add_node(struct_ast.name(db).as_syntax_node()); + token_stream_builder.add_node(struct_ast.generic_params(db).as_syntax_node()); + token_stream_builder.add_node(struct_ast.lbrace(db).as_syntax_node()); + token_stream_builder.add_node(struct_ast.members(db).as_syntax_node()); + token_stream_builder.add_node(struct_ast.rbrace(db).as_syntax_node()); + expansion + } + ast::ModuleItem::Enum(enum_ast) => { + let attrs = enum_ast.attributes(db).elements(db); + let expansion = self.parse_attrs(db, &mut token_stream_builder, attrs, ctx); + token_stream_builder.add_node(enum_ast.visibility(db).as_syntax_node()); + token_stream_builder.add_node(enum_ast.enum_kw(db).as_syntax_node()); + token_stream_builder.add_node(enum_ast.name(db).as_syntax_node()); + token_stream_builder.add_node(enum_ast.generic_params(db).as_syntax_node()); + token_stream_builder.add_node(enum_ast.lbrace(db).as_syntax_node()); + token_stream_builder.add_node(enum_ast.variants(db).as_syntax_node()); + token_stream_builder.add_node(enum_ast.rbrace(db).as_syntax_node()); + expansion + } + _ => AttrExpansionFound::None, + }; + let token_stream = token_stream_builder.build(ctx); + (input, token_stream) + } + + fn parse_attrs( + &self, + db: &dyn SyntaxGroup, + builder: &mut TokenStreamBuilder<'_>, + attrs: Vec, + ctx: &AllocationContext, + ) -> AttrExpansionFound { + // This function parses attributes 
of the item, + // checking if those attributes correspond to a procedural macro that should be fired. + // The proc macro attribute found is removed from the attributes list, + // while other attributes are appended to the `TokenStreamBuilder` passed as an argument. + + // Note this function does not affect the executable attributes, + // as it only pulls `ExpansionKind::Attr` from the plugin. + // This means that executable attributes will neither be removed from the item, + // nor will they cause the item to be rewritten. + let mut expansion = None; + let mut last = true; + for attr in attrs { + // We ensure that this flag is changed *after* the expansion is found. + if last { + let structured_attr = attr.clone().structurize(db); + let found = self.find_expansion(&Expansion::new( + structured_attr.id.clone(), + ExpansionKind::Attr, + )); + if let Some(found) = found { + if expansion.is_none() { + let mut args_builder = TokenStreamBuilder::new(db); + args_builder.add_node(attr.arguments(db).as_syntax_node()); + let args = args_builder.build(ctx); + expansion = Some(AttrExpansionArgs { + id: found, + args, + call_site: CallSiteLocation::new(&attr, db), + }); + // Do not add the attribute for the found expansion. + continue; + } else { + last = false; + } + } + } + builder.add_node(attr.as_syntax_node()); + } + match (expansion, last) { + (Some(args), true) => AttrExpansionFound::Last(args), + (Some(args), false) => AttrExpansionFound::Some(args), + (None, _) => AttrExpansionFound::None, + } + } + + pub fn expand_attribute( + &self, + input: ProcMacroId, + last: bool, + args: TokenStream, + token_stream: TokenStream, + call_site: CallSiteLocation, + ) -> PluginResult { + let original = token_stream.to_string(); + let result = self.instance(input.package_id).generate_code( + input.expansion.name.clone(), + call_site.span, + args, + token_stream, + ); + + // Handle token stream. + if result.token_stream.is_empty() { + // Remove original code + return PluginResult { + diagnostics: into_cairo_diagnostics(result.diagnostics, call_site.stable_ptr), + code: None, + remove_original_item: true, + }; + } + + // Full path markers require code modification. + self.register_full_path_markers(input.package_id, result.full_path_markers.clone()); + + // This is a minor optimization. + // If the expanded macro attribute is the only one that will be expanded by `ProcMacroHost` + // in this `generate_code` call (i.e. all the other macro attributes have been expanded by + // previous calls), and the expansion did not produce any changes, we can skip rewriting the + // expanded node by simply returning no generated code, and leaving the original item as is. + // However, if we have other macro attributes to expand, we must rewrite the node even if no + // changes have been produced, so that we can parse the attributes once again and expand them. + // In essence, `code: None, remove_original_item: false` means `ProcMacroHost` will not be + // called again for this AST item. + // This optimization limits the number of generated nodes a bit. 
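// Decision table for `expand_attribute`, completed by the check below:
//   expansion output empty                        -> code: None,     remove_original_item: true
//   last attr, no aux data, output == input       -> code: None,     remove_original_item: false
//   otherwise                                     -> code: Some(..), remove_original_item: true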
+ if last && result.aux_data.is_none() && original == result.token_stream.to_string() { + return PluginResult { + code: None, + remove_original_item: false, + diagnostics: into_cairo_diagnostics(result.diagnostics, call_site.stable_ptr), + }; + } + + let file_name = format!("proc_{}", input.expansion.name); + let code_mappings = generate_code_mappings(&result.token_stream); + let content = result.token_stream.to_string(); + PluginResult { + code: Some(PluginGeneratedFile { + name: file_name.into(), + code_mappings, + content, + diagnostics_note: Some(format!( + "this error originates in the attribute macro: `{}`", + input.expansion.name + )), + aux_data: result.aux_data.map(|new_aux_data| { + DynGeneratedFileAuxData::new(EmittedAuxData::new(ProcMacroAuxData::new( + new_aux_data.into(), + input, + ))) + }), + }), + diagnostics: into_cairo_diagnostics(result.diagnostics, call_site.stable_ptr), + remove_original_item: true, + } + } +} + +pub enum AttrExpansionFound { + Some(AttrExpansionArgs), + Last(AttrExpansionArgs), + None, +} + +pub struct AttrExpansionArgs { + pub id: ProcMacroId, + pub args: TokenStream, + pub call_site: CallSiteLocation, +} + +impl AttrExpansionFound { + pub fn as_name(&self) -> Option { + match self { + AttrExpansionFound::Some(args) | AttrExpansionFound::Last(args) => { + Some(args.id.expansion.name.clone()) + } + AttrExpansionFound::None => None, + } + } +} + +pub enum InnerAttrExpansionResult { + None, + Some(PluginResult), +} + +pub struct InnerAttrExpansionContext<'a> { + host: &'a ProcMacroHostPlugin, + // Metadata returned for expansions. + diagnostics: Vec, + aux_data: EmittedAuxData, + any_changed: bool, +} + +impl<'a> InnerAttrExpansionContext<'a> { + pub fn new<'b: 'a>(host: &'b ProcMacroHostPlugin) -> Self { + Self { + diagnostics: Vec::new(), + aux_data: EmittedAuxData::default(), + any_changed: false, + host, + } + } + + pub fn register_result( + &mut self, + original: String, + input: ProcMacroId, + result: ProcMacroResult, + stable_ptr: SyntaxStablePtrId, + ) -> String { + let result_str = result.token_stream.to_string(); + let changed = result_str != original; + + if changed { + self.host + .register_full_path_markers(input.package_id, result.full_path_markers.clone()); + } + + self.diagnostics + .extend(into_cairo_diagnostics(result.diagnostics, stable_ptr)); + + if let Some(new_aux_data) = result.aux_data { + self.aux_data + .push(ProcMacroAuxData::new(new_aux_data.into(), input)); + } + + self.any_changed = self.any_changed || changed; + + result_str + } + + pub fn into_result( + self, + expanded: String, + code_mappings: Vec, + attr_names: Vec, + ) -> PluginResult { + let msg = if attr_names.len() == 1 { + "the attribute macro" + } else { + "one of the attribute macros" + }; + let derive_names = attr_names.iter().map(ToString::to_string).join("`, `"); + let note = format!("this error originates in {msg}: `{derive_names}`"); + PluginResult { + code: Some(PluginGeneratedFile { + name: "proc_attr_inner".into(), + content: expanded, + aux_data: if self.aux_data.is_empty() { + None + } else { + Some(DynGeneratedFileAuxData::new(self.aux_data)) + }, + code_mappings, + diagnostics_note: Some(note), + }), + diagnostics: self.diagnostics, + remove_original_item: true, + } + } +} diff --git a/scarb/src/compiler/plugin/proc_macro/host/aux_data.rs b/scarb/src/compiler/plugin/proc_macro/host/aux_data.rs new file mode 100644 index 000000000..a1fa0d54a --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/host/aux_data.rs @@ -0,0 +1,90 @@ +use 
crate::compiler::plugin::proc_macro::{ProcMacroHostPlugin, ProcMacroId}; +use crate::core::PackageId; +use cairo_lang_defs::plugin::GeneratedFileAuxData; +use cairo_lang_macro::AuxData; +use cairo_lang_semantic::db::SemanticGroup; +use itertools::Itertools; +use std::any::Any; +use std::collections::HashMap; +use std::vec::IntoIter; + +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct ProcMacroAuxData { + value: Vec, + macro_id: ProcMacroId, +} + +impl ProcMacroAuxData { + pub fn new(value: Vec, macro_id: ProcMacroId) -> Self { + Self { value, macro_id } + } +} + +impl From for AuxData { + fn from(data: ProcMacroAuxData) -> Self { + Self::new(data.value) + } +} + +#[derive(Debug, Clone, Default)] +pub struct EmittedAuxData(Vec); + +impl GeneratedFileAuxData for EmittedAuxData { + fn as_any(&self) -> &dyn Any { + self + } + + fn eq(&self, other: &dyn GeneratedFileAuxData) -> bool { + self.0 == other.as_any().downcast_ref::().unwrap().0 + } +} + +impl EmittedAuxData { + pub fn new(aux_data: ProcMacroAuxData) -> Self { + Self(vec![aux_data]) + } + + pub fn push(&mut self, aux_data: ProcMacroAuxData) { + self.0.push(aux_data); + } + + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } +} + +impl IntoIterator for EmittedAuxData { + type Item = ProcMacroAuxData; + type IntoIter = IntoIter; + + fn into_iter(self) -> IntoIter { + self.0.into_iter() + } +} + +impl ProcMacroHostPlugin { + pub(crate) fn collect_aux_data( + &self, + db: &dyn SemanticGroup, + ) -> HashMap> { + let mut data = Vec::new(); + for crate_id in db.crates() { + let crate_modules = db.crate_modules(crate_id); + for module in crate_modules.iter() { + let file_infos = db.module_generated_file_aux_data(*module); + if let Ok(file_infos) = file_infos { + for file_info in file_infos.iter() { + let aux_data = file_info + .as_ref() + .and_then(|ad| ad.as_any().downcast_ref::()); + if let Some(aux_data) = aux_data { + data.extend(aux_data.clone().into_iter()); + } + } + } + } + } + data.into_iter() + .into_group_map_by(|d| d.macro_id.package_id) + } +} diff --git a/scarb/src/compiler/plugin/proc_macro/host/conversion.rs b/scarb/src/compiler/plugin/proc_macro/host/conversion.rs new file mode 100644 index 000000000..aef4ac5f6 --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/host/conversion.rs @@ -0,0 +1,49 @@ +use cairo_lang_defs::plugin::PluginDiagnostic; +use cairo_lang_macro::{Diagnostic, Severity, TextSpan}; +use cairo_lang_syntax::node::db::SyntaxGroup; +use cairo_lang_syntax::node::ids::SyntaxStablePtrId; +use cairo_lang_syntax::node::{TypedStablePtr, TypedSyntaxNode}; +use itertools::Itertools; + +pub trait SpanSource { + fn text_span(&self, db: &dyn SyntaxGroup) -> TextSpan; +} + +impl SpanSource for T { + fn text_span(&self, db: &dyn SyntaxGroup) -> TextSpan { + let node = self.as_syntax_node(); + let span = node.span(db); + TextSpan::new(span.start.as_u32(), span.end.as_u32()) + } +} + +pub struct CallSiteLocation { + pub stable_ptr: SyntaxStablePtrId, + pub span: TextSpan, +} + +impl CallSiteLocation { + pub fn new(node: &T, db: &dyn SyntaxGroup) -> Self { + Self { + stable_ptr: node.stable_ptr().untyped(), + span: node.text_span(db), + } + } +} + +pub fn into_cairo_diagnostics( + diagnostics: Vec, + stable_ptr: SyntaxStablePtrId, +) -> Vec { + diagnostics + .into_iter() + .map(|diag| PluginDiagnostic { + stable_ptr, + message: diag.message, + severity: match diag.severity { + Severity::Error => cairo_lang_diagnostics::Severity::Error, + Severity::Warning => cairo_lang_diagnostics::Severity::Warning, + }, + }) + 
.collect_vec() +} diff --git a/scarb/src/compiler/plugin/proc_macro/host/derive.rs b/scarb/src/compiler/plugin/proc_macro/host/derive.rs new file mode 100644 index 000000000..0834f9b5e --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/host/derive.rs @@ -0,0 +1,178 @@ +use crate::compiler::plugin::proc_macro::host::aux_data::{EmittedAuxData, ProcMacroAuxData}; +use crate::compiler::plugin::proc_macro::host::conversion::{ + into_cairo_diagnostics, CallSiteLocation, +}; +use crate::compiler::plugin::proc_macro::host::{generate_code_mappings, DERIVE_ATTR}; +use crate::compiler::plugin::proc_macro::{ + Expansion, ExpansionKind, ProcMacroHostPlugin, ProcMacroId, TokenStreamBuilder, +}; +use cairo_lang_defs::plugin::{DynGeneratedFileAuxData, PluginGeneratedFile, PluginResult}; +use cairo_lang_filesystem::ids::CodeMapping; +use cairo_lang_filesystem::span::TextWidth; +use cairo_lang_macro::{AllocationContext, Diagnostic, TokenStream, TokenStreamMetadata}; +use cairo_lang_syntax::attribute::structured::{AttributeArgVariant, AttributeStructurize}; +use cairo_lang_syntax::node::ast::{Expr, PathSegment}; +use cairo_lang_syntax::node::db::SyntaxGroup; +use cairo_lang_syntax::node::helpers::QueryAttrs; +use cairo_lang_syntax::node::{ast, Terminal, TypedSyntaxNode}; +use convert_case::{Case, Casing}; +use itertools::Itertools; + +impl ProcMacroHostPlugin { + /// Handle `#[derive(...)]` attribute. + /// + /// Returns a list of expansions that this plugin should apply. + fn parse_derive(&self, db: &dyn SyntaxGroup, item_ast: ast::ModuleItem) -> Vec { + let attrs = match item_ast { + ast::ModuleItem::Struct(struct_ast) => Some(struct_ast.query_attr(db, DERIVE_ATTR)), + ast::ModuleItem::Enum(enum_ast) => Some(enum_ast.query_attr(db, DERIVE_ATTR)), + _ => None, + }; + + attrs + .unwrap_or_default() + .iter() + .map(|attr| attr.clone().structurize(db)) + .flat_map(|attr| attr.args.into_iter()) + .filter_map(|attr| { + let AttributeArgVariant::Unnamed(value) = attr.clone().variant else { + return None; + }; + let Expr::Path(path) = value else { + return None; + }; + let path = path.elements(db); + let path = path.last()?; + let PathSegment::Simple(segment) = path else { + return None; + }; + let ident = segment.ident(db); + let value = ident.text(db).to_string(); + + self.find_expansion(&Expansion::new( + value.to_case(Case::Snake), + ExpansionKind::Derive, + )) + .map(|id| DeriveFound { + id, + call_site: CallSiteLocation::new(segment, db), + }) + }) + .collect_vec() + } + + pub fn expand_derives( + &self, + db: &dyn SyntaxGroup, + item_ast: ast::ModuleItem, + stream_metadata: TokenStreamMetadata, + ) -> Option { + let mut token_stream_builder = TokenStreamBuilder::new(db); + token_stream_builder.add_node(item_ast.as_syntax_node()); + token_stream_builder.with_metadata(stream_metadata.clone()); + let mut aux_data = EmittedAuxData::default(); + let mut all_diagnostics: Vec = Vec::new(); + + // All derives to be applied. + let derives = self.parse_derive(db, item_ast.clone()); + + if derives.is_empty() { + // No derives found - returning early. + return None; + } + + // We use call site of first derive found. 
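// A note on `parse_derive` above: the lookup key is the snake_case form of the
// trailing path segment. Minimal sketch of that conversion (hypothetical helper
// around the real `convert_case` dependency imported above):
use convert_case::{Case, Casing};

fn derive_lookup_key(trait_ident: &str) -> String {
    // e.g. "MyTrait" -> "my_trait"; `declared_derives` goes the other way with
    // `Case::UpperCamel`.
    trait_ident.to_case(Case::Snake)
}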
+ let stable_ptr = derives[0].call_site.stable_ptr; + + let ctx = AllocationContext::default(); + let mut derived_code = String::new(); + let mut code_mappings = Vec::new(); + let mut current_width = TextWidth::default(); + + for derive in derives.iter() { + let call_site = &derive.call_site; + let derive = &derive.id; + let token_stream = token_stream_builder.build(&ctx); + let result = self.instance(derive.package_id).generate_code( + derive.expansion.name.clone(), + call_site.span.clone(), + TokenStream::empty(), + token_stream, + ); + + // Register diagnostics. + all_diagnostics.extend(result.diagnostics); + + // Register aux data. + if let Some(new_aux_data) = result.aux_data { + aux_data.push(ProcMacroAuxData::new( + new_aux_data.into(), + ProcMacroId::new(derive.package_id, derive.expansion.clone()), + )); + } + + if result.token_stream.is_empty() { + // No code has been generated. + // We do not need to do anything. + continue; + } + + code_mappings.extend(generate_code_mappings_with_offset( + &result.token_stream, + current_width, + )); + current_width = current_width + TextWidth::from_str(&result.token_stream.to_string()); + derived_code.push_str(&result.token_stream.to_string()); + } + + Some(PluginResult { + code: if derived_code.is_empty() { + None + } else { + let msg = if derives.len() == 1 { + "the derive macro" + } else { + "one of the derive macros" + }; + let derive_names = derives + .iter() + .map(|derive| derive.id.expansion.name.to_string()) + .join("`, `"); + let note = format!("this error originates in {msg}: `{derive_names}`"); + + Some(PluginGeneratedFile { + name: "proc_macro_derive".into(), + code_mappings, + content: derived_code, + diagnostics_note: Some(note), + aux_data: if aux_data.is_empty() { + None + } else { + Some(DynGeneratedFileAuxData::new(aux_data)) + }, + }) + }, + diagnostics: into_cairo_diagnostics(all_diagnostics, stable_ptr), + // Note that we don't remove the original item here, unlike for attributes. + // We do not add the original code to the generated file either. 
+ remove_original_item: false, + }) + } +} + +struct DeriveFound { + id: ProcMacroId, + call_site: CallSiteLocation, +} + +fn generate_code_mappings_with_offset( + token_stream: &TokenStream, + offset: TextWidth, +) -> Vec { + let mut mappings = generate_code_mappings(token_stream); + for mapping in &mut mappings { + mapping.span.start = mapping.span.start.add_width(offset); + mapping.span.end = mapping.span.end.add_width(offset); + } + mappings +} diff --git a/scarb/src/compiler/plugin/proc_macro/host/inline.rs b/scarb/src/compiler/plugin/proc_macro/host/inline.rs new file mode 100644 index 000000000..b1df6a04c --- /dev/null +++ b/scarb/src/compiler/plugin/proc_macro/host/inline.rs @@ -0,0 +1,106 @@ +use crate::compiler::plugin::proc_macro::host::aux_data::{EmittedAuxData, ProcMacroAuxData}; +use crate::compiler::plugin::proc_macro::host::conversion::{ + into_cairo_diagnostics, CallSiteLocation, +}; +use crate::compiler::plugin::proc_macro::host::generate_code_mappings; +use crate::compiler::plugin::proc_macro::{ + Expansion, ProcMacroId, ProcMacroInstance, TokenStreamBuilder, +}; +use cairo_lang_defs::plugin::{ + DynGeneratedFileAuxData, InlineMacroExprPlugin, InlinePluginResult, MacroPluginMetadata, + PluginGeneratedFile, +}; +use cairo_lang_macro::{AllocationContext, TokenStream}; +use cairo_lang_syntax::node::db::SyntaxGroup; +use cairo_lang_syntax::node::{ast, TypedSyntaxNode}; +use std::sync::{Arc, OnceLock}; + +/// A Cairo compiler inline macro plugin controlling the inline procedural macro execution. +/// +/// This plugin represents a single expansion capable of handling inline procedural macros. +/// The plugin triggers code expansion in a corresponding procedural macro instance. +#[derive(Debug)] +pub struct ProcMacroInlinePlugin { + instance: Arc, + expansion: Expansion, + doc: OnceLock>, +} + +impl ProcMacroInlinePlugin { + pub fn new(instance: Arc, expansion: Expansion) -> Self { + assert!(instance.get_expansions().contains(&expansion)); + Self { + instance, + expansion, + doc: Default::default(), + } + } + + fn instance(&self) -> &ProcMacroInstance { + &self.instance + } +} + +impl InlineMacroExprPlugin for ProcMacroInlinePlugin { + fn generate_code( + &self, + db: &dyn SyntaxGroup, + syntax: &ast::ExprInlineMacro, + _metadata: &MacroPluginMetadata<'_>, + ) -> InlinePluginResult { + let call_site = CallSiteLocation::new(syntax, db); + let ctx = AllocationContext::default(); + let arguments = syntax.arguments(db); + let mut token_stream_builder = TokenStreamBuilder::new(db); + token_stream_builder.add_node(arguments.as_syntax_node()); + let token_stream = token_stream_builder.build(&ctx); + let result = self.instance().generate_code( + self.expansion.name.clone(), + call_site.span, + TokenStream::empty(), + token_stream, + ); + // Handle diagnostics. 
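// A note on `generate_code_mappings_with_offset` above: successive derive outputs are
// concatenated into one generated file, so each macro's mappings must be shifted by
// the total width of the code emitted before it. A minimal model of that bookkeeping
// (sketch; plain u32 offsets standing in for `TextWidth`/`CodeMapping`):
fn shift_spans(spans: &mut [(u32, u32)], offset: u32) {
    for (start, end) in spans.iter_mut() {
        // Both ends of every generated-to-origin mapping move by the same amount.
        *start += offset;
        *end += offset;
    }
}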
+        let diagnostics = into_cairo_diagnostics(result.diagnostics, call_site.stable_ptr);
+        let token_stream = result.token_stream.clone();
+        if token_stream.is_empty() {
+            // Remove original code
+            InlinePluginResult {
+                code: None,
+                diagnostics,
+            }
+        } else {
+            // Replace
+            let aux_data = result.aux_data.map(|aux_data| {
+                let aux_data = ProcMacroAuxData::new(
+                    aux_data.into(),
+                    ProcMacroId::new(self.instance.package_id(), self.expansion.clone()),
+                );
+                let mut emitted = EmittedAuxData::default();
+                emitted.push(aux_data);
+                DynGeneratedFileAuxData::new(emitted)
+            });
+            let content = token_stream.to_string();
+            let code_mappings = generate_code_mappings(&token_stream);
+            InlinePluginResult {
+                code: Some(PluginGeneratedFile {
+                    name: "inline_proc_macro".into(),
+                    code_mappings,
+                    content,
+                    aux_data,
+                    diagnostics_note: Some(format!(
+                        "this error originates in the inline macro: `{}`",
+                        self.expansion.name
+                    )),
+                }),
+                diagnostics,
+            }
+        }
+    }
+
+    fn documentation(&self) -> Option<String> {
+        self.doc
+            .get_or_init(|| self.instance().doc(self.expansion.name.clone()))
+            .clone()
+    }
+}
diff --git a/scarb/src/compiler/plugin/proc_macro/host/mod.rs b/scarb/src/compiler/plugin/proc_macro/host/mod.rs
new file mode 100644
index 000000000..de8d101b0
--- /dev/null
+++ b/scarb/src/compiler/plugin/proc_macro/host/mod.rs
@@ -0,0 +1,264 @@
+mod attribute;
+mod aux_data;
+mod conversion;
+mod derive;
+mod inline;
+mod post;
+
+use attribute::*;
+pub use aux_data::ProcMacroAuxData;
+use inline::*;
+
+use crate::compiler::plugin::proc_macro::{Expansion, ExpansionKind, ProcMacroInstance};
+use crate::core::{edition_variant, Config, Package, PackageId};
+use anyhow::{ensure, Result};
+use cairo_lang_defs::plugin::{MacroPlugin, MacroPluginMetadata, PluginResult};
+use cairo_lang_filesystem::db::Edition;
+use cairo_lang_filesystem::ids::{CodeMapping, CodeOrigin};
+use cairo_lang_filesystem::span::{TextOffset, TextSpan, TextWidth};
+use cairo_lang_macro::{AllocationContext, TokenStream, TokenStreamMetadata, TokenTree};
+use cairo_lang_semantic::plugin::PluginSuite;
+use cairo_lang_syntax::node::db::SyntaxGroup;
+use cairo_lang_syntax::node::{ast, TypedStablePtr, TypedSyntaxNode};
+use convert_case::{Case, Casing};
+use itertools::Itertools;
+use scarb_stable_hash::short_hash;
+use std::collections::HashMap;
+use std::fmt::Debug;
+use std::sync::{Arc, RwLock};
+
+const FULL_PATH_MARKER_KEY: &str = "macro::full_path_marker";
+const DERIVE_ATTR: &str = "derive";
+
+/// A Cairo compiler plugin controlling the procedural macro execution.
+///
+/// This plugin decides which macro plugins (if any) should be applied to the processed AST item.
+/// It then redirects the item to the appropriate macro plugin for code expansion.
+#[derive(Debug)]
+pub struct ProcMacroHostPlugin {
+    macros: Vec<Arc<ProcMacroInstance>>,
+    full_path_markers: RwLock<HashMap<PackageId, Vec<String>>>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct ProcMacroId {
+    pub package_id: PackageId,
+    pub expansion: Expansion,
+}
+
+impl ProcMacroId {
+    pub fn new(package_id: PackageId, expansion: Expansion) -> Self {
+        Self {
+            package_id,
+            expansion,
+        }
+    }
+}
+
+impl ProcMacroHostPlugin {
+    pub fn try_new(macros: Vec<Arc<ProcMacroInstance>>) -> Result<Self> {
+        // Validate expansions.
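+        // No two macro packages may declare an expansion with the same name;
+        // sorting by name below makes duplicates adjacent, so comparing neighbors suffices.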
+        let mut expansions = macros
+            .iter()
+            .flat_map(|m| {
+                m.get_expansions()
+                    .iter()
+                    .map(|e| ProcMacroId::new(m.package_id(), e.clone()))
+                    .collect_vec()
+            })
+            .collect::<Vec<_>>();
+        expansions.sort_unstable_by_key(|e| e.expansion.name.clone());
+        ensure!(
+            expansions
+                .windows(2)
+                .all(|w| w[0].expansion.name != w[1].expansion.name),
+            "duplicate expansions defined for procedural macros: {duplicates}",
+            duplicates = expansions
+                .windows(2)
+                .filter(|w| w[0].expansion.name == w[1].expansion.name)
+                .map(|w| format!(
+                    "{} ({} and {})",
+                    w[0].expansion.name.as_str(),
+                    w[0].package_id,
+                    w[1].package_id
+                ))
+                .collect::<Vec<_>>()
+                .join(", ")
+        );
+        Ok(Self {
+            macros,
+            full_path_markers: RwLock::new(Default::default()),
+        })
+    }
+
+    fn find_expansion(&self, expansion: &Expansion) -> Option<ProcMacroId> {
+        self.macros
+            .iter()
+            .find(|m| m.get_expansions().contains(expansion))
+            .map(|m| m.package_id())
+            .map(|package_id| ProcMacroId::new(package_id, expansion.clone()))
+    }
+
+    pub fn build_plugin_suite(macro_host: Arc<Self>) -> PluginSuite {
+        let mut suite = PluginSuite::default();
+        // Register inline macro plugins.
+        for proc_macro in &macro_host.macros {
+            let expansions = proc_macro
+                .get_expansions()
+                .iter()
+                .filter(|exp| matches!(exp.kind, ExpansionKind::Inline));
+            for expansion in expansions {
+                let plugin = Arc::new(ProcMacroInlinePlugin::new(
+                    proc_macro.clone(),
+                    expansion.clone(),
+                ));
+                suite.add_inline_macro_plugin_ex(expansion.name.as_str(), plugin);
+            }
+        }
+        // Register procedural macro host plugin.
+        suite.add_plugin_ex(macro_host);
+        suite
+    }
+
+    pub fn instance(&self, package_id: PackageId) -> &ProcMacroInstance {
+        self.macros
+            .iter()
+            .find(|m| m.package_id() == package_id)
+            .expect("procedural macro must be registered in proc macro host")
+    }
+
+    fn calculate_metadata(
+        db: &dyn SyntaxGroup,
+        item_ast: ast::ModuleItem,
+        edition: Edition,
+    ) -> TokenStreamMetadata {
+        let stable_ptr = item_ast.clone().stable_ptr().untyped();
+        let file_path = stable_ptr.file_id(db).full_path(db.upcast());
+        let file_id = short_hash(file_path.clone());
+        let edition = edition_variant(edition);
+        TokenStreamMetadata::new(file_path, file_id, edition)
+    }
+}
+
+impl MacroPlugin for ProcMacroHostPlugin {
+    fn generate_code(
+        &self,
+        db: &dyn SyntaxGroup,
+        item_ast: ast::ModuleItem,
+        metadata: &MacroPluginMetadata<'_>,
+    ) -> PluginResult {
+        let stream_metadata = Self::calculate_metadata(db, item_ast.clone(), metadata.edition);
+
+        // Handle inner functions.
+        if let InnerAttrExpansionResult::Some(result) = self.expand_inner_attr(db, item_ast.clone())
+        {
+            return result;
+        }
+
+        // Expand first attribute.
+        // Note that we only expand the first attribute, as we assume that the rest of the attributes
+        // will be handled by a subsequent call to this function.
+        let ctx = AllocationContext::default();
+        let (input, body) = self.parse_attribute(db, item_ast.clone(), &ctx);
+
+        if let Some(result) = match input {
+            AttrExpansionFound::Last(input) => Some((input, true)),
+            AttrExpansionFound::Some(input) => Some((input, false)),
+            AttrExpansionFound::None => None,
+        }
+        .map(|(input, last)| {
+            let token_stream = body.with_metadata(stream_metadata.clone());
+            self.expand_attribute(input.id, last, input.args, token_stream, input.call_site)
+        }) {
+            return result;
+        }
+
+        // Expand all derives.
+        // Note that all proc macro attributes should be already expanded at this point.
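+        // All derives found on the item are expanded together into a single generated file.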
+        if let Some(result) = self.expand_derives(db, item_ast.clone(), stream_metadata.clone()) {
+            return result;
+        }
+
+        // No expansions can be applied.
+        PluginResult {
+            code: None,
+            diagnostics: Vec::new(),
+            remove_original_item: false,
+        }
+    }
+
+    fn declared_attributes(&self) -> Vec<String> {
+        self.macros
+            .iter()
+            .flat_map(|m| m.declared_attributes_and_executables())
+            .chain(vec![FULL_PATH_MARKER_KEY.to_string()])
+            .collect()
+    }
+
+    fn declared_derives(&self) -> Vec<String> {
+        self.macros
+            .iter()
+            .flat_map(|m| m.declared_derives())
+            .map(|s| s.to_case(Case::UpperCamel))
+            .collect()
+    }
+
+    fn executable_attributes(&self) -> Vec<String> {
+        self.macros
+            .iter()
+            .flat_map(|m| m.executable_attributes())
+            .collect()
+    }
+}
+
+/// A Scarb wrapper around the `ProcMacroHost` compiler plugin.
+///
+/// This struct represents the compiler plugin in terms of the Scarb data model.
+/// It also builds a plugin suite that enables the compiler plugin.
+#[derive(Default)]
+pub struct ProcMacroHost {
+    macros: Vec<Arc<ProcMacroInstance>>,
+}
+
+impl ProcMacroHost {
+    pub fn register(&mut self, package: Package, config: &Config) -> Result<()> {
+        let instance = ProcMacroInstance::try_new(package, config)?;
+        self.macros.push(Arc::new(instance));
+        Ok(())
+    }
+
+    pub fn into_plugin(self) -> Result<ProcMacroHostPlugin> {
+        ProcMacroHostPlugin::try_new(self.macros)
+    }
+
+    pub fn macros(&self) -> &[Arc<ProcMacroInstance>] {
+        &self.macros
+    }
+}
+
+fn generate_code_mappings(token_stream: &TokenStream) -> Vec<CodeMapping> {
+    token_stream
+        .tokens
+        .iter()
+        .scan(TextOffset::default(), |current_pos, token| {
+            let TokenTree::Ident(token) = token;
+            let token_width = TextWidth::from_str(token.content.as_ref());
+
+            let mapping = CodeMapping {
+                span: TextSpan {
+                    start: *current_pos,
+                    end: current_pos.add_width(token_width),
+                },
+                origin: CodeOrigin::Span(TextSpan {
+                    start: TextOffset::default()
+                        .add_width(TextWidth::new_for_testing(token.span.start)),
+                    end: TextOffset::default()
+                        .add_width(TextWidth::new_for_testing(token.span.end)),
+                }),
+            };
+
+            *current_pos = current_pos.add_width(token_width);
+            Some(mapping)
+        })
+        .collect()
}
diff --git a/scarb/src/compiler/plugin/proc_macro/host/post.rs b/scarb/src/compiler/plugin/proc_macro/host/post.rs
new file mode 100644
index 000000000..078bfef0e
--- /dev/null
+++ b/scarb/src/compiler/plugin/proc_macro/host/post.rs
@@ -0,0 +1,113 @@
+use crate::compiler::plugin::proc_macro::host::FULL_PATH_MARKER_KEY;
+use crate::compiler::plugin::proc_macro::ProcMacroHostPlugin;
+use crate::core::PackageId;
+use anyhow::Result;
+use cairo_lang_defs::ids::{ModuleItemId, TopLevelLanguageElementId};
+use cairo_lang_diagnostics::ToOption;
+use cairo_lang_macro::FullPathMarker;
+use cairo_lang_semantic::db::SemanticGroup;
+use cairo_lang_semantic::items::attribute::SemanticQueryAttrs;
+use cairo_lang_syntax::attribute::structured::{Attribute, AttributeArgVariant};
+use cairo_lang_syntax::node::ast::Expr;
+use itertools::Itertools;
+use std::collections::HashMap;
+use tracing::{debug, trace_span};
+
+impl ProcMacroHostPlugin {
+    #[tracing::instrument(level = "trace", skip_all)]
+    pub fn post_process(&self, db: &dyn SemanticGroup) -> Result<()> {
+        let markers = self.collect_full_path_markers(db);
+
+        let aux_data = self.collect_aux_data(db);
+        for instance in self.macros.iter() {
+            let _ = trace_span!(
+                "post_process_callback",
+                instance = %instance.package_id()
+            )
+            .entered();
+            let instance_markers = self
+                .full_path_markers
+                .read()
+                .unwrap()
+                .get(&instance.package_id())
+                .cloned()
+                .unwrap_or_default();
+            let markers_for_instance = markers
+                .iter()
+                .filter(|(key, _)| instance_markers.contains(key))
+                .map(|(key, full_path)| FullPathMarker {
+                    key: key.clone(),
+                    full_path: full_path.clone(),
+                })
+                .collect_vec();
+            let data = aux_data
+                .get(&instance.package_id())
+                .cloned()
+                .unwrap_or_default();
+            debug!("calling post processing callback with: {data:?}");
+            instance.post_process_callback(data.clone(), markers_for_instance);
+        }
+        Ok(())
+    }
+
+    fn collect_full_path_markers(&self, db: &dyn SemanticGroup) -> HashMap<String, String> {
+        let mut markers: HashMap<String, String> = HashMap::new();
+        // Scan all crates for items annotated with the FULL_PATH_MARKER_KEY attribute.
+        for crate_id in db.crates() {
+            let modules = db.crate_modules(crate_id);
+            for module_id in modules.iter() {
+                let Ok(module_items) = db.module_items(*module_id) else {
+                    continue;
+                };
+                for item_id in module_items.iter() {
+                    let attr = match item_id {
+                        ModuleItemId::Struct(id) => {
+                            id.query_attr(db, FULL_PATH_MARKER_KEY).to_option()
+                        }
+                        ModuleItemId::Enum(id) => {
+                            id.query_attr(db, FULL_PATH_MARKER_KEY).to_option()
+                        }
+                        ModuleItemId::FreeFunction(id) => {
+                            id.query_attr(db, FULL_PATH_MARKER_KEY).to_option()
+                        }
+                        _ => None,
+                    };
+
+                    let keys = attr
+                        .unwrap_or_default()
+                        .into_iter()
+                        .filter_map(|attr| Self::extract_key(db, attr))
+                        .collect_vec();
+                    let full_path = item_id.full_path(db.upcast());
+                    for key in keys {
+                        markers.insert(key, full_path.clone());
+                    }
+                }
+            }
+        }
+        markers
+    }
+
+    fn extract_key(db: &dyn SemanticGroup, attr: Attribute) -> Option<String> {
+        if attr.id != FULL_PATH_MARKER_KEY {
+            return None;
+        }
+
+        for arg in attr.args.clone() {
+            if let AttributeArgVariant::Unnamed(Expr::String(s)) = arg.variant {
+                return s.string_value(db.upcast());
+            }
+        }
+
+        None
+    }
+
+    pub(crate) fn register_full_path_markers(&self, package_id: PackageId, markers: Vec<String>) {
+        self.full_path_markers
+            .write()
+            .unwrap()
+            .entry(package_id)
+            // Append the newly reported markers to any already registered for this package.
+            .and_modify(|existing| existing.extend(markers.clone()))
+            .or_insert(markers);
+    }
+}
diff --git a/scarb/src/compiler/plugin/proc_macro/mod.rs b/scarb/src/compiler/plugin/proc_macro/mod.rs
index 888c012fc..83a4e7822 100644
--- a/scarb/src/compiler/plugin/proc_macro/mod.rs
+++ b/scarb/src/compiler/plugin/proc_macro/mod.rs
@@ -1,7 +1,9 @@
 pub mod compilation;
 mod ffi;
 mod host;
+mod types;
 
 pub use compilation::{check_unit, compile_unit, fetch_crate};
 pub use ffi::*;
 pub use host::*;
+pub use types::*;
diff --git a/scarb/src/compiler/plugin/proc_macro/types.rs b/scarb/src/compiler/plugin/proc_macro/types.rs
new file mode 100644
index 000000000..ca4090484
--- /dev/null
+++ b/scarb/src/compiler/plugin/proc_macro/types.rs
@@ -0,0 +1,100 @@
+use cairo_lang_macro::{
+    AllocationContext, TextSpan, Token, TokenStream, TokenStreamMetadata, TokenTree,
+};
+use cairo_lang_syntax::node::{db::SyntaxGroup, SyntaxNode};
+
+/// Helps create a `TokenStream` from multiple `SyntaxNode`s
+/// that are not descendants or ancestors of one another inside the `SyntaxTree`.
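+///
+/// # Example
+/// An illustrative sketch (mirroring the unit test below; assumes a parser
+/// database such as `SimpleParserDatabase` is available):
+/// ```ignore
+/// let db = SimpleParserDatabase::default();
+/// let parsed = db.parse_virtual("fn main() {}").unwrap();
+/// let mut builder = TokenStreamBuilder::new(&db);
+/// builder.add_node(parsed);
+/// let ctx = AllocationContext::default();
+/// let token_stream = builder.build(&ctx);
+/// ```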
+pub struct TokenStreamBuilder<'a> {
+    db: &'a dyn SyntaxGroup,
+    nodes: Vec<SyntaxNode>,
+    metadata: Option<TokenStreamMetadata>,
+}
+
+impl<'a> TokenStreamBuilder<'a> {
+    pub fn new(db: &'a dyn SyntaxGroup) -> Self {
+        Self {
+            db,
+            nodes: Vec::default(),
+            metadata: None,
+        }
+    }
+
+    pub fn add_node(&mut self, node: SyntaxNode) {
+        self.nodes.push(node);
+    }
+
+    pub fn with_metadata(&mut self, metadata: TokenStreamMetadata) {
+        self.metadata = Some(metadata);
+    }
+
+    pub fn build(&self, ctx: &AllocationContext) -> TokenStream {
+        let result: Vec<TokenTree> = self
+            .nodes
+            .iter()
+            .flat_map(|node| {
+                let leaves = node.tokens(self.db);
+                leaves.map(|node| TokenTree::Ident(self.token_from_syntax_node(node.clone(), ctx)))
+            })
+            .collect();
+
+        match self.metadata.as_ref() {
+            Some(metadata) => TokenStream::new(result).with_metadata(metadata.clone()),
+            None => TokenStream::new(result),
+        }
+    }
+
+    pub fn token_from_syntax_node(&self, node: SyntaxNode, ctx: &AllocationContext) -> Token {
+        let span = node.span(self.db);
+        let text = node.get_text(self.db);
+        let span = TextSpan {
+            // We skip the whitespace prefix, so that diagnostics start where the actual token content is.
+            start: span.start.as_u32() + whitespace_prefix_len(&text),
+            end: span.end.as_u32(),
+        };
+        Token::new_in(text, span, ctx)
+    }
+}
+
+fn whitespace_prefix_len(s: &str) -> u32 {
+    s.chars().take_while(|c| c.is_whitespace()).count() as u32
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::compiler::plugin::proc_macro::TokenStreamBuilder;
+    use cairo_lang_macro::{AllocationContext, TextSpan, TokenStream, TokenTree};
+    use cairo_lang_parser::utils::SimpleParserDatabase;
+    use indoc::indoc;
+
+    #[test]
+    fn whitespace_skipped() {
+        let db = SimpleParserDatabase::default();
+        let mut builder = TokenStreamBuilder::new(&db);
+        let content = indoc! {r#"
+            fn main() {
+                let x = 42;
+            }
+        "#};
+        let parsed = db.parse_virtual(content).unwrap();
+        builder.add_node(parsed);
+        let ctx = AllocationContext::default();
+        let token_stream = builder.build(&ctx);
+        let token_at = |token_stream: &TokenStream, idx: usize| {
+            let token: TokenTree = token_stream.tokens[idx].clone();
+            match token {
+                TokenTree::Ident(token) => token,
+            }
+        };
+        let token = token_at(&token_stream, 4);
+        assert_eq!(token.content.as_ref(), "{\n");
+        assert_eq!(token.span, TextSpan { start: 10, end: 12 });
+        let token = token_at(&token_stream, 5);
+        assert_eq!(token.content.as_ref(), "    let ");
+        // Note that we skip 4 whitespace characters in the span.
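+        // The raw token "    let " starts at offset 12; excluding its 4-space
+        // indentation moves the span start to 16, while the end stays at 20.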
+        assert_eq!(token.span, TextSpan { start: 16, end: 20 });
+        let token = token_at(&token_stream, 6);
+        assert_eq!(token.content.as_ref(), "x ");
+        assert_eq!(token.span, TextSpan { start: 20, end: 22 });
+    }
+}
diff --git a/scarb/src/ops/proc_macro_server/methods/expand_attribute.rs b/scarb/src/ops/proc_macro_server/methods/expand_attribute.rs
index 89fc10480..264e5eb68 100644
--- a/scarb/src/ops/proc_macro_server/methods/expand_attribute.rs
+++ b/scarb/src/ops/proc_macro_server/methods/expand_attribute.rs
@@ -19,7 +19,12 @@ impl Handler for ExpandAttribute {
             })
             .unwrap();
 
-        let result = instance.generate_code(params.attr.into(), params.args, params.item);
+        let result = instance.generate_code(
+            params.attr.into(),
+            params.call_site,
+            params.args,
+            params.item,
+        );
 
         Ok(ProcMacroResult {
             token_stream: result.token_stream,
diff --git a/scarb/src/ops/proc_macro_server/methods/expand_derive.rs b/scarb/src/ops/proc_macro_server/methods/expand_derive.rs
index d792ef51e..091aa342d 100644
--- a/scarb/src/ops/proc_macro_server/methods/expand_derive.rs
+++ b/scarb/src/ops/proc_macro_server/methods/expand_derive.rs
@@ -10,7 +10,7 @@ use crate::compiler::plugin::proc_macro::{Expansion, ExpansionKind, ProcMacroHos
 
 impl Handler for ExpandDerive {
     fn handle(proc_macro_host: Arc<ProcMacroHostPlugin>, params: Self::Params) -> Result<Self::Response> {
-        let mut derived_code = String::new();
+        let mut derived_code = TokenStream::empty();
         let mut all_diagnostics = vec![];
 
         for derive in params.derives {
@@ -23,6 +23,7 @@ impl Handler for ExpandDerive {
 
             let result = instance.generate_code(
                 expansion.name.clone(),
+                params.call_site.clone(),
                 TokenStream::empty(),
                 params.item.clone(),
             );
@@ -30,11 +31,11 @@ impl Handler for ExpandDerive {
             // Register diagnostics.
             all_diagnostics.extend(result.diagnostics);
             // Add generated code.
-            derived_code.push_str(&result.token_stream.to_string());
+            derived_code.tokens.extend(result.token_stream.tokens);
         }
 
         Ok(ProcMacroResult {
-            token_stream: TokenStream::new(derived_code),
+            token_stream: derived_code,
             diagnostics: all_diagnostics,
         })
     }
diff --git a/scarb/src/ops/proc_macro_server/methods/expand_inline.rs b/scarb/src/ops/proc_macro_server/methods/expand_inline.rs
index 1e209fae1..c4d26d5ce 100644
--- a/scarb/src/ops/proc_macro_server/methods/expand_inline.rs
+++ b/scarb/src/ops/proc_macro_server/methods/expand_inline.rs
@@ -20,7 +20,12 @@ impl Handler for ExpandInline {
             })
             .unwrap();
 
-        let result = instance.generate_code(params.name.into(), TokenStream::empty(), params.args);
+        let result = instance.generate_code(
+            params.name.into(),
+            params.call_site,
+            TokenStream::empty(),
+            params.args,
+        );
 
         Ok(ProcMacroResult {
             token_stream: result.token_stream,
diff --git a/scarb/tests/package.rs b/scarb/tests/package.rs
index 00d3cb279..2b7233428 100644
--- a/scarb/tests/package.rs
+++ b/scarb/tests/package.rs
@@ -486,6 +486,7 @@ fn workspace() {
 #[test]
 fn cairo_plugin() {
     let t = TempDir::new().unwrap();
+    // Note this will be packaged with `cairo-lang-macro` from crates.io, not the local one.
CairoPluginProjectBuilder::default().build(&t); Scarb::quick_snapbox() diff --git a/scarb/tests/proc_macro_build.rs b/scarb/tests/proc_macro_build.rs new file mode 100644 index 000000000..748940877 --- /dev/null +++ b/scarb/tests/proc_macro_build.rs @@ -0,0 +1,436 @@ +use assert_fs::fixture::PathChild; +use assert_fs::TempDir; +use indoc::indoc; +use scarb_test_support::cairo_plugin_project_builder::CairoPluginProjectBuilder; +use scarb_test_support::command::Scarb; +use scarb_test_support::project_builder::ProjectBuilder; +use scarb_test_support::workspace_builder::WorkspaceBuilder; +use snapbox::assert_matches; + +#[test] +fn compile_cairo_plugin() { + let t = TempDir::new().unwrap(); + CairoPluginProjectBuilder::default().build(&t); + let output = Scarb::quick_snapbox() + .arg("build") + // Disable colors in Cargo output. + .env("CARGO_TERM_COLOR", "never") + .current_dir(&t) + .output() + .unwrap(); + assert!( + output.status.success(), + "stdout={}\n stderr={}", + String::from_utf8_lossy(&output.stdout), + String::from_utf8_lossy(&output.stderr), + ); + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); + assert!(stdout.contains("Compiling some v1.0.0")); + let lines = stdout.lines().map(ToString::to_string).collect::>(); + let (last, lines) = lines.split_last().unwrap(); + assert_matches(r#"[..] Finished `dev` profile target(s) in [..]"#, last); + let (last, _lines) = lines.split_last().unwrap(); + // Line from Cargo output + assert_matches( + r#"[..]Finished `release` profile [optimized] target(s) in[..]"#, + last, + ); +} + +#[test] +fn check_cairo_plugin() { + let t = TempDir::new().unwrap(); + CairoPluginProjectBuilder::default().build(&t); + let output = Scarb::quick_snapbox() + .arg("check") + // Disable colors in Cargo output. + .env("CARGO_TERM_COLOR", "never") + .current_dir(&t) + .output() + .unwrap(); + assert!( + output.status.success(), + "{}", + String::from_utf8_lossy(&output.stderr) + ); + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); + assert!(stdout.contains("Checking some v1.0.0")); + let lines = stdout.lines().map(ToString::to_string).collect::>(); + let (last, lines) = lines.split_last().unwrap(); + assert_matches( + r#"[..] Finished checking `dev` profile target(s) in [..]"#, + last, + ); + let (last, _lines) = lines.split_last().unwrap(); + // Line from Cargo output + assert_matches( + r#"[..]Finished `release` profile [optimized] target(s) in[..]"#, + last, + ); +} + +#[test] +fn can_check_cairo_project_with_plugins() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default().build(&t); + let project = temp.child("hello"); + let y = project.child("other"); + CairoPluginProjectBuilder::default().name("other").build(&y); + WorkspaceBuilder::start() + .add_member("other") + .package( + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t), + ) + .build(&project); + Scarb::quick_snapbox() + .arg("check") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success() + .stdout_matches(indoc! {r#" + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Checking other v1.0.0 ([..]Scarb.toml) + [..]Checking hello v1.0.0 ([..]Scarb.toml) + [..]Finished checking `dev` profile target(s) in [..] 
+ "#}); +} + +#[test] +fn resolve_fetched_plugins() { + let t = TempDir::new().unwrap(); + CairoPluginProjectBuilder::default().build(&t); + assert!(!t.child("Cargo.lock").exists()); + let output = Scarb::quick_snapbox() + .arg("fetch") + // Disable colors in Cargo output. + .env("CARGO_TERM_COLOR", "never") + .current_dir(&t) + .output() + .unwrap(); + assert!( + output.status.success(), + "{}", + String::from_utf8_lossy(&output.stderr) + ); + assert!(t.child("Cargo.lock").exists()) +} + +#[test] +fn can_use_json_output() { + let t = TempDir::new().unwrap(); + CairoPluginProjectBuilder::default().build(&t); + let output = Scarb::quick_snapbox() + .arg("--json") + .arg("check") + // Disable colors in Cargo output. + .env("CARGO_TERM_COLOR", "never") + .current_dir(&t) + .output() + .unwrap(); + assert!( + output.status.success(), + "{}", + String::from_utf8_lossy(&output.stderr) + ); + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); + let lines = stdout.lines().map(ToString::to_string).collect::>(); + let (first, lines) = lines.split_first().unwrap(); + assert_matches( + r#"{"status":"checking","message":"some v1.0.0 ([..]Scarb.toml)"}"#, + first, + ); + let (last, lines) = lines.split_last().unwrap(); + assert_matches( + r#"{"status":"finished","message":"checking `dev` profile target(s) in [..]"}"#, + last, + ); + // Line from Cargo. + let (last, _lines) = lines.split_last().unwrap(); + assert_matches(r#"{"reason":"build-finished","success":true}"#, last); +} + +#[test] +fn compile_cairo_plugin_with_lib_target() { + let t = TempDir::new().unwrap(); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .manifest_extra(indoc! {r#" + [lib] + [cairo-plugin] + "#}) + .build(&t); + + Scarb::quick_snapbox() + .arg("build") + .current_dir(&t) + .assert() + .failure() + .stdout_matches(indoc! {r#" + error: failed to parse manifest at: [..]/Scarb.toml + + Caused by: + target `cairo-plugin` cannot be mixed with other targets + "#}); +} + +#[test] +fn compile_cairo_plugin_with_other_target() { + let t = TempDir::new().unwrap(); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .manifest_extra(indoc! {r#" + [cairo-plugin] + [[target.starknet-contract]] + "#}) + .build(&t); + + Scarb::quick_snapbox() + .arg("build") + .current_dir(&t) + .assert() + .failure() + .stdout_matches(indoc! {r#" + error: failed to parse manifest at: [..]/Scarb.toml + + Caused by: + target `cairo-plugin` cannot be mixed with other targets + "#}); +} + +#[test] +fn can_define_multiple_macros() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .lib_rs(indoc! 
{r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, AuxData, PostProcessContext, post_process, TokenTree, Token, TextSpan}; + + #[attribute_macro] + pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let new_token_string = token_stream.to_string().replace("12", "34"); + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + new_token_string.clone(), + TextSpan { start: 0, end: new_token_string.len() as u32 }, + ))]); + let aux_data = AuxData::new(Vec::new()); + ProcMacroResult::new(token_stream).with_aux_data(aux_data) + } + + #[attribute_macro] + pub fn world(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let new_token_string = token_stream.to_string().replace("56", "78"); + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + new_token_string.clone(), + TextSpan { start: 0, end: new_token_string.len() as u32 }, + ))]); + let aux_data = AuxData::new(Vec::new()); + ProcMacroResult::new(token_stream).with_aux_data(aux_data) + } + + #[post_process] + pub fn callback(context: PostProcessContext) { + assert_eq!(context.aux_data.len(), 2); + } + "##}) + .build(&t); + + let w = temp.child("other"); + CairoPluginProjectBuilder::default() + .name("other") + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, AuxData, PostProcessContext, post_process, TokenTree, Token, TextSpan}; + + #[attribute_macro] + pub fn beautiful(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let new_token_string = token_stream.to_string().replace("90", "09"); + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + new_token_string.clone(), + TextSpan { start: 0, end: new_token_string.len() as u32 }, + ))]); + let aux_data = AuxData::new(Vec::new()); + ProcMacroResult::new(token_stream).with_aux_data(aux_data) + } + + #[post_process] + pub fn callback(context: PostProcessContext) { + assert_eq!(context.aux_data.len(), 1); + } + "##}) + .build(&w); + + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .dep("other", &w) + .lib_cairo(indoc! {r#" + #[hello] + #[beautiful] + #[world] + fn main() -> felt252 { 12 + 56 + 90 } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("cairo-run") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success() + .stdout_matches(indoc! {r#" + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Compiling other v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + [..]Finished `dev` profile target(s) in [..] + [..]Running hello + Run completed successfully, returning [121] + "#}); +} + +#[test] +fn cannot_duplicate_macros() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + + #[attribute_macro] + pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(token_stream) + } + + #[attribute_macro] + pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(token_stream) + } + "##}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .lib_cairo(indoc! 
{r#" + #[hello] + fn main() -> felt252 { 12 + 56 + 90 } + "#}) + .build(&project); + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + // Fails with Cargo compile error. + .failure(); +} + +#[test] +fn cannot_duplicate_macros_across_packages() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .lib_rs(indoc! {r#" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + + #[attribute_macro] + pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(token_stream) + } + + #[attribute_macro] + pub fn world(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(token_stream) + } + "#}) + .build(&t); + + let w = temp.child("other"); + CairoPluginProjectBuilder::default() + .name("other") + .lib_rs(indoc! {r#" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + + #[attribute_macro] + pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(token_stream) + } + "#}) + .build(&w); + + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .dep("other", &w) + .lib_cairo(indoc! {r#" + #[hello] + #[world] + fn main() -> felt252 { 12 + 56 + 90 } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .failure() + .stdout_matches(indoc! {r#" + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Compiling other v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + error: duplicate expansions defined for procedural macros: hello (some v1.0.0 ([..]Scarb.toml) and other v1.0.0 ([..]Scarb.toml)) + "#}); +} + +#[test] +fn cannot_use_undefined_macro() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default().build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[world] + fn main() -> felt252 { 12 + 56 + 90 } + "#}) + .build(&project); + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .failure() + .stdout_matches(indoc! {r#" + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + error: Plugin diagnostic: Unsupported attribute. + --> [..]lib.cairo:1:1 + #[world] + ^******^ + + error: could not compile `hello` due to previous error + "#}); +} diff --git a/scarb/tests/proc_macro_executable.rs b/scarb/tests/proc_macro_executable.rs new file mode 100644 index 000000000..2385c3025 --- /dev/null +++ b/scarb/tests/proc_macro_executable.rs @@ -0,0 +1,107 @@ +use assert_fs::fixture::PathChild; +use assert_fs::TempDir; +use cairo_lang_sierra::program::VersionedProgram; +use indoc::indoc; +use scarb_test_support::cairo_plugin_project_builder::CairoPluginProjectBuilder; +use scarb_test_support::command::Scarb; +use scarb_test_support::fsx::ChildPathEx; +use scarb_test_support::project_builder::ProjectBuilder; + +#[test] +fn can_create_executable_attribute() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .lib_rs(indoc! 
{r##"
            use cairo_lang_macro::executable_attribute;

            executable_attribute!("some");
        "##})
        .build(&t);

    let project = temp.child("hello");
    ProjectBuilder::start()
        .name("hello")
        .version("1.0.0")
        .dep_starknet()
        .dep("some", &t)
        .lib_cairo(indoc! {r#"
            #[some]
            fn main() -> felt252 { 12 }
        "#})
        .build(&project);

    Scarb::quick_snapbox()
        .arg("build")
        // Disable output from Cargo.
        .env("CARGO_TERM_QUIET", "true")
        .current_dir(&project)
        .assert()
        .success()
        .stdout_matches(indoc! {r#"
            [..]Compiling some v1.0.0 ([..]Scarb.toml)
            [..]Compiling hello v1.0.0 ([..]Scarb.toml)
            [..]Finished `dev` profile target(s) in [..]
        "#});
    let sierra = project
        .child("target")
        .child("dev")
        .child("hello.sierra.json")
        .read_to_string();
    let sierra = serde_json::from_str::<VersionedProgram>(&sierra).unwrap();
    let sierra = sierra.into_v1().unwrap();
    let executables = sierra.debug_info.unwrap().executables;
    assert_eq!(executables.len(), 1);
    let executables = executables.get("some").unwrap();
    assert_eq!(executables.len(), 1);
    let fid = executables.first().unwrap().clone();
    assert_eq!(fid.clone().debug_name.unwrap(), "hello::main");
    assert!(sierra
        .program
        .funcs
        .iter()
        .any(|f| f.id.clone() == fid.clone()));
}

#[test]
fn executable_name_cannot_clash_attr() {
    let temp = TempDir::new().unwrap();
    let t = temp.child("some");
    CairoPluginProjectBuilder::default()
        .lib_rs(indoc! {r##"
            use cairo_lang_macro::{executable_attribute, attribute_macro, TokenStream, ProcMacroResult};

            executable_attribute!("some");

            #[attribute_macro]
            fn some(_args: TokenStream, input: TokenStream) -> ProcMacroResult {
                ProcMacroResult::new(input)
            }
        "##})
        .build(&t);

    let project = temp.child("hello");
    ProjectBuilder::start()
        .name("hello")
        .version("1.0.0")
        .dep_starknet()
        .dep("some", &t)
        .lib_cairo(indoc! {r#"
            #[some]
            fn main() -> felt252 { 12 }
        "#})
        .build(&project);

    Scarb::quick_snapbox()
        .arg("build")
        // Disable output from Cargo.
        .env("CARGO_TERM_QUIET", "true")
        .current_dir(&project)
        .assert()
        .failure()
        .stdout_matches(indoc! {r#"
            [..]Compiling some v1.0.0 ([..]Scarb.toml)
            [..]Compiling hello v1.0.0 ([..]Scarb.toml)
            error: duplicate expansions defined for procedural macro some v1.0.0 ([..]Scarb.toml): some
        "#});
}
diff --git a/scarb/tests/build_cairo_plugin.rs b/scarb/tests/proc_macro_expand.rs
similarity index 53%
rename from scarb/tests/build_cairo_plugin.rs
rename to scarb/tests/proc_macro_expand.rs
index c93fb3b38..249296307 100644
--- a/scarb/tests/build_cairo_plugin.rs
+++ b/scarb/tests/proc_macro_expand.rs
@@ -1,225 +1,22 @@
 use assert_fs::fixture::PathChild;
 use assert_fs::TempDir;
-use cairo_lang_sierra::program::VersionedProgram;
 use indoc::indoc;
 use scarb_test_support::cairo_plugin_project_builder::CairoPluginProjectBuilder;
 use scarb_test_support::command::Scarb;
 use scarb_test_support::fsx::ChildPathEx;
 use scarb_test_support::project_builder::ProjectBuilder;
-use scarb_test_support::workspace_builder::WorkspaceBuilder;
-use snapbox::assert_matches;
 
 #[test]
-fn compile_cairo_plugin() {
-    let t = TempDir::new().unwrap();
-    CairoPluginProjectBuilder::default().build(&t);
-    let output = Scarb::quick_snapbox()
-        .arg("build")
-        // Disable colors in Cargo output.
- .env("CARGO_TERM_COLOR", "never") - .current_dir(&t) - .output() - .unwrap(); - assert!( - output.status.success(), - "stdout={}\n stderr={}", - String::from_utf8_lossy(&output.stdout), - String::from_utf8_lossy(&output.stderr), - ); - let stdout = String::from_utf8_lossy(&output.stdout).to_string(); - assert!(stdout.contains("Compiling some v1.0.0")); - let lines = stdout.lines().map(ToString::to_string).collect::>(); - let (last, lines) = lines.split_last().unwrap(); - assert_matches(r#"[..] Finished `dev` profile target(s) in [..]"#, last); - let (last, _lines) = lines.split_last().unwrap(); - // Line from Cargo output - assert_matches( - r#"[..]Finished `release` profile [optimized] target(s) in[..]"#, - last, - ); -} - -#[test] -fn check_cairo_plugin() { - let t = TempDir::new().unwrap(); - CairoPluginProjectBuilder::default().build(&t); - let output = Scarb::quick_snapbox() - .arg("check") - // Disable colors in Cargo output. - .env("CARGO_TERM_COLOR", "never") - .current_dir(&t) - .output() - .unwrap(); - assert!( - output.status.success(), - "{}", - String::from_utf8_lossy(&output.stderr) - ); - let stdout = String::from_utf8_lossy(&output.stdout).to_string(); - assert!(stdout.contains("Checking some v1.0.0")); - let lines = stdout.lines().map(ToString::to_string).collect::>(); - let (last, lines) = lines.split_last().unwrap(); - assert_matches( - r#"[..] Finished checking `dev` profile target(s) in [..]"#, - last, - ); - let (last, _lines) = lines.split_last().unwrap(); - // Line from Cargo output - assert_matches( - r#"[..]Finished `release` profile [optimized] target(s) in[..]"#, - last, - ); -} - -#[test] -fn can_check_cairo_project_with_plugins() { - let temp = TempDir::new().unwrap(); - let t = temp.child("some"); - CairoPluginProjectBuilder::default().build(&t); - let project = temp.child("hello"); - let y = project.child("other"); - CairoPluginProjectBuilder::default().name("other").build(&y); - WorkspaceBuilder::start() - .add_member("other") - .package( - ProjectBuilder::start() - .name("hello") - .version("1.0.0") - .dep("some", &t), - ) - .build(&project); - Scarb::quick_snapbox() - .arg("check") - // Disable output from Cargo. - .env("CARGO_TERM_QUIET", "true") - .current_dir(&project) - .assert() - .success() - .stdout_matches(indoc! {r#" - [..]Compiling some v1.0.0 ([..]Scarb.toml) - [..]Checking other v1.0.0 ([..]Scarb.toml) - [..]Checking hello v1.0.0 ([..]Scarb.toml) - [..]Finished checking `dev` profile target(s) in [..] - "#}); -} - -#[test] -fn resolve_fetched_plugins() { - let t = TempDir::new().unwrap(); - CairoPluginProjectBuilder::default().build(&t); - assert!(!t.child("Cargo.lock").exists()); - let output = Scarb::quick_snapbox() - .arg("fetch") - // Disable colors in Cargo output. - .env("CARGO_TERM_COLOR", "never") - .current_dir(&t) - .output() - .unwrap(); - assert!( - output.status.success(), - "{}", - String::from_utf8_lossy(&output.stderr) - ); - assert!(t.child("Cargo.lock").exists()) -} - -#[test] -fn can_use_json_output() { - let t = TempDir::new().unwrap(); - CairoPluginProjectBuilder::default().build(&t); - let output = Scarb::quick_snapbox() - .arg("--json") - .arg("check") - // Disable colors in Cargo output. 
- .env("CARGO_TERM_COLOR", "never") - .current_dir(&t) - .output() - .unwrap(); - assert!( - output.status.success(), - "{}", - String::from_utf8_lossy(&output.stderr) - ); - let stdout = String::from_utf8_lossy(&output.stdout).to_string(); - let lines = stdout.lines().map(ToString::to_string).collect::>(); - let (first, lines) = lines.split_first().unwrap(); - assert_matches( - r#"{"status":"checking","message":"some v1.0.0 ([..]Scarb.toml)"}"#, - first, - ); - let (last, lines) = lines.split_last().unwrap(); - assert_matches( - r#"{"status":"finished","message":"checking `dev` profile target(s) in [..]"}"#, - last, - ); - // Line from Cargo. - let (last, _lines) = lines.split_last().unwrap(); - assert_matches(r#"{"reason":"build-finished","success":true}"#, last); -} - -#[test] -fn compile_cairo_plugin_with_lib_target() { - let t = TempDir::new().unwrap(); - ProjectBuilder::start() - .name("hello") - .version("1.0.0") - .manifest_extra(indoc! {r#" - [lib] - [cairo-plugin] - "#}) - .build(&t); - - Scarb::quick_snapbox() - .arg("build") - .current_dir(&t) - .assert() - .failure() - .stdout_matches(indoc! {r#" - error: failed to parse manifest at: [..]/Scarb.toml - - Caused by: - target `cairo-plugin` cannot be mixed with other targets - "#}); -} - -#[test] -fn compile_cairo_plugin_with_other_target() { - let t = TempDir::new().unwrap(); - ProjectBuilder::start() - .name("hello") - .version("1.0.0") - .manifest_extra(indoc! {r#" - [cairo-plugin] - [[target.starknet-contract]] - "#}) - .build(&t); - - Scarb::quick_snapbox() - .arg("build") - .current_dir(&t) - .assert() - .failure() - .stdout_matches(indoc! {r#" - error: failed to parse manifest at: [..]/Scarb.toml - - Caused by: - target `cairo-plugin` cannot be mixed with other targets - "#}); -} - -#[test] -fn can_emit_plugin_warning() { +fn can_remove_original_node() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); CairoPluginProjectBuilder::default() .lib_rs(indoc! {r#" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, Diagnostic}; + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; #[attribute_macro] - pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - let diag = Diagnostic::warn("Some warning from macro."); - ProcMacroResult::new(token_stream) - .with_diagnostics(diag.into()) + pub fn some(_attr: TokenStream, _: TokenStream) -> ProcMacroResult { + ProcMacroResult::new(TokenStream::empty()) } "#}) .build(&t); @@ -230,12 +27,17 @@ fn can_emit_plugin_warning() { .dep("some", &t) .lib_cairo(indoc! {r#" #[some] - fn f() -> felt252 { 12 } + fn main() -> felt252 { 12 } + + fn main() -> felt252 { 34 } + + #[some] + fn main() -> felt252 { 56 } "#}) .build(&project); Scarb::quick_snapbox() - .arg("build") + .arg("cairo-run") // Disable output from Cargo. .env("CARGO_TERM_QUIET", "true") .current_dir(&project) @@ -244,30 +46,30 @@ fn can_emit_plugin_warning() { .stdout_matches(indoc! {r#" [..] Compiling some v1.0.0 ([..]Scarb.toml) [..] Compiling hello v1.0.0 ([..]Scarb.toml) - warn: Plugin diagnostic: Some warning from macro. - --> [..]lib.cairo:1:1 - #[some] - ^*****^ - [..]Finished `dev` profile target(s) in [..] + [..]Running hello + Run completed successfully, returning [34] "#}); } #[test] -fn can_emit_plugin_error() { +fn can_replace_original_node() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); CairoPluginProjectBuilder::default() - .lib_rs(indoc! 
{r#" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, Diagnostic}; + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, TokenTree, Token, TextSpan}; #[attribute_macro] pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - let diag = Diagnostic::error("Some error from macro."); + let new_token_string = token_stream.to_string().replace("12", "34"); + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + new_token_string.clone(), + TextSpan { start: 0, end: new_token_string.len() as u32 }, + ))]); ProcMacroResult::new(token_stream) - .with_diagnostics(diag.into()) } - "#}) + "##}) .build(&t); let project = temp.child("hello"); ProjectBuilder::start() @@ -276,44 +78,46 @@ fn can_emit_plugin_error() { .dep("some", &t) .lib_cairo(indoc! {r#" #[some] - fn f() -> felt252 { 12 } + fn main() -> felt252 { 12 } "#}) .build(&project); Scarb::quick_snapbox() - .arg("build") + .arg("cairo-run") // Disable output from Cargo. .env("CARGO_TERM_QUIET", "true") .current_dir(&project) .assert() - .failure() + .success() .stdout_matches(indoc! {r#" [..] Compiling some v1.0.0 ([..]Scarb.toml) [..] Compiling hello v1.0.0 ([..]Scarb.toml) - error: Plugin diagnostic: Some error from macro. - --> [..]lib.cairo:1:1 - #[some] - ^*****^ - - error: could not compile `hello` due to previous error + [..]Finished `dev` profile target(s) in [..] + [..]Running hello + Run completed successfully, returning [34] "#}); } #[test] -fn diags_from_generated_code_mapped_correctly() { +fn can_implement_inline_macro() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); CairoPluginProjectBuilder::default() - .lib_rs(indoc! {r#" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, Diagnostic}; + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, inline_macro, TokenTree, Token, TextSpan}; - #[attribute_macro] - pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - let diag = Diagnostic::error("Some error from macro."); - ProcMacroResult::new(token_stream) - .with_diagnostics(diag.into()) + #[inline_macro] + pub fn some(token_stream: TokenStream) -> ProcMacroResult { + assert_eq!(token_stream.to_string(), "()"); + ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new( + "34".to_string(), + TextSpan { + start: 0, + end: 2, + }, + ))])) } - "#}) + "##}) .build(&t); let project = temp.child("hello"); ProjectBuilder::start() @@ -321,51 +125,42 @@ fn diags_from_generated_code_mapped_correctly() { .version("1.0.0") .dep("some", &t) .lib_cairo(indoc! {r#" - #[cfg(target: 'lib')] - #[some] - fn test_increase_balance() { - i_don_exist(); - } + fn main() -> felt252 { + let x = some!(); + x + } "#}) .build(&project); Scarb::quick_snapbox() - .arg("build") + .arg("cairo-run") // Disable output from Cargo. .env("CARGO_TERM_QUIET", "true") .current_dir(&project) .assert() - .failure() + .success() .stdout_matches(indoc! {r#" [..] Compiling some v1.0.0 ([..]Scarb.toml) [..] Compiling hello v1.0.0 ([..]Scarb.toml) - error: Plugin diagnostic: Some error from macro. - --> [..]lib.cairo:2:1 - #[some] - ^*****^ - - error: Function not found. - --> [..]lib.cairo:4:5 - i_don_exist(); - ^*********^ - - error: could not compile `hello` due to previous error - "#}); + [..]Finished `dev` profile target(s) in [..] 
+ [..]Running hello + Run completed successfully, returning [34] + "#}); } #[test] -fn can_remove_original_node() { +fn empty_inline_macro_result() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); CairoPluginProjectBuilder::default() - .lib_rs(indoc! {r#" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, inline_macro}; - #[attribute_macro] - pub fn some(_attr: TokenStream, _: TokenStream) -> ProcMacroResult { + #[inline_macro] + pub fn some(_token_stream: TokenStream) -> ProcMacroResult { ProcMacroResult::new(TokenStream::empty()) } - "#}) + "##}) .build(&t); let project = temp.child("hello"); ProjectBuilder::start() @@ -373,59 +168,94 @@ fn can_remove_original_node() { .version("1.0.0") .dep("some", &t) .lib_cairo(indoc! {r#" - #[some] - fn main() -> felt252 { 12 } - - fn main() -> felt252 { 34 } - - #[some] - fn main() -> felt252 { 56 } + fn main() -> felt252 { + let _x = some!(); + 12 + } "#}) .build(&project); Scarb::quick_snapbox() - .arg("cairo-run") + .arg("build") // Disable output from Cargo. .env("CARGO_TERM_QUIET", "true") .current_dir(&project) .assert() - .success() + .failure() .stdout_matches(indoc! {r#" [..] Compiling some v1.0.0 ([..]Scarb.toml) [..] Compiling hello v1.0.0 ([..]Scarb.toml) - [..]Finished `dev` profile target(s) in [..] - [..]Running hello - Run completed successfully, returning [34] + error: Inline macro `some` failed. + --> [..]lib.cairo:2:14 + let _x = some!(); + ^*****^ + + error: could not compile `hello` due to previous error "#}); } #[test] -fn can_replace_original_node() { +fn can_implement_derive_macro() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); CairoPluginProjectBuilder::default() .lib_rs(indoc! {r##" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + use cairo_lang_macro::{derive_macro, ProcMacroResult, TokenStream, TokenTree, Token, TextSpan}; - #[attribute_macro] - pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - let token_stream = TokenStream::new( - token_stream + #[derive_macro] + pub fn custom_derive(token_stream: TokenStream) -> ProcMacroResult { + let name = token_stream + .clone() .to_string() - .replace("12", "34") - ); - ProcMacroResult::new(token_stream) - } + .lines() + .find(|l| l.starts_with("struct")) + .unwrap() + .to_string() + .replace("struct", "") + .replace("}", "") + .replace("{", "") + .trim() + .to_string(); + + let code = indoc::formatdoc!{r#" + impl SomeImpl of Hello<{name}> {{ + fn world(self: @{name}) -> u32 {{ + 32 + }} + }} + "#}; + + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + code.clone(), + TextSpan { + start: 0, + end: code.len() as u32, + }, + ))]); + + ProcMacroResult::new(token_stream) + } "##}) + .add_dep(r#"indoc = "*""#) .build(&t); + let project = temp.child("hello"); ProjectBuilder::start() .name("hello") .version("1.0.0") .dep("some", &t) .lib_cairo(indoc! {r#" - #[some] - fn main() -> felt252 { 12 } + trait Hello { + fn world(self: @T) -> u32; + } + + #[derive(CustomDerive, Drop)] + struct SomeType {} + + fn main() -> u32 { + let a = SomeType {}; + a.world() + } "#}) .build(&project); @@ -441,82 +271,113 @@ fn can_replace_original_node() { [..] Compiling hello v1.0.0 ([..]Scarb.toml) [..]Finished `dev` profile target(s) in [..] 
[..]Running hello - Run completed successfully, returning [34] + Run completed successfully, returning [32] "#}); } #[test] -fn can_return_aux_data_from_plugin() { +fn can_use_both_derive_and_attr() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); CairoPluginProjectBuilder::default() .lib_rs(indoc! {r##" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, AuxData, PostProcessContext, post_process}; - use serde::{Serialize, Deserialize}; + use cairo_lang_macro::{derive_macro, attribute_macro, ProcMacroResult, TokenStream, TokenTree, TextSpan, Token}; - #[derive(Debug, Serialize, Deserialize)] - struct SomeMacroDataFormat { - msg: String - } + #[attribute_macro] + pub fn first_attribute(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let new_token_string = token_stream.to_string().replace("SomeType", "OtherType"); + ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new( + new_token_string.clone(), + TextSpan { + start: 0, + end: new_token_string.len() as u32, + }, + ))])) + } - #[attribute_macro] - pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - let value = SomeMacroDataFormat { msg: "Hello from some macro!".to_string() }; - let value = serde_json::to_string(&value).unwrap(); - let value: Vec = value.into_bytes(); - let aux_data = AuxData::new(value); - ProcMacroResult::new(token_stream).with_aux_data(aux_data) - } - - #[post_process] - pub fn callback(context: PostProcessContext) { - let aux_data = context.aux_data.into_iter() - .map(|aux_data| { - let value: Vec = aux_data.into(); - let aux_data: SomeMacroDataFormat = serde_json::from_slice(&value).unwrap(); - aux_data - }) - .collect::>(); - println!("{:?}", aux_data); - } + #[attribute_macro] + pub fn second_attribute(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let code = token_stream.to_string().replace("OtherType", "RenamedStruct"); + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + code.clone(), + TextSpan { + start: 0, + end: code.len() as u32, + }, + ))]); + + let result_string = format!("#[derive(Drop)]\n{token_stream}"); + ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new( + result_string.clone(), + TextSpan { + start: 0, + end: result_string.len() as u32, + }, + ))])) + } - #[post_process] - pub fn some_no_op_callback(context: PostProcessContext) { - drop(context.aux_data); - } + #[derive_macro] + pub fn custom_derive(_token_stream: TokenStream) -> ProcMacroResult { + let code = indoc::formatdoc!{r#" + impl SomeImpl of Hello {{ + fn world(self: @RenamedStruct) -> u32 {{ + 32 + }} + }} + "#}; + + ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new( + code.clone(), + TextSpan { + start: 0, + end: code.len() as u32, + }, + ))])) + } "##}) - .add_dep(r#"serde = { version = "*", features = ["derive"] }"#) - .add_dep(r#"serde_json = "*""#) + .add_dep(r#"indoc = "*""#) .build(&t); + let project = temp.child("hello"); ProjectBuilder::start() .name("hello") .version("1.0.0") - .dep_starknet() .dep("some", &t) .lib_cairo(indoc! {r#" - #[some] - fn main() -> felt252 { 12 } + trait Hello { + fn world(self: @T) -> u32; + } + + #[first_attribute] + #[derive(CustomDerive)] + #[second_attribute] + struct SomeType {} + + fn main() -> u32 { + let a = RenamedStruct {}; + a.world() + } "#}) .build(&project); Scarb::quick_snapbox() - .arg("build") + .arg("cairo-run") // Disable output from Cargo. 
.env("CARGO_TERM_QUIET", "true") .current_dir(&project) .assert() .success() .stdout_matches(indoc! {r#" - [..]Compiling some v1.0.0 ([..]Scarb.toml) - [..]Compiling hello v1.0.0 ([..]Scarb.toml) - [SomeMacroDataFormat { msg: "Hello from some macro!" }] + [..] Compiling some v1.0.0 ([..]Scarb.toml) + [..] Compiling hello v1.0.0 ([..]Scarb.toml) [..]Finished `dev` profile target(s) in [..] + [..]Running hello + Run completed successfully, returning [32] "#}); } #[test] -fn can_read_token_stream_metadata() { +fn can_read_attribute_args() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); CairoPluginProjectBuilder::default() @@ -524,8 +385,8 @@ fn can_read_token_stream_metadata() { use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; #[attribute_macro] - pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - println!("{:#?}", token_stream.metadata()); + pub fn some(attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + println!("{}", attr); ProcMacroResult::new(token_stream) } "##}) @@ -538,7 +399,10 @@ fn can_read_token_stream_metadata() { .dep_starknet() .dep("some", &t) .lib_cairo(indoc! {r#" - #[some] + #[some( + first: "aaa", + second: "bbb", + )] fn main() -> felt252 { 12 } "#}) .build(&project); @@ -553,294 +417,373 @@ fn can_read_token_stream_metadata() { .stdout_matches(indoc! {r#" [..]Compiling some v1.0.0 ([..]Scarb.toml) [..]Compiling hello v1.0.0 ([..]Scarb.toml) - TokenStreamMetadata { - original_file_path: Some( - "[..]lib.cairo", - ), - file_id: Some( - "[..]", - ), - } + ( + first: "aaa", + second: "bbb", + ) [..]Finished `dev` profile target(s) in [..] "#}); } #[test] -fn can_define_multiple_macros() { +fn can_be_expanded() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); CairoPluginProjectBuilder::default() .lib_rs(indoc! 
{r##" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, AuxData, PostProcessContext, post_process}; - - #[attribute_macro] - pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - let token_stream = TokenStream::new( - token_stream - .to_string() - .replace("12", "34") - ); - let aux_data = AuxData::new(Vec::new()); - ProcMacroResult::new(token_stream).with_aux_data(aux_data) - } + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, derive_macro, TokenTree, Token, TextSpan}; #[attribute_macro] - pub fn world(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - let token_stream = TokenStream::new( - token_stream - .to_string() - .replace("56", "78") - ); - let aux_data = AuxData::new(Vec::new()); - ProcMacroResult::new(token_stream).with_aux_data(aux_data) + pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let new_token_string = token_stream.to_string().replace("12", "34"); + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + new_token_string.clone(), + TextSpan { start: 0, end: new_token_string.len() as u32 }, + ))]); + ProcMacroResult::new(token_stream) } - #[post_process] - pub fn callback(context: PostProcessContext) { - assert_eq!(context.aux_data.len(), 2); - } - "##}) - .build(&t); + #[derive_macro] + pub fn custom_derive(token_stream: TokenStream) -> ProcMacroResult { + let name = token_stream + .clone() + .to_string() + .lines() + .find(|l| l.starts_with("struct")) + .unwrap() + .to_string() + .replace("struct", "") + .replace("}", "") + .replace("{", "") + .trim() + .to_string(); - let w = temp.child("other"); - CairoPluginProjectBuilder::default() - .name("other") - .lib_rs(indoc! {r##" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, AuxData, PostProcessContext, post_process}; + let code = indoc::formatdoc!{r#" + impl SomeImpl of Hello<{name}> {{ + fn world(self: @{name}) -> u32 {{ + 32 + }} + }} + "#}; - #[attribute_macro] - pub fn beautiful(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - let token_stream = TokenStream::new( - token_stream - .to_string() - .replace("90", "09") - ); - let aux_data = AuxData::new(Vec::new()); - ProcMacroResult::new(token_stream).with_aux_data(aux_data) - } + let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new( + code.clone(), + TextSpan { start: 0, end: code.len() as u32 }, + ))]); - #[post_process] - pub fn callback(context: PostProcessContext) { - assert_eq!(context.aux_data.len(), 1); + ProcMacroResult::new(token_stream) } "##}) - .build(&w); - + .add_dep(r#"indoc = "*""#) + .build(&t); let project = temp.child("hello"); ProjectBuilder::start() .name("hello") .version("1.0.0") - .dep_starknet() .dep("some", &t) - .dep("other", &w) .lib_cairo(indoc! {r#" - #[hello] - #[beautiful] - #[world] - fn main() -> felt252 { 12 + 56 + 90 } + trait Hello { + fn world(self: @T) -> u32; + } + + #[derive(CustomDerive, Drop)] + struct SomeType {} + + #[some] + fn main() -> u32 { + let x = 12; + let a = SomeType {}; + a.world() + x + } "#}) .build(&project); Scarb::quick_snapbox() - .arg("cairo-run") + .arg("expand") // Disable output from Cargo. .env("CARGO_TERM_QUIET", "true") .current_dir(&project) .assert() - .success() - .stdout_matches(indoc! {r#" - [..]Compiling some v1.0.0 ([..]Scarb.toml) - [..]Compiling other v1.0.0 ([..]Scarb.toml) - [..]Compiling hello v1.0.0 ([..]Scarb.toml) - [..]Finished `dev` profile target(s) in [..] 
-        [..]Running hello
-        Run completed successfully, returning [121]
-        "#});
+        .success();
+
+    assert_eq!(
+        project.child("target/dev").files(),
+        vec!["hello.expanded.cairo"]
+    );
+    let expanded = project
+        .child("target/dev/hello.expanded.cairo")
+        .read_to_string();
+    snapbox::assert_eq(
+        indoc! {r#"
+        mod hello {
+            trait Hello<T> {
+                fn world(self: @T) -> u32;
+            }
+
+            #[derive(CustomDerive, Drop)]
+            struct SomeType {}
+            impl SomeTypeDrop of core::traits::Drop<SomeType>;
+            impl SomeImpl of Hello<SomeType> {
+                fn world(self: @SomeType) -> u32 {
+                    32
+                }
+            }
+            fn main() -> u32 {
+                let x = 34;
+                let a = SomeType {};
+                a.world() + x
+            }
+        }
+        "#},
+        expanded,
+    );
 }

 #[test]
-fn cannot_duplicate_macros() {
+fn can_expand_trait_inner_func_attrr() {
     let temp = TempDir::new().unwrap();
     let t = temp.child("some");
     CairoPluginProjectBuilder::default()
         .lib_rs(indoc! {r##"
-        use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro};
-
-        #[attribute_macro]
-        pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult {
-            ProcMacroResult::new(token_stream)
-        }
-
-        #[attribute_macro]
-        pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult {
-            ProcMacroResult::new(token_stream)
-        }
+        use cairo_lang_macro::{attribute_macro, ProcMacroResult, TokenStream, TokenTree, Token, TextSpan};
+
+        #[attribute_macro]
+        pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult {
+            let new_token_string = token_stream.to_string()
+                .replace("hello", "world")
+                .replace("12", "34");
+            ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new(
+                new_token_string.clone(),
+                TextSpan { start: 0, end: new_token_string.len() as u32 },
+            ))]))
+        }
         "##})
         .build(&t);
+
     let project = temp.child("hello");
     ProjectBuilder::start()
         .name("hello")
         .version("1.0.0")
-        .dep_starknet()
         .dep("some", &t)
         .lib_cairo(indoc! {r#"
-            #[hello]
-            fn main() -> felt252 { 12 + 56 + 90 }
-        "#})
-        .build(&project);
-    Scarb::quick_snapbox()
-        .arg("build")
-        // Disable output from Cargo.
-        .env("CARGO_TERM_QUIET", "true")
-        .current_dir(&project)
-        .assert()
-        // Fails with Cargo compile error.
-        .failure();
-}
-
-#[test]
-fn cannot_duplicate_macros_across_packages() {
-    let temp = TempDir::new().unwrap();
-    let t = temp.child("some");
-    CairoPluginProjectBuilder::default()
-        .lib_rs(indoc! {r#"
-        use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro};
-
-        #[attribute_macro]
-        pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult {
-            ProcMacroResult::new(token_stream)
-        }
-
-        #[attribute_macro]
-        pub fn world(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult {
-            ProcMacroResult::new(token_stream)
-        }
-        "#})
-        .build(&t);
+            trait Hello<T> {
+                #[some]
+                fn hello(self: @T) -> u32 {
+                    12
+                }
+            }

-    let w = temp.child("other");
-    CairoPluginProjectBuilder::default()
-        .name("other")
-        .lib_rs(indoc! {r#"
-        use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro};
+            #[derive(Drop)]
+            struct SomeStruct {}

-        #[attribute_macro]
-        pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult {
-            ProcMacroResult::new(token_stream)
-        }
-        "#})
-        .build(&w);
+            impl SomeImpl of Hello<SomeStruct> {}

-    let project = temp.child("hello");
-    ProjectBuilder::start()
-        .name("hello")
-        .version("1.0.0")
-        .dep_starknet()
-        .dep("some", &t)
-        .dep("other", &w)
-        .lib_cairo(indoc!
{r#" - #[hello] - #[world] - fn main() -> felt252 { 12 + 56 + 90 } + fn main() -> u32 { + let a = SomeStruct {}; + a.world() + } "#}) .build(&project); Scarb::quick_snapbox() - .arg("build") + .arg("cairo-run") // Disable output from Cargo. .env("CARGO_TERM_QUIET", "true") .current_dir(&project) .assert() - .failure() + .success() .stdout_matches(indoc! {r#" - [..]Compiling some v1.0.0 ([..]Scarb.toml) - [..]Compiling other v1.0.0 ([..]Scarb.toml) - [..]Compiling hello v1.0.0 ([..]Scarb.toml) - error: duplicate expansions defined for procedural macros: hello (some v1.0.0 ([..]Scarb.toml) and other v1.0.0 ([..]Scarb.toml)) + [..] Compiling some v1.0.0 ([..]Scarb.toml) + [..] Compiling hello v1.0.0 ([..]Scarb.toml) + [..]Finished `dev` profile target(s) in [..] + [..]Running hello + Run completed successfully, returning [34] "#}); } #[test] -fn cannot_use_undefined_macro() { +fn can_expand_impl_inner_func_attrr() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); CairoPluginProjectBuilder::default() .lib_rs(indoc! {r##" - use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro}; + use cairo_lang_macro::{attribute_macro, ProcMacroResult, TokenStream, Token, TokenTree, TextSpan}; - #[attribute_macro] - pub fn hello(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { - ProcMacroResult::new(token_stream) - } + #[attribute_macro] + pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let new_token_string = token_stream.to_string().replace("1", "2"); + ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new( + new_token_string.clone(), + TextSpan { start: 0, end: new_token_string.len() as u32 }, + ))])) + } "##}) .build(&t); + let project = temp.child("hello"); ProjectBuilder::start() .name("hello") .version("1.0.0") .dep_starknet() + .dep_cairo_test() .dep("some", &t) - .lib_cairo(indoc! {r#" - #[world] - fn main() -> felt252 { 12 + 56 + 90 } + .manifest_extra(indoc! {r#" + [[target.starknet-contract]] "#}) - .build(&project); - Scarb::quick_snapbox() - .arg("build") - // Disable output from Cargo. - .env("CARGO_TERM_QUIET", "true") - .current_dir(&project) - .assert() - .failure() - .stdout_matches(indoc! {r#" - [..]Compiling some v1.0.0 ([..]Scarb.toml) - [..]Compiling hello v1.0.0 ([..]Scarb.toml) - error: Plugin diagnostic: Unsupported attribute. - --> [..]lib.cairo:1:1 - #[world] - ^******^ - - error: could not compile `hello` due to previous error - "#}); -} - -#[test] -fn can_resolve_full_path_markers() { + .lib_cairo(indoc! 
{r#"
+            #[starknet::interface]
+            trait IHello<T> {
+                fn get(self: @T) -> u128;
+                fn increase(ref self: T);
+            }
+
+            #[starknet::contract]
+            mod Hello {
+                use starknet::storage::{StoragePointerReadAccess, StoragePointerWriteAccess};
+                use starknet::get_contract_address;
+                use super::IHello;
+
+                #[storage]
+                struct Storage {
+                    counter: u128
+                }
+
+                #[constructor]
+                fn constructor(ref self: ContractState, value_: u128) {
+                    self.counter.write(value_);
+                }
+
+                #[abi(embed_v0)]
+                impl IncImpl of IHello<ContractState> {
+                    fn get(self: @ContractState) -> u128 {
+                        self.counter.read()
+                    }
+
+                    #[some]
+                    fn increase(ref self: ContractState) {
+                        self.counter.write( self.counter.read() + 1 );
+                    }
+                }
+            }
+
+            #[cfg(test)]
+            mod tests {
+                use array::ArrayTrait;
+                use core::result::ResultTrait;
+                use core::traits::Into;
+                use option::OptionTrait;
+                use starknet::syscalls::deploy_syscall;
+                use traits::TryInto;
+
+                use super::{IHello, Hello, IHelloDispatcher, IHelloDispatcherTrait};
+
+                #[test]
+                fn test_flow() {
+                    let calldata = array![100];
+                    let (address0, _) = deploy_syscall(
+                        Hello::TEST_CLASS_HASH.try_into().unwrap(), 0, calldata.span(), false
+                    ).unwrap();
+
+                    let mut contract0 = IHelloDispatcher { contract_address: address0 };
+
+                    assert_eq!(@contract0.get(), @100, "contract0.get() == 100");
+                    @contract0.increase();
+                    assert_eq!(@contract0.get(), @102, "contract0.get() == 102");
+                }
+            }
+
+        "#})
+        .build(&project);
+
+    Scarb::quick_snapbox()
+        .arg("cairo-test")
+        // Disable output from Cargo.
+        .env("CARGO_TERM_QUIET", "true")
+        .current_dir(&project)
+        .assert()
+        .success()
+        .stdout_matches(indoc! {r#"
+        [..] Compiling some v1.0.0 ([..]Scarb.toml)
+        [..] Compiling test(hello_unittest) hello v1.0.0 ([..]Scarb.toml)
+        [..]Finished `dev` profile target(s) in [..]
+        testing hello ...
+        running 1 test
+        test hello::tests::test_flow ... ok (gas usage est.: [..])
+        test result: ok. 1 passed; 0 failed; 0 ignored; 0 filtered out;
+
+        "#});
+}
+
+#[test]
+fn can_emit_plugin_warning() {
     let temp = TempDir::new().unwrap();
     let t = temp.child("some");
     CairoPluginProjectBuilder::default()
         .lib_rs(indoc! {r#"
         use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, Diagnostic};

         #[attribute_macro]
         pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult {
             let diag = Diagnostic::warn("Some warning from macro.");
             ProcMacroResult::new(token_stream)
                 .with_diagnostics(diag.into())
         }
         "#})
         .build(&t);
     let project = temp.child("hello");
     ProjectBuilder::start()
         .name("hello")
         .version("1.0.0")
         .dep("some", &t)
         .lib_cairo(indoc! {r#"
             #[some]
             fn f() -> felt252 { 12 }
         "#})
         .build(&project);

     Scarb::quick_snapbox()
         .arg("build")
         // Disable output from Cargo.
         .env("CARGO_TERM_QUIET", "true")
         .current_dir(&project)
         .assert()
         .success()
         .stdout_matches(indoc! {r#"
         [..] Compiling some v1.0.0 ([..]Scarb.toml)
         [..] Compiling hello v1.0.0 ([..]Scarb.toml)
         warn: Plugin diagnostic: Some warning from macro.
          --> [..]lib.cairo:1:1
         #[some]
         ^*****^

         [..]Finished `dev` profile target(s) in [..]
+ "#}); +} - ProcMacroResult::new(TokenStream::new(code)) - .with_full_path_markers(full_path_markers) - } +#[test] +fn can_emit_plugin_error() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .lib_rs(indoc! {r#" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, Diagnostic}; - #[post_process] - pub fn callback(context: PostProcessContext) { - println!("{:?}", context.full_path_markers); + #[attribute_macro] + pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let diag = Diagnostic::error("Some error from macro."); + ProcMacroResult::new(token_stream) + .with_diagnostics(diag.into()) } - "##}) + "#}) .build(&t); - let project = temp.child("hello"); ProjectBuilder::start() .name("hello") .version("1.0.0") - .dep_starknet() .dep("some", &t) .lib_cairo(indoc! {r#" #[some] - fn main() -> felt252 { 12 } + fn f() -> felt252 { 12 } "#}) .build(&project); @@ -850,29 +793,34 @@ fn can_resolve_full_path_markers() { .env("CARGO_TERM_QUIET", "true") .current_dir(&project) .assert() - .success() + .failure() .stdout_matches(indoc! {r#" - [..]Compiling some v1.0.0 ([..]Scarb.toml) - [..]Compiling hello v1.0.0 ([..]Scarb.toml) - [FullPathMarker { key: "some-key", full_path: "hello::main" }] - [..]Finished `dev` profile target(s) in [..] + [..] Compiling some v1.0.0 ([..]Scarb.toml) + [..] Compiling hello v1.0.0 ([..]Scarb.toml) + error: Plugin diagnostic: Some error from macro. + --> [..]lib.cairo:1:1 + #[some] + ^*****^ + + error: could not compile `hello` due to previous error "#}); } #[test] -fn can_implement_inline_macro() { +fn diags_from_generated_code_mapped_correctly() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); CairoPluginProjectBuilder::default() - .lib_rs(indoc! {r##" - use cairo_lang_macro::{ProcMacroResult, TokenStream, inline_macro}; + .lib_rs(indoc! {r#" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, Diagnostic}; - #[inline_macro] - pub fn some(token_stream: TokenStream) -> ProcMacroResult { - assert_eq!(token_stream.to_string(), "()"); - ProcMacroResult::new(TokenStream::new("34".to_string())) + #[attribute_macro] + pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let diag = Diagnostic::error("Some error from macro."); + ProcMacroResult::new(token_stream) + .with_diagnostics(diag.into()) } - "##}) + "#}) .build(&t); let project = temp.child("hello"); ProjectBuilder::start() @@ -880,43 +828,110 @@ fn can_implement_inline_macro() { .version("1.0.0") .dep("some", &t) .lib_cairo(indoc! {r#" - fn main() -> felt252 { - let x = some!(); + #[cfg(target: 'lib')] + #[some] + fn test_increase_balance() { + i_don_exist(); + } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .failure() + .stdout_matches(indoc! {r#" + [..] Compiling some v1.0.0 ([..]Scarb.toml) + [..] Compiling hello v1.0.0 ([..]Scarb.toml) + error: Plugin diagnostic: Some error from macro. + --> [..]lib.cairo:2:1 + #[some] + ^*****^ + + error: Function not found. + --> [..]lib.cairo:4:5 + i_don_exist(); + ^*********^ + + error: could not compile `hello` due to previous error + "#}); +} + +#[test] +fn code_mappings_preserve_attribute_error_locations() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .lib_rs(indoc! 
{r#" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, TokenTree, Token, TextSpan}; + + #[attribute_macro] + pub fn some(_attr: TokenStream, mut token_stream: TokenStream) -> ProcMacroResult { + let token_stream_length = token_stream.to_string().len() as u32; + token_stream.tokens.push(TokenTree::Ident(Token::new(" ", TextSpan { start: token_stream_length + 1, end: token_stream_length + 5 }))); + ProcMacroResult::new(token_stream) + } + "#}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[some] + fn f() -> felt252 { + let x = 1; + x = 2; x } "#}) .build(&project); Scarb::quick_snapbox() - .arg("cairo-run") + .arg("build") // Disable output from Cargo. .env("CARGO_TERM_QUIET", "true") .current_dir(&project) .assert() - .success() + .failure() .stdout_matches(indoc! {r#" [..] Compiling some v1.0.0 ([..]Scarb.toml) [..] Compiling hello v1.0.0 ([..]Scarb.toml) - [..]Finished `dev` profile target(s) in [..] - [..]Running hello - Run completed successfully, returning [34] + error: Cannot assign to an immutable variable. + --> [..]lib.cairo[proc_some]:3:5 + x = 2; + ^***^ + note: this error originates in the attribute macro: `some` + + error: could not compile `hello` due to previous error "#}); } #[test] -fn empty_inline_macro_result() { +fn code_mappings_preserve_inline_macro_error_locations() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); CairoPluginProjectBuilder::default() .lib_rs(indoc! {r##" - use cairo_lang_macro::{ProcMacroResult, TokenStream, inline_macro}; + use cairo_lang_macro::{inline_macro, ProcMacroResult, TokenStream, TokenTree, Token, TextSpan}; #[inline_macro] pub fn some(_token_stream: TokenStream) -> ProcMacroResult { - ProcMacroResult::new(TokenStream::empty()) + let mut tokens = Vec::new(); + tokens.push(TokenTree::Ident(Token::new( + "undefined".to_string(), + TextSpan::new(0, 9), + ))); + + ProcMacroResult::new(TokenStream::new(tokens)) } "##}) .build(&t); + let project = temp.child("hello"); ProjectBuilder::start() .name("hello") @@ -932,7 +947,6 @@ fn empty_inline_macro_result() { Scarb::quick_snapbox() .arg("build") - // Disable output from Cargo. .env("CARGO_TERM_QUIET", "true") .current_dir(&project) .assert() @@ -940,22 +954,22 @@ fn empty_inline_macro_result() { .stdout_matches(indoc! {r#" [..] Compiling some v1.0.0 ([..]Scarb.toml) [..] Compiling hello v1.0.0 ([..]Scarb.toml) - error: Inline macro `some` failed. - --> [..]lib.cairo:2:14 - let _x = some!(); - ^*****^ - + error: Identifier not found. + --> [..]lib.cairo:1:1 + fn main() -> felt252 { + ^*******^ + error: could not compile `hello` due to previous error "#}); } #[test] -fn can_implement_derive_macro() { +fn code_mappings_preserve_derive_error_locations() { let temp = TempDir::new().unwrap(); let t = temp.child("some"); CairoPluginProjectBuilder::default() .lib_rs(indoc! 
{r##"
-        use cairo_lang_macro::{derive_macro, ProcMacroResult, TokenStream};
+        use cairo_lang_macro::{derive_macro, ProcMacroResult, TokenStream, TokenTree, Token, TextSpan};

         #[derive_macro]
         pub fn custom_derive(token_stream: TokenStream) -> ProcMacroResult {
@@ -972,13 +986,21 @@
                 .trim()
                 .to_string();

-            let token_stream = TokenStream::new(indoc::formatdoc!{r#"
-                impl SomeImpl of Hello<{name}> {{
-                    fn world(self: @{name}) -> u32 {{
-                        32
+            let code = indoc::formatdoc!{r#"
+                impl SomeImpl{name} of Hello<{name}> {{
+                    fn world(self: @{name}) -> u8 {{
+                        256
                     }}
                 }}
-            "#});
+            "#};
+
+            let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new(
+                code.clone(),
+                TextSpan {
+                    start: 0,
+                    end: code.len() as u32,
+                },
+            ))]);

             ProcMacroResult::new(token_stream)
         }
@@ -993,540 +1015,43 @@
         .dep("some", &t)
         .lib_cairo(indoc! {r#"
             trait Hello<T> {
-                fn world(self: @T) -> u32;
+                fn world(self: @T) -> u8;
             }

             #[derive(CustomDerive, Drop)]
             struct SomeType {}

-            fn main() -> u32 {
+            #[derive(CustomDerive, Drop)]
+            struct AnotherType {}
+
+            fn main() -> u8 {
                 let a = SomeType {};
                 a.world()
             }
         "#})
         .build(&project);

-    Scarb::quick_snapbox()
-        .arg("cairo-run")
-        // Disable output from Cargo.
-        .env("CARGO_TERM_QUIET", "true")
-        .current_dir(&project)
-        .assert()
-        .success()
-        .stdout_matches(indoc! {r#"
-        [..] Compiling some v1.0.0 ([..]Scarb.toml)
-        [..] Compiling hello v1.0.0 ([..]Scarb.toml)
-        [..]Finished `dev` profile target(s) in [..]
-        [..]Running hello
-        Run completed successfully, returning [32]
-        "#});
-}
-
-#[test]
-fn can_use_both_derive_and_attr() {
-    let temp = TempDir::new().unwrap();
-    let t = temp.child("some");
-    CairoPluginProjectBuilder::default()
-        .lib_rs(indoc! {r##"
-        use cairo_lang_macro::{derive_macro, attribute_macro, ProcMacroResult, TokenStream};
-
-        #[attribute_macro]
-        pub fn first_attribute(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult {
-            ProcMacroResult::new(TokenStream::new(
-                token_stream.to_string()
-                    .replace("SomeType", "OtherType")
-            ))
-        }
-
-        #[attribute_macro]
-        pub fn second_attribute(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult {
-            let token_stream = TokenStream::new(
-                token_stream.to_string().replace("OtherType", "RenamedStruct")
-            );
-            ProcMacroResult::new(TokenStream::new(
-                format!("#[derive(Drop)]\n{token_stream}")
-            ))
-        }
-
-        #[derive_macro]
-        pub fn custom_derive(_token_stream: TokenStream) -> ProcMacroResult {
-            ProcMacroResult::new(TokenStream::new(
-                indoc::formatdoc!{r#"
-                impl SomeImpl of Hello<RenamedStruct> {{
-                    fn world(self: @RenamedStruct) -> u32 {{
-                        32
-                    }}
-                }}
-                "#}
-            ))
-        }
-        "##})
-        .add_dep(r#"indoc = "*""#)
-        .build(&t);
-
-    let project = temp.child("hello");
-    ProjectBuilder::start()
-        .name("hello")
-        .version("1.0.0")
-        .dep("some", &t)
-        .lib_cairo(indoc! {r#"
-            trait Hello<T> {
-                fn world(self: @T) -> u32;
-            }
-
-            #[first_attribute]
-            #[derive(CustomDerive)]
-            #[second_attribute]
-            struct SomeType {}
-
-            fn main() -> u32 {
-                let a = RenamedStruct {};
-                a.world()
-            }
-        "#})
-        .build(&project);
-
-    Scarb::quick_snapbox()
-        .arg("cairo-run")
-        // Disable output from Cargo.
-        .env("CARGO_TERM_QUIET", "true")
-        .current_dir(&project)
-        .assert()
-        .success()
-        .stdout_matches(indoc! {r#"
-        [..] Compiling some v1.0.0 ([..]Scarb.toml)
-        [..] Compiling hello v1.0.0 ([..]Scarb.toml)
-        [..]Finished `dev` profile target(s) in [..]
-        [..]Running hello
-        Run completed successfully, returning [32]
-        "#});
-}
-
-#[test]
-fn can_read_attribute_args() {
-    let temp = TempDir::new().unwrap();
-    let t = temp.child("some");
-    CairoPluginProjectBuilder::default()
-        .lib_rs(indoc! {r##"
-        use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro};
-
-        #[attribute_macro]
-        pub fn some(attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult {
-            println!("{}", attr);
-            ProcMacroResult::new(token_stream)
-        }
-        "##})
-        .build(&t);
-
-    let project = temp.child("hello");
-    ProjectBuilder::start()
-        .name("hello")
-        .version("1.0.0")
-        .dep_starknet()
-        .dep("some", &t)
-        .lib_cairo(indoc! {r#"
-            #[some(
-                first: "aaa",
-                second: "bbb",
-            )]
-            fn main() -> felt252 { 12 }
-        "#})
-        .build(&project);
-
-    Scarb::quick_snapbox()
-        .arg("build")
-        // Disable output from Cargo.
-        .env("CARGO_TERM_QUIET", "true")
-        .current_dir(&project)
-        .assert()
-        .success()
-        .stdout_matches(indoc! {r#"
-        [..]Compiling some v1.0.0 ([..]Scarb.toml)
-        [..]Compiling hello v1.0.0 ([..]Scarb.toml)
-        (
-            first: "aaa",
-            second: "bbb",
-        )
-        [..]Finished `dev` profile target(s) in [..]
-        "#});
-}
-
-#[test]
-fn can_create_executable_attribute() {
-    let temp = TempDir::new().unwrap();
-    let t = temp.child("some");
-    CairoPluginProjectBuilder::default()
-        .lib_rs(indoc! {r##"
-        use cairo_lang_macro::executable_attribute;
-
-        executable_attribute!("some");
-        "##})
-        .build(&t);
-
-    let project = temp.child("hello");
-    ProjectBuilder::start()
-        .name("hello")
-        .version("1.0.0")
-        .dep_starknet()
-        .dep("some", &t)
-        .lib_cairo(indoc! {r#"
-            #[some]
-            fn main() -> felt252 { 12 }
-        "#})
-        .build(&project);
-
-    Scarb::quick_snapbox()
-        .arg("build")
-        // Disable output from Cargo.
-        .env("CARGO_TERM_QUIET", "true")
-        .current_dir(&project)
-        .assert()
-        .success()
-        .stdout_matches(indoc! {r#"
-        [..]Compiling some v1.0.0 ([..]Scarb.toml)
-        [..]Compiling hello v1.0.0 ([..]Scarb.toml)
-        [..]Finished `dev` profile target(s) in [..]
-        "#});
-    let sierra = project
-        .child("target")
-        .child("dev")
-        .child("hello.sierra.json")
-        .read_to_string();
-    let sierra = serde_json::from_str::<VersionedProgram>(&sierra).unwrap();
-    let sierra = sierra.into_v1().unwrap();
-    let executables = sierra.debug_info.unwrap().executables;
-    assert_eq!(executables.len(), 1);
-    let executables = executables.get("some").unwrap();
-    assert_eq!(executables.len(), 1);
-    let fid = executables.first().unwrap().clone();
-    assert_eq!(fid.clone().debug_name.unwrap(), "hello::main");
-    assert!(sierra
-        .program
-        .funcs
-        .iter()
-        .any(|f| f.id.clone() == fid.clone()));
-}
-
-#[test]
-fn executable_name_cannot_clash_attr() {
-    let temp = TempDir::new().unwrap();
-    let t = temp.child("some");
-    CairoPluginProjectBuilder::default()
-        .lib_rs(indoc! {r##"
-        use cairo_lang_macro::{executable_attribute, attribute_macro, TokenStream, ProcMacroResult};
-
-        executable_attribute!("some");
-
-        #[attribute_macro]
-        fn some(_args: TokenStream, input: TokenStream) -> ProcMacroResult {
-            ProcMacroResult::new(input)
-        }
-        "##})
-        .build(&t);
-
-    let project = temp.child("hello");
-    ProjectBuilder::start()
-        .name("hello")
-        .version("1.0.0")
-        .dep_starknet()
-        .dep("some", &t)
-        .lib_cairo(indoc! {r#"
-            #[some]
-            fn main() -> felt252 { 12 }
-        "#})
-        .build(&project);
-
-    Scarb::quick_snapbox()
-        .arg("build")
-        // Disable output from Cargo.
-        .env("CARGO_TERM_QUIET", "true")
-        .current_dir(&project)
-        .assert()
-        .failure()
-        .stdout_matches(indoc! {r#"
-        [..]Compiling some v1.0.0 ([..]Scarb.toml)
-        [..]Compiling hello v1.0.0 ([..]Scarb.toml)
-        error: duplicate expansions defined for procedural macro some v1.0.0 ([..]Scarb.toml): some
-        "#});
-}
-
-#[test]
-fn can_be_expanded() {
-    let temp = TempDir::new().unwrap();
-    let t = temp.child("some");
-    CairoPluginProjectBuilder::default()
-        .lib_rs(indoc! {r##"
-        use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, derive_macro};
-
-        #[attribute_macro]
-        pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult {
-            let token_stream = TokenStream::new(
-                token_stream
-                    .to_string()
-                    .replace("12", "34")
-            );
-            ProcMacroResult::new(token_stream)
-        }
-
-        #[derive_macro]
-        pub fn custom_derive(token_stream: TokenStream) -> ProcMacroResult {
-            let name = token_stream
-                .clone()
-                .to_string()
-                .lines()
-                .find(|l| l.starts_with("struct"))
-                .unwrap()
-                .to_string()
-                .replace("struct", "")
-                .replace("}", "")
-                .replace("{", "")
-                .trim()
-                .to_string();
-
-            let token_stream = TokenStream::new(indoc::formatdoc!{r#"
-                impl SomeImpl of Hello<{name}> {{
-                    fn world(self: @{name}) -> u32 {{
-                        32
-                    }}
-                }}
-            "#});
-
-            ProcMacroResult::new(token_stream)
-        }
-        "##})
-        .add_dep(r#"indoc = "*""#)
-        .build(&t);
-    let project = temp.child("hello");
-    ProjectBuilder::start()
-        .name("hello")
-        .version("1.0.0")
-        .dep("some", &t)
-        .lib_cairo(indoc! {r#"
-            trait Hello<T> {
-                fn world(self: @T) -> u32;
-            }
-
-            #[derive(CustomDerive, Drop)]
-            struct SomeType {}
-
-            #[some]
-            fn main() -> u32 {
-                let x = 12;
-                let a = SomeType {};
-                a.world() + x
-            }
-        "#})
-        .build(&project);
-
-    Scarb::quick_snapbox()
-        .arg("expand")
-        // Disable output from Cargo.
-        .env("CARGO_TERM_QUIET", "true")
-        .current_dir(&project)
-        .assert()
-        .success();
-
-    assert_eq!(
-        project.child("target/dev").files(),
-        vec!["hello.expanded.cairo"]
-    );
-    let expanded = project
-        .child("target/dev/hello.expanded.cairo")
-        .read_to_string();
-    snapbox::assert_eq(
-        indoc! {r#"
-        mod hello {
-            trait Hello<T> {
-                fn world(self: @T) -> u32;
-            }
-
-            #[derive(CustomDerive, Drop)]
-            struct SomeType {}
-            impl SomeTypeDrop of core::traits::Drop<SomeType>;
-            impl SomeImpl of Hello<SomeType> {
-                fn world(self: @SomeType) -> u32 {
-                    32
-                }
-            }
-            fn main() -> u32 {
-                let x = 34;
-                let a = SomeType {};
-                a.world() + x
-            }
-        }
-        "#},
-        expanded,
-    );
-}
-
-#[test]
-fn can_expand_trait_inner_func_attrr() {
-    let temp = TempDir::new().unwrap();
-    let t = temp.child("some");
-    CairoPluginProjectBuilder::default()
-        .lib_rs(indoc! {r##"
-        use cairo_lang_macro::{attribute_macro, ProcMacroResult, TokenStream};
-
-        #[attribute_macro]
-        pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult {
-            ProcMacroResult::new(TokenStream::new(
-                token_stream.to_string()
-                    .replace("hello", "world")
-                    .replace("12", "34")
-            ))
-        }
-        "##})
-        .build(&t);
-
-    let project = temp.child("hello");
-    ProjectBuilder::start()
-        .name("hello")
-        .version("1.0.0")
-        .dep("some", &t)
-        .lib_cairo(indoc! {r#"
-            trait Hello<T> {
-                #[some]
-                fn hello(self: @T) -> u32 {
-                    12
-                }
-            }
-
-            #[derive(Drop)]
-            struct SomeStruct {}
-
-            impl SomeImpl of Hello<SomeStruct> {}
-
-            fn main() -> u32 {
-                let a = SomeStruct {};
-                a.world()
-            }
-        "#})
-        .build(&project);
-
-    Scarb::quick_snapbox()
-        .arg("cairo-run")
-        // Disable output from Cargo.
-        .env("CARGO_TERM_QUIET", "true")
-        .current_dir(&project)
-        .assert()
-        .success()
-        .stdout_matches(indoc! {r#"
-        [..] Compiling some v1.0.0 ([..]Scarb.toml)
-        [..] Compiling hello v1.0.0 ([..]Scarb.toml)
-        [..]Finished `dev` profile target(s) in [..]
-        [..]Running hello
-        Run completed successfully, returning [34]
-        "#});
-}
-
-#[test]
-fn can_expand_impl_inner_func_attrr() {
-    let temp = TempDir::new().unwrap();
-    let t = temp.child("some");
-    CairoPluginProjectBuilder::default()
-        .lib_rs(indoc! {r##"
-        use cairo_lang_macro::{attribute_macro, ProcMacroResult, TokenStream};
-
-        #[attribute_macro]
-        pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult {
-            ProcMacroResult::new(TokenStream::new(
-                token_stream.to_string()
-                    .replace("1", "2")
-            ))
-        }
-        "##})
-        .build(&t);
-
-    let project = temp.child("hello");
-    ProjectBuilder::start()
-        .name("hello")
-        .version("1.0.0")
-        .dep_starknet()
-        .dep_cairo_test()
-        .dep("some", &t)
-        .manifest_extra(indoc! {r#"
-            [[target.starknet-contract]]
-        "#})
-        .lib_cairo(indoc! {r#"
-            #[starknet::interface]
-            trait IHello<T> {
-                fn get(self: @T) -> u128;
-                fn increase(ref self: T);
-            }
-
-            #[starknet::contract]
-            mod Hello {
-                use starknet::storage::{StoragePointerReadAccess, StoragePointerWriteAccess};
-                use starknet::get_contract_address;
-                use super::IHello;
-
-                #[storage]
-                struct Storage {
-                    counter: u128
-                }
-
-                #[constructor]
-                fn constructor(ref self: ContractState, value_: u128) {
-                    self.counter.write(value_);
-                }
-
-                #[abi(embed_v0)]
-                impl IncImpl of IHello<ContractState> {
-                    fn get(self: @ContractState) -> u128 {
-                        self.counter.read()
-                    }
-
-                    #[some]
-                    fn increase(ref self: ContractState) {
-                        self.counter.write( self.counter.read() + 1 );
-                    }
-                }
-            }
-
-            #[cfg(test)]
-            mod tests {
-                use array::ArrayTrait;
-                use core::result::ResultTrait;
-                use core::traits::Into;
-                use option::OptionTrait;
-                use starknet::syscalls::deploy_syscall;
-                use traits::TryInto;
-
-                use super::{IHello, Hello, IHelloDispatcher, IHelloDispatcherTrait};
-
-                #[test]
-                fn test_flow() {
-                    let calldata = array![100];
-                    let (address0, _) = deploy_syscall(
-                        Hello::TEST_CLASS_HASH.try_into().unwrap(), 0, calldata.span(), false
-                    ).unwrap();
-
-                    let mut contract0 = IHelloDispatcher { contract_address: address0 };
-
-                    assert_eq!(@contract0.get(), @100, "contract0.get() == 100");
-                    @contract0.increase();
-                    assert_eq!(@contract0.get(), @102, "contract0.get() == 102");
-                }
-            }
-
-        "#})
-        .build(&project);
-
-    Scarb::quick_snapbox()
-        .arg("cairo-test")
-        // Disable output from Cargo.
-        .env("CARGO_TERM_QUIET", "true")
-        .current_dir(&project)
-        .assert()
-        .success()
-        .stdout_matches(indoc! {r#"
-        [..] Compiling some v1.0.0 ([..]Scarb.toml)
-        [..] Compiling test(hello_unittest) hello v1.0.0 ([..]Scarb.toml)
-        [..]Finished `dev` profile target(s) in [..]
-        testing hello ...
-        running 1 test
-        test hello::tests::test_flow ... ok (gas usage est.: [..])
-        test result: ok. 1 passed; 0 failed; 0 ignored; 0 filtered out;
-
-        "#});
-}

     Scarb::quick_snapbox()
         .arg("build")
         .env("CARGO_TERM_QUIET", "true")
         .current_dir(&project)
         .assert()
         .failure()
         .stdout_matches(indoc! {r#"
         [..] Compiling some v1.0.0 ([..]Scarb.toml)
         [..] Compiling hello v1.0.0 ([..]Scarb.toml)
+        error: The value does not fit within the range of type core::integer::u8.
+         --> [..]lib.cairo:1:1
+        trait Hello<T> {
+        ^**************^
+        note: this error originates in the derive macro: `custom_derive`
+
+        error: The value does not fit within the range of type core::integer::u8.
+         --> [..]lib.cairo:1:1
+        trait Hello<T> {
+        ^**************^
+        note: this error originates in the derive macro: `custom_derive`
+
+        error: could not compile `hello` due to previous error
+        "#});
+}
diff --git a/scarb/tests/proc_macro_metadata.rs b/scarb/tests/proc_macro_metadata.rs
new file mode 100644
index 000000000..ee120a142
--- /dev/null
+++ b/scarb/tests/proc_macro_metadata.rs
@@ -0,0 +1,190 @@
+use assert_fs::fixture::PathChild;
+use assert_fs::TempDir;
+use indoc::indoc;
+use scarb_test_support::cairo_plugin_project_builder::CairoPluginProjectBuilder;
+use scarb_test_support::command::Scarb;
+use scarb_test_support::project_builder::ProjectBuilder;
+
+#[test]
+fn can_return_aux_data_from_plugin() {
+    let temp = TempDir::new().unwrap();
+    let t = temp.child("some");
+    CairoPluginProjectBuilder::default()
+        .lib_rs(indoc! {r##"
+        use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, AuxData, PostProcessContext, post_process};
+        use serde::{Serialize, Deserialize};
+
+        #[derive(Debug, Serialize, Deserialize)]
+        struct SomeMacroDataFormat {
+            msg: String
+        }
+
+        #[attribute_macro]
+        pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult {
+            let value = SomeMacroDataFormat { msg: "Hello from some macro!".to_string() };
+            let value = serde_json::to_string(&value).unwrap();
+            let value: Vec<u8> = value.into_bytes();
+            let aux_data = AuxData::new(value);
+            ProcMacroResult::new(token_stream).with_aux_data(aux_data)
+        }
+
+        #[post_process]
+        pub fn callback(context: PostProcessContext) {
+            let aux_data = context.aux_data.into_iter()
+                .map(|aux_data| {
+                    let value: Vec<u8> = aux_data.into();
+                    let aux_data: SomeMacroDataFormat = serde_json::from_slice(&value).unwrap();
+                    aux_data
+                })
+                .collect::<Vec<_>>();
+            println!("{:?}", aux_data);
+        }
+
+        #[post_process]
+        pub fn some_no_op_callback(context: PostProcessContext) {
+            drop(context.aux_data);
+        }
+        "##})
+        .add_dep(r#"serde = { version = "*", features = ["derive"] }"#)
+        .add_dep(r#"serde_json = "*""#)
+        .build(&t);
+    let project = temp.child("hello");
+    ProjectBuilder::start()
+        .name("hello")
+        .version("1.0.0")
+        .dep_starknet()
+        .dep("some", &t)
+        .lib_cairo(indoc! {r#"
+            #[some]
+            fn main() -> felt252 { 12 }
+        "#})
+        .build(&project);
+
+    Scarb::quick_snapbox()
+        .arg("build")
+        // Disable output from Cargo.
+        .env("CARGO_TERM_QUIET", "true")
+        .current_dir(&project)
+        .assert()
+        .success()
+        .stdout_matches(indoc! {r#"
+        [..]Compiling some v1.0.0 ([..]Scarb.toml)
+        [..]Compiling hello v1.0.0 ([..]Scarb.toml)
+        [SomeMacroDataFormat { msg: "Hello from some macro!" }]
+        [..]Finished `dev` profile target(s) in [..]
+        "#});
+}
+
+#[test]
+fn can_read_token_stream_metadata() {
+    let temp = TempDir::new().unwrap();
+    let t = temp.child("some");
+    CairoPluginProjectBuilder::default()
+        .lib_rs(indoc! {r##"
+        use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro};
+
+        #[attribute_macro]
+        pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult {
+            println!("{:#?}", token_stream.metadata());
+            ProcMacroResult::new(token_stream)
+        }
+        "##})
+        .build(&t);
+
+    let project = temp.child("hello");
+    ProjectBuilder::start()
+        .name("hello")
+        .version("1.0.0")
+        .dep_starknet()
+        .dep("some", &t)
+        .lib_cairo(indoc! {r#"
+            #[some]
+            fn main() -> felt252 { 12 }
+        "#})
+        .build(&project);
+
+    Scarb::quick_snapbox()
+        .arg("build")
+        // Disable output from Cargo.
+        .env("CARGO_TERM_QUIET", "true")
+        .current_dir(&project)
+        .assert()
+        .success()
+        .stdout_matches(indoc!
{r#" + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + TokenStreamMetadata { + original_file_path: Some( + "[..]lib.cairo", + ), + file_id: Some( + "[..]", + ), + edition: Some( + "[..]", + ), + } + [..]Finished `dev` profile target(s) in [..] + "#}); +} + +#[test] +fn can_resolve_full_path_markers() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, post_process, PostProcessContext, TokenTree, Token, TextSpan}; + + #[attribute_macro] + pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let full_path_markers = vec!["some-key".to_string()]; + + let code = format!( + r#"#[macro::full_path_marker("some-key")] {}"#, + token_stream.to_string().replace("12", "34") + ); + + ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new( + code.clone(), + TextSpan { + start: 0, + end: code.len() as u32, + }, + ))]) + ).with_full_path_markers(full_path_markers) + } + + #[post_process] + pub fn callback(context: PostProcessContext) { + println!("{:?}", context.full_path_markers); + } + "##}) + .build(&t); + + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep_starknet() + .dep("some", &t) + .lib_cairo(indoc! {r#" + #[some] + fn main() -> felt252 { 12 } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("build") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .success() + .stdout_matches(indoc! {r#" + [..]Compiling some v1.0.0 ([..]Scarb.toml) + [..]Compiling hello v1.0.0 ([..]Scarb.toml) + [FullPathMarker { key: "some-key", full_path: "hello::main" }] + [..]Finished `dev` profile target(s) in [..] + "#}); +} diff --git a/scarb/tests/proc_macro_quote.rs b/scarb/tests/proc_macro_quote.rs new file mode 100644 index 000000000..f39b17a7a --- /dev/null +++ b/scarb/tests/proc_macro_quote.rs @@ -0,0 +1,370 @@ +use assert_fs::fixture::PathChild; +use assert_fs::TempDir; +use indoc::indoc; +use scarb_test_support::cairo_plugin_project_builder::CairoPluginProjectBuilder; +use scarb_test_support::command::Scarb; +use scarb_test_support::fsx::ChildPathEx; +use scarb_test_support::project_builder::ProjectBuilder; + +#[test] +fn can_use_quote() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .add_primitive_token_dep() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, inline_macro, quote}; + #[inline_macro] + pub fn some(_token_stream: TokenStream) -> ProcMacroResult { + let tokens = quote! { + 5 + }; + ProcMacroResult::new(tokens) + } + "##}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + fn main() -> felt252 { some!() } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("cairo-run") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .stdout_matches(indoc! {r#" + [..] Compiling some v1.0.0 [..] + [..] Compiling hello v1.0.0 [..] + [..] Finished `dev` profile [..] + [..] 
Running hello + Run completed successfully, returning [5] + "#}) + .success(); +} + +#[test] +fn can_use_quote_with_token_tree() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .add_primitive_token_dep() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, inline_macro, TokenTree, Token, TextSpan, quote}; + #[inline_macro] + pub fn some(_token_stream: TokenStream) -> ProcMacroResult { + let token = TokenTree::Ident(Token::new("5".to_string(), TextSpan::call_site())); + let tokens = quote! { + #token + }; + ProcMacroResult::new(tokens) + } + "##}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + fn main() -> felt252 { + some!() + } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("cairo-run") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .stdout_matches(indoc! {r#" + [..] Compiling some v1.0.0 [..] + [..] Compiling hello v1.0.0 [..] + [..] Finished `dev` profile [..] + [..] Running hello + Run completed successfully, returning [5] + "#}) + .success(); +} + +#[test] +fn can_use_quote_with_token_stream() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .add_primitive_token_dep() + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, inline_macro, TokenTree, Token, TextSpan, quote}; + #[inline_macro] + pub fn some(_token_stream: TokenStream) -> ProcMacroResult { + let token = TokenStream::new(vec![TokenTree::Ident(Token::new("5".to_string(), TextSpan::call_site()))]); + let tokens = quote! { + #token + }; + ProcMacroResult::new(tokens) + } + "##}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! {r#" + fn main() -> felt252 { + some!() + } + "#}) + .build(&project); + + Scarb::quick_snapbox() + .arg("cairo-run") + // Disable output from Cargo. + .env("CARGO_TERM_QUIET", "true") + .current_dir(&project) + .assert() + .stdout_matches(indoc! {r#" + [..] Compiling some v1.0.0 [..] + [..] Compiling hello v1.0.0 [..] + [..] Finished `dev` profile [..] + [..] Running hello + Run completed successfully, returning [5] + "#}) + .success(); +} + +#[test] +fn can_use_quote_with_syntax_node() { + let temp = TempDir::new().unwrap(); + let t = temp.child("some"); + CairoPluginProjectBuilder::default() + .add_primitive_token_dep() + .add_dep(r#"cairo-lang-syntax = "2.9.1""#) + .add_dep(r#"cairo-lang-parser = "2.9.1""#) + .lib_rs(indoc! {r##" + use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, quote}; + use cairo_lang_parser::utils::SimpleParserDatabase; + use cairo_lang_syntax::node::with_db::SyntaxNodeWithDb; + #[attribute_macro] + pub fn some(_attr: TokenStream, token_stream: TokenStream) -> ProcMacroResult { + let db_val = SimpleParserDatabase::default(); + let db = &db_val; + let code = r#" + fn main() -> felt252 { + 5 + } + "#; + let syntax_node = db.parse_virtual(code).unwrap(); + let syntax_node_with_db = SyntaxNodeWithDb::new(&syntax_node, db); + let tokens = quote! { + #syntax_node_with_db + }; + ProcMacroResult::new(tokens) + } + "##}) + .build(&t); + let project = temp.child("hello"); + ProjectBuilder::start() + .name("hello") + .version("1.0.0") + .dep("some", &t) + .lib_cairo(indoc! 
{r#"
+            #[some]
+            fn main() -> u32 {
+                // completely wrong type
+                true
+            }
+        "#})
+        .build(&project);
+
+    Scarb::quick_snapbox()
+        .arg("expand")
+        // Disable output from Cargo.
+        .env("CARGO_TERM_QUIET", "true")
+        .current_dir(&project)
+        .assert()
+        .success();
+
+    assert_eq!(
+        project.child("target/dev").files(),
+        vec!["hello.expanded.cairo"]
+    );
+
+    let expanded = project
+        .child("target/dev/hello.expanded.cairo")
+        .read_to_string();
+
+    snapbox::assert_eq(
+        indoc! {r#"
+        mod hello {
+            fn main() -> felt252 {
+                5
+            }
+        }
+        "#},
+        expanded,
+    );
+}
+
+#[test]
+fn can_use_quote_with_cairo_specific_syntax() {
+    let temp = TempDir::new().unwrap();
+    let t = temp.child("some");
+    CairoPluginProjectBuilder::default().add_primitive_token_dep()
+        .add_dep(r#"cairo-lang-syntax = "2.9.1""#)
+        .add_dep(r#"cairo-lang-parser = "2.9.1""#)
+        .lib_rs(indoc! {r##"
+        use cairo_lang_macro::{ProcMacroResult, TokenStream, attribute_macro, quote};
+        use cairo_lang_parser::utils::SimpleParserDatabase;
+        use cairo_lang_syntax::node::with_db::SyntaxNodeWithDb;
+        #[attribute_macro]
+        pub fn some(_attr: TokenStream, _token_stream: TokenStream) -> ProcMacroResult {
+            let db_val = SimpleParserDatabase::default();
+            let db = &db_val;
+            let code = r#"
+                #[derive(Drop)]
+                struct Rectangle {
+                    width: u64,
+                    height: u64,
+                }
+                #[derive(Drop, PartialEq)]
+                struct Square {
+                    side_length: u64,
+                }
+                impl RectangleIntoSquare of TryInto<Rectangle, Square> {
+                    fn try_into(self: Rectangle) -> Option<Square> {
+                        if self.height == self.width {
+                            Option::Some(Square { side_length: self.height })
+                        } else {
+                            Option::None
+                        }
+                    }
+                }
+                fn main() {
+                    let rectangle = Rectangle { width: 8, height: 8 };
+                    let result: Square = rectangle.try_into().unwrap();
+                    let expected = Square { side_length: 8 };
+                    assert!(
+                        result == expected,
+                        "Rectangle with equal width and height should be convertible to a square."
+                    );
+                    let rectangle = Rectangle { width: 5, height: 8 };
+                    let result: Option<Square> = rectangle.try_into();
+                    assert!(
+                        result.is_none(),
+                        "Rectangle with different width and height should not be convertible to a square."
+                    );
+                }
+            "#;
+            let syntax_node = db.parse_virtual(code).unwrap();
+            let syntax_node_with_db = SyntaxNodeWithDb::new(&syntax_node, db);
+            let tokens = quote! {
+                #syntax_node_with_db
+                trait Circle {
+                    fn print() -> ();
+                }
+                impl CircleImpl of Circle {
+                    fn print() -> () {
+                        println!("This is a circle!");
+                    }
+                }
+            };
+            ProcMacroResult::new(tokens)
+        }
+        "##})
+        .build(&t);
+    let project = temp.child("hello");
+    ProjectBuilder::start()
+        .name("hello")
+        .version("1.0.0")
+        .dep("some", &t)
+        .lib_cairo(indoc! {r#"
+            #[some]
+            fn main() -> u32 {
+                // completely wrong type
+                true
+            }
+        "#})
+        .build(&project);
+
+    Scarb::quick_snapbox()
+        .arg("expand")
+        // Disable output from Cargo.
+        .env("CARGO_TERM_QUIET", "true")
+        .current_dir(&project)
+        .assert()
+        .success();
+
+    assert_eq!(
+        project.child("target/dev").files(),
+        vec!["hello.expanded.cairo"]
+    );
+
+    let expanded = project
+        .child("target/dev/hello.expanded.cairo")
+        .read_to_string();
+
+    snapbox::assert_eq(
+        indoc! {r#"
+        mod hello {
+            #[derive(Drop)]
+            struct Rectangle {
+                width: u64,
+                height: u64,
+            }
+            #[derive(Drop, PartialEq)]
+            struct Square {
+                side_length: u64,
+            }
+            impl RectangleIntoSquare of TryInto<Rectangle, Square> {
+                fn try_into(self: Rectangle) -> Option<Square> {
+                    if self.height == self.width {
+                        Option::Some(Square { side_length: self.height })
+                    } else {
+                        Option::None
+                    }
+                }
+            }
+            fn main() {
+                let rectangle = Rectangle { width: 8, height: 8 };
+                let result: Square = rectangle.try_into().unwrap();
+                let expected = Square { side_length: 8 };
+                assert!(
+                    result == expected,
+                    "Rectangle with equal width and height should be convertible to a square.",
+                );
+                let rectangle = Rectangle { width: 5, height: 8 };
+                let result: Option<Square> = rectangle.try_into();
+                assert!(
+                    result.is_none(),
+                    "Rectangle with different width and height should not be convertible to a square.",
+                );
+            }
+            trait Circle {
+                fn print() -> ();
+            }
+            impl CircleImpl of Circle {
+                fn print() -> () {
+                    println!("This is a circle!");
+                }
+            }
+            impl RectangleDrop of core::traits::Drop<Rectangle>;
+            impl SquareDrop of core::traits::Drop<Square>;
+            impl SquarePartialEq of core::traits::PartialEq<Square> {
+                fn eq(lhs: @Square, rhs: @Square) -> bool {
+                    lhs.side_length == rhs.side_length
+                }
+            }
+        }
+        "#},
+        expanded,
+    );
+}
diff --git a/scarb/tests/proc_macro_server.rs b/scarb/tests/proc_macro_server.rs
index b01f865dc..a117cf5b8 100644
--- a/scarb/tests/proc_macro_server.rs
+++ b/scarb/tests/proc_macro_server.rs
@@ -1,6 +1,6 @@
 use assert_fs::prelude::PathChild;
 use assert_fs::TempDir;
-use cairo_lang_macro::TokenStream;
+use cairo_lang_macro::{TextSpan, Token, TokenStream, TokenTree};
 use scarb_proc_macro_server_types::methods::defined_macros::DefinedMacros;
 use scarb_proc_macro_server_types::methods::defined_macros::DefinedMacrosParams;
 use scarb_proc_macro_server_types::methods::expand::ExpandAttribute;
@@ -58,7 +58,14 @@ fn expand_attribute() {

     let output = input.replace(name, "very_new_name");

-    ProcMacroResult::new(TokenStream::new(output))
+    let span = TextSpan { start: 0, end: output.len() as u32 };
+    ProcMacroResult::new(
+        TokenStream::new(vec![
+            TokenTree::Ident(
+                Token::new(output, span)
+            )
+        ])
+    )
 }}
 "##;

@@ -82,14 +89,18 @@ fn expand_attribute() {
         .request_and_wait::<ExpandAttribute>(ExpandAttributeParams {
             attr: "rename_to_very_new_name".to_string(),
             args: TokenStream::empty(),
-            item: TokenStream::new("fn some_test_fn(){}".to_string()),
+            call_site: TextSpan::new(0, 0),
+            item: TokenStream::new(vec![TokenTree::Ident(Token::new(
+                "fn some_test_fn(){}",
+                TextSpan::new(0, 0),
+            ))]),
         })
         .unwrap();

     assert_eq!(response.diagnostics, vec![]);
     assert_eq!(
-        response.token_stream,
-        TokenStream::new("fn very_new_name(){}".to_string())
+        response.token_stream.to_string(),
+        "fn very_new_name(){}".to_string()
     );
 }

@@ -113,19 +124,23 @@ fn expand_derive() {
     let mut proc_macro_server = ProcMacroClient::new(&project);

-    let item = TokenStream::new("fn some_test_fn(){}".to_string());
+    let item = TokenStream::new(vec![TokenTree::Ident(Token::new(
+        "fn some_test_fn(){}",
+        TextSpan::new(0, 0),
+    ))]);

     let response = proc_macro_server
         .request_and_wait::<ExpandDerive>(ExpandDeriveParams {
             derives: vec!["some_derive".to_string()],
+            call_site: TextSpan::new(0, 0),
             item,
         })
         .unwrap();

     assert_eq!(response.diagnostics, vec![]);
     assert_eq!(
-        response.token_stream,
-        TokenStream::new("impl SomeImpl of SomeTrait {}".to_string())
+        response.token_stream.to_string(),
+        "impl SomeImpl of SomeTrait {}".to_string()
     );
 }

@@ -137,7 +152,15 @@ fn expand_inline() {
     let replace_all_15_with_25 = r#"
 #[inline_macro]
 pub fn replace_all_15_with_25(token_stream: TokenStream) -> ProcMacroResult {
-    ProcMacroResult::new(TokenStream::new(token_stream.to_string().replace("15", "25")))
+    let content = token_stream.to_string().replace("15", "25");
+    let span = TextSpan { start: 0, end: content.len() as u32 };
+    ProcMacroResult::new(
+        TokenStream::new(vec![
+            TokenTree::Ident(
+                Token::new(content, span)
+            )
+        ])
+    )
 }
 "#;

@@ -159,15 +182,17 @@ fn expand_inline() {
     let response = proc_macro_server
         .request_and_wait::<ExpandInline>(ExpandInlineMacroParams {
             name: "replace_all_15_with_25".to_string(),
-            args: TokenStream::new(
-                "struct A { field: 15 , other_field: macro_call!(12)}".to_string(),
-            ),
+            call_site: TextSpan::new(0, 0),
+            args: TokenStream::new(vec![TokenTree::Ident(Token::new(
+                "struct A { field: 15 , other_field: macro_call!(12)}",
+                TextSpan::new(0, 0),
+            ))]),
         })
         .unwrap();

     assert_eq!(response.diagnostics, vec![]);
     assert_eq!(
-        response.token_stream,
-        TokenStream::new("struct A { field: 25 , other_field: macro_call!(12)}".to_string())
+        response.token_stream.to_string(),
+        "struct A { field: 25 , other_field: macro_call!(12)}".to_string()
     );
 }
diff --git a/utils/scarb-proc-macro-server-types/Cargo.toml b/utils/scarb-proc-macro-server-types/Cargo.toml
index 6263e831e..25db1ecd3 100644
--- a/utils/scarb-proc-macro-server-types/Cargo.toml
+++ b/utils/scarb-proc-macro-server-types/Cargo.toml
@@ -10,6 +10,6 @@ license.workspace = true
 repository.workspace = true

 [dependencies]
-cairo-lang-macro = { version = "0.1", features = ["serde"] }
+cairo-lang-macro = { path = "../../plugins/cairo-lang-macro", version = "0.1", features = ["serde"] }
 serde.workspace = true
 serde_json.workspace = true
diff --git a/utils/scarb-proc-macro-server-types/src/methods/expand.rs b/utils/scarb-proc-macro-server-types/src/methods/expand.rs
index cb33541b9..dba2d6cb9 100644
--- a/utils/scarb-proc-macro-server-types/src/methods/expand.rs
+++ b/utils/scarb-proc-macro-server-types/src/methods/expand.rs
@@ -1,6 +1,6 @@
 use super::Method;
 use super::ProcMacroResult;
-use cairo_lang_macro::TokenStream;
+use cairo_lang_macro::{TextSpan, TokenStream};
 use serde::{Deserialize, Serialize};

 /// Parameters for expanding a specific attribute macro.
@@ -15,6 +15,8 @@ pub struct ExpandAttributeParams {
     pub args: TokenStream,
     /// The token stream representing the item on which the macro is applied.
     pub item: TokenStream,
+    /// Call site span.
+    pub call_site: TextSpan,
 }

 /// Represents a request to expand a single attribute macro.
@@ -36,6 +38,8 @@ pub struct ExpandDeriveParams {
     pub derives: Vec<String>,
     /// The token stream of the item to which the derive macros are applied.
     pub item: TokenStream,
+    /// Call site span.
+    pub call_site: TextSpan,
 }

 /// Represents a request to expand derive macros.
@@ -57,6 +61,8 @@ pub struct ExpandInlineMacroParams {
     pub name: String,
     /// The token stream representing arguments passed to the macro.
     pub args: TokenStream,
+    /// Call site span.
+    pub call_site: TextSpan,
 }

 /// Represents a request to expand a single inline macro.
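Reviewer note: the three `Expand*Params` structs above now share the same shape — a token-based stream plus an explicit `call_site` span. A minimal sketch of constructing such a request against the definitions in this diff (the helper name `inline_request` is illustrative only; client plumbing such as `request_and_wait` is elided):

use cairo_lang_macro::{TextSpan, Token, TokenStream, TokenTree};
use scarb_proc_macro_server_types::methods::expand::ExpandInlineMacroParams;

// Wrap the whole argument text in a single Ident token spanning it end to end,
// exactly as the updated expand_inline test does above.
fn inline_request(name: &str, source: &str) -> ExpandInlineMacroParams {
    let span = TextSpan { start: 0, end: source.len() as u32 };
    ExpandInlineMacroParams {
        name: name.to_string(),
        args: TokenStream::new(vec![TokenTree::Ident(Token::new(source, span))]),
        call_site: TextSpan::new(0, 0),
    }
}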
diff --git a/utils/scarb-proc-macro-server-types/src/methods/mod.rs b/utils/scarb-proc-macro-server-types/src/methods/mod.rs
index de8377f4a..102d355f7 100644
--- a/utils/scarb-proc-macro-server-types/src/methods/mod.rs
+++ b/utils/scarb-proc-macro-server-types/src/methods/mod.rs
@@ -15,10 +15,19 @@ pub trait Method {
 ///
 /// This struct encapsulates both the resulting token stream from macro expansion
 /// and any diagnostic messages (e.g., errors or warnings) that were generated during processing.
-#[derive(Debug, Clone, Default, PartialEq, Eq, Hash, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
 pub struct ProcMacroResult {
     /// The resultant token stream produced after the macro expansion.
     pub token_stream: TokenStream,
     /// A list of diagnostics produced during the macro execution.
     pub diagnostics: Vec<Diagnostic>,
 }
+
+impl Default for ProcMacroResult {
+    fn default() -> Self {
+        Self {
+            token_stream: TokenStream::empty(),
+            diagnostics: Vec::new(),
+        }
+    }
+}
diff --git a/utils/scarb-test-support/src/cairo_plugin_project_builder.rs b/utils/scarb-test-support/src/cairo_plugin_project_builder.rs
index bc8fae976..6f75bdf1f 100644
--- a/utils/scarb-test-support/src/cairo_plugin_project_builder.rs
+++ b/utils/scarb-test-support/src/cairo_plugin_project_builder.rs
@@ -92,6 +92,10 @@ impl CairoPluginProjectBuilder {
         self.project.just_manifest(t);
         self.just_code(t);
     }
+
+    pub fn add_primitive_token_dep(self) -> Self {
+        self.add_dep(r#"cairo-lang-primitive-token = "1.0.0""#)
+    }
 }

 impl Default for CairoPluginProjectBuilder {
diff --git a/utils/scarb-test-support/src/proc_macro_server.rs b/utils/scarb-test-support/src/proc_macro_server.rs
index 12e93176f..be13aa882 100644
--- a/utils/scarb-test-support/src/proc_macro_server.rs
+++ b/utils/scarb-test-support/src/proc_macro_server.rs
@@ -19,7 +19,7 @@ use std::process::Stdio;
 pub const SIMPLE_MACROS: &str = r#"
 use cairo_lang_macro::{
     ProcMacroResult,
-    TokenStream,
+    TokenStream, TokenTree, Token, TextSpan,
     attribute_macro,
     inline_macro,
     derive_macro,
@@ -40,7 +40,15 @@ pub fn inline_some(token_stream: TokenStream) -> ProcMacroResult {

 #[derive_macro]
 fn some_derive(_token_stream: TokenStream)-> ProcMacroResult {
-    ProcMacroResult::new(TokenStream::new("impl SomeImpl of SomeTrait {}".to_string()))
+    let content = "impl SomeImpl of SomeTrait {}".to_string();
+    let span = TextSpan { start: 0, end: content.len() as u32 };
+    ProcMacroResult::new(
+        TokenStream::new(vec![
+            TokenTree::Ident(
+                Token::new(content, span)
+            )
+        ])
+    )
 }
 "#;
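Editor's note: the recurring migration pattern in this diff is replacing the old string-based `TokenStream::new(String)` with a stream holding a single `Ident` token whose span covers the entire rewritten text. A minimal sketch of factoring that out for a macro crate built on the `cairo-lang-macro` API shown here (the helper name `stream_from_string` is hypothetical, not part of this change):

use cairo_lang_macro::{attribute_macro, ProcMacroResult, TextSpan, Token, TokenStream, TokenTree};

// Hypothetical convenience: build a one-token stream covering `content` end to end,
// mirroring what the updated tests in this diff do inline.
fn stream_from_string(content: String) -> TokenStream {
    let span = TextSpan { start: 0, end: content.len() as u32 };
    TokenStream::new(vec![TokenTree::Ident(Token::new(content, span))])
}

#[attribute_macro]
pub fn passthrough(_attr: TokenStream, item: TokenStream) -> ProcMacroResult {
    // Round-trip the item through a string, as several tests above do.
    ProcMacroResult::new(stream_from_string(item.to_string()))
}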