Make TextSpan u32 to match the compiler one (#1748)
commit-id:107b5e17

---

**Stack**:
- #1749
- #1748

⚠️ *Part of a stack created by [spr](https://github.com/ejoffe/spr). Do not merge manually using the UI - doing so may have unexpected results.*
maciektr committed Dec 4, 2024
1 parent a776c69 commit 38b89fc
Showing 7 changed files with 30 additions and 29 deletions.
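In short, span offsets in the proc-macro API move from `usize` to `u32` so they line up with the compiler's span representation, and every call site that derives an offset from a string length now needs an explicit cast. A minimal, self-contained sketch of the call-site pattern (the struct shape follows the diffs below; the surrounding code is illustrative only):

```rust
// Illustrative stand-in for the updated span type: offsets are now u32.
#[derive(Debug)]
struct TextSpan {
    start: u32,
    end: u32,
}

fn main() {
    let code = "fn main() {}".to_string();
    // `str::len()` returns usize, so call sites cast when building a span,
    // mirroring the updated tests below.
    let span = TextSpan { start: 0, end: code.len() as u32 };
    println!("{span:?}");
}
```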
4 changes: 2 additions & 2 deletions plugins/cairo-lang-macro-stable/src/lib.rs
@@ -16,8 +16,8 @@ pub struct StableToken {
 #[repr(C)]
 #[derive(Debug)]
 pub struct StableTextSpan {
-    pub start: usize,
-    pub end: usize,
+    pub start: u32,
+    pub end: u32,
 }
 
 #[repr(C)]
2 changes: 1 addition & 1 deletion plugins/cairo-lang-macro/src/types/mod.rs
@@ -45,7 +45,7 @@ pub struct ProcMacroResult {
 ///     TokenTree::Ident(
 ///         Token::new(
 ///             &code,
-///             TextSpan::new(0, code.len())
+///             TextSpan::new(0, code.len() as u32)
 ///         )
 ///     )
 /// ]);
8 changes: 5 additions & 3 deletions plugins/cairo-lang-macro/src/types/token.rs
@@ -95,12 +95,14 @@ impl TokenTree {
     }
 }
 
+pub type TextOffset = u32;
+
 /// A range of text offsets that form a span (like text selection).
 #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct TextSpan {
-    pub start: usize,
-    pub end: usize,
+    pub start: TextOffset,
+    pub end: TextOffset,
 }
 
 /// A single Cairo token.
@@ -311,7 +313,7 @@ impl TokenTree {
 
 impl TextSpan {
     /// Create a new [`TextSpan`].
-    pub fn new(start: usize, end: usize) -> TextSpan {
+    pub fn new(start: TextOffset, end: TextOffset) -> TextSpan {
         TextSpan { start, end }
     }
 }
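With the new `TextOffset` alias, span construction takes `u32` offsets. A hedged sketch of how a caller might build a span against this signature (the alias and constructor mirror the hunk above; the checked conversion is merely an illustrative alternative to the `as u32` casts used in the tests below):

```rust
pub type TextOffset = u32;

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct TextSpan {
    pub start: TextOffset,
    pub end: TextOffset,
}

impl TextSpan {
    /// Create a new span from `u32` offsets, mirroring the updated constructor.
    pub fn new(start: TextOffset, end: TextOffset) -> TextSpan {
        TextSpan { start, end }
    }
}

fn main() {
    let code = String::from("fn main() {}");
    // The tests in this commit cast with `as u32`; `u32::try_from` is a
    // checked alternative for inputs that could exceed u32::MAX.
    let end = u32::try_from(code.len()).expect("source longer than u32::MAX");
    let span = TextSpan::new(0, end);
    println!("{span:?}");
}
```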
11 changes: 5 additions & 6 deletions scarb/src/compiler/plugin/proc_macro/types.rs
@@ -2,7 +2,6 @@ use cairo_lang_macro::{
     AllocationContext, TextSpan, Token, TokenStream, TokenStreamMetadata, TokenTree,
 };
 use cairo_lang_syntax::node::{db::SyntaxGroup, SyntaxNode};
-use std::ops::Add;
 
 /// Helps creating TokenStream based on multiple SyntaxNodes,
 /// which aren't descendants or ascendants of each other inside the SyntaxTree.
@@ -46,19 +45,19 @@ impl<'a> TokenStreamBuilder<'a> {
     }
 
     pub fn token_from_syntax_node(&self, node: SyntaxNode, ctx: &AllocationContext) -> Token {
-        let span = node.span(self.db).to_str_range();
+        let span = node.span(self.db);
         let text = node.get_text(self.db);
         let span = TextSpan {
             // We skip the whitespace prefix, so that diagnostics start where the actual token contents is.
-            start: span.start.add(whitespace_prefix_len(&text)),
-            end: span.end,
+            start: span.start.as_u32() + whitespace_prefix_len(&text),
+            end: span.end.as_u32(),
         };
         Token::new_in(text, span, ctx)
     }
 }
 
-fn whitespace_prefix_len(s: &str) -> usize {
-    s.chars().take_while(|c| c.is_whitespace()).count()
+fn whitespace_prefix_len(s: &str) -> u32 {
+    s.chars().take_while(|c| c.is_whitespace()).count() as u32
 }
 
 #[cfg(test)]
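The whitespace-skipping logic above can be illustrated in isolation. A small, self-contained sketch (not the actual `TokenStreamBuilder`, which depends on the Cairo syntax database) showing how the span start is shifted past the leading whitespace so diagnostics point at the token itself:

```rust
// Standalone illustration of the whitespace-prefix adjustment above.
fn whitespace_prefix_len(s: &str) -> u32 {
    s.chars().take_while(|c| c.is_whitespace()).count() as u32
}

fn main() {
    let text = "   foo";
    // `node_start` is a hypothetical offset where the node begins in the file;
    // the real code obtains it from `node.span(db).start.as_u32()`.
    let node_start: u32 = 10;
    let start = node_start + whitespace_prefix_len(text);
    assert_eq!(start, 13); // diagnostics now point at "foo", not at the spaces
    println!("adjusted span start = {start}");
}
```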
28 changes: 14 additions & 14 deletions scarb/tests/build_cairo_plugin.rs
@@ -412,7 +412,7 @@ fn can_replace_original_node() {
     let new_token_string = token_stream.to_string().replace("12", "34");
     let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new(
         new_token_string.clone(),
-        TextSpan { start: 0, end: new_token_string.len() },
+        TextSpan { start: 0, end: new_token_string.len() as u32 },
     ))]);
     ProcMacroResult::new(token_stream)
 }
@@ -581,7 +581,7 @@ fn can_define_multiple_macros() {
     let new_token_string = token_stream.to_string().replace("12", "34");
     let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new(
         new_token_string.clone(),
-        TextSpan { start: 0, end: new_token_string.len() },
+        TextSpan { start: 0, end: new_token_string.len() as u32 },
     ))]);
     let aux_data = AuxData::new(Vec::new());
     ProcMacroResult::new(token_stream).with_aux_data(aux_data)
@@ -592,7 +592,7 @@ fn can_define_multiple_macros() {
     let new_token_string = token_stream.to_string().replace("56", "78");
     let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new(
         new_token_string.clone(),
-        TextSpan { start: 0, end: new_token_string.len() },
+        TextSpan { start: 0, end: new_token_string.len() as u32 },
     ))]);
     let aux_data = AuxData::new(Vec::new());
     ProcMacroResult::new(token_stream).with_aux_data(aux_data)
@@ -616,7 +616,7 @@ fn can_define_multiple_macros() {
     let new_token_string = token_stream.to_string().replace("90", "09");
     let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new(
         new_token_string.clone(),
-        TextSpan { start: 0, end: new_token_string.len() },
+        TextSpan { start: 0, end: new_token_string.len() as u32 },
     ))]);
     let aux_data = AuxData::new(Vec::new());
     ProcMacroResult::new(token_stream).with_aux_data(aux_data)
@@ -819,7 +819,7 @@ fn can_resolve_full_path_markers() {
         code.clone(),
         TextSpan {
             start: 0,
-            end: code.len(),
+            end: code.len() as u32,
         },
     ))])
     ).with_full_path_markers(full_path_markers)
@@ -990,7 +990,7 @@ fn can_implement_derive_macro() {
         code.clone(),
         TextSpan {
             start: 0,
-            end: code.len(),
+            end: code.len() as u32,
         },
     ))]);
@@ -1051,7 +1051,7 @@ fn can_use_both_derive_and_attr() {
         new_token_string.clone(),
         TextSpan {
             start: 0,
-            end: new_token_string.len(),
+            end: new_token_string.len() as u32,
         },
     ))]))
 }
@@ -1063,7 +1063,7 @@ fn can_use_both_derive_and_attr() {
         code.clone(),
         TextSpan {
             start: 0,
-            end: code.len(),
+            end: code.len() as u32,
         },
     ))]);
@@ -1072,7 +1072,7 @@ fn can_use_both_derive_and_attr() {
         result_string.clone(),
         TextSpan {
             start: 0,
-            end: result_string.len(),
+            end: result_string.len() as u32,
         },
     ))]))
 }
@@ -1091,7 +1091,7 @@ fn can_use_both_derive_and_attr() {
         code.clone(),
         TextSpan {
             start: 0,
-            end: code.len(),
+            end: code.len() as u32,
         },
     ))]))
 }
@@ -1298,7 +1298,7 @@ fn can_be_expanded() {
     let new_token_string = token_stream.to_string().replace("12", "34");
     let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new(
         new_token_string.clone(),
-        TextSpan { start: 0, end: new_token_string.len() },
+        TextSpan { start: 0, end: new_token_string.len() as u32 },
     ))]);
     ProcMacroResult::new(token_stream)
 }
@@ -1328,7 +1328,7 @@ fn can_be_expanded() {
     let token_stream = TokenStream::new(vec![TokenTree::Ident(Token::new(
         code.clone(),
-        TextSpan { start: 0, end: code.len() },
+        TextSpan { start: 0, end: code.len() as u32 },
     ))]);
     ProcMacroResult::new(token_stream)
@@ -1414,7 +1414,7 @@ fn can_expand_trait_inner_func_attrr() {
         .replace("12", "34");
     ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new(
         new_token_string.clone(),
-        TextSpan { start: 0, end: new_token_string.len() },
+        TextSpan { start: 0, end: new_token_string.len() as u32 },
     ))]))
 }
 "##})
@@ -1474,7 +1474,7 @@ fn can_expand_impl_inner_func_attrr() {
     let new_token_string = token_stream.to_string().replace("1", "2");
     ProcMacroResult::new(TokenStream::new(vec![TokenTree::Ident(Token::new(
         new_token_string.clone(),
-        TextSpan { start: 0, end: new_token_string.len() },
+        TextSpan { start: 0, end: new_token_string.len() as u32 },
     ))]))
 }
 "##})
4 changes: 2 additions & 2 deletions scarb/tests/proc_macro_server.rs
@@ -58,7 +58,7 @@ fn expand_attribute() {
 
     let output = input.replace(name, "very_new_name");
-    let span = TextSpan { start: 0, end: output.len() };
+    let span = TextSpan { start: 0, end: output.len() as u32 };
     ProcMacroResult::new(
         TokenStream::new(vec![
             TokenTree::Ident(
@@ -151,7 +151,7 @@ fn expand_inline() {
 #[inline_macro]
 pub fn replace_all_15_with_25(token_stream: TokenStream) -> ProcMacroResult {
     let content = token_stream.to_string().replace("15", "25");
-    let span = TextSpan { start: 0, end: content.len() };
+    let span = TextSpan { start: 0, end: content.len() as u32 };
     ProcMacroResult::new(
         TokenStream::new(vec![
             TokenTree::Ident(
2 changes: 1 addition & 1 deletion utils/scarb-test-support/src/proc_macro_server.rs
@@ -41,7 +41,7 @@ pub fn inline_some(token_stream: TokenStream) -> ProcMacroResult {
 #[derive_macro]
 fn some_derive(_token_stream: TokenStream)-> ProcMacroResult {
     let content = "impl SomeImpl of SomeTrait {}".to_string();
-    let span = TextSpan { start: 0, end: content.len() };
+    let span = TextSpan { start: 0, end: content.len() as u32 };
     ProcMacroResult::new(
         TokenStream::new(vec![
             TokenTree::Ident(
