diff --git a/compiler/driver/src/cli.rs b/compiler/driver/src/cli.rs
index 47fb6ed3..4f4739b0 100644
--- a/compiler/driver/src/cli.rs
+++ b/compiler/driver/src/cli.rs
@@ -204,6 +204,7 @@ pub(crate) fn arguments() -> Result<(Command, GlobalOptions)> {
         .arg(
             Arg::new(argument::NAME)
                 .required(true)
+                .value_parser(WordParser)
                 .help("The name of the package"),
         )
         .args(package_creation_arguments),
diff --git a/compiler/lexer/src/lib.rs b/compiler/lexer/src/lib.rs
index 1eb62a1d..28215b8e 100644
--- a/compiler/lexer/src/lib.rs
+++ b/compiler/lexer/src/lib.rs
@@ -25,6 +25,7 @@ pub struct Outcome {
 
 #[derive(Default)]
 pub struct Options {
+    pub keep_shebang: bool,
     pub keep_comments: bool,
 }
 
@@ -35,6 +36,7 @@ fn lex_string(source: String) -> Outcome {
         &map[file],
         &Options {
             keep_comments: true,
+            keep_shebang: true,
         },
     )
     .lex()
@@ -45,8 +47,9 @@ pub trait WordExt: Sized {
 }
 
 impl WordExt for Word {
-    // @Beacon @Bug this allows ids like ` foo-bar `
-    // (and `hey;;;wkwkwkwwkw`)! just write a custom lexer for this!
+    // @Bug this allows words like ` foo-bar ` (leading & trailing ws)
+    // @Note and not long before it used to allow `hey;;;wkwkwkwwkw`!
+    // @Task just write a custom lexer for this!
     fn parse(name: String) -> Result {
         let Outcome { tokens, errors } = lex_string(name);
 
@@ -58,7 +61,7 @@ impl WordExt for Word {
 
         obtain!(
             (tokens.next().ok_or(())?, tokens.next().ok_or(())?),
-            (BareToken::Word(atom), BareToken::EndOfInput) => atom
+            (BareToken::Word(word), BareToken::EndOfInput) => word
         )
         .map(Self::new_unchecked)
         .ok_or(())
@@ -216,12 +219,19 @@ impl<'a> Lexer<'a> {
         if let Some('!') = self.peek() {
             while let Some(character) = self.peek() {
+                if self.options.keep_shebang {
+                    self.take();
+                }
                 self.advance();
 
                 if character == '\n' {
                     break;
                 }
             }
+
+            if self.options.keep_shebang {
+                self.add(Shebang);
+            }
         } else {
             self.lex_punctuation();
         }
 
diff --git a/compiler/lexer/src/test.rs b/compiler/lexer/src/test.rs
index 2a8a60ed..2fd26744 100644
--- a/compiler/lexer/src/test.rs
+++ b/compiler/lexer/src/test.rs
@@ -87,6 +87,7 @@ fn shebang() {
 #!/usr/bin/lushui run
 it",
         vec![
+            Token::new(span(1, 23), Shebang),
             Token::new(span(23, 25), Word("it".into())),
             Token::new(span(25, 25), EndOfInput),
         ],
diff --git a/compiler/token/src/lib.rs b/compiler/token/src/lib.rs
index 307e92d2..b2b1421c 100644
--- a/compiler/token/src/lib.rs
+++ b/compiler/token/src/lib.rs
@@ -59,6 +59,7 @@ impl TokenExt for Token {
 #[derive(Clone, PartialEq, Eq, Discriminant, Debug)]
 #[discriminant(name: TokenName)]
 pub enum BareToken {
+    Shebang,
     Comment,
     DocumentationComment,
     Word(Atom), // @Task use crate::Word
@@ -173,6 +174,7 @@ impl fmt::Display for TokenName {
     }
 
         f.write_str(match self {
+            Shebang => "shebang",
            Comment => "comment",
            DocumentationComment => "documentation comment",
            Word => "word",