5 changes: 5 additions & 0 deletions .github/labeler.yml
@@ -110,3 +110,8 @@ L-Grit:
- changed-files:
- any-glob-to-any-file:
- crates/biome_grit_*/**

L-Tailwind:
- changed-files:
- any-glob-to-any-file:
- crates/biome_tailwind_*/**
14 changes: 14 additions & 0 deletions crates/biome_parser/src/lexer.rs
@@ -201,6 +201,8 @@ pub trait Lexer<'src> {
}

/// Returns the byte at position `self.position + offset` or `None` if it is out of bounds.
///
/// See also: [`Self::prev_byte_at`]
#[inline]
fn byte_at(&self, offset: usize) -> Option<u8> {
self.source()
@@ -218,6 +220,18 @@ pub trait Lexer<'src> {
}
}

/// Returns the byte at position `self.position - offset`, or `None` if it is out of bounds. Unlike [`Self::byte_at`], this looks backwards instead of forwards.
#[inline]
fn prev_byte_at(&self, offset: usize) -> Option<u8> {
if offset > self.position() {
return None;
}
self.source()
.as_bytes()
.get(self.position() - offset)
.copied()
}

#[inline]
fn text_position(&self) -> TextSize {
TextSize::try_from(self.position()).expect("Input to be smaller than 4 GB")
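For readers unfamiliar with the `Lexer` trait, here is a minimal standalone sketch of the bounds check that `prev_byte_at` performs, written against a plain string rather than the trait; the free function and `main` below are illustrative only, not Biome code:

```rust
/// Standalone sketch of `prev_byte_at`: look `offset` bytes behind `position`,
/// returning `None` when that would run past the start of the input.
fn prev_byte_at(source: &str, position: usize, offset: usize) -> Option<u8> {
    if offset > position {
        return None;
    }
    source.as_bytes().get(position - offset).copied()
}

fn main() {
    let source = "-mt-4";
    // With the lexer positioned just after the leading `-` (position 1),
    // one byte back is the `-` itself.
    assert_eq!(prev_byte_at(source, 1, 1), Some(b'-'));
    // Looking two bytes back from position 1 would underflow, so it is `None`.
    assert_eq!(prev_byte_at(source, 1, 2), None);
}
```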
7 changes: 7 additions & 0 deletions crates/biome_tailwind_factory/src/generated/node_factory.rs

Some generated files are not rendered by default.

16 changes: 16 additions & 0 deletions crates/biome_tailwind_parser/src/lexer/mod.rs
@@ -71,6 +71,21 @@ impl<'src> TailwindLexer<'src> {
}
}

fn consume_token_saw_negative(&mut self, current: u8) -> TailwindSyntaxKind {
match current {
b'\n' | b'\r' | b'\t' | b' ' => self.consume_newline_or_whitespaces(),
bracket @ (b'[' | b']' | b'(' | b')') => self.consume_bracket(bracket),
_ if self.current_kind == T!['['] => self.consume_bracketed_thing(TW_SELECTOR, b']'),
_ if self.current_kind == T!['('] => self.consume_bracketed_thing(TW_VALUE, b')'),
b':' => self.consume_byte(T![:]),
b'-' => self.consume_byte(T![-]),
b'!' => self.consume_byte(T![!]),
b'/' => self.consume_byte(T![/]),
_ if current.is_ascii_alphabetic() => self.consume_base(),
_ => self.consume_unexpected_character(),
}
}

/// Consume a token in the arbitrary context
fn consume_token_arbitrary(&mut self, current: u8) -> TailwindSyntaxKind {
match current {
@@ -252,6 +267,7 @@ impl<'src> Lexer<'src> for TailwindLexer<'src> {
match self.current_byte() {
Some(current) => match context {
TailwindLexContext::Regular => self.consume_token(current),
TailwindLexContext::SawNegative => self.consume_token_saw_negative(current),
TailwindLexContext::Arbitrary => self.consume_token_arbitrary(current),
TailwindLexContext::ArbitraryVariant => {
self.consume_token_arbitrary_variant(current)
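As a rough illustration of what the new `SawNegative` dispatch decides, here is a toy, self-contained version of the classification it applies to the byte right after a leading `-`; the enum and function below are illustrative only and leave out the bracketed-selector and bracketed-value cases handled above:

```rust
// Toy classifier mirroring `consume_token_saw_negative` for the common case:
// after a leading `-`, an alphabetic byte starts the utility's base name.
#[derive(Debug, PartialEq)]
enum TokenKind {
    Dash,
    Colon,
    Bang,
    Slash,
    Base,
    Unexpected,
}

fn classify_after_negative(byte: u8) -> TokenKind {
    match byte {
        b'-' => TokenKind::Dash,
        b':' => TokenKind::Colon,
        b'!' => TokenKind::Bang,
        b'/' => TokenKind::Slash,
        b if b.is_ascii_alphabetic() => TokenKind::Base,
        _ => TokenKind::Unexpected,
    }
}

fn main() {
    // In `-mt-4`, the byte after the leading `-` is `m`, so lexing continues
    // with a base name instead of treating the input as bogus.
    assert_eq!(classify_after_negative(b'm'), TokenKind::Base);
    assert_eq!(classify_after_negative(b'-'), TokenKind::Dash);
}
```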
10 changes: 10 additions & 0 deletions crates/biome_tailwind_parser/src/lexer/tests.rs
@@ -191,3 +191,13 @@ fn variant_multiple_full() {
TW_VALUE:7
);
}

#[test]
fn negative() {
assert_lex!(
TailwindLexContext::SawNegative,
"-mt",
DASH:1,
TW_BASE:2,
);
}
20 changes: 12 additions & 8 deletions crates/biome_tailwind_parser/src/syntax/mod.rs
@@ -56,7 +56,8 @@ impl ParseSeparatedList for CandidateList {
) -> biome_parser::parse_recovery::RecoveryResult {
parsed_element.or_recover_with_token_set(
p,
&ParseRecoveryTokenSet::new(TW_BOGUS_CANDIDATE, token_set![WHITESPACE, NEWLINE, EOF]),
&ParseRecoveryTokenSet::new(TW_BOGUS_CANDIDATE, token_set![WHITESPACE])
.enable_recovery_on_line_break(),
expected_candidate,
)
}
@@ -68,11 +69,16 @@ fn parse_full_candidate(p: &mut TailwindParser) -> ParsedSyntax {

VariantList.parse_list(p);

if p.at(T![-]) {
p.bump_with_context(T![-], TailwindLexContext::SawNegative);
}

let candidate = parse_arbitrary_candidate(p)
.or_else(|| parse_functional_or_static_candidate(p))
.or_recover_with_token_set(
p,
&ParseRecoveryTokenSet::new(TW_BOGUS_CANDIDATE, token_set![WHITESPACE, NEWLINE, EOF]),
&ParseRecoveryTokenSet::new(TW_BOGUS_CANDIDATE, token_set![WHITESPACE])
.enable_recovery_on_line_break(),
expected_candidate,
);

@@ -112,10 +118,11 @@ fn parse_functional_or_static_candidate(p: &mut TailwindParser) -> ParsedSyntax
return Present(m.complete(p, TW_STATIC_CANDIDATE));
}

p.bump(DASH);
p.expect(T![-]);
match parse_value(p).or_recover_with_token_set(
p,
&ParseRecoveryTokenSet::new(TW_BOGUS_VALUE, token_set![WHITESPACE, NEWLINE, T![!], EOF]),
&ParseRecoveryTokenSet::new(TW_BOGUS_VALUE, token_set![WHITESPACE, T![!]])
.enable_recovery_on_line_break(),
expected_value,
) {
Ok(_) => {}
@@ -192,10 +199,7 @@ fn parse_modifier(p: &mut TailwindParser) -> ParsedSyntax {
}
match parse_value(p).or_recover_with_token_set(
p,
&ParseRecoveryTokenSet::new(
TW_BOGUS_MODIFIER,
token_set![WHITESPACE, NEWLINE, T![!], EOF],
),
&ParseRecoveryTokenSet::new(TW_BOGUS_MODIFIER, token_set![WHITESPACE, NEWLINE, T![!]]),
expected_value,
) {
Ok(_) => {}
4 changes: 4 additions & 0 deletions crates/biome_tailwind_parser/src/syntax/variant.rs
@@ -79,6 +79,10 @@ impl ParseSeparatedList for VariantList {
}

pub(crate) fn parse_variant(p: &mut TailwindParser) -> ParsedSyntax {
if p.at(T![-]) {
// variants can't start with a negative sign
return Absent;
}
if p.at(T!['[']) {
return parse_arbitrary_variant(p);
}
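To see why `parse_variant` bails out at `-`, here is a toy, self-contained sketch of the attribution rule this encodes: a leading `-` never opens a variant, so it stays attached to the candidate that follows. The helper below is illustrative only, not Biome's parser, and it ignores arbitrary variants that carry `:` inside brackets:

```rust
// Toy split of a Tailwind class string into (variants, is_negative, utility).
fn split_candidate(input: &str) -> (Vec<&str>, bool, &str) {
    let mut rest = input;
    let mut variants = Vec::new();
    // Variants are the `:`-separated prefixes, e.g. `hover` in `hover:-mt-4`.
    while let Some(idx) = rest.find(':') {
        let (head, tail) = rest.split_at(idx);
        // A segment starting with `-` cannot be a variant, so stop collecting.
        if head.starts_with('-') {
            break;
        }
        variants.push(head);
        rest = &tail[1..];
    }
    let negative = rest.starts_with('-');
    let utility = if negative { &rest[1..] } else { rest };
    (variants, negative, utility)
}

fn main() {
    assert_eq!(split_candidate("hover:-mt-4"), (vec!["hover"], true, "mt-4"));
    assert_eq!(split_candidate("-mt-4"), (vec![], true, "mt-4"));
    assert_eq!(split_candidate("w-5"), (vec![], false, "w-5"));
}
```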
2 changes: 2 additions & 0 deletions crates/biome_tailwind_parser/src/token_source.rs
@@ -22,6 +22,8 @@ pub(crate) enum TailwindLexContext {
/// The default state.
#[default]
Regular,
/// The parser just encountered a `-` before a basename, e.g. in `-mt-4`. This means the next token should be a basename.
SawNegative,
/// The lexer has encountered a `[` and the parser has yet to encounter the matching `]`.
Arbitrary,
/// Like Arbitrary, but specifically for arbitrary variants.
@@ -18,6 +18,7 @@ TwRoot {
candidates: TwCandidateList [
TwFullCandidate {
variants: TwVariantList [],
negative_token: missing (optional),
candidate: TwBogusCandidate {
items: [
[email protected] "[" [] [],
@@ -30,6 +31,7 @@ TwRoot {
[email protected] " " [] [],
TwFullCandidate {
variants: TwVariantList [],
negative_token: missing (optional),
candidate: TwFunctionalCandidate {
base_token: [email protected] "w" [] [],
minus_token: [email protected] "-" [] [],
@@ -53,21 +55,23 @@ TwRoot {
1: [email protected]
0: [email protected]
0: [email protected]
1: [email protected]
1: (empty)
2: [email protected]
0: [email protected] "[" [] []
1: [email protected] ":40px" [] []
2: [email protected] "]" [] []
2: (empty)
3: (empty)
1: [email protected] " " [] []
2: [email protected]
0: [email protected]
1: [email protected]
1: (empty)
2: [email protected]
0: [email protected] "w" [] []
1: [email protected] "-" [] []
2: [email protected]
0: [email protected] "5" [] [Newline("\n")]
3: (empty)
2: (empty)
3: (empty)
2: [email protected] "" [] []

```
@@ -18,6 +18,7 @@ TwRoot {
candidates: TwCandidateList [
TwFullCandidate {
variants: TwVariantList [],
negative_token: missing (optional),
candidate: TwBogusCandidate {
items: [
[email protected] "[" [] [],
@@ -30,6 +31,7 @@ TwRoot {
[email protected] " " [] [],
TwFullCandidate {
variants: TwVariantList [],
negative_token: missing (optional),
candidate: TwFunctionalCandidate {
base_token: [email protected] "w" [] [],
minus_token: [email protected] "-" [] [],
@@ -53,21 +55,23 @@ TwRoot {
1: [email protected]
0: [email protected]
0: [email protected]
1: [email protected]
1: (empty)
2: [email protected]
0: [email protected] "[" [] []
1: [email protected] "width:" [] []
2: [email protected] "]" [] []
2: (empty)
3: (empty)
1: [email protected] " " [] []
2: [email protected]
0: [email protected]
1: [email protected]
1: (empty)
2: [email protected]
0: [email protected] "w" [] []
1: [email protected] "-" [] []
2: [email protected]
0: [email protected] "5" [] [Newline("\n")]
3: (empty)
2: (empty)
3: (empty)
2: [email protected] "" [] []

```
@@ -18,6 +18,7 @@ TwRoot {
candidates: TwCandidateList [
TwFullCandidate {
variants: TwVariantList [],
negative_token: missing (optional),
candidate: TwFunctionalCandidate {
base_token: [email protected] "text" [] [],
minus_token: [email protected] "-" [] [],
@@ -43,13 +44,14 @@ TwRoot {
1: [email protected]
0: [email protected]
0: [email protected]
1: [email protected]
1: (empty)
2: [email protected]
0: [email protected] "text" [] []
1: [email protected] "-" [] []
2: [email protected]
0: [email protected] "[" [] [Newline("\n")]
3: (empty)
2: (empty)
3: (empty)
2: [email protected] "" [] []

```
@@ -18,6 +18,7 @@ TwRoot {
candidates: TwCandidateList [
TwFullCandidate {
variants: TwVariantList [],
negative_token: missing (optional),
candidate: TwFunctionalCandidate {
base_token: [email protected] "text" [] [],
minus_token: [email protected] "-" [] [],
@@ -44,14 +45,15 @@ TwRoot {
1: [email protected]
0: [email protected]
0: [email protected]
1: [email protected]
1: (empty)
2: [email protected]
0: [email protected] "text" [] []
1: [email protected] "-" [] []
2: [email protected]
0: [email protected] "[" [] []
1: [email protected] "#ff0000" [] [Newline("\n")]
3: (empty)
2: (empty)
3: (empty)
2: [email protected] "" [] []

```