diff --git a/src/textual/css/parse.py b/src/textual/css/parse.py
index 17fe3d982..9062df769 100644
--- a/src/textual/css/parse.py
+++ b/src/textual/css/parse.py
@@ -56,7 +56,7 @@ def parse_selectors(css_selectors: str) -> tuple[SelectorSet, ...]:
             rule_selectors.append(selectors[:])
             selectors.clear()
             combinator = None
-        elif token.name == "rule_declaration_set_start":
+        elif token.name == "declaration_set_start":
             break
         elif token.name == "combinator_child":
             combinator = CombinatorType.CHILD
@@ -98,7 +98,7 @@ def parse_rule_set(tokens: Iterator[Token], token: Token) -> Iterable[RuleSet]:
             rule_selectors.append(selectors[:])
             selectors.clear()
             combinator = None
-        elif token.name == "rule_declaration_set_start":
+        elif token.name == "declaration_set_start":
             break
         elif token.name == "combinator_child":
             combinator = CombinatorType.CHILD
@@ -128,9 +128,9 @@ def parse_rule_set(tokens: Iterator[Token], token: Token) -> Iterable[RuleSet]:
     while True:
         token = next(tokens)
         token_name = token.name
-        if token_name in ("whitespace", "rule_declaration_end"):
+        if token_name in ("whitespace", "declaration_end"):
             continue
-        if token_name == "rule_declaration_name":
+        if token_name == "declaration_name":
             if declaration.tokens:
                 try:
                     styles_builder.add_declaration(declaration)
@@ -138,7 +138,7 @@ def parse_rule_set(tokens: Iterator[Token], token: Token) -> Iterable[RuleSet]:
                     errors.append((error.token, error.message))
             declaration = Declaration(token, "")
             declaration.name = token.value.rstrip(":")
-        elif token_name == "rule_declaration_set_end":
+        elif token_name == "declaration_set_end":
             break
         else:
             declaration.tokens.append(token)
@@ -178,9 +178,9 @@ def parse_declarations(css: str, path: str) -> Styles:
         if token is None:
             break
         token_name = token.name
-        if token_name in ("whitespace", "rule_declaration_end", "eof"):
+        if token_name in ("whitespace", "declaration_end", "eof"):
             continue
-        if token_name == "rule_declaration_name":
+        if token_name == "declaration_name":
             if declaration and declaration.tokens:
                 try:
                     styles_builder.add_declaration(declaration)
@@ -189,7 +189,7 @@ def parse_declarations(css: str, path: str) -> Styles:
                     raise
             declaration = Declaration(token, "")
             declaration.name = token.value.rstrip(":")
-        elif token_name == "rule_declaration_set_end":
+        elif token_name == "declaration_set_end":
             break
         else:
             if declaration:
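The parse.py hunks above are mechanical renames: each dispatch on `token.name` moves to the shorter `declaration_*` names that the tokenizer (next file) now emits. A minimal sketch of the dispatch pattern in question, with a hypothetical stand-in `Token` type rather than Textual's real class:

```python
from typing import Iterable, NamedTuple


class Token(NamedTuple):
    """Simplified stand-in for textual.css.tokenizer.Token."""

    name: str
    value: str


def collect_selectors(tokens: Iterable[Token]):
    """Gather selector tokens until the "{" that opens the declaration block."""
    collected = []
    for token in tokens:
        if token.name == "declaration_set_start":  # was "rule_declaration_set_start"
            break
        if token.name != "whitespace":
            collected.append(token)
    return collected


assert collect_selectors(
    [Token("selector_start_class", ".warn"), Token("declaration_set_start", "{")]
) == [Token("selector_start_class", ".warn")]
```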
"$warning-text: TOKENS;" # for tokenizing variable value ------^~~~~~~^ -expect_variable_declaration_continue = Expect( - variable_declaration_end=r"\n|;", - whitespace=r"\s+", +expect_variable_value = Expect( comment_start=COMMENT_START, - **DECLARATION_VALUES, + whitespace=r"\s+", + variable_value=rf"[^;\n{COMMENT_START}]+", +) + +expect_variable_value_end = Expect( + variable_value_end=r"\n|;", ).expect_eof(True) expect_comment_end = Expect( @@ -66,35 +69,35 @@ expect_selector_continue = Expect( selector=r"[a-zA-Z_\-]+", combinator_child=">", new_selector=r",", - rule_declaration_set_start=r"\{", + declaration_set_start=r"\{", ) -# A rule declaration e.g. "text: red;" -# ^---^ -expect_rule_declaration = Expect( +# A declaration e.g. "text: red;" +# ^---^ +expect_declaration = Expect( whitespace=r"\s+", comment_start=COMMENT_START, - rule_declaration_name=r"[a-zA-Z_\-]+\:", - rule_declaration_set_end=r"\}", + declaration_name=r"[a-zA-Z_\-]+\:", + declaration_set_end=r"\}", ) -expect_rule_declaration_solo = Expect( +expect_declaration_solo = Expect( whitespace=r"\s+", comment_start=COMMENT_START, - rule_declaration_name=r"[a-zA-Z_\-]+\:", - rule_declaration_set_end=r"\}", + declaration_name=r"[a-zA-Z_\-]+\:", + declaration_set_end=r"\}", ).expect_eof(True) -# The value(s)/content from a rule declaration e.g. "text: red;" -# ^---^ -expect_rule_declaration_content = Expect( - rule_declaration_end=r"\n|;", +# The value(s)/content from a declaration e.g. "text: red;" +# ^---^ +expect_declaration_content = Expect( + declaration_end=r"\n|;", whitespace=r"\s+", comment_start=COMMENT_START, **DECLARATION_VALUES, important=r"\!important", comma=",", - rule_declaration_set_end=r"\}", + declaration_set_end=r"\}", ) @@ -112,8 +115,9 @@ class TokenizerState: EXPECT = expect_root_scope STATE_MAP = { - "variable_declaration_start": expect_variable_declaration_continue, - "variable_declaration_end": expect_root_scope, + "variable_name": expect_variable_value, + "variable_value": expect_variable_value_end, + "variable_value_end": expect_root_scope, "selector_start": expect_selector_continue, "selector_start_id": expect_selector_continue, "selector_start_class": expect_selector_continue, @@ -121,10 +125,10 @@ class TokenizerState: "selector_id": expect_selector_continue, "selector_class": expect_selector_continue, "selector_universal": expect_selector_continue, - "rule_declaration_set_start": expect_rule_declaration, - "rule_declaration_name": expect_rule_declaration_content, - "rule_declaration_end": expect_rule_declaration, - "rule_declaration_set_end": expect_root_scope, + "declaration_set_start": expect_declaration, + "declaration_name": expect_declaration_content, + "declaration_end": expect_declaration, + "declaration_set_end": expect_root_scope, } def __call__(self, code: str, path: str) -> Iterable[Token]: @@ -145,10 +149,10 @@ class TokenizerState: class DeclarationTokenizerState(TokenizerState): - EXPECT = expect_rule_declaration_solo + EXPECT = expect_declaration_solo STATE_MAP = { - "rule_declaration_name": expect_rule_declaration_content, - "rule_declaration_end": expect_rule_declaration_solo, + "declaration_name": expect_declaration_content, + "declaration_end": expect_declaration_solo, } diff --git a/tests/css/test_tokenize.py b/tests/css/test_tokenize.py index 8ef78f64c..7a8a73f8c 100644 --- a/tests/css/test_tokenize.py +++ b/tests/css/test_tokenize.py @@ -1,5 +1,6 @@ import pytest +import textual.css.tokenizer from textual.css.tokenize import tokenize from textual.css.tokenizer import 
diff --git a/tests/css/test_tokenize.py b/tests/css/test_tokenize.py
index 8ef78f64c..7a8a73f8c 100644
--- a/tests/css/test_tokenize.py
+++ b/tests/css/test_tokenize.py
@@ -1,5 +1,6 @@
 import pytest
 
+import textual.css.tokenizer
 from textual.css.tokenize import tokenize
 from textual.css.tokenizer import Token, TokenizeError
 
@@ -21,92 +22,67 @@ def test_variable_declaration_valid_names(name):
     css = f"${name}: black on red;"
     assert list(tokenize(css, "")) == [
         Token(
-            name="variable_declaration_start", value=f"${name}:", path="", code=css, location=(0, 0)
+            name="variable_name", value=f"${name}:", path="", code=css, location=(0, 0)
         ),
         Token(name="whitespace", value=" ", path="", code=css, location=(0, 14)),
-        Token(name="token", value="black", path="", code=css, location=(0, 15)),
-        Token(name="whitespace", value=" ", path="", code=css, location=(0, 20)),
-        Token(name="token", value="on", path="", code=css, location=(0, 21)),
-        Token(name="whitespace", value=" ", path="", code=css, location=(0, 23)),
-        Token(name="token", value="red", path="", code=css, location=(0, 24)),
-        Token(name="variable_declaration_end", value=";", path="", code=css, location=(0, 27)),
-    ]
-
-
-def test_variable_declaration_multiple_values():
-    css = "$x: 2vw\t4% 6s red;"
-    assert list(tokenize(css, "")) == [
-        Token(name='variable_declaration_start', value='$x:', path='', code=css, location=(0, 0)),
-        Token(name='whitespace', value=' ', path='', code=css, location=(0, 3)),
-        Token(name='scalar', value='2vw', path='', code=css, location=(0, 4)),
-        Token(name='whitespace', value='\t', path='', code=css, location=(0, 7)),
-        Token(name='scalar', value='4%', path='', code=css, location=(0, 8)),
-        Token(name='whitespace', value=' ', path='', code=css, location=(0, 10)),
-        Token(name='duration', value='6s', path='', code=css, location=(0, 11)),
-        Token(name='whitespace', value=' ', path='', code=css, location=(0, 13)),
-        Token(name='token', value='red', path='', code=css, location=(0, 15)),
-        Token(name='variable_declaration_end', value=';', path='', code=css, location=(0, 18)),
+        Token(name="variable_value", value="black on red", path="", code=css, location=(0, 15)),
+        Token(name="variable_value_end", value=";", path="", code=css, location=(0, 27)),
     ]
 
 
 def test_variable_declaration_comment_ignored():
     css = "$x: red; /* comment */"
     assert list(tokenize(css, "")) == [
-        Token(name='variable_declaration_start', value='$x:', path='', code=css, location=(0, 0)),
+        Token(name='variable_name', value='$x:', path='', code=css, location=(0, 0)),
         Token(name='whitespace', value=' ', path='', code=css, location=(0, 3)),
-        Token(name='token', value='red', path='', code=css, location=(0, 4)),
-        Token(name='variable_declaration_end', value=';', path='', code=css, location=(0, 7)),
+        Token(name='variable_value', value='red', path='', code=css, location=(0, 4)),
+        Token(name='variable_value_end', value=';', path='', code=css, location=(0, 7)),
         Token(name='whitespace', value=' ', path='', code=css, location=(0, 8)),
     ]
 
 
-def test_variable_declaration_comment_interspersed_ignored():
+def test_variable_declaration_comment_interspersed_raises():
     css = "$x: re/* comment */d;"
-    assert list(tokenize(css, "")) == [
-        Token(name='variable_declaration_start', value='$x:', path='', code=css, location=(0, 0)),
-        Token(name='whitespace', value=' ', path='', code=css, location=(0, 3)),
-        Token(name='token', value='re', path='', code=css, location=(0, 4)),
-        Token(name='token', value='d', path='', code=css, location=(0, 19)),
-        Token(name='variable_declaration_end', value=';', path='', code=css, location=(0, 20)),
-    ]
+    with pytest.raises(TokenizeError):
+        list(tokenize(css, ""))
+
+
+def test_variable_declaration_invalid_value_eof():
+    css = "$x:\n"
+    with pytest.raises(textual.css.tokenizer.EOFError):
+        list(tokenize(css, ""))
 
 
 def test_variable_declaration_no_semicolon():
     css = "$x: 1\n$y: 2"
     assert list(tokenize(css, "")) == [
-        Token(name="variable_declaration_start", value="$x:", code=css, path="", location=(0, 0)),
+        Token(name="variable_name", value="$x:", code=css, path="", location=(0, 0)),
         Token(name="whitespace", value=" ", code=css, path="", location=(0, 3)),
-        Token(name="number", value="1", code=css, path="", location=(0, 4)),
-        Token(name="variable_declaration_end", value="\n", code=css, path="", location=(0, 5)),
-        Token(name="variable_declaration_start", value="$y:", code=css, path="", location=(1, 0)),
+        Token(name="variable_value", value="1", code=css, path="", location=(0, 4)),
+        Token(name="variable_value_end", value="\n", code=css, path="", location=(0, 5)),
+        Token(name="variable_name", value="$y:", code=css, path="", location=(1, 0)),
         Token(name="whitespace", value=" ", code=css, path="", location=(1, 3)),
-        Token(name="number", value="2", code=css, path="", location=(1, 4)),
+        Token(name="variable_value", value="2", code=css, path="", location=(1, 4)),
     ]
 
 
-def test_variable_declaration_invalid_value():
-    css = "$x:(@$12x)"
-    with pytest.raises(TokenizeError):
-        list(tokenize(css, ""))
-
-
 def test_variables_declarations_amongst_rulesets():
     css = "$x:1; .thing{text:red;} $y:2;"
     assert list(tokenize(css, "")) == [
-        Token(name='variable_declaration_start', value='$x:', path='', code=css, location=(0, 0)),
-        Token(name='number', value='1', path='', code=css, location=(0, 3)),
-        Token(name='variable_declaration_end', value=';', path='', code=css, location=(0, 4)),
+        Token(name='variable_name', value='$x:', path='', code=css, location=(0, 0)),
+        Token(name='variable_value', value='1', path='', code=css, location=(0, 3)),
+        Token(name='variable_value_end', value=';', path='', code=css, location=(0, 4)),
         Token(name='whitespace', value=' ', path='', code=css, location=(0, 5)),
         Token(name='selector_start_class', value='.thing', path='', code=css, location=(0, 6)),
-        Token(name='rule_declaration_set_start', value='{', path='', code=css, location=(0, 12)),
-        Token(name='rule_declaration_name', value='text:', path='', code=css, location=(0, 13)),
+        Token(name='declaration_set_start', value='{', path='', code=css, location=(0, 12)),
+        Token(name='declaration_name', value='text:', path='', code=css, location=(0, 13)),
         Token(name='token', value='red', path='', code=css, location=(0, 18)),
-        Token(name='rule_declaration_end', value=';', path='', code=css, location=(0, 21)),
-        Token(name='rule_declaration_set_end', value='}', path='', code=css, location=(0, 22)),
+        Token(name='declaration_end', value=';', path='', code=css, location=(0, 21)),
+        Token(name='declaration_set_end', value='}', path='', code=css, location=(0, 22)),
         Token(name='whitespace', value=' ', path='', code=css, location=(0, 23)),
-        Token(name='variable_declaration_start', value='$y:', path='', code=css, location=(0, 24)),
-        Token(name='number', value='2', path='', code=css, location=(0, 27)),
-        Token(name='variable_declaration_end', value=';', path='', code=css, location=(0, 28)),
+        Token(name='variable_name', value='$y:', path='', code=css, location=(0, 24)),
+        Token(name='variable_value', value='2', path='', code=css, location=(0, 27)),
+        Token(name='variable_value_end', value=';', path='', code=css, location=(0, 28)),
    ]
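These tests pin down the behavioral consequence of the opaque value: a comment in the middle of a variable's value is no longer spliced out, and a value that is missing entirely now surfaces as an EOF error. A hedged sketch of why `"$x: re/* comment */d;"` now fails, reusing the simplified stand-in patterns from the earlier sketch rather than the real tokenizer:

```python
import re

value = re.compile(r"[^;\n/*]+")   # stand-in for the variable_value pattern
terminator = re.compile(r"\n|;")   # stand-in for variable_value_end

text = "re/* comment */d;"
end = value.match(text).end()               # consumes "re", stops at the "/"
assert terminator.match(text, end) is None  # "/*" is neither ";" nor "\n", so tokenizing errors out
```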
@@ -114,12 +90,12 @@ def test_variables_reference_in_rule_declaration_value():
     css = ".warn{text: $warning;}"
     assert list(tokenize(css, "")) == [
         Token(name='selector_start_class', value='.warn', path='', code=css, location=(0, 0)),
-        Token(name='rule_declaration_set_start', value='{', path='', code=css, location=(0, 5)),
-        Token(name='rule_declaration_name', value='text:', path='', code=css, location=(0, 6)),
+        Token(name='declaration_set_start', value='{', path='', code=css, location=(0, 5)),
+        Token(name='declaration_name', value='text:', path='', code=css, location=(0, 6)),
         Token(name='whitespace', value=' ', path='', code=css, location=(0, 11)),
         Token(name='variable_ref', value='$warning', path='', code=css, location=(0, 12)),
-        Token(name='rule_declaration_end', value=';', path='', code=css, location=(0, 20)),
-        Token(name='rule_declaration_set_end', value='}', path='', code=css, location=(0, 21)),
+        Token(name='declaration_end', value=';', path='', code=css, location=(0, 20)),
+        Token(name='declaration_set_end', value='}', path='', code=css, location=(0, 21)),
     ]
 
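Within rule declarations, `$warning` still tokenizes as a discrete `variable_ref`, which is what allows the parser to substitute the stored variable value later. A hypothetical sketch of that substitution step (the real resolution lives in Textual's parser; the helper and names below are illustrative only):

```python
def resolve_refs(tokens, variables):
    """Replace each (name, value) pair tagged variable_ref with its stored value."""
    for name, value in tokens:
        if name == "variable_ref":
            yield ("token", variables[value.lstrip("$")])
        else:
            yield (name, value)


variables = {"warning": "red"}
stream = [("declaration_name", "text:"), ("variable_ref", "$warning")]
assert list(resolve_refs(stream, variables)) == [
    ("declaration_name", "text:"),
    ("token", "red"),
]
```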
@@ -127,34 +103,32 @@ def test_variables_reference_in_rule_declaration_value_multiple():
     css = ".card{padding: $pad-y $pad-x;}"
     assert list(tokenize(css, "")) == [
         Token(name='selector_start_class', value='.card', path='', code=css, location=(0, 0)),
-        Token(name='rule_declaration_set_start', value='{', path='', code=css, location=(0, 5)),
-        Token(name='rule_declaration_name', value='padding:', path='', code=css, location=(0, 6)),
+        Token(name='declaration_set_start', value='{', path='', code=css, location=(0, 5)),
+        Token(name='declaration_name', value='padding:', path='', code=css, location=(0, 6)),
         Token(name='whitespace', value=' ', path='', code=css, location=(0, 14)),
         Token(name='variable_ref', value='$pad-y', path='', code=css, location=(0, 15)),
         Token(name='whitespace', value=' ', path='', code=css, location=(0, 21)),
         Token(name='variable_ref', value='$pad-x', path='', code=css, location=(0, 22)),
-        Token(name='rule_declaration_end', value=';', path='', code=css, location=(0, 28)),
-        Token(name='rule_declaration_set_end', value='}', path='', code=css, location=(0, 29)),
+        Token(name='declaration_end', value=';', path='', code=css, location=(0, 28)),
+        Token(name='declaration_set_end', value='}', path='', code=css, location=(0, 29)),
     ]
 
 
 def test_variables_reference_in_variable_declaration():
     css = "$x: $y;"
     assert list(tokenize(css, "")) == [
-        Token(name='variable_declaration_start', value='$x:', path='', code=css, location=(0, 0)),
+        Token(name='variable_name', value='$x:', path='', code=css, location=(0, 0)),
         Token(name='whitespace', value=' ', path='', code=css, location=(0, 3)),
-        Token(name='variable_ref', value='$y', path='', code=css, location=(0, 4)),
-        Token(name='variable_declaration_end', value=';', path='', code=css, location=(0, 6)),
+        Token(name='variable_value', value='$y', path='', code=css, location=(0, 4)),
+        Token(name='variable_value_end', value=';', path='', code=css, location=(0, 6)),
     ]
 
 
 def test_variable_references_in_variable_declaration_multiple():
     css = "$x: $y $z\n"
     assert list(tokenize(css, "")) == [
-        Token(name='variable_declaration_start', value='$x:', path='', code=css, location=(0, 0)),
+        Token(name='variable_name', value='$x:', path='', code=css, location=(0, 0)),
         Token(name='whitespace', value=' ', path='', code=css, location=(0, 3)),
-        Token(name='variable_ref', value='$y', path='', code=css, location=(0, 4)),
-        Token(name='whitespace', value=' ', path='', code=css, location=(0, 6)),
-        Token(name='variable_ref', value='$z', path='', code=css, location=(0, 8)),
-        Token(name='variable_declaration_end', value='\n', path='', code=css, location=(0, 10)),
+        Token(name='variable_value', value='$y $z', path='', code=css, location=(0, 4)),
+        Token(name='variable_value_end', value='\n', path='', code=css, location=(0, 10)),
     ]
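For an end-to-end look at the renamed stream, a small driver along these lines can be run against the patched modules (the expected names listed in the comment are taken from the tests above, not from fresh output):

```python
from textual.css.tokenize import tokenize

css = "$accent: #ff0000;\n.warn { text: $accent; }"
for token in tokenize(css, "<example>"):
    print(token.name, repr(token.value))
# Expect names such as: variable_name, variable_value, variable_value_end,
# selector_start_class, declaration_set_start, declaration_name,
# variable_ref, declaration_end, declaration_set_end.
```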