diff --git a/src/textual/css/tokenize.py b/src/textual/css/tokenize.py index 74ac16f61..9895b8430 100644 --- a/src/textual/css/tokenize.py +++ b/src/textual/css/tokenize.py @@ -6,17 +6,18 @@ from typing import Iterable from textual.css.tokenizer import Expect, Tokenizer, Token -STRING = r"\".*?\"" -TOKEN = "[a-zA-Z_-]+" -KEY_VALUE = r"[a-zA-Z_-][a-zA-Z0-9_-]*=[0-9a-zA-Z_\-\/]+" -COLOR = r"\#[0-9a-fA-F]{6}|color\([0-9]{1,3}\)|rgb\(\d{1,3}\,\s?\d{1,3}\,\s?\d{1,3}\)" -NUMBER = r"\-?\d+\.?\d*" -DURATION = r"\d+\.?\d*(?:ms|s)" -SCALAR = r"\-?\d+\.?\d*(?:fr|%|w|h|vw|vh)" COMMENT_START = r"\/\*" +SCALAR = r"\-?\d+\.?\d*(?:fr|%|w|h|vw|vh)" +DURATION = r"\d+\.?\d*(?:ms|s)" +NUMBER = r"\-?\d+\.?\d*" +COLOR = r"\#[0-9a-fA-F]{6}|color\([0-9]{1,3}\)|rgb\(\d{1,3}\,\s?\d{1,3}\,\s?\d{1,3}\)" +KEY_VALUE = r"[a-zA-Z_-][a-zA-Z0-9_-]*=[0-9a-zA-Z_\-\/]+" +TOKEN = "[a-zA-Z_-]+" +STRING = r"\".*?\"" +VARIABLE_REF = r"\$[a-zA-Z0-9_\-]+" # Values permitted in variable and rule declarations. -DECLARATION_CONTENT = { +DECLARATION_VALUES = { "scalar": SCALAR, "duration": DURATION, "number": NUMBER, @@ -24,9 +25,10 @@ DECLARATION_CONTENT = { "key_value": KEY_VALUE, "token": TOKEN, "string": STRING, + "variable_ref": VARIABLE_REF, } -# The tokenisers "expectation" while at the root/highest level of scope +# The tokenizer's "expectation" while at the root/highest level of scope # in the CSS file. At this level we might expect to see selectors, comments, # variable definitions etc. expect_root_scope = Expect( @@ -36,7 +38,7 @@ expect_root_scope = Expect( selector_start_class=r"\.[a-zA-Z_\-][a-zA-Z0-9_\-]*", selector_start_universal=r"\*", selector_start=r"[a-zA-Z_\-]+", - variable_declaration_start=r"\$[a-zA-Z0-9_\-]+\:", + variable_declaration_start=rf"{VARIABLE_REF}:", ).expect_eof(True) # After a variable declaration e.g. 
"$warning-text: TOKENS;" @@ -45,7 +47,7 @@ expect_variable_declaration_continue = Expect( variable_declaration_end=r"\n|;", whitespace=r"\s+", comment_start=COMMENT_START, - **DECLARATION_CONTENT, + **DECLARATION_VALUES, ).expect_eof(True) expect_comment_end = Expect( @@ -89,7 +91,7 @@ expect_rule_declaration_content = Expect( rule_declaration_end=r"\n|;", whitespace=r"\s+", comment_start=COMMENT_START, - **DECLARATION_CONTENT, + **DECLARATION_VALUES, important=r"\!important", comma=",", rule_declaration_set_end=r"\}", diff --git a/tests/css/test_tokenize.py b/tests/css/test_tokenize.py index 00b490a6c..8ef78f64c 100644 --- a/tests/css/test_tokenize.py +++ b/tests/css/test_tokenize.py @@ -45,7 +45,7 @@ def test_variable_declaration_multiple_values(): Token(name='duration', value='6s', path='', code=css, location=(0, 11)), Token(name='whitespace', value=' ', path='', code=css, location=(0, 13)), Token(name='token', value='red', path='', code=css, location=(0, 15)), - Token(name='variable_declaration_end', value=';', path='', code=css, location=(0, 18)) + Token(name='variable_declaration_end', value=';', path='', code=css, location=(0, 18)), ] @@ -56,7 +56,7 @@ def test_variable_declaration_comment_ignored(): Token(name='whitespace', value=' ', path='', code=css, location=(0, 3)), Token(name='token', value='red', path='', code=css, location=(0, 4)), Token(name='variable_declaration_end', value=';', path='', code=css, location=(0, 7)), - Token(name='whitespace', value=' ', path='', code=css, location=(0, 8)) + Token(name='whitespace', value=' ', path='', code=css, location=(0, 8)), ] @@ -67,7 +67,7 @@ def test_variable_declaration_comment_interspersed_ignored(): Token(name='whitespace', value=' ', path='', code=css, location=(0, 3)), Token(name='token', value='re', path='', code=css, location=(0, 4)), Token(name='token', value='d', path='', code=css, location=(0, 19)), - Token(name='variable_declaration_end', value=';', path='', code=css, location=(0, 20)) + 
Token(name='variable_declaration_end', value=';', path='', code=css, location=(0, 20)), ] @@ -92,8 +92,7 @@ def test_variable_declaration_invalid_value(): def test_variables_declarations_amongst_rulesets(): css = "$x:1; .thing{text:red;} $y:2;" - tokens = list(tokenize(css, "")) - assert tokens == [ + assert list(tokenize(css, "")) == [ Token(name='variable_declaration_start', value='$x:', path='', code=css, location=(0, 0)), Token(name='number', value='1', path='', code=css, location=(0, 3)), Token(name='variable_declaration_end', value=';', path='', code=css, location=(0, 4)), @@ -109,3 +108,53 @@ def test_variables_declarations_amongst_rulesets(): Token(name='number', value='2', path='', code=css, location=(0, 27)), Token(name='variable_declaration_end', value=';', path='', code=css, location=(0, 28)), ] + + +def test_variables_reference_in_rule_declaration_value(): + css = ".warn{text: $warning;}" + assert list(tokenize(css, "")) == [ + Token(name='selector_start_class', value='.warn', path='', code=css, location=(0, 0)), + Token(name='rule_declaration_set_start', value='{', path='', code=css, location=(0, 5)), + Token(name='rule_declaration_name', value='text:', path='', code=css, location=(0, 6)), + Token(name='whitespace', value=' ', path='', code=css, location=(0, 11)), + Token(name='variable_ref', value='$warning', path='', code=css, location=(0, 12)), + Token(name='rule_declaration_end', value=';', path='', code=css, location=(0, 20)), + Token(name='rule_declaration_set_end', value='}', path='', code=css, location=(0, 21)), + ] + + +def test_variables_reference_in_rule_declaration_value_multiple(): + css = ".card{padding: $pad-y $pad-x;}" + assert list(tokenize(css, "")) == [ + Token(name='selector_start_class', value='.card', path='', code=css, location=(0, 0)), + Token(name='rule_declaration_set_start', value='{', path='', code=css, location=(0, 5)), + Token(name='rule_declaration_name', value='padding:', path='', code=css, location=(0, 6)), + 
Token(name='whitespace', value=' ', path='', code=css, location=(0, 14)), + Token(name='variable_ref', value='$pad-y', path='', code=css, location=(0, 15)), + Token(name='whitespace', value=' ', path='', code=css, location=(0, 21)), + Token(name='variable_ref', value='$pad-x', path='', code=css, location=(0, 22)), + Token(name='rule_declaration_end', value=';', path='', code=css, location=(0, 28)), + Token(name='rule_declaration_set_end', value='}', path='', code=css, location=(0, 29)), + ] + + +def test_variables_reference_in_variable_declaration(): + css = "$x: $y;" + assert list(tokenize(css, "")) == [ + Token(name='variable_declaration_start', value='$x:', path='', code=css, location=(0, 0)), + Token(name='whitespace', value=' ', path='', code=css, location=(0, 3)), + Token(name='variable_ref', value='$y', path='', code=css, location=(0, 4)), + Token(name='variable_declaration_end', value=';', path='', code=css, location=(0, 6)), + ] + + +def test_variable_references_in_variable_declaration_multiple(): + css = "$x: $y $z\n" + assert list(tokenize(css, "")) == [ + Token(name='variable_declaration_start', value='$x:', path='', code=css, location=(0, 0)), + Token(name='whitespace', value=' ', path='', code=css, location=(0, 3)), + Token(name='variable_ref', value='$y', path='', code=css, location=(0, 4)), + Token(name='whitespace', value=' ', path='', code=css, location=(0, 6)), + Token(name='variable_ref', value='$z', path='', code=css, location=(0, 7)), + Token(name='variable_declaration_end', value='\n', path='', code=css, location=(0, 9)), + ]