Mirror of https://github.com/Textualize/textual.git, synced 2025-10-17 02:38:12 +03:00
Additional tests for variables with multiple values/comments
@@ -6,7 +6,7 @@ from typing import Iterable
 
 from textual.css.tokenizer import Expect, Tokenizer, Token
 
-# Things we can match at the top-most scope in the CSS file
+# Everything we can match at the top-most scope in the CSS file
 expect_root_scope = Expect(
     whitespace=r"\s+",
     comment_start=r"\/\*",
@@ -17,6 +17,8 @@ expect_root_scope = Expect(
     variable_declaration_start=r"\$[a-zA-Z0-9_\-]+\:",
 ).expect_eof(True)
 
+# After a variable declaration e.g. "$warning-text: TOKENS;"
+#              for tokenizing variable value ------^~~~~~~^
 expect_variable_declaration_continue = Expect(
     variable_declaration_end=r"\n|;",
     whitespace=r"\s+",
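
As a quick sanity check, the variable_declaration_start pattern registered with expect_root_scope above can be exercised with plain re. The snippet is illustrative only and is not part of the commit:

import re

# Same pattern as variable_declaration_start in expect_root_scope.
VARIABLE_DECLARATION_START = re.compile(r"\$[a-zA-Z0-9_\-]+\:")

assert VARIABLE_DECLARATION_START.match("$warning-text: TOKENS;").group() == "$warning-text:"
assert VARIABLE_DECLARATION_START.match("warning-text: TOKENS;") is None  # no "$", no match
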
@@ -34,6 +36,8 @@ expect_comment_end = Expect(
     comment_end=re.escape("*/"),
 )
 
+# After we come across a selector in CSS e.g. ".my-class", we may
+# find other selectors, pseudo-classes... e.g. ".my-class :hover"
 expect_selector_continue = Expect(
     whitespace=r"\s+",
     comment_start=r"\/\*",
@@ -47,6 +51,8 @@ expect_selector_continue = Expect(
     rule_declaration_set_start=r"\{",
 )
 
+# A rule declaration e.g. "text: red;"
+#                          ^---^
 expect_rule_declaration = Expect(
     whitespace=r"\s+",
     comment_start=r"\/\*",
@@ -61,6 +67,8 @@ expect_rule_declaration_solo = Expect(
     rule_declaration_set_end=r"\}",
 ).expect_eof(True)
 
+# The value(s)/content from a rule declaration e.g. "text: red;"
+#                                                          ^---^
 expect_rule_declaration_content = Expect(
     rule_declaration_end=r"\n|;",
     whitespace=r"\s+",
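
The hunks above only add or reword comments; the Expect objects themselves are untouched. To make the mechanism those comments describe concrete, here is a minimal self-contained sketch with assumed details (the real state machine lives in textual.css.tokenizer, which this diff does not show): each Expect is a group of named regexes combined into one pattern, and certain token names switch the tokenizer to another expectation, e.g. variable_declaration_start hands over to expect_variable_declaration_continue until a newline or ";" ends the value.

import re

# Two toy "expectations": named regexes OR'd into a single pattern each.
ROOT = re.compile(
    r"(?P<whitespace>\s+)"
    r"|(?P<variable_declaration_start>\$[a-zA-Z0-9_\-]+\:)"
)
VARIABLE_VALUE = re.compile(
    r"(?P<variable_declaration_end>\n|;)"
    r"|(?P<whitespace>\s+)"
    r"|(?P<token>[a-zA-Z_\-]+)"
)

def sketch_tokenize(code):
    """Yield (name, value) pairs, switching expectations as the comments above describe."""
    expect, position = ROOT, 0
    while position < len(code):
        match = expect.match(code, position)
        if match is None:
            raise ValueError(f"unexpected character at column {position}")
        name = match.lastgroup
        yield name, match.group()
        position = match.end()
        if name == "variable_declaration_start":
            expect = VARIABLE_VALUE   # "$name:" seen, now expect the value
        elif name == "variable_declaration_end":
            expect = ROOT             # ";" or newline, back to the root scope

print(list(sketch_tokenize("$x: red;")))
# [('variable_declaration_start', '$x:'), ('whitespace', ' '),
#  ('token', 'red'), ('variable_declaration_end', ';')]

The hunks below are from the tokenizer's tests and pin this behaviour down against the real tokenize(), including multiple values and comments inside a variable declaration.
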
@@ -19,7 +19,6 @@ VALID_VARIABLE_NAMES = [
 @pytest.mark.parametrize("name", VALID_VARIABLE_NAMES)
 def test_variable_declaration_valid_names(name):
     css = f"${name}: black on red;"
-
     assert list(tokenize(css, "")) == [
         Token(
             name="variable_declaration_start", value=f"${name}:", path="", code=css, location=(0, 0)
@@ -34,9 +33,46 @@ def test_variable_declaration_valid_names(name):
     ]
 
 
+def test_variable_declaration_multiple_values():
+    css = "$x: 2vw\t4% 6s  red;"
+    assert list(tokenize(css, "")) == [
+        Token(name='variable_declaration_start', value='$x:', path='', code=css, location=(0, 0)),
+        Token(name='whitespace', value=' ', path='', code=css, location=(0, 3)),
+        Token(name='scalar', value='2vw', path='', code=css, location=(0, 4)),
+        Token(name='whitespace', value='\t', path='', code=css, location=(0, 7)),
+        Token(name='scalar', value='4%', path='', code=css, location=(0, 8)),
+        Token(name='whitespace', value=' ', path='', code=css, location=(0, 10)),
+        Token(name='duration', value='6s', path='', code=css, location=(0, 11)),
+        Token(name='whitespace', value='  ', path='', code=css, location=(0, 13)),
+        Token(name='token', value='red', path='', code=css, location=(0, 15)),
+        Token(name='variable_declaration_end', value=';', path='', code=css, location=(0, 18))
+    ]
+
+
+def test_variable_declaration_comment_not_tokenized():
+    css = "$x: red; /* comment */"
+    assert list(tokenize(css, "")) == [
+        Token(name='variable_declaration_start', value='$x:', path='', code=css, location=(0, 0)),
+        Token(name='whitespace', value=' ', path='', code=css, location=(0, 3)),
+        Token(name='token', value='red', path='', code=css, location=(0, 4)),
+        Token(name='variable_declaration_end', value=';', path='', code=css, location=(0, 7)),
+        Token(name='whitespace', value=' ', path='', code=css, location=(0, 8))
+    ]
+
+
+def test_variable_declaration_comment_interspersed_not_tokenized():
+    css = "$x: re/* comment */d;"
+    assert list(tokenize(css, "")) == [
+        Token(name='variable_declaration_start', value='$x:', path='', code=css, location=(0, 0)),
+        Token(name='whitespace', value=' ', path='', code=css, location=(0, 3)),
+        Token(name='token', value='re', path='', code=css, location=(0, 4)),
+        Token(name='token', value='d', path='', code=css, location=(0, 19)),
+        Token(name='variable_declaration_end', value=';', path='', code=css, location=(0, 20))
+    ]
+
+
 def test_variable_declaration_no_semicolon():
     css = "$x: 1\n$y: 2"
 
     assert list(tokenize(css, "")) == [
         Token(name="variable_declaration_start", value="$x:", code=css, path="", location=(0, 0)),
         Token(name="whitespace", value=" ", code=css, path="", location=(0, 3)),
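
For anyone poking at these tests locally, the token stream that test_variable_declaration_multiple_values asserts against can be printed directly. The import path below is assumed, and Token is assumed to expose its fields as attributes (its keyword constructor above suggests a named tuple); the snippet is purely illustrative:

from textual.css.tokenize import tokenize  # assumed import path

for token in tokenize("$x: 2vw\t4% 6s  red;", ""):
    print(token.name, repr(token.value), token.location)
# variable_declaration_start '$x:' (0, 0)
# whitespace ' ' (0, 3)
# scalar '2vw' (0, 4)
# ...and so on through variable_declaration_end ';' (0, 18)
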
@@ -57,7 +93,6 @@ def test_variable_declaration_invalid_value():
 def test_variables_declarations_amongst_rulesets():
     css = "$x:1; .thing{text:red;} $y:2;"
     tokens = list(tokenize(css, ""))
-
     assert tokens == [
         Token(name='variable_declaration_start', value='$x:', path='', code=css, location=(0, 0)),
         Token(name='number', value='1', path='', code=css, location=(0, 3)),
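
As a closing illustration of what these token names enable, a consumer could walk the stream and collect each variable's raw value text. Everything below is a hypothetical helper, not part of the commit; the import path is again assumed, and the behaviour it relies on (variable tokens interleaved with ruleset tokens) is exactly what test_variables_declarations_amongst_rulesets exercises:

from textual.css.tokenize import tokenize  # assumed import path

def variable_values(css):
    """Map each variable name to the raw text of its value (illustrative only)."""
    values = {}
    name = None
    parts = []
    for token in tokenize(css, ""):
        if token.name == "variable_declaration_start":
            name, parts = token.value[1:-1], []  # strip the leading "$" and trailing ":"
        elif token.name == "variable_declaration_end":
            values[name] = "".join(parts).strip()
            name = None
        elif name is not None:
            parts.append(token.value)
    return values

print(variable_values("$x:1; .thing{text:red;} $y:2;"))  # {'x': '1', 'y': '2'}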