Parsing variable values as individual tokens

Darren Burns
2022-02-02 10:49:09 +00:00
parent 19b835b8a1
commit 6587ba257f
4 changed files with 90 additions and 122 deletions

View File

@@ -1,5 +1,8 @@
 from __future__ import annotations
+import itertools
+from collections import defaultdict
 from rich import print
 from functools import lru_cache
@@ -205,51 +208,34 @@ def parse_declarations(css: str, path: str) -> Styles:
     return styles_builder.styles
 
+# def _resolve_variables(tokens: Iterator[Token]):
+#     # First pass to collect variable declarations
+#     variables: dict[str, list[Token]] = defaultdict(list)
+#     while True:
+#         token = next(tokens, None)
+#         if token is None:
+#             break
+#         if token.name == "variable_name":
+#             variable_name = token.value[1:-1]  # Trim the $ and the :, i.e. "$x:" -> "x"
+#             # At this point, we need to tokenize the variable value, as when we pass
+#             # the Declarations to the style builder, types must be known (e.g. Scalar vs Duration)
+#             variables[variable_name] =
+
 def parse(css: str, path: str) -> Iterable[RuleSet]:
-    tokens = iter(tokenize(css, path))
+    # Make two iterators over the same tokens
+    tokens1, tokens2 = itertools.tee(iter(tokenize(css, path)))
+    # First pass in order to resolve variables
+    # variables = _resolve_variables(tokens1)
+    # Parsing rulesets
     while True:
-        token = next(tokens, None)
+        token = next(tokens2, None)
         if token is None:
             break
         if token.name.startswith("selector_start"):
-            yield from parse_rule_set(tokens, token)
+            yield from parse_rule_set(tokens2, token)
 
-# if __name__ == "__main__":
-#     test = """
-#     App View {
-#         text: red;
-#     }
-#     .foo.bar baz:focus, #egg .foo.baz {
-#         /* ignore me, I'm a comment */
-#         display: block;
-#         visibility: visible;
-#         border: solid green !important;
-#         outline: red;
-#         padding: 1 2;
-#         margin: 5;
-#         text: bold red on magenta
-#         text-color: green;
-#         text-background: white
-#         docks: foo bar bar
-#         dock-group: foo
-#         dock-edge: top
-#         offset-x: 4
-#         offset-y: 5
-#     }"""
-#     from .stylesheet import Stylesheet
-#     print(test)
-#     print()
-#     stylesheet = Stylesheet()
-#     stylesheet.parse(test)
-#     print(stylesheet)
-#     print()
-#     print(stylesheet.css)
 
 if __name__ == "__main__":
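
The commented-out _resolve_variables above breaks off mid-assignment. As a rough sketch only (the resolve_variables_sketch name and everything inside it are hypothetical, not part of this commit), the first pass it hints at could collect the typed value tokens emitted between a "variable_name" token and its "variable_value_end", keyed by variable name:

from __future__ import annotations

from collections import defaultdict
from typing import Iterator

from textual.css.tokenizer import Token


def resolve_variables_sketch(tokens: Iterator[Token]) -> dict[str, list[Token]]:
    # Hypothetical sketch, not part of this commit.
    variables: dict[str, list[Token]] = defaultdict(list)
    current_name: str | None = None
    for token in tokens:
        if token.name == "variable_name":
            current_name = token.value[1:-1]  # "$x:" -> "x"
        elif token.name == "variable_value_end":
            current_name = None
        elif current_name is not None and token.name != "whitespace":
            # Keep the typed value tokens (scalar, duration, number, token, ...)
            variables[current_name].append(token)
    return dict(variables)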

View File

@@ -14,9 +14,9 @@ COLOR = r"\#[0-9a-fA-F]{6}|color\([0-9]{1,3}\)|rgb\(\d{1,3}\,\s?\d{1,3}\,\s?\d{1
 KEY_VALUE = r"[a-zA-Z_-][a-zA-Z0-9_-]*=[0-9a-zA-Z_\-\/]+"
 TOKEN = "[a-zA-Z_-]+"
 STRING = r"\".*?\""
-VARIABLE_REF = r"\$[a-zA-Z0-9_-]+"
+VARIABLE_REF = r"\$[a-zA-Z0-9_\-]+"
 
-# Values permitted in declarations.
+# Values permitted in variable and rule declarations.
 DECLARATION_VALUES = {
     "scalar": SCALAR,
     "duration": DURATION,
@@ -38,19 +38,16 @@ expect_root_scope = Expect(
     selector_start_class=r"\.[a-zA-Z_\-][a-zA-Z0-9_\-]*",
     selector_start_universal=r"\*",
     selector_start=r"[a-zA-Z_\-]+",
-    variable_name=f"{VARIABLE_REF}:",
+    variable_name=rf"{VARIABLE_REF}:",
 ).expect_eof(True)
 
 # After a variable declaration e.g. "$warning-text: TOKENS;"
 # for tokenizing variable value ------^~~~~~~^
-expect_variable_value = Expect(
-    comment_start=COMMENT_START,
-    whitespace=r"\s+",
-    variable_value=rf"[^;\n{COMMENT_START}]+",
-)
-
-expect_variable_value_end = Expect(
+expect_variable_name_continue = Expect(
     variable_value_end=r"\n|;",
+    whitespace=r"\s+",
+    comment_start=COMMENT_START,
+    **DECLARATION_VALUES,
 ).expect_eof(True)
 
 expect_comment_end = Expect(
@@ -72,8 +69,8 @@ expect_selector_continue = Expect(
     declaration_set_start=r"\{",
 )
 
-# A declaration e.g. "text: red;"
+# A rule declaration e.g. "text: red;"
 # ^---^
 expect_declaration = Expect(
     whitespace=r"\s+",
     comment_start=COMMENT_START,
@@ -88,8 +85,8 @@ expect_declaration_solo = Expect(
     declaration_set_end=r"\}",
 ).expect_eof(True)
 
-# The value(s)/content from a declaration e.g. "text: red;"
+# The value(s)/content from a rule declaration e.g. "text: red;"
 # ^---^
 expect_declaration_content = Expect(
     declaration_end=r"\n|;",
     whitespace=r"\s+",
@@ -115,8 +112,7 @@ class TokenizerState:
     EXPECT = expect_root_scope
     STATE_MAP = {
-        "variable_name": expect_variable_value,
-        "variable_value": expect_variable_value_end,
+        "variable_name": expect_variable_name_continue,
         "variable_value_end": expect_root_scope,
         "selector_start": expect_selector_continue,
         "selector_start_id": expect_selector_continue,

View File

@@ -1,12 +1,21 @@
 import pytest
 from rich.color import Color, ColorType
 
+# from textual.css.parse import _resolve_variables
 from textual.css.scalar import Scalar, Unit
 from textual.css.stylesheet import Stylesheet, StylesheetParseError
+from textual.css.tokenize import tokenize
 from textual.css.transition import Transition
 from textual.layouts.dock import DockLayout
 
 
+# class TestVariableResolution:
+#     def test_resolve_single_variable(self):
+#         css = "$x: 1;"
+#         variables = _resolve_variables(tokenize(css, ""))
+#         assert variables == {"x": }
+
+
 class TestParseLayout:
     def test_valid_layout_name(self):
         css = "#some-widget { layout: dock; }"

View File

@@ -1,6 +1,5 @@
 import pytest
 
-import textual.css.tokenizer
 from textual.css.tokenize import tokenize
 from textual.css.tokenizer import Token, TokenizeError
@@ -25,32 +24,51 @@ def test_variable_declaration_valid_names(name):
name="variable_name", value=f"${name}:", path="", code=css, location=(0, 0) name="variable_name", value=f"${name}:", path="", code=css, location=(0, 0)
), ),
Token(name="whitespace", value=" ", path="", code=css, location=(0, 14)), Token(name="whitespace", value=" ", path="", code=css, location=(0, 14)),
Token(name="variable_value", value="black on red", path="", code=css, location=(0, 15)), Token(name="token", value="black", path="", code=css, location=(0, 15)),
Token(name="whitespace", value=" ", path="", code=css, location=(0, 20)),
Token(name="token", value="on", path="", code=css, location=(0, 21)),
Token(name="whitespace", value=" ", path="", code=css, location=(0, 23)),
Token(name="token", value="red", path="", code=css, location=(0, 24)),
Token(name="variable_value_end", value=";", path="", code=css, location=(0, 27)), Token(name="variable_value_end", value=";", path="", code=css, location=(0, 27)),
] ]
def test_variable_declaration_multiple_values():
css = "$x: 2vw\t4% 6s red;"
assert list(tokenize(css, "")) == [
Token(name='variable_name', value='$x:', path='', code=css, location=(0, 0)),
Token(name='whitespace', value=' ', path='', code=css, location=(0, 3)),
Token(name='scalar', value='2vw', path='', code=css, location=(0, 4)),
Token(name='whitespace', value='\t', path='', code=css, location=(0, 7)),
Token(name='scalar', value='4%', path='', code=css, location=(0, 8)),
Token(name='whitespace', value=' ', path='', code=css, location=(0, 10)),
Token(name='duration', value='6s', path='', code=css, location=(0, 11)),
Token(name='whitespace', value=' ', path='', code=css, location=(0, 13)),
Token(name='token', value='red', path='', code=css, location=(0, 15)),
Token(name='variable_value_end', value=';', path='', code=css, location=(0, 18))
]
def test_variable_declaration_comment_ignored(): def test_variable_declaration_comment_ignored():
css = "$x: red; /* comment */" css = "$x: red; /* comment */"
assert list(tokenize(css, "")) == [ assert list(tokenize(css, "")) == [
Token(name='variable_name', value='$x:', path='', code=css, location=(0, 0)), Token(name='variable_name', value='$x:', path='', code=css, location=(0, 0)),
Token(name='whitespace', value=' ', path='', code=css, location=(0, 3)), Token(name='whitespace', value=' ', path='', code=css, location=(0, 3)),
Token(name='variable_value', value='red', path='', code=css, location=(0, 4)), Token(name='token', value='red', path='', code=css, location=(0, 4)),
Token(name='variable_value_end', value=';', path='', code=css, location=(0, 7)), Token(name='variable_value_end', value=';', path='', code=css, location=(0, 7)),
Token(name='whitespace', value=' ', path='', code=css, location=(0, 8)), Token(name='whitespace', value=' ', path='', code=css, location=(0, 8))
] ]
def test_variable_declaration_comment_interspersed_raises(): def test_variable_declaration_comment_interspersed_ignored():
css = "$x: re/* comment */d;" css = "$x: re/* comment */d;"
with pytest.raises(TokenizeError): assert list(tokenize(css, "")) == [
assert list(tokenize(css, "")) Token(name='variable_name', value='$x:', path='', code=css, location=(0, 0)),
Token(name='whitespace', value=' ', path='', code=css, location=(0, 3)),
Token(name='token', value='re', path='', code=css, location=(0, 4)),
def test_variable_declaration_invalid_value_eof(): Token(name='token', value='d', path='', code=css, location=(0, 19)),
css = "$x:\n" Token(name='variable_value_end', value=';', path='', code=css, location=(0, 20))
with pytest.raises(textual.css.tokenizer.EOFError): ]
list(tokenize(css, ""))
def test_variable_declaration_no_semicolon(): def test_variable_declaration_no_semicolon():
@@ -58,19 +76,26 @@ def test_variable_declaration_no_semicolon():
     assert list(tokenize(css, "")) == [
         Token(name="variable_name", value="$x:", code=css, path="", location=(0, 0)),
         Token(name="whitespace", value=" ", code=css, path="", location=(0, 3)),
-        Token(name="variable_value", value="1", code=css, path="", location=(0, 4)),
+        Token(name="number", value="1", code=css, path="", location=(0, 4)),
         Token(name="variable_value_end", value="\n", code=css, path="", location=(0, 5)),
         Token(name="variable_name", value="$y:", code=css, path="", location=(1, 0)),
         Token(name="whitespace", value=" ", code=css, path="", location=(1, 3)),
-        Token(name="variable_value", value="2", code=css, path="", location=(1, 4)),
+        Token(name="number", value="2", code=css, path="", location=(1, 4)),
     ]
 
 
+def test_variable_declaration_invalid_value():
+    css = "$x:(@$12x)"
+    with pytest.raises(TokenizeError):
+        list(tokenize(css, ""))
+
+
 def test_variables_declarations_amongst_rulesets():
     css = "$x:1; .thing{text:red;} $y:2;"
-    assert list(tokenize(css, "")) == [
+    tokens = list(tokenize(css, ""))
+    assert tokens == [
         Token(name='variable_name', value='$x:', path='', code=css, location=(0, 0)),
-        Token(name='variable_value', value='1', path='', code=css, location=(0, 3)),
+        Token(name='number', value='1', path='', code=css, location=(0, 3)),
         Token(name='variable_value_end', value=';', path='', code=css, location=(0, 4)),
         Token(name='whitespace', value=' ', path='', code=css, location=(0, 5)),
         Token(name='selector_start_class', value='.thing', path='', code=css, location=(0, 6)),
@@ -81,54 +106,6 @@ def test_variables_declarations_amongst_rulesets():
         Token(name='declaration_set_end', value='}', path='', code=css, location=(0, 22)),
         Token(name='whitespace', value=' ', path='', code=css, location=(0, 23)),
         Token(name='variable_name', value='$y:', path='', code=css, location=(0, 24)),
-        Token(name='variable_value', value='2', path='', code=css, location=(0, 27)),
+        Token(name='number', value='2', path='', code=css, location=(0, 27)),
         Token(name='variable_value_end', value=';', path='', code=css, location=(0, 28)),
     ]
-
-
-def test_variables_reference_in_rule_declaration_value():
-    css = ".warn{text: $warning;}"
-    assert list(tokenize(css, "")) == [
-        Token(name='selector_start_class', value='.warn', path='', code=css, location=(0, 0)),
-        Token(name='declaration_set_start', value='{', path='', code=css, location=(0, 5)),
-        Token(name='declaration_name', value='text:', path='', code=css, location=(0, 6)),
-        Token(name='whitespace', value=' ', path='', code=css, location=(0, 11)),
-        Token(name='variable_ref', value='$warning', path='', code=css, location=(0, 12)),
-        Token(name='declaration_end', value=';', path='', code=css, location=(0, 20)),
-        Token(name='declaration_set_end', value='}', path='', code=css, location=(0, 21)),
-    ]
-
-
-def test_variables_reference_in_rule_declaration_value_multiple():
-    css = ".card{padding: $pad-y $pad-x;}"
-    assert list(tokenize(css, "")) == [
-        Token(name='selector_start_class', value='.card', path='', code=css, location=(0, 0)),
-        Token(name='declaration_set_start', value='{', path='', code=css, location=(0, 5)),
-        Token(name='declaration_name', value='padding:', path='', code=css, location=(0, 6)),
-        Token(name='whitespace', value=' ', path='', code=css, location=(0, 14)),
-        Token(name='variable_ref', value='$pad-y', path='', code=css, location=(0, 15)),
-        Token(name='whitespace', value=' ', path='', code=css, location=(0, 21)),
-        Token(name='variable_ref', value='$pad-x', path='', code=css, location=(0, 22)),
-        Token(name='declaration_end', value=';', path='', code=css, location=(0, 28)),
-        Token(name='declaration_set_end', value='}', path='', code=css, location=(0, 29)),
-    ]
-
-
-def test_variables_reference_in_variable_declaration():
-    css = "$x: $y;"
-    assert list(tokenize(css, "")) == [
-        Token(name='variable_name', value='$x:', path='', code=css, location=(0, 0)),
-        Token(name='whitespace', value=' ', path='', code=css, location=(0, 3)),
-        Token(name='variable_value', value='$y', path='', code=css, location=(0, 4)),
-        Token(name='variable_value_end', value=';', path='', code=css, location=(0, 6)),
-    ]
-
-
-def test_variable_references_in_variable_declaration_multiple():
-    css = "$x: $y $z\n"
-    assert list(tokenize(css, "")) == [
-        Token(name='variable_name', value='$x:', path='', code=css, location=(0, 0)),
-        Token(name='whitespace', value=' ', path='', code=css, location=(0, 3)),
-        Token(name='variable_value', value='$y $z', path='', code=css, location=(0, 4)),
-        Token(name='variable_value_end', value='\n', path='', code=css, location=(0, 10)),
-    ]