From 67af34bf8afd8235fdbfae1b8d6a4b0bbd01368f Mon Sep 17 00:00:00 2001
From: Will McGugan
Date: Fri, 1 Oct 2021 21:34:02 +0100
Subject: [PATCH] css rules

---
 src/textual/css/_error_tools.py |  12 +++
 src/textual/css/class.css      |   3 -
 src/textual/css/example.css    |  26 +++++++
 src/textual/css/id.css         |   3 -
 src/textual/css/model.py       |  23 ++++--
 src/textual/css/parse.py       |  24 +++---
 src/textual/css/rules.py       |   0
 src/textual/css/simple1.css    |   5 --
 src/textual/css/styles.py      | 134 ++++++++++++++++++++++++++++++++
 src/textual/css/tokenize.py    |  11 ++-
 src/textual/css/tokenizer.py   |  17 ++--
 src/textual/css/types.py       |  11 +++
 12 files changed, 228 insertions(+), 41 deletions(-)
 create mode 100644 src/textual/css/_error_tools.py
 delete mode 100644 src/textual/css/class.css
 create mode 100644 src/textual/css/example.css
 delete mode 100644 src/textual/css/id.css
 delete mode 100644 src/textual/css/rules.py
 delete mode 100644 src/textual/css/simple1.css
 create mode 100644 src/textual/css/styles.py
 create mode 100644 src/textual/css/types.py

diff --git a/src/textual/css/_error_tools.py b/src/textual/css/_error_tools.py
new file mode 100644
index 000000000..65c8c358c
--- /dev/null
+++ b/src/textual/css/_error_tools.py
@@ -0,0 +1,12 @@
+from __future__ import annotations
+
+from typing import Iterable
+
+
+def friendly_list(words: Iterable[str], joiner: str = "or") -> str:
+
+    words = [repr(word) for word in sorted(words, key=str.lower)]
+    if len(words) == 1:
+        return words[0]
+    else:
+        return f'{", ".join(words[:-1])} {joiner} {words[-1]}'
diff --git a/src/textual/css/class.css b/src/textual/css/class.css
deleted file mode 100644
index bfeb9be64..000000000
--- a/src/textual/css/class.css
+++ /dev/null
@@ -1,3 +0,0 @@
-.foo {
-    color: blue
-}
diff --git a/src/textual/css/example.css b/src/textual/css/example.css
new file mode 100644
index 000000000..7dddace27
--- /dev/null
+++ b/src/textual/css/example.css
@@ -0,0 +1,26 @@
+
+view.buttons {
+    layout: dock
+}
+
+.buttons .button {
+    dock: top
+    size: 10px
+}
+
+
+#widget {
+    visibility: visible;
+    text-style: bold;
+
+    border: dashed !important;
+    border-top: dashed green;
+    border-bottom: red;
+
+    padding: 1 0;
+    margin: 1;
+
+    layout: dock
+    dock-edge: left;
+
+}
diff --git a/src/textual/css/id.css b/src/textual/css/id.css
deleted file mode 100644
index 90c34c64c..000000000
--- a/src/textual/css/id.css
+++ /dev/null
@@ -1,3 +0,0 @@
-#foo {
-    color: red
-}
diff --git a/src/textual/css/model.py b/src/textual/css/model.py
index 3ba6ddbcf..ea8d44391 100644
--- a/src/textual/css/model.py
+++ b/src/textual/css/model.py
@@ -1,7 +1,13 @@
 from __future__ import annotations
 
+from rich import print
+
 from dataclasses import dataclass, field
 from enum import Enum
+from typing import Any
+
+from .styles import Styles
+from .tokenize import Token
 
 
 class SelectorType(Enum):
@@ -23,12 +29,6 @@ class Location:
     column: tuple[int, int]
 
 
-@dataclass
-class RuleSet:
-    selectors: list[list[Selector]] = field(default_factory=list)
-    declarations: list[Declaration] = field(default_factory=list)
-
-
 @dataclass
 class Selector:
     name: str
@@ -40,4 +40,13 @@ class Selector:
 @dataclass
 class Declaration:
     name: str
-    tokens: list[tuple[str, str]] = field(default_factory=list)
+    tokens: list[Token] = field(default_factory=list)
+
+    def process(self):
+        raise NotImplementedError
+
+
+@dataclass
+class RuleSet:
+    selectors: list[list[Selector]] = field(default_factory=list)
+    styles: Styles = field(default_factory=Styles)
diff --git a/src/textual/css/parse.py b/src/textual/css/parse.py
index 5a8235dad..8d824bb75 100644
--- a/src/textual/css/parse.py
+++ b/src/textual/css/parse.py
@@ -65,17 +65,18 @@ def parse_rule_set(tokens: Iterator[Token], token: Token) -> Iterable[RuleSet]:
             continue
         if token_name == "declaration_name":
             if declaration.tokens:
-                rule_set.declarations.append(declaration)
+                rule_set.styles.add_declaration(declaration)
             declaration = Declaration("")
             declaration.name = token.value.rstrip(":")
         elif token_name == "declaration_set_end":
             break
         else:
-            declaration.tokens.append((token_name, token.value))
+            declaration.tokens.append(token)
 
     if declaration.tokens:
-        rule_set.declarations.append(declaration)
+        rule_set.styles.add_declaration(declaration)
 
+    print(rule_set)
     yield rule_set
 
 
@@ -83,9 +84,9 @@ def parse(css: str) -> Iterable[RuleSet]:
     tokens = iter(tokenize(css))
 
     while True:
-
-        token = next(tokens)
-
+        token = next(tokens, None)
+        if token is None:
+            break
        if token.name.startswith("selector_start_"):
            yield from parse_rule_set(tokens, token)
 
@@ -93,14 +94,9 @@ if __name__ == "__main__":
     test = """
 .foo.bar baz:focus, #egg {
-    color: rgb(10,20,30);
-    border: bold green;
-}
-
-* {
+    visibility: visible;
+    border: solid green !important;
     outline: red
-}
-
-    """
+}"""
     for obj in parse(test):
         print(obj)
 
diff --git a/src/textual/css/rules.py b/src/textual/css/rules.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/src/textual/css/simple1.css b/src/textual/css/simple1.css
deleted file mode 100644
index 6d0774101..000000000
--- a/src/textual/css/simple1.css
+++ /dev/null
@@ -1,5 +0,0 @@
-#foo .bar {
-    border: double red;
-    color: blue;
-    text: italic white on black;
-}
diff --git a/src/textual/css/styles.py b/src/textual/css/styles.py
new file mode 100644
index 000000000..0b8eafdbf
--- /dev/null
+++ b/src/textual/css/styles.py
@@ -0,0 +1,134 @@
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from typing import cast, TYPE_CHECKING
+
+from rich import print
+from rich.color import ANSI_COLOR_NAMES, Color
+
+from ._error_tools import friendly_list
+from .tokenize import Token
+from .types import Visibility
+
+if TYPE_CHECKING:
+    from .model import Declaration
+
+
+class DeclarationError(Exception):
+    def __init__(self, name: str, token: Token, message: str) -> None:
+        self.token = token
+        super().__init__(message)
+
+
+VALID_VISIBILITY = {"visible", "hidden"}
+VALID_BORDER = {"rounded", "solid", "double", "dashed", "heavy", "inner", "outer"}
+
+
+@dataclass
+class Styles:
+
+    visibility: Visibility | None = None
+
+    border_top: tuple[str, Color] | None = None
+    border_right: tuple[str, Color] | None = None
+    border_bottom: tuple[str, Color] | None = None
+    border_left: tuple[str, Color] | None = None
+
+    outline_top: tuple[str, Color] | None = None
+    outline_right: tuple[str, Color] | None = None
+    outline_bottom: tuple[str, Color] | None = None
+    outline_left: tuple[str, Color] | None = None
+
+    important: set[str] = field(default_factory=set)
+
+    def error(self, name: str, token: Token, msg: str) -> None:
+        raise DeclarationError(name, token, msg)
+
+    def add_declaration(self, declaration: Declaration) -> None:
+
+        print(declaration)
+        if not declaration.tokens:
+            return
+        process_method = getattr(self, f"process_{declaration.name.replace('-', '_')}")
+        tokens = declaration.tokens
+        if tokens[-1].name == "important":
+            tokens = tokens[:-1]
+            self.important.add(declaration.name)
+        if process_method is not None:
+            process_method(declaration.name, tokens)
+
+    def _parse_border(self, tokens: list[Token]) -> tuple[str, Color]:
+        color = Color.default()
+        border_type = "solid"
+        for token in tokens:
+            location, name, value = token
+            if name == "token":
+                if value in ANSI_COLOR_NAMES:
+                    color = Color.parse(value)
+                elif value in VALID_BORDER:
+                    border_type = value
+                else:
+                    self.error(name, token, f"unknown token {value!r} in declaration")
+            elif name == "color":
+                color = Color.parse(value)
+            else:
+                self.error(name, token, f"unexpected token {value!r} in declaration")
+        return (border_type, color)
+
+    def _process_border(self, edge: str, name: str, tokens: list[Token]) -> None:
+        border = self._parse_border(tokens)
+        setattr(self, f"border_{edge}", border)
+
+    def process_border(self, name: str, tokens: list[Token]) -> None:
+        border = self._parse_border(tokens)
+        self.border_top = self.border_right = border
+        self.border_bottom = self.border_left = border
+
+    def process_border_top(self, name: str, tokens: list[Token]) -> None:
+        self._process_border("top", name, tokens)
+
+    def process_border_right(self, name: str, tokens: list[Token]) -> None:
+        self._process_border("right", name, tokens)
+
+    def process_border_bottom(self, name: str, tokens: list[Token]) -> None:
+        self._process_border("bottom", name, tokens)
+
+    def process_border_left(self, name: str, tokens: list[Token]) -> None:
+        self._process_border("left", name, tokens)
+
+    def _process_outline(self, edge: str, name: str, tokens: list[Token]) -> None:
+        border = self._parse_border(tokens)
+        setattr(self, f"outline_{edge}", border)
+
+    def process_outline(self, name: str, tokens: list[Token]) -> None:
+        border = self._parse_border(tokens)
+        self.outline_top = self.outline_right = border
+        self.outline_bottom = self.outline_left = border
+
+    def process_outline_top(self, name: str, tokens: list[Token]) -> None:
+        self._process_outline("top", name, tokens)
+
+    def process_parse_border_right(self, name: str, tokens: list[Token]) -> None:
+        self._process_outline("right", name, tokens)
+
+    def process_outline_bottom(self, name: str, tokens: list[Token]) -> None:
+        self._process_outline("bottom", name, tokens)
+
+    def process_outline_left(self, name: str, tokens: list[Token]) -> None:
+        self._process_outline("left", name, tokens)
+
+    def process_visibility(self, name: str, tokens: list[Token]) -> None:
+        for token in tokens:
+            location, name, value = token
+            if name == "token":
+                value = value.lower()
+                if value in VALID_VISIBILITY:
+                    self.visibility = cast(Visibility, value)
+                else:
+                    self.error(
+                        name,
+                        token,
+                        f"invalid value for visibility (received {value!r}, expected {friendly_list(VALID_VISIBILITY)})",
+                    )
+            else:
+                self.error(name, token, f"invalid token {value!r} in this context")
diff --git a/src/textual/css/tokenize.py b/src/textual/css/tokenize.py
index 79cef60f8..8b06521ef 100644
--- a/src/textual/css/tokenize.py
+++ b/src/textual/css/tokenize.py
@@ -14,7 +14,7 @@ expect_selector = Expect(
     selector_start_class=r"\.[a-zA-Z_\-]+",
     selector_start_universal=r"\*",
     selector_start=r"[a-zA-Z_\-]+",
-)
+).expect_eof(True)
 
 expect_comment_end = Expect(
     comment_end=re.escape("*/"),
@@ -46,9 +46,10 @@ expect_declaration_content = Expect(
     comment_start=r"\/\*",
     percentage=r"\d+\%",
     number=r"\d+\.?\d+",
-    color=r"\#([0-9a-f]{6})|color\([0-9]{1,3}\)|rgb\([\d\s,]+\)",
+    color=r"\#[0-9a-f]{6}|color\[0-9]{1,3}\|rgb\([\d\s,]+\)",
     token="[a-zA-Z_-]+",
-    string='".*?"',
+    string=r"\".*?\"",
+    important=r"\!important",
     declaration_set_end=r"\}",
 )
 
@@ -78,8 +79,10 @@ def tokenize(code: str) -> Iterable[Token]:
         name = token.name
         if name == "comment_start":
             tokenizer.skip_to(expect_comment_end)
+        elif name == "eof":
+            break
         expect = get_state(name, expect)
-
+        print(token)
         yield token
 
 
diff --git a/src/textual/css/tokenizer.py b/src/textual/css/tokenizer.py
index 9ed0f8adf..8cddd1a88 100644
--- a/src/textual/css/tokenizer.py
+++ b/src/textual/css/tokenizer.py
@@ -29,14 +29,18 @@ class Expect:
         )
         self.match = self._regex.match
         self.search = self._regex.search
+        self._expect_eof = False
+
+    def expect_eof(self, eof: bool) -> Expect:
+        self._expect_eof = eof
+        return self
 
     def __rich_repr__(self) -> rich.repr.Result:
         yield from zip(self.names, self.regexes)
 
 
 class Token(NamedTuple):
-    line: int
-    col: int
+    location: tuple[int, int]
     name: str
     value: str
 
@@ -49,9 +53,12 @@ class Tokenizer:
 
     def get_token(self, expect: Expect) -> Token:
         line_no = self.line_no
-        if line_no >= len(self.lines):
-            raise EOFError()
         col_no = self.col_no
+        if line_no >= len(self.lines):
+            if expect._expect_eof:
+                return Token((line_no, col_no), "eof", "")
+            else:
+                raise EOFError()
         line = self.lines[line_no]
         match = expect.match(line, col_no)
         if match is None:
@@ -68,7 +75,7 @@
                 break
 
         try:
-            return Token(line_no, col_no, name, value)
+            return Token((line_no, col_no), name, value)
         finally:
             col_no += len(value)
             if col_no >= len(line):
diff --git a/src/textual/css/types.py b/src/textual/css/types.py
new file mode 100644
index 000000000..234bc75e0
--- /dev/null
+++ b/src/textual/css/types.py
@@ -0,0 +1,11 @@
+from __future__ import annotations
+
+import sys
+
+if sys.version_info >= (3, 8):
+    from typing import Literal
+else:
+    from typing_extensions import Literal
+
+
+Visibility = Literal["visible", "hidden", "initial", "inherit"]
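
Not part of the patch above: a minimal usage sketch of the new parse()/Styles pieces, under a few assumptions — that the src/ tree is importable as the textual package, that the tokenizer states not shown in this diff handle whitespace and semicolons the way the __main__ test in parse.py suggests, and that the debug print() calls left in tokenize() and parse_rule_set() are acceptable extra output.

from textual.css.parse import parse

CSS = """
#widget {
    visibility: visible;
    border: dashed green !important;
}"""

for rule_set in parse(CSS):
    # Each RuleSet now carries a Styles object rather than a raw list of
    # Declarations (see the model.py and styles.py changes above).
    print(rule_set.selectors)
    print(rule_set.styles.visibility)  # expected: "visible"
    print(rule_set.styles.border_top)  # expected: ("dashed", Color.parse("green"))
    print(rule_set.styles.important)   # expected: {"border"}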