css rules

This commit is contained in:
Will McGugan
2021-10-01 21:34:02 +01:00
parent d0b14a351c
commit 67af34bf8a
12 changed files with 228 additions and 41 deletions

View File

@@ -0,0 +1,12 @@
from __future__ import annotations
from typing import Iterable
def friendly_list(words: Iterable[str], joiner: str = "or") -> str:
    """Generate an English-readable list of words.

    Words are sorted case-insensitively and each one is quoted with
    ``repr``, e.g. ``["b", "a", "c"]`` -> ``"'a', 'b' or 'c'"``.

    Args:
        words: An iterable of words to format.
        joiner: Word used before the final item (defaults to ``"or"``).

    Returns:
        The words formatted as a friendly list, or an empty string if
        *words* is empty (the original raised IndexError in that case).
    """
    words = [repr(word) for word in sorted(words, key=str.lower)]
    if not words:
        # Guard the empty case: words[-1] below would raise IndexError.
        return ""
    if len(words) == 1:
        return words[0]
    return f'{", ".join(words[:-1])} {joiner} {words[-1]}'

View File

@@ -1,3 +0,0 @@
.foo {
color: blue
}

View File

@@ -0,0 +1,26 @@
view.buttons {
layout: dock
}
.buttons .button {
dock: top
size: 10px
}
#widget {
visibility: visible;
text-style: bold;
border: dashed !important;
border-top: dashed green;
border-bottom: red;
padding: 1 0;
margin: 1;
layout: dock
dock-edge: left;
}

View File

@@ -1,3 +0,0 @@
#foo {
color: red
}

View File

@@ -1,7 +1,13 @@
from __future__ import annotations
from rich import print
from dataclasses import dataclass, field
from enum import Enum
from typing import Any
from .styles import Styles
from .tokenize import Token
class SelectorType(Enum):
@@ -23,12 +29,6 @@ class Location:
column: tuple[int, int]
@dataclass
class RuleSet:
selectors: list[list[Selector]] = field(default_factory=list)
declarations: list[Declaration] = field(default_factory=list)
@dataclass
class Selector:
name: str
@@ -40,4 +40,13 @@ class Selector:
@dataclass
class Declaration:
name: str
tokens: list[tuple[str, str]] = field(default_factory=list)
tokens: list[Token] = field(default_factory=list)
def process(self):
raise NotImplementedError
@dataclass
class RuleSet:
selectors: list[list[Selector]] = field(default_factory=list)
styles: Styles = field(default_factory=Styles)

View File

@@ -65,17 +65,18 @@ def parse_rule_set(tokens: Iterator[Token], token: Token) -> Iterable[RuleSet]:
continue
if token_name == "declaration_name":
if declaration.tokens:
rule_set.declarations.append(declaration)
rule_set.styles.add_declaration(declaration)
declaration = Declaration("")
declaration.name = token.value.rstrip(":")
elif token_name == "declaration_set_end":
break
else:
declaration.tokens.append((token_name, token.value))
declaration.tokens.append(token)
if declaration.tokens:
rule_set.declarations.append(declaration)
rule_set.styles.add_declaration(declaration)
print(rule_set)
yield rule_set
@@ -83,9 +84,9 @@ def parse(css: str) -> Iterable[RuleSet]:
tokens = iter(tokenize(css))
while True:
token = next(tokens)
token = next(tokens, None)
if token is None:
break
if token.name.startswith("selector_start_"):
yield from parse_rule_set(tokens, token)
@@ -93,14 +94,9 @@ def parse(css: str) -> Iterable[RuleSet]:
if __name__ == "__main__":
test = """
.foo.bar baz:focus, #egg {
color: rgb(10,20,30);
border: bold green;
}
* {
visibility: visible;
border: solid green !important;
outline: red
}
"""
}"""
for obj in parse(test):
print(obj)

View File

@@ -1,5 +0,0 @@
#foo .bar {
border: double red;
color: blue;
text: italic white on black;
}

134
src/textual/css/styles.py Normal file
View File

@@ -0,0 +1,134 @@
from __future__ import annotations
from dataclasses import dataclass, field
from typing import cast, TYPE_CHECKING
from rich import print
from rich.color import ANSI_COLOR_NAMES, Color
from ._error_tools import friendly_list
from .tokenize import Token
from .types import Visibility
if TYPE_CHECKING:
from .model import Declaration
class DeclarationError(Exception):
    """Raised when a CSS declaration can not be processed.

    Args:
        name: Name of the declaration being processed (e.g. ``"border"``).
        token: The token that triggered the error.
        message: Human readable description of the problem.
    """

    def __init__(self, name: str, token: Token, message: str) -> None:
        # Keep the declaration name and offending token so error
        # reporters can point at the exact location; the original
        # constructor silently discarded ``name``.
        self.name = name
        self.token = token
        super().__init__(message)
VALID_VISIBILITY = {"visible", "hidden"}
VALID_BORDER = {"rounded", "solid", "double", "dashed", "heavy", "inner", "outer"}
@dataclass
class Styles:
    """Computed styles for a single CSS rule set.

    Declarations are applied via :meth:`add_declaration`, which
    dispatches to a ``process_<name>`` method (dashes replaced with
    underscores). Border and outline values are stored per-edge as
    ``(border_type, color)`` tuples.
    """

    visibility: Visibility | None = None
    border_top: tuple[str, Color] | None = None
    border_right: tuple[str, Color] | None = None
    border_bottom: tuple[str, Color] | None = None
    border_left: tuple[str, Color] | None = None
    outline_top: tuple[str, Color] | None = None
    outline_right: tuple[str, Color] | None = None
    outline_bottom: tuple[str, Color] | None = None
    outline_left: tuple[str, Color] | None = None
    # Names of declarations that carried a trailing ``!important``.
    important: set[str] = field(default_factory=set)

    def error(self, name: str, token: Token, msg: str) -> None:
        """Raise a DeclarationError for the given token.

        Args:
            name: Name associated with the error (token or declaration name).
            token: Token where the problem occurred.
            msg: Human readable description.

        Raises:
            DeclarationError: Always.
        """
        raise DeclarationError(name, token, msg)

    def add_declaration(self, declaration: Declaration) -> None:
        """Apply a parsed declaration to this instance.

        A trailing ``!important`` token is stripped and recorded in
        ``self.important``. Declarations with no matching
        ``process_*`` method are ignored.
        """
        if not declaration.tokens:
            return
        # BUGFIX: getattr previously had no default, so an unknown
        # declaration name raised AttributeError and the
        # ``is not None`` guard below was unreachable.
        process_method = getattr(
            self, f"process_{declaration.name.replace('-', '_')}", None
        )
        tokens = declaration.tokens
        if tokens[-1].name == "important":
            tokens = tokens[:-1]
            self.important.add(declaration.name)
        if process_method is not None:
            process_method(declaration.name, tokens)

    def _parse_border(self, tokens: list[Token]) -> tuple[str, Color]:
        """Parse the tokens of a border/outline declaration.

        Accepts any mix of a border type (see ``VALID_BORDER``) and a
        color; later tokens override earlier ones. Defaults to
        ``("solid", Color.default())``.

        Raises:
            DeclarationError: On an unrecognised token.
        """
        color = Color.default()
        border_type = "solid"
        for token in tokens:
            _location, token_name, value = token
            if token_name == "token":
                # A bare word is either a named ANSI color or a border type.
                if value in ANSI_COLOR_NAMES:
                    color = Color.parse(value)
                elif value in VALID_BORDER:
                    border_type = value
                else:
                    self.error(
                        token_name, token, f"unknown token {value!r} in declaration"
                    )
            elif token_name == "color":
                color = Color.parse(value)
            else:
                self.error(
                    token_name, token, f"unexpected token {value!r} in declaration"
                )
        return (border_type, color)

    def _process_border(self, edge: str, name: str, tokens: list[Token]) -> None:
        """Parse and assign a border declaration for a single edge."""
        border = self._parse_border(tokens)
        setattr(self, f"border_{edge}", border)

    def process_border(self, name: str, tokens: list[Token]) -> None:
        """Handle ``border:`` — apply the same border to all four edges."""
        border = self._parse_border(tokens)
        self.border_top = self.border_right = border
        self.border_bottom = self.border_left = border

    def process_border_top(self, name: str, tokens: list[Token]) -> None:
        """Handle ``border-top:``."""
        self._process_border("top", name, tokens)

    def process_border_right(self, name: str, tokens: list[Token]) -> None:
        """Handle ``border-right:``."""
        self._process_border("right", name, tokens)

    def process_border_bottom(self, name: str, tokens: list[Token]) -> None:
        """Handle ``border-bottom:``."""
        self._process_border("bottom", name, tokens)

    def process_border_left(self, name: str, tokens: list[Token]) -> None:
        """Handle ``border-left:``."""
        self._process_border("left", name, tokens)

    def _process_outline(self, edge: str, name: str, tokens: list[Token]) -> None:
        """Parse and assign an outline declaration for a single edge."""
        border = self._parse_border(tokens)
        setattr(self, f"outline_{edge}", border)

    def process_outline(self, name: str, tokens: list[Token]) -> None:
        """Handle ``outline:`` — apply the same outline to all four edges."""
        border = self._parse_border(tokens)
        self.outline_top = self.outline_right = border
        self.outline_bottom = self.outline_left = border

    def process_outline_top(self, name: str, tokens: list[Token]) -> None:
        """Handle ``outline-top:``."""
        self._process_outline("top", name, tokens)

    def process_outline_right(self, name: str, tokens: list[Token]) -> None:
        """Handle ``outline-right:``.

        BUGFIX: was misnamed ``process_parse_border_right``, so the
        dynamic dispatch in ``add_declaration`` could never reach it and
        ``outline-right`` declarations failed.
        """
        self._process_outline("right", name, tokens)

    def process_outline_bottom(self, name: str, tokens: list[Token]) -> None:
        """Handle ``outline-bottom:``."""
        self._process_outline("bottom", name, tokens)

    def process_outline_left(self, name: str, tokens: list[Token]) -> None:
        """Handle ``outline-left:``."""
        self._process_outline("left", name, tokens)

    def process_visibility(self, name: str, tokens: list[Token]) -> None:
        """Handle ``visibility:`` — accepts ``visible`` or ``hidden``.

        Raises:
            DeclarationError: On any other value or token type.
        """
        for token in tokens:
            _location, token_name, value = token
            if token_name == "token":
                value = value.lower()
                if value in VALID_VISIBILITY:
                    self.visibility = cast(Visibility, value)
                else:
                    self.error(
                        token_name,
                        token,
                        f"invalid value for visibility (received {value!r}, expected {friendly_list(VALID_VISIBILITY)})",
                    )
            else:
                self.error(
                    token_name, token, f"invalid token {value!r} in this context"
                )

View File

@@ -14,7 +14,7 @@ expect_selector = Expect(
selector_start_class=r"\.[a-zA-Z_\-]+",
selector_start_universal=r"\*",
selector_start=r"[a-zA-Z_\-]+",
)
).expect_eof(True)
expect_comment_end = Expect(
comment_end=re.escape("*/"),
@@ -46,9 +46,10 @@ expect_declaration_content = Expect(
comment_start=r"\/\*",
percentage=r"\d+\%",
number=r"\d+\.?\d+",
color=r"\#([0-9a-f]{6})|color\([0-9]{1,3}\)|rgb\([\d\s,]+\)",
color=r"\#[0-9a-f]{6}|color\[0-9]{1,3}\|rgb\([\d\s,]+\)",
token="[a-zA-Z_-]+",
string='".*?"',
string=r"\".*?\"",
important=r"\!important",
declaration_set_end=r"\}",
)
@@ -78,8 +79,10 @@ def tokenize(code: str) -> Iterable[Token]:
name = token.name
if name == "comment_start":
tokenizer.skip_to(expect_comment_end)
elif name == "eof":
break
expect = get_state(name, expect)
print(token)
yield token

View File

@@ -29,14 +29,18 @@ class Expect:
)
self.match = self._regex.match
self.search = self._regex.search
self._expect_eof = False
def expect_eof(self, eof: bool) -> Expect:
self._expect_eof = eof
return self
def __rich_repr__(self) -> rich.repr.Result:
yield from zip(self.names, self.regexes)
class Token(NamedTuple):
line: int
col: int
location: tuple[int, int]
name: str
value: str
@@ -49,9 +53,12 @@ class Tokenizer:
def get_token(self, expect: Expect) -> Token:
line_no = self.line_no
if line_no >= len(self.lines):
raise EOFError()
col_no = self.col_no
if line_no >= len(self.lines):
if expect._expect_eof:
return Token((line_no, col_no), "eof", "")
else:
raise EOFError()
line = self.lines[line_no]
match = expect.match(line, col_no)
if match is None:
@@ -68,7 +75,7 @@ class Tokenizer:
break
try:
return Token(line_no, col_no, name, value)
return Token((line_no, col_no), name, value)
finally:
col_no += len(value)
if col_no >= len(line):

11
src/textual/css/types.py Normal file
View File

@@ -0,0 +1,11 @@
"""Shared type aliases for the CSS subsystem."""

from __future__ import annotations

import sys

# typing.Literal was added in Python 3.8; fall back to the
# typing_extensions backport on older interpreters.
if sys.version_info >= (3, 8):
    from typing import Literal
else:
    from typing_extensions import Literal

# Accepted values for the CSS ``visibility`` declaration.
# NOTE(review): includes "initial"/"inherit", which the styles
# processor's VALID_VISIBILITY set does not accept — confirm intended.
Visibility = Literal["visible", "hidden", "initial", "inherit"]