From d967de6926817e7d729e97257b546c2479470497 Mon Sep 17 00:00:00 2001
From: Darren Burns
Date: Tue, 1 Feb 2022 13:50:48 +0000
Subject: [PATCH] Tidy up a docstring

---
 src/textual/css/tokenize.py | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/src/textual/css/tokenize.py b/src/textual/css/tokenize.py
index 9895b8430..6e9d2d31a 100644
--- a/src/textual/css/tokenize.py
+++ b/src/textual/css/tokenize.py
@@ -99,13 +99,15 @@ expect_rule_declaration_content = Expect(
 
 
 class TokenizerState:
-    """State machine for the tokeniser.
+    """State machine for the tokenizer.
 
     Attributes:
-        EXPECT: The initial expectation of the tokenizer. Since we start tokenising
-            at the root scope, we'd expect to see either a variable or selector.
-        STATE_MAP: Maps token names to Expects, which are sets of regexes conveying
-            what we expect to see next in the tokenising process.
+        EXPECT: The initial expectation of the tokenizer. Since we start tokenizing
+            at the root scope, we might expect to see either a variable or selector, for example.
+        STATE_MAP: Maps token names to Expects, defining the sets of valid tokens
+            that we'd expect to see next, given the current token. For example, if
+            we've just processed a variable declaration name, we next expect to see
+            the value of that variable.
     """
 
     EXPECT = expect_root_scope
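
For context, the pattern the docstring describes can be sketched as a small table-driven tokenizer. This is a minimal illustration only, not Textual's actual implementation: the `Expect` class below, its `match` method, the token names, and the regexes are simplified stand-ins assumed for the example, not the real `tokenize.py` machinery.

```python
import re
from dataclasses import dataclass


@dataclass
class Expect:
    """Hypothetical stand-in for an Expect: the set of token patterns
    that are valid at the current point in the tokenizing process."""
    patterns: dict  # token name -> regex source

    def match(self, text: str, pos: int):
        for name, pattern in self.patterns.items():
            found = re.compile(pattern).match(text, pos)
            if found:
                return name, found
        return None


# At the root scope we expect either a variable declaration or a selector.
expect_root_scope = Expect({
    "variable_name": r"\$[a-zA-Z_][\w-]*:",
    "selector": r"[a-zA-Z_#.][\w-]*",
})

# After a variable declaration name, we expect that variable's value.
expect_variable_value = Expect({
    "variable_value": r"[^;\n]+;?",
})

# STATE_MAP: given the token we just emitted, which Expect applies next?
STATE_MAP = {
    "variable_name": expect_variable_value,
    "variable_value": expect_root_scope,
}


def tokenize(text: str):
    expect = expect_root_scope  # EXPECT: the initial expectation
    pos = 0
    while pos < len(text):
        if text[pos].isspace():
            pos += 1
            continue
        result = expect.match(text, pos)
        if result is None:
            raise ValueError(f"unexpected input at position {pos}")
        name, found = result
        yield name, found.group()
        pos = found.end()
        # Look up what we expect to see next, given the token just produced.
        expect = STATE_MAP.get(name, expect_root_scope)


print(list(tokenize("$primary: red;")))
# [('variable_name', '$primary:'), ('variable_value', 'red;')]
```

The point mirrored from the docstring is that the control flow is not hard-coded: the next set of acceptable tokens is always looked up from STATE_MAP, keyed by the token that was just produced.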