WIP

parent 08988d2798
commit 3a0ee0d9c1

sly/lex.py: 22 lines changed
@@ -96,9 +96,6 @@ class LexerMetaDict(dict):
         if isinstance(value, str):
             value = TokenStr(value)
 
-        elif isinstance(value, tuple) and len(value) == 2:
-            value = TokenStr(*value)
-
         if key in self and not isinstance(value, property):
             prior = self[key]
             if isinstance(prior, str):
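For context, a rough sketch of the wrap-on-assignment pattern that LexerMetaDict.__setitem__ implements; with the tuple branch dropped by this hunk, only bare strings get wrapped. The class bodies below are simplified stand-ins, not the actual sly/lex.py definitions:

# Simplified stand-ins (not the real sly/lex.py code): a str subclass that can
# carry extra rule metadata, and a dict that wraps string values on assignment.

class TokenStr(str):
    def __new__(cls, value):
        self = super().__new__(cls, value)
        self.before = None          # placeholder for rule-ordering metadata
        return self

class LexerMetaDict(dict):
    def __setitem__(self, key, value):
        if isinstance(value, str):
            value = TokenStr(value)     # only plain strings are wrapped now
        super().__setitem__(key, value)

d = LexerMetaDict()
d['NUMBER'] = r'\d+'
print(type(d['NUMBER']).__name__)       # -> TokenStr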
@@ -170,23 +167,24 @@ class Lexer(metaclass=LexerMeta):
         definitions = list(cls._attributes.items())
         rules = []
 
+        # Collect all of the previous rules from base classes
         for base in cls.__bases__:
             if isinstance(base, LexerMeta):
                 rules.extend(base._collect_rules())
 
+        existing = dict(rules)
+
         for key, value in definitions:
             if (key in cls.tokens) or key.startswith('ignore_') or hasattr(value, 'pattern'):
-                # Check existing rules
-                for n, (rkey, _) in enumerate(rules):
-                    if rkey == key:
-                        rules[n] = (key, value)
-                        break
-                    elif isinstance(value, TokenStr) and value.before == rkey:
-                        rules.insert(n, (key, value))
-                        break
+                if key in existing:
+                    n = rules.index((key, existing[key]))
+                    rules[n] = (key, value)
+                    existing[key] = value
+                elif isinstance(value, TokenStr) and value.before in existing:
+                    n = rules.index((key, existing[key]))
+                    rules.insert(n, (key, value))
                 else:
                     rules.append((key, value))
-                    # rules.append((key, value))
             elif isinstance(value, str) and not key.startswith('_') and key not in {'ignore'}:
                 raise LexerBuildError(f'{key} does not match a name in tokens')
 
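This hunk swaps the linear scan over rules for a name-keyed existing dict: a redefined token now replaces the inherited rule at its original position via rules.index, a TokenStr carrying a before marker is inserted rather than appended, and anything new goes at the end. A rough sketch of that replace-in-place pattern, using throwaway data and a helper name (add_rule) that is not part of SLY:

# Throwaway illustration of the ordered replace-or-append pattern used above;
# add_rule and the sample patterns are made up for this sketch, not SLY code.

rules = [('NUMBER', r'\d+'), ('PLUS', r'\+')]    # e.g. rules inherited from a base lexer
existing = dict(rules)                           # name -> pattern, for O(1) membership tests

def add_rule(key, value):
    if key in existing:
        # Redefinition: overwrite the inherited rule but keep its position
        n = rules.index((key, existing[key]))
        rules[n] = (key, value)
    else:
        # New rule: goes at the end (a 'before' marker would use rules.insert instead)
        rules.append((key, value))
    existing[key] = value

add_rule('NUMBER', r'\d+\.\d+')     # replaces NUMBER in place
add_rule('MINUS', r'-')             # appended after PLUS
print(rules)
# [('NUMBER', '\\d+\\.\\d+'), ('PLUS', '\\+'), ('MINUS', '-')]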
|