8 Commits

Author SHA1 Message Date
Anthony Sottile
7d1e61f734 v0.0.7 2020-04-04 17:44:40 -07:00
Anthony Sottile
3e7ca8e922 make grammar loading more deterministic 2020-04-04 17:44:27 -07:00
Anthony Sottile
843f1b6ff1 remove stray print 2020-04-04 13:13:53 -07:00
Anthony Sottile
f704505ee2 v0.0.6 2020-04-04 13:04:34 -07:00
Anthony Sottile
b595333fc6 Fix grammars where rules have local repositories
for example: ruby
2020-04-04 13:03:33 -07:00
Anthony Sottile
486af96c12 Merge pull request #53 from brynphillips/PS-key-fix
Ps key fix
2020-04-03 10:36:12 -07:00
Bryn Phillips
8b71d289a3 Fixed PgDn 2020-04-03 10:28:40 -07:00
Bryn Phillips
759cadd868 Fixes for Win PS keys 2020-04-03 10:26:17 -07:00
7 changed files with 127 additions and 26 deletions

View File

@@ -3,8 +3,10 @@ from typing import Iterable
from typing import Mapping
from typing import TypeVar
TKey = TypeVar('TKey')
TValue = TypeVar('TValue')
from babi._types import Protocol
TKey = TypeVar('TKey', contravariant=True)
TValue = TypeVar('TValue', covariant=True)
class FDict(Generic[TKey, TValue]):
@@ -22,3 +24,21 @@ class FDict(Generic[TKey, TValue]):
def values(self) -> Iterable[TValue]:
return self._dct.values()
class Indexable(Generic[TKey, TValue], Protocol):
def __getitem__(self, key: TKey) -> TValue: ...
class FChainMap(Generic[TKey, TValue]):
    """An immutable-after-construction chained lookup over several mappings.

    Lookup order is newest-first: a mapping passed later shadows any
    same-keyed entry in a mapping passed earlier.
    """

    def __init__(self, *mappings: Indexable[TKey, TValue]) -> None:
        self._mappings = mappings

    def __getitem__(self, key: TKey) -> TValue:
        # consult the most recently layered mapping first
        for mapping in reversed(self._mappings):
            try:
                return mapping[key]
            except KeyError:
                continue
        # no layer contained the key
        raise KeyError(key)

View File

@@ -14,7 +14,7 @@ from typing import TypeVar
from identify.identify import tags_from_filename
from babi._types import Protocol
from babi.fdict import FDict
from babi.fdict import FChainMap
from babi.reg import _Reg
from babi.reg import _RegSet
from babi.reg import ERR_REG
@@ -67,6 +67,8 @@ class _Rule(Protocol):
def include(self) -> Optional[str]: ...
@property
def patterns(self) -> 'Tuple[_Rule, ...]': ...
@property
def repository(self) -> 'FChainMap[str, _Rule]': ...
@uniquely_constructed
@@ -83,9 +85,24 @@ class Rule(NamedTuple):
while_captures: Captures
include: Optional[str]
patterns: Tuple[_Rule, ...]
repository: FChainMap[str, _Rule]
@classmethod
def from_dct(cls, dct: Dict[str, Any]) -> _Rule:
def make(
cls,
dct: Dict[str, Any],
parent_repository: FChainMap[str, _Rule],
) -> _Rule:
if 'repository' in dct:
# this looks odd, but it's so we can have a self-referential
# immutable-after-construction chain map
repository_dct: Dict[str, _Rule] = {}
repository = FChainMap(parent_repository, repository_dct)
for k, sub_dct in dct['repository'].items():
repository_dct[k] = Rule.make(sub_dct, repository)
else:
repository = parent_repository
name = _split_name(dct.get('name'))
match = dct.get('match')
begin = dct.get('begin')
@@ -95,7 +112,7 @@ class Rule(NamedTuple):
if 'captures' in dct:
captures = tuple(
(int(k), Rule.from_dct(v))
(int(k), Rule.make(v, repository))
for k, v in dct['captures'].items()
)
else:
@@ -103,7 +120,7 @@ class Rule(NamedTuple):
if 'beginCaptures' in dct:
begin_captures = tuple(
(int(k), Rule.from_dct(v))
(int(k), Rule.make(v, repository))
for k, v in dct['beginCaptures'].items()
)
else:
@@ -111,7 +128,7 @@ class Rule(NamedTuple):
if 'endCaptures' in dct:
end_captures = tuple(
(int(k), Rule.from_dct(v))
(int(k), Rule.make(v, repository))
for k, v in dct['endCaptures'].items()
)
else:
@@ -119,7 +136,7 @@ class Rule(NamedTuple):
if 'whileCaptures' in dct:
while_captures = tuple(
(int(k), Rule.from_dct(v))
(int(k), Rule.make(v, repository))
for k, v in dct['whileCaptures'].items()
)
else:
@@ -141,7 +158,7 @@ class Rule(NamedTuple):
include = dct.get('include')
if 'patterns' in dct:
patterns = tuple(Rule.from_dct(d) for d in dct['patterns'])
patterns = tuple(Rule.make(d, repository) for d in dct['patterns'])
else:
patterns = ()
@@ -158,29 +175,33 @@ class Rule(NamedTuple):
while_captures=while_captures,
include=include,
patterns=patterns,
repository=repository,
)
@uniquely_constructed
class Grammar(NamedTuple):
scope_name: str
repository: FChainMap[str, _Rule]
patterns: Tuple[_Rule, ...]
repository: FDict[str, _Rule]
@classmethod
def from_data(cls, data: Dict[str, Any]) -> 'Grammar':
def make(cls, data: Dict[str, Any]) -> 'Grammar':
scope_name = data['scopeName']
patterns = tuple(Rule.from_dct(dct) for dct in data['patterns'])
if 'repository' in data:
repository = FDict({
k: Rule.from_dct(dct) for k, dct in data['repository'].items()
})
# this looks odd, but it's so we can have a self-referential
# immutable-after-construction chain map
repository_dct: Dict[str, _Rule] = {}
repository = FChainMap(repository_dct)
for k, dct in data['repository'].items():
repository_dct[k] = Rule.make(dct, repository)
else:
repository = FDict({})
repository = FChainMap()
patterns = tuple(Rule.make(d, repository) for d in data['patterns'])
return cls(
scope_name=scope_name,
patterns=patterns,
repository=repository,
patterns=patterns,
)
@@ -530,22 +551,23 @@ class Compiler:
def _include(
self,
grammar: Grammar,
repository: FChainMap[str, _Rule],
s: str,
) -> Tuple[List[str], Tuple[_Rule, ...]]:
if s == '$self':
return self._patterns(grammar, grammar.patterns)
elif s == '$base':
grammar = self._grammars.grammar_for_scope(self._root_scope)
return self._include(grammar, '$self')
return self._include(grammar, grammar.repository, '$self')
elif s.startswith('#'):
return self._patterns(grammar, (grammar.repository[s[1:]],))
return self._patterns(grammar, (repository[s[1:]],))
elif '#' not in s:
grammar = self._grammars.grammar_for_scope(s)
return self._include(grammar, '$self')
return self._include(grammar, grammar.repository, '$self')
else:
scope, _, s = s.partition('#')
grammar = self._grammars.grammar_for_scope(scope)
return self._include(grammar, f'#{s}')
return self._include(grammar, grammar.repository, f'#{s}')
@functools.lru_cache(maxsize=None)
def _patterns(
@@ -557,7 +579,9 @@ class Compiler:
ret_rules: List[_Rule] = []
for rule in rules:
if rule.include is not None:
tmp_regs, tmp_rules = self._include(grammar, rule.include)
tmp_regs, tmp_rules = self._include(
grammar, rule.repository, rule.include,
)
ret_regs.extend(tmp_regs)
ret_rules.extend(tmp_rules)
elif rule.match is None and rule.begin is None and rule.patterns:
@@ -633,7 +657,7 @@ class Grammars:
os.path.splitext(filename)[0]: os.path.join(directory, filename)
for directory in directories
if os.path.exists(directory)
for filename in os.listdir(directory)
for filename in sorted(os.listdir(directory))
if filename.endswith('.json')
}
@@ -669,7 +693,7 @@ class Grammars:
pass
raw = self._raw_for_scope(scope)
ret = self._parsed[scope] = Grammar.from_data(raw)
ret = self._parsed[scope] = Grammar.make(raw)
return ret
def compiler_for_scope(self, scope: str) -> Compiler:

View File

@@ -69,6 +69,11 @@ KEYNAME_REWRITE = {
b'KEY_C2': b'KEY_DOWN',
b'KEY_B3': b'KEY_RIGHT',
b'KEY_B1': b'KEY_LEFT',
b'PADSTOP': b'KEY_DC',
b'KEY_A3': b'KEY_PPAGE',
b'KEY_C3': b'KEY_NPAGE',
b'KEY_A1': b'KEY_HOME',
b'KEY_C1': b'KEY_END',
# windows-curses: map to our M- names
b'ALT_U': b'M-u',
# windows-curses: arguably these names are better than the xterm names

View File

@@ -39,7 +39,6 @@ def json_with_comments(s: bytes) -> Any:
idx = match.end()
match = TOKEN.search(s, idx)
print(bio.getvalue())
bio.seek(0)
return json.load(bio)

View File

@@ -1,6 +1,6 @@
[metadata]
name = babi
version = 0.0.5
version = 0.0.7
description = a text editor
long_description = file: README.md
long_description_content_type = text/markdown

View File

@@ -1,3 +1,6 @@
import pytest
from babi.fdict import FChainMap
from babi.fdict import FDict
@@ -5,3 +8,21 @@ def test_fdict_repr():
# mostly because this shouldn't get hit elsewhere but is useful for
# debugging purposes
assert repr(FDict({1: 2, 3: 4})) == 'FDict({1: 2, 3: 4})'
def test_f_chain_map():
    """Lookups prefer later mappings; a miss raises KeyError carrying the key."""
    cm = FChainMap({1: 2}, {3: 4}, FDict({1: 5}))
    # the rightmost mapping containing the key wins
    assert cm[1] == 5
    assert cm[3] == 4
    with pytest.raises(KeyError) as excinfo:
        cm[2]
    key, = excinfo.value.args
    assert key == 2
def test_f_chain_map_extend():
    """A chain map may itself be layered inside another chain map."""
    base = FChainMap({1: 2})
    assert base[1] == 2
    # the newly layered mapping shadows the wrapped chain map
    extended = FChainMap(base, {1: 5})
    assert extended[1] == 5

View File

@@ -441,6 +441,38 @@ def test_include_repository_rule(compiler_state):
)
def test_include_with_nested_repositories(compiler_state):
    """Rules in a pattern-local repository shadow same-named root rules."""
    grammar = {
        'scopeName': 'test',
        'patterns': [{
            'begin': '<', 'end': '>', 'name': 'b',
            'patterns': [
                {'include': '#rule1'},
                {'include': '#rule2'},
                {'include': '#rule3'},
            ],
            # local repository: rule2/rule3 here should win over the root
            'repository': {
                'rule2': {'match': '2', 'name': 'inner2'},
                'rule3': {'match': '3', 'name': 'inner3'},
            },
        }],
        'repository': {
            'rule1': {'match': '1', 'name': 'root1'},
            'rule2': {'match': '2', 'name': 'root2'},
        },
    }
    compiler, state = compiler_state(grammar)
    state, regions = highlight_line(compiler, state, '<123>', first_line=True)
    expected = (
        Region(0, 1, ('test', 'b')),
        # rule1 only exists at the root; rule2/rule3 resolve locally
        Region(1, 2, ('test', 'b', 'root1')),
        Region(2, 3, ('test', 'b', 'inner2')),
        Region(3, 4, ('test', 'b', 'inner3')),
        Region(4, 5, ('test', 'b')),
    )
    assert regions == expected
def test_include_other_grammar(compiler_state):
compiler, state = compiler_state(
{