6 Commits

Author SHA1 Message Date
Anthony Sottile  032c3d78fc  v0.0.3  2020-03-26 20:38:52 -07:00
Anthony Sottile  a197645087  merge the textmate demo into babi  2020-03-26 20:26:57 -07:00
Anthony Sottile  9f8e400d32  switch to babi-grammars for syntax  2020-03-26 19:43:01 -07:00
Anthony Sottile  2123e6ee84  improve performance by ~.8%
apparently contextlib.suppress is enough to show up in profiles
2020-03-23 20:57:53 -07:00
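
The hot spots were the dictionary-cache lookups in `ColorManager.raw_color_pair` and `Compiler.compile_rule`, both switched from `contextlib.suppress(KeyError)` to a plain `try`/`except` in the hunks below. A rough standalone microbenchmark (not part of the repo) showing why the context manager is measurable on such a hot path:

```python
# rough microbenchmark (not repo code): contextlib.suppress builds and enters
# a context manager on every call, while try/except is nearly free when the
# key is already present -- which is the common case for these caches.
import contextlib
import timeit

cache = {('fg', 'bg'): 1}


def lookup_with_suppress():
    with contextlib.suppress(KeyError):
        return cache[('fg', 'bg')]


def lookup_with_try_except():
    try:
        return cache[('fg', 'bg')]
    except KeyError:
        pass


print('suppress:  ', timeit.timeit(lookup_with_suppress, number=500_000))
print('try/except:', timeit.timeit(lookup_with_try_except, number=500_000))
```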
Anthony Sottile  b529dde91a  Fix incorrect caching in syntax highlighter
the concrete broken case was for markdown with yaml

```md
---
x: y
---

(this one shouldn't be yaml highlighted)
---
x: y
---
```
2020-03-23 20:05:47 -07:00
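
The fix, visible in the `FileSyntax` hunk further down, keys the per-line highlight cache on (state, line, first_line) instead of (line, state) alone, since an opening `---` only means yaml front matter when it is the first line of the file. A toy sketch (not repo code) of the keying idea:

```python
# toy sketch (not repo code): once first_line is part of the cache key, the
# `---` that opens yaml front matter and a later plain `---` no longer
# collide in the cache.
import functools


@functools.lru_cache(maxsize=None)
def classify(state: str, line: str, first_line: bool) -> str:
    if line == '---' and first_line:
        return 'yaml front matter begins'
    return 'plain markdown'


assert classify('root', '---', True) == 'yaml front matter begins'
assert classify('root', '---', False) == 'plain markdown'  # not a stale hit
```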
Anthony Sottile  c4e2f8e9cf  this is unused  2020-03-22 20:12:04 -07:00
15 changed files with 349 additions and 205 deletions

@@ -63,9 +63,7 @@ in prompts (search, search replace, command):
the syntax highlighting setup is a bit manual right now
1. from a clone of babi, run `./bin/download-syntax` -- you will likely need
to install some additional packages to download them (`pip install cson`)
2. find a visual studio code theme, convert it to json (if it is not already
1. find a visual studio code theme, convert it to json (if it is not already
json) and put it at `~/.config/babi/theme.json`. a helper script is
provided to make this easier: `./bin/download-theme NAME URL`
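
For illustration only, a hypothetical sketch of what a `download-theme`-style helper could do; this is not the repo's `./bin/download-theme`, and it assumes the theme at the given URL is already json:

```python
# hypothetical sketch (not the repo's bin/download-theme): fetch a VS Code
# theme that is already json and install it as ~/.config/babi/theme.json
import os
import sys
import urllib.request


def main() -> int:
    name, url = sys.argv[1], sys.argv[2]  # NAME URL, as described above
    dest = os.path.expanduser('~/.config/babi/theme.json')
    os.makedirs(os.path.dirname(dest), exist_ok=True)
    with urllib.request.urlopen(url) as resp:
        contents = resp.read()
    with open(dest, 'wb') as f:
        f.write(contents)
    print(f'installed theme {name!r} at {dest}')
    return 0


if __name__ == '__main__':
    raise SystemExit(main())
```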

@@ -1,4 +1,3 @@
import contextlib
import curses
from typing import Dict
from typing import NamedTuple
@@ -34,8 +33,10 @@ class ColorManager(NamedTuple):
return self.raw_color_pair(fg_i, bg_i)
def raw_color_pair(self, fg: int, bg: int) -> int:
with contextlib.suppress(KeyError):
try:
return self.raw_pairs[(fg, bg)]
except KeyError:
pass
n = self.raw_pairs[(fg, bg)] = len(self.raw_pairs) + 1
curses.init_pair(n, fg, bg)

@@ -40,8 +40,6 @@ if TYPE_CHECKING:
TCallable = TypeVar('TCallable', bound=Callable[..., Any])
HIGHLIGHT = curses.A_REVERSE | curses.A_DIM
def _restore_lines_eof_invariant(lines: MutableSequenceNoSlice) -> None:
"""The file lines will always contain a blank empty string at the end to

@@ -1,14 +1,13 @@
import contextlib
import functools
import json
import os.path
from typing import Any
from typing import Dict
from typing import FrozenSet
from typing import List
from typing import Match
from typing import NamedTuple
from typing import Optional
from typing import Sequence
from typing import Tuple
from typing import TypeVar
@@ -618,8 +617,10 @@ class Compiler:
return PatternRule(rule.name, make_regset(*regs), rules)
def compile_rule(self, rule: _Rule) -> CompiledRule:
with contextlib.suppress(KeyError):
try:
return self._c_rules[rule]
except KeyError:
pass
grammar = self._rule_to_grammar[rule]
ret = self._c_rules[rule] = self._compile_rule(grammar, rule)
@@ -627,41 +628,58 @@ class Compiler:
class Grammars:
def __init__(self, grammars: Sequence[Dict[str, Any]]) -> None:
self._raw = {grammar['scopeName']: grammar for grammar in grammars}
self._find_scope = [
(
frozenset(grammar.get('fileTypes', ())),
make_reg(grammar.get('firstLineMatch', '$impossible^')),
grammar['scopeName'],
)
for grammar in grammars
]
self._parsed: Dict[str, Grammar] = {}
self._compilers: Dict[str, Compiler] = {}
def __init__(self, *directories: str) -> None:
self._scope_to_files = {
os.path.splitext(filename)[0]: os.path.join(directory, filename)
for directory in directories
if os.path.exists(directory)
for filename in os.listdir(directory)
if filename.endswith('.json')
}
@classmethod
def from_syntax_dir(cls, syntax_dir: str) -> 'Grammars':
grammars = [{'scopeName': 'source.unknown', 'patterns': []}]
if os.path.exists(syntax_dir):
for filename in os.listdir(syntax_dir):
with open(os.path.join(syntax_dir, filename)) as f:
grammars.append(json.load(f))
return cls(grammars)
unknown_grammar = {'scopeName': 'source.unknown', 'patterns': []}
self._raw = {'source.unknown': unknown_grammar}
self._file_types: List[Tuple[FrozenSet[str], str]] = []
self._first_line: List[Tuple[_Reg, str]] = []
self._parsed: Dict[str, Grammar] = {}
self._compiled: Dict[str, Compiler] = {}
def _raw_for_scope(self, scope: str) -> Dict[str, Any]:
try:
return self._raw[scope]
except KeyError:
pass
grammar_path = self._scope_to_files.pop(scope)
with open(grammar_path) as f:
ret = self._raw[scope] = json.load(f)
file_types = frozenset(ret.get('fileTypes', ()))
first_line = make_reg(ret.get('firstLineMatch', '$impossible^'))
self._file_types.append((file_types, scope))
self._first_line.append((first_line, scope))
return ret
def grammar_for_scope(self, scope: str) -> Grammar:
with contextlib.suppress(KeyError):
try:
return self._parsed[scope]
except KeyError:
pass
ret = self._parsed[scope] = Grammar.from_data(self._raw[scope])
raw = self._raw_for_scope(scope)
ret = self._parsed[scope] = Grammar.from_data(raw)
return ret
def compiler_for_scope(self, scope: str) -> Compiler:
with contextlib.suppress(KeyError):
return self._compilers[scope]
try:
return self._compiled[scope]
except KeyError:
pass
grammar = self.grammar_for_scope(scope)
ret = self._compilers[scope] = Compiler(grammar, self)
ret = self._compiled[scope] = Compiler(grammar, self)
return ret
def blank_compiler(self) -> Compiler:
@@ -669,20 +687,26 @@ class Grammars:
def compiler_for_file(self, filename: str, first_line: str) -> Compiler:
for tag in tags_from_filename(filename) - {'text'}:
with contextlib.suppress(KeyError):
try:
# TODO: this doesn't always match even if we detect it
return self.compiler_for_scope(f'source.{tag}')
except KeyError:
pass
# didn't find it in the fast path, need to read all the json
for k in tuple(self._scope_to_files):
self._raw_for_scope(k)
_, _, ext = os.path.basename(filename).rpartition('.')
for extensions, first_line_match, scope_name in self._find_scope:
if (
ext in extensions or
first_line_match.match(
first_line, 0, first_line=True, boundary=True,
)
):
return self.compiler_for_scope(scope_name)
else:
return self.compiler_for_scope('source.unknown')
for extensions, scope in self._file_types:
if ext in extensions:
return self.compiler_for_scope(scope)
for reg, scope in self._first_line:
if reg.match(first_line, 0, first_line=True, boundary=True):
return self.compiler_for_scope(scope)
return self.compiler_for_scope('source.unknown')
def highlight_line(
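
To make the lazy-loading change above concrete, here is a usage sketch of the reworked `Grammars` API, mirroring how `babi/textmate_demo.py` (added later in this diff) drives it; the filename is a placeholder and this assumes babi and its grammar files are installed:

```python
# usage sketch (mirrors the new textmate demo): only the grammar json that is
# actually needed for the file gets read from disk
from babi.highlight import Grammars
from babi.highlight import highlight_line
from babi.user_data import prefix_data
from babi.user_data import xdg_data

grammars = Grammars(prefix_data('grammar_v1'), xdg_data('grammar_v1'))

filename = 'example.py'  # placeholder
with open(filename) as f:
    first_line = next(f, '')

compiler = grammars.compiler_for_file(filename, first_line)

state = compiler.root_state
with open(filename) as f:
    for i, line in enumerate(f):
        state, regions = highlight_line(compiler, state, line, first_line=i == 0)
        print(i, regions)
```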

@@ -1,7 +1,10 @@
import curses
from typing import Dict
import functools
import math
from typing import Callable
from typing import List
from typing import NamedTuple
from typing import Optional
from typing import Tuple
from babi.color_manager import ColorManager
@@ -14,6 +17,7 @@ from babi.hl.interface import HLs
from babi.list_spy import SequenceNoSlice
from babi.theme import Style
from babi.theme import Theme
from babi.user_data import prefix_data
from babi.user_data import xdg_config
from babi.user_data import xdg_data
@@ -36,8 +40,10 @@ class FileSyntax:
self.regions: List[HLs] = []
self._states: List[State] = []
self._hl_cache: Dict[str, Dict[State, Tuple[State, HLs]]]
self._hl_cache = {}
# this will be assigned a functools.lru_cache per instance for
# better hit rate and memory usage
self._hl: Optional[Callable[[State, str, bool], Tuple[State, HLs]]]
self._hl = None
def attr(self, style: Style) -> int:
pair = self._color_manager.color_pair(style.fg, style.bg)
@@ -48,19 +54,14 @@ class FileSyntax:
curses.A_UNDERLINE * style.u
)
def _hl(
def _hl_uncached(
self,
state: State,
line: str,
i: int,
first_line: bool,
) -> Tuple[State, HLs]:
try:
return self._hl_cache[line][state]
except KeyError:
pass
new_state, regions = highlight_line(
self._compiler, state, f'{line}\n', first_line=i == 0,
self._compiler, state, f'{line}\n', first_line=first_line,
)
# remove the trailing newline
@@ -83,18 +84,22 @@ class FileSyntax:
else:
regs.append(HL(x=r.start, end=r.end, attr=attr))
dct = self._hl_cache.setdefault(line, {})
ret = dct[state] = (new_state, tuple(regs))
return ret
return new_state, tuple(regs)
def highlight_until(self, lines: SequenceNoSlice, idx: int) -> None:
if self._hl is None:
# the docs claim better performance with power of two sizing
size = max(4096, 2 ** (int(math.log(len(lines), 2)) + 2))
self._hl = functools.lru_cache(maxsize=size)(self._hl_uncached)
if not self._states:
state = self._compiler.root_state
else:
state = self._states[-1]
for i in range(len(self._states), idx):
state, regions = self._hl(state, lines[i], i)
# https://github.com/python/mypy/issues/8579
state, regions = self._hl(state, lines[i], i == 0) # type: ignore
self._states.append(state)
self.regions.append(regions)
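
A quick check (not repo code) of the power-of-two sizing used for the per-instance `lru_cache` above: the cache holds at least 4096 entries, and otherwise four times the largest power of two not exceeding the file length.

```python
# quick check (not repo code) of the sizing formula used in highlight_until
import math


def cache_size(n_lines: int) -> int:
    return max(4096, 2 ** (int(math.log(n_lines, 2)) + 2))


assert cache_size(100) == 4096        # small files hit the 4096 floor
assert cache_size(10_000) == 32_768   # int(log2(10000)) == 13, so 2 ** 15
```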
@@ -140,7 +145,7 @@ class Syntax(NamedTuple):
stdscr: 'curses._CursesWindow',
color_manager: ColorManager,
) -> 'Syntax':
grammars = Grammars.from_syntax_dir(xdg_data('textmate_syntax'))
grammars = Grammars(prefix_data('grammar_v1'), xdg_data('grammar_v1'))
theme = Theme.from_filename(xdg_config('theme.json'))
ret = cls(grammars, theme, color_manager)
ret._init_screen(stdscr)

babi/textmate_demo.py (new file)

@@ -0,0 +1,69 @@
import argparse
from typing import Optional
from typing import Sequence
from babi.highlight import Compiler
from babi.highlight import Grammars
from babi.highlight import highlight_line
from babi.theme import Style
from babi.theme import Theme
from babi.user_data import prefix_data
from babi.user_data import xdg_config
def print_styled(s: str, style: Style) -> None:
color_s = ''
undo_s = ''
if style.fg is not None:
color_s += '\x1b[38;2;{r};{g};{b}m'.format(**style.fg._asdict())
undo_s += '\x1b[39m'
if style.bg is not None:
color_s += '\x1b[48;2;{r};{g};{b}m'.format(**style.bg._asdict())
undo_s += '\x1b[49m'
if style.b:
color_s += '\x1b[1m'
undo_s += '\x1b[22m'
if style.i:
color_s += '\x1b[3m'
undo_s += '\x1b[23m'
if style.u:
color_s += '\x1b[4m'
undo_s += '\x1b[24m'
print(f'{color_s}{s}{undo_s}', end='', flush=True)
def _highlight_output(theme: Theme, compiler: Compiler, filename: str) -> int:
state = compiler.root_state
if theme.default.bg is not None:
print('\x1b[48;2;{r};{g};{b}m'.format(**theme.default.bg._asdict()))
with open(filename) as f:
for line_idx, line in enumerate(f):
first_line = line_idx == 0
state, regions = highlight_line(compiler, state, line, first_line)
for start, end, scope in regions:
print_styled(line[start:end], theme.select(scope))
print('\x1b[m', end='')
return 0
def main(argv: Optional[Sequence[str]] = None) -> int:
parser = argparse.ArgumentParser()
parser.add_argument('--theme', default=xdg_config('theme.json'))
parser.add_argument('--grammar-dir', default=prefix_data('grammar_v1'))
parser.add_argument('filename')
args = parser.parse_args(argv)
with open(args.filename) as f:
first_line = next(f, '')
theme = Theme.from_filename(args.theme)
grammars = Grammars(args.grammar_dir)
compiler = grammars.compiler_for_file(args.filename, first_line)
return _highlight_output(theme, compiler, args.filename)
if __name__ == '__main__':
exit(main())
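
Besides the console-script entry point registered later in this diff, the pieces are usable directly; a small sketch, assuming babi is installed, that builds a tiny theme (the same shape as the THEME dict used by the tests added below) and renders one styled token with `print_styled`:

```python
# sketch (assumes babi is installed): render a styled string straight to the
# terminal without curses
from babi.textmate_demo import print_styled
from babi.theme import Theme

theme = Theme.from_dct({
    'colors': {'foreground': '#ffffff', 'background': '#000000'},
    'tokenColors': [
        {'scope': 'comment', 'settings': {'foreground': '#1e77d3'}},
    ],
})
print_styled('# a comment', theme.select(('comment',)))
print()
```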

@@ -1,4 +1,5 @@
import os.path
import sys
def _xdg(*path: str, env: str, default: str) -> str:
@@ -14,3 +15,7 @@ def xdg_data(*path: str) -> str:
def xdg_config(*path: str) -> str:
return _xdg(*path, env='XDG_CONFIG_HOME', default='~/.config')
def prefix_data(*path: str) -> str:
return os.path.join(sys.prefix, 'share/babi', *path)

@@ -1,86 +0,0 @@
#!/usr/bin/env python3
import argparse
import enum
import json
import os.path
import plistlib
import urllib.request
from typing import NamedTuple
import cson # pip install cson
DEFAULT_DIR = os.path.join(
os.environ.get('XDG_DATA_HOME') or
os.path.expanduser('~/.local/share'),
'babi/textmate_syntax',
)
Ext = enum.Enum('Ext', 'CSON PLIST JSON')
def _convert_cson(src: bytes) -> str:
return json.dumps(cson.loads(src))
def _convert_json(src: bytes) -> str:
return json.dumps(json.loads(src))
def _convert_plist(src: bytes) -> str:
return json.dumps(plistlib.loads(src))
EXT_CONVERT = {
Ext.CSON: _convert_cson,
Ext.JSON: _convert_json,
Ext.PLIST: _convert_plist,
}
class Syntax(NamedTuple):
name: str
ext: Ext
url: str
SYNTAXES = (
Syntax('c', Ext.JSON, 'https://raw.githubusercontent.com/jeff-hykin/cpp-textmate-grammar/53e39b1c/syntaxes/c.tmLanguage.json'), # noqa: E501
Syntax('css', Ext.CSON, 'https://raw.githubusercontent.com/atom/language-css/9feb69c081308b63f78bb0d6a2af2ff5eb7d869b/grammars/css.cson'), # noqa: E501
Syntax('docker', Ext.PLIST, 'https://raw.githubusercontent.com/moby/moby/c7ad2b866/contrib/syntax/textmate/Docker.tmbundle/Syntaxes/Dockerfile.tmLanguage'), # noqa: E501
Syntax('diff', Ext.PLIST, 'https://raw.githubusercontent.com/textmate/diff.tmbundle/0593bb77/Syntaxes/Diff.plist'), # noqa: E501
Syntax('html', Ext.PLIST, 'https://raw.githubusercontent.com/textmate/html.tmbundle/0c3d5ee5/Syntaxes/HTML.plist'), # noqa: E501
Syntax('html-derivative', Ext.PLIST, 'https://raw.githubusercontent.com/textmate/html.tmbundle/0c3d5ee54de3a993f747f54186b73a4d2d3c44a2/Syntaxes/HTML%20(Derivative).tmLanguage'), # noqa: E501
Syntax('ini', Ext.PLIST, 'https://raw.githubusercontent.com/textmate/ini.tmbundle/7d8c7b55/Syntaxes/Ini.plist'), # noqa: E501
Syntax('json', Ext.PLIST, 'https://raw.githubusercontent.com/microsoft/vscode-JSON.tmLanguage/d113e90937ed3ecc31ac54750aac2e8efa08d784/JSON.tmLanguage'), # noqa: E501
Syntax('make', Ext.PLIST, 'https://raw.githubusercontent.com/fadeevab/make.tmbundle/fd57c0552/Syntaxes/Makefile.plist'), # noqa: E501
Syntax('markdown', Ext.PLIST, 'https://raw.githubusercontent.com/microsoft/vscode-markdown-tm-grammar/59a5962/syntaxes/markdown.tmLanguage'), # noqa: E501
Syntax('powershell', Ext.PLIST, 'https://raw.githubusercontent.com/PowerShell/EditorSyntax/4a0a0766/PowerShellSyntax.tmLanguage'), # noqa: E501
Syntax('puppet', Ext.PLIST, 'https://raw.githubusercontent.com/lingua-pupuli/puppet-editor-syntax/dc414b8a/syntaxes/puppet.tmLanguage'), # noqa: E501
Syntax('python', Ext.PLIST, 'https://raw.githubusercontent.com/MagicStack/MagicPython/c9b3409d/grammars/MagicPython.tmLanguage'), # noqa: E501
# TODO: https://github.com/zargony/atom-language-rust/pull/149
Syntax('rust', Ext.CSON, 'https://raw.githubusercontent.com/asottile/atom-language-rust/e113ca67/grammars/rust.cson'), # noqa: E501
Syntax('shell', Ext.CSON, 'https://raw.githubusercontent.com/atom/language-shellscript/7008ea926867d8a231003e78094091471c4fccf8/grammars/shell-unix-bash.cson'), # noqa: E501
# TODO: https://github.com/atom/language-xml/pull/99
Syntax('xml', Ext.CSON, 'https://raw.githubusercontent.com/asottile/language-xml/2d76bc1f/grammars/xml.cson'), # noqa: E501
Syntax('yaml', Ext.PLIST, 'https://raw.githubusercontent.com/textmate/yaml.tmbundle/e54ceae3/Syntaxes/YAML.tmLanguage'), # noqa: E501
)
def main() -> int:
parser = argparse.ArgumentParser()
parser.add_argument('--dest', default=DEFAULT_DIR)
args = parser.parse_args()
os.makedirs(args.dest, exist_ok=True)
for syntax in SYNTAXES:
print(f'downloading {syntax.name}...')
resp = urllib.request.urlopen(syntax.url).read()
converted = EXT_CONVERT[syntax.ext](resp)
with open(os.path.join(args.dest, f'{syntax.name}.json'), 'w') as f:
f.write(converted)
return 0
if __name__ == '__main__':
exit(main())

@@ -1,6 +1,6 @@
[metadata]
name = babi
version = 0.0.2
version = 0.0.3
description = a text editor
long_description = file: README.md
long_description_content_type = text/markdown
@@ -22,6 +22,7 @@ classifiers =
[options]
packages = find:
install_requires =
babi-grammars
identify
onigurumacffi>=0.0.10
importlib_metadata>=1;python_version<"3.8"
@@ -31,6 +32,7 @@ python_requires = >=3.6.1
[options.entry_points]
console_scripts =
babi = babi.main:main
babi-textmate-demo = babi.textmate_demo:main
[options.packages.find]
exclude =

tests/conftest.py (new file)

@@ -0,0 +1,17 @@
import json
import pytest
from babi.highlight import Grammars
@pytest.fixture
def make_grammars(tmpdir):
grammar_dir = tmpdir.join('grammars').ensure_dir()
def make_grammars(*grammar_dcts):
for grammar in grammar_dcts:
filename = f'{grammar["scopeName"]}.json'
grammar_dir.join(filename).write(json.dumps(grammar))
return Grammars(grammar_dir)
return make_grammars

@@ -16,6 +16,13 @@ from babi.screen import VERSION_STR
from testing.runner import PrintsErrorRunner
@pytest.fixture(autouse=True)
def prefix_home(tmpdir):
prefix_home = tmpdir.join('prefix_home')
with mock.patch.object(sys, 'prefix', str(prefix_home)):
yield prefix_home
@pytest.fixture(autouse=True)
def xdg_data_home(tmpdir):
data_home = tmpdir.join('data_home')

@@ -43,7 +43,7 @@ still more
@pytest.fixture(autouse=True)
def theme_and_grammar(xdg_data_home, xdg_config_home):
xdg_config_home.join('babi/theme.json').ensure().write(THEME)
xdg_data_home.join('babi/textmate_syntax/demo.json').ensure().write(SYNTAX)
xdg_data_home.join('babi/grammar_v1/demo.json').ensure().write(SYNTAX)
@pytest.fixture

@@ -1,34 +1,37 @@
from babi.highlight import Grammars
import pytest
from babi.highlight import highlight_line
from babi.highlight import Region
def test_grammar_matches_extension_only_name():
def test_grammar_matches_extension_only_name(make_grammars):
data = {'scopeName': 'shell', 'patterns': [], 'fileTypes': ['bashrc']}
grammars = Grammars([data])
grammars = make_grammars(data)
compiler = grammars.compiler_for_file('.bashrc', 'alias nano=babi')
assert compiler.root_state.entries[0].scope[0] == 'shell'
def test_grammar_matches_via_identify_tag():
data = {'scopeName': 'source.ini', 'patterns': []}
grammars = Grammars([data])
def test_grammar_matches_via_identify_tag(make_grammars):
grammars = make_grammars({'scopeName': 'source.ini', 'patterns': []})
compiler = grammars.compiler_for_file('setup.cfg', '')
assert compiler.root_state.entries[0].scope[0] == 'source.ini'
def _compiler_state(*grammar_dcts):
grammars = Grammars(grammar_dcts)
compiler = grammars.compiler_for_scope(grammar_dcts[0]['scopeName'])
return compiler, compiler.root_state
@pytest.fixture
def compiler_state(make_grammars):
def _compiler_state(*grammar_dcts):
grammars = make_grammars(*grammar_dcts)
compiler = grammars.compiler_for_scope(grammar_dcts[0]['scopeName'])
return compiler, compiler.root_state
return _compiler_state
def test_backslash_a():
def test_backslash_a(compiler_state):
grammar = {
'scopeName': 'test',
'patterns': [{'name': 'aaa', 'match': r'\Aa+'}],
}
compiler, state = _compiler_state(grammar)
compiler, state = compiler_state(grammar)
state, (region_0,) = highlight_line(compiler, state, 'aaa', True)
state, (region_1,) = highlight_line(compiler, state, 'aaa', False)
@@ -51,8 +54,8 @@ BEGIN_END_NO_NL = {
}
def test_backslash_g_inline():
compiler, state = _compiler_state(BEGIN_END_NO_NL)
def test_backslash_g_inline(compiler_state):
compiler, state = compiler_state(BEGIN_END_NO_NL)
_, regions = highlight_line(compiler, state, 'xaax', True)
assert regions == (
@@ -63,8 +66,8 @@ def test_backslash_g_inline():
)
def test_backslash_g_next_line():
compiler, state = _compiler_state(BEGIN_END_NO_NL)
def test_backslash_g_next_line(compiler_state):
compiler, state = compiler_state(BEGIN_END_NO_NL)
state, regions1 = highlight_line(compiler, state, 'x\n', True)
state, regions2 = highlight_line(compiler, state, 'aax\n', False)
@@ -81,8 +84,8 @@ def test_backslash_g_next_line():
)
def test_end_before_other_match():
compiler, state = _compiler_state(BEGIN_END_NO_NL)
def test_end_before_other_match(compiler_state):
compiler, state = compiler_state(BEGIN_END_NO_NL)
state, regions = highlight_line(compiler, state, 'xazzx', True)
@@ -107,8 +110,8 @@ BEGIN_END_NL = {
}
def test_backslash_g_captures_nl():
compiler, state = _compiler_state(BEGIN_END_NL)
def test_backslash_g_captures_nl(compiler_state):
compiler, state = compiler_state(BEGIN_END_NL)
state, regions1 = highlight_line(compiler, state, 'x\n', True)
state, regions2 = highlight_line(compiler, state, 'aax\n', False)
@@ -124,8 +127,8 @@ def test_backslash_g_captures_nl():
)
def test_backslash_g_captures_nl_next_line():
compiler, state = _compiler_state(BEGIN_END_NL)
def test_backslash_g_captures_nl_next_line(compiler_state):
compiler, state = compiler_state(BEGIN_END_NL)
state, regions1 = highlight_line(compiler, state, 'x\n', True)
state, regions2 = highlight_line(compiler, state, 'aa\n', False)
@@ -147,8 +150,8 @@ def test_backslash_g_captures_nl_next_line():
)
def test_while_no_nl():
compiler, state = _compiler_state({
def test_while_no_nl(compiler_state):
compiler, state = compiler_state({
'scopeName': 'test',
'patterns': [{
'begin': '> ',
@@ -182,8 +185,8 @@ def test_while_no_nl():
)
def test_complex_captures():
compiler, state = _compiler_state({
def test_complex_captures(compiler_state):
compiler, state = compiler_state({
'scopeName': 'test',
'patterns': [
{
@@ -213,8 +216,8 @@ def test_complex_captures():
)
def test_captures_multiple_applied_to_same_capture():
compiler, state = _compiler_state({
def test_captures_multiple_applied_to_same_capture(compiler_state):
compiler, state = compiler_state({
'scopeName': 'test',
'patterns': [
{
@@ -256,8 +259,8 @@ def test_captures_multiple_applied_to_same_capture():
)
def test_captures_ignores_empty():
compiler, state = _compiler_state({
def test_captures_ignores_empty(compiler_state):
compiler, state = compiler_state({
'scopeName': 'test',
'patterns': [{
'match': '(.*) hi',
@@ -279,8 +282,8 @@ def test_captures_ignores_empty():
)
def test_captures_ignores_invalid_out_of_bounds():
compiler, state = _compiler_state({
def test_captures_ignores_invalid_out_of_bounds(compiler_state):
compiler, state = compiler_state({
'scopeName': 'test',
'patterns': [{'match': '.', 'captures': {'1': {'name': 'oob'}}}],
})
@@ -292,8 +295,8 @@ def test_captures_ignores_invalid_out_of_bounds():
)
def test_captures_begin_end():
compiler, state = _compiler_state({
def test_captures_begin_end(compiler_state):
compiler, state = compiler_state({
'scopeName': 'test',
'patterns': [
{
@@ -314,8 +317,8 @@ def test_captures_begin_end():
)
def test_captures_while_captures():
compiler, state = _compiler_state({
def test_captures_while_captures(compiler_state):
compiler, state = compiler_state({
'scopeName': 'test',
'patterns': [
{
@@ -343,8 +346,8 @@ def test_captures_while_captures():
)
def test_captures_implies_begin_end_captures():
compiler, state = _compiler_state({
def test_captures_implies_begin_end_captures(compiler_state):
compiler, state = compiler_state({
'scopeName': 'test',
'patterns': [
{
@@ -364,8 +367,8 @@ def test_captures_implies_begin_end_captures():
)
def test_captures_implies_begin_while_captures():
compiler, state = _compiler_state({
def test_captures_implies_begin_while_captures(compiler_state):
compiler, state = compiler_state({
'scopeName': 'test',
'patterns': [
{
@@ -392,8 +395,8 @@ def test_captures_implies_begin_while_captures():
)
def test_include_self():
compiler, state = _compiler_state({
def test_include_self(compiler_state):
compiler, state = compiler_state({
'scopeName': 'test',
'patterns': [
{
@@ -416,8 +419,8 @@ def test_include_self():
)
def test_include_repository_rule():
compiler, state = _compiler_state({
def test_include_repository_rule(compiler_state):
compiler, state = compiler_state({
'scopeName': 'test',
'patterns': [{'include': '#impl'}],
'repository': {
@@ -438,8 +441,8 @@ def test_include_repository_rule():
)
def test_include_other_grammar():
compiler, state = _compiler_state(
def test_include_other_grammar(compiler_state):
compiler, state = compiler_state(
{
'scopeName': 'test',
'patterns': [
@@ -494,8 +497,8 @@ def test_include_other_grammar():
)
def test_include_base():
compiler, state = _compiler_state(
def test_include_base(compiler_state):
compiler, state = compiler_state(
{
'scopeName': 'test',
'patterns': [
@@ -542,8 +545,8 @@ def test_include_base():
)
def test_rule_with_begin_and_no_end():
compiler, state = _compiler_state({
def test_rule_with_begin_and_no_end(compiler_state):
compiler, state = compiler_state({
'scopeName': 'test',
'patterns': [
{
@@ -566,8 +569,8 @@ def test_rule_with_begin_and_no_end():
)
def test_begin_end_substitute_special_chars():
compiler, state = _compiler_state({
def test_begin_end_substitute_special_chars(compiler_state):
compiler, state = compiler_state({
'scopeName': 'test',
'patterns': [{'begin': r'(\*)', 'end': r'\1', 'name': 'italic'}],
})

@@ -5,7 +5,7 @@ from unittest import mock
import pytest
from babi.color_manager import ColorManager
from babi.highlight import Grammars
from babi.hl.interface import HL
from babi.hl.syntax import Syntax
from babi.theme import Color
from babi.theme import Theme
@@ -71,8 +71,8 @@ THEME = Theme.from_dct({
@pytest.fixture
def syntax(tmpdir):
return Syntax(Grammars.from_syntax_dir(tmpdir), THEME, ColorManager.make())
def syntax(make_grammars):
return Syntax(make_grammars(), THEME, ColorManager.make())
def test_init_screen_low_color(stdscr, syntax):
@@ -149,3 +149,20 @@ def test_style_attributes_applied(stdscr, syntax):
style = THEME.select(('keyword.python',))
attr = syntax.blank_file_highlighter().attr(style)
assert attr == 2 << 8 | curses.A_BOLD
def test_syntax_highlight_cache_first_line(stdscr, make_grammars):
with FakeCurses.patch(n_colors=256, can_change_color=False):
grammars = make_grammars({
'scopeName': 'source.demo',
'fileTypes': ['demo'],
'patterns': [{'match': r'\Aint', 'name': 'keyword'}],
})
syntax = Syntax(grammars, THEME, ColorManager.make())
syntax._init_screen(stdscr)
file_hl = syntax.file_highlighter('foo.demo', '')
file_hl.highlight_until(['int', 'int'], 2)
assert file_hl.regions == [
(HL(0, 3, curses.A_BOLD | 2 << 8),),
(),
]

@@ -0,0 +1,84 @@
import json
import pytest
from babi.textmate_demo import main
THEME = {
'colors': {'foreground': '#ffffff', 'background': '#000000'},
'tokenColors': [
{'scope': 'bold', 'settings': {'fontStyle': 'bold'}},
{'scope': 'italic', 'settings': {'fontStyle': 'italic'}},
{'scope': 'underline', 'settings': {'fontStyle': 'underline'}},
{'scope': 'comment', 'settings': {'foreground': '#1e77d3'}},
],
}
GRAMMAR = {
'scopeName': 'source.demo',
'fileTypes': ['demo'],
'patterns': [
{'match': r'\*[^*]*\*', 'name': 'bold'},
{'match': '/[^/]*/', 'name': 'italic'},
{'match': '_[^_]*_', 'name': 'underline'},
{'match': '#.*', 'name': 'comment'},
],
}
@pytest.fixture
def theme_grammars(tmpdir):
theme = tmpdir.join('config/theme.json').ensure()
theme.write(json.dumps(THEME))
grammars = tmpdir.join('grammar_v1').ensure_dir()
grammars.join('source.demo.json').write(json.dumps(GRAMMAR))
return theme, grammars
def test_basic(theme_grammars, tmpdir, capsys):
theme, grammars = theme_grammars
f = tmpdir.join('f.demo')
f.write('*bold*/italic/_underline_# comment\n')
assert not main((
'--theme', str(theme), '--grammar-dir', str(grammars),
str(f),
))
out, _ = capsys.readouterr()
assert out == (
'\x1b[48;2;0;0;0m\n'
'\x1b[38;2;255;255;255m\x1b[48;2;0;0;0m\x1b[1m'
'*bold*'
'\x1b[39m\x1b[49m\x1b[22m'
'\x1b[38;2;255;255;255m\x1b[48;2;0;0;0m\x1b[3m'
'/italic/'
'\x1b[39m\x1b[49m\x1b[23m'
'\x1b[38;2;255;255;255m\x1b[48;2;0;0;0m\x1b[4m'
'_underline_'
'\x1b[39m\x1b[49m\x1b[24m'
'\x1b[38;2;30;119;211m\x1b[48;2;0;0;0m'
'# comment'
'\x1b[39m\x1b[49m\x1b'
'[38;2;255;255;255m\x1b[48;2;0;0;0m\n\x1b[39m\x1b[49m'
'\x1b[m'
)
def test_basic_with_blank_theme(theme_grammars, tmpdir, capsys):
theme, grammars = theme_grammars
theme.write('{}')
f = tmpdir.join('f.demo')
f.write('*bold*/italic/_underline_# comment\n')
assert not main((
'--theme', str(theme), '--grammar-dir', str(grammars),
str(f),
))
out, _ = capsys.readouterr()
assert out == '*bold*/italic/_underline_# comment\n\x1b[m'