14 Commits

Author SHA1 Message Date
Anthony Sottile
7850481565 v0.0.4 2020-03-28 08:01:02 -07:00
Anthony Sottile
b536291989 Fix replacing with embedded newline characters
Resolves #39
2020-03-27 20:32:43 -07:00
Anthony Sottile
f8737557d3 Add a sample theme to the README 2020-03-27 19:29:52 -07:00
Anthony Sottile
d597b4087d add dist and build to gitignore 2020-03-27 19:10:11 -07:00
Anthony Sottile
41aa025d3d Fix edge highlighting for 1-length highlights 2020-03-27 19:06:50 -07:00
Anthony Sottile
de956b7bab fix saving files with windows newlines 2020-03-27 18:42:37 -07:00
Anthony Sottile
1d3d413b93 Fix grammars which include \z 2020-03-27 18:18:16 -07:00
Anthony Sottile
50ad1e06f9 Add demo for showing vs code's tokenization 2020-03-27 17:59:35 -07:00
Anthony Sottile
032c3d78fc v0.0.3 2020-03-26 20:38:52 -07:00
Anthony Sottile
a197645087 merge the textmate demo into babi 2020-03-26 20:26:57 -07:00
Anthony Sottile
9f8e400d32 switch to babi-grammars for syntax 2020-03-26 19:43:01 -07:00
Anthony Sottile
2123e6ee84 improve performance by ~.8%
apparently contextlib.suppress is enough to show up in profiles
2020-03-23 20:57:53 -07:00
Anthony Sottile
b529dde91a Fix incorrect caching in syntax highlighter
the concrete broken case was for markdown with yaml

```md
---
x: y
---

(this one shouldn't be yaml highlighted)
---
x: y
---
```
2020-03-23 20:05:47 -07:00
Anthony Sottile
c4e2f8e9cf this is unused 2020-03-22 20:12:04 -07:00
23 changed files with 521 additions and 211 deletions

2
.gitignore vendored
View File

@@ -5,4 +5,6 @@
/.mypy_cache
/.pytest_cache
/.tox
/build
/dist
/venv*

View File

@@ -63,12 +63,16 @@ in prompts (search, search replace, command):
the syntax highlighting setup is a bit manual right now
1. from a clone of babi, run `./bin/download-syntax` -- you will likely need
to install some additional packages to download them (`pip install cson`)
2. find a visual studio code theme, convert it to json (if it is not already
1. find a visual studio code theme, convert it to json (if it is not already
json) and put it at `~/.config/babi/theme.json`. a helper script is
provided to make this easier: `./bin/download-theme NAME URL`
here's a modified vs dark plus theme that works:
```bash
./bin/download-theme vs-dark-asottile https://gist.github.com/asottile/b465856c82b1aaa4ba8c7c6314a72e13/raw/22d602fb355fb12b04f176a733941ba5713bc36c/vs_dark_asottile.json
```
## demos
most things work! here's a few screenshots

View File

@@ -1,4 +1,3 @@
import contextlib
import curses
from typing import Dict
from typing import NamedTuple
@@ -34,8 +33,10 @@ class ColorManager(NamedTuple):
return self.raw_color_pair(fg_i, bg_i)
def raw_color_pair(self, fg: int, bg: int) -> int:
with contextlib.suppress(KeyError):
try:
return self.raw_pairs[(fg, bg)]
except KeyError:
pass
n = self.raw_pairs[(fg, bg)] = len(self.raw_pairs) + 1
curses.init_pair(n, fg, bg)

View File

@@ -40,8 +40,6 @@ if TYPE_CHECKING:
TCallable = TypeVar('TCallable', bound=Callable[..., Any])
HIGHLIGHT = curses.A_REVERSE | curses.A_DIM
def _restore_lines_eof_invariant(lines: MutableSequenceNoSlice) -> None:
"""The file lines will always contain a blank empty string at the end to
@@ -466,9 +464,28 @@ class File:
with self.edit_action_context('replace', final=True):
replaced = match.expand(replace)
line = screen.file.lines[line_y]
line = line[:match.start()] + replaced + line[match.end():]
screen.file.lines[line_y] = line
search.offset = len(replaced)
if '\n' in replaced:
replaced_lines = replaced.split('\n')
self.lines[line_y] = (
f'{line[:match.start()]}{replaced_lines[0]}'
)
for i, ins_line in enumerate(replaced_lines[1:-1], 1):
self.lines.insert(line_y + i, ins_line)
last_insert = line_y + len(replaced_lines) - 1
self.lines.insert(
last_insert,
f'{replaced_lines[-1]}{line[match.end():]}',
)
self.y = last_insert
self.x = self.x_hint = 0
search.offset = len(replaced_lines[-1])
else:
self.lines[line_y] = (
f'{line[:match.start()]}'
f'{replaced}'
f'{line[match.end():]}'
)
search.offset = len(replaced)
elif res == 'n':
search.offset = 1
else:
@@ -846,7 +863,7 @@ class File:
for region in file_hl.regions[l_y]:
if region.x >= l_x_max:
break
elif region.end < l_x:
elif region.end <= l_x:
continue
if l_x and region.x <= l_x:

View File

@@ -1,14 +1,13 @@
import contextlib
import functools
import json
import os.path
from typing import Any
from typing import Dict
from typing import FrozenSet
from typing import List
from typing import Match
from typing import NamedTuple
from typing import Optional
from typing import Sequence
from typing import Tuple
from typing import TypeVar
@@ -618,8 +617,10 @@ class Compiler:
return PatternRule(rule.name, make_regset(*regs), rules)
def compile_rule(self, rule: _Rule) -> CompiledRule:
with contextlib.suppress(KeyError):
try:
return self._c_rules[rule]
except KeyError:
pass
grammar = self._rule_to_grammar[rule]
ret = self._c_rules[rule] = self._compile_rule(grammar, rule)
@@ -627,41 +628,58 @@ class Compiler:
class Grammars:
def __init__(self, grammars: Sequence[Dict[str, Any]]) -> None:
self._raw = {grammar['scopeName']: grammar for grammar in grammars}
self._find_scope = [
(
frozenset(grammar.get('fileTypes', ())),
make_reg(grammar.get('firstLineMatch', '$impossible^')),
grammar['scopeName'],
)
for grammar in grammars
]
self._parsed: Dict[str, Grammar] = {}
self._compilers: Dict[str, Compiler] = {}
def __init__(self, *directories: str) -> None:
self._scope_to_files = {
os.path.splitext(filename)[0]: os.path.join(directory, filename)
for directory in directories
if os.path.exists(directory)
for filename in os.listdir(directory)
if filename.endswith('.json')
}
@classmethod
def from_syntax_dir(cls, syntax_dir: str) -> 'Grammars':
grammars = [{'scopeName': 'source.unknown', 'patterns': []}]
if os.path.exists(syntax_dir):
for filename in os.listdir(syntax_dir):
with open(os.path.join(syntax_dir, filename)) as f:
grammars.append(json.load(f))
return cls(grammars)
unknown_grammar = {'scopeName': 'source.unknown', 'patterns': []}
self._raw = {'source.unknown': unknown_grammar}
self._file_types: List[Tuple[FrozenSet[str], str]] = []
self._first_line: List[Tuple[_Reg, str]] = []
self._parsed: Dict[str, Grammar] = {}
self._compiled: Dict[str, Compiler] = {}
def _raw_for_scope(self, scope: str) -> Dict[str, Any]:
try:
return self._raw[scope]
except KeyError:
pass
grammar_path = self._scope_to_files.pop(scope)
with open(grammar_path) as f:
ret = self._raw[scope] = json.load(f)
file_types = frozenset(ret.get('fileTypes', ()))
first_line = make_reg(ret.get('firstLineMatch', '$impossible^'))
self._file_types.append((file_types, scope))
self._first_line.append((first_line, scope))
return ret
def grammar_for_scope(self, scope: str) -> Grammar:
with contextlib.suppress(KeyError):
try:
return self._parsed[scope]
except KeyError:
pass
ret = self._parsed[scope] = Grammar.from_data(self._raw[scope])
raw = self._raw_for_scope(scope)
ret = self._parsed[scope] = Grammar.from_data(raw)
return ret
def compiler_for_scope(self, scope: str) -> Compiler:
with contextlib.suppress(KeyError):
return self._compilers[scope]
try:
return self._compiled[scope]
except KeyError:
pass
grammar = self.grammar_for_scope(scope)
ret = self._compilers[scope] = Compiler(grammar, self)
ret = self._compiled[scope] = Compiler(grammar, self)
return ret
def blank_compiler(self) -> Compiler:
@@ -669,20 +687,26 @@ class Grammars:
def compiler_for_file(self, filename: str, first_line: str) -> Compiler:
for tag in tags_from_filename(filename) - {'text'}:
with contextlib.suppress(KeyError):
try:
# TODO: this doesn't always match even if we detect it
return self.compiler_for_scope(f'source.{tag}')
except KeyError:
pass
# didn't find it in the fast path, need to read all the json
for k in tuple(self._scope_to_files):
self._raw_for_scope(k)
_, _, ext = os.path.basename(filename).rpartition('.')
for extensions, first_line_match, scope_name in self._find_scope:
if (
ext in extensions or
first_line_match.match(
first_line, 0, first_line=True, boundary=True,
)
):
return self.compiler_for_scope(scope_name)
else:
return self.compiler_for_scope('source.unknown')
for extensions, scope in self._file_types:
if ext in extensions:
return self.compiler_for_scope(scope)
for reg, scope in self._first_line:
if reg.match(first_line, 0, first_line=True, boundary=True):
return self.compiler_for_scope(scope)
return self.compiler_for_scope('source.unknown')
def highlight_line(

View File

@@ -1,7 +1,10 @@
import curses
from typing import Dict
import functools
import math
from typing import Callable
from typing import List
from typing import NamedTuple
from typing import Optional
from typing import Tuple
from babi.color_manager import ColorManager
@@ -14,6 +17,7 @@ from babi.hl.interface import HLs
from babi.list_spy import SequenceNoSlice
from babi.theme import Style
from babi.theme import Theme
from babi.user_data import prefix_data
from babi.user_data import xdg_config
from babi.user_data import xdg_data
@@ -36,8 +40,10 @@ class FileSyntax:
self.regions: List[HLs] = []
self._states: List[State] = []
self._hl_cache: Dict[str, Dict[State, Tuple[State, HLs]]]
self._hl_cache = {}
# this will be assigned a functools.lru_cache per instance for
# better hit rate and memory usage
self._hl: Optional[Callable[[State, str, bool], Tuple[State, HLs]]]
self._hl = None
def attr(self, style: Style) -> int:
pair = self._color_manager.color_pair(style.fg, style.bg)
@@ -48,19 +54,14 @@ class FileSyntax:
curses.A_UNDERLINE * style.u
)
def _hl(
def _hl_uncached(
self,
state: State,
line: str,
i: int,
first_line: bool,
) -> Tuple[State, HLs]:
try:
return self._hl_cache[line][state]
except KeyError:
pass
new_state, regions = highlight_line(
self._compiler, state, f'{line}\n', first_line=i == 0,
self._compiler, state, f'{line}\n', first_line=first_line,
)
# remove the trailing newline
@@ -83,18 +84,22 @@ class FileSyntax:
else:
regs.append(HL(x=r.start, end=r.end, attr=attr))
dct = self._hl_cache.setdefault(line, {})
ret = dct[state] = (new_state, tuple(regs))
return ret
return new_state, tuple(regs)
def highlight_until(self, lines: SequenceNoSlice, idx: int) -> None:
if self._hl is None:
# the docs claim better performance with power of two sizing
size = max(4096, 2 ** (int(math.log(len(lines), 2)) + 2))
self._hl = functools.lru_cache(maxsize=size)(self._hl_uncached)
if not self._states:
state = self._compiler.root_state
else:
state = self._states[-1]
for i in range(len(self._states), idx):
state, regions = self._hl(state, lines[i], i)
# https://github.com/python/mypy/issues/8579
state, regions = self._hl(state, lines[i], i == 0) # type: ignore
self._states.append(state)
self.regions.append(regions)
@@ -140,7 +145,7 @@ class Syntax(NamedTuple):
stdscr: 'curses._CursesWindow',
color_manager: ColorManager,
) -> 'Syntax':
grammars = Grammars.from_syntax_dir(xdg_data('textmate_syntax'))
grammars = Grammars(prefix_data('grammar_v1'), xdg_data('grammar_v1'))
theme = Theme.from_filename(xdg_config('theme.json'))
ret = cls(grammars, theme, color_manager)
ret._init_screen(stdscr)

View File

@@ -43,7 +43,7 @@ def _replace_esc(s: str, chars: str) -> str:
class _Reg:
def __init__(self, s: str) -> None:
self._pattern = s
self._pattern = _replace_esc(s, 'z')
def __repr__(self) -> str:
return f'{type(self).__name__}({self._pattern!r})'

View File

@@ -416,7 +416,7 @@ class Screen:
self.file.filename = filename
if os.path.isfile(self.file.filename):
with open(self.file.filename) as f:
with open(self.file.filename, newline='') as f:
*_, sha256 = get_lines(f)
else:
sha256 = hashlib.sha256(b'').hexdigest()

69
babi/textmate_demo.py Normal file
View File

@@ -0,0 +1,69 @@
import argparse
from typing import Optional
from typing import Sequence
from babi.highlight import Compiler
from babi.highlight import Grammars
from babi.highlight import highlight_line
from babi.theme import Style
from babi.theme import Theme
from babi.user_data import prefix_data
from babi.user_data import xdg_config
def print_styled(s: str, style: Style) -> None:
    """Write *s* to stdout wrapped in the ANSI escapes for *style*.

    No trailing newline is added and the stream is flushed so partial
    lines appear immediately.
    """
    enable = []
    disable = []
    # 24-bit foreground / background colors with their reset codes
    if style.fg is not None:
        enable.append('\x1b[38;2;{r};{g};{b}m'.format(**style.fg._asdict()))
        disable.append('\x1b[39m')
    if style.bg is not None:
        enable.append('\x1b[48;2;{r};{g};{b}m'.format(**style.bg._asdict()))
        disable.append('\x1b[49m')
    # font attributes: bold / italic / underline, each paired with its reset
    for flag, on_code, off_code in (
            (style.b, '\x1b[1m', '\x1b[22m'),
            (style.i, '\x1b[3m', '\x1b[23m'),
            (style.u, '\x1b[4m', '\x1b[24m'),
    ):
        if flag:
            enable.append(on_code)
            disable.append(off_code)
    prefix = ''.join(enable)
    suffix = ''.join(disable)
    print(f'{prefix}{s}{suffix}', end='', flush=True)
def _highlight_output(theme: Theme, compiler: Compiler, filename: str) -> int:
    """Render *filename* to stdout, colorized according to *theme*.

    Returns 0 so the result can be used directly as an exit code.
    """
    state = compiler.root_state
    bg = theme.default.bg
    if bg is not None:
        # paint the default background before any text is emitted
        print('\x1b[48;2;{r};{g};{b}m'.format(**bg._asdict()))
    with open(filename) as f:
        for idx, line in enumerate(f):
            state, regions = highlight_line(compiler, state, line, idx == 0)
            for start, end, scope in regions:
                print_styled(line[start:end], theme.select(scope))
    # reset all attributes when done
    print('\x1b[m', end='')
    return 0
def main(argv: Optional[Sequence[str]] = None) -> int:
    """CLI entry point: print a file with ANSI syntax highlighting."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--theme', default=xdg_config('theme.json'))
    parser.add_argument('--grammar-dir', default=prefix_data('grammar_v1'))
    parser.add_argument('filename')
    opts = parser.parse_args(argv)

    # the first line participates in grammar detection (firstLineMatch)
    with open(opts.filename) as source:
        first_line = next(source, '')

    theme = Theme.from_filename(opts.theme)
    grammars = Grammars(opts.grammar_dir)
    compiler = grammars.compiler_for_file(opts.filename, first_line)
    return _highlight_output(theme, compiler, opts.filename)


if __name__ == '__main__':
    exit(main())

View File

@@ -1,4 +1,5 @@
import os.path
import sys
def _xdg(*path: str, env: str, default: str) -> str:
@@ -14,3 +15,7 @@ def xdg_data(*path: str) -> str:
def xdg_config(*path: str) -> str:
return _xdg(*path, env='XDG_CONFIG_HOME', default='~/.config')
def prefix_data(*path: str) -> str:
    """Return a path under this interpreter prefix's babi data directory."""
    parts = (sys.prefix, 'share/babi') + path
    return os.path.join(*parts)

View File

@@ -1,86 +0,0 @@
#!/usr/bin/env python3
import argparse
import enum
import json
import os.path
import plistlib
import urllib.request
from typing import NamedTuple
import cson # pip install cson
DEFAULT_DIR = os.path.join(
os.environ.get('XDG_DATA_HOME') or
os.path.expanduser('~/.local/share'),
'babi/textmate_syntax',
)
Ext = enum.Enum('Ext', 'CSON PLIST JSON')
def _convert_cson(src: bytes) -> str:
return json.dumps(cson.loads(src))
def _convert_json(src: bytes) -> str:
return json.dumps(json.loads(src))
def _convert_plist(src: bytes) -> str:
return json.dumps(plistlib.loads(src))
EXT_CONVERT = {
Ext.CSON: _convert_cson,
Ext.JSON: _convert_json,
Ext.PLIST: _convert_plist,
}
class Syntax(NamedTuple):
name: str
ext: Ext
url: str
SYNTAXES = (
Syntax('c', Ext.JSON, 'https://raw.githubusercontent.com/jeff-hykin/cpp-textmate-grammar/53e39b1c/syntaxes/c.tmLanguage.json'), # noqa: E501
Syntax('css', Ext.CSON, 'https://raw.githubusercontent.com/atom/language-css/9feb69c081308b63f78bb0d6a2af2ff5eb7d869b/grammars/css.cson'), # noqa: E501
Syntax('docker', Ext.PLIST, 'https://raw.githubusercontent.com/moby/moby/c7ad2b866/contrib/syntax/textmate/Docker.tmbundle/Syntaxes/Dockerfile.tmLanguage'), # noqa: E501
Syntax('diff', Ext.PLIST, 'https://raw.githubusercontent.com/textmate/diff.tmbundle/0593bb77/Syntaxes/Diff.plist'), # noqa: E501
Syntax('html', Ext.PLIST, 'https://raw.githubusercontent.com/textmate/html.tmbundle/0c3d5ee5/Syntaxes/HTML.plist'), # noqa: E501
Syntax('html-derivative', Ext.PLIST, 'https://raw.githubusercontent.com/textmate/html.tmbundle/0c3d5ee54de3a993f747f54186b73a4d2d3c44a2/Syntaxes/HTML%20(Derivative).tmLanguage'), # noqa: E501
Syntax('ini', Ext.PLIST, 'https://raw.githubusercontent.com/textmate/ini.tmbundle/7d8c7b55/Syntaxes/Ini.plist'), # noqa: E501
Syntax('json', Ext.PLIST, 'https://raw.githubusercontent.com/microsoft/vscode-JSON.tmLanguage/d113e90937ed3ecc31ac54750aac2e8efa08d784/JSON.tmLanguage'), # noqa: E501
Syntax('make', Ext.PLIST, 'https://raw.githubusercontent.com/fadeevab/make.tmbundle/fd57c0552/Syntaxes/Makefile.plist'), # noqa: E501
Syntax('markdown', Ext.PLIST, 'https://raw.githubusercontent.com/microsoft/vscode-markdown-tm-grammar/59a5962/syntaxes/markdown.tmLanguage'), # noqa: E501
Syntax('powershell', Ext.PLIST, 'https://raw.githubusercontent.com/PowerShell/EditorSyntax/4a0a0766/PowerShellSyntax.tmLanguage'), # noqa: E501
Syntax('puppet', Ext.PLIST, 'https://raw.githubusercontent.com/lingua-pupuli/puppet-editor-syntax/dc414b8a/syntaxes/puppet.tmLanguage'), # noqa: E501
Syntax('python', Ext.PLIST, 'https://raw.githubusercontent.com/MagicStack/MagicPython/c9b3409d/grammars/MagicPython.tmLanguage'), # noqa: E501
# TODO: https://github.com/zargony/atom-language-rust/pull/149
Syntax('rust', Ext.CSON, 'https://raw.githubusercontent.com/asottile/atom-language-rust/e113ca67/grammars/rust.cson'), # noqa: E501
Syntax('shell', Ext.CSON, 'https://raw.githubusercontent.com/atom/language-shellscript/7008ea926867d8a231003e78094091471c4fccf8/grammars/shell-unix-bash.cson'), # noqa: E501
# TODO: https://github.com/atom/language-xml/pull/99
Syntax('xml', Ext.CSON, 'https://raw.githubusercontent.com/asottile/language-xml/2d76bc1f/grammars/xml.cson'), # noqa: E501
Syntax('yaml', Ext.PLIST, 'https://raw.githubusercontent.com/textmate/yaml.tmbundle/e54ceae3/Syntaxes/YAML.tmLanguage'), # noqa: E501
)
def main() -> int:
parser = argparse.ArgumentParser()
parser.add_argument('--dest', default=DEFAULT_DIR)
args = parser.parse_args()
os.makedirs(args.dest, exist_ok=True)
for syntax in SYNTAXES:
print(f'downloading {syntax.name}...')
resp = urllib.request.urlopen(syntax.url).read()
converted = EXT_CONVERT[syntax.ext](resp)
with open(os.path.join(args.dest, f'{syntax.name}.json'), 'w') as f:
f.write(converted)
return 0
if __name__ == '__main__':
exit(main())

View File

@@ -1,6 +1,6 @@
[metadata]
name = babi
version = 0.0.2
version = 0.0.4
description = a text editor
long_description = file: README.md
long_description_content_type = text/markdown
@@ -22,6 +22,7 @@ classifiers =
[options]
packages = find:
install_requires =
babi-grammars
identify
onigurumacffi>=0.0.10
importlib_metadata>=1;python_version<"3.8"
@@ -31,6 +32,7 @@ python_requires = >=3.6.1
[options.entry_points]
console_scripts =
babi = babi.main:main
babi-textmate-demo = babi.textmate_demo:main
[options.packages.find]
exclude =

2
testing/vsc_test/.gitignore vendored Normal file
View File

@@ -0,0 +1,2 @@
/node_modules
/package-lock.json

View File

@@ -0,0 +1,5 @@
{
"dependencies": [
"vscode-textmate"
]
}

51
testing/vsc_test/vsc.js Normal file
View File

@@ -0,0 +1,51 @@
const fs = require('fs');
const vsctm = require('vscode-textmate');

// Tokenize FILE using GRAMMAR with vscode-textmate and print each token's
// scopes, for comparison against babi's own highlighter.
if (process.argv.length < 4) {
    // fix: the usage text previously named a non-existent `t.js`
    console.log('usage: vsc.js GRAMMAR FILE');
    process.exit(1);
}
const grammar = process.argv[2];
const file = process.argv[3];
// the scope name is read from the grammar file itself
const scope = JSON.parse(fs.readFileSync(grammar, {encoding: 'UTF-8'})).scopeName;

/**
 * Utility to read a file as a promise
 */
function readFile(path) {
    return new Promise((resolve, reject) => {
        fs.readFile(path, (error, data) => error ? reject(error) : resolve(data));
    })
}

// Create a registry that can create a grammar from a scope name.
const registry = new vsctm.Registry({
    loadGrammar: (scopeName) => {
        if (scopeName === scope) {
            return readFile(grammar).then(data => vsctm.parseRawGrammar(data.toString(), grammar))
        }
        console.log(`Unknown scope name: ${scopeName}`);
        return null;
    }
});

// Load the grammar (and any grammars included by it) asynchronously.
registry.loadGrammar(scope).then(grammar => {
    // fix: String.prototype.trimEnd takes no arguments; the previous
    // `trimEnd('\n')` silently ignored its argument -- behavior is the same
    // (all trailing whitespace is trimmed) but the call is now honest.
    const text = fs.readFileSync(file, {encoding: 'UTF-8'}).trimEnd().split(/\n/);
    let ruleStack = vsctm.INITIAL;
    for (let i = 0; i < text.length; i++) {
        const line = text[i];
        const lineTokens = grammar.tokenizeLine(line, ruleStack);
        console.log(`\nTokenizing line: ${line}`);
        for (let j = 0; j < lineTokens.tokens.length; j++) {
            const token = lineTokens.tokens[j];
            console.log(` - token from ${token.startIndex} to ${token.endIndex} ` +
                `(${line.substring(token.startIndex, token.endIndex)}) ` +
                `with scopes ${token.scopes.join(', ')}`
            );
        }
        ruleStack = lineTokens.ruleStack;
    }
});

17
tests/conftest.py Normal file
View File

@@ -0,0 +1,17 @@
import json

import pytest

from babi.highlight import Grammars


@pytest.fixture
def make_grammars(tmpdir):
    """Return a factory that writes grammar dicts to disk as a Grammars dir."""
    grammar_dir = tmpdir.join('grammars').ensure_dir()

    def _make(*grammar_dcts):
        # one json file per grammar, named after its scopeName
        for dct in grammar_dcts:
            filename = f'{dct["scopeName"]}.json'
            grammar_dir.join(filename).write(json.dumps(dct))
        return Grammars(grammar_dir)

    return _make

View File

@@ -16,6 +16,13 @@ from babi.screen import VERSION_STR
from testing.runner import PrintsErrorRunner
@pytest.fixture(autouse=True)
def prefix_home(tmpdir):
prefix_home = tmpdir.join('prefix_home')
with mock.patch.object(sys, 'prefix', str(prefix_home)):
yield prefix_home
@pytest.fixture(autouse=True)
def xdg_data_home(tmpdir):
data_home = tmpdir.join('data_home')
@@ -66,6 +73,7 @@ class Screen:
self.attrs[y] = (line_attr[:x] + new + line_attr[x:])[:self.width]
def chgat(self, y, x, n, attr):
assert n >= 0 # TODO: switch to > 0, we should never do 0-length
self.attrs[y][x:x + n] = [attr] * n
def move(self, y, x):

View File

@@ -272,3 +272,31 @@ def test_replace_separate_line_after_wrapping(run, ten_lines):
h.await_text_missing('line_0')
h.press('y')
h.await_text_missing('line_1')
def test_replace_with_newline_characters(run, ten_lines):
    with run(str(ten_lines)) as h, and_exit(h):
        # drive the search-and-replace prompts: the replacement contains an
        # embedded \n, so one line is split into two
        script = (
            (h.press, '^\\'),
            (h.await_text, 'search (to replace):'),
            (h.press_and_enter, '(line)_([01])'),
            (h.await_text, 'replace with:'),
            (h.press_and_enter, r'\1\n\2'),
            (h.await_text, 'replace [yes, no, all]?'),
            (h.press, 'a'),
            (h.await_text_missing, 'line_0'),
            (h.await_text_missing, 'line_1'),
            (h.await_text, 'line\n0\nline\n1\n'),
        )
        for step, arg in script:
            step(arg)
def test_replace_with_multiple_newline_characters(run, ten_lines):
    with run(str(ten_lines)) as h, and_exit(h):
        # replacement with several embedded \n (including a trailing one)
        # must split the matched line into multiple lines
        script = (
            (h.press, '^\\'),
            (h.await_text, 'search (to replace):'),
            (h.press_and_enter, '(li)(ne)_(1)'),
            (h.await_text, 'replace with:'),
            (h.press_and_enter, r'\1\n\2\n\3\n'),
            (h.await_text, 'replace [yes, no, all]?'),
            (h.press, 'a'),
            (h.await_text_missing, 'line_1'),
            (h.await_text, 'li\nne\n1\n\nline_2'),
        )
        for step, arg in script:
            step(arg)

View File

@@ -12,6 +12,19 @@ def test_mixed_newlines(run, tmpdir):
h.await_text(r"mixed newlines will be converted to '\n'")
def test_modify_file_with_windows_newlines(run, tmpdir):
    # a uniformly-CRLF file must keep its \r\n line endings when saved
    f = tmpdir.join('f')
    f.write_binary(b'foo\r\nbar\r\n')
    with run(str(f)) as h, and_exit(h):
        for action, text in (
                # should not start modified
                (h.await_text_missing, '*'),
                (h.press, 'Enter'),
                (h.await_text, '*'),
                (h.press, '^S'),
                (h.await_text, 'saved!'),
        ):
            action(text)
        assert f.read_binary() == b'\r\nfoo\r\nbar\r\n'
def test_new_file(run):
with run('this_is_a_new_file') as h, and_exit(h):
h.await_text('this_is_a_new_file')

View File

@@ -15,6 +15,7 @@ THEME = json.dumps({
'settings': {'foreground': '#5f0000', 'background': '#ff5f5f'},
},
{'scope': 'tqs', 'settings': {'foreground': '#00005f'}},
{'scope': 'qmark', 'settings': {'foreground': '#5f0000'}},
{'scope': 'b', 'settings': {'fontStyle': 'bold'}},
{'scope': 'i', 'settings': {'fontStyle': 'italic'}},
{'scope': 'u', 'settings': {'fontStyle': 'underline'}},
@@ -28,6 +29,7 @@ SYNTAX = json.dumps({
{'match': r'#.*$\n?', 'name': 'comment'},
{'match': r'^-.*$\n?', 'name': 'diffremove'},
{'begin': '"""', 'end': '"""', 'name': 'tqs'},
{'match': r'\?', 'name': 'qmark'},
],
})
DEMO_S = '''\
@@ -43,7 +45,7 @@ still more
@pytest.fixture(autouse=True)
def theme_and_grammar(xdg_data_home, xdg_config_home):
xdg_config_home.join('babi/theme.json').ensure().write(THEME)
xdg_data_home.join('babi/textmate_syntax/demo.json').ensure().write(SYNTAX)
xdg_data_home.join('babi/grammar_v1/demo.json').ensure().write(SYNTAX)
@pytest.fixture
@@ -97,3 +99,17 @@ def test_syntax_highlighting_off_screen_does_not_crash(run, tmpdir):
h.await_text('"""b"""')
expected = [(236, 40, 0)] * 11 + [(17, 40, 0)] * 7 + [(236, 40, 0)] * 2
h.assert_screen_attr_equals(1, expected)
def test_syntax_highlighting_one_off_left_of_screen(run, tmpdir):
f = tmpdir.join('f.demo')
f.write(f'{"x" * 11}?123456789')
with run(str(f), term='screen-256color', width=20) as h, and_exit(h):
h.await_text('xxx?123')
expected = [(236, 40, 0)] * 11 + [(52, 40, 0)] + [(236, 40, 0)] * 8
h.assert_screen_attr_equals(1, expected)
h.press('End')
h.await_text_missing('?')
h.assert_screen_attr_equals(1, [(236, 40, 0)] * 20)

View File

@@ -1,34 +1,37 @@
from babi.highlight import Grammars
import pytest
from babi.highlight import highlight_line
from babi.highlight import Region
def test_grammar_matches_extension_only_name():
def test_grammar_matches_extension_only_name(make_grammars):
data = {'scopeName': 'shell', 'patterns': [], 'fileTypes': ['bashrc']}
grammars = Grammars([data])
grammars = make_grammars(data)
compiler = grammars.compiler_for_file('.bashrc', 'alias nano=babi')
assert compiler.root_state.entries[0].scope[0] == 'shell'
def test_grammar_matches_via_identify_tag():
data = {'scopeName': 'source.ini', 'patterns': []}
grammars = Grammars([data])
def test_grammar_matches_via_identify_tag(make_grammars):
grammars = make_grammars({'scopeName': 'source.ini', 'patterns': []})
compiler = grammars.compiler_for_file('setup.cfg', '')
assert compiler.root_state.entries[0].scope[0] == 'source.ini'
def _compiler_state(*grammar_dcts):
grammars = Grammars(grammar_dcts)
compiler = grammars.compiler_for_scope(grammar_dcts[0]['scopeName'])
return compiler, compiler.root_state
@pytest.fixture
def compiler_state(make_grammars):
def _compiler_state(*grammar_dcts):
grammars = make_grammars(*grammar_dcts)
compiler = grammars.compiler_for_scope(grammar_dcts[0]['scopeName'])
return compiler, compiler.root_state
return _compiler_state
def test_backslash_a():
def test_backslash_a(compiler_state):
grammar = {
'scopeName': 'test',
'patterns': [{'name': 'aaa', 'match': r'\Aa+'}],
}
compiler, state = _compiler_state(grammar)
compiler, state = compiler_state(grammar)
state, (region_0,) = highlight_line(compiler, state, 'aaa', True)
state, (region_1,) = highlight_line(compiler, state, 'aaa', False)
@@ -51,8 +54,8 @@ BEGIN_END_NO_NL = {
}
def test_backslash_g_inline():
compiler, state = _compiler_state(BEGIN_END_NO_NL)
def test_backslash_g_inline(compiler_state):
compiler, state = compiler_state(BEGIN_END_NO_NL)
_, regions = highlight_line(compiler, state, 'xaax', True)
assert regions == (
@@ -63,8 +66,8 @@ def test_backslash_g_inline():
)
def test_backslash_g_next_line():
compiler, state = _compiler_state(BEGIN_END_NO_NL)
def test_backslash_g_next_line(compiler_state):
compiler, state = compiler_state(BEGIN_END_NO_NL)
state, regions1 = highlight_line(compiler, state, 'x\n', True)
state, regions2 = highlight_line(compiler, state, 'aax\n', False)
@@ -81,8 +84,8 @@ def test_backslash_g_next_line():
)
def test_end_before_other_match():
compiler, state = _compiler_state(BEGIN_END_NO_NL)
def test_end_before_other_match(compiler_state):
compiler, state = compiler_state(BEGIN_END_NO_NL)
state, regions = highlight_line(compiler, state, 'xazzx', True)
@@ -107,8 +110,8 @@ BEGIN_END_NL = {
}
def test_backslash_g_captures_nl():
compiler, state = _compiler_state(BEGIN_END_NL)
def test_backslash_g_captures_nl(compiler_state):
compiler, state = compiler_state(BEGIN_END_NL)
state, regions1 = highlight_line(compiler, state, 'x\n', True)
state, regions2 = highlight_line(compiler, state, 'aax\n', False)
@@ -124,8 +127,8 @@ def test_backslash_g_captures_nl():
)
def test_backslash_g_captures_nl_next_line():
compiler, state = _compiler_state(BEGIN_END_NL)
def test_backslash_g_captures_nl_next_line(compiler_state):
compiler, state = compiler_state(BEGIN_END_NL)
state, regions1 = highlight_line(compiler, state, 'x\n', True)
state, regions2 = highlight_line(compiler, state, 'aa\n', False)
@@ -147,8 +150,8 @@ def test_backslash_g_captures_nl_next_line():
)
def test_while_no_nl():
compiler, state = _compiler_state({
def test_while_no_nl(compiler_state):
compiler, state = compiler_state({
'scopeName': 'test',
'patterns': [{
'begin': '> ',
@@ -182,8 +185,8 @@ def test_while_no_nl():
)
def test_complex_captures():
compiler, state = _compiler_state({
def test_complex_captures(compiler_state):
compiler, state = compiler_state({
'scopeName': 'test',
'patterns': [
{
@@ -213,8 +216,8 @@ def test_complex_captures():
)
def test_captures_multiple_applied_to_same_capture():
compiler, state = _compiler_state({
def test_captures_multiple_applied_to_same_capture(compiler_state):
compiler, state = compiler_state({
'scopeName': 'test',
'patterns': [
{
@@ -256,8 +259,8 @@ def test_captures_multiple_applied_to_same_capture():
)
def test_captures_ignores_empty():
compiler, state = _compiler_state({
def test_captures_ignores_empty(compiler_state):
compiler, state = compiler_state({
'scopeName': 'test',
'patterns': [{
'match': '(.*) hi',
@@ -279,8 +282,8 @@ def test_captures_ignores_empty():
)
def test_captures_ignores_invalid_out_of_bounds():
compiler, state = _compiler_state({
def test_captures_ignores_invalid_out_of_bounds(compiler_state):
compiler, state = compiler_state({
'scopeName': 'test',
'patterns': [{'match': '.', 'captures': {'1': {'name': 'oob'}}}],
})
@@ -292,8 +295,8 @@ def test_captures_ignores_invalid_out_of_bounds():
)
def test_captures_begin_end():
compiler, state = _compiler_state({
def test_captures_begin_end(compiler_state):
compiler, state = compiler_state({
'scopeName': 'test',
'patterns': [
{
@@ -314,8 +317,8 @@ def test_captures_begin_end():
)
def test_captures_while_captures():
compiler, state = _compiler_state({
def test_captures_while_captures(compiler_state):
compiler, state = compiler_state({
'scopeName': 'test',
'patterns': [
{
@@ -343,8 +346,8 @@ def test_captures_while_captures():
)
def test_captures_implies_begin_end_captures():
compiler, state = _compiler_state({
def test_captures_implies_begin_end_captures(compiler_state):
compiler, state = compiler_state({
'scopeName': 'test',
'patterns': [
{
@@ -364,8 +367,8 @@ def test_captures_implies_begin_end_captures():
)
def test_captures_implies_begin_while_captures():
compiler, state = _compiler_state({
def test_captures_implies_begin_while_captures(compiler_state):
compiler, state = compiler_state({
'scopeName': 'test',
'patterns': [
{
@@ -392,8 +395,8 @@ def test_captures_implies_begin_while_captures():
)
def test_include_self():
compiler, state = _compiler_state({
def test_include_self(compiler_state):
compiler, state = compiler_state({
'scopeName': 'test',
'patterns': [
{
@@ -416,8 +419,8 @@ def test_include_self():
)
def test_include_repository_rule():
compiler, state = _compiler_state({
def test_include_repository_rule(compiler_state):
compiler, state = compiler_state({
'scopeName': 'test',
'patterns': [{'include': '#impl'}],
'repository': {
@@ -438,8 +441,8 @@ def test_include_repository_rule():
)
def test_include_other_grammar():
compiler, state = _compiler_state(
def test_include_other_grammar(compiler_state):
compiler, state = compiler_state(
{
'scopeName': 'test',
'patterns': [
@@ -494,8 +497,8 @@ def test_include_other_grammar():
)
def test_include_base():
compiler, state = _compiler_state(
def test_include_base(compiler_state):
compiler, state = compiler_state(
{
'scopeName': 'test',
'patterns': [
@@ -542,8 +545,8 @@ def test_include_base():
)
def test_rule_with_begin_and_no_end():
compiler, state = _compiler_state({
def test_rule_with_begin_and_no_end(compiler_state):
compiler, state = compiler_state({
'scopeName': 'test',
'patterns': [
{
@@ -566,8 +569,8 @@ def test_rule_with_begin_and_no_end():
)
def test_begin_end_substitute_special_chars():
compiler, state = _compiler_state({
def test_begin_end_substitute_special_chars(compiler_state):
compiler, state = compiler_state({
'scopeName': 'test',
'patterns': [{'begin': r'(\*)', 'end': r'\1', 'name': 'italic'}],
})
@@ -579,3 +582,26 @@ def test_begin_end_substitute_special_chars():
Region(1, 7, ('test', 'italic')),
Region(7, 8, ('test', 'italic')),
)
def test_backslash_z(compiler_state):
    # like the text.git-commit grammar: an `end` of \z can never match, so
    # the begin rule swallows everything through end-of-file
    grammar = {
        'scopeName': 'test',
        'patterns': [
            {'begin': '#', 'end': r'\z', 'name': 'comment'},
            {'name': 'other', 'match': '.'},
        ],
    }
    compiler, state = compiler_state(grammar)

    state, first_regions = highlight_line(compiler, state, '# comment', True)
    state, second_regions = highlight_line(compiler, state, 'other?', False)

    expected_first = (
        Region(0, 1, ('test', 'comment')),
        Region(1, 9, ('test', 'comment')),
    )
    assert first_regions == expected_first
    assert second_regions == (Region(0, 6, ('test', 'comment')),)

View File

@@ -5,7 +5,7 @@ from unittest import mock
import pytest
from babi.color_manager import ColorManager
from babi.highlight import Grammars
from babi.hl.interface import HL
from babi.hl.syntax import Syntax
from babi.theme import Color
from babi.theme import Theme
@@ -71,8 +71,8 @@ THEME = Theme.from_dct({
@pytest.fixture
def syntax(tmpdir):
return Syntax(Grammars.from_syntax_dir(tmpdir), THEME, ColorManager.make())
def syntax(make_grammars):
return Syntax(make_grammars(), THEME, ColorManager.make())
def test_init_screen_low_color(stdscr, syntax):
@@ -149,3 +149,20 @@ def test_style_attributes_applied(stdscr, syntax):
style = THEME.select(('keyword.python',))
attr = syntax.blank_file_highlighter().attr(style)
assert attr == 2 << 8 | curses.A_BOLD
def test_syntax_highlight_cache_first_line(stdscr, make_grammars):
with FakeCurses.patch(n_colors=256, can_change_color=False):
grammars = make_grammars({
'scopeName': 'source.demo',
'fileTypes': ['demo'],
'patterns': [{'match': r'\Aint', 'name': 'keyword'}],
})
syntax = Syntax(grammars, THEME, ColorManager.make())
syntax._init_screen(stdscr)
file_hl = syntax.file_highlighter('foo.demo', '')
file_hl.highlight_until(['int', 'int'], 2)
assert file_hl.regions == [
(HL(0, 3, curses.A_BOLD | 2 << 8),),
(),
]

View File

@@ -0,0 +1,84 @@
import json
import pytest
from babi.textmate_demo import main
THEME = {
'colors': {'foreground': '#ffffff', 'background': '#000000'},
'tokenColors': [
{'scope': 'bold', 'settings': {'fontStyle': 'bold'}},
{'scope': 'italic', 'settings': {'fontStyle': 'italic'}},
{'scope': 'underline', 'settings': {'fontStyle': 'underline'}},
{'scope': 'comment', 'settings': {'foreground': '#1e77d3'}},
],
}
GRAMMAR = {
'scopeName': 'source.demo',
'fileTypes': ['demo'],
'patterns': [
{'match': r'\*[^*]*\*', 'name': 'bold'},
{'match': '/[^/]*/', 'name': 'italic'},
{'match': '_[^_]*_', 'name': 'underline'},
{'match': '#.*', 'name': 'comment'},
],
}
@pytest.fixture
def theme_grammars(tmpdir):
theme = tmpdir.join('config/theme.json').ensure()
theme.write(json.dumps(THEME))
grammars = tmpdir.join('grammar_v1').ensure_dir()
grammars.join('source.demo.json').write(json.dumps(GRAMMAR))
return theme, grammars
def test_basic(theme_grammars, tmpdir, capsys):
theme, grammars = theme_grammars
f = tmpdir.join('f.demo')
f.write('*bold*/italic/_underline_# comment\n')
assert not main((
'--theme', str(theme), '--grammar-dir', str(grammars),
str(f),
))
out, _ = capsys.readouterr()
assert out == (
'\x1b[48;2;0;0;0m\n'
'\x1b[38;2;255;255;255m\x1b[48;2;0;0;0m\x1b[1m'
'*bold*'
'\x1b[39m\x1b[49m\x1b[22m'
'\x1b[38;2;255;255;255m\x1b[48;2;0;0;0m\x1b[3m'
'/italic/'
'\x1b[39m\x1b[49m\x1b[23m'
'\x1b[38;2;255;255;255m\x1b[48;2;0;0;0m\x1b[4m'
'_underline_'
'\x1b[39m\x1b[49m\x1b[24m'
'\x1b[38;2;30;119;211m\x1b[48;2;0;0;0m'
'# comment'
'\x1b[39m\x1b[49m\x1b'
'[38;2;255;255;255m\x1b[48;2;0;0;0m\n\x1b[39m\x1b[49m'
'\x1b[m'
)
def test_basic_with_blank_theme(theme_grammars, tmpdir, capsys):
    # an empty theme applies no styles: output is the file verbatim,
    # followed only by the final attribute reset
    theme, grammars = theme_grammars
    theme.write('{}')
    src = tmpdir.join('f.demo')
    src.write('*bold*/italic/_underline_# comment\n')
    argv = ('--theme', str(theme), '--grammar-dir', str(grammars), str(src))
    assert main(argv) == 0
    captured, _ = capsys.readouterr()
    assert captured == '*bold*/italic/_underline_# comment\n\x1b[m'