From 0300c4d10991fb6ce218d45c4fe6d71a73f07d62 Mon Sep 17 00:00:00 2001 From: ZyX Date: Sun, 20 Aug 2017 18:40:22 +0300 Subject: viml/expressions: Add lexer with some basic tests --- test/unit/viml/expressions/lexer_spec.lua | 337 ++++++++++++++++++++++++++++++ 1 file changed, 337 insertions(+) create mode 100644 test/unit/viml/expressions/lexer_spec.lua (limited to 'test/unit/viml/expressions/lexer_spec.lua') diff --git a/test/unit/viml/expressions/lexer_spec.lua b/test/unit/viml/expressions/lexer_spec.lua new file mode 100644 index 0000000000..bf5afe4eeb --- /dev/null +++ b/test/unit/viml/expressions/lexer_spec.lua @@ -0,0 +1,337 @@ +local helpers = require('test.unit.helpers')(after_each) +local itp = helpers.gen_itp(it) + +local child_call_once = helpers.child_call_once +local cimport = helpers.cimport +local ffi = helpers.ffi +local eq = helpers.eq + +local lib = cimport('./src/nvim/viml/parser/expressions.h') + +local eltkn_type_tab, eltkn_cmp_type_tab, ccs_tab, eltkn_mul_type_tab +local eltkn_opt_scope_tab +child_call_once(function() + eltkn_type_tab = { + [tonumber(lib.kExprLexInvalid)] = 'Invalid', + [tonumber(lib.kExprLexMissing)] = 'Missing', + [tonumber(lib.kExprLexSpacing)] = 'Spacing', + [tonumber(lib.kExprLexEOC)] = 'EOC', + + [tonumber(lib.kExprLexQuestion)] = 'Question', + [tonumber(lib.kExprLexColon)] = 'Colon', + [tonumber(lib.kExprLexOr)] = 'Or', + [tonumber(lib.kExprLexAnd)] = 'And', + [tonumber(lib.kExprLexComparison)] = 'Comparison', + [tonumber(lib.kExprLexPlus)] = 'Plus', + [tonumber(lib.kExprLexMinus)] = 'Minus', + [tonumber(lib.kExprLexDot)] = 'Dot', + [tonumber(lib.kExprLexMultiplication)] = 'Multiplication', + + [tonumber(lib.kExprLexNot)] = 'Not', + + [tonumber(lib.kExprLexNumber)] = 'Number', + [tonumber(lib.kExprLexSingleQuotedString)] = 'SingleQuotedString', + [tonumber(lib.kExprLexDoubleQuotedString)] = 'DoubleQuotedString', + [tonumber(lib.kExprLexOption)] = 'Option', + [tonumber(lib.kExprLexRegister)] = 'Register', + [tonumber(lib.kExprLexEnv)] = 'Env', + [tonumber(lib.kExprLexPlainIdentifier)] = 'PlainIdentifier', + + [tonumber(lib.kExprLexBracket)] = 'Bracket', + [tonumber(lib.kExprLexFigureBrace)] = 'FigureBrace', + [tonumber(lib.kExprLexParenthesis)] = 'Parenthesis', + [tonumber(lib.kExprLexComma)] = 'Comma', + [tonumber(lib.kExprLexArrow)] = 'Arrow', + } + + eltkn_cmp_type_tab = { + [tonumber(lib.kExprLexCmpEqual)] = 'Equal', + [tonumber(lib.kExprLexCmpMatches)] = 'Matches', + [tonumber(lib.kExprLexCmpGreater)] = 'Greater', + [tonumber(lib.kExprLexCmpGreaterOrEqual)] = 'GreaterOrEqual', + [tonumber(lib.kExprLexCmpIdentical)] = 'Identical', + } + + ccs_tab = { + [tonumber(lib.kCCStrategyUseOption)] = 'UseOption', + [tonumber(lib.kCCStrategyMatchCase)] = 'MatchCase', + [tonumber(lib.kCCStrategyIgnoreCase)] = 'IgnoreCase', + } + + eltkn_mul_type_tab = { + [tonumber(lib.kExprLexMulMul)] = 'Mul', + [tonumber(lib.kExprLexMulDiv)] = 'Div', + [tonumber(lib.kExprLexMulMod)] = 'Mod', + } + + eltkn_opt_scope_tab = { + [tonumber(lib.kExprLexOptUnspecified)] = 'Unspecified', + [tonumber(lib.kExprLexOptGlobal)] = 'Global', + [tonumber(lib.kExprLexOptLocal)] = 'Local', + } +end) + +local function array_size(arr) + return ffi.sizeof(arr) / ffi.sizeof(arr[0]) +end + +local function kvi_size(kvi) + return array_size(kvi.init_array) +end + +local function kvi_init(kvi) + kvi.capacity = kvi_size(kvi) + kvi.items = kvi.init_array + return kvi +end + +local function kvi_new(ct) + return kvi_init(ffi.new(ct)) +end + +local function new_pstate(strings) + local strings_idx = 
0 + local function get_line(_, ret_pline) + strings_idx = strings_idx + 1 + local str = strings[strings_idx] + local data, size + if type(str) == 'string' then + data = str + size = #str + elseif type(str) == 'nil' then + data = nil + size = 0 + elseif type(str) == 'table' then + data = str.data + size = str.size + elseif type(str) == 'function' then + data, size = str() + size = size or 0 + end + ret_pline.data = data + ret_pline.size = size + end + local pline_init = { + data = nil, + size = 0, + } + local state = { + reader = { + get_line = get_line, + cookie = nil, + }, + pos = { line = 0, col = 0 }, + colors = kvi_new('ParserHighlight'), + can_continuate = false, + } + local ret = ffi.new('ParserState', state) + kvi_init(ret.reader.lines) + kvi_init(ret.stack) + return ret +end + +local function conv_enum(etab, eval) + local n = tonumber(eval) + return etab[n] or n +end + +local function conv_eltkn_type(typ) + return conv_enum(eltkn_type_tab, typ) +end + +local function pline2lua(pline) + return ffi.string(pline.data, pline.size) +end + +local bracket_types = { + Bracket = true, + FigureBrace = true, + Parenthesis = true, +} + +local function intchar2lua(ch) + ch = tonumber(ch) + return (20 <= ch and ch < 127) and ('%c'):format(ch) or ch +end + +local function eltkn2lua(pstate, tkn) + local ret = { + type = conv_eltkn_type(tkn.type), + len = tonumber(tkn.len), + start = { line = tonumber(tkn.start.line), col = tonumber(tkn.start.col) }, + } + if ret.start.line < pstate.reader.lines.size then + local pstr = pline2lua(pstate.reader.lines.items[ret.start.line]) + if ret.start.col >= #pstr then + ret.error = 'start.col >= #pstr' + else + ret.str = pstr:sub(ret.start.col + 1, ret.start.col + ret.len) + if #(ret.str) ~= ret.len then + ret.error = '#str /= len' + end + end + else + ret.error = 'start.line >= pstate.reader.lines.size' + end + if ret.type == 'Comparison' then + ret.data = { + type = conv_enum(eltkn_cmp_type_tab, tkn.data.cmp.type), + ccs = conv_enum(ccs_tab, tkn.data.cmp.ccs), + inv = (not not tkn.data.cmp.inv), + } + elseif ret.type == 'Multiplication' then + ret.data = { type = conv_enum(eltkn_mul_type_tab, tkn.data.mul.type) } + elseif bracket_types[ret.type] then + ret.data = { closing = (not not tkn.data.brc.closing) } + elseif ret.type == 'Register' then + ret.data = { name = intchar2lua(tkn.data.reg.name) } + elseif (ret.type == 'SingleQuotedString' + or ret.type == 'DoubleQuotedString') then + ret.data = { closed = (not not tkn.data.str.closed) } + elseif ret.type == 'Option' then + ret.data = { + scope = conv_enum(eltkn_opt_scope_tab, tkn.data.opt.scope), + name = ffi.string(tkn.data.opt.name, tkn.data.opt.len), + } + elseif ret.type == 'PlainIdentifier' then + ret.data = { + scope = intchar2lua(tkn.data.var.scope), + autoload = (not not tkn.data.var.autoload), + } + elseif ret.type == 'Invalid' then + ret.data = { error = ffi.string(tkn.data.err.msg) } + end + return ret, tkn +end + +local function next_eltkn(pstate) + return eltkn2lua(pstate, lib.viml_pexpr_next_token(pstate, false)) +end + +describe('Expressions lexer', function() + itp('works (single tokens)', function() + local function singl_eltkn_test(typ, str, data) + local pstate = new_pstate({str}) + eq({data=data, len=#str, start={col=0, line=0}, str=str, type=typ}, + next_eltkn(pstate)) + if not ( + typ == 'Spacing' + or (typ == 'Register' and str == '@') + or ((typ == 'SingleQuotedString' or typ == 'DoubleQuotedString') + and not data.closed) + ) then + pstate = new_pstate({str .. 
' '}) + eq({data=data, len=#str, start={col=0, line=0}, str=str, type=typ}, + next_eltkn(pstate)) + end + pstate = new_pstate({'x' .. str}) + pstate.pos.col = 1 + eq({data=data, len=#str, start={col=1, line=0}, str=str, type=typ}, + next_eltkn(pstate)) + end + singl_eltkn_test('Parenthesis', '(', {closing=false}) + singl_eltkn_test('Parenthesis', ')', {closing=true}) + singl_eltkn_test('Bracket', '[', {closing=false}) + singl_eltkn_test('Bracket', ']', {closing=true}) + singl_eltkn_test('FigureBrace', '{', {closing=false}) + singl_eltkn_test('FigureBrace', '}', {closing=true}) + singl_eltkn_test('Question', '?') + singl_eltkn_test('Colon', ':') + singl_eltkn_test('Dot', '.') + singl_eltkn_test('Plus', '+') + singl_eltkn_test('Comma', ',') + singl_eltkn_test('Multiplication', '*', {type='Mul'}) + singl_eltkn_test('Multiplication', '/', {type='Div'}) + singl_eltkn_test('Multiplication', '%', {type='Mod'}) + singl_eltkn_test('Spacing', ' \t\t \t\t') + singl_eltkn_test('Spacing', ' ') + singl_eltkn_test('Spacing', '\t') + singl_eltkn_test('Invalid', '\x01\x02\x03', {error='E15: Invalid control character present in input: %.*s'}) + singl_eltkn_test('Number', '0123') + singl_eltkn_test('Number', '0') + singl_eltkn_test('Number', '9') + singl_eltkn_test('Env', '$abc') + singl_eltkn_test('Env', '$') + singl_eltkn_test('PlainIdentifier', 'test', {autoload=false, scope=0}) + singl_eltkn_test('PlainIdentifier', '_test', {autoload=false, scope=0}) + singl_eltkn_test('PlainIdentifier', '_test_foo', {autoload=false, scope=0}) + singl_eltkn_test('PlainIdentifier', 't', {autoload=false, scope=0}) + singl_eltkn_test('PlainIdentifier', 'test5', {autoload=false, scope=0}) + singl_eltkn_test('PlainIdentifier', 't0', {autoload=false, scope=0}) + singl_eltkn_test('PlainIdentifier', 'test#var', {autoload=true, scope=0}) + singl_eltkn_test('PlainIdentifier', 'test#var#val###', {autoload=true, scope=0}) + singl_eltkn_test('PlainIdentifier', 't#####', {autoload=true, scope=0}) + local function scope_test(scope) + singl_eltkn_test('PlainIdentifier', scope .. ':test#var', {autoload=true, scope=scope}) + singl_eltkn_test('PlainIdentifier', scope .. ':', {autoload=false, scope=scope}) + end + scope_test('s') + scope_test('g') + scope_test('v') + scope_test('b') + scope_test('w') + scope_test('t') + scope_test('l') + scope_test('a') + local function comparison_test(op, inv_op, cmp_type) + singl_eltkn_test('Comparison', op, {type=cmp_type, inv=false, ccs='UseOption'}) + singl_eltkn_test('Comparison', inv_op, {type=cmp_type, inv=true, ccs='UseOption'}) + singl_eltkn_test('Comparison', op .. '#', {type=cmp_type, inv=false, ccs='MatchCase'}) + singl_eltkn_test('Comparison', inv_op .. '#', {type=cmp_type, inv=true, ccs='MatchCase'}) + singl_eltkn_test('Comparison', op .. '?', {type=cmp_type, inv=false, ccs='IgnoreCase'}) + singl_eltkn_test('Comparison', inv_op .. 
'?', {type=cmp_type, inv=true, ccs='IgnoreCase'}) + end + comparison_test('is', 'isnot', 'Identical') + singl_eltkn_test('And', '&&') + singl_eltkn_test('Invalid', '&', {error='E112: Option name missing: %.*s'}) + singl_eltkn_test('Option', '&opt', {scope='Unspecified', name='opt'}) + singl_eltkn_test('Option', '&t_xx', {scope='Unspecified', name='t_xx'}) + singl_eltkn_test('Option', '&t_\r\r', {scope='Unspecified', name='t_\r\r'}) + singl_eltkn_test('Option', '&t_\t\t', {scope='Unspecified', name='t_\t\t'}) + singl_eltkn_test('Option', '&t_ ', {scope='Unspecified', name='t_ '}) + singl_eltkn_test('Option', '&g:opt', {scope='Global', name='opt'}) + singl_eltkn_test('Option', '&l:opt', {scope='Local', name='opt'}) + singl_eltkn_test('Invalid', '&l:', {error='E112: Option name missing: %.*s'}) + singl_eltkn_test('Invalid', '&g:', {error='E112: Option name missing: %.*s'}) + singl_eltkn_test('Register', '@', {name=-1}) + singl_eltkn_test('Register', '@a', {name='a'}) + singl_eltkn_test('Register', '@\r', {name=13}) + singl_eltkn_test('Register', '@ ', {name=' '}) + singl_eltkn_test('Register', '@\t', {name=9}) + singl_eltkn_test('SingleQuotedString', '\'test', {closed=false}) + singl_eltkn_test('SingleQuotedString', '\'test\'', {closed=true}) + singl_eltkn_test('SingleQuotedString', '\'\'\'\'', {closed=true}) + singl_eltkn_test('SingleQuotedString', '\'x\'\'\'', {closed=true}) + singl_eltkn_test('SingleQuotedString', '\'\'\'x\'', {closed=true}) + singl_eltkn_test('SingleQuotedString', '\'\'\'', {closed=false}) + singl_eltkn_test('SingleQuotedString', '\'x\'\'', {closed=false}) + singl_eltkn_test('SingleQuotedString', '\'\'\'x', {closed=false}) + singl_eltkn_test('DoubleQuotedString', '"test', {closed=false}) + singl_eltkn_test('DoubleQuotedString', '"test"', {closed=true}) + singl_eltkn_test('DoubleQuotedString', '"\\""', {closed=true}) + singl_eltkn_test('DoubleQuotedString', '"x\\""', {closed=true}) + singl_eltkn_test('DoubleQuotedString', '"\\"x"', {closed=true}) + singl_eltkn_test('DoubleQuotedString', '"\\"', {closed=false}) + singl_eltkn_test('DoubleQuotedString', '"x\\"', {closed=false}) + singl_eltkn_test('DoubleQuotedString', '"\\"x', {closed=false}) + singl_eltkn_test('Not', '!') + singl_eltkn_test('Invalid', '=', {error='E15: Expected == or =~: %.*s'}) + comparison_test('==', '!=', 'Equal') + comparison_test('=~', '!~', 'Matches') + comparison_test('>', '<=', 'Greater') + comparison_test('>=', '<', 'GreaterOrEqual') + singl_eltkn_test('Minus', '-') + singl_eltkn_test('Arrow', '->') + singl_eltkn_test('EOC', '\0') + singl_eltkn_test('EOC', '\n') + singl_eltkn_test('Invalid', '~', {error='E15: Unidentified character: %.*s'}) + + local pstate = new_pstate({{data=nil, size=0}}) + eq({len=0, error='start.col >= #pstr', start={col=0, line=0}, type='EOC'}, + next_eltkn(pstate)) + + local pstate = new_pstate({''}) + eq({len=0, error='start.col >= #pstr', start={col=0, line=0}, type='EOC'}, + next_eltkn(pstate)) + end) +end) -- cgit From 2d8b9937deae3731143f4ea44e5c41715fe1363a Mon Sep 17 00:00:00 2001 From: ZyX Date: Sun, 20 Aug 2017 20:40:59 +0300 Subject: viml/parser: Handle encoding conversions --- test/unit/viml/expressions/lexer_spec.lua | 7 +++++++ 1 file changed, 7 insertions(+) (limited to 'test/unit/viml/expressions/lexer_spec.lua') diff --git a/test/unit/viml/expressions/lexer_spec.lua b/test/unit/viml/expressions/lexer_spec.lua index bf5afe4eeb..c877ce4bbf 100644 --- a/test/unit/viml/expressions/lexer_spec.lua +++ b/test/unit/viml/expressions/lexer_spec.lua @@ -110,15 +110,22 @@ 
local function new_pstate(strings) end ret_pline.data = data ret_pline.size = size + ret_pline.allocated = false end local pline_init = { data = nil, size = 0, + allocated = false, } local state = { reader = { get_line = get_line, cookie = nil, + conv = { + vc_type = 0, + vc_factor = 1, + vc_fail = false, + }, }, pos = { line = 0, col = 0 }, colors = kvi_new('ParserHighlight'), -- cgit From 919223c23ae3c8c904f35e7d605b1cf14d44a5f0 Mon Sep 17 00:00:00 2001 From: ZyX Date: Sun, 3 Sep 2017 19:57:24 +0300 Subject: unittests: Move some functions into helpers modules --- test/unit/viml/expressions/lexer_spec.lua | 104 +++--------------------------- 1 file changed, 10 insertions(+), 94 deletions(-) (limited to 'test/unit/viml/expressions/lexer_spec.lua') diff --git a/test/unit/viml/expressions/lexer_spec.lua b/test/unit/viml/expressions/lexer_spec.lua index c877ce4bbf..32182f650d 100644 --- a/test/unit/viml/expressions/lexer_spec.lua +++ b/test/unit/viml/expressions/lexer_spec.lua @@ -1,11 +1,18 @@ local helpers = require('test.unit.helpers')(after_each) +local viml_helpers = require('test.unit.viml.helpers') local itp = helpers.gen_itp(it) local child_call_once = helpers.child_call_once +local conv_enum = helpers.conv_enum local cimport = helpers.cimport local ffi = helpers.ffi local eq = helpers.eq +local pline2lua = viml_helpers.pline2lua +local new_pstate = viml_helpers.new_pstate +local intchar2lua = viml_helpers.intchar2lua +local pstate_set_str = viml_helpers.pstate_set_str + local lib = cimport('./src/nvim/viml/parser/expressions.h') local eltkn_type_tab, eltkn_cmp_type_tab, ccs_tab, eltkn_mul_type_tab @@ -71,114 +78,23 @@ child_call_once(function() } end) -local function array_size(arr) - return ffi.sizeof(arr) / ffi.sizeof(arr[0]) -end - -local function kvi_size(kvi) - return array_size(kvi.init_array) -end - -local function kvi_init(kvi) - kvi.capacity = kvi_size(kvi) - kvi.items = kvi.init_array - return kvi -end - -local function kvi_new(ct) - return kvi_init(ffi.new(ct)) -end - -local function new_pstate(strings) - local strings_idx = 0 - local function get_line(_, ret_pline) - strings_idx = strings_idx + 1 - local str = strings[strings_idx] - local data, size - if type(str) == 'string' then - data = str - size = #str - elseif type(str) == 'nil' then - data = nil - size = 0 - elseif type(str) == 'table' then - data = str.data - size = str.size - elseif type(str) == 'function' then - data, size = str() - size = size or 0 - end - ret_pline.data = data - ret_pline.size = size - ret_pline.allocated = false - end - local pline_init = { - data = nil, - size = 0, - allocated = false, - } - local state = { - reader = { - get_line = get_line, - cookie = nil, - conv = { - vc_type = 0, - vc_factor = 1, - vc_fail = false, - }, - }, - pos = { line = 0, col = 0 }, - colors = kvi_new('ParserHighlight'), - can_continuate = false, - } - local ret = ffi.new('ParserState', state) - kvi_init(ret.reader.lines) - kvi_init(ret.stack) - return ret -end - -local function conv_enum(etab, eval) - local n = tonumber(eval) - return etab[n] or n -end - local function conv_eltkn_type(typ) return conv_enum(eltkn_type_tab, typ) end -local function pline2lua(pline) - return ffi.string(pline.data, pline.size) -end - local bracket_types = { Bracket = true, FigureBrace = true, Parenthesis = true, } -local function intchar2lua(ch) - ch = tonumber(ch) - return (20 <= ch and ch < 127) and ('%c'):format(ch) or ch -end - local function eltkn2lua(pstate, tkn) local ret = { type = conv_eltkn_type(tkn.type), - len = 
tonumber(tkn.len), - start = { line = tonumber(tkn.start.line), col = tonumber(tkn.start.col) }, } - if ret.start.line < pstate.reader.lines.size then - local pstr = pline2lua(pstate.reader.lines.items[ret.start.line]) - if ret.start.col >= #pstr then - ret.error = 'start.col >= #pstr' - else - ret.str = pstr:sub(ret.start.col + 1, ret.start.col + ret.len) - if #(ret.str) ~= ret.len then - ret.error = '#str /= len' - end - end - else - ret.error = 'start.line >= pstate.reader.lines.size' + pstate_set_str(pstate, tkn.start, tkn.len, ret) + if not ret.error and (#(ret.str) ~= ret.len) then + ret.error = '#str /= len' end if ret.type == 'Comparison' then ret.data = { -- cgit From 9fa8f7fc0a24371f7956450d840bdae8a2fc9a51 Mon Sep 17 00:00:00 2001 From: ZyX Date: Thu, 28 Sep 2017 00:40:25 +0300 Subject: viml/parser/expressions: Add a way to adjust lexer It also adds support for kExprLexOr which for some reason was forgotten. It was only made sure that KLEE test compiles in non-KLEE mode, not that something works or that KLEE is able to run tests. --- test/unit/viml/expressions/lexer_spec.lua | 247 ++++++++++++++++++++++++------ 1 file changed, 196 insertions(+), 51 deletions(-) (limited to 'test/unit/viml/expressions/lexer_spec.lua') diff --git a/test/unit/viml/expressions/lexer_spec.lua b/test/unit/viml/expressions/lexer_spec.lua index 32182f650d..972478c2e5 100644 --- a/test/unit/viml/expressions/lexer_spec.lua +++ b/test/unit/viml/expressions/lexer_spec.lua @@ -1,5 +1,6 @@ local helpers = require('test.unit.helpers')(after_each) local viml_helpers = require('test.unit.viml.helpers') +local global_helpers = require('test.helpers') local itp = helpers.gen_itp(it) local child_call_once = helpers.child_call_once @@ -13,6 +14,8 @@ local new_pstate = viml_helpers.new_pstate local intchar2lua = viml_helpers.intchar2lua local pstate_set_str = viml_helpers.pstate_set_str +local shallowcopy = global_helpers.shallowcopy + local lib = cimport('./src/nvim/viml/parser/expressions.h') local eltkn_type_tab, eltkn_cmp_type_tab, ccs_tab, eltkn_mul_type_tab @@ -121,37 +124,81 @@ local function eltkn2lua(pstate, tkn) scope = intchar2lua(tkn.data.var.scope), autoload = (not not tkn.data.var.autoload), } + elseif ret.type == 'Number' then + ret.data = { + is_float = (not not tkn.data.num.is_float), + } elseif ret.type == 'Invalid' then ret.data = { error = ffi.string(tkn.data.err.msg) } end return ret, tkn end -local function next_eltkn(pstate) - return eltkn2lua(pstate, lib.viml_pexpr_next_token(pstate, false)) +local function next_eltkn(pstate, flags) + return eltkn2lua(pstate, lib.viml_pexpr_next_token(pstate, flags)) end describe('Expressions lexer', function() - itp('works (single tokens)', function() - local function singl_eltkn_test(typ, str, data) - local pstate = new_pstate({str}) - eq({data=data, len=#str, start={col=0, line=0}, str=str, type=typ}, - next_eltkn(pstate)) - if not ( - typ == 'Spacing' - or (typ == 'Register' and str == '@') - or ((typ == 'SingleQuotedString' or typ == 'DoubleQuotedString') - and not data.closed) - ) then - pstate = new_pstate({str .. 
' '}) - eq({data=data, len=#str, start={col=0, line=0}, str=str, type=typ}, - next_eltkn(pstate)) + local flags = 0 + local should_advance = true + local function check_advance(pstate, bytes_to_advance, initial_col) + local tgt = initial_col + bytes_to_advance + if should_advance then + if pstate.reader.lines.items[0].size == tgt then + eq(1, pstate.pos.line) + eq(0, pstate.pos.col) + else + eq(0, pstate.pos.line) + eq(tgt, pstate.pos.col) end - pstate = new_pstate({'x' .. str}) - pstate.pos.col = 1 - eq({data=data, len=#str, start={col=1, line=0}, str=str, type=typ}, - next_eltkn(pstate)) + else + eq(0, pstate.pos.line) + eq(initial_col, pstate.pos.col) end + end + local function singl_eltkn_test(typ, str, data) + local pstate = new_pstate({str}) + eq({data=data, len=#str, start={col=0, line=0}, str=str, type=typ}, + next_eltkn(pstate, flags)) + check_advance(pstate, #str, 0) + if not ( + typ == 'Spacing' + or (typ == 'Register' and str == '@') + or ((typ == 'SingleQuotedString' or typ == 'DoubleQuotedString') + and not data.closed) + ) then + pstate = new_pstate({str .. ' '}) + eq({data=data, len=#str, start={col=0, line=0}, str=str, type=typ}, + next_eltkn(pstate, flags)) + check_advance(pstate, #str, 0) + end + pstate = new_pstate({'x' .. str}) + pstate.pos.col = 1 + eq({data=data, len=#str, start={col=1, line=0}, str=str, type=typ}, + next_eltkn(pstate, flags)) + check_advance(pstate, #str, 1) + end + local function scope_test(scope) + singl_eltkn_test('PlainIdentifier', scope .. ':test#var', {autoload=true, scope=scope}) + singl_eltkn_test('PlainIdentifier', scope .. ':', {autoload=false, scope=scope}) + end + local function comparison_test(op, inv_op, cmp_type) + singl_eltkn_test('Comparison', op, {type=cmp_type, inv=false, ccs='UseOption'}) + singl_eltkn_test('Comparison', inv_op, {type=cmp_type, inv=true, ccs='UseOption'}) + singl_eltkn_test('Comparison', op .. '#', {type=cmp_type, inv=false, ccs='MatchCase'}) + singl_eltkn_test('Comparison', inv_op .. '#', {type=cmp_type, inv=true, ccs='MatchCase'}) + singl_eltkn_test('Comparison', op .. '?', {type=cmp_type, inv=false, ccs='IgnoreCase'}) + singl_eltkn_test('Comparison', inv_op .. 
'?', {type=cmp_type, inv=true, ccs='IgnoreCase'}) + end + local function simple_test(pstate_arg, exp_type, exp_len, exp) + local pstate = new_pstate(pstate_arg) + local exp = shallowcopy(exp) + exp.type = exp_type + exp.len = exp_len or #(pstate_arg[0]) + exp.start = { col = 0, line = 0 } + eq(exp, next_eltkn(pstate, flags)) + end + local function stable_tests() singl_eltkn_test('Parenthesis', '(', {closing=false}) singl_eltkn_test('Parenthesis', ')', {closing=true}) singl_eltkn_test('Bracket', '[', {closing=false}) @@ -170,9 +217,9 @@ describe('Expressions lexer', function() singl_eltkn_test('Spacing', ' ') singl_eltkn_test('Spacing', '\t') singl_eltkn_test('Invalid', '\x01\x02\x03', {error='E15: Invalid control character present in input: %.*s'}) - singl_eltkn_test('Number', '0123') - singl_eltkn_test('Number', '0') - singl_eltkn_test('Number', '9') + singl_eltkn_test('Number', '0123', {is_float=false}) + singl_eltkn_test('Number', '0', {is_float=false}) + singl_eltkn_test('Number', '9', {is_float=false}) singl_eltkn_test('Env', '$abc') singl_eltkn_test('Env', '$') singl_eltkn_test('PlainIdentifier', 'test', {autoload=false, scope=0}) @@ -184,28 +231,8 @@ describe('Expressions lexer', function() singl_eltkn_test('PlainIdentifier', 'test#var', {autoload=true, scope=0}) singl_eltkn_test('PlainIdentifier', 'test#var#val###', {autoload=true, scope=0}) singl_eltkn_test('PlainIdentifier', 't#####', {autoload=true, scope=0}) - local function scope_test(scope) - singl_eltkn_test('PlainIdentifier', scope .. ':test#var', {autoload=true, scope=scope}) - singl_eltkn_test('PlainIdentifier', scope .. ':', {autoload=false, scope=scope}) - end - scope_test('s') - scope_test('g') - scope_test('v') - scope_test('b') - scope_test('w') - scope_test('t') - scope_test('l') - scope_test('a') - local function comparison_test(op, inv_op, cmp_type) - singl_eltkn_test('Comparison', op, {type=cmp_type, inv=false, ccs='UseOption'}) - singl_eltkn_test('Comparison', inv_op, {type=cmp_type, inv=true, ccs='UseOption'}) - singl_eltkn_test('Comparison', op .. '#', {type=cmp_type, inv=false, ccs='MatchCase'}) - singl_eltkn_test('Comparison', inv_op .. '#', {type=cmp_type, inv=true, ccs='MatchCase'}) - singl_eltkn_test('Comparison', op .. '?', {type=cmp_type, inv=false, ccs='IgnoreCase'}) - singl_eltkn_test('Comparison', inv_op .. 
'?', {type=cmp_type, inv=true, ccs='IgnoreCase'}) - end - comparison_test('is', 'isnot', 'Identical') singl_eltkn_test('And', '&&') + singl_eltkn_test('Or', '||') singl_eltkn_test('Invalid', '&', {error='E112: Option name missing: %.*s'}) singl_eltkn_test('Option', '&opt', {scope='Unspecified', name='opt'}) singl_eltkn_test('Option', '&t_xx', {scope='Unspecified', name='t_xx'}) @@ -245,16 +272,134 @@ describe('Expressions lexer', function() comparison_test('>=', '<', 'GreaterOrEqual') singl_eltkn_test('Minus', '-') singl_eltkn_test('Arrow', '->') + singl_eltkn_test('Invalid', '~', {error='E15: Unidentified character: %.*s'}) + simple_test({{data=nil, size=0}}, 'EOC', 0, {error='start.col >= #pstr'}) + simple_test({''}, 'EOC', 0, {error='start.col >= #pstr'}) + simple_test({'2.'}, 'Number', 1, {data={is_float=false}, str='2'}) + simple_test({'2.x'}, 'Number', 1, {data={is_float=false}, str='2'}) + end + + local function regular_scope_tests() + scope_test('s') + scope_test('g') + scope_test('v') + scope_test('b') + scope_test('w') + scope_test('t') + scope_test('l') + scope_test('a') + + simple_test({'g:'}, 'PlainIdentifier', 2, {data={scope='g', autoload=false}, str='g:'}) + simple_test({'g:is#foo'}, 'PlainIdentifier', 8, {data={scope='g', autoload=true}, str='g:is#foo'}) + simple_test({'g:isnot#foo'}, 'PlainIdentifier', 11, {data={scope='g', autoload=true}, str='g:isnot#foo'}) + end + + local function regular_is_tests() + comparison_test('is', 'isnot', 'Identical') + + simple_test({'is'}, 'Comparison', 2, {data={type='Identical', inv=false, ccs='UseOption'}, str='is'}) + simple_test({'isnot'}, 'Comparison', 5, {data={type='Identical', inv=true, ccs='UseOption'}, str='isnot'}) + simple_test({'is?'}, 'Comparison', 3, {data={type='Identical', inv=false, ccs='IgnoreCase'}, str='is?'}) + simple_test({'isnot?'}, 'Comparison', 6, {data={type='Identical', inv=true, ccs='IgnoreCase'}, str='isnot?'}) + simple_test({'is#'}, 'Comparison', 3, {data={type='Identical', inv=false, ccs='MatchCase'}, str='is#'}) + simple_test({'isnot#'}, 'Comparison', 6, {data={type='Identical', inv=true, ccs='MatchCase'}, str='isnot#'}) + simple_test({'is#foo'}, 'Comparison', 3, {data={type='Identical', inv=false, ccs='MatchCase'}, str='is#'}) + simple_test({'isnot#foo'}, 'Comparison', 6, {data={type='Identical', inv=true, ccs='MatchCase'}, str='isnot#'}) + end + + local function regular_number_tests() + simple_test({'2.0'}, 'Number', 1, {data={is_float=false}, str='2'}) + simple_test({'2.0x'}, 'Number', 1, {data={is_float=false}, str='2'}) + simple_test({'2.0e'}, 'Number', 1, {data={is_float=false}, str='2'}) + simple_test({'2.0e+'}, 'Number', 1, {data={is_float=false}, str='2'}) + simple_test({'2.0e-'}, 'Number', 1, {data={is_float=false}, str='2'}) + simple_test({'2.0e+x'}, 'Number', 1, {data={is_float=false}, str='2'}) + simple_test({'2.0e-x'}, 'Number', 1, {data={is_float=false}, str='2'}) + simple_test({'2.0e5'}, 'Number', 1, {data={is_float=false}, str='2'}) + simple_test({'2.0e+5'}, 'Number', 1, {data={is_float=false}, str='2'}) + simple_test({'2.0e-5'}, 'Number', 1, {data={is_float=false}, str='2'}) + end + + local function regular_eoc_tests() + singl_eltkn_test('EOC', '|') singl_eltkn_test('EOC', '\0') singl_eltkn_test('EOC', '\n') - singl_eltkn_test('Invalid', '~', {error='E15: Unidentified character: %.*s'}) + end + + itp('works (single tokens, zero flags)', function() + stable_tests() + + regular_eoc_tests() + regular_scope_tests() + regular_is_tests() + regular_number_tests() + end) + itp('peeks', function() 
+ flags = tonumber(lib.kELFlagPeek) + should_advance = false + stable_tests() + + regular_eoc_tests() + regular_scope_tests() + regular_is_tests() + regular_number_tests() + end) + itp('forbids scope', function() + flags = tonumber(lib.kELFlagForbidScope) + stable_tests() + + regular_eoc_tests() + regular_is_tests() + regular_number_tests() + + simple_test({'g:'}, 'PlainIdentifier', 1, {data={scope=0, autoload=false}, str='g'}) + end) + itp('allows floats', function() + flags = tonumber(lib.kELFlagAllowFloat) + stable_tests() + + regular_eoc_tests() + regular_scope_tests() + regular_is_tests() + + simple_test({'2.0'}, 'Number', 3, {data={is_float=true}, str='2.0'}) + simple_test({'2.0x'}, 'Number', 3, {data={is_float=true}, str='2.0'}) + simple_test({'2.0e'}, 'Number', 3, {data={is_float=true}, str='2.0'}) + simple_test({'2.0e+'}, 'Number', 3, {data={is_float=true}, str='2.0'}) + simple_test({'2.0e-'}, 'Number', 3, {data={is_float=true}, str='2.0'}) + simple_test({'2.0e+x'}, 'Number', 3, {data={is_float=true}, str='2.0'}) + simple_test({'2.0e-x'}, 'Number', 3, {data={is_float=true}, str='2.0'}) + simple_test({'2.0e5'}, 'Number', 5, {data={is_float=true}, str='2.0e5'}) + simple_test({'2.0e+5'}, 'Number', 6, {data={is_float=true}, str='2.0e+5'}) + simple_test({'2.0e-5'}, 'Number', 6, {data={is_float=true}, str='2.0e-5'}) + end) + itp('treats `is` as an identifier', function() + flags = tonumber(lib.kELFlagIsNotCmp) + stable_tests() + + regular_eoc_tests() + regular_scope_tests() + regular_number_tests() + + simple_test({'is'}, 'PlainIdentifier', 2, {data={scope=0, autoload=false}, str='is'}) + simple_test({'isnot'}, 'PlainIdentifier', 5, {data={scope=0, autoload=false}, str='isnot'}) + simple_test({'is?'}, 'PlainIdentifier', 2, {data={scope=0, autoload=false}, str='is'}) + simple_test({'isnot?'}, 'PlainIdentifier', 5, {data={scope=0, autoload=false}, str='isnot'}) + simple_test({'is#'}, 'PlainIdentifier', 3, {data={scope=0, autoload=true}, str='is#'}) + simple_test({'isnot#'}, 'PlainIdentifier', 6, {data={scope=0, autoload=true}, str='isnot#'}) + simple_test({'is#foo'}, 'PlainIdentifier', 6, {data={scope=0, autoload=true}, str='is#foo'}) + simple_test({'isnot#foo'}, 'PlainIdentifier', 9, {data={scope=0, autoload=true}, str='isnot#foo'}) + end) + itp('forbids EOC', function() + flags = tonumber(lib.kELFlagForbidEOC) + stable_tests() - local pstate = new_pstate({{data=nil, size=0}}) - eq({len=0, error='start.col >= #pstr', start={col=0, line=0}, type='EOC'}, - next_eltkn(pstate)) + regular_scope_tests() + regular_is_tests() + regular_number_tests() - local pstate = new_pstate({''}) - eq({len=0, error='start.col >= #pstr', start={col=0, line=0}, type='EOC'}, - next_eltkn(pstate)) + singl_eltkn_test('Invalid', '|', {error='E15: Unexpected EOC character: %.*s'}) + singl_eltkn_test('Invalid', '\0', {error='E15: Unexpected EOC character: %.*s'}) + singl_eltkn_test('Invalid', '\n', {error='E15: Unexpected EOC character: %.*s'}) end) end) -- cgit From 6168e1127c1c80a3810854649b0776146545043b Mon Sep 17 00:00:00 2001 From: ZyX Date: Mon, 2 Oct 2017 02:41:55 +0300 Subject: viml/parser/expressions: Add support for comparison operators --- test/unit/viml/expressions/lexer_spec.lua | 25 ++++++------------------- 1 file changed, 6 insertions(+), 19 deletions(-) (limited to 'test/unit/viml/expressions/lexer_spec.lua') diff --git a/test/unit/viml/expressions/lexer_spec.lua b/test/unit/viml/expressions/lexer_spec.lua index 972478c2e5..d201d54526 100644 --- a/test/unit/viml/expressions/lexer_spec.lua +++ 
b/test/unit/viml/expressions/lexer_spec.lua @@ -1,7 +1,7 @@ local helpers = require('test.unit.helpers')(after_each) -local viml_helpers = require('test.unit.viml.helpers') local global_helpers = require('test.helpers') local itp = helpers.gen_itp(it) +local viml_helpers = require('test.unit.viml.helpers') local child_call_once = helpers.child_call_once local conv_enum = helpers.conv_enum @@ -9,17 +9,18 @@ local cimport = helpers.cimport local ffi = helpers.ffi local eq = helpers.eq +local conv_ccs = viml_helpers.conv_ccs local pline2lua = viml_helpers.pline2lua local new_pstate = viml_helpers.new_pstate local intchar2lua = viml_helpers.intchar2lua +local conv_cmp_type = viml_helpers.conv_cmp_type local pstate_set_str = viml_helpers.pstate_set_str local shallowcopy = global_helpers.shallowcopy local lib = cimport('./src/nvim/viml/parser/expressions.h') -local eltkn_type_tab, eltkn_cmp_type_tab, ccs_tab, eltkn_mul_type_tab -local eltkn_opt_scope_tab +local eltkn_type_tab, eltkn_mul_type_tab, eltkn_opt_scope_tab child_call_once(function() eltkn_type_tab = { [tonumber(lib.kExprLexInvalid)] = 'Invalid', @@ -54,20 +55,6 @@ child_call_once(function() [tonumber(lib.kExprLexArrow)] = 'Arrow', } - eltkn_cmp_type_tab = { - [tonumber(lib.kExprLexCmpEqual)] = 'Equal', - [tonumber(lib.kExprLexCmpMatches)] = 'Matches', - [tonumber(lib.kExprLexCmpGreater)] = 'Greater', - [tonumber(lib.kExprLexCmpGreaterOrEqual)] = 'GreaterOrEqual', - [tonumber(lib.kExprLexCmpIdentical)] = 'Identical', - } - - ccs_tab = { - [tonumber(lib.kCCStrategyUseOption)] = 'UseOption', - [tonumber(lib.kCCStrategyMatchCase)] = 'MatchCase', - [tonumber(lib.kCCStrategyIgnoreCase)] = 'IgnoreCase', - } - eltkn_mul_type_tab = { [tonumber(lib.kExprLexMulMul)] = 'Mul', [tonumber(lib.kExprLexMulDiv)] = 'Div', @@ -101,8 +88,8 @@ local function eltkn2lua(pstate, tkn) end if ret.type == 'Comparison' then ret.data = { - type = conv_enum(eltkn_cmp_type_tab, tkn.data.cmp.type), - ccs = conv_enum(ccs_tab, tkn.data.cmp.ccs), + type = conv_cmp_type(tkn.data.cmp.type), + ccs = conv_ccs(tkn.data.cmp.ccs), inv = (not not tkn.data.cmp.inv), } elseif ret.type == 'Multiplication' then -- cgit From 0bc4e2237960712426da3774c1430f5874c49aea Mon Sep 17 00:00:00 2001 From: ZyX Date: Tue, 3 Oct 2017 00:39:40 +0300 Subject: viml/parser/expressions: Forbid dot or alpha characters after a float This is basically what Vim already does, in addition to forbidding floats should there be a concat immediately before it. 
--- test/unit/viml/expressions/lexer_spec.lua | 21 +++++++++------------ 1 file changed, 9 insertions(+), 12 deletions(-) (limited to 'test/unit/viml/expressions/lexer_spec.lua') diff --git a/test/unit/viml/expressions/lexer_spec.lua b/test/unit/viml/expressions/lexer_spec.lua index d201d54526..bd8045632e 100644 --- a/test/unit/viml/expressions/lexer_spec.lua +++ b/test/unit/viml/expressions/lexer_spec.lua @@ -264,6 +264,15 @@ describe('Expressions lexer', function() simple_test({''}, 'EOC', 0, {error='start.col >= #pstr'}) simple_test({'2.'}, 'Number', 1, {data={is_float=false}, str='2'}) simple_test({'2.x'}, 'Number', 1, {data={is_float=false}, str='2'}) + simple_test({'2.2.'}, 'Number', 1, {data={is_float=false}, str='2'}) + simple_test({'2.0x'}, 'Number', 1, {data={is_float=false}, str='2'}) + simple_test({'2.0e'}, 'Number', 1, {data={is_float=false}, str='2'}) + simple_test({'2.0e+'}, 'Number', 1, {data={is_float=false}, str='2'}) + simple_test({'2.0e-'}, 'Number', 1, {data={is_float=false}, str='2'}) + simple_test({'2.0e+x'}, 'Number', 1, {data={is_float=false}, str='2'}) + simple_test({'2.0e-x'}, 'Number', 1, {data={is_float=false}, str='2'}) + simple_test({'2.0e+1a'}, 'Number', 1, {data={is_float=false}, str='2'}) + simple_test({'2.0e-1a'}, 'Number', 1, {data={is_float=false}, str='2'}) end local function regular_scope_tests() @@ -296,12 +305,6 @@ describe('Expressions lexer', function() local function regular_number_tests() simple_test({'2.0'}, 'Number', 1, {data={is_float=false}, str='2'}) - simple_test({'2.0x'}, 'Number', 1, {data={is_float=false}, str='2'}) - simple_test({'2.0e'}, 'Number', 1, {data={is_float=false}, str='2'}) - simple_test({'2.0e+'}, 'Number', 1, {data={is_float=false}, str='2'}) - simple_test({'2.0e-'}, 'Number', 1, {data={is_float=false}, str='2'}) - simple_test({'2.0e+x'}, 'Number', 1, {data={is_float=false}, str='2'}) - simple_test({'2.0e-x'}, 'Number', 1, {data={is_float=false}, str='2'}) simple_test({'2.0e5'}, 'Number', 1, {data={is_float=false}, str='2'}) simple_test({'2.0e+5'}, 'Number', 1, {data={is_float=false}, str='2'}) simple_test({'2.0e-5'}, 'Number', 1, {data={is_float=false}, str='2'}) @@ -350,12 +353,6 @@ describe('Expressions lexer', function() regular_is_tests() simple_test({'2.0'}, 'Number', 3, {data={is_float=true}, str='2.0'}) - simple_test({'2.0x'}, 'Number', 3, {data={is_float=true}, str='2.0'}) - simple_test({'2.0e'}, 'Number', 3, {data={is_float=true}, str='2.0'}) - simple_test({'2.0e+'}, 'Number', 3, {data={is_float=true}, str='2.0'}) - simple_test({'2.0e-'}, 'Number', 3, {data={is_float=true}, str='2.0'}) - simple_test({'2.0e+x'}, 'Number', 3, {data={is_float=true}, str='2.0'}) - simple_test({'2.0e-x'}, 'Number', 3, {data={is_float=true}, str='2.0'}) simple_test({'2.0e5'}, 'Number', 5, {data={is_float=true}, str='2.0e5'}) simple_test({'2.0e+5'}, 'Number', 6, {data={is_float=true}, str='2.0e+5'}) simple_test({'2.0e-5'}, 'Number', 6, {data={is_float=true}, str='2.0e-5'}) -- cgit From 163792e9b9854fe046ada3233dec0fd0f6c55737 Mon Sep 17 00:00:00 2001 From: ZyX Date: Fri, 6 Oct 2017 01:19:43 +0300 Subject: viml/parser/expressions: Make lexer parse numbers, support non-decimal --- test/unit/viml/expressions/lexer_spec.lua | 73 +++++++++++++++++++++---------- 1 file changed, 51 insertions(+), 22 deletions(-) (limited to 'test/unit/viml/expressions/lexer_spec.lua') diff --git a/test/unit/viml/expressions/lexer_spec.lua b/test/unit/viml/expressions/lexer_spec.lua index bd8045632e..f180d8ceff 100644 --- 
a/test/unit/viml/expressions/lexer_spec.lua +++ b/test/unit/viml/expressions/lexer_spec.lua @@ -114,7 +114,11 @@ local function eltkn2lua(pstate, tkn) elseif ret.type == 'Number' then ret.data = { is_float = (not not tkn.data.num.is_float), + base = tonumber(tkn.data.num.base), } + ret.data.val = tonumber(tkn.data.num.is_float + and tkn.data.num.val.floating + or tkn.data.num.val.integer) elseif ret.type == 'Invalid' then ret.data = { error = ffi.string(tkn.data.err.msg) } end @@ -204,9 +208,20 @@ describe('Expressions lexer', function() singl_eltkn_test('Spacing', ' ') singl_eltkn_test('Spacing', '\t') singl_eltkn_test('Invalid', '\x01\x02\x03', {error='E15: Invalid control character present in input: %.*s'}) - singl_eltkn_test('Number', '0123', {is_float=false}) - singl_eltkn_test('Number', '0', {is_float=false}) - singl_eltkn_test('Number', '9', {is_float=false}) + singl_eltkn_test('Number', '0123', {is_float=false, base=8, val=83}) + singl_eltkn_test('Number', '01234567', {is_float=false, base=8, val=342391}) + singl_eltkn_test('Number', '012345678', {is_float=false, base=10, val=12345678}) + singl_eltkn_test('Number', '0x123', {is_float=false, base=16, val=291}) + singl_eltkn_test('Number', '0x56FF', {is_float=false, base=16, val=22271}) + singl_eltkn_test('Number', '0xabcdef', {is_float=false, base=16, val=11259375}) + singl_eltkn_test('Number', '0xABCDEF', {is_float=false, base=16, val=11259375}) + singl_eltkn_test('Number', '0x0', {is_float=false, base=16, val=0}) + singl_eltkn_test('Number', '00', {is_float=false, base=8, val=0}) + singl_eltkn_test('Number', '0b0', {is_float=false, base=2, val=0}) + singl_eltkn_test('Number', '0b010111', {is_float=false, base=2, val=23}) + singl_eltkn_test('Number', '0b100111', {is_float=false, base=2, val=39}) + singl_eltkn_test('Number', '0', {is_float=false, base=10, val=0}) + singl_eltkn_test('Number', '9', {is_float=false, base=10, val=9}) singl_eltkn_test('Env', '$abc') singl_eltkn_test('Env', '$') singl_eltkn_test('PlainIdentifier', 'test', {autoload=false, scope=0}) @@ -262,17 +277,21 @@ describe('Expressions lexer', function() singl_eltkn_test('Invalid', '~', {error='E15: Unidentified character: %.*s'}) simple_test({{data=nil, size=0}}, 'EOC', 0, {error='start.col >= #pstr'}) simple_test({''}, 'EOC', 0, {error='start.col >= #pstr'}) - simple_test({'2.'}, 'Number', 1, {data={is_float=false}, str='2'}) - simple_test({'2.x'}, 'Number', 1, {data={is_float=false}, str='2'}) - simple_test({'2.2.'}, 'Number', 1, {data={is_float=false}, str='2'}) - simple_test({'2.0x'}, 'Number', 1, {data={is_float=false}, str='2'}) - simple_test({'2.0e'}, 'Number', 1, {data={is_float=false}, str='2'}) - simple_test({'2.0e+'}, 'Number', 1, {data={is_float=false}, str='2'}) - simple_test({'2.0e-'}, 'Number', 1, {data={is_float=false}, str='2'}) - simple_test({'2.0e+x'}, 'Number', 1, {data={is_float=false}, str='2'}) - simple_test({'2.0e-x'}, 'Number', 1, {data={is_float=false}, str='2'}) - simple_test({'2.0e+1a'}, 'Number', 1, {data={is_float=false}, str='2'}) - simple_test({'2.0e-1a'}, 'Number', 1, {data={is_float=false}, str='2'}) + simple_test({'2.'}, 'Number', 1, {data={is_float=false, base=10, val=2}, str='2'}) + simple_test({'2e5'}, 'Number', 1, {data={is_float=false, base=10, val=2}, str='2'}) + simple_test({'2.x'}, 'Number', 1, {data={is_float=false, base=10, val=2}, str='2'}) + simple_test({'2.2.'}, 'Number', 1, {data={is_float=false, base=10, val=2}, str='2'}) + simple_test({'2.0x'}, 'Number', 1, {data={is_float=false, base=10, val=2}, str='2'}) + 
simple_test({'2.0e'}, 'Number', 1, {data={is_float=false, base=10, val=2}, str='2'}) + simple_test({'2.0e+'}, 'Number', 1, {data={is_float=false, base=10, val=2}, str='2'}) + simple_test({'2.0e-'}, 'Number', 1, {data={is_float=false, base=10, val=2}, str='2'}) + simple_test({'2.0e+x'}, 'Number', 1, {data={is_float=false, base=10, val=2}, str='2'}) + simple_test({'2.0e-x'}, 'Number', 1, {data={is_float=false, base=10, val=2}, str='2'}) + simple_test({'2.0e+1a'}, 'Number', 1, {data={is_float=false, base=10, val=2}, str='2'}) + simple_test({'2.0e-1a'}, 'Number', 1, {data={is_float=false, base=10, val=2}, str='2'}) + simple_test({'0b102'}, 'Number', 4, {data={is_float=false, base=2, val=2}, str='0b10'}) + simple_test({'10F'}, 'Number', 2, {data={is_float=false, base=10, val=10}, str='10'}) + simple_test({'0x0123456789ABCDEFG'}, 'Number', 18, {data={is_float=false, base=16, val=81985529216486895}, str='0x0123456789ABCDEF'}) end local function regular_scope_tests() @@ -304,10 +323,10 @@ describe('Expressions lexer', function() end local function regular_number_tests() - simple_test({'2.0'}, 'Number', 1, {data={is_float=false}, str='2'}) - simple_test({'2.0e5'}, 'Number', 1, {data={is_float=false}, str='2'}) - simple_test({'2.0e+5'}, 'Number', 1, {data={is_float=false}, str='2'}) - simple_test({'2.0e-5'}, 'Number', 1, {data={is_float=false}, str='2'}) + simple_test({'2.0'}, 'Number', 1, {data={is_float=false, base=10, val=2}, str='2'}) + simple_test({'2.0e5'}, 'Number', 1, {data={is_float=false, base=10, val=2}, str='2'}) + simple_test({'2.0e+5'}, 'Number', 1, {data={is_float=false, base=10, val=2}, str='2'}) + simple_test({'2.0e-5'}, 'Number', 1, {data={is_float=false, base=10, val=2}, str='2'}) end local function regular_eoc_tests() @@ -352,10 +371,20 @@ describe('Expressions lexer', function() regular_scope_tests() regular_is_tests() - simple_test({'2.0'}, 'Number', 3, {data={is_float=true}, str='2.0'}) - simple_test({'2.0e5'}, 'Number', 5, {data={is_float=true}, str='2.0e5'}) - simple_test({'2.0e+5'}, 'Number', 6, {data={is_float=true}, str='2.0e+5'}) - simple_test({'2.0e-5'}, 'Number', 6, {data={is_float=true}, str='2.0e-5'}) + simple_test({'2.2'}, 'Number', 3, {data={is_float=true, base=10, val=2.2}, str='2.2'}) + simple_test({'2.0e5'}, 'Number', 5, {data={is_float=true, base=10, val=2e5}, str='2.0e5'}) + simple_test({'2.0e+5'}, 'Number', 6, {data={is_float=true, base=10, val=2e5}, str='2.0e+5'}) + simple_test({'2.0e-5'}, 'Number', 6, {data={is_float=true, base=10, val=2e-5}, str='2.0e-5'}) + simple_test({'2.500000e-5'}, 'Number', 11, {data={is_float=true, base=10, val=2.5e-5}, str='2.500000e-5'}) + simple_test({'2.5555e2'}, 'Number', 8, {data={is_float=true, base=10, val=2.5555e2}, str='2.5555e2'}) + simple_test({'2.5555e+2'}, 'Number', 9, {data={is_float=true, base=10, val=2.5555e2}, str='2.5555e+2'}) + simple_test({'2.5555e-2'}, 'Number', 9, {data={is_float=true, base=10, val=2.5555e-2}, str='2.5555e-2'}) + simple_test({{data='2.5e-5', size=3}}, + 'Number', 3, {data={is_float=true, base=10, val=2.5}, str='2.5'}) + simple_test({{data='2.5e5', size=4}}, + 'Number', 1, {data={is_float=false, base=10, val=2}, str='2'}) + simple_test({{data='2.5e-50', size=6}}, + 'Number', 6, {data={is_float=true, base=10, val=2.5e-5}, str='2.5e-5'}) end) itp('treats `is` as an identifier', function() flags = tonumber(lib.kELFlagIsNotCmp) -- cgit From fa3cfc0dd54df125a1dbabccda47a5f45dc483ae Mon Sep 17 00:00:00 2001 From: ZyX Date: Mon, 9 Oct 2017 02:55:56 +0300 Subject: viml/parser/expressions: Finish parser 
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Note: formatc.lua was unable to swallow some newer additions to ExprASTNodeType (specifically `kExprNodeOr = '|'` and probably something else), so all `= …` were dropped: in any case they only were there in order to not bother updating viml_pexpr_debug_print_ast_node and since it is now known all nodes which will be present it is not much of an issue. --- test/unit/viml/expressions/lexer_spec.lua | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) (limited to 'test/unit/viml/expressions/lexer_spec.lua') diff --git a/test/unit/viml/expressions/lexer_spec.lua b/test/unit/viml/expressions/lexer_spec.lua index f180d8ceff..674b1b37db 100644 --- a/test/unit/viml/expressions/lexer_spec.lua +++ b/test/unit/viml/expressions/lexer_spec.lua @@ -62,9 +62,9 @@ child_call_once(function() } eltkn_opt_scope_tab = { - [tonumber(lib.kExprLexOptUnspecified)] = 'Unspecified', - [tonumber(lib.kExprLexOptGlobal)] = 'Global', - [tonumber(lib.kExprLexOptLocal)] = 'Local', + [tonumber(lib.kExprOptScopeUnspecified)] = 'Unspecified', + [tonumber(lib.kExprOptScopeGlobal)] = 'Global', + [tonumber(lib.kExprOptScopeLocal)] = 'Local', } end) -- cgit From 1a3635304b80b48625bcd9d48f4f38778b42e4af Mon Sep 17 00:00:00 2001 From: ZyX Date: Mon, 16 Oct 2017 00:07:32 +0300 Subject: charset: Avoid overflow in vim_str2nr --- test/unit/viml/expressions/lexer_spec.lua | 3 +++ 1 file changed, 3 insertions(+) (limited to 'test/unit/viml/expressions/lexer_spec.lua') diff --git a/test/unit/viml/expressions/lexer_spec.lua b/test/unit/viml/expressions/lexer_spec.lua index 674b1b37db..5910468017 100644 --- a/test/unit/viml/expressions/lexer_spec.lua +++ b/test/unit/viml/expressions/lexer_spec.lua @@ -292,6 +292,9 @@ describe('Expressions lexer', function() simple_test({'0b102'}, 'Number', 4, {data={is_float=false, base=2, val=2}, str='0b10'}) simple_test({'10F'}, 'Number', 2, {data={is_float=false, base=10, val=10}, str='10'}) simple_test({'0x0123456789ABCDEFG'}, 'Number', 18, {data={is_float=false, base=16, val=81985529216486895}, str='0x0123456789ABCDEF'}) + simple_test({{data='00', size=2}}, 'Number', 2, {data={is_float=false, base=8, val=0}, str='00'}) + simple_test({{data='009', size=2}}, 'Number', 2, {data={is_float=false, base=8, val=0}, str='00'}) + simple_test({{data='01', size=1}}, 'Number', 1, {data={is_float=false, base=10, val=0}, str='0'}) end local function regular_scope_tests() -- cgit From 3ecb95298ffd9ef6ee681876f2d32553fd222b96 Mon Sep 17 00:00:00 2001 From: ZyX Date: Mon, 30 Oct 2017 01:48:32 +0300 Subject: tests: Fix testlint errors --- test/unit/viml/expressions/lexer_spec.lua | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) (limited to 'test/unit/viml/expressions/lexer_spec.lua') diff --git a/test/unit/viml/expressions/lexer_spec.lua b/test/unit/viml/expressions/lexer_spec.lua index 5910468017..d4ec870a4e 100644 --- a/test/unit/viml/expressions/lexer_spec.lua +++ b/test/unit/viml/expressions/lexer_spec.lua @@ -10,7 +10,6 @@ local ffi = helpers.ffi local eq = helpers.eq local conv_ccs = viml_helpers.conv_ccs -local pline2lua = viml_helpers.pline2lua local new_pstate = viml_helpers.new_pstate local intchar2lua = viml_helpers.intchar2lua local conv_cmp_type = viml_helpers.conv_cmp_type @@ -183,7 +182,7 @@ describe('Expressions lexer', function() end local function simple_test(pstate_arg, exp_type, exp_len, exp) local pstate = new_pstate(pstate_arg) - local exp = shallowcopy(exp) + exp = shallowcopy(exp) exp.type = 
exp_type exp.len = exp_len or #(pstate_arg[0]) exp.start = { col = 0, line = 0 } -- cgit From 7bc6de75263f58c6c4f999bc86a6454ae9f28b80 Mon Sep 17 00:00:00 2001 From: ZyX Date: Sun, 5 Nov 2017 02:41:44 +0300 Subject: api/vim,functests: Add tests for nvim_parse_expression, fix found bugs --- test/unit/viml/expressions/lexer_spec.lua | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'test/unit/viml/expressions/lexer_spec.lua') diff --git a/test/unit/viml/expressions/lexer_spec.lua b/test/unit/viml/expressions/lexer_spec.lua index d4ec870a4e..75a641c48a 100644 --- a/test/unit/viml/expressions/lexer_spec.lua +++ b/test/unit/viml/expressions/lexer_spec.lua @@ -11,11 +11,11 @@ local eq = helpers.eq local conv_ccs = viml_helpers.conv_ccs local new_pstate = viml_helpers.new_pstate -local intchar2lua = viml_helpers.intchar2lua local conv_cmp_type = viml_helpers.conv_cmp_type local pstate_set_str = viml_helpers.pstate_set_str local shallowcopy = global_helpers.shallowcopy +local intchar2lua = global_helpers.intchar2lua local lib = cimport('./src/nvim/viml/parser/expressions.h') -- cgit From c7495ebcc0918ffd682083408895451318e41d1f Mon Sep 17 00:00:00 2001 From: ZyX Date: Sun, 12 Nov 2017 02:18:43 +0300 Subject: viml/parser/expressions: Add support for parsing assignments --- test/unit/viml/expressions/lexer_spec.lua | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) (limited to 'test/unit/viml/expressions/lexer_spec.lua') diff --git a/test/unit/viml/expressions/lexer_spec.lua b/test/unit/viml/expressions/lexer_spec.lua index 75a641c48a..1b57a24ad5 100644 --- a/test/unit/viml/expressions/lexer_spec.lua +++ b/test/unit/viml/expressions/lexer_spec.lua @@ -13,6 +13,7 @@ local conv_ccs = viml_helpers.conv_ccs local new_pstate = viml_helpers.new_pstate local conv_cmp_type = viml_helpers.conv_cmp_type local pstate_set_str = viml_helpers.pstate_set_str +local conv_expr_asgn_type = viml_helpers.conv_expr_asgn_type local shallowcopy = global_helpers.shallowcopy local intchar2lua = global_helpers.intchar2lua @@ -52,6 +53,8 @@ child_call_once(function() [tonumber(lib.kExprLexParenthesis)] = 'Parenthesis', [tonumber(lib.kExprLexComma)] = 'Comma', [tonumber(lib.kExprLexArrow)] = 'Arrow', + + [tonumber(lib.kExprLexAssignment)] = 'Assignment', } eltkn_mul_type_tab = { @@ -118,6 +121,8 @@ local function eltkn2lua(pstate, tkn) ret.data.val = tonumber(tkn.data.num.is_float and tkn.data.num.val.floating or tkn.data.num.val.integer) + elseif ret.type == 'Assignment' then + ret.data = { type = conv_expr_asgn_type(tkn.data.ass.type) } elseif ret.type == 'Invalid' then ret.data = { error = ffi.string(tkn.data.err.msg) } end @@ -198,7 +203,9 @@ describe('Expressions lexer', function() singl_eltkn_test('Question', '?') singl_eltkn_test('Colon', ':') singl_eltkn_test('Dot', '.') + singl_eltkn_test('Assignment', '.=', {type='Concat'}) singl_eltkn_test('Plus', '+') + singl_eltkn_test('Assignment', '+=', {type='Add'}) singl_eltkn_test('Comma', ',') singl_eltkn_test('Multiplication', '*', {type='Mul'}) singl_eltkn_test('Multiplication', '/', {type='Div'}) @@ -266,12 +273,13 @@ describe('Expressions lexer', function() singl_eltkn_test('DoubleQuotedString', '"x\\"', {closed=false}) singl_eltkn_test('DoubleQuotedString', '"\\"x', {closed=false}) singl_eltkn_test('Not', '!') - singl_eltkn_test('Invalid', '=', {error='E15: Expected == or =~: %.*s'}) + singl_eltkn_test('Assignment', '=', {type='Plain'}) comparison_test('==', '!=', 'Equal') comparison_test('=~', '!~', 'Matches') comparison_test('>', '<=', 
'Greater') comparison_test('>=', '<', 'GreaterOrEqual') singl_eltkn_test('Minus', '-') + singl_eltkn_test('Assignment', '-=', {type='Subtract'}) singl_eltkn_test('Arrow', '->') singl_eltkn_test('Invalid', '~', {error='E15: Unidentified character: %.*s'}) simple_test({{data=nil, size=0}}, 'EOC', 0, {error='start.col >= #pstr'}) -- cgit
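
A minimal sketch (not part of the patch series above) of how the helpers introduced in lexer_spec.lua can drive the lexer over a whole string rather than a single token. It assumes the new_pstate and next_eltkn functions and the `lib` cimport defined in the spec; the lex_all name is hypothetical and only used for illustration. The flags value must not include kELFlagPeek, since peeking never advances pstate.pos and the loop below would not terminate.

local function lex_all(str, flags)
  -- new_pstate() builds a ParserState reading from a single in-memory line.
  local pstate = new_pstate({str})
  local tokens = {}
  while true do
    -- next_eltkn() wraps lib.viml_pexpr_next_token() and converts the
    -- returned token into a plain Lua table (see eltkn2lua above).
    local tkn = next_eltkn(pstate, flags)
    if tkn.type == 'EOC' then
      break
    end
    tokens[#tokens + 1] = tkn
  end
  return tokens
end

Per the tests above, lex_all('2.2', 0) would yield Number('2'), Dot, Number('2'), while lex_all('2.2', tonumber(lib.kELFlagAllowFloat)) would yield a single float Number token.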