logo

oasis-root

Compiled tree of Oasis Linux based on own branch at <https://hacktivis.me/git/oasis/> git clone https://anongit.hacktivis.me/git/oasis-root.git
commit: 548c8993e77c5cb43bcf00db2a76e3a59abc1ccb
parent fd7e0c27d4dd63e2a50cbba60dfaeef821e71950
Author: Haelwenn (lanodan) Monnier <contact@hacktivis.me>
Date:   Thu,  6 Nov 2025 23:35:17 +0100

oasis e26ed35625

Diffstat:

Mbin/vis0
Dshare/vis/lexer.lua2--
Dshare/vis/lexers/actionscript.lua60------------------------------------------------------------
Dshare/vis/lexers/ada.lua49-------------------------------------------------
Dshare/vis/lexers/ansi_c.lua199-------------------------------------------------------------------------------
Dshare/vis/lexers/antlr.lua57---------------------------------------------------------
Dshare/vis/lexers/apdl.lua79-------------------------------------------------------------------------------
Dshare/vis/lexers/apl.lua57---------------------------------------------------------
Dshare/vis/lexers/applescript.lua71-----------------------------------------------------------------------
Dshare/vis/lexers/asm.lua399-------------------------------------------------------------------------------
Dshare/vis/lexers/asp.lua31-------------------------------
Dshare/vis/lexers/autohotkey.lua164-------------------------------------------------------------------------------
Dshare/vis/lexers/autoit.lua137-------------------------------------------------------------------------------
Dshare/vis/lexers/awk.lua295-------------------------------------------------------------------------------
Dshare/vis/lexers/bash.lua138-------------------------------------------------------------------------------
Dshare/vis/lexers/batch.lua55-------------------------------------------------------
Dshare/vis/lexers/bibtex.lua48------------------------------------------------
Dshare/vis/lexers/boo.lua66------------------------------------------------------------------
Dshare/vis/lexers/caml.lua65-----------------------------------------------------------------
Dshare/vis/lexers/chuck.lua102-------------------------------------------------------------------------------
Dshare/vis/lexers/clojure.lua148-------------------------------------------------------------------------------
Dshare/vis/lexers/cmake.lua493-------------------------------------------------------------------------------
Dshare/vis/lexers/coffeescript.lua50--------------------------------------------------
Dshare/vis/lexers/container.lua5-----
Dshare/vis/lexers/context.lua54------------------------------------------------------
Dshare/vis/lexers/cpp.lua287-------------------------------------------------------------------------------
Dshare/vis/lexers/crystal.lua98-------------------------------------------------------------------------------
Dshare/vis/lexers/csharp.lua65-----------------------------------------------------------------
Dshare/vis/lexers/css.lua201-------------------------------------------------------------------------------
Dshare/vis/lexers/cuda.lua62--------------------------------------------------------------
Dshare/vis/lexers/dart.lua56--------------------------------------------------------
Dshare/vis/lexers/desktop.lua49-------------------------------------------------
Dshare/vis/lexers/diff.lua25-------------------------
Dshare/vis/lexers/django.lua63---------------------------------------------------------------
Dshare/vis/lexers/dmd.lua141-------------------------------------------------------------------------------
Dshare/vis/lexers/dockerfile.lua47-----------------------------------------------
Dshare/vis/lexers/dot.lua57---------------------------------------------------------
Dshare/vis/lexers/dsv.lua12------------
Dshare/vis/lexers/eiffel.lua59-----------------------------------------------------------
Dshare/vis/lexers/elixir.lua99-------------------------------------------------------------------------------
Dshare/vis/lexers/elm.lua45---------------------------------------------
Dshare/vis/lexers/erlang.lua90-------------------------------------------------------------------------------
Dshare/vis/lexers/fantom.lua84-------------------------------------------------------------------------------
Dshare/vis/lexers/faust.lua46----------------------------------------------
Dshare/vis/lexers/fennel.lua45---------------------------------------------
Dshare/vis/lexers/fish.lua58----------------------------------------------------------
Dshare/vis/lexers/forth.lua58----------------------------------------------------------
Dshare/vis/lexers/fortran.lua87-------------------------------------------------------------------------------
Dshare/vis/lexers/fsharp.lua59-----------------------------------------------------------
Dshare/vis/lexers/fstab.lua127-------------------------------------------------------------------------------
Dshare/vis/lexers/gap.lua45---------------------------------------------
Dshare/vis/lexers/gemini.lua23-----------------------
Dshare/vis/lexers/gettext.lua31-------------------------------
Dshare/vis/lexers/gherkin.lua39---------------------------------------
Dshare/vis/lexers/git-rebase.lua39---------------------------------------
Dshare/vis/lexers/gleam.lua120-------------------------------------------------------------------------------
Dshare/vis/lexers/glsl.lua117-------------------------------------------------------------------------------
Dshare/vis/lexers/gnuplot.lua70----------------------------------------------------------------------
Dshare/vis/lexers/go.lua71-----------------------------------------------------------------------
Dshare/vis/lexers/groovy.lua68--------------------------------------------------------------------
Dshare/vis/lexers/gtkrc.lua57---------------------------------------------------------
Dshare/vis/lexers/hare.lua81-------------------------------------------------------------------------------
Dshare/vis/lexers/haskell.lua47-----------------------------------------------
Dshare/vis/lexers/html.lua152-------------------------------------------------------------------------------
Dshare/vis/lexers/icon.lua61-------------------------------------------------------------
Dshare/vis/lexers/idl.lua52----------------------------------------------------
Dshare/vis/lexers/inform.lua76----------------------------------------------------------------------------
Dshare/vis/lexers/ini.lua39---------------------------------------
Dshare/vis/lexers/io_lang.lua51---------------------------------------------------
Dshare/vis/lexers/java.lua142-------------------------------------------------------------------------------
Dshare/vis/lexers/javascript.lua98-------------------------------------------------------------------------------
Dshare/vis/lexers/jq.lua84-------------------------------------------------------------------------------
Dshare/vis/lexers/json.lua28----------------------------
Dshare/vis/lexers/jsp.lua20--------------------
Dshare/vis/lexers/julia.lua112-------------------------------------------------------------------------------
Dshare/vis/lexers/latex.lua44--------------------------------------------
Dshare/vis/lexers/ledger.lua45---------------------------------------------
Dshare/vis/lexers/less.lua19-------------------
Dshare/vis/lexers/lexer.lua1989-------------------------------------------------------------------------------
Dshare/vis/lexers/lilypond.lua32--------------------------------
Dshare/vis/lexers/lisp.lua59-----------------------------------------------------------
Dshare/vis/lexers/litcoffee.lua21---------------------
Dshare/vis/lexers/logtalk.lua64----------------------------------------------------------------
Dshare/vis/lexers/lua.lua144-------------------------------------------------------------------------------
Dshare/vis/lexers/makefile.lua121-------------------------------------------------------------------------------
Dshare/vis/lexers/man.lua22----------------------
Dshare/vis/lexers/markdown.lua95-------------------------------------------------------------------------------
Dshare/vis/lexers/matlab.lua90-------------------------------------------------------------------------------
Dshare/vis/lexers/mediawiki.lua44--------------------------------------------
Dshare/vis/lexers/meson.lua129-------------------------------------------------------------------------------
Dshare/vis/lexers/moonscript.lua144-------------------------------------------------------------------------------
Dshare/vis/lexers/myrddin.lua54------------------------------------------------------
Dshare/vis/lexers/nemerle.lua66------------------------------------------------------------------
Dshare/vis/lexers/networkd.lua101-------------------------------------------------------------------------------
Dshare/vis/lexers/nim.lua97-------------------------------------------------------------------------------
Dshare/vis/lexers/nsis.lua150-------------------------------------------------------------------------------
Dshare/vis/lexers/null.lua4----
Dshare/vis/lexers/objeck.lua59-----------------------------------------------------------
Dshare/vis/lexers/objective_c.lua69---------------------------------------------------------------------
Dshare/vis/lexers/output.lua97-------------------------------------------------------------------------------
Dshare/vis/lexers/pascal.lua64----------------------------------------------------------------
Dshare/vis/lexers/perl.lua161-------------------------------------------------------------------------------
Dshare/vis/lexers/php.lua107-------------------------------------------------------------------------------
Dshare/vis/lexers/pico8.lua35-----------------------------------
Dshare/vis/lexers/pike.lua54------------------------------------------------------
Dshare/vis/lexers/pkgbuild.lua79-------------------------------------------------------------------------------
Dshare/vis/lexers/pony.lua96-------------------------------------------------------------------------------
Dshare/vis/lexers/powershell.lua62--------------------------------------------------------------
Dshare/vis/lexers/prolog.lua354-------------------------------------------------------------------------------
Dshare/vis/lexers/props.lua36------------------------------------
Dshare/vis/lexers/protobuf.lua48------------------------------------------------
Dshare/vis/lexers/ps.lua49-------------------------------------------------
Dshare/vis/lexers/pure.lua50--------------------------------------------------
Dshare/vis/lexers/python.lua128-------------------------------------------------------------------------------
Dshare/vis/lexers/rails.lua41-----------------------------------------
Dshare/vis/lexers/rc.lua52----------------------------------------------------
Dshare/vis/lexers/reason.lua67-------------------------------------------------------------------
Dshare/vis/lexers/rebol.lua103-------------------------------------------------------------------------------
Dshare/vis/lexers/rest.lua215-------------------------------------------------------------------------------
Dshare/vis/lexers/rexx.lua78------------------------------------------------------------------------------
Dshare/vis/lexers/rhtml.lua20--------------------
Dshare/vis/lexers/routeros.lua60------------------------------------------------------------
Dshare/vis/lexers/rpmspec.lua33---------------------------------
Dshare/vis/lexers/rstats.lua52----------------------------------------------------
Dshare/vis/lexers/ruby.lua127-------------------------------------------------------------------------------
Dshare/vis/lexers/rust.lua90-------------------------------------------------------------------------------
Dshare/vis/lexers/sass.lua21---------------------
Dshare/vis/lexers/scala.lua61-------------------------------------------------------------
Dshare/vis/lexers/scheme.lua175-------------------------------------------------------------------------------
Dshare/vis/lexers/smalltalk.lua46----------------------------------------------
Dshare/vis/lexers/sml.lua93-------------------------------------------------------------------------------
Dshare/vis/lexers/snobol4.lua71-----------------------------------------------------------------------
Dshare/vis/lexers/spin.lua70----------------------------------------------------------------------
Dshare/vis/lexers/sql.lua64----------------------------------------------------------------
Dshare/vis/lexers/strace.lua31-------------------------------
Dshare/vis/lexers/systemd.lua133-------------------------------------------------------------------------------
Dshare/vis/lexers/taskpaper.lua27---------------------------
Dshare/vis/lexers/tcl.lua47-----------------------------------------------
Dshare/vis/lexers/tex.lua28----------------------------
Dshare/vis/lexers/texinfo.lua207-------------------------------------------------------------------------------
Dshare/vis/lexers/text.lua10----------
Dshare/vis/lexers/toml.lua44--------------------------------------------
Dshare/vis/lexers/troff.lua42------------------------------------------
Dshare/vis/lexers/txt2tags.lua131-------------------------------------------------------------------------------
Dshare/vis/lexers/typescript.lua18------------------
Dshare/vis/lexers/vala.lua62--------------------------------------------------------------
Dshare/vis/lexers/vb.lua68--------------------------------------------------------------------
Dshare/vis/lexers/vbscript.lua63---------------------------------------------------------------
Dshare/vis/lexers/vcard.lua71-----------------------------------------------------------------------
Dshare/vis/lexers/verilog.lua89-------------------------------------------------------------------------------
Dshare/vis/lexers/vhdl.lua72------------------------------------------------------------------------
Dshare/vis/lexers/wsf.lua87-------------------------------------------------------------------------------
Dshare/vis/lexers/xml.lua75---------------------------------------------------------------------------
Dshare/vis/lexers/xs.lua60------------------------------------------------------------
Dshare/vis/lexers/xtend.lua88-------------------------------------------------------------------------------
Dshare/vis/lexers/yaml.lua106-------------------------------------------------------------------------------
Dshare/vis/lexers/zig.lua93-------------------------------------------------------------------------------
Dshare/vis/plugins/complete-filename.lua51---------------------------------------------------
Dshare/vis/plugins/complete-word.lua38--------------------------------------
Dshare/vis/plugins/digraph.lua23-----------------------
Dshare/vis/plugins/filetype.lua638-------------------------------------------------------------------------------
Dshare/vis/plugins/number-inc-dec.lua59-----------------------------------------------------------
Dshare/vis/plugins/textobject-lexer.lua31-------------------------------
Dshare/vis/themes/base-16.lua156-------------------------------------------------------------------------------
Dshare/vis/themes/default.lua2--
Dshare/vis/themes/solarized.lua69---------------------------------------------------------------------
Dshare/vis/themes/zenburn.lua39---------------------------------------
Dshare/vis/vis-std.lua143-------------------------------------------------------------------------------
Dshare/vis/vis.lua335-------------------------------------------------------------------------------
Dshare/vis/visrc.lua11-----------
170 files changed, 0 insertions(+), 16435 deletions(-)

diff --git a/bin/vis b/bin/vis Binary files differ. diff --git a/share/vis/lexer.lua b/share/vis/lexer.lua @@ -1 +0,0 @@ -lexers/lexer.lua -\ No newline at end of file diff --git a/share/vis/lexers/actionscript.lua b/share/vis/lexers/actionscript.lua @@ -1,60 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Actionscript LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('actionscript') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'break', 'continue', 'delete', 'do', 'else', 'for', 'function', 'if', 'in', 'new', 'on', 'return', - 'this', 'typeof', 'var', 'void', 'while', 'with', 'NaN', 'Infinity', 'false', 'null', 'true', - 'undefined', - -- Reserved for future use. - 'abstract', 'case', 'catch', 'class', 'const', 'debugger', 'default', 'export', 'extends', - 'final', 'finally', 'goto', 'implements', 'import', 'instanceof', 'interface', 'native', - 'package', 'private', 'Void', 'protected', 'public', 'dynamic', 'static', 'super', 'switch', - 'synchonized', 'throw', 'throws', 'transient', 'try', 'volatile' -})) - --- Types. -lex:add_rule('type', token(lexer.TYPE, word_match{ - 'Array', 'Boolean', 'Color', 'Date', 'Function', 'Key', 'MovieClip', 'Math', 'Mouse', 'Number', - 'Object', 'Selection', 'Sound', 'String', 'XML', 'XMLNode', 'XMLSocket', - -- Reserved for future use. - 'boolean', 'byte', 'char', 'double', 'enum', 'float', 'int', 'long', 'short' -})) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"', true) -local ml_str = lexer.range('<![CDATA[', ']]>') -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str + ml_str)) - --- Comments. 
-local line_comment = lexer.to_eol('//') -local block_comment = lexer.range('/*', '*/') -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number * S('LlUuFf')^-2)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('=!<>+-/*%&|^~.,;?()[]{}'))) - --- Fold points. -lex:add_fold_point(lexer.OPERATOR, '{', '}') -lex:add_fold_point(lexer.COMMENT, '/*', '*/') -lex:add_fold_point(lexer.STRING, '<![CDATA[', ']]>') - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/ada.lua b/share/vis/lexers/ada.lua @@ -1,49 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Ada LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('ada') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match({ - 'abort', 'abs', 'abstract', 'accept', 'access', 'aliased', 'all', 'and', 'array', 'at', 'begin', - 'body', 'case', 'constant', 'declare', 'delay', 'delta', 'digits', 'do', 'else', 'elsif', 'end', - 'entry', 'exception', 'exit', 'for', 'function', 'generic', 'goto', 'if', 'in', 'interface', 'is', - 'limited', 'loop', 'mod', 'new', 'not', 'null', 'of', 'or', 'others', 'out', 'overriding', - 'package', 'parallel', 'pragma', 'private', 'procedure', 'protected', 'raise', 'range', 'record', - 'rem', 'renames', 'requeue', 'return', 'reverse', 'select', 'separate', 'some', 'subtype', - 'synchronized', 'tagged', 'task', 'terminate', 'then', 'type', 'until', 'use', 'when', 'while', - 'with', 'xor', -- - 'true', 'false' -}, true))) - --- Types. 
-lex:add_rule('type', token(lexer.TYPE, word_match({ - 'boolean', 'character', 'count', 'duration', 'float', 'integer', 'long_float', 'long_integer', - 'priority', 'short_float', 'short_integer', 'string' -}, true))) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. -lex:add_rule('string', token(lexer.STRING, lexer.range('"', true, false))) - --- Comments. -lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('--'))) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number_('_'))) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S(':;=<>&+-*/.()'))) - -lexer.property['scintillua.comment'] = '--' - -return lex diff --git a/share/vis/lexers/ansi_c.lua b/share/vis/lexers/ansi_c.lua @@ -1,199 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- C LPeg lexer. - -local lexer = lexer -local P, S, B = lpeg.P, lpeg.S, lpeg.B - -local lex = lexer.new(...) - --- Keywords. -lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD))) - --- Types. -lex:add_rule('type', lex:tag(lexer.TYPE, lex:word_match(lexer.TYPE))) - --- Functions. -local builtin_func = -(B('.') + B('->')) * - lex:tag(lexer.FUNCTION_BUILTIN, lex:word_match(lexer.FUNCTION_BUILTIN)) -local func = lex:tag(lexer.FUNCTION, lexer.word) -local method = (B('.') + B('->')) * lex:tag(lexer.FUNCTION_METHOD, lexer.word) -lex:add_rule('function', (builtin_func + method + func) * #(lexer.space^0 * '(')) - --- Constants. -lex:add_rule('constants', lex:tag(lexer.CONSTANT_BUILTIN, - -(B('.') + B('->')) * lex:word_match(lexer.CONSTANT_BUILTIN))) - --- Labels. -lex:add_rule('label', lex:tag(lexer.LABEL, lexer.starts_line(lexer.word * ':'))) - --- Strings. -local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"', true) -lex:add_rule('string', lex:tag(lexer.STRING, P('L')^-1 * (sq_str + dq_str))) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) - --- Comments. 
-local line_comment = lexer.to_eol('//', true) -local block_comment = lexer.range('/*', '*/') + - lexer.range('#if' * S(' \t')^0 * '0' * lexer.space, '#endif') -lex:add_rule('comment', lex:tag(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -local integer = lexer.integer * lexer.word_match('u l ll ul ull lu llu', true)^-1 -local float = lexer.float * P('f')^-1 -lex:add_rule('number', lex:tag(lexer.NUMBER, float + integer)) - --- Preprocessor. -local include = lex:tag(lexer.PREPROCESSOR, '#' * S('\t ')^0 * 'include') * - (lex:get_rule('whitespace') * lex:tag(lexer.STRING, lexer.range('<', '>', true)))^-1 -local preproc = lex:tag(lexer.PREPROCESSOR, '#' * S('\t ')^0 * lex:word_match(lexer.PREPROCESSOR)) -lex:add_rule('preprocessor', include + preproc) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('+-/*%<>~!=^&|?~:;,.()[]{}'))) - --- Fold points. -lex:add_fold_point(lexer.PREPROCESSOR, '#if', '#endif') -lex:add_fold_point(lexer.PREPROCESSOR, '#ifdef', '#endif') -lex:add_fold_point(lexer.PREPROCESSOR, '#ifndef', '#endif') -lex:add_fold_point(lexer.OPERATOR, '{', '}') -lex:add_fold_point(lexer.COMMENT, '/*', '*/') - --- Word lists. -lex:set_word_list(lexer.KEYWORD, { - 'auto', 'break', 'case', 'const', 'continue', 'default', 'do', 'else', 'enum', 'extern', 'for', - 'goto', 'if', 'inline', 'register', 'restrict', 'return', 'sizeof', 'static', 'switch', 'typedef', - 'volatile', 'while', -- - 'false', 'true', -- C99 - 'alignas', 'alignof', '_Atomic', '_Generic', 'noreturn', '_Static_assert', 'thread_local', -- C11 - -- Compiler. 
- 'asm', '__asm', '__asm__', '__restrict__', '__inline', '__inline__', '__attribute__', '__declspec' -}) - -lex:set_word_list(lexer.TYPE, { - 'bool', 'char', 'double', 'float', 'int', 'long', 'short', 'signed', 'struct', 'union', - 'unsigned', 'void', -- - 'complex', 'imaginary', '_Complex', '_Imaginary', -- complex.h C99 - 'lconv', -- locale.h - 'div_t', -- math.h - 'va_list', -- stdarg.h - 'bool', '_Bool', -- stdbool.h C99 - -- stddef.h. - 'size_t', 'ptrdiff_t', -- - 'max_align_t', -- C11 - -- stdint.h. - 'int8_t', 'int16_t', 'int32_t', 'int64_t', 'int_fast8_t', 'int_fast16_t', 'int_fast32_t', - 'int_fast64_t', 'int_least8_t', 'int_least16_t', 'int_least32_t', 'int_least64_t', 'intmax_t', - 'intptr_t', 'uint8_t', 'uint16_t', 'uint32_t', 'uint64_t', 'uint_fast8_t', 'uint_fast16_t', - 'uint_fast32_t', 'uint_fast64_t', 'uint_least8_t', 'uint_least16_t', 'uint_least32_t', - 'uint_least64_t', 'uintmax_t', 'uintptr_t', -- - 'FILE', 'fpos_t', -- stdio.h - 'div_t', 'ldiv_t', -- stdlib.h - -- time.h. - 'tm', 'time_t', 'clock_t', -- - 'timespec' -- C11 -}) - -lex:set_word_list(lexer.FUNCTION_BUILTIN, { - 'assert', -- assert.h - -- complex.h. - 'CMPLX', 'creal', 'cimag', 'cabs', 'carg', 'conj', 'cproj', - -- C99 - 'cexp', 'cpow', 'csin', 'ccos', 'ctan', 'casin', 'cacos', 'catan', 'csinh', 'ccosh', 'ctanh', - 'casinh', 'cacosh', 'catanh', - -- ctype.h. - 'isalnum', 'isalpha', 'islower', 'isupper', 'isdigit', 'isxdigit', 'iscntrl', 'isgraph', - 'isspace', 'isprint', 'ispunct', 'tolower', 'toupper', -- - 'isblank', -- C99 - -- inttypes.h. - 'INT8_C', 'INT16_C', 'INT32_C', 'INT64_C', 'INTMAX_C', 'UINT8_C', 'UINT16_C', 'UINT32_C', - 'UINT64_C', 'UINTMAX_C', -- - 'setlocale', 'localeconv', -- locale.h - -- math.h. - 'abs', 'div', 'fabs', 'fmod', 'exp', 'log', 'log10', 'pow', 'sqrt', 'sin', 'cos', 'tan', 'asin', - 'acos', 'atan', 'atan2', 'sinh', 'cosh', 'tanh', 'ceil', 'floor', 'frexp', 'ldexp', 'modf', - -- C99. 
- 'remainder', 'remquo', 'fma', 'fmax', 'fmin', 'fdim', 'nan', 'exp2', 'expm1', 'log2', 'log1p', - 'cbrt', 'hypot', 'asinh', 'acosh', 'atanh', 'erf', 'erfc', 'tgamma', 'lgamma', 'trunc', 'round', - 'nearbyint', 'rint', 'scalbn', 'ilogb', 'logb', 'nextafter', 'nexttoward', 'copysign', 'isfinite', - 'isinf', 'isnan', 'isnormal', 'signbit', 'isgreater', 'isgreaterequal', 'isless', 'islessequal', - 'islessgreater', 'isunordered', -- - 'strtoimax', 'strtoumax', -- inttypes.h C99 - 'signal', 'raise', -- signal.h - 'setjmp', 'longjmp', -- setjmp.h - 'va_start', 'va_arg', 'va_end', -- stdarg.h - -- stdio.h. - 'fopen', 'freopen', 'fclose', 'fflush', 'setbuf', 'setvbuf', 'fwide', 'fread', 'fwrite', 'fgetc', - 'getc', 'fgets', 'fputc', 'putc', 'getchar', 'gets', 'putchar', 'puts', 'ungetc', 'scanf', - 'fscanf', 'sscanf', 'printf', 'fprintf', 'sprintf', 'vprintf', 'vfprintf', 'vsprintf', 'ftell', - 'fgetpos', 'fseek', 'fsetpos', 'rewind', 'clearerr', 'feof', 'ferror', 'perror', 'remove', - 'rename', 'tmpfile', 'tmpnam', - -- stdlib.h. - 'abort', 'exit', 'atexit', 'system', 'getenv', 'malloc', 'calloc', 'realloc', 'free', 'atof', - 'atoi', 'atol', 'strtol', 'strtoul', 'strtod', 'mblen', 'mbsinit', 'mbrlen', 'qsort', 'bsearch', - 'rand', 'srand', -- - 'quick_exit', '_Exit', 'at_quick_exit', 'aligned_alloc', -- C11 - -- string.h. - 'strcpy', 'strncpy', 'strcat', 'strncat', 'strxfrm', 'strlen', 'strcmp', 'strncmp', 'strcoll', - 'strchr', 'strrchr', 'strspn', 'strcspn', 'strpbrk', 'strstr', 'strtok', 'memchr', 'memcmp', - 'memset', 'memcpy', 'memmove', 'strerror', - -- time.h. - 'difftime', 'time', 'clock', 'asctime', 'ctime', 'gmtime', 'localtime', 'mktime', -- - 'timespec_get' -- C11 -}) - -lex:set_word_list(lexer.CONSTANT_BUILTIN, { - 'NULL', -- - '__DATE__', '__FILE__', '__LINE__', '__TIME__', '__func__', -- preprocessor - -- errno.h. 
- 'errno', -- - 'E2BIG', 'EACCES', 'EADDRINUSE', 'EADDRNOTAVAIL', 'EAFNOSUPPORT', 'EAGAIN', 'EALREADY', 'EBADF', - 'EBADMSG', 'EBUSY', 'ECANCELED', 'ECHILD', 'ECONNABORTED', 'ECONNREFUSED', 'ECONNRESET', - 'EDEADLK', 'EDESTADDRREQ', 'EDOM', 'EDQUOT', 'EEXIST', 'EFAULT', 'EFBIG', 'EHOSTUNREACH', 'EIDRM', - 'EILSEQ', 'EINPROGRESS', 'EINTR', 'EINVAL', 'EIO', 'EISCONN', 'EISDIR', 'ELOOP', 'EMFILE', - 'EMLINK', 'EMSGSIZE', 'EMULTIHOP', 'ENAMETOOLONG', 'ENETDOWN', 'ENETRESET', 'ENETUNREACH', - 'ENFILE', 'ENOBUFS', 'ENODATA', 'ENODEV', 'ENOENT', 'ENOEXEC', 'ENOLCK', 'ENOLINK', 'ENOMEM', - 'ENOMSG', 'ENOPROTOOPT', 'ENOSPC', 'ENOSR', 'ENOSTR', 'ENOSYS', 'ENOTCONN', 'ENOTDIR', - 'ENOTEMPTY', 'ENOTRECOVERABLE', 'ENOTSOCK', 'ENOTSUP', 'ENOTTY', 'ENXIO', 'EOPNOTSUPP', - 'EOVERFLOW', 'EOWNERDEAD', 'EPERM', 'EPIPE', 'EPROTO', 'EPROTONOSUPPORT', 'EPROTOTYPE', 'ERANGE', - 'EROFS', 'ESPIPE', 'ESRCH', 'ESTALE', 'ETIME', 'ETIMEDOUT', 'ETXTBSY', 'EWOULDBLOCK', 'EXDEV', - -- float.h. - 'FLT_MIN', 'DBL_MIN', 'LDBL_MIN', 'FLT_MAX', 'DBL_MAX', 'LDBL_MAX', - -- limits.h. - 'CHAR_BIT', 'MB_LEN_MAX', 'CHAR_MIN', 'CHAR_MAX', 'SCHAR_MIN', 'SHRT_MIN', 'INT_MIN', 'LONG_MIN', - 'SCHAR_MAX', 'SHRT_MAX', 'INT_MAX', 'LONG_MAX', 'UCHAR_MAX', 'USHRT_MAX', 'UINT_MAX', 'ULONG_MAX', - -- C99. - 'LLONG_MIN', 'ULLONG_MAX', 'PTRDIFF_MIN', 'PTRDIFF_MAX', 'SIZE_MAX', 'SIG_ATOMIC_MIN', - 'SIG_ATOMIC_MAX', 'WINT_MIN', 'WINT_MAX', 'WCHAR_MIN', 'WCHAR_MAX', -- - 'LC_ALL', 'LC_COLLATE', 'LC_CTYPE', 'LC_MONETARY', 'LC_NUMERIC', 'LC_TIME', -- locale.h - -- math.h. - 'HUGE_VAL', -- - 'INFINITY', 'NAN', -- C99 - -- stdint.h. 
- 'INT8_MIN', 'INT16_MIN', 'INT32_MIN', 'INT64_MIN', 'INT_FAST8_MIN', 'INT_FAST16_MIN', - 'INT_FAST32_MIN', 'INT_FAST64_MIN', 'INT_LEAST8_MIN', 'INT_LEAST16_MIN', 'INT_LEAST32_MIN', - 'INT_LEAST64_MIN', 'INTPTR_MIN', 'INTMAX_MIN', 'INT8_MAX', 'INT16_MAX', 'INT32_MAX', 'INT64_MAX', - 'INT_FAST8_MAX', 'INT_FAST16_MAX', 'INT_FAST32_MAX', 'INT_FAST64_MAX', 'INT_LEAST8_MAX', - 'INT_LEAST16_MAX', 'INT_LEAST32_MAX', 'INT_LEAST64_MAX', 'INTPTR_MAX', 'INTMAX_MAX', 'UINT8_MAX', - 'UINT16_MAX', 'UINT32_MAX', 'UINT64_MAX', 'UINT_FAST8_MAX', 'UINT_FAST16_MAX', 'UINT_FAST32_MAX', - 'UINT_FAST64_MAX', 'UINT_LEAST8_MAX', 'UINT_LEAST16_MAX', 'UINT_LEAST32_MAX', 'UINT_LEAST64_MAX', - 'UINTPTR_MAX', 'UINTMAX_MAX', - -- stdio.h - 'stdin', 'stdout', 'stderr', 'EOF', 'FOPEN_MAX', 'FILENAME_MAX', 'BUFSIZ', '_IOFBF', '_IOLBF', - '_IONBF', 'SEEK_SET', 'SEEK_CUR', 'SEEK_END', 'TMP_MAX', -- - 'EXIT_SUCCESS', 'EXIT_FAILURE', 'RAND_MAX', -- stdlib.h - -- signal.h. - 'SIG_DFL', 'SIG_IGN', 'SIG_ERR', 'SIGABRT', 'SIGFPE', 'SIGILL', 'SIGINT', 'SIGSEGV', 'SIGTERM', -- - 'CLOCKS_PER_SEC' -- time.h. -}) - -lex:set_word_list(lexer.PREPROCESSOR, { - 'define', 'defined', 'elif', 'else', 'endif', 'error', 'if', 'ifdef', 'ifndef', 'line', 'pragma', - 'undef' -}) - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/antlr.lua b/share/vis/lexers/antlr.lua @@ -1,57 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- ANTLR LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('antlr') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. 
-lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'abstract', 'break', 'case', 'catch', 'continue', 'default', 'do', 'else', 'extends', 'final', - 'finally', 'for', 'if', 'implements', 'instanceof', 'native', 'new', 'private', 'protected', - 'public', 'return', 'static', 'switch', 'synchronized', 'throw', 'throws', 'transient', 'try', - 'volatile', 'while', 'package', 'import', 'header', 'options', 'tokens', 'strictfp', 'false', - 'null', 'super', 'this', 'true' -})) - --- Types. -lex:add_rule('type', token(lexer.TYPE, word_match( - 'boolean byte char class double float int interface long short void'))) - --- Functions. -lex:add_rule('func', token(lexer.FUNCTION, 'assert')) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Comments. -local line_comment = lexer.to_eol('//') -local block_comment = lexer.range('/*', '*/') -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Actions. -local open_brace = token(lexer.OPERATOR, '{') -local close_brace = token(lexer.OPERATOR, '}') -lex:add_rule('action', open_brace * token('action', (1 - P('}'))^0) * close_brace^-1) -lex:add_style('action', lexer.styles.nothing) - --- Strings. -lex:add_rule('string', token(lexer.STRING, lexer.range("'", true))) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('$@:;|.=+*?~!^>-()[]{}'))) - --- Fold points. -lex:add_fold_point(lexer.OPERATOR, ':', ';') -lex:add_fold_point(lexer.OPERATOR, '(', ')') -lex:add_fold_point(lexer.OPERATOR, '{', '}') -lex:add_fold_point(lexer.COMMENT, '/*', '*/') - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/apdl.lua b/share/vis/lexers/apdl.lua @@ -1,79 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- APDL LPeg lexer. 
- -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('apdl', {case_insensitive_fold_points = true}) - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match({ - '*abbr', '*abb', '*afun', '*afu', '*ask', '*cfclos', '*cfc', '*cfopen', '*cfo', '*cfwrite', - '*cfw', '*create', '*cre', '*cycle', '*cyc', '*del', '*dim', '*do', '*elseif', '*else', '*enddo', - '*endif', '*end', '*eval', '*eva', '*exit', '*exi', '*get', '*go', '*if', '*list', '*lis', - '*mfouri', '*mfo', '*mfun', '*mfu', '*mooney', '*moo', '*moper', '*mop', '*msg', '*repeat', - '*rep', '*set', '*status', '*sta', '*tread', '*tre', '*ulib', '*uli', '*use', '*vabs', '*vab', - '*vcol', '*vco', '*vcum', '*vcu', '*vedit', '*ved', '*vfact', '*vfa', '*vfill', '*vfi', '*vfun', - '*vfu', '*vget', '*vge', '*vitrp', '*vit', '*vlen', '*vle', '*vmask', '*vma', '*voper', '*vop', - '*vplot', '*vpl', '*vput', '*vpu', '*vread', '*vre', '*vscfun', '*vsc', '*vstat', '*vst', - '*vwrite', '*vwr', -- - '/anfile', '/anf', '/angle', '/ang', '/annot', '/ann', '/anum', '/anu', '/assign', '/ass', - '/auto', '/aut', '/aux15', '/aux2', '/aux', '/axlab', '/axl', '/batch', '/bat', '/clabel', '/cla', - '/clear', '/cle', '/clog', '/clo', '/cmap', '/cma', '/color', '/col', '/com', '/config', - '/contour', '/con', '/copy', '/cop', '/cplane', '/cpl', '/ctype', '/cty', '/cval', '/cva', - '/delete', '/del', '/devdisp', '/device', '/dev', '/dist', '/dis', '/dscale', '/dsc', '/dv3d', - '/dv3', '/edge', '/edg', '/efacet', '/efa', '/eof', '/erase', '/era', '/eshape', '/esh', '/exit', - '/exi', '/expand', '/exp', '/facet', '/fac', '/fdele', '/fde', '/filname', '/fil', '/focus', - '/foc', '/format', '/for', '/ftype', '/fty', '/gcmd', '/gcm', '/gcolumn', '/gco', '/gfile', - '/gfi', '/gformat', '/gfo', '/gline', '/gli', '/gmarker', '/gma', '/golist', '/gol', 
'/gopr', - '/gop', '/go', '/graphics', '/gra', '/gresume', '/gre', '/grid', '/gri', '/gropt', '/gro', - '/grtyp', '/grt', '/gsave', '/gsa', '/gst', '/gthk', '/gth', '/gtype', '/gty', '/header', '/hea', - '/input', '/inp', '/larc', '/lar', '/light', '/lig', '/line', '/lin', '/lspec', '/lsp', - '/lsymbol', '/lsy', '/menu', '/men', '/mplib', '/mpl', '/mrep', '/mre', '/mstart', '/mst', - '/nerr', '/ner', '/noerase', '/noe', '/nolist', '/nol', '/nopr', '/nop', '/normal', '/nor', - '/number', '/num', '/opt', '/output', '/out', '/page', '/pag', '/pbc', '/pbf', '/pcircle', '/pci', - '/pcopy', '/pco', '/plopts', '/plo', '/pmacro', '/pma', '/pmeth', '/pme', '/pmore', '/pmo', - '/pnum', '/pnu', '/polygon', '/pol', '/post26', '/post1', '/pos', '/prep7', '/pre', '/psearch', - '/pse', '/psf', '/pspec', '/psp', '/pstatus', '/pst', '/psymb', '/psy', '/pwedge', '/pwe', - '/quit', '/qui', '/ratio', '/rat', '/rename', '/ren', '/replot', '/rep', '/reset', '/res', '/rgb', - '/runst', '/run', '/seclib', '/sec', '/seg', '/shade', '/sha', '/showdisp', '/show', '/sho', - '/shrink', '/shr', '/solu', '/sol', '/sscale', '/ssc', '/status', '/sta', '/stitle', '/sti', - '/syp', '/sys', '/title', '/tit', '/tlabel', '/tla', '/triad', '/tri', '/trlcy', '/trl', '/tspec', - '/tsp', '/type', '/typ', '/ucmd', '/ucm', '/uis', '/ui', '/units', '/uni', '/user', '/use', - '/vcone', '/vco', '/view', '/vie', '/vscale', '/vsc', '/vup', '/wait', '/wai', '/window', '/win', - '/xrange', '/xra', '/yrange', '/yra', '/zoom', '/zoo' -}, true))) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. -lex:add_rule('string', token(lexer.STRING, lexer.range("'", true, false))) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Functions. -lex:add_rule('function', token(lexer.FUNCTION, lexer.range('%', true, false))) - --- Labels. -lex:add_rule('label', token(lexer.LABEL, lexer.starts_line(':') * lexer.word)) - --- Comments. 
-lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('!'))) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('+-*/$=,;()'))) - --- Fold points. -lex:add_fold_point(lexer.KEYWORD, '*if', '*endif') -lex:add_fold_point(lexer.KEYWORD, '*do', '*enddo') -lex:add_fold_point(lexer.KEYWORD, '*dowhile', '*enddo') - -lexer.property['scintillua.comment'] = '!' - -return lex diff --git a/share/vis/lexers/apl.lua b/share/vis/lexers/apl.lua @@ -1,57 +0,0 @@ --- Copyright 2015-2024 David B. Lamkins <david@lamkins.net>. See LICENSE. --- APL LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('apl') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Comments. -lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol(P('⍝') + '#'))) - --- Strings. -local sq_str = lexer.range("'", false, false) -local dq_str = lexer.range('"') -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) - --- Numbers. -local dig = lexer.digit -local rad = P('.') -local exp = S('eE') -local img = S('jJ') -local sgn = P('¯')^-1 -local float = sgn * (dig^0 * rad * dig^1 + dig^1 * rad * dig^0 + dig^1) * (exp * sgn * dig^1)^-1 -lex:add_rule('number', token(lexer.NUMBER, float * img * float + float)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, - P('⍞') + 'χ' + '⍺' + '⍶' + '⍵' + '⍹' + '⎕' * lexer.alpha^0)) - --- Names. -local n1l = lexer.alpha -local n1b = P('_') + '∆' + '⍙' -local n2l = n1l + lexer.digit -local n2b = n1b + '¯' -local n1 = n1l + n1b -local n2 = n2l + n2b -local name = n1 * n2^0 - --- Labels. -lex:add_rule('label', token(lexer.LABEL, name * ':')) - --- Variables. -lex:add_rule('variable', token(lexer.VARIABLE, name)) - --- Special. -lex:add_rule('special', token(lexer.TYPE, S('{}[]();') + '←' + '→' + '◊')) - --- Nabla. 
-lex:add_rule('nabla', token(lexer.PREPROCESSOR, P('∇') + '⍫')) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/applescript.lua b/share/vis/lexers/applescript.lua @@ -1,71 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Applescript LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('applescript') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match({ - 'script', 'property', 'prop', 'end', 'copy', 'to', 'set', 'global', 'local', 'on', 'to', 'of', - 'in', 'given', 'with', 'without', 'return', 'continue', 'tell', 'if', 'then', 'else', 'repeat', - 'times', 'while', 'until', 'from', 'exit', 'try', 'error', 'considering', 'ignoring', 'timeout', - 'transaction', 'my', 'get', 'put', 'into', 'is', - -- References. - 'each', 'some', 'every', 'whose', 'where', 'id', 'index', 'first', 'second', 'third', 'fourth', - 'fifth', 'sixth', 'seventh', 'eighth', 'ninth', 'tenth', 'last', 'front', 'back', 'st', 'nd', - 'rd', 'th', 'middle', 'named', 'through', 'thru', 'before', 'after', 'beginning', 'the', - -- Commands. - 'close', 'copy', 'count', 'delete', 'duplicate', 'exists', 'launch', 'make', 'move', 'open', - 'print', 'quit', 'reopen', 'run', 'save', 'saving', - -- Operators. - 'div', 'mod', 'and', 'not', 'or', 'as', 'contains', 'equal', 'equals', 'isn\'t' -}, true))) - --- Constants. -lex:add_rule('constant', token(lexer.CONSTANT, word_match({ - 'case', 'diacriticals', 'expansion', 'hyphens', 'punctuation', - -- Predefined variables. - 'it', 'me', 'version', 'pi', 'result', 'space', 'tab', 'anything', - -- Text styles. - 'bold', 'condensed', 'expanded', 'hidden', 'italic', 'outline', 'plain', 'shadow', - 'strikethrough', 'subscript', 'superscript', 'underline', - -- Save options. - 'ask', 'no', 'yes', - -- Booleans. 
- 'false', 'true', - -- Date and time. - 'weekday', 'monday', 'mon', 'tuesday', 'tue', 'wednesday', 'wed', 'thursday', 'thu', 'friday', - 'fri', 'saturday', 'sat', 'sunday', 'sun', 'month', 'january', 'jan', 'february', 'feb', 'march', - 'mar', 'april', 'apr', 'may', 'june', 'jun', 'july', 'jul', 'august', 'aug', 'september', 'sep', - 'october', 'oct', 'november', 'nov', 'december', 'dec', 'minutes', 'hours', 'days', 'weeks' -}, true))) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.alpha * (lexer.alnum + '_')^0)) - --- Strings. -lex:add_rule('string', token(lexer.STRING, lexer.range('"', true))) - --- Comments. -local line_comment = lexer.to_eol('--') -local block_comment = lexer.range('(*', '*)') -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('+-^*/&<>=:,(){}'))) - --- Fold points. -lex:add_fold_point(lexer.COMMENT, '(*', '*)') - -lexer.property['scintillua.comment'] = '--' - -return lex diff --git a/share/vis/lexers/asm.lua b/share/vis/lexers/asm.lua @@ -1,399 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- NASM Assembly LPeg lexer. - -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(...) - --- Keywords. -lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD))) - --- Instructions. -lex:add_rule('instruction', - lex:tag(lexer.FUNCTION_BUILTIN .. '.instruction', lex:word_match('instruction'))) - --- Registers. -lex:add_rule('register', lex:tag(lexer.CONSTANT_BUILTIN .. '.register', lex:word_match('register'))) - --- Types. -local sizes = lex:word_match('size') -local wrt_types = '..' * lex:word_match(lexer.TYPE .. '.wrt') -lex:add_rule('type', lex:tag(lexer.TYPE, sizes + wrt_types)) - --- Constants. 
-local word = (lexer.alpha + S('$._?')) * (lexer.alnum + S('$._?#@~'))^0 -lex:add_rule('constant', lex:tag(lexer.CONSTANT_BUILTIN, - lex:word_match(lexer.CONSTANT_BUILTIN) + '$' * P('$')^-1 * -word)) - --- Labels. -lex:add_rule('label', lex:tag(lexer.LABEL, word * ':')) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, word)) - --- Strings. -local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"', true) -lex:add_rule('string', lex:tag(lexer.STRING, sq_str + dq_str)) - --- Comments. -lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.to_eol(';'))) - --- Numbers. -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number * S('hqb')^-1)) - --- Preprocessor. -local pp_word = lex:word_match(lexer.PREPROCESSOR) -local pp_symbol = '??' + S('!$+?') + '%' * -lexer.space + lexer.digit^1 -lex:add_rule('preproc', lex:tag(lexer.PREPROCESSOR, '%' * (pp_word + pp_symbol))) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('+-/*%<>!=^&|~:,()[]'))) - --- Fold points. -lex:add_fold_point(lexer.PREPROCESSOR, '%if', '%endif') -lex:add_fold_point(lexer.PREPROCESSOR, '%macro', '%endmacro') -lex:add_fold_point(lexer.PREPROCESSOR, '%rep', '%endrep') -lex:add_fold_point(lexer.PREPROCESSOR, '%while', '%endwhile') -lex:add_fold_point(lexer.KEYWORD, 'struc', 'endstruc') - --- Word lists. -lex:set_word_list(lexer.KEYWORD, { - -- Preprocessor macros. - 'struc', 'endstruc', 'istruc', 'at', 'iend', 'align', 'alignb', 'sectalign', '.nolist', - -- Preprocessor Packages. - 'altreg', 'smartalign', 'fp', 'ifunc', - -- Directives. - 'absolute', 'bits', 'class', 'common', 'common', 'cpu', 'default', 'export', 'extern', 'float', - 'global', 'group', 'import', 'osabi', 'overlay', 'private', 'public', '__SECT__', 'section', - 'segment', 'stack', 'use16', 'use32', 'use64', - -- Section Names. - '.bss', '.comment', '.data', '.lbss', '.ldata', '.lrodata', '.rdata', '.rodata', '.tbss', - '.tdata', '.text', - -- Section Qualifiers. 
- 'alloc', 'bss', 'code', 'exec', 'data', 'noalloc', 'nobits', 'noexec', 'nowrite', 'progbits', - 'rdata', 'tls', 'write', - -- Operators. - 'abs', 'rel', 'seg', 'wrt', 'strict', '__utf16__', '__utf16be__', '__utf16le__', '__utf32__', - '__utf32be__', '__utf32le__' -}) - --- awk '{print $1}'|uniq|tr '[:upper:]' '[:lower:]'| --- lua -e "for l in io.lines() do print(\"'\"..l..\"',\") end"|fmt -w 98 -lex:set_word_list('instruction', { - -- Special Instructions. - 'db', 'dd', 'do', 'dq', 'dt', 'dw', 'dy', 'resb', 'resd', 'reso', 'resq', 'rest', 'resw', 'resy', - -- Conventional Instructions. - 'aaa', 'aad', 'aam', 'aas', 'adc', 'add', 'and', 'arpl', 'bb0_reset', 'bb1_reset', 'bound', 'bsf', - 'bsr', 'bswap', 'bt', 'btc', 'btr', 'bts', 'call', 'cbw', 'cdq', 'cdqe', 'clc', 'cld', 'cli', - 'clts', 'cmc', 'cmp', 'cmpsb', 'cmpsd', 'cmpsq', 'cmpsw', 'cmpxchg', 'cmpxchg486', 'cmpxchg8b', - 'cmpxchg16b', 'cpuid', 'cpu_read', 'cpu_write', 'cqo', 'cwd', 'cwde', 'daa', 'das', 'dec', 'div', - 'dmint', 'emms', 'enter', 'equ', 'f2xm1', 'fabs', 'fadd', 'faddp', 'fbld', 'fbstp', 'fchs', - 'fclex', 'fcmovb', 'fcmovbe', 'fcmove', 'fcmovnb', 'fcmovnbe', 'fcmovne', 'fcmovnu', 'fcmovu', - 'fcom', 'fcomi', 'fcomip', 'fcomp', 'fcompp', 'fcos', 'fdecstp', 'fdisi', 'fdiv', 'fdivp', - 'fdivr', 'fdivrp', 'femms', 'feni', 'ffree', 'ffreep', 'fiadd', 'ficom', 'ficomp', 'fidiv', - 'fidivr', 'fild', 'fimul', 'fincstp', 'finit', 'fist', 'fistp', 'fisttp', 'fisub', 'fisubr', - 'fld', 'fld1', 'fldcw', 'fldenv', 'fldl2e', 'fldl2t', 'fldlg2', 'fldln2', 'fldpi', 'fldz', 'fmul', - 'fmulp', 'fnclex', 'fndisi', 'fneni', 'fninit', 'fnop', 'fnsave', 'fnstcw', 'fnstenv', 'fnstsw', - 'fpatan', 'fprem', 'fprem1', 'fptan', 'frndint', 'frstor', 'fsave', 'fscale', 'fsetpm', 'fsin', - 'fsincos', 'fsqrt', 'fst', 'fstcw', 'fstenv', 'fstp', 'fstsw', 'fsub', 'fsubp', 'fsubr', 'fsubrp', - 'ftst', 'fucom', 'fucomi', 'fucomip', 'fucomp', 'fucompp', 'fxam', 'fxch', 'fxtract', 'fyl2x', - 'fyl2xp1', 'hlt', 'ibts', 'icebp', 
'idiv', 'imul', 'in', 'inc', 'incbin', 'insb', 'insd', 'insw', - 'int', 'int01', 'int1', 'int03', 'int3', 'into', 'invd', 'invpcid', 'invlpg', 'invlpga', 'iret', - 'iretd', 'iretq', 'iretw', 'jcxz', 'jecxz', 'jrcxz', 'jmp', 'jmpe', 'lahf', 'lar', 'lds', 'lea', - 'leave', 'les', 'lfence', 'lfs', 'lgdt', 'lgs', 'lidt', 'lldt', 'lmsw', 'loadall', 'loadall286', - 'lodsb', 'lodsd', 'lodsq', 'lodsw', 'loop', 'loope', 'loopne', 'loopnz', 'loopz', 'lsl', 'lss', - 'ltr', 'mfence', 'monitor', 'mov', 'movd', 'movq', 'movsb', 'movsd', 'movsq', 'movsw', 'movsx', - 'movsxd', 'movsx', 'movzx', 'mul', 'mwait', 'neg', 'nop', 'not', 'or', 'out', 'outsb', 'outsd', - 'outsw', 'packssdw', 'packsswb', 'packuswb', 'paddb', 'paddd', 'paddsb', 'paddsiw', 'paddsw', - 'paddusb', 'paddusw', 'paddw', 'pand', 'pandn', 'pause', 'paveb', 'pavgusb', 'pcmpeqb', 'pcmpeqd', - 'pcmpeqw', 'pcmpgtb', 'pcmpgtd', 'pcmpgtw', 'pdistib', 'pf2id', 'pfacc', 'pfadd', 'pfcmpeq', - 'pfcmpge', 'pfcmpgt', 'pfmax', 'pfmin', 'pfmul', 'pfrcp', 'pfrcpit1', 'pfrcpit2', 'pfrsqit1', - 'pfrsqrt', 'pfsub', 'pfsubr', 'pi2fd', 'pmachriw', 'pmaddwd', 'pmagw', 'pmulhriw', 'pmulhrwa', - 'pmulhrwc', 'pmulhw', 'pmullw', 'pmvgezb', 'pmvlzb', 'pmvnzb', 'pmvzb', 'pop', 'popa', 'popad', - 'popaw', 'popf', 'popfd', 'popfq', 'popfw', 'por', 'prefetch', 'prefetchw', 'pslld', 'psllq', - 'psllw', 'psrad', 'psraw', 'psrld', 'psrlq', 'psrlw', 'psubb', 'psubd', 'psubsb', 'psubsiw', - 'psubsw', 'psubusb', 'psubusw', 'psubw', 'punpckhbw', 'punpckhdq', 'punpckhwd', 'punpcklbw', - 'punpckldq', 'punpcklwd', 'push', 'pusha', 'pushad', 'pushaw', 'pushf', 'pushfd', 'pushfq', - 'pushfw', 'pxor', 'rcl', 'rcr', 'rdshr', 'rdmsr', 'rdpmc', 'rdtsc', 'rdtscp', 'ret', 'retf', - 'retn', 'rol', 'ror', 'rdm', 'rsdc', 'rsldt', 'rsm', 'rsts', 'sahf', 'sal', 'salc', 'sar', 'sbb', - 'scasb', 'scasd', 'scasq', 'scasw', 'sfence', 'sgdt', 'shl', 'shld', 'shr', 'shrd', 'sidt', - 'sldt', 'skinit', 'smi', 'smint', 'smintold', 'smsw', 'stc', 'std', 'sti', 'stosb', 
'stosd', - 'stosq', 'stosw', 'str', 'sub', 'svdc', 'svldt', 'svts', 'swapgs', 'syscall', 'sysenter', - 'sysexit', 'sysret', 'test', 'ud0', 'ud1', 'ud2b', 'ud2', 'ud2a', 'umov', 'verr', 'verw', 'fwait', - 'wbinvd', 'wrshr', 'wrmsr', 'xadd', 'xbts', 'xchg', 'xlatb', 'xlat', 'xor', 'xor', 'cmova', - 'cmovae', 'cmovb', 'cmovbe', 'cmovc', 'cmove', 'cmovg', 'cmovge', 'cmovl', 'cmovle', 'cmovna', - 'cmovnae', 'cmovnb', 'cmovnbe', 'cmovnc', 'cmovne', 'cmovng', 'cmovnge', 'cmovnl', 'cmovnle', - 'cmovno', 'cmovnp', 'cmovns', 'cmovnz', 'cmovo', 'cmovp', 'cmovpe', 'cmovpo', 'cmovs', 'cmovz', - 'cmovcc', 'ja', 'jae', 'jb', 'jbe', 'jc', 'je', 'jg', 'jge', 'jl', 'jle', 'jna', 'jnae', 'jnb', - 'jnbe', 'jnc', 'jne', 'jng', 'jnge', 'jnl', 'jnle', 'jno', 'jnp', 'jns', 'jnz', 'jo', 'jp', 'jpe', - 'jpo', 'js', 'jz', 'seta', 'setae', 'setb', 'setbe', 'setc', 'sete', 'setg', 'setge', 'setl', - 'setle', 'setna', 'setnae', 'setnb', 'setnbe', 'setnc', 'setne', 'setng', 'setnge', 'setnl', - 'setnle', 'setno', 'setnp', 'setns', 'setnz', 'seto', 'setp', 'setpe', 'setpo', 'sets', 'setz', - -- Katmai Streaming SIMD instructions (SSE -- a.k.a. KNI XMM MMX2). - 'addps', 'addss', 'andnps', 'andps', 'cmpeqps', 'cmpeqss', 'cmpleps', 'cmpless', 'cmpltps', - 'cmpltss', 'cmpneqps', 'cmpneqss', 'cmpnleps', 'cmpnless', 'cmpnltps', 'cmpnltss', 'cmpordps', - 'cmpordss', 'cmpunordps', 'cmpunordss', 'cmpps', 'cmpss', 'comiss', 'cvtpi2ps', 'cvtps2pi', - 'cvtsi2ss', 'cvtss2si', 'cvttps2pi', 'cvttss2si', 'divps', 'divss', 'ldmxcsr', 'maxps', 'maxss', - 'minps', 'minss', 'movaps', 'movhps', 'movlhps', 'movlps', 'movhlps', 'movmskps', 'movntps', - 'movss', 'movups', 'mulps', 'mulss', 'orps', 'rcpps', 'rcpss', 'rsqrtps', 'rsqrtss', 'shufps', - 'sqrtps', 'sqrtss', 'stmxcsr', 'subps', 'subss', 'ucomiss', 'unpckhps', 'unpcklps', 'xorps', - -- Introduced in Deschutes but necessary for SSE support. - 'fxrstor', 'fxrstor64', 'fxsave', 'fxsave64', - -- XSAVE group (AVX and extended state). 
- 'xgetbv', 'xsetbv', 'xsave', 'xsave64', 'xsaveopt', 'xsaveopt64', 'xrstor', 'xrstor64', - -- Generic memory operations. - 'prefetchnta', 'prefetcht0', 'prefetcht1', 'prefetcht2', 'sfence', - -- New MMX instructions introduced in Katmai. - 'maskmovq', 'movntq', 'pavgb', 'pavgw', 'pextrw', 'pinsrw', 'pmaxsw', 'pmaxub', 'pminsw', - 'pminub', 'pmovmskb', 'pmulhuw', 'psadbw', 'pshufw', - -- AMD Enhanced 3DNow! (Athlon) instructions. - 'pf2iw', 'pfnacc', 'pfpnacc', 'pi2fw', 'pswapd', - -- Willamette SSE2 Cacheability Instructions. - 'maskmovdqu', 'clflush', 'movntdq', 'movnti', 'movntpd', 'lfence', 'mfence', - -- Willamette MMX instructions (SSE2 SIMD Integer Instructions). - 'movd', 'movdqa', 'movdqu', 'movdq2q', 'movq', 'movq2dq', 'packsswb', 'packssdw', 'packuswb', - 'paddb', 'paddw', 'paddd', 'paddq', 'paddsb', 'paddsw', 'paddusb', 'paddusw', 'pand', 'pandn', - 'pavgb', 'pavgw', 'pcmpeqb', 'pcmpeqw', 'pcmpeqd', 'pcmpgtb', 'pcmpgtw', 'pcmpgtd', 'pextrw', - 'pinsrw', 'pmaddwd', 'pmaxsw', 'pmaxub', 'pminsw', 'pminub', 'pmovmskb', 'pmulhuw', 'pmulhw', - 'pmullw', 'pmuludq', 'por', 'psadbw', 'pshufd', 'pshufhw', 'pshuflw', 'pslldq', 'psllw', 'pslld', - 'psllq', 'psraw', 'psrad', 'psrldq', 'psrlw', 'psrld', 'psrlq', 'psubb', 'psubw', 'psubd', - 'psubq', 'psubsb', 'psubsw', 'psubusb', 'psubusw', 'punpckhbw', 'punpckhwd', 'punpckhdq', - 'punpckhqdq', 'punpcklbw', 'punpcklwd', 'punpckldq', 'punpcklqdq', 'pxor', - -- Willamette Streaming SIMD instructions (SSE2). 
- 'addpd', 'addsd', 'andnpd', 'andpd', 'cmpeqpd', 'cmpeqsd', 'cmplepd', 'cmplesd', 'cmpltpd', - 'cmpltsd', 'cmpneqpd', 'cmpneqsd', 'cmpnlepd', 'cmpnlesd', 'cmpnltpd', 'cmpnltsd', 'cmpordpd', - 'cmpordsd', 'cmpunordpd', 'cmpunordsd', 'cmppd', 'cmpsd', 'comisd', 'cvtdq2pd', 'cvtdq2ps', - 'cvtpd2dq', 'cvtpd2pi', 'cvtpd2ps', 'cvtpi2pd', 'cvtps2dq', 'cvtps2pd', 'cvtsd2si', 'cvtsd2ss', - 'cvtsi2sd', 'cvtss2sd', 'cvttpd2pi', 'cvttpd2dq', 'cvttps2dq', 'cvttsd2si', 'divpd', 'divsd', - 'maxpd', 'maxsd', 'minpd', 'minsd', 'movapd', 'movhpd', 'movlpd', 'movmskpd', 'movsd', 'movupd', - 'mulpd', 'mulsd', 'orpd', 'shufpd', 'sqrtpd', 'sqrtsd', 'subpd', 'subsd', 'ucomisd', 'unpckhpd', - 'unpcklpd', 'xorpd', - -- Prescott New Instructions (SSE3). - 'addsubpd', 'addsubps', 'haddpd', 'haddps', 'hsubpd', 'hsubps', 'lddqu', 'movddup', 'movshdup', - 'movsldup', - -- VMX/SVM Instructions. - 'clgi', 'stgi', 'vmcall', 'vmclear', 'vmfunc', 'vmlaunch', 'vmload', 'vmmcall', 'vmptrld', - 'vmptrst', 'vmread', 'vmresume', 'vmrun', 'vmsave', 'vmwrite', 'vmxoff', 'vmxon', - -- Extended Page Tables VMX instructions. - 'invept', 'invvpid', - -- Tejas New Instructions (SSSE3). - 'pabsb', 'pabsw', 'pabsd', 'palignr', 'phaddw', 'phaddd', 'phaddsw', 'phsubw', 'phsubd', - 'phsubsw', 'pmaddubsw', 'pmulhrsw', 'pshufb', 'psignb', 'psignw', 'psignd', - -- AMD SSE4A. - 'extrq', 'insertq', 'movntsd', 'movntss', - -- New instructions in Barcelona. - 'lzcnt', - -- Penryn New Instructions (SSE4.1). 
- 'blendpd', 'blendps', 'blendvpd', 'blendvps', 'dppd', 'dpps', 'extractps', 'insertps', 'movntdqa', - 'mpsadbw', 'packusdw', 'pblendvb', 'pblendw', 'pcmpeqq', 'pextrb', 'pextrd', 'pextrq', 'pextrw', - 'phminposuw', 'pinsrb', 'pinsrd', 'pinsrq', 'pmaxsb', 'pmaxsd', 'pmaxud', 'pmaxuw', 'pminsb', - 'pminsd', 'pminud', 'pminuw', 'pmovsxbw', 'pmovsxbd', 'pmovsxbq', 'pmovsxwd', 'pmovsxwq', - 'pmovsxdq', 'pmovzxbw', 'pmovzxbd', 'pmovzxbq', 'pmovzxwd', 'pmovzxwq', 'pmovzxdq', 'pmuldq', - 'pmulld', 'ptest', 'roundpd', 'roundps', 'roundsd', 'roundss', - -- Nehalem New Instructions (SSE4.2). - 'crc32', 'pcmpestri', 'pcmpestrm', 'pcmpistri', 'pcmpistrm', 'pcmpgtq', 'popcnt', - -- Intel SMX. - 'getsec', - -- Geode (Cyrix) 3DNow! additions. - 'pfrcpv', 'pfrsqrtv', - -- Intel new instructions in ???. - 'movbe', - -- Intel AES instructions. - 'aesenc', 'aesenclast', 'aesdec', 'aesdeclast', 'aesimc', 'aeskeygenassist', - -- Intel AVX AES instructions. - 'vaesenc', 'vaesenclast', 'vaesdec', 'vaesdeclast', 'vaesimc', 'vaeskeygenassist', - -- Intel AVX instructions. 
- 'vaddpd', 'vaddps', 'vaddsd', 'vaddss', 'vaddsubpd', 'vaddsubps', 'vandpd', 'vandps', 'vandnpd', - 'vandnps', 'vblendpd', 'vblendps', 'vblendvpd', 'vblendvps', 'vbroadcastss', 'vbroadcastsd', - 'vbroadcastf128', 'vcmpeq_ospd', 'vcmpeqpd', 'vcmplt_ospd', 'vcmpltpd', 'vcmple_ospd', 'vcmplepd', - 'vcmpunord_qpd', 'vcmpunordpd', 'vcmpneq_uqpd', 'vcmpneqpd', 'vcmpnlt_uspd', 'vcmpnltpd', - 'vcmpnle_uspd', 'vcmpnlepd', 'vcmpord_qpd', 'vcmpordpd', 'vcmpeq_uqpd', 'vcmpnge_uspd', - 'vcmpngepd', 'vcmpngt_uspd', 'vcmpngtpd', 'vcmpfalse_oqpd', 'vcmpfalsepd', 'vcmpneq_oqpd', - 'vcmpge_ospd', 'vcmpgepd', 'vcmpgt_ospd', 'vcmpgtpd', 'vcmptrue_uqpd', 'vcmptruepd', - 'vcmpeq_ospd', 'vcmplt_oqpd', 'vcmple_oqpd', 'vcmpunord_spd', 'vcmpneq_uspd', 'vcmpnlt_uqpd', - 'vcmpnle_uqpd', 'vcmpord_spd', 'vcmpeq_uspd', 'vcmpnge_uqpd', 'vcmpngt_uqpd', 'vcmpfalse_ospd', - 'vcmpneq_ospd', 'vcmpge_oqpd', 'vcmpgt_oqpd', 'vcmptrue_uspd', 'vcmppd', 'vcmpeq_osps', - 'vcmpeqps', 'vcmplt_osps', 'vcmpltps', 'vcmple_osps', 'vcmpleps', 'vcmpunord_qps', 'vcmpunordps', - 'vcmpneq_uqps', 'vcmpneqps', 'vcmpnlt_usps', 'vcmpnltps', 'vcmpnle_usps', 'vcmpnleps', - 'vcmpord_qps', 'vcmpordps', 'vcmpeq_uqps', 'vcmpnge_usps', 'vcmpngeps', 'vcmpngt_usps', - 'vcmpngtps', 'vcmpfalse_oqps', 'vcmpfalseps', 'vcmpneq_oqps', 'vcmpge_osps', 'vcmpgeps', - 'vcmpgt_osps', 'vcmpgtps', 'vcmptrue_uqps', 'vcmptrueps', 'vcmpeq_osps', 'vcmplt_oqps', - 'vcmple_oqps', 'vcmpunord_sps', 'vcmpneq_usps', 'vcmpnlt_uqps', 'vcmpnle_uqps', 'vcmpord_sps', - 'vcmpeq_usps', 'vcmpnge_uqps', 'vcmpngt_uqps', 'vcmpfalse_osps', 'vcmpneq_osps', 'vcmpge_oqps', - 'vcmpgt_oqps', 'vcmptrue_usps', 'vcmpps', 'vcmpeq_ossd', 'vcmpeqsd', 'vcmplt_ossd', 'vcmpltsd', - 'vcmple_ossd', 'vcmplesd', 'vcmpunord_qsd', 'vcmpunordsd', 'vcmpneq_uqsd', 'vcmpneqsd', - 'vcmpnlt_ussd', 'vcmpnltsd', 'vcmpnle_ussd', 'vcmpnlesd', 'vcmpord_qsd', 'vcmpordsd', - 'vcmpeq_uqsd', 'vcmpnge_ussd', 'vcmpngesd', 'vcmpngt_ussd', 'vcmpngtsd', 'vcmpfalse_oqsd', - 'vcmpfalsesd', 'vcmpneq_oqsd', 
'vcmpge_ossd', 'vcmpgesd', 'vcmpgt_ossd', 'vcmpgtsd', - 'vcmptrue_uqsd', 'vcmptruesd', 'vcmpeq_ossd', 'vcmplt_oqsd', 'vcmple_oqsd', 'vcmpunord_ssd', - 'vcmpneq_ussd', 'vcmpnlt_uqsd', 'vcmpnle_uqsd', 'vcmpord_ssd', 'vcmpeq_ussd', 'vcmpnge_uqsd', - 'vcmpngt_uqsd', 'vcmpfalse_ossd', 'vcmpneq_ossd', 'vcmpge_oqsd', 'vcmpgt_oqsd', 'vcmptrue_ussd', - 'vcmpsd', 'vcmpeq_osss', 'vcmpeqss', 'vcmplt_osss', 'vcmpltss', 'vcmple_osss', 'vcmpless', - 'vcmpunord_qss', 'vcmpunordss', 'vcmpneq_uqss', 'vcmpneqss', 'vcmpnlt_usss', 'vcmpnltss', - 'vcmpnle_usss', 'vcmpnless', 'vcmpord_qss', 'vcmpordss', 'vcmpeq_uqss', 'vcmpnge_usss', - 'vcmpngess', 'vcmpngt_usss', 'vcmpngtss', 'vcmpfalse_oqss', 'vcmpfalsess', 'vcmpneq_oqss', - 'vcmpge_osss', 'vcmpgess', 'vcmpgt_osss', 'vcmpgtss', 'vcmptrue_uqss', 'vcmptruess', - 'vcmpeq_osss', 'vcmplt_oqss', 'vcmple_oqss', 'vcmpunord_sss', 'vcmpneq_usss', 'vcmpnlt_uqss', - 'vcmpnle_uqss', 'vcmpord_sss', 'vcmpeq_usss', 'vcmpnge_uqss', 'vcmpngt_uqss', 'vcmpfalse_osss', - 'vcmpneq_osss', 'vcmpge_oqss', 'vcmpgt_oqss', 'vcmptrue_usss', 'vcmpss', 'vcomisd', 'vcomiss', - 'vcvtdq2pd', 'vcvtdq2ps', 'vcvtpd2dq', 'vcvtpd2ps', 'vcvtps2dq', 'vcvtps2pd', 'vcvtsd2si', - 'vcvtsd2ss', 'vcvtsi2sd', 'vcvtsi2ss', 'vcvtss2sd', 'vcvtss2si', 'vcvttpd2dq', 'vcvttps2dq', - 'vcvttsd2si', 'vcvttss2si', 'vdivpd', 'vdivps', 'vdivsd', 'vdivss', 'vdppd', 'vdpps', - 'vextractf128', 'vextractps', 'vhaddpd', 'vhaddps', 'vhsubpd', 'vhsubps', 'vinsertf128', - 'vinsertps', 'vlddqu', 'vldqqu', 'vlddqu', 'vldmxcsr', 'vmaskmovdqu', 'vmaskmovps', 'vmaskmovpd', - 'vmaxpd', 'vmaxps', 'vmaxsd', 'vmaxss', 'vminpd', 'vminps', 'vminsd', 'vminss', 'vmovapd', - 'vmovaps', 'vmovd', 'vmovq', 'vmovddup', 'vmovdqa', 'vmovqqa', 'vmovdqa', 'vmovdqu', 'vmovqqu', - 'vmovdqu', 'vmovhlps', 'vmovhpd', 'vmovhps', 'vmovlhps', 'vmovlpd', 'vmovlps', 'vmovmskpd', - 'vmovmskps', 'vmovntdq', 'vmovntqq', 'vmovntdq', 'vmovntdqa', 'vmovntpd', 'vmovntps', 'vmovsd', - 'vmovshdup', 'vmovsldup', 'vmovss', 'vmovupd', 'vmovups', 
'vmpsadbw', 'vmulpd', 'vmulps', - 'vmulsd', 'vmulss', 'vorpd', 'vorps', 'vpabsb', 'vpabsw', 'vpabsd', 'vpacksswb', 'vpackssdw', - 'vpackuswb', 'vpackusdw', 'vpaddb', 'vpaddw', 'vpaddd', 'vpaddq', 'vpaddsb', 'vpaddsw', - 'vpaddusb', 'vpaddusw', 'vpalignr', 'vpand', 'vpandn', 'vpavgb', 'vpavgw', 'vpblendvb', - 'vpblendw', 'vpcmpestri', 'vpcmpestrm', 'vpcmpistri', 'vpcmpistrm', 'vpcmpeqb', 'vpcmpeqw', - 'vpcmpeqd', 'vpcmpeqq', 'vpcmpgtb', 'vpcmpgtw', 'vpcmpgtd', 'vpcmpgtq', 'vpermilpd', 'vpermilps', - 'vperm2f128', 'vpextrb', 'vpextrw', 'vpextrd', 'vpextrq', 'vphaddw', 'vphaddd', 'vphaddsw', - 'vphminposuw', 'vphsubw', 'vphsubd', 'vphsubsw', 'vpinsrb', 'vpinsrw', 'vpinsrd', 'vpinsrq', - 'vpmaddwd', 'vpmaddubsw', 'vpmaxsb', 'vpmaxsw', 'vpmaxsd', 'vpmaxub', 'vpmaxuw', 'vpmaxud', - 'vpminsb', 'vpminsw', 'vpminsd', 'vpminub', 'vpminuw', 'vpminud', 'vpmovmskb', 'vpmovsxbw', - 'vpmovsxbd', 'vpmovsxbq', 'vpmovsxwd', 'vpmovsxwq', 'vpmovsxdq', 'vpmovzxbw', 'vpmovzxbd', - 'vpmovzxbq', 'vpmovzxwd', 'vpmovzxwq', 'vpmovzxdq', 'vpmulhuw', 'vpmulhrsw', 'vpmulhw', 'vpmullw', - 'vpmulld', 'vpmuludq', 'vpmuldq', 'vpor', 'vpsadbw', 'vpshufb', 'vpshufd', 'vpshufhw', 'vpshuflw', - 'vpsignb', 'vpsignw', 'vpsignd', 'vpslldq', 'vpsrldq', 'vpsllw', 'vpslld', 'vpsllq', 'vpsraw', - 'vpsrad', 'vpsrlw', 'vpsrld', 'vpsrlq', 'vptest', 'vpsubb', 'vpsubw', 'vpsubd', 'vpsubq', - 'vpsubsb', 'vpsubsw', 'vpsubusb', 'vpsubusw', 'vpunpckhbw', 'vpunpckhwd', 'vpunpckhdq', - 'vpunpckhqdq', 'vpunpcklbw', 'vpunpcklwd', 'vpunpckldq', 'vpunpcklqdq', 'vpxor', 'vrcpps', - 'vrcpss', 'vrsqrtps', 'vrsqrtss', 'vroundpd', 'vroundps', 'vroundsd', 'vroundss', 'vshufpd', - 'vshufps', 'vsqrtpd', 'vsqrtps', 'vsqrtsd', 'vsqrtss', 'vstmxcsr', 'vsubpd', 'vsubps', 'vsubsd', - 'vsubss', 'vtestps', 'vtestpd', 'vucomisd', 'vucomiss', 'vunpckhpd', 'vunpckhps', 'vunpcklpd', - 'vunpcklps', 'vxorpd', 'vxorps', 'vzeroall', 'vzeroupper', - -- Intel Carry-Less Multiplication instructions (CLMUL). 
- 'pclmullqlqdq', 'pclmulhqlqdq', 'pclmullqhqdq', 'pclmulhqhqdq', 'pclmulqdq', - -- Intel AVX Carry-Less Multiplication instructions (CLMUL). - 'vpclmullqlqdq', 'vpclmulhqlqdq', 'vpclmullqhqdq', 'vpclmulhqhqdq', 'vpclmulqdq', - -- Intel Fused Multiply-Add instructions (FMA). - 'vfmadd132ps', 'vfmadd132pd', 'vfmadd312ps', 'vfmadd312pd', 'vfmadd213ps', 'vfmadd213pd', - 'vfmadd123ps', 'vfmadd123pd', 'vfmadd231ps', 'vfmadd231pd', 'vfmadd321ps', 'vfmadd321pd', - 'vfmaddsub132ps', 'vfmaddsub132pd', 'vfmaddsub312ps', 'vfmaddsub312pd', 'vfmaddsub213ps', - 'vfmaddsub213pd', 'vfmaddsub123ps', 'vfmaddsub123pd', 'vfmaddsub231ps', 'vfmaddsub231pd', - 'vfmaddsub321ps', 'vfmaddsub321pd', 'vfmsub132ps', 'vfmsub132pd', 'vfmsub312ps', 'vfmsub312pd', - 'vfmsub213ps', 'vfmsub213pd', 'vfmsub123ps', 'vfmsub123pd', 'vfmsub231ps', 'vfmsub231pd', - 'vfmsub321ps', 'vfmsub321pd', 'vfmsubadd132ps', 'vfmsubadd132pd', 'vfmsubadd312ps', - 'vfmsubadd312pd', 'vfmsubadd213ps', 'vfmsubadd213pd', 'vfmsubadd123ps', 'vfmsubadd123pd', - 'vfmsubadd231ps', 'vfmsubadd231pd', 'vfmsubadd321ps', 'vfmsubadd321pd', 'vfnmadd132ps', - 'vfnmadd132pd', 'vfnmadd312ps', 'vfnmadd312pd', 'vfnmadd213ps', 'vfnmadd213pd', 'vfnmadd123ps', - 'vfnmadd123pd', 'vfnmadd231ps', 'vfnmadd231pd', 'vfnmadd321ps', 'vfnmadd321pd', 'vfnmsub132ps', - 'vfnmsub132pd', 'vfnmsub312ps', 'vfnmsub312pd', 'vfnmsub213ps', 'vfnmsub213pd', 'vfnmsub123ps', - 'vfnmsub123pd', 'vfnmsub231ps', 'vfnmsub231pd', 'vfnmsub321ps', 'vfnmsub321pd', 'vfmadd132ss', - 'vfmadd132sd', 'vfmadd312ss', 'vfmadd312sd', 'vfmadd213ss', 'vfmadd213sd', 'vfmadd123ss', - 'vfmadd123sd', 'vfmadd231ss', 'vfmadd231sd', 'vfmadd321ss', 'vfmadd321sd', 'vfmsub132ss', - 'vfmsub132sd', 'vfmsub312ss', 'vfmsub312sd', 'vfmsub213ss', 'vfmsub213sd', 'vfmsub123ss', - 'vfmsub123sd', 'vfmsub231ss', 'vfmsub231sd', 'vfmsub321ss', 'vfmsub321sd', 'vfnmadd132ss', - 'vfnmadd132sd', 'vfnmadd312ss', 'vfnmadd312sd', 'vfnmadd213ss', 'vfnmadd213sd', 'vfnmadd123ss', - 'vfnmadd123sd', 'vfnmadd231ss', 
'vfnmadd231sd', 'vfnmadd321ss', 'vfnmadd321sd', 'vfnmsub132ss', - 'vfnmsub132sd', 'vfnmsub312ss', 'vfnmsub312sd', 'vfnmsub213ss', 'vfnmsub213sd', 'vfnmsub123ss', - 'vfnmsub123sd', 'vfnmsub231ss', 'vfnmsub231sd', 'vfnmsub321ss', 'vfnmsub321sd', - -- Intel post-32 nm processor instructions. - 'rdfsbase', 'rdgsbase', 'rdrand', 'wrfsbase', 'wrgsbase', 'vcvtph2ps', 'vcvtps2ph', 'adcx', - 'adox', 'rdseed', 'clac', 'stac', - -- VIA (Centaur) security instructions. - 'xstore', 'xcryptecb', 'xcryptcbc', 'xcryptctr', 'xcryptcfb', 'xcryptofb', 'montmul', 'xsha1', - 'xsha256', - -- AMD Lightweight Profiling (LWP) instructions. - 'llwpcb', 'slwpcb', 'lwpval', 'lwpins', - -- AMD XOP and FMA4 instructions (SSE5). - 'vfmaddpd', 'vfmaddps', 'vfmaddsd', 'vfmaddss', 'vfmaddsubpd', 'vfmaddsubps', 'vfmsubaddpd', - 'vfmsubaddps', 'vfmsubpd', 'vfmsubps', 'vfmsubsd', 'vfmsubss', 'vfnmaddpd', 'vfnmaddps', - 'vfnmaddsd', 'vfnmaddss', 'vfnmsubpd', 'vfnmsubps', 'vfnmsubsd', 'vfnmsubss', 'vfrczpd', - 'vfrczps', 'vfrczsd', 'vfrczss', 'vpcmov', 'vpcomb', 'vpcomd', 'vpcomq', 'vpcomub', 'vpcomud', - 'vpcomuq', 'vpcomuw', 'vpcomw', 'vphaddbd', 'vphaddbq', 'vphaddbw', 'vphadddq', 'vphaddubd', - 'vphaddubq', 'vphaddubw', 'vphaddudq', 'vphadduwd', 'vphadduwq', 'vphaddwd', 'vphaddwq', - 'vphsubbw', 'vphsubdq', 'vphsubwd', 'vpmacsdd', 'vpmacsdqh', 'vpmacsdql', 'vpmacssdd', - 'vpmacssdqh', 'vpmacssdql', 'vpmacsswd', 'vpmacssww', 'vpmacswd', 'vpmacsww', 'vpmadcsswd', - 'vpmadcswd', 'vpperm', 'vprotb', 'vprotd', 'vprotq', 'vprotw', 'vpshab', 'vpshad', 'vpshaq', - 'vpshaw', 'vpshlb', 'vpshld', 'vpshlq', 'vpshlw', - -- Intel AVX2 instructions. 
- 'vmpsadbw', 'vpabsb', 'vpabsw', 'vpabsd', 'vpacksswb', 'vpackssdw', 'vpackusdw', 'vpackuswb', - 'vpaddb', 'vpaddw', 'vpaddd', 'vpaddq', 'vpaddsb', 'vpaddsw', 'vpaddusb', 'vpaddusw', 'vpalignr', - 'vpand', 'vpandn', 'vpavgb', 'vpavgw', 'vpblendvb', 'vpblendw', 'vpcmpeqb', 'vpcmpeqw', - 'vpcmpeqd', 'vpcmpeqq', 'vpcmpgtb', 'vpcmpgtw', 'vpcmpgtd', 'vpcmpgtq', 'vphaddw', 'vphaddd', - 'vphaddsw', 'vphsubw', 'vphsubd', 'vphsubsw', 'vpmaddubsw', 'vpmaddwd', 'vpmaxsb', 'vpmaxsw', - 'vpmaxsd', 'vpmaxub', 'vpmaxuw', 'vpmaxud', 'vpminsb', 'vpminsw', 'vpminsd', 'vpminub', 'vpminuw', - 'vpminud', 'vpmovmskb', 'vpmovsxbw', 'vpmovsxbd', 'vpmovsxbq', 'vpmovsxwd', 'vpmovsxwq', - 'vpmovsxdq', 'vpmovzxbw', 'vpmovzxbd', 'vpmovzxbq', 'vpmovzxwd', 'vpmovzxwq', 'vpmovzxdq', - 'vpmuldq', 'vpmulhrsw', 'vpmulhuw', 'vpmulhw', 'vpmullw', 'vpmulld', 'vpmuludq', 'vpor', - 'vpsadbw', 'vpshufb', 'vpshufd', 'vpshufhw', 'vpshuflw', 'vpsignb', 'vpsignw', 'vpsignd', - 'vpslldq', 'vpsllw', 'vpslld', 'vpsllq', 'vpsraw', 'vpsrad', 'vpsrldq', 'vpsrlw', 'vpsrld', - 'vpsrlq', 'vpsubb', 'vpsubw', 'vpsubd', 'vpsubq', 'vpsubsb', 'vpsubsw', 'vpsubusb', 'vpsubusw', - 'vpunpckhbw', 'vpunpckhwd', 'vpunpckhdq', 'vpunpckhqdq', 'vpunpcklbw', 'vpunpcklwd', 'vpunpckldq', - 'vpunpcklqdq', 'vpxor', 'vmovntdqa', 'vbroadcastss', 'vbroadcastsd', 'vbroadcasti128', 'vpblendd', - 'vpbroadcastb', 'vpbroadcastw', 'vpbroadcastd', 'vpbroadcastq', 'vpermd', 'vpermpd', 'vpermps', - 'vpermq', 'vperm2i128', 'vextracti128', 'vinserti128', 'vpmaskmovd', 'vpmaskmovq', 'vpmaskmovd', - 'vpmaskmovq', 'vpsllvd', 'vpsllvq', 'vpsllvd', 'vpsllvq', 'vpsravd', 'vpsrlvd', 'vpsrlvq', - 'vpsrlvd', 'vpsrlvq', 'vgatherdpd', 'vgatherqpd', 'vgatherdpd', 'vgatherqpd', 'vgatherdps', - 'vgatherqps', 'vgatherdps', 'vgatherqps', 'vpgatherdd', 'vpgatherqd', 'vpgatherdd', 'vpgatherqd', - 'vpgatherdq', 'vpgatherqq', 'vpgatherdq', 'vpgatherqq', - -- Transactional Synchronization Extensions (TSX). 
- 'xabort', 'xbegin', 'xend', 'xtest', - -- Intel BMI1 and BMI2 instructions AMD TBM instructions. - 'andn', 'bextr', 'blci', 'blcic', 'blsi', 'blsic', 'blcfill', 'blsfill', 'blcmsk', 'blsmsk', - 'blsr', 'blcs', 'bzhi', 'mulx', 'pdep', 'pext', 'rorx', 'sarx', 'shlx', 'shrx', 'tzcnt', 'tzmsk', - 't1mskc', - -- Systematic names for the hinting nop instructions. - 'hint_nop0', 'hint_nop1', 'hint_nop2', 'hint_nop3', 'hint_nop4', 'hint_nop5', 'hint_nop6', - 'hint_nop7', 'hint_nop8', 'hint_nop9', 'hint_nop10', 'hint_nop11', 'hint_nop12', 'hint_nop13', - 'hint_nop14', 'hint_nop15', 'hint_nop16', 'hint_nop17', 'hint_nop18', 'hint_nop19', 'hint_nop20', - 'hint_nop21', 'hint_nop22', 'hint_nop23', 'hint_nop24', 'hint_nop25', 'hint_nop26', 'hint_nop27', - 'hint_nop28', 'hint_nop29', 'hint_nop30', 'hint_nop31', 'hint_nop32', 'hint_nop33', 'hint_nop34', - 'hint_nop35', 'hint_nop36', 'hint_nop37', 'hint_nop38', 'hint_nop39', 'hint_nop40', 'hint_nop41', - 'hint_nop42', 'hint_nop43', 'hint_nop44', 'hint_nop45', 'hint_nop46', 'hint_nop47', 'hint_nop48', - 'hint_nop49', 'hint_nop50', 'hint_nop51', 'hint_nop52', 'hint_nop53', 'hint_nop54', 'hint_nop55', - 'hint_nop56', 'hint_nop57', 'hint_nop58', 'hint_nop59', 'hint_nop60', 'hint_nop61', 'hint_nop62', - 'hint_nop63' -}) - -lex:set_word_list('register', { - -- 32-bit registers. - 'ah', 'al', 'ax', 'bh', 'bl', 'bp', 'bx', 'ch', 'cl', 'cx', 'dh', 'di', 'dl', 'dx', 'eax', 'ebx', - 'ebx', 'ecx', 'edi', 'edx', 'esi', 'esp', 'fs', 'mm0', 'mm1', 'mm2', 'mm3', 'mm4', 'mm5', 'mm6', - 'mm7', 'si', 'st0', 'st1', 'st2', 'st3', 'st4', 'st5', 'st6', 'st7', 'xmm0', 'xmm1', 'xmm2', - 'xmm3', 'xmm4', 'xmm5', 'xmm6', 'xmm7', 'ymm0', 'ymm1', 'ymm2', 'ymm3', 'ymm4', 'ymm5', 'ymm6', - 'ymm7', - -- 64-bit registers. 
- 'bpl', 'dil', 'gs', 'r8', 'r8b', 'r8w', 'r9', 'r9b', 'r9w', 'r10', 'r10b', 'r10w', 'r11', 'r11b', - 'r11w', 'r12', 'r12b', 'r12w', 'r13', 'r13b', 'r13w', 'r14', 'r14b', 'r14w', 'r15', 'r15b', - 'r15w', 'rax', 'rbp', 'rbx', 'rcx', 'rdi', 'rdx', 'rsi', 'rsp', 'sil', 'xmm8', 'xmm9', 'xmm10', - 'xmm11', 'xmm12', 'xmm13', 'xmm14', 'xmm15', 'ymm8', 'ymm9', 'ymm10', 'ymm11', 'ymm12', 'ymm13', - 'ymm14', 'ymm15' -}) - -lex:set_word_list('size', { - 'byte', 'word', 'dword', 'qword', 'tword', 'oword', 'yword', -- - 'a16', 'a32', 'a64', 'o16', 'o32', 'o64' -- instructions -}) - -lex:set_word_list(lexer.TYPE .. '.wrt', 'start gotpc gotoff gottpoff got plt sym tlsie') - -lex:set_word_list(lexer.CONSTANT_BUILTIN, { - '__float128h__', '__float128l__', '__float16__', '__float32__', '__float64__', '__float8__', - '__float80e__', '__float80m__', '__Infinity__', '__NaN__', '__QNaN__', '__SNaN__' -}) - -lex:set_word_list(lexer.PREPROCESSOR, { - 'arg', 'assign', 'clear', 'define', 'defstr', 'deftok', 'depend', 'elif', 'elifctx', 'elifdef', - 'elifempty', 'elifenv', 'elifid', 'elifidn', 'elifidni', 'elifmacro', 'elifn', 'elifnctx', - 'elifndef', 'elifnempty', 'elifnenv', 'elifnid', 'elifnidn', 'elifnidni', 'elifnmacro', - 'elifnnum', 'elifnstr', 'elifntoken', 'elifnum', 'elifstr', 'eliftoken', 'else', 'endif', - 'endmacro', 'endrep', 'endwhile', 'error', 'exitmacro', 'exitrep', 'exitwhile', 'fatal', 'final', - 'idefine', 'idefstr', 'ideftok', 'if', 'ifctx', 'ifdef', 'ifempty', 'ifenv', 'ifid', 'ifidn', - 'ifidni', 'ifmacro', 'ifn', 'ifnctx', 'ifndef', 'ifnempty', 'ifnenv', 'ifnid', 'ifnidn', - 'ifnidni', 'ifnmacro', 'ifnnum', 'ifnstr', 'ifntoken', 'ifnum', 'ifstr', 'iftoken', 'imacro', - 'include', 'ixdefine', 'line', 'local', 'macro', 'pathsearch', 'pop', 'push', 'rep', 'repl', - 'rmacro', 'rotate', 'stacksize', 'strcat', 'strlen', 'substr', 'undef', 'unmacro', 'use', - 'warning', 'while', 'xdefine' -}) - -lexer.property['scintillua.comment'] = ';' - -return lex diff --git 
a/share/vis/lexers/asp.lua b/share/vis/lexers/asp.lua @@ -1,31 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- ASP LPeg lexer. - -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local html = lexer.load('html') -local lex = lexer.new(..., {inherit = html}) -- proxy for HTML - --- Embedded VB. -local vb = lexer.load('vb') -local vb_start_rule = lex:tag(lexer.PREPROCESSOR, '<%' * P('=')^-1) -local vb_end_rule = lex:tag(lexer.PREPROCESSOR, '%>') -lex:embed(vb, vb_start_rule, vb_end_rule) - --- Embedded VBScript. -local vbs = lexer.load('vb', 'vbscript') -local script_element = lexer.word_match('script', true) -local vbs_start_rule = #('<' * script_element * (P(function(input, index) - if input:find('^%s+language%s*=%s*(["\'])vbscript%1', index) or - input:find('^%s+type%s*=%s*(["\'])text/vbscript%1', index) then return true end -end) + '>')) * html.embed_start_tag -- <script language="vbscript"> -local vbs_end_rule = #('</' * script_element * '>') * html.embed_end_tag -- </script> -lex:embed(vbs, vbs_start_rule, vbs_end_rule) - --- Fold points. -lex:add_fold_point(lexer.PREPROCESSOR, '<%', '%>') - -lexer.property['scintillua.comment'] = '<!--|-->' - -return lex diff --git a/share/vis/lexers/autohotkey.lua b/share/vis/lexers/autohotkey.lua @@ -1,164 +0,0 @@ --- Copyright 2022-2024 Mitchell. See LICENSE. --- AutoHotkey LPeg lexer. --- Contributed by Snoopy. - -local lexer = lexer -local P, S, B = lpeg.P, lpeg.S, lpeg.B - -local lex = lexer.new(...) - --- Keywords. -lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD, true))) - --- Variables. -lex:add_rule('variable', - lex:tag(lexer.VARIABLE_BUILTIN, 'A_' * lex:word_match(lexer.VARIABLE_BUILTIN, true))) - --- Constants. -lex:add_rule('constant', lex:tag(lexer.CONSTANT_BUILTIN, S('fF') * lexer.digit * (lexer.digit)^-1 + - lex:word_match(lexer.CONSTANT_BUILTIN, true))) - --- Functions. 
-local builtin_func = -B('.') * - lex:tag(lexer.FUNCTION_BUILTIN, lex:word_match(lexer.FUNCTION_BUILTIN, true)) -local func = lex:tag(lexer.FUNCTION, lexer.word) -local method = B('.') * lex:tag(lexer.FUNCTION_METHOD, lexer.word) -lex:add_rule('function', (builtin_func + method + func) * #(lexer.space^0 * '(')) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) - --- Comments. -local line_comment = lexer.to_eol(';') -local block_comment = lexer.range('/*', '*/') -lex:add_rule('comment', lex:tag(lexer.COMMENT, line_comment + block_comment)) - --- Preprocessor. -lex:add_rule('preprocessor', - lex:tag(lexer.PREPROCESSOR, '#' * lex:word_match(lexer.PREPROCESSOR, true))) - --- Strings. -local dq_str = lexer.range('"', true, false) -local sq_str = lexer.range("'", true, false) -lex:add_rule('string', lex:tag(lexer.STRING, dq_str + sq_str)) - --- Numbers. -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number)) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('~+-^*/&<>=?:()[]{}'))) - -lex:set_word_list(lexer.KEYWORD, { - 'as', 'and', 'class', 'contains', 'extends', 'false', 'in', 'is', 'IsSet', 'not', 'or', 'super', - 'true', 'unset', 'Break', 'Catch', 'Continue', 'Else', 'Finally', 'For', 'Global', 'Goto', 'If', - 'Local', 'Loop', 'Return', 'Static', 'Throw', 'Try', 'Until', 'While' -}) - -lex:set_word_list(lexer.FUNCTION_BUILTIN, { - 'Abs', 'AutoTrim', 'Asc', 'ASin', 'ACos', 'ATan', 'BlockInput', 'Ceil', 'Chr', 'Click', - 'ClipWait', 'ComObjActive', 'ComObjArray', 'ComObjConnect', 'ComObjCreate', 'ComObject', - 'ComObjEnwrap', 'ComObjUnwrap', 'ComObjError', 'ComObjFlags', 'ComObjGet', 'ComObjMissing', - 'ComObjParameter', 'ComObjQuery', 'ComObjType', 'ComObjValue', 'Control', 'ControlClick', - 'ControlFocus', 'ControlGet', 'ControlGetFocus', 'ControlGetPos', 'ControlGetText', 'ControlMove', - 'ControlSend', 'ControlSendRaw', 'ControlSetText', 'CoordMode', 'Cos', 'Critical', - 'DetectHiddenText', 
'DetectHiddenWindows', 'DllCall', 'Drive', 'DriveGet', 'DriveSpaceFree', - 'Edit', 'Else', 'EnvAdd', 'EnvDiv', 'EnvGet', 'EnvMult', 'EnvSet', 'EnvSub', 'EnvUpdate', - 'Exception', 'Exit', 'ExitApp', 'Exp', 'FileAppend', 'FileCopy', 'FileCopyDir', 'FileCreateDir', - 'FileCreateShortcut', 'FileDelete', 'FileEncoding', 'FileExist', 'FileInstall', 'FileGetAttrib', - 'FileGetShortcut', 'FileGetSize', 'FileGetTime', 'FileGetVersion', 'FileMove', 'FileMoveDir', - 'FileOpen', 'FileRead', 'FileReadLine', 'FileRecycle', 'FileRecycleEmpty', 'FileRemoveDir', - 'FileSelectFile', 'FileSelectFolder', 'FileSetAttrib', 'FileSetTime', 'Floor', 'Format', - 'FormatTime', 'Func', 'GetKeyName', 'GetKeyVK', 'GetKeySC', 'GetKeyState', 'GetKeyState', 'Gosub', - 'GroupActivate', 'GroupAdd', 'GroupClose', 'GroupDeactivate', 'Gui', 'GuiControl', - 'GuiControlGet', 'Hotkey', 'Hotstring', 'IfEqual', 'IfNotEqual', 'IfExist', 'IfNotExist', - 'IfGreater', 'IfGreaterOrEqual', 'IfInString', 'IfNotInString', 'IfLess', 'IfLessOrEqual', - 'IfMsgBox', 'IfWinActive', 'IfWinNotActive', 'IfWinExist', 'IfWinNotExist', 'IL_Create', 'IL_Add', - 'IL_Destroy', 'ImageSearch', 'IniDelete', 'IniRead', 'IniWrite', 'Input', 'InputBox', 'InputHook', - 'InStr', 'IsByRef', 'IsFunc', 'IsLabel', 'IsObject', 'IsSet', 'KeyHistory', 'KeyWait', - 'ListHotkeys', 'ListLines', 'ListVars', 'LoadPicture', 'Log', 'Ln', 'LV_Add', 'LV_Delete', - 'LV_DeleteCol', 'LV_GetCount', 'LV_GetNext', 'LV_GetText', 'LV_Insert', 'LV_InsertCol', - 'LV_Modify', 'LV_ModifyCol', 'LV_SetImageList', 'Max', 'Menu', 'MenuGetHandle', 'MenuGetName', - 'Min', 'Mod', 'MouseClick', 'MouseClickDrag', 'MouseGetPos', 'MouseMove', 'MsgBox', 'NumGet', - 'NumPut', 'ObjAddRef', 'ObjRelease', 'ObjBindMethod', 'ObjClone', 'ObjCount', 'ObjDelete', - 'ObjGetAddress', 'ObjGetCapacity', 'ObjHasKey', 'ObjInsert', 'ObjInsertAt', 'ObjLength', - 'ObjMaxIndex', 'ObjMinIndex', 'ObjNewEnum', 'ObjPop', 'ObjPush', 'ObjRemove', 'ObjRemoveAt', - 'ObjSetCapacity', 'ObjGetBase', 
'ObjRawGet', 'ObjRawSet', 'ObjSetBase', 'OnClipboardChange', - 'OnError', 'OnExit', 'OnExit', 'OnMessage', 'Ord', 'OutputDebug', 'Pause', 'PixelGetColor', - 'PixelSearch', 'PostMessage', 'Process', 'Progress', 'Random', 'RegExMatch', 'RegExReplace', - 'RegDelete', 'RegRead', 'RegWrite', 'RegisterCallback', 'Reload', 'Round', 'Run', 'RunAs', - 'RunWait', 'SB_SetIcon', 'SB_SetParts', 'SB_SetText', 'Send', 'SendRaw', 'SendInput', 'SendPlay', - 'SendEvent', 'SendLevel', 'SendMessage', 'SendMode', 'SetBatchLines', 'SetCapsLockState', - 'SetControlDelay', 'SetDefaultMouseSpeed', 'SetEnv', 'SetFormat', 'SetKeyDelay', 'SetMouseDelay', - 'SetNumLockState', 'SetScrollLockState', 'SetRegView', 'SetStoreCapsLockMode', 'SetTimer', - 'SetTitleMatchMode', 'SetWinDelay', 'SetWorkingDir', 'Shutdown', 'Sin', 'Sleep', 'Sort', - 'SoundBeep', 'SoundGet', 'SoundGetWaveVolume', 'SoundPlay', 'SoundSet', 'SoundSetWaveVolume', - 'SplashImage', 'SplashTextOn', 'SplashTextOff', 'SplitPath', 'Sqrt', 'StatusBarGetText', - 'StatusBarWait', 'StrGet', 'StringCaseSense', 'StringGetPos', 'StringLeft', 'StringLen', - 'StringLower', 'StringMid', 'StringReplace', 'StringRight', 'StringSplit', 'StringTrimLeft', - 'StringTrimRight', 'StringUpper', 'StrLen', 'StrPut', 'StrReplace', 'StrSplit', 'SubStr', - 'Suspend', 'Switch', 'SysGet', 'Tan', 'Thread', 'ToolTip', 'Transform', 'TrayTip', 'Trim', - 'LTrim', 'RTrim', 'TV_Add', 'TV_Delete', 'TV_Get', 'TV_GetChild', 'TV_GetCount', 'TV_GetNext', - 'TV_GetParent', 'TV_GetPrev', 'TV_GetSelection', 'TV_GetText', 'TV_Modify', 'TV_SetImageList', - 'UrlDownloadToFile', 'VarSetCapacity', 'WinActivate', 'WinActivateBottom', 'WinActive', - 'WinClose', 'WinExist', 'WinGetActiveStats', 'WinGetActiveTitle', 'WinGetClass', 'WinGet', - 'WinGetPos', 'WinGetText', 'WinGetTitle', 'WinHide', 'WinKill', 'WinMaximize', - 'WinMenuSelectItem', 'WinMinimize', 'WinMinimizeAll', 'WinMinimizeAllUndo', 'WinMove', - 'WinRestore', 'WinSet', 'WinSetTitle', 'WinShow', 'WinWait', 
'WinWaitActive', 'WinWaitNotActive', - 'WinWaitClose' -}) - -lex:set_word_list(lexer.PREPROCESSOR, { - 'ClipboardTimeout', 'CommentFlag', 'Delimiter', 'DerefChar', 'ErrorStdOut', 'EscapeChar', - 'HotkeyInterval', 'HotkeyModifierTimeout', 'Hotstring', 'If', 'IfTimeout', 'IfWinActive', - 'IfWinNotActive', 'IfWinExist', 'IfWinNotExist', 'Include', 'IncludeAgain', 'InputLevel', - 'InstallKeybdHook', 'InstallMouseHook', 'KeyHistory', 'LTrim', 'MaxHotkeysPerInterval', 'MaxMem', - 'MaxThreads', 'MaxThreadsBuffer', 'MaxThreadsPerHotkey', 'MenuMaskKey', 'NoEnv', 'NoTrayIcon', - 'Persistent', 'Requires', 'SingleInstance', 'UseHook', 'Warn', 'WinActivateForce' -}) - -lex:set_word_list(lexer.CONSTANT_BUILTIN, { - 'LButton', 'RButton', 'MButton', 'Advanced Buttons', 'XButton1', 'XButton2', 'Wheel', 'WheelDown', - 'WheelUp', 'WheelLeft', 'WheelRight', 'CapsLock', 'Space', 'Tab', 'Enter', 'Return', 'Esc', - 'Escape', 'BS', 'Backspace', 'ScrollLock', 'Del', 'Delete', 'Ins', 'Insert', 'Home', 'End', - 'PgUp', 'PgDn', 'Up', 'Down', 'Left', 'Right', 'Numpad0', 'NumpadIns', 'Numpad1', 'NumpadEnd', - 'Numpad2', 'NumpadDown', 'Numpad3', 'NumpadPgDn', 'Numpad4', 'NumpadLeft', 'Numpad5', - 'NumpadClear', 'Numpad6', 'NumpadRight', 'Numpad7', 'NumpadHome', 'Numpad8', 'NumpadUp', - 'Numpad9', 'NumpadPgUp', 'NumpadDot', 'NumpadDel', 'NumLock', 'NumpadDiv', 'NumpadMult', - 'NumpadAdd', 'NumpadSub', 'NumpadEnter', 'LWin', 'RWin', 'Ctrl', 'Control', 'Alt', 'Shift', - 'LCtrl', 'LControl', 'RCtrl', 'RControl', 'LShift', 'RShift', 'LAlt', 'RAlt', 'Browser_Back', - 'Browser_Forward', 'Browser_Refresh', 'Browser_Stop', 'Browser_Search', 'Browser_Favorites', - 'Browser_Home', 'Volume_Mute', 'Volume_Down', 'Volume_Up', 'Media_Next', 'Media_Prev', - 'Media_Stop', 'Media_Play_Pause', 'Launch_Mail', 'Launch_Media', 'Launch_App1', 'Launch_App2', - 'AppsKey', 'PrintScreen', 'CtrlBreak', 'Pause', 'Break', 'Help' -}) - -lex:set_word_list(lexer.VARIABLE_BUILTIN, { - 'Space', 'Tab', 'Args', 'WorkingDir', 
'InitialWorkingDir', 'ScriptDir', 'ScriptName', - 'ScriptFullPath', 'ScriptHwnd', 'LineNumber', 'LineFile', 'ThisFunc', 'ThisLabel', 'AhkVersion', - 'AhkPath', 'IsUnicode', 'IsCompiled', 'ExitReason', 'Year', 'MM', 'DD', 'MMMM', 'MMM', 'DDDD', - 'DDD', 'WDay', 'YDay', 'YWeek', 'Hour', 'Min', 'Sec', 'MSec', 'Now', 'NowUTC', 'TickCount', - 'IsSuspended', 'IsPaused', 'IsCritical', 'BatchLines', 'ListLines', 'TitleMatchMode', - 'TitleMatchModeSpeed', 'DetectHiddenWindows', 'DetectHiddenText', 'AutoTrim', 'StringCaseSense', - 'FileEncoding', 'FormatInteger', 'FormatFloat', 'SendMode', 'SendLevel', 'StoreCapsLockMode', - 'KeyDelay', 'KeyDuration', 'KeyDelayPlay', 'KeyDurationPlay', 'WinDelay', 'ControlDelay', - 'MouseDelay', 'MouseDelayPlay', 'DefaultMouseSpeed', 'CoordModeToolTip', 'CoordModePixel', - 'CoordModeMouse', 'CoordModeCaret', 'CoordModeMenu', 'RegView', 'IconHidden', 'IconTip', - 'IconFile', 'IconNumber', 'TimeIdle', 'TimeIdlePhysical', 'TimeIdleKeyboard', 'TimeIdleMouse', - 'DefaultGui', 'DefaultListView', 'DefaultTreeView', 'Gui', 'GuiControl', 'GuiWidth', 'GuiHeight', - 'GuiX', 'GuiY', 'GuiEvent', 'GuiControlEvent', 'EventInfo', 'ThisMenuItem', 'ThisMenu', - 'ThisMenuItemPos', 'ThisHotkey', 'PriorHotkey', 'PriorKey', 'TimeSinceThisHotkey', - 'TimeSincePriorHotkey', 'EndChar', 'ComSpec', 'Temp', 'OSType', 'OSVersion', 'Is64bitOS', - 'PtrSize', 'Language', 'ComputerName', 'UserName', 'WinDir', 'ProgramFiles', 'AppData', - 'AppDataCommon', 'Desktop', 'DesktopCommon', 'StartMenu', 'StartMenuCommon', 'Programs', - 'ProgramsCommon', 'Startup', 'StartupCommon', 'MyDocuments', 'IsAdmin', 'ScreenWidth', - 'ScreenHeight', 'ScreenDPI', 'Cursor', 'CaretX', 'CaretY', 'Clipboard', 'LastError', 'Index', - 'LoopFileName', 'LoopRegName', 'LoopReadLine', 'LoopField' -}) - -lexer.property['scintillua.comment'] = ';' - --- Fold points. 
-lex:add_fold_point(lexer.OPERATOR, '{', '}') -lex:add_fold_point(lexer.COMMENT, '/*', '*/') - -return lex diff --git a/share/vis/lexers/autoit.lua b/share/vis/lexers/autoit.lua @@ -1,137 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- AutoIt LPeg lexer. --- Contributed by Jeff Stone. - -local lexer = lexer -local P, S, B = lpeg.P, lpeg.S, lpeg.B - -local lex = lexer.new(...) - --- Keywords. -lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD, true))) - --- Functions. -local builtin_func = -B('.') * - lex:tag(lexer.FUNCTION_BUILTIN, lex:word_match(lexer.FUNCTION_BUILTIN, true)) -local func = lex:tag(lexer.FUNCTION, lexer.word) -local method = B('.') * lex:tag(lexer.FUNCTION_METHOD, lexer.word) -lex:add_rule('function', (builtin_func + method + func) * #(lexer.space^0 * '(')) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) - --- Comments. -local line_comment = lexer.to_eol(';') -local block_comment = lexer.range('#comments-start', '#comments-end') + lexer.range('#cs', '#ce') -lex:add_rule('comment', lex:tag(lexer.COMMENT, line_comment + block_comment)) - --- Preprocessor. -lex:add_rule('preprocessor', - lex:tag(lexer.PREPROCESSOR, '#' * lex:word_match(lexer.PREPROCESSOR, true))) - --- Strings. -local dq_str = lexer.range('"', true, false) -local sq_str = lexer.range("'", true, false) -local inc = lexer.range('<', '>', true, false, true) -lex:add_rule('string', lex:tag(lexer.STRING, dq_str + sq_str + inc)) - --- Macros. -lex:add_rule('macro', lex:tag(lexer.CONSTANT_BUILTIN, '@' * (lexer.alnum + '_')^1)) - --- Variables. -lex:add_rule('variable', lex:tag(lexer.VARIABLE, '$' * (lexer.alnum + '_')^1)) - --- Numbers. -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number)) - --- Operators. 
-lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('+-^*/&<>=?:()[]'))) - -lex:set_word_list(lexer.KEYWORD, { - 'False', 'True', 'And', 'Or', 'Not', 'ContinueCase', 'ContinueLoop', 'Default', 'Dim', 'Global', - 'Local', 'Const', 'Do', 'Until', 'Enum', 'Exit', 'ExitLoop', 'For', 'To', 'Step', 'Next', 'In', - 'Func', 'Return', 'EndFunc', 'If', 'Then', 'ElseIf', 'Else', 'EndIf', 'Null', 'ReDim', 'Select', - 'Case', 'EndSelect', 'Static', 'Switch', 'EndSwitch', 'Volatile', 'While', 'WEnd', 'With', - 'EndWith' -}) - -lex:set_word_list(lexer.FUNCTION_BUILTIN, { - 'Abs', 'ACos', 'AdlibRegister', 'AdlibUnRegister', 'Asc', 'AscW', 'ASin', 'Assign', 'ATan', - 'AutoItSetOption', 'AutoItWinGetTitle', 'AutoItWinSetTitle', 'Beep', 'Binary', 'BinaryLen', - 'BinaryMid', 'BinaryToString', 'BitAND', 'BitNOT', 'BitOR', 'BitRotate', 'BitShift', 'BitXOR', - 'BlockInput', 'Break', 'Call', 'CDTray', 'Ceiling', 'Chr', 'ChrW', 'ClipGet', 'ClipPut', - 'ConsoleRead', 'ConsoleWrite', 'ConsoleWriteError', 'ControlClick', 'ControlCommand', - 'ControlDisable', 'ControlEnable', 'ControlFocus', 'ControlGetFocus', 'ControlGetHandle', - 'ControlGetPos', 'ControlGetText', 'ControlHide', 'ControlListView', 'ControlMove', 'ControlSend', - 'ControlSetText', 'ControlShow', 'ControlTreeView', 'Cos', 'Dec', 'DirCopy', 'DirCreate', - 'DirGetSize', 'DirMove', 'DirRemove', 'DllCall', 'DllCallAddress', 'DllCallbackFree', - 'DllCallbackGetPtr', 'DllCallbackRegister', 'DllClose', 'DllOpen', 'DllStructCreate', - 'DllStructGetData', 'DllStructGetPtr', 'DllStructGetSize', 'DllStructSetData', 'DriveGetDrive', - 'DriveGetFileSystem', 'DriveGetLabel', 'DriveGetSerial', 'DriveGetType', 'DriveMapAdd', - 'DriveMapDel', 'DriveMapGet', 'DriveSetLabel', 'DriveSpaceFree', 'DriveSpaceTotal', 'DriveStatus', - 'EnvGet', 'EnvSet', 'EnvUpdate', 'Eval', 'Execute', 'Exp', 'FileChangeDir', 'FileClose', - 'FileCopy', 'FileCreateNTFSLink', 'FileCreateShortcut', 'FileDelete', 'FileExists', - 'FileFindFirstFile', 'FileFindNextFile', 
'FileFlush', 'FileGetAttrib', 'FileGetEncoding', - 'FileGetLongName', 'FileGetPos', 'FileGetShortcut', 'FileGetShortName', 'FileGetSize', - 'FileGetTime', 'FileGetVersion', 'FileInstall', 'FileMove', 'FileOpen', 'FileOpenDialog', - 'FileRead', 'FileReadLine', 'FileReadToArray', 'FileRecycle', 'FileRecycleEmpty', - 'FileSaveDialog', 'FileSelectFolder', 'FileSetAttrib', 'FileSetEnd', 'FileSetPos', 'FileSetTime', - 'FileWrite', 'FileWriteLine', 'Floor', 'FtpSetProxy', 'FuncName', 'GUICreate', 'GUICtrlCreateAvi', - 'GUICtrlCreateButton', 'GUICtrlCreateCheckbox', 'GUICtrlCreateCombo', 'GUICtrlCreateContextMenu', - 'GUICtrlCreateDate', 'GUICtrlCreateDummy', 'GUICtrlCreateEdit', 'GUICtrlCreateGraphic', - 'GUICtrlCreateGroup', 'GUICtrlCreateIcon', 'GUICtrlCreateInput', 'GUICtrlCreateLabel', - 'GUICtrlCreateList', 'GUICtrlCreateListView', 'GUICtrlCreateListViewItem', 'GUICtrlCreateMenu', - 'GUICtrlCreateMenuItem', 'GUICtrlCreateMonthCal', 'GUICtrlCreateObj', 'GUICtrlCreatePic', - 'GUICtrlCreateProgress', 'GUICtrlCreateRadio', 'GUICtrlCreateSlider', 'GUICtrlCreateTab', - 'GUICtrlCreateTabItem', 'GUICtrlCreateTreeView', 'GUICtrlCreateTreeViewItem', - 'GUICtrlCreateUpdown', 'GUICtrlDelete', 'GUICtrlGetHandle', 'GUICtrlGetState', 'GUICtrlRead', - 'GUICtrlRecvMsg', 'GUICtrlRegisterListViewSort', 'GUICtrlSendMsg', 'GUICtrlSendToDummy', - 'GUICtrlSetBkColor', 'GUICtrlSetColor', 'GUICtrlSetCursor', 'GUICtrlSetData', - 'GUICtrlSetDefBkColor', 'GUICtrlSetDefColor', 'GUICtrlSetFont', 'GUICtrlSetGraphic', - 'GUICtrlSetImage', 'GUICtrlSetLimit', 'GUICtrlSetOnEvent', 'GUICtrlSetPos', 'GUICtrlSetResizing', - 'GUICtrlSetState', 'GUICtrlSetStyle', 'GUICtrlSetTip', 'GUIDelete', 'GUIGetCursorInfo', - 'GUIGetMsg', 'GUIGetStyle', 'GUIRegisterMsg', 'GUISetAccelerators', 'GUISetBkColor', - 'GUISetCoord', 'GUISetCursor', 'GUISetFont', 'GUISetHelp', 'GUISetIcon', 'GUISetOnEvent', - 'GUISetState', 'GUISetStyle', 'GUIStartGroup', 'GUISwitch', 'Hex', 'HotKeySet', 'HttpSetProxy', - 'HttpSetUserAgent', 
'HWnd', 'InetClose', 'InetGet', 'InetGetInfo', 'InetGetSize', 'InetRead', - 'IniDelete', 'IniRead', 'IniReadSection', 'IniReadSectionNames', 'IniRenameSection', 'IniWrite', - 'IniWriteSection', 'InputBox', 'Int', 'IsAdmin', 'IsArray', 'IsBinary', 'IsBool', 'IsDeclared', - 'IsDllStruct', 'IsFloat', 'IsFunc', 'IsHWnd', 'IsInt', 'IsKeyword', 'IsNumber', 'IsObj', 'IsPtr', - 'IsString', 'Log', 'MemGetStats', 'Mod', 'MouseClick', 'MouseClickDrag', 'MouseDown', - 'MouseGetCursor', 'MouseGetPos', 'MouseMove', 'MouseUp', 'MouseWheel', 'MsgBox', 'Number', - 'ObjCreate', 'ObjCreateInterface', 'ObjEvent', 'ObjGet', 'ObjName', 'OnAutoItExitRegister', - 'OnAutoItExitUnRegister', 'Ping', 'PixelChecksum', 'PixelGetColor', 'PixelSearch', 'ProcessClose', - 'ProcessExists', 'ProcessGetStats', 'ProcessList', 'ProcessSetPriority', 'ProcessWait', - 'ProcessWaitClose', 'ProgressOff', 'ProgressOn', 'ProgressSet', 'Ptr', 'Random', 'RegDelete', - 'RegEnumKey', 'RegEnumVal', 'RegRead', 'RegWrite', 'Round', 'Run', 'RunAs', 'RunAsWait', - 'RunWait', 'Send', 'SendKeepActive', 'SetError', 'SetExtended', 'ShellExecute', - 'ShellExecuteWait', 'Shutdown', 'Sin', 'Sleep', 'SoundPlay', 'SoundSetWaveVolume', - 'SplashImageOn', 'SplashOff', 'SplashTextOn', 'Sqrt', 'SRandom', 'StatusbarGetText', 'StderrRead', - 'StdinWrite', 'StdioClose', 'StdoutRead', 'String', 'StringAddCR', 'StringCompare', - 'StringFormat', 'StringFromASCIIArray', 'StringInStr', 'StringIsAlNum', 'StringIsAlpha', - 'StringIsASCII', 'StringIsDigit', 'StringIsFloat', 'StringIsInt', 'StringIsLower', - 'StringIsSpace', 'StringIsUpper', 'StringIsXDigit', 'StringLeft', 'StringLen', 'StringLower', - 'StringMid', 'StringRegExp', 'StringRegExpReplace', 'StringReplace', 'StringReverse', - 'StringRight', 'StringSplit', 'StringStripCR', 'StringStripWS', 'StringToASCIIArray', - 'StringToBinary', 'StringTrimLeft', 'StringTrimRight', 'StringUpper', 'Tan', 'TCPAccept', - 'TCPCloseSocket', 'TCPConnect', 'TCPListen', 'TCPNameToIP', 'TCPRecv', 
'TCPSend', 'TCPShutdown', - 'TCPStartup', 'TimerDiff', 'TimerInit', 'ToolTip', 'TrayCreateItem', 'TrayCreateMenu', - 'TrayGetMsg', 'TrayItemDelete', 'TrayItemGetHandle', 'TrayItemGetState', 'TrayItemGetText', - 'TrayItemSetOnEvent', 'TrayItemSetState', 'TrayItemSetText', 'TraySetClick', 'TraySetIcon', - 'TraySetOnEvent', 'TraySetPauseIcon', 'TraySetState', 'TraySetToolTip', 'TrayTip', 'UBound', - 'UDPBind', 'UDPCloseSocket', 'UDPOpen', 'UDPRecv', 'UDPSend', 'UDPShutdown', 'UDPStartup', - 'VarGetType', 'WinActivate', 'WinActive', 'WinClose', 'WinExists', 'WinFlash', 'WinGetCaretPos', - 'WinGetClassList', 'WinGetClientSize', 'WinGetHandle', 'WinGetPos', 'WinGetProcess', - 'WinGetState', 'WinGetText', 'WinGetTitle', 'WinKill', 'WinList', 'WinMenuSelectItem', - 'WinMinimizeAll', 'WinMinimizeAllUndo', 'WinMove', 'WinSetOnTop', 'WinSetState', 'WinSetTitle', - 'WinSetTrans', 'WinWait', 'WinWaitActive', 'WinWaitClose', 'WinWaitNotActive' -}) - -lex:set_word_list(lexer.PREPROCESSOR, { - 'include-once', 'include', 'pragma', 'forceref', 'RequireAdmin', 'NoTrayIcon', - 'OnAutoItStartRegister' -}) - -lexer.property['scintillua.comment'] = ';' - -return lex diff --git a/share/vis/lexers/awk.lua b/share/vis/lexers/awk.lua @@ -1,295 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- AWK LPeg lexer. --- Modified by Wolfgang Seeberg 2012, 2013. - -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(...) - -local LEFTBRACKET = '[' -local RIGHTBRACKET = ']' -local SLASH = '/' -local BACKSLASH = '\\' -local CARET = '^' -local CR = '\r' -local LF = '\n' -local CRLF = CR .. 
LF -local DQUOTE = '"' -local DELIMITER_MATCHES = {['('] = ')', ['['] = ']'} -local COMPANION = {['('] = '[', ['['] = '('} -local CC = { - alnum = 1, alpha = 1, blank = 1, cntrl = 1, digit = 1, graph = 1, lower = 1, print = 1, punct = 1, - space = 1, upper = 1, xdigit = 1 -} -local LastRegexEnd = 0 -local BackslashAtCommentEnd = 0 -local KW_BEFORE_RX = { - case = 1, ['do'] = 1, ['else'] = 1, exit = 1, print = 1, printf = 1, ['return'] = 1 -} - -local function findKeyword(input, e) - local i = e - while i > 0 and input:find("^[%l]", i) do i = i - 1 end - local w = input:sub(i + 1, e) - if i == 0 then - return KW_BEFORE_RX[w] == 1 - elseif input:find("^[%u%d_]", i) then - return false - else - return KW_BEFORE_RX[w] == 1 - end -end - -local function isRegex(input, i) - while i >= 1 and input:find('^[ \t]', i) do i = i - 1 end - if i < 1 then return true end - if input:find("^[-!%%&(*+,:;<=>?[^{|}~\f]", i) or findKeyword(input, i) then - return true - elseif input:sub(i, i) == SLASH then - return i ~= LastRegexEnd -- deals with /xx/ / /yy/. 
- elseif input:find('^[]%w)."]', i) then - return false - elseif input:sub(i, i) == LF then - if i == 1 then return true end - i = i - 1 - if input:sub(i, i) == CR then - if i == 1 then return true end - i = i - 1 - end - elseif input:sub(i, i) == CR then - if i == 1 then return true end - i = i - 1 - else - return false - end - if input:sub(i, i) == BACKSLASH and i ~= BackslashAtCommentEnd then - return isRegex(input, i - 1) - else - return true - end -end - -local function eatCharacterClass(input, s, e) - local i = s - while i <= e do - if input:find('^[\r\n]', i) then - return false - elseif input:sub(i, i + 1) == ':]' then - local str = input:sub(s, i - 1) - return CC[str] == 1 and i + 1 - end - i = i + 1 - end - return false -end - -local function eatBrackets(input, i, e) - if input:sub(i, i) == CARET then i = i + 1 end - if input:sub(i, i) == RIGHTBRACKET then i = i + 1 end - while i <= e do - if input:find('^[\r\n]', i) then - return false - elseif input:sub(i, i) == RIGHTBRACKET then - return i - elseif input:sub(i, i + 1) == '[:' then - i = eatCharacterClass(input, i + 2, e) - if not i then return false end - elseif input:sub(i, i) == BACKSLASH then - i = i + 1 - if input:sub(i, i + 1) == CRLF then i = i + 1 end - end - i = i + 1 - end - return false -end - -local function eatRegex(input, i) - local e = #input - while i <= e do - if input:find('^[\r\n]', i) then - return false - elseif input:sub(i, i) == SLASH then - LastRegexEnd = i - return i - elseif input:sub(i, i) == LEFTBRACKET then - i = eatBrackets(input, i + 1, e) - if not i then return false end - elseif input:sub(i, i) == BACKSLASH then - i = i + 1 - if input:sub(i, i + 1) == CRLF then i = i + 1 end - end - i = i + 1 - end - return false -end - -local ScanRegexResult -local function scanGawkRegex(input, index) - if isRegex(input, index - 2) then - local i = eatRegex(input, index) - if not i then - ScanRegexResult = false - return false - end - local rx = input:sub(index - 1, i) - for bs in 
rx:gmatch("[^\\](\\+)[BSsWwy<>`']") do - -- /\S/ is special, but /\\S/ is not. - if #bs % 2 == 1 then return i + 1 end - end - ScanRegexResult = i + 1 - else - ScanRegexResult = false - end - return false -end --- Is only called immediately after scanGawkRegex(). -local function scanRegex() return ScanRegexResult end - -local function scanString(input, index) - local i = index - local e = #input - while i <= e do - if input:find('^[\r\n]', i) then - return false - elseif input:sub(i, i) == DQUOTE then - return i + 1 - elseif input:sub(i, i) == BACKSLASH then - i = i + 1 - -- lexer.range() doesn't handle CRLF. - if input:sub(i, i + 1) == CRLF then i = i + 1 end - end - i = i + 1 - end - return false -end - --- purpose: prevent isRegex() from entering a comment line that ends with a backslash. -local function scanComment(input, index) - local _, i = input:find('[^\r\n]*', index) - if input:sub(i, i) == BACKSLASH then BackslashAtCommentEnd = i end - return i + 1 -end - -local function scanFieldDelimiters(input, index) - local i = index - local e = #input - local left = input:sub(i - 1, i - 1) - local count = 1 - local right = DELIMITER_MATCHES[left] - local left2 = COMPANION[left] - local count2 = 0 - local right2 = DELIMITER_MATCHES[left2] - while i <= e do - if input:find('^[#\r\n]', i) then - return false - elseif input:sub(i, i) == right then - count = count - 1 - if count == 0 then return count2 == 0 and i + 1 end - elseif input:sub(i, i) == left then - count = count + 1 - elseif input:sub(i, i) == right2 then - count2 = count2 - 1 - if count2 < 0 then return false end - elseif input:sub(i, i) == left2 then - count2 = count2 + 1 - elseif input:sub(i, i) == DQUOTE then - i = scanString(input, i + 1) - if not i then return false end - i = i - 1 - elseif input:sub(i, i) == SLASH then - if isRegex(input, i - 1) then - i = eatRegex(input, i + 1) - if not i then return false end - end - elseif input:sub(i, i) == BACKSLASH then - if input:sub(i + 1, i + 2) == CRLF then 
- i = i + 2 - elseif input:find('^[\r\n]', i + 1) then - i = i + 1 - end - end - i = i + 1 - end - return false -end - --- Comments. -lex:add_rule('comment', lex:tag(lexer.COMMENT, '#' * P(scanComment))) - --- Strings. -lex:add_rule('string', lex:tag(lexer.STRING, DQUOTE * P(scanString))) - --- No leading sign because it might be binary. -local float = ((lexer.digit^1 * ('.' * lexer.digit^0)^-1) + ('.' * lexer.digit^1)) * - (S('eE') * S('+-')^-1 * lexer.digit^1)^-1 - --- Fields. E.g. $1, $a, $(x), $a(x), $a[x], $"1", $$a, etc. -lex:add_rule('field', lex:tag(lexer.VARIABLE .. '.field', '$' * S('$+-')^0 * - (float + lexer.word^0 * '(' * P(scanFieldDelimiters) + lexer.word^1 * - ('[' * P(scanFieldDelimiters))^-1 + '"' * P(scanString) + '/' * P(eatRegex) * '/'))) - --- Regular expressions. --- Slash delimited regular expressions are preceded by most operators or the keywords 'print' --- and 'case', possibly on a preceding line. They can contain unescaped slashes and brackets --- in brackets. Some escape sequences like '\S', '\s' have special meanings with Gawk. Tokens --- that contain them are displayed differently. -lex:add_rule('gawkRegex', lex:tag(lexer.REGEX .. '.gawk', SLASH * P(scanGawkRegex))) -lex:add_rule('regex', lex:tag(lexer.REGEX, SLASH * P(scanRegex))) - --- Operators. -lex:add_rule('gawkOperator', lex:tag(lexer.OPERATOR .. '.gawk', P("|&") + "@" + "**=" + "**")) -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('!%&()*+,-/:;<=>?[\\]^{|}~'))) - --- Numbers. -lex:add_rule('gawkNumber', lex:tag(lexer.NUMBER .. '.gawk', lexer.hex_num + lexer.oct_num)) -lex:add_rule('number', lex:tag(lexer.NUMBER, float)) - --- Keywords. -lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD))) - -lex:add_rule('builtInVariable', - lex:tag(lexer.VARIABLE_BUILTIN, lex:word_match(lexer.VARIABLE_BUILTIN))) - -lex:add_rule('gawkBuiltInVariable', lex:tag(lexer.VARIABLE_BUILTIN .. '.gawk', - lex:word_match(lexer.VARIABLE_BUILTIN .. '.gawk'))) - --- Functions. 
-local builtin_func = lex:tag(lexer.FUNCTION_BUILTIN, lex:word_match(lexer.FUNCTION_BUILTIN)) -local func = lex:tag(lexer.FUNCTION, lexer.word) -lex:add_rule('function', (builtin_func + func) * #P('(')) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) - --- Fold points. -lex:add_fold_point(lexer.OPERATOR, '{', '}') - --- Word lists. -lex:set_word_list(lexer.KEYWORD, { - 'BEGIN', 'END', 'break', 'continue', 'do', 'else', 'for', 'if', 'in', 'while', -- - 'delete', -- array - 'print', 'printf', 'getline', 'close', 'fflush', 'system', -- I/O - 'function', 'return', -- functions - 'next', 'nextfile', 'exit' -- program execution -}) - -lex:set_word_list(lexer.FUNCTION_BUILTIN, { - 'gsub', 'index', 'length', 'match', 'split', 'sprintf', 'sub', 'substr', 'tolower', 'toupper', -- string - 'mktime', 'strftime', 'systime', -- time - 'atan2', 'cos', 'exp', 'int', 'log', 'rand', 'sin', 'sqrt', 'srand' -- arithmetic -}) - -lex:set_word_list(lexer.VARIABLE_BUILTIN, { - 'ARGC', 'ARGV', 'CONVFMT', 'ENVIRON', 'FILENAME', 'FNR', 'FS', 'NF', 'NR', 'OFMT', 'OFS', 'ORS', - 'RLENGTH', 'RS', 'RSTART', 'SUBSEP' -}) - -lex:set_word_list(lexer.VARIABLE_BUILTIN .. '.gawk', { - 'ARGIND', 'BINMODE', 'ERRNO', 'FIELDWIDTHS', 'FPAT', 'FUNCTAB', 'IGNORECASE', 'LINT', 'PREC', - 'PROCINFO', 'ROUNDMODE', 'RT', 'SYMTAB', 'TEXTDOMAIN' -}) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/bash.lua b/share/vis/lexers/bash.lua @@ -1,138 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Shell LPeg lexer. - -local lexer = lexer -local P, S, B = lpeg.P, lpeg.S, lpeg.B - -local lex = lexer.new(...) - --- Keywords. -lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD))) - --- Builtins. -lex:add_rule('builtin', - lex:tag(lexer.FUNCTION_BUILTIN, lex:word_match(lexer.FUNCTION_BUILTIN)) * -P('=')) - --- Variable assignment. 
-local assign = lex:tag(lexer.VARIABLE, lexer.word) * lex:tag(lexer.OPERATOR, '=') -lex:add_rule('assign', lexer.starts_line(assign, true)) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local sq_str = -B('\\') * lexer.range("'", false, false) -local dq_str = -B('\\') * lexer.range('"') -local heredoc = '<<' * P(function(input, index) - local _, e, minus, _, delimiter = input:find('^(%-?)%s*(["\']?)([%w_]+)%2[^\n]*[\n\r\f;]+', index) - if not delimiter then return nil end - -- If the starting delimiter of a here-doc begins with "-", then spaces are allowed to come - -- before the closing delimiter. - _, e = - input:find((minus == '' and '[\n\r\f]+' or '[\n\r\f]+[ \t]*') .. delimiter .. '%f[^%w_]', e) - return e and e + 1 or #input + 1 -end) -local ex_str = -B('\\') * '`' -lex:add_rule('string', - lex:tag(lexer.STRING, sq_str + dq_str + heredoc) + lex:tag(lexer.EMBEDDED, ex_str)) - --- Comments. -lex:add_rule('comment', lex:tag(lexer.COMMENT, -B('\\') * lexer.to_eol('#'))) - --- Numbers. -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number)) - --- Variables. -local builtin_var = lex:tag(lexer.OPERATOR, '$' * P('{')^-1) * lex:tag(lexer.VARIABLE_BUILTIN, - lex:word_match(lexer.VARIABLE_BUILTIN) + S('!#?*@$-') * -lexer.alnum + lexer.digit^1) -local var_ref = lex:tag(lexer.OPERATOR, '$' * ('{' * S('!#')^-1)^-1) * - lex:tag(lexer.VARIABLE, lexer.word) -local patt_expansion = lex:tag(lexer.DEFAULT, '/#' + '#' * P('#')^-1) -lex:add_rule('variable', builtin_var + var_ref * patt_expansion^-1) - --- Operators. 
-local op = S('!<>&|;$()[]{}') + lpeg.B(lexer.space) * S('.:') * #lexer.space - -local function in_expr(constructs) - return P(function(input, index) - local line = input:sub(1, index):match('[^\r\n]*$') - for k, v in pairs(constructs) do - local s = line:find(k, 1, true) - if not s then goto continue end - local e = line:find(v, 1, true) - if not e or e < s then return true end - ::continue:: - end - return nil - end) -end - -local file_op = '-' * (S('abcdefghkprstuwxGLNOS') + 'ef' + 'nt' + 'ot') -local shell_op = '-o' -local var_op = '-' * S('vR') -local string_op = '-' * S('zn') + S('!=')^-1 * '=' + S('<>') -local num_op = '-' * lexer.word_match('eq ne lt le gt ge') -local in_cond_expr = in_expr{['[[ '] = ' ]]', ['[ '] = ' ]'} -local conditional_op = (num_op + file_op + shell_op + var_op + string_op) * #lexer.space * - in_cond_expr - -local in_arith_expr = in_expr{['(('] = '))'} -local arith_op = (S('+!~*/%<>=&^|?:,') + '--' + '-' * #S(' \t')) * in_arith_expr - --- TODO: performance is terrible on large files. --- lex:add_rule('operator', lex:tag(lexer.OPERATOR, op + conditional_op + arith_op)) -lex:add_rule('operator', lex:tag(lexer.OPERATOR, op)) - --- Flags/options. -lex:add_rule('flag', lex:tag(lexer.DEFAULT, '-' * P('-')^-1 * lexer.word * ('-' * lexer.word)^0)) - --- Fold points. -lex:add_fold_point(lexer.KEYWORD, 'if', 'fi') -lex:add_fold_point(lexer.KEYWORD, 'case', 'esac') -lex:add_fold_point(lexer.KEYWORD, 'do', 'done') -lex:add_fold_point(lexer.OPERATOR, '{', '}') - --- Word lists. -lex:set_word_list(lexer.KEYWORD, { - 'if', 'then', 'elif', 'else', 'fi', 'time', 'for', 'in', 'until', 'while', 'do', 'done', 'case', - 'esac', 'coproc', 'select', 'function' -}) - -lex:set_word_list(lexer.FUNCTION_BUILTIN, { - -- Shell built-ins. - 'break', 'cd', 'continue', 'eval', 'exec', 'exit', 'export', 'getopts', 'hash', 'pwd', 'readonly', - 'return', 'shift', 'test', 'times', 'trap', 'umask', 'unset', - -- Bash built-ins. 
- 'alias', 'bind', 'builtin', 'caller', 'command', 'declare', 'echo', 'enable', 'help', 'let', - 'local', 'logout', 'mapfile', 'printf', 'read', 'readarray', 'source', 'type', 'typeset', - 'ulimit', 'unalias', -- - 'set', 'shopt', -- shell behavior - 'dirs', 'popd', 'pushd', -- directory stack - 'bg', 'fg', 'jobs', 'kill', 'wait', 'disown', 'suspend', -- job control - 'fc', 'history' -- history -}) - -lex:set_word_list(lexer.VARIABLE_BUILTIN, { - -- Shell built-ins. - 'CDPATH', 'HOME', 'IFS', 'MAIL', 'MAILPATH', 'OPTARG', 'OPTIND', 'PATH', 'PS1', 'PS2', - -- Bash built-ins. - 'BASH', 'BASHOPTS', 'BASHPID', 'BASH_ALIASES', 'BASH_ARGC', 'BASH_ARGV', 'BASH_ARGV0', - 'BASH_CMDS', 'BASH_COMMAND', 'BASH_COMPAT', 'BASH_ENV', 'BASH_EXECUTION_STRING', 'BASH_LINENO', - 'BASH_LOADABLES_PATH', 'BASH_REMATCH', 'BASH_SOURCE', 'BASH_SUBSHELL', 'BASH_VERSINFO', - 'BASH_VERSION', 'BASH_XTRACEFD', 'CHILD_MAX', 'COLUMNS', 'COMP_CWORD', 'COMP_LINE', 'COMP_POINT', - 'COMP_TYPE', 'COMP_KEY', 'COMP_WORDBREAKS', 'COMP_WORDS', 'COMP_REPLY', 'COPROC', 'DIRSTACK', - 'EMACS', 'ENV', 'EPOCHREALTIME', 'EPOCHSECONDS', 'EUID', 'EXECIGNORE', 'FCEDIT', 'FIGNORE', - 'FUNCNAME', 'FUNCNEST', 'GLOBIGNORE', 'GROUPS', 'histchars', 'HISTCMD', 'HISTCONTROL', 'HISTFILE', - 'HISTFILESIZE', 'HISTIGNORE', 'HISTSIZE', 'HISTTIMEFORMAT', 'HOSTFILE', 'HOSTNAME', 'HOSTTYPE', - 'IGNOREEOF', 'INPUTRC', 'INSIDE_EMACS', 'LANG', 'LC_ALL', 'LC_COLLATE', 'LC_CTYPE', 'LC_MESSAGES', - 'LC_NUMERIC', 'LC_TIME', 'LINENO', 'LINES', 'MACHTYPE', 'MAILCHECK', 'MAPFILE', 'OLDPWD', - 'OPTERR', 'OSTYPE', 'PIPESTATUS', 'POSIXLY_CORRECT', 'PPID', 'PROMPT_COMMAND', 'PROMPT_DIRTRIM', - 'PSO', 'PS3', 'PS4', 'PWD', 'RANDOM', 'READLINE_LINE', 'READLINE_MARK', 'READLINE_POINT', 'REPLY', - 'SECONDS', 'SHELL', 'SHELLOPTS', 'SHLVL', 'SRANDOM', 'TIMEFORMAT', 'TMOUT', 'TMPDIR', 'UID', - -- Job control. 
- 'auto_resume' -}) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/batch.lua b/share/vis/lexers/batch.lua @@ -1,55 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Batch LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('batch', {case_insensitive_fold_points = true}) - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match({ - 'cd', 'chdir', 'md', 'mkdir', 'cls', 'for', 'if', 'echo', 'echo.', 'move', 'copy', 'ren', 'del', - 'set', 'call', 'exit', 'setlocal', 'shift', 'endlocal', 'pause', 'defined', 'exist', 'errorlevel', - 'else', 'in', 'do', 'NUL', 'AUX', 'PRN', 'not', 'goto', 'pushd', 'popd' -}, true))) - --- Functions. -lex:add_rule('function', token(lexer.FUNCTION, word_match({ - 'APPEND', 'ATTRIB', 'CHKDSK', 'CHOICE', 'DEBUG', 'DEFRAG', 'DELTREE', 'DISKCOMP', 'DISKCOPY', - 'DOSKEY', 'DRVSPACE', 'EMM386', 'EXPAND', 'FASTOPEN', 'FC', 'FDISK', 'FIND', 'FORMAT', 'GRAPHICS', - 'KEYB', 'LABEL', 'LOADFIX', 'MEM', 'MODE', 'MORE', 'MOVE', 'MSCDEX', 'NLSFUNC', 'POWER', 'PRINT', - 'RD', 'REPLACE', 'RESTORE', 'SETVER', 'SHARE', 'SORT', 'SUBST', 'SYS', 'TREE', 'UNDELETE', - 'UNFORMAT', 'VSAFE', 'XCOPY' -}, true))) - --- Comments. -local rem = (P('REM') + 'rem') * #lexer.space -lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol(rem + '::'))) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. -lex:add_rule('string', token(lexer.STRING, lexer.range('"', true))) - --- Variables. -local arg = '%' * lexer.digit + '%~' * lexer.alnum^1 -local variable = lexer.range('%', true, false) -lex:add_rule('variable', token(lexer.VARIABLE, arg + variable)) - --- Labels. -lex:add_rule('label', token(lexer.LABEL, ':' * lexer.word)) - --- Operators. 
-lex:add_rule('operator', token(lexer.OPERATOR, S('+|&!<>='))) - --- Fold points. -lex:add_fold_point(lexer.KEYWORD, 'setlocal', 'endlocal') - -lexer.property['scintillua.comment'] = 'REM ' - -return lex diff --git a/share/vis/lexers/bibtex.lua b/share/vis/lexers/bibtex.lua @@ -1,48 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Bibtex LPeg lexer. - -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(...) - --- Fields. -lex:add_rule('field', lex:tag(lexer.VARIABLE_BUILTIN, lex:word_match(lexer.VARIABLE_BUILTIN, true))) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local dq_str = lexer.range('"') -local br_str = lexer.range('{', '}', false, false, true) -lex:add_rule('string', lex:tag(lexer.STRING, dq_str + br_str)) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S(',='))) - --- Embedded in Latex. -local latex = lexer.load('latex') - --- Embedded Bibtex. -local entry = lex:tag(lexer.PREPROCESSOR, '@' * lex:word_match('entry', true)) -local bibtex_start_rule = entry * lex:get_rule('whitespace')^0 * lex:tag(lexer.OPERATOR, '{') -local bibtex_end_rule = lex:tag(lexer.OPERATOR, '}') -latex:embed(lex, bibtex_start_rule, bibtex_end_rule) - --- Word lists. 
-lex:set_word_list(lexer.VARIABLE_BUILTIN, { - 'author', 'title', 'journal', 'year', 'volume', 'number', 'pages', 'month', 'note', 'key', - 'publisher', 'editor', 'series', 'address', 'edition', 'howpublished', 'booktitle', - 'organization', 'chapter', 'school', 'institution', 'type', 'isbn', 'issn', 'affiliation', - 'issue', 'keyword', 'url' -}) - -lex:set_word_list('entry', { - 'string', -- - 'book', 'article', 'booklet', 'conference', 'inbook', 'incollection', 'inproceedings', 'manual', - 'mastersthesis', 'lambda', 'misc', 'phdthesis', 'proceedings', 'techreport', 'unpublished' -}) - -lexer.property['scintillua.comment'] = '%' - -return lex diff --git a/share/vis/lexers/boo.lua b/share/vis/lexers/boo.lua @@ -1,66 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Boo LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('boo') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'and', 'break', 'cast', 'continue', 'elif', 'else', 'ensure', 'except', 'for', 'given', 'goto', - 'if', 'in', 'isa', 'is', 'not', 'or', 'otherwise', 'pass', 'raise', 'ref', 'try', 'unless', - 'when', 'while', - -- Definitions. - 'abstract', 'callable', 'class', 'constructor', 'def', 'destructor', 'do', 'enum', 'event', - 'final', 'get', 'interface', 'internal', 'of', 'override', 'partial', 'private', 'protected', - 'public', 'return', 'set', 'static', 'struct', 'transient', 'virtual', 'yield', - -- Namespaces. - 'as', 'from', 'import', 'namespace', - -- Other. - 'self', 'super', 'null', 'true', 'false' -})) - --- Types. 
-lex:add_rule('type', token(lexer.TYPE, word_match{ - 'bool', 'byte', 'char', 'date', 'decimal', 'double', 'duck', 'float', 'int', 'long', 'object', - 'operator', 'regex', 'sbyte', 'short', 'single', 'string', 'timespan', 'uint', 'ulong', 'ushort' -})) - --- Functions. -lex:add_rule('function', token(lexer.FUNCTION, word_match{ - 'array', 'assert', 'checked', 'enumerate', '__eval__', 'filter', 'getter', 'len', 'lock', 'map', - 'matrix', 'max', 'min', 'normalArrayIndexing', 'print', 'property', 'range', 'rawArrayIndexing', - 'required', '__switch__', 'typeof', 'unchecked', 'using', 'yieldAll', 'zip' -})) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"', true) -local tq_str = lexer.range('"""') -local string = token(lexer.STRING, tq_str + sq_str + dq_str) -local regex_str = lexer.after_set('!%^&*([{-=+|:;,?<>~', lexer.range('/', true)) -local regex = token(lexer.REGEX, regex_str) -lex:add_rule('string', string + regex) - --- Comments. -local line_comment = lexer.to_eol('#', true) -local block_comment = lexer.range('/*', '*/') -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number * (S('msdhsfFlL') + 'ms')^-1)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('!%^&*()[]{}-=+/|:;.,?<>~`'))) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/caml.lua b/share/vis/lexers/caml.lua @@ -1,65 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- OCaml LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('caml') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. 
-lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'and', 'as', 'asr', 'begin', 'class', 'closed', 'constraint', 'do', 'done', 'downto', 'else', - 'end', 'exception', 'external', 'failwith', 'false', 'flush', 'for', 'fun', 'function', 'functor', - 'if', 'in', 'include', 'incr', 'inherit', 'land', 'let', 'load', 'los', 'lsl', 'lsr', 'lxor', - 'match', 'method', 'mod', 'module', 'mutable', 'new', 'not', 'of', 'open', 'option', 'or', - 'parser', 'private', 'raise', 'rec', 'ref', 'regexp', 'sig', 'stderr', 'stdin', 'stdout', - 'struct', 'then', 'to', 'true', 'try', 'type', 'val', 'virtual', 'when', 'while', 'with' -})) - --- Types. -lex:add_rule('type', token(lexer.TYPE, word_match('bool char float int string unit'))) - --- Functions. -lex:add_rule('function', token(lexer.FUNCTION, word_match{ - 'abs', 'abs_float', 'acos', 'asin', 'atan', 'atan2', 'at_exit', 'bool_of_string', 'ceil', - 'char_of_int', 'classify_float', 'close_in', 'close_in_noerr', 'close_out', 'close_out_noerr', - 'compare', 'cos', 'cosh', 'decr', 'epsilon_float', 'exit', 'exp', 'failwith', 'float', - 'float_of_int', 'float_of_string', 'floor', 'flush', 'flush_all', 'format_of_string', 'frexp', - 'fst', 'ignore', 'in_channel_length', 'incr', 'infinity', 'input', 'input_binary_int', - 'input_byte', 'input_char', 'input_line', 'input_value', 'int_of_char', 'int_of_float', - 'int_of_string', 'invalid_arg', 'ldexp', 'log', 'log10', 'max', 'max_float', 'max_int', 'min', - 'min_float', 'min_int', 'mod', 'modf', 'mod_float', 'nan', 'open_in', 'open_in_bin', - 'open_in_gen', 'open_out', 'open_out_bin', 'open_out_gen', 'out_channel_length', 'output', - 'output_binary_int', 'output_byte', 'output_char', 'output_string', 'output_value', 'pos_in', - 'pos_out', 'pred', 'prerr_char', 'prerr_endline', 'prerr_float', 'prerr_int', 'prerr_newline', - 'prerr_string', 'print_char', 'print_endline', 'print_float', 'print_int', 'print_newline', - 'print_string', 'raise', 'read_float', 'read_int', 'read_line', 
'really_input', 'seek_in', - 'seek_out', 'set_binary_mode_in', 'set_binary_mode_out', 'sin', 'sinh', 'snd', 'sqrt', 'stderr', - 'stdin', 'stdout', 'string_of_bool', 'string_of_float', 'string_of_format', 'string_of_int', - 'succ', 'tan', 'tanh', 'truncate' -})) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"', true) -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) - --- Comments. -lex:add_rule('comment', token(lexer.COMMENT, lexer.range('(*', '*)', false, false, true))) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('=<>+-*/.,:;~!#%^&|?[](){}'))) - -lexer.property['scintillua.comment'] = '(*|*)' - -return lex diff --git a/share/vis/lexers/chuck.lua b/share/vis/lexers/chuck.lua @@ -1,102 +0,0 @@ --- Copyright 2010-2024 Martin Morawetz. See LICENSE. --- ChucK LPeg lexer. - -local lexer = lexer -local word_match = lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(...) - --- Keywords. -lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD))) - --- Constants. -lex:add_rule('constant', lex:tag(lexer.CONSTANT_BUILTIN, lex:word_match(lexer.CONSTANT_BUILTIN))) - --- Types. -lex:add_rule('type', lex:tag(lexer.TYPE, lex:word_match(lexer.TYPE))) - --- Classes. -lex:add_rule('class', lex:tag(lexer.CLASS, lex:word_match(lexer.CLASS))) - --- Functions. -local std = 'Std.' * lex:word_match(lexer.FUNCTION_BUILTIN) -local machine = 'Machine.' * lex:word_match(lexer.FUNCTION_BUILTIN .. '.machine') -local math = 'Math.' * lex:word_match(lexer.FUNCTION_BUILTIN .. '.math') -local func = lex:tag(lexer.FUNCTION, lexer.word) * #P('(') -lex:add_rule('function', lex:tag(lexer.FUNCTION_BUILTIN, std + machine + math) + func) - --- Constants. -lex:add_rule('constant', lex:tag(lexer.CONSTANT_BUILTIN, - 'Math.' 
* lex:word_match(lexer.CONSTANT_BUILTIN .. '.math'))) - --- Global ugens. -lex:add_rule('ugen', lex:tag(lexer.CONSTANT_BUILTIN .. '.ugen', word_match('dac adc blackhole'))) - --- Times. -lex:add_rule('time', lex:tag(lexer.NUMBER, word_match('samp ms second minute hour day week'))) - --- Special special value. -lex:add_rule('now', lex:tag(lexer.CONSTANT_BUILTIN .. '.now', word_match('now'))) - --- Strings. -local sq_str = P('L')^-1 * lexer.range("'", true) -local dq_str = P('L')^-1 * lexer.range('"', true) -lex:add_rule('string', lex:tag(lexer.STRING, sq_str + dq_str)) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) - --- Comments. -local line_comment = lexer.to_eol('//', true) -local block_comment = lexer.range('/*', '*/') -lex:add_rule('comment', lex:tag(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number)) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;.()[]{}@'))) - --- Word lists. -lex:set_word_list(lexer.KEYWORD, { - -- Control structures. - 'break', 'continue', 'else', 'for', 'if', 'repeat', 'return', 'switch', 'until', 'while', - -- Other chuck keywords. - 'function', 'fun', 'spork', 'const', 'new' -}) - -lex:set_word_list(lexer.CONSTANT_BUILTIN, { - 'false', 'maybe', 'me', 'null', 'NULL', 'pi', 'true' -- special values -}) - -lex:set_word_list(lexer.TYPE, 'float int time dur void same') - --- Class keywords. -lex:set_word_list(lexer.CLASS, { - 'class', 'extends', 'implements', 'interface', 'private', 'protected', 'public', 'pure', 'static', - 'super', 'this' -}) - -lex:set_word_list(lexer.FUNCTION_BUILTIN, { - 'abs', 'fabs', 'sgn', 'system', 'atoi', 'atof', 'getenv', 'setenv', 'mtof', 'ftom', 'powtodb', - 'rmstodb', 'dbtopow', 'dbtorms' -}) - -lex:set_word_list(lexer.FUNCTION_BUILTIN .. '.machine', { - 'add', 'spork', 'remove', 'replace', 'status', 'crash' -}) - -lex:set_word_list(lexer.FUNCTION_BUILTIN .. 
'.math', { - 'sin', 'cos', 'tan', 'asin', 'acos', 'atan', 'atan2', 'sinh', 'cosh', 'tanh', 'hypot', 'pow', - 'sqrt', 'exp', 'log', 'log2', 'log10', 'random', 'random2', 'randomf', 'random2f', 'srandom', - 'floor', 'ceil', 'round', 'trunc', 'fmod', 'remainder', 'min', 'max', 'nextpow2', 'isinf', 'isnan' -}) - -lex:set_word_list(lexer.CONSTANT_BUILTIN .. '.math', { - 'PI', 'TWO_PI', 'e', 'E', 'i', 'I', 'j', 'J', 'RANDOM_MAX' -}) - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/clojure.lua b/share/vis/lexers/clojure.lua @@ -1,148 +0,0 @@ --- Copyright 2018-2024 Mitchell. See LICENSE. --- Clojure LPeg lexer. --- Contributed by Christos Chatzifountas. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('clojure') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'fn', 'try', 'catch', 'finaly', 'defonce', 'and', 'case', 'cond', 'def', 'defn', 'defmacro', 'do', - 'else', 'when', 'when-let', 'if-let', 'if', 'let', 'loop', 'or', 'recur', 'quote' -})) - --- Functions. 
-lex:add_rule('function', token(lexer.FUNCTION, word_match{ - '*', '+', '-', '->ArrayChunk', '->Eduction', '->Vec', '->VecNode', '->VecSeq', '/', '<', '<=', - '=', '==', '>', '>=', 'StackTraceElement->vec', 'Throwable->map', 'accessor', 'aclone', - 'add-classpath', 'add-watch', 'agent', 'agent-error', 'agent-errors', 'aget', 'alength', 'alias', - 'all-ns', 'alter', 'alter-meta!', 'alter-var-root', 'ancestors', 'any?', 'apply', 'array-map', - 'aset', 'aset-boolean', 'aset-byte', 'aset-char', 'aset-double', 'aset-float', 'aset-int', - 'aset-long', 'aset-short', 'assoc', 'assoc!', 'assoc-in', 'associative?', 'atom', 'await', - 'await-for', 'bases', 'bean', 'bigdec', 'bigint', 'biginteger', 'bit-and', 'bit-and-not', - 'bit-clear', 'bit-flip', 'bit-not', 'bit-or', 'bit-set', 'bit-shift-left', 'bit-shift-right', - 'bit-test', 'bit-xor', 'boolean', 'boolean-array', 'boolean?', 'booleans', 'bound-fn*', 'bound?', - 'bounded-count', 'butlast', 'byte', 'byte-array', 'bytes', 'bytes?', 'cast', 'cat', 'char', - 'char-array', 'char?', 'chars', 'class', 'class?', 'clear-agent-errors', 'clojure-version', - 'coll?', 'commute', 'comp', 'comparator', 'compare', 'compare-and-set!', 'compile', 'complement', - 'completing', 'concat', 'conj', 'conj!', 'cons', 'constantly', 'construct-proxy', 'contains?', - 'count', 'counted?', 'create-ns', 'create-struct', 'cycle', 'dec', 'decimal?', 'dedupe', 'delay?', - 'deliver', 'denominator', 'deref', 'derive', 'descendants', 'disj', 'disj!', 'dissoc', 'dissoc!', - 'distinct', 'distinct?', 'doall', 'dorun', 'double', 'double-array', 'double?', 'doubles', 'drop', - 'drop-last', 'drop-while', 'eduction', 'empty', 'empty?', 'ensure', 'ensure-reduced', - 'enumeration-seq', 'error-handler', 'error-mode', 'eval', 'even?', 'every-pred', 'every?', - 'ex-data', 'ex-info', 'extend', 'extenders', 'extends?', 'false?', 'ffirst', 'file-seq', 'filter', - 'filterv', 'find', 'find-keyword', 'find-ns', 'find-var', 'first', 'flatten', 'float', - 'float-array', 
'float?', 'floats', 'flush', 'fn?', 'fnext', 'fnil', 'force', 'format', - 'frequencies', 'future-call', 'future-cancel', 'future-cancelled?', 'future-done?', 'future?', - 'gensym', 'get', 'get-in', 'get-method', 'get-proxy-class', 'get-thread-bindings', - 'get-validator', 'group-by', 'halt-when', 'hash', 'hash-map', 'hash-ordered-coll', 'hash-set', - 'hash-unordered-coll', 'ident?', 'identical?', 'identity', 'ifn?', 'in-ns', 'inc', 'inc', - 'indexed?', 'init-proxy', 'inst-ms', 'inst?', 'instance?', 'int', 'int-array', 'int?', 'integer?', - 'interleave', 'intern', 'interpose', 'into', 'into-array', 'ints', 'isa?', 'iterate', - 'iterator-seq', 'juxt', 'keep', 'keep-indexed', 'key', 'keys', 'keyword', 'keyword?', 'last', - 'line-seq', 'list', 'list*', 'list?', 'load', 'load-file', 'load-reader', 'load-string', - 'loaded-libs', 'long', 'long-array', 'longs', 'macroexpand', 'macroexpand-1', 'make-array', - 'make-hierarchy', 'map', 'map-entry?', 'map-indexed', 'map?', 'mapcat', 'mapv', 'max', 'max-key', - 'memoize', 'merge', 'merge-with', 'meta', 'methods', 'min', 'min-key', 'mix-collection-hash', - 'mod', 'name', 'namespace', 'namespace-munge', 'nat-int?', 'neg-int?', 'neg?', 'newline', 'next', - 'nfirst', 'nil?', 'nnext', 'not', 'not-any?', 'not-empty', 'not-every?', 'not=', 'ns-aliases', - 'ns-imports', 'ns-interns', 'ns-map', 'ns-name', 'ns-publics', 'ns-refers', 'ns-resolve', - 'ns-unalias', 'ns-unmap', 'nth', 'nthnext', 'nthrest', 'num', 'number?', 'numerator', - 'object-array', 'odd?', 'parents', 'partial', 'partition', 'partition-all', 'partition-by', - 'pcalls', 'peek', 'persistent!', 'pmap', 'pop', 'pop!', 'pop-thread-bindings', 'pos-int?', 'pos?', - 'pr-str', 'prefer-method', 'prefers', 'print', 'print-str', 'printf', 'println', 'println-str', - 'prn', 'prn-str', 'promise', 'proxy-mappings', 'push-thread-bindings', 'qualified-ident?', - 'qualified-keyword?', 'qualified-symbol?', 'quot', 'rand', 'rand-int', 'rand-nth', - 'random-sample', 'range', 'ratio?', 
'rational?', 'rationalize', 're-find', 're-groups', - 're-matcher', 're-matches', 're-pattern', 're-seq', 'read', 'read-line', 'read-string', - 'reader-conditional', 'reader-conditional?', 'realized?', 'record?', 'reduce', 'reduce-kv', - 'reduced', 'reduced?', 'reductions', 'ref', 'ref-history-count', 'ref-max-history', - 'ref-min-history', 'ref-set', 'refer', 'release-pending-sends', 'rem', 'remove', - 'remove-all-methods', 'remove-method', 'remove-ns', 'remove-watch', 'repeat', 'repeatedly', - 'replace', 'replicate', 'require', 'reset!', 'reset-meta!', 'reset-vals!', 'resolve', 'rest', - 'restart-agent', 'resultset-seq', 'reverse', 'reversible?', 'rseq', 'rsubseq', 'run!', - 'satisfies?', 'second', 'select-keys', 'send', 'send-off', 'send-via', 'seq', 'seq?', 'seqable?', - 'seque', 'sequence', 'sequential?', 'set', 'set-agent-send-executor!', - 'set-agent-send-off-executor!', 'set-error-handler!', 'set-error-mode!', 'set-validator!', 'set?', - 'short', 'short-array', 'shorts', 'shuffle', 'shutdown-agents', 'simple-ident?', - 'simple-keyword?', 'simple-symbol?', 'slurp', 'some', 'some-fn', 'some?', 'sort', 'sort-by', - 'sorted-map', 'sorted-map-by', 'sorted-set', 'sorted-set-by', 'sorted?', 'special-symbol?', - 'spit', 'split-at', 'split-with', 'str', 'string?', 'struct', 'struct-map', 'subs', 'subseq', - 'subvec', 'supers', 'swap!', 'swap-vals!', 'symbol', 'symbol?', 'tagged-literal', - 'tagged-literal?', 'take', 'take-last', 'take-nth', 'take-while', 'test', 'the-ns', - 'thread-bound?', 'to-array', 'to-array-2d', 'trampoline', 'transduce', 'transient', 'tree-seq', - 'true?', 'type', 'unchecked-add', 'unchecked-add-int', 'unchecked-byte', 'unchecked-char', - 'unchecked-dec', 'unchecked-dec-int', 'unchecked-divide-int', 'unchecked-double', - 'unchecked-float', 'unchecked-inc', 'unchecked-inc-int', 'unchecked-int', 'unchecked-long', - 'unchecked-multiply', 'unchecked-multiply-int', 'unchecked-negate', 'unchecked-negate-int', - 'unchecked-remainder-int', 
'unchecked-short', 'unchecked-subtract', 'unchecked-subtract-int', - 'underive', 'unreduced', 'unsigned-bit-shift-right', 'update', 'update-in', 'update-proxy', - 'uri?', 'use', 'uuid?', 'val', 'vals', 'var-get', 'var-set', 'var?', 'vary-meta', 'vec', 'vector', - 'vector-of', 'vector?', 'volatile!', 'volatile?', 'vreset!', 'with-bindings*', 'with-meta', - 'with-redefs-fn', 'xml-seq', 'zero?', 'zipmap', 'diff-similar', 'equality-partition', 'diff', - 'inspect', 'inspect-table', 'inspect-tree', 'validated', 'browse-url', 'as-file', 'as-url', - 'make-input-stream', 'make-output-stream', 'make-reader', 'make-writer', 'as-relative-path', - 'copy', 'delete-file', 'file', 'input-stream', 'make-parents', 'output-stream', 'reader', - 'resource', 'writer', 'add-local-javadoc', 'add-remote-javadoc', 'javadoc', 'sh', 'demunge', - 'load-script', 'main', 'repl', 'repl-caught', 'repl-exception', 'repl-prompt', 'repl-read', - 'root-cause', 'skip-if-eol', 'skip-whitespace', 'stack-element-str', 'cl-format', 'fresh-line', - 'get-pretty-writer', 'pprint', 'pprint-indent', 'pprint-newline', 'pprint-tab', 'print-table', - 'set-pprint-dispatch', 'write', 'write-out', 'resolve-class', 'do-reflect', 'typename', - '->AsmReflector', '->Constructor', '->Field', '->JavaReflector', '->Method', 'map->Constructor', - 'map->Field', 'map->Method', 'reflect', 'type-reflect', 'apropos', 'dir-fn', 'find-doc', 'pst', - 'set-break-handler!', 'source-fn', 'thread-stopper', 'difference', 'index', 'intersection', - 'join', 'map-invert', 'project', 'rename', 'rename-keys', 'select', 'subset?', 'superset?', - 'union', 'e', 'print-cause-trace', 'print-stack-trace', 'print-throwable', 'print-trace-element', - 'blank?', 'capitalize', 'ends-with?', 'escape', 'includes?', 'index-of', 'last-index-of', - 'lower-case', 're-quote-replacement', 'replace-first', 'split', 'split-lines', 'starts-with?', - 'trim', 'trim-newline', 'triml', 'trimr', 'upper-case', 'apply-template', 'assert-any', - 'assert-predicate', 
'compose-fixtures', 'do-report', 'file-position', 'function?', - 'get-possibly-unbound-var', 'inc-report-counter', 'join-fixtures', 'run-all-tests', 'run-tests', - 'successful?', 'test-all-vars', 'test-ns', 'test-vars', 'testing-contexts-str', - 'testing-vars-str', 'keywordize-keys', 'macroexpand-all', 'postwalk', 'postwalk-demo', - 'postwalk-replace', 'prewalk', 'prewalk-demo', 'prewalk-replace', 'stringify-keys', 'walk', - 'append-child', 'branch?', 'children', 'down', 'edit', 'end?', 'insert-child', 'insert-left', - 'insert-right', 'left', 'leftmost', 'lefts', 'make-node', 'node', 'path', 'prev', 'right', - 'rightmost', 'rights', 'root', 'seq-zip', 'up', 'vector-zip', 'xml-zip', 'zipper' -})) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, P('-')^-1 * lexer.digit^1 * (S('./') * lexer.digit^1)^-1)) - --- Identifiers. -local word = (lexer.alpha + S('-!?*$=-')) * (lexer.alnum + S('.-!?*$+-'))^0 -lex:add_rule('identifier', token(lexer.IDENTIFIER, word)) - --- Strings. -lex:add_rule('string', token(lexer.STRING, lexer.range('"'))) - --- Comments. -local line_comment = lexer.to_eol(';') -local block_comment = lexer.range('#_(', ')') -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('`@()'))) - --- Clojure keywords. -lex:add_rule('clojure_keyword', token('clojure_keyword', ':' * S(':')^-1 * word * ('/' * word)^-1)) -lex:add_style('clojure_keyword', lexer.styles.type) -lex:add_rule('clojure_symbol', token('clojure_symbol', "\'" * word * ('/' * word)^-1)) -lex:add_style('clojure_symbol', lexer.styles.type .. {bold = true}) - --- Fold points. 
-lex:add_fold_point(lexer.COMMENT, '#_(', ')') -lex:add_fold_point(lexer.OPERATOR, '(', ')') -lex:add_fold_point(lexer.OPERATOR, '[', ']') -lex:add_fold_point(lexer.OPERATOR, '{', '}') - -lexer.property['scintillua.comment'] = ';' - -return lex diff --git a/share/vis/lexers/cmake.lua b/share/vis/lexers/cmake.lua @@ -1,493 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- CMake LPeg lexer. - -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(..., {case_insensitive_fold_points = true}) - --- Commands. -local word = (lexer.alpha + S('_-')) * (lexer.alnum + S('_-'))^0 -local builtin_command = lex:tag(lexer.FUNCTION_BUILTIN, lex:word_match('command', true)) -local command = lex:tag(lexer.FUNCTION, word) -lex:add_rule('command', (builtin_command + command) * #P('(')) - --- Constants. -local constant = lex:word_match(lexer.CONSTANT_BUILTIN, true) -local module = lex:word_match('module') -lex:add_rule('constant', lex:tag(lexer.CONSTANT_BUILTIN, constant + module)) - --- Variables. -local builtin_var = lex:word_match(lexer.VARIABLE_BUILTIN) -local expansion_var = (P('CACHE') + 'ENV') * #P('{') -lex:add_rule('variable', lex:tag(lexer.VARIABLE_BUILTIN, builtin_var + expansion_var)) - --- Generator expressions. -lex:add_rule('generator', - lex:tag(lexer.PREPROCESSOR, lpeg.B('$<') * lex:word_match(lexer.PREPROCESSOR))) - --- Operators. -lex:add_rule('operator', - lex:tag(lexer.OPERATOR, lex:word_match(lexer.OPERATOR, true) + S('=()${}<>'))) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, word)) - --- Strings. -local bracket = lpeg.Cmt('[' * lpeg.C(P('=')^0) * '[', function(input, index, eq) - local _, e = input:find(']' .. eq .. ']', index, true) - return (e or #input) + 1 -end) -local quoted = lexer.range('"') -lex:add_rule('string', lex:tag(lexer.STRING, bracket + quoted)) - --- Comments. 
-local line_comment = lexer.to_eol('#') -local bracket_comment = '#' * bracket -lex:add_rule('comment', lex:tag(lexer.COMMENT, bracket_comment + line_comment)) - --- Fold points. -lex:add_fold_point(lexer.FUNCTION_BUILTIN, 'if', 'endif') -lex:add_fold_point(lexer.FUNCTION_BUILTIN, 'foreach', 'endforeach') -lex:add_fold_point(lexer.FUNCTION_BUILTIN, 'while', 'endwhile') -lex:add_fold_point(lexer.FUNCTION_BUILTIN, 'macro', 'endmacro') -lex:add_fold_point(lexer.FUNCTION_BUILTIN, 'function', 'endfunction') -lex:add_fold_point(lexer.OPERATOR, '(', ')') - --- Word lists. -lex:set_word_list('command', { - -- Scripting commands. - 'block', 'break', 'cmake_host_system_information', 'cmake_language', 'cmake_minimum_required', - 'cmake_parse_arguments', 'cmake_path', 'cmake_policy', 'configure_file', 'continue', 'else', - 'elseif', 'endblock', 'endforeach', 'endfunction', 'endif', 'endmacro', 'endwhile', - 'execute_process', 'file', 'find_file', 'find_library', 'find_package', 'find_path', - 'find_program', 'foreach', 'function', 'get_cmake_property', 'get_directory_property', - 'get_filename_component', 'get_property', 'if', 'include', 'include_guard', 'list', 'macro', - 'mark_as_advanced', 'math', 'message', 'option', 'return', 'separate_arguments', 'set', - 'set_directory_properties', 'set_property', 'site_name', 'string', 'unset', 'variable_watch', - 'while', - -- Project commands. 
- 'add_compile_definitions', 'add_compile_options', 'add_custom_command', 'add_custom_target', - 'add_definitions', 'add_dependencies', 'add_executable', 'add_library', 'add_link_options', - 'add_subdirectory', 'add_test', 'aux_source_directory', 'build_command', 'create_test_sourcelist', - 'define_property', 'enable_language', 'enable_testing', 'export', 'fltk_wrap_ui', - 'get_source_file_property', 'get_target_property', 'get_test_property', 'include_directories', - 'include_external_msproject', 'include_regular_expression', 'install', 'link_directories', - 'link_libraries', 'load_cache', 'project', 'remove_definitions', 'set_source_files_properties', - 'set_target_properties', 'set_tests_properties', 'source_group', 'target_compile_definitions', - 'target_compile_features', 'target_compile_options', 'target_include_directories', - 'target_link_directories', 'target_link_libraries', 'target_link_options', - 'target_precompile_headers', 'target_sources', 'try_compile', 'try_run', - -- CTest commands. 
- 'ctest_build', 'ctest_configure', 'ctest_coverage', 'ctest_empty_binary_directory', - 'ctest_memcheck', 'ctest_read_custom_files', 'ctest_run_script', 'ctest_sleep', 'ctest_start', - 'ctest_submit', 'ctest_test', 'ctest_update', 'ctest_upload' -}) - -lex:set_word_list(lexer.CONSTANT_BUILTIN, 'on yes true y off no false n ignore notfound') - -lex:set_word_list('module', { - 'AndroidTestUtilities', 'BundleUtilities', 'CheckCCompilerFlag', 'CheckCompilerFlag', - 'CheckCSourceCompiles', 'CheckCSourceRuns', 'CheckCXXCompilerFlag', 'CheckCXXSourceCompiles', - 'CheckCXXSourceRuns', 'CheckCXXSymbolExists', 'CheckFortranCompilerFlag', - 'CheckFortranFunctionExists', 'CheckFortranSourceCompiles', 'CheckFortranSourceRuns', - 'CheckFunctionExists', 'CheckIncludeFileCXX', 'CheckIncludeFile', 'CheckIncludeFiles', - 'CheckIPOSupported', 'CheckLanguage', 'CheckLibraryExists', 'CheckLinkerFlag', - 'CheckOBJCCompilerFlag', 'CheckOBJCSourceCompiles', 'CheckOBJCSourceRuns', - 'CheckOBJCXXCompilerFlag', 'CheckOBJCXXSourceCompiles', 'CheckOBJCXXSourceRuns', - 'CheckPIESupported', 'CheckPrototypeDefinition', 'CheckSourceCompiles', 'CheckSourceRuns', - 'CheckStructHasMember', 'CheckSymbolExists', 'CheckTypeSize', 'CheckVariableExists', - 'CMakeAddFortranSubdirectory', 'CMakeBackwardCompatibilityCXX', 'CMakeDependentOption', - 'CMakeFindDependencyMacro', 'CMakeFindFrameworks', 'CMakeFindPackageMode', 'CMakeGraphVizOptions', - 'CMakePackageConfigHelpers', 'CMakePrintHelpers', 'CMakePrintSystemInformation', - 'CMakePushCheckState', 'CMakeVerifyManifest', 'CPack', 'CPackComponent', 'CPackIFW', - 'CPackIFWConfigureFile', 'CSharpUtilities', 'CTest', 'CTestCoverageCollectGCOV', - 'CTestScriptMode', 'CTestUseLaunchers', 'Dart', 'DeployQt4', 'ExternalData', 'ExternalProject', - 'FeatureSummary', 'FetchContent', 'FindPackageHandleStandardArgs', 'FindPackageMessage', - 'FortranCInterface', 'GenerateExportHeader', 'GetPrerequisites', 'GNUInstallDirs', 'GoogleTest', - 
'InstallRequiredSystemLibraries', 'ProcessorCount', 'SelectLibraryConfigurations', - 'SquishTestScript', 'TestBigEndian', 'TestForANSIForScope', 'TestForANSIStreamHeaders', - 'TestForSSTREAM', 'TestForSTDNamespace', 'UseEcos', 'UseJava', 'UseSWIG', 'UsewxWidgets' -}) - -lex:set_word_list(lexer.VARIABLE_BUILTIN, { - -- Variables that provide information. - 'CMAKE_AR', 'CMAKE_ARGC', 'CMAKE_ARGV0', 'CMAKE_BINARY_DIR', 'CMAKE_BUILD_TOOL', - 'CMAKE_CACHE_MAJOR_VERSION', 'CMAKE_CACHE_MINOR_VERSION', 'CMAKE_CACHE_PATCH_VERSION', - 'CMAKE_CACHEFILE_DIR', 'CMAKE_CFG_INTDIR', 'CMAKE_COMMAND', 'CMAKE_CPACK_COMMAND', - 'CMAKE_CROSSCOMPILING', 'CMAKE_CROSSCOMPILING_EMULATOR', 'CMAKE_CTEST_COMMAND', - 'CMAKE_CURRENT_BINARY_DIR', 'CMAKE_CURRENT_FUNCTION', 'CMAKE_CURRENT_FUNCTION_LIST_DIR', - 'CMAKE_CURRENT_FUNCTION_LIST_FILE', 'CMAKE_CURRENT_FUNCTION_LIST_LINE', 'CMAKE_CURRENT_LIST_DIR', - 'CMAKE_CURRENT_LIST_FILE', 'CMAKE_CURRENT_LIST_LINE', 'CMAKE_CURRENT_SOURCE_DIR', - 'CMAKE_DEBUG_TARGET_PROPERTIES', 'CMAKE_DIRECTORY_LABELS', 'CMAKE_DL_LIBS', 'CMAKE_DOTNET_SDK', - 'CMAKE_DOTNET_TARGET_FRAMEWORK', 'CMAKE_DOTNET_TARGET_FRAMEWORK_VERSION', 'CMAKE_EDIT_COMMAND', - 'CMAKE_EXECUTABLE_SUFFIX', - -- 'CMAKE_EXECUTABLE_SUFFIX_<LANG>', - 'CMAKE_EXTRA_GENERATOR', 'CMAKE_EXTRA_SHARED_LIBRARY_SUFFIXES', 'CMAKE_FIND_DEBUG_MODE', - 'CMAKE_FIND_PACKAGE_NAME', 'CMAKE_FIND_PACKAGE_REDIRECTS_DIR', - 'CMAKE_FIND_PACKAGE_SORT_DIRECTION', 'CMAKE_FIND_PACKAGE_SORT_ORDER', 'CMAKE_GENERATOR', - 'CMAKE_GENERATOR_INSTANCE', 'CMAKE_GENERATOR_PLATFORM', 'CMAKE_GENERATOR_TOOLSET', - 'CMAKE_IMPORT_LIBRARY_PREFIX', 'CMAKE_IMPORT_LIBRARY_SUFFIX', 'CMAKE_JOB_POOL_COMPILE', - 'CMAKE_JOB_POOL_LINK', 'CMAKE_JOB_POOL_PRECOMPILE_HEADER', 'CMAKE_JOB_POOLS', - -- 'CMAKE_<LANG>_COMPILER_AR', - -- 'CMAKE_<LANG>_COMPILER_FRONTEND_VARIANT', - -- 'CMAKE_<LANG>_COMPILER_RANLIB', - -- 'CMAKE_<LANG>_LINK_LIBRARY_SUFFIX', - 'CMAKE_LINK_LIBRARY_SUFFIX', 'CMAKE_LINK_SEARCH_END_STATIC', 'CMAKE_LINK_SEARCH_START_STATIC', - 
'CMAKE_MAJOR_VERSION', 'CMAKE_MAKE_PROGRAM', 'CMAKE_MATCH_COUNT', - -- 'CMAKE_MATCH_<n>', - 'CMAKE_MINIMUM_REQUIRED_VERSION', 'CMAKE_MINOR_VERSION', 'CMAKE_NETRC', 'CMAKE_NETRC_FILE', - 'CMAKE_PARENT_LIST_FILE', 'CMAKE_PATCH_VERSION', 'CMAKE_PROJECT_DESCRIPTION', - 'CMAKE_PROJECT_HOMEPAGE_URL', 'CMAKE_PROJECT_NAME', 'CMAKE_PROJECT_VERSION', - 'CMAKE_PROJECT_VERSION_MAJOR', 'CMAKE_PROJECT_VERSION_MINOR', 'CMAKE_PROJECT_VERSION_PATCH', - 'CMAKE_PROJECT_VERSION_TWEAK', 'CMAKE_RANLIB', 'CMAKE_ROOT', 'CMAKE_RULE_MESSAGES', - 'CMAKE_SCRIPT_MODE_FILE', 'CMAKE_SHARED_LIBRARY_PREFIX', 'CMAKE_SHARED_LIBRARY_SUFFIX', - 'CMAKE_SHARED_MODULE_PREFIX', 'CMAKE_SHARED_MODULE_SUFFIX', 'CMAKE_SIZEOF_VOID_P', - 'CMAKE_SKIP_INSTALL_RULES', 'CMAKE_SKIP_RPATH', 'CMAKE_SOURCE_DIR', 'CMAKE_STATIC_LIBRARY_PREFIX', - 'CMAKE_STATIC_LIBRARY_SUFFIX', 'CMAKE_Swift_MODULE_DIRECTORY', 'CMAKE_Swift_NUM_THREADS', - 'CMAKE_TOOLCHAIN_FILE', 'CMAKE_TWEAK_VERSION', 'CMAKE_VERBOSE_MAKEFILE', 'CMAKE_VERSION', - 'CMAKE_VS_DEVENV_COMMAND', 'CMAKE_VS_MSBUILD_COMMAND', 'CMAKE_VS_NsightTegra_VERSION', - 'CMAKE_VS_NUGET_PACKAGE_RESTORE', 'CMAKE_VS_PLATFORM_NAME', 'CMAKE_VS_PLATFORM_NAME_DEFAULT', - 'CMAKE_VS_PLATFORM_TOOLSET', 'CMAKE_VS_PLATFORM_TOOLSET_CUDA', - 'CMAKE_VS_PLATFORM_TOOLSET_CUDA_CUSTOM_DIR', 'CMAKE_VS_PLATFORM_TOOLSET_HOST_ARCHITECTURE', - 'CMAKE_VS_PLATFORM_TOOLSET_VERSION', 'CMAKE_VS_TARGET_FRAMEWORK_IDENTIFIER', - 'CMAKE_VS_TARGET_FRAMEWORK_TARGETS_VERSION', 'CMAKE_VS_TARGET_FRAMEWORK_VERSION', - 'CMAKE_VS_WINDOWS_TARGET_PLATFORM_VERSION', 'CMAKE_VS_WINDOWS_TARGET_PLATFORM_VERSION_MAXIMUM', - 'CMAKE_XCODE_BUILD_SYSTEM', 'CMAKE_XCODE_PLATFORM_TOOLSET', - -- '<PROJECT-NAME>_BINARY_DIR', - -- '<PROJECT-NAME>_DESCRIPTION', - -- '<PROJECT-NAME>_HOMEPAGE_URL', - -- '<PROJECT-NAME>_IS_TOP_LEVEL', - -- '<PROJECT-NAME>_SOURCE_DIR', - -- '<PROJECT-NAME>_VERSION', - -- '<PROJECT-NAME>_VERSION_MAJOR', - -- '<PROJECT-NAME>_VERSION_MINOR', - -- '<PROJECT-NAME>_VERSION_PATCH', - -- 
'<PROJECT-NAME>_VERSION_TWEAK', - 'PROJECT_BINARY_DIR', 'PROJECT_DESCRIPTION', 'PROJECT_HOMEPAGE_URL', 'PROJECT_IS_TOP_LEVEL', - 'PROJECT_NAME', 'PROJECT_SOURCE_DIR', 'PROJECT_VERSION', 'PROJECT_VERSION_MAJOR', - 'PROJECT_VERSION_MINOR', 'PROJECT_VERSION_PATCH', 'PROJECT_VERSION_TWEAK', - - -- Variables that change behavior. - 'BUILD_SHARED_LIBS', 'CMAKE_ABSOLUTE_DESTINATION_FILES', 'CMAKE_APPBUNDLE_PATH', - 'CMAKE_AUTOMOC_RELAXED_MODE', 'CMAKE_BACKWARDS_COMPATIBILITY', 'CMAKE_BUILD_TYPE', - 'CMAKE_CLANG_VFS_OVERLAY', 'CMAKE_CODEBLOCKS_COMPILER_ID', - 'CMAKE_CODEBLOCKS_EXCLUDE_EXTERNAL_FILES', 'CMAKE_CODELITE_USE_TARGETS', - 'CMAKE_COLOR_DIAGNOSTICS', 'CMAKE_COLOR_MAKEFILE', 'CMAKE_CONFIGURATION_TYPES', - 'CMAKE_DEPENDS_IN_PROJECT_ONLY', - -- 'CMAKE_DISABLE_FIND_PACKAGE_<PackageName>', - 'CMAKE_ECLIPSE_GENERATE_LINKED_RESOURCES', 'CMAKE_ECLIPSE_GENERATE_SOURCE_PROJECT', - 'CMAKE_ECLIPSE_MAKE_ARGUMENTS', 'CMAKE_ECLIPSE_RESOURCE_ENCODING', 'CMAKE_ECLIPSE_VERSION', - 'CMAKE_ERROR_DEPRECATED', 'CMAKE_ERROR_ON_ABSOLUTE_INSTALL_DESTINATION', - 'CMAKE_EXECUTE_PROCESS_COMMAND_ECHO', 'CMAKE_EXPORT_COMPILE_COMMANDS', - 'CMAKE_EXPORT_PACKAGE_REGISTRY', 'CMAKE_EXPORT_NO_PACKAGE_REGISTRY', 'CMAKE_FIND_APPBUNDLE', - 'CMAKE_FIND_FRAMEWORK', 'CMAKE_FIND_LIBRARY_CUSTOM_LIB_SUFFIX', 'CMAKE_FIND_LIBRARY_PREFIXES', - 'CMAKE_FIND_LIBRARY_SUFFIXES', 'CMAKE_FIND_NO_INSTALL_PREFIX', - 'CMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY', 'CMAKE_FIND_PACKAGE_NO_SYSTEM_PACKAGE_REGISTRY', - 'CMAKE_FIND_PACKAGE_PREFER_CONFIG', 'CMAKE_FIND_PACKAGE_RESOLVE_SYMLINKS', - 'CMAKE_FIND_PACKAGE_TARGETS_GLOBAL', 'CMAKE_FIND_PACKAGE_WARN_NO_MODULE', 'CMAKE_FIND_ROOT_PATH', - 'CMAKE_FIND_ROOT_PATH_MODE_INCLUDE', 'CMAKE_FIND_ROOT_PATH_MODE_LIBRARY', - 'CMAKE_FIND_ROOT_PATH_MODE_PACKAGE', 'CMAKE_FIND_ROOT_PATH_MODE_PROGRAM', - 'CMAKE_FIND_USE_CMAKE_ENVIRONMENT_PATH', 'CMAKE_FIND_USE_CMAKE_PATH', - 'CMAKE_FIND_USE_CMAKE_SYSTEM_PATH', 'CMAKE_FIND_USE_INSTALL_PREFIX', - 'CMAKE_FIND_USE_PACKAGE_REGISTRY', 
'CMAKE_FIND_USE_PACKAGE_ROOT_PATH', - 'CMAKE_FIND_USE_SYSTEM_ENVIRONMENT_PATH', 'CMAKE_FIND_USE_SYSTEM_PACKAGE_REGISTRY', - 'CMAKE_FRAMEWORK_PATH', 'CMAKE_IGNORE_PATH', 'CMAKE_IGNORE_PREFIX_PATH', - 'CMAKE_INCLUDE_DIRECTORIES_BEFORE', 'CMAKE_INCLUDE_DIRECTORIES_PROJECT_BEFORE', - 'CMAKE_INCLUDE_PATH', 'CMAKE_INSTALL_DEFAULT_COMPONENT_NAME', - 'CMAKE_INSTALL_DEFAULT_DIRECTORY_PERMISSIONS', 'CMAKE_INSTALL_MESSAGE', 'CMAKE_INSTALL_PREFIX', - 'CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT', 'CMAKE_LIBRARY_PATH', - 'CMAKE_LINK_DIRECTORIES_BEFORE', 'CMAKE_LINK_LIBRARIES_ONLY_TARGETS', - 'CMAKE_MAXIMUM_RECURSION_DEPTH', 'CMAKE_MESSAGE_CONTEXT', 'CMAKE_MESSAGE_CONTEXT_SHOW', - 'CMAKE_MESSAGE_INDENT', 'CMAKE_MESSAGE_LOG_LEVEL', 'CMAKE_MFC_FLAG', 'CMAKE_MODULE_PATH', - -- 'CMAKE_POLICY_DEFAULT_CMP<NNNN>', - -- 'CMAKE_POLICY_WARNING_CMP<NNNN>', - 'CMAKE_PREFIX_PATH', 'CMAKE_PROGRAM_PATH', 'CMAKE_PROJECT_INCLUDE', - 'CMAKE_PROJECT_INCLUDE_BEFORE', - -- 'CMAKE_PROJECT_<PROJECT-NAME>_INCLUDE', - -- 'CMAKE_PROJECT_<PROJECT-NAME>_INCLUDE_BEFORE', - 'CMAKE_PROJECT_TOP_LEVEL_INCLUDES', - -- 'CMAKE_REQUIRE_FIND_PACKAGE_<PackageName>', - 'CMAKE_SKIP_INSTALL_ALL_DEPENDENCY', 'CMAKE_STAGING_PREFIX', 'CMAKE_SUBLIME_TEXT_2_ENV_SETTINGS', - 'CMAKE_SUBLIME_TEXT_2_EXCLUDE_BUILD_TREE', 'CMAKE_SUPPRESS_REGENERATION', 'CMAKE_SYSROOT', - 'CMAKE_SYSROOT_COMPILE', 'CMAKE_SYSROOT_LINK', 'CMAKE_SYSTEM_APPBUNDLE_PATH', - 'CMAKE_SYSTEM_FRAMEWORK_PATH', 'CMAKE_SYSTEM_IGNORE_PATH', 'CMAKE_SYSTEM_IGNORE_PREFIX_PATH', - 'CMAKE_SYSTEM_INCLUDE_PATH', 'CMAKE_SYSTEM_LIBRARY_PATH', 'CMAKE_SYSTEM_PREFIX_PATH', - 'CMAKE_SYSTEM_PROGRAM_PATH', 'CMAKE_TLS_CAINFO', 'CMAKE_TLS_VERIFY', - 'CMAKE_USER_MAKE_RULES_OVERRIDE', 'CMAKE_WARN_DEPRECATED', - 'CMAKE_WARN_ON_ABSOLUTE_INSTALL_DESTINATION', 'CMAKE_XCODE_GENERATE_SCHEME', - 'CMAKE_XCODE_GENERATE_TOP_LEVEL_PROJECT_ONLY', 'CMAKE_XCODE_LINK_BUILD_PHASE_MODE', - 'CMAKE_XCODE_SCHEME_ADDRESS_SANITIZER', 'CMAKE_XCODE_SCHEME_ADDRESS_SANITIZER_USE_AFTER_RETURN', - 
'CMAKE_XCODE_SCHEME_DEBUG_DOCUMENT_VERSIONING', 'CMAKE_XCODE_SCHEME_DISABLE_MAIN_THREAD_CHECKER', - 'CMAKE_XCODE_SCHEME_DYNAMIC_LIBRARY_LOADS', 'CMAKE_XCODE_SCHEME_DYNAMIC_LINKER_API_USAGE', - 'CMAKE_XCODE_SCHEME_ENABLE_GPU_API_VALIDATION', - 'CMAKE_XCODE_SCHEME_ENABLE_GPU_FRAME_CAPTURE_MODE', - 'CMAKE_XCODE_SCHEME_ENABLE_GPU_SHADER_VALIDATION', 'CMAKE_XCODE_SCHEME_ENVIRONMENT', - 'CMAKE_XCODE_SCHEME_GUARD_MALLOC', 'CMAKE_XCODE_SCHEME_LAUNCH_CONFIGURATION', - 'CMAKE_XCODE_SCHEME_LAUNCH_MODE', 'CMAKE_XCODE_SCHEME_MAIN_THREAD_CHECKER_STOP', - 'CMAKE_XCODE_SCHEME_MALLOC_GUARD_EDGES', 'CMAKE_XCODE_SCHEME_MALLOC_SCRIBBLE', - 'CMAKE_XCODE_SCHEME_MALLOC_STACK', 'CMAKE_XCODE_SCHEME_THREAD_SANITIZER', - 'CMAKE_XCODE_SCHEME_THREAD_SANITIZER_STOP', 'CMAKE_XCODE_SCHEME_UNDEFINED_BEHAVIOUR_SANITIZER', - 'CMAKE_XCODE_SCHEME_UNDEFINED_BEHAVIOUR_SANITIZER_STOP', 'CMAKE_XCODE_SCHEME_WORKING_DIRECTORY', - 'CMAKE_XCODE_SCHEME_ZOMBIE_OBJECTS', 'CMAKE_XCODE_XCCONFIG', - -- '<PackageName>_ROOT', - - -- Variables that describe the system. - 'ANDROID', 'APPLE', 'BORLAND', 'BSD', 'CMAKE_ANDROID_NDK_VERSION', 'CMAKE_CL_64', - 'CMAKE_COMPILER_2005', 'CMAKE_HOST_APPLE', 'CMAKE_HOST_BSD', 'CMAKE_HOST_LINUX', - 'CMAKE_HOST_SOLARIS', 'CMAKE_HOST_SYSTEM', 'CMAKE_HOST_SYSTEM_NAME', - 'CMAKE_HOST_SYSTEM_PROCESSOR', 'CMAKE_HOST_SYSTEM_VERSION', 'CMAKE_HOST_UNIX', 'CMAKE_HOST_WIN32', - 'CMAKE_LIBRARY_ARCHITECTURE', 'CMAKE_LIBRARY_ARCHITECTURE_REGEX', 'CMAKE_OBJECT_PATH_MAX', - 'CMAKE_SYSTEM', 'CMAKE_SYSTEM_NAME', 'CMAKE_SYSTEM_PROCESSOR', 'CMAKE_SYSTEM_VERSION', 'CYGWIN', - 'GHSMULTI', 'IOS', 'LINUX', 'MINGW', 'MSVC', 'MSVC10', 'MSVC11', 'MSVC12', 'MSVC14', 'MSVC60', - 'MSVC70', 'MSVC71', 'MSVC80', 'MSVC90', 'MSVC_IDE', 'MSVC_TOOLSET_VERSION', 'MSVC_VERSION', - 'MSYS', 'UNIX', 'WIN32', 'WINCE', 'WINDOWS_PHONE', 'WINDOWS_STORE', 'XCODE', 'XCODE_VERSION', - - -- Variables that control the build. 
- 'CMAKE_ADSP_ROOT', 'CMAKE_AIX_EXPORT_ALL_SYMBOLS', 'CMAKE_ANDROID_ANT_ADDITIONAL_OPTIONS', - 'CMAKE_ANDROID_API', 'CMAKE_ANDROID_API_MIN', 'CMAKE_ANDROID_ARCH', 'CMAKE_ANDROID_ARCH_ABI', - 'CMAKE_ANDROID_ARM_MODE', 'CMAKE_ANDROID_ARM_NEON', 'CMAKE_ANDROID_ASSETS_DIRECTORIES', - 'CMAKE_ANDROID_EXCEPTIONS', 'CMAKE_ANDROID_GUI', 'CMAKE_ANDROID_JAR_DEPENDENCIES', - 'CMAKE_ANDROID_JAR_DIRECTORIES', 'CMAKE_ANDROID_JAVA_SOURCE_DIR', - 'CMAKE_ANDROID_NATIVE_LIB_DEPENDENCIES', 'CMAKE_ANDROID_NATIVE_LIB_DIRECTORIES', - 'CMAKE_ANDROID_NDK', 'CMAKE_ANDROID_NDK_DEPRECATED_HEADERS', - 'CMAKE_ANDROID_NDK_TOOLCHAIN_HOST_TAG', 'CMAKE_ANDROID_NDK_TOOLCHAIN_VERSION', - 'CMAKE_ANDROID_PROCESS_MAX', 'CMAKE_ANDROID_PROGUARD', 'CMAKE_ANDROID_PROGUARD_CONFIG_PATH', - 'CMAKE_ANDROID_RTTI', 'CMAKE_ANDROID_SECURE_PROPS_PATH', 'CMAKE_ANDROID_SKIP_ANT_STEP', - 'CMAKE_ANDROID_STANDALONE_TOOLCHAIN', 'CMAKE_ANDROID_STL_TYPE', 'CMAKE_APPLE_SILICON_PROCESSOR', - 'CMAKE_ARCHIVE_OUTPUT_DIRECTORY', - -- 'CMAKE_ARCHIVE_OUTPUT_DIRECTORY_<CONFIG>', - 'CMAKE_AUTOGEN_ORIGIN_DEPENDS', 'CMAKE_AUTOGEN_PARALLEL', 'CMAKE_AUTOGEN_VERBOSE', - 'CMAKE_AUTOMOC', 'CMAKE_AUTOMOC_COMPILER_PREDEFINES', 'CMAKE_AUTOMOC_DEPEND_FILTERS', - 'CMAKE_AUTOMOC_MACRO_NAMES', 'CMAKE_AUTOMOC_MOC_OPTIONS', 'CMAKE_AUTOMOC_PATH_PREFIX', - 'CMAKE_AUTORCC', 'CMAKE_AUTORCC_OPTIONS', 'CMAKE_AUTOUIC', 'CMAKE_AUTOUIC_OPTIONS', - 'CMAKE_AUTOUIC_SEARCH_PATHS', 'CMAKE_BUILD_RPATH', 'CMAKE_BUILD_RPATH_USE_ORIGIN', - 'CMAKE_BUILD_WITH_INSTALL_NAME_DIR', 'CMAKE_BUILD_WITH_INSTALL_RPATH', - 'CMAKE_COMPILE_PDB_OUTPUT_DIRECTORY', - -- 'CMAKE_COMPILE_PDB_OUTPUT_DIRECTORY_<CONFIG>', - 'CMAKE_COMPILE_WARNING_AS_ERROR', - -- 'CMAKE_<CONFIG>_POSTFIX', - 'CMAKE_CROSS_CONFIGS', 'CMAKE_CTEST_ARGUMENTS', 'CMAKE_CUDA_RESOLVE_DEVICE_SYMBOLS', - 'CMAKE_CUDA_RUNTIME_LIBRARY', 'CMAKE_CUDA_SEPARABLE_COMPILATION', 'CMAKE_DEBUG_POSTFIX', - 'CMAKE_DEFAULT_BUILD_TYPE', 'CMAKE_DEFAULT_CONFIGS', 'CMAKE_DEPENDS_USE_COMPILER', - 'CMAKE_DISABLE_PRECOMPILE_HEADERS', 
'CMAKE_ENABLE_EXPORTS', 'CMAKE_EXE_LINKER_FLAGS', - -- 'CMAKE_EXE_LINKER_FLAGS_<CONFIG>', - -- 'CMAKE_EXE_LINKER_FLAGS_<CONFIG>_INIT', - 'CMAKE_EXE_LINKER_FLAGS_INIT', 'CMAKE_FOLDER', 'CMAKE_Fortran_FORMAT', - 'CMAKE_Fortran_MODULE_DIRECTORY', 'CMAKE_Fortran_PREPROCESS', 'CMAKE_FRAMEWORK', - -- 'CMAKE_FRAMEWORK_MULTI_CONFIG_POSTFIX_<CONFIG>', - 'CMAKE_GHS_NO_SOURCE_GROUP_FILE', 'CMAKE_GLOBAL_AUTOGEN_TARGET', - 'CMAKE_GLOBAL_AUTOGEN_TARGET_NAME', 'CMAKE_GLOBAL_AUTORCC_TARGET', - 'CMAKE_GLOBAL_AUTORCC_TARGET_NAME', 'CMAKE_GNUtoMS', 'CMAKE_INCLUDE_CURRENT_DIR', - 'CMAKE_INCLUDE_CURRENT_DIR_IN_INTERFACE', 'CMAKE_INSTALL_NAME_DIR', - 'CMAKE_INSTALL_REMOVE_ENVIRONMENT_RPATH', 'CMAKE_INSTALL_RPATH', - 'CMAKE_INSTALL_RPATH_USE_LINK_PATH', 'CMAKE_INTERPROCEDURAL_OPTIMIZATION', - -- 'CMAKE_INTERPROCEDURAL_OPTIMIZATION_<CONFIG>', - 'CMAKE_IOS_INSTALL_COMBINED', - -- 'CMAKE_<LANG>_CLANG_TIDY', - -- 'CMAKE_<LANG>_COMPILER_LAUNCHER', - -- 'CMAKE_<LANG>_CPPCHECK', - -- 'CMAKE_<LANG>_CPPLINT', - -- 'CMAKE_<LANG>_INCLUDE_WHAT_YOU_USE', - -- 'CMAKE_<LANG>_LINK_GROUP_USING_<FEATURE>', - -- 'CMAKE_<LANG>_LINK_GROUP_USING_<FEATURE>_SUPPORTED', - -- 'CMAKE_<LANG>_LINK_LIBRARY_FILE_FLAG', - -- 'CMAKE_<LANG>_LINK_LIBRARY_FLAG', - -- 'CMAKE_<LANG>_LINK_LIBRARY_USING_<FEATURE>', - -- 'CMAKE_<LANG>_LINK_LIBRARY_USING_<FEATURE>_SUPPORTED', - -- 'CMAKE_<LANG>_LINK_WHAT_YOU_USE_FLAG', - -- 'CMAKE_<LANG>_LINKER_LAUNCHER', - -- 'CMAKE_<LANG>_VISIBILITY_PRESET', - 'CMAKE_LIBRARY_OUTPUT_DIRECTORY', - -- 'CMAKE_LIBRARY_OUTPUT_DIRECTORY_<CONFIG>', - 'CMAKE_LIBRARY_PATH_FLAG', 'CMAKE_LINK_DEF_FILE_FLAG', 'CMAKE_LINK_DEPENDS_NO_SHARED', - -- 'CMAKE_LINK_GROUP_USING_<FEATURE>', - -- 'CMAKE_LINK_GROUP_USING_<FEATURE>_SUPPORTED', - 'CMAKE_LINK_INTERFACE_LIBRARIES', 'CMAKE_LINK_LIBRARY_FILE_FLAG', 'CMAKE_LINK_LIBRARY_FLAG', - -- 'CMAKE_LINK_LIBRARY_USING_<FEATURE>', - -- 'CMAKE_LINK_LIBRARY_USING_<FEATURE>_SUPPORTED', - 'CMAKE_LINK_WHAT_YOU_USE', 'CMAKE_LINK_WHAT_YOU_USE_CHECK', 'CMAKE_MACOSX_BUNDLE', - 
'CMAKE_MACOSX_RPATH', - -- 'CMAKE_MAP_IMPORTED_CONFIG_<CONFIG>', - 'CMAKE_MODULE_LINKER_FLAGS', - -- 'CMAKE_MODULE_LINKER_FLAGS_<CONFIG>', - -- 'CMAKE_MODULE_LINKER_FLAGS_<CONFIG>_INIT', - 'CMAKE_MODULE_LINKER_FLAGS_INIT', 'CMAKE_MSVC_DEBUG_INFORMATION_FORMAT', - 'CMAKE_MSVC_RUNTIME_LIBRARY', 'CMAKE_MSVCIDE_RUN_PATH', 'CMAKE_NINJA_OUTPUT_PATH_PREFIX', - 'CMAKE_NO_BUILTIN_CHRPATH', 'CMAKE_NO_SYSTEM_FROM_IMPORTED', 'CMAKE_OPTIMIZE_DEPENDENCIES', - 'CMAKE_OSX_ARCHITECTURES', 'CMAKE_OSX_DEPLOYMENT_TARGET', 'CMAKE_OSX_SYSROOT', - 'CMAKE_PCH_INSTANTIATE_TEMPLATES', 'CMAKE_PCH_WARN_INVALID', 'CMAKE_PDB_OUTPUT_DIRECTORY', - -- 'CMAKE_PDB_OUTPUT_DIRECTORY_<CONFIG>', - 'CMAKE_PLATFORM_NO_VERSIONED_SONAME', 'CMAKE_POSITION_INDEPENDENT_CODE', - 'CMAKE_RUNTIME_OUTPUT_DIRECTORY', - -- 'CMAKE_RUNTIME_OUTPUT_DIRECTORY_<CONFIG>', - 'CMAKE_SHARED_LINKER_FLAGS', - -- 'CMAKE_SHARED_LINKER_FLAGS_<CONFIG>', - -- 'CMAKE_SHARED_LINKER_FLAGS_<CONFIG>_INIT', - 'CMAKE_SHARED_LINKER_FLAGS_INIT', 'CMAKE_SKIP_BUILD_RPATH', 'CMAKE_SKIP_INSTALL_RPATH', - 'CMAKE_STATIC_LINKER_FLAGS', - -- 'CMAKE_STATIC_LINKER_FLAGS_<CONFIG>', - -- 'CMAKE_STATIC_LINKER_FLAGS_<CONFIG>_INIT', - 'CMAKE_STATIC_LINKER_FLAGS_INIT', 'CMAKE_TASKING_TOOLSET', 'CMAKE_TRY_COMPILE_CONFIGURATION', - 'CMAKE_TRY_COMPILE_NO_PLATFORM_VARIABLES', 'CMAKE_TRY_COMPILE_PLATFORM_VARIABLES', - 'CMAKE_TRY_COMPILE_TARGET_TYPE', 'CMAKE_UNITY_BUILD', 'CMAKE_UNITY_BUILD_BATCH_SIZE', - 'CMAKE_UNITY_BUILD_UNIQUE_ID', 'CMAKE_USE_RELATIVE_PATHS', 'CMAKE_VERIFY_INTERFACE_HEADER_SETS', - 'CMAKE_VISIBILITY_INLINES_HIDDEN', 'CMAKE_VS_GLOBALS', - 'CMAKE_VS_INCLUDE_INSTALL_TO_DEFAULT_BUILD', 'CMAKE_VS_INCLUDE_PACKAGE_TO_DEFAULT_BUILD', - 'CMAKE_VS_JUST_MY_CODE_DEBUGGING', 'CMAKE_VS_NO_COMPILE_BATCHING', - 'CMAKE_VS_SDK_EXCLUDE_DIRECTORIES', 'CMAKE_VS_SDK_EXECUTABLE_DIRECTORIES', - 'CMAKE_VS_SDK_INCLUDE_DIRECTORIES', 'CMAKE_VS_SDK_LIBRARY_DIRECTORIES', - 'CMAKE_VS_SDK_LIBRARY_WINRT_DIRECTORIES', 'CMAKE_VS_SDK_REFERENCE_DIRECTORIES', - 
'CMAKE_VS_SDK_SOURCE_DIRECTORIES', 'CMAKE_VS_WINRT_BY_DEFAULT', 'CMAKE_WATCOM_RUNTIME_LIBRARY', - 'CMAKE_WIN32_EXECUTABLE', 'CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS', - -- 'CMAKE_XCODE_ATTRIBUTE_<an-attribute>', - 'EXECUTABLE_OUTPUT_PATH', 'LIBRARY_OUTPUT_PATH', - - -- Variables for languages. - 'CMAKE_C_COMPILE_FEATURES', 'CMAKE_C_EXTENSIONS', 'CMAKE_C_STANDARD', 'CMAKE_C_STANDARD_REQUIRED', - 'CMAKE_COMPILER_IS_GNUCC', 'CMAKE_COMPILER_IS_GNUCXX', 'CMAKE_COMPILER_IS_GNUG77', - 'CMAKE_CUDA_ARCHITECTURES', 'CMAKE_CUDA_COMPILE_FEATURES', 'CMAKE_CUDA_EXTENSIONS', - 'CMAKE_CUDA_HOST_COMPILER', 'CMAKE_CUDA_STANDARD', 'CMAKE_CUDA_STANDARD_REQUIRED', - 'CMAKE_CUDA_TOOLKIT_INCLUDE_DIRECTORIES', 'CMAKE_CXX_COMPILE_FEATURES', 'CMAKE_CXX_EXTENSIONS', - 'CMAKE_CXX_STANDARD', 'CMAKE_CXX_STANDARD_REQUIRED', 'CMAKE_Fortran_MODDIR_DEFAULT', - 'CMAKE_Fortran_MODDIR_FLAG', 'CMAKE_Fortran_MODOUT_FLAG', 'CMAKE_HIP_ARCHITECTURES', - 'CMAKE_HIP_EXTENSIONS', 'CMAKE_HIP_STANDARD', 'CMAKE_HIP_STANDARD_REQUIRED', - 'CMAKE_ISPC_HEADER_DIRECTORY', 'CMAKE_ISPC_HEADER_SUFFIX', 'CMAKE_ISPC_INSTRUCTION_SETS', - -- 'CMAKE_<LANG>_ANDROID_TOOLCHAIN_MACHINE', - -- 'CMAKE_<LANG>_ANDROID_TOOLCHAIN_PREFIX', - -- 'CMAKE_<LANG>_ANDROID_TOOLCHAIN_SUFFIX', - -- 'CMAKE_<LANG>_ARCHIVE_APPEND', - -- 'CMAKE_<LANG>_ARCHIVE_CREATE', - -- 'CMAKE_<LANG>_ARCHIVE_FINISH', - -- 'CMAKE_<LANG>_BYTE_ORDER', - -- 'CMAKE_<LANG>_COMPILE_OBJECT', - -- 'CMAKE_<LANG>_COMPILER', - -- 'CMAKE_<LANG>_COMPILER_EXTERNAL_TOOLCHAIN', - -- 'CMAKE_<LANG>_COMPILER_ID', - -- 'CMAKE_<LANG>_COMPILER_LOADED', - -- 'CMAKE_<LANG>_COMPILER_PREDEFINES_COMMAND', - -- 'CMAKE_<LANG>_COMPILER_TARGET', - -- 'CMAKE_<LANG>_COMPILER_VERSION', - -- 'CMAKE_<LANG>_CREATE_SHARED_LIBRARY', - -- 'CMAKE_<LANG>_CREATE_SHARED_MODULE', - -- 'CMAKE_<LANG>_CREATE_STATIC_LIBRARY', - -- 'CMAKE_<LANG>_EXTENSIONS', - -- 'CMAKE_<LANG>_EXTENSIONS_DEFAULT', - -- 'CMAKE_<LANG>_FLAGS', - -- 'CMAKE_<LANG>_FLAGS_<CONFIG>', - -- 'CMAKE_<LANG>_FLAGS_<CONFIG>_INIT', - -- 
'CMAKE_<LANG>_FLAGS_DEBUG', - -- 'CMAKE_<LANG>_FLAGS_DEBUG_INIT', - -- 'CMAKE_<LANG>_FLAGS_INIT', - -- 'CMAKE_<LANG>_FLAGS_MINSIZEREL', - -- 'CMAKE_<LANG>_FLAGS_MINSIZEREL_INIT', - -- 'CMAKE_<LANG>_FLAGS_RELEASE', - -- 'CMAKE_<LANG>_FLAGS_RELEASE_INIT', - -- 'CMAKE_<LANG>_FLAGS_RELWITHDEBINFO', - -- 'CMAKE_<LANG>_FLAGS_RELWITHDEBINFO_INIT', - -- 'CMAKE_<LANG>_IGNORE_EXTENSIONS', - -- 'CMAKE_<LANG>_IMPLICIT_INCLUDE_DIRECTORIES', - -- 'CMAKE_<LANG>_IMPLICIT_LINK_DIRECTORIES', - -- 'CMAKE_<LANG>_IMPLICIT_LINK_FRAMEWORK_DIRECTORIES', - -- 'CMAKE_<LANG>_IMPLICIT_LINK_LIBRARIES', - -- 'CMAKE_<LANG>_LIBRARY_ARCHITECTURE', - -- 'CMAKE_<LANG>_LINK_EXECUTABLE', - -- 'CMAKE_<LANG>_LINKER_WRAPPER_FLAG', - -- 'CMAKE_<LANG>_LINKER_WRAPPER_FLAG_SEP', - -- 'CMAKE_<LANG>_OUTPUT_EXTENSION', - -- 'CMAKE_<LANG>_SIMULATE_ID', - -- 'CMAKE_<LANG>_SIMULATE_VERSION', - -- 'CMAKE_<LANG>_SIZEOF_DATA_PTR', - -- 'CMAKE_<LANG>_SOURCE_FILE_EXTENSIONS', - -- 'CMAKE_<LANG>_STANDARD', - -- 'CMAKE_<LANG>_STANDARD_DEFAULT', - -- 'CMAKE_<LANG>_STANDARD_INCLUDE_DIRECTORIES', - -- 'CMAKE_<LANG>_STANDARD_LIBRARIES', - -- 'CMAKE_<LANG>_STANDARD_REQUIRED', - 'CMAKE_OBJC_EXTENSIONS', 'CMAKE_OBJC_STANDARD', 'CMAKE_OBJC_STANDARD_REQUIRED', - 'CMAKE_OBJCXX_EXTENSIONS', 'CMAKE_OBJCXX_STANDARD', 'CMAKE_OBJCXX_STANDARD_REQUIRED', - 'CMAKE_Swift_LANGUAGE_VERSION', - -- 'CMAKE_USER_MAKE_RULES_OVERRIDE_<LANG>', - - -- Variables for CTest. 
- 'CTEST_BINARY_DIRECTORY', 'CTEST_BUILD_COMMAND', 'CTEST_BUILD_NAME', 'CTEST_BZR_COMMAND', - 'CTEST_BZR_UPDATE_OPTIONS', 'CTEST_CHANGE_ID', 'CTEST_CHECKOUT_COMMAND', - 'CTEST_CONFIGURATION_TYPE', 'CTEST_CONFIGURE_COMMAND', 'CTEST_COVERAGE_COMMAND', - 'CTEST_COVERAGE_EXTRA_FLAGS', 'CTEST_CURL_OPTIONS', 'CTEST_CUSTOM_COVERAGE_EXCLUDE', - 'CTEST_CUSTOM_ERROR_EXCEPTION', 'CTEST_CUSTOM_ERROR_MATCH', 'CTEST_CUSTOM_ERROR_POST_CONTEXT', - 'CTEST_CUSTOM_ERROR_PRE_CONTEXT', 'CTEST_CUSTOM_MAXIMUM_FAILED_TEST_OUTPUT_SIZE', - 'CTEST_CUSTOM_MAXIMUM_NUMBER_OF_ERRORS', 'CTEST_CUSTOM_MAXIMUM_NUMBER_OF_WARNINGS', - 'CTEST_CUSTOM_MAXIMUM_PASSED_TEST_OUTPUT_SIZE', 'CTEST_CUSTOM_MEMCHECK_IGNORE', - 'CTEST_CUSTOM_POST_MEMCHECK', 'CTEST_CUSTOM_POST_TEST', 'CTEST_CUSTOM_PRE_MEMCHECK', - 'CTEST_CUSTOM_PRE_TEST', 'CTEST_CUSTOM_TEST_OUTPUT_TRUNCATION', 'CTEST_CUSTOM_TESTS_IGNORE', - 'CTEST_CUSTOM_WARNING_EXCEPTION', 'CTEST_CUSTOM_WARNING_MATCH', 'CTEST_CVS_CHECKOUT', - 'CTEST_CVS_COMMAND', 'CTEST_CVS_UPDATE_OPTIONS', 'CTEST_DROP_LOCATION', 'CTEST_DROP_METHOD', - 'CTEST_DROP_SITE', 'CTEST_DROP_SITE_CDASH', 'CTEST_DROP_SITE_PASSWORD', 'CTEST_DROP_SITE_USER', - 'CTEST_EXTRA_COVERAGE_GLOB', 'CTEST_GIT_COMMAND', 'CTEST_GIT_INIT_SUBMODULES', - 'CTEST_GIT_UPDATE_CUSTOM', 'CTEST_GIT_UPDATE_OPTIONS', 'CTEST_HG_COMMAND', - 'CTEST_HG_UPDATE_OPTIONS', 'CTEST_LABELS_FOR_SUBPROJECTS', 'CTEST_MEMORYCHECK_COMMAND', - 'CTEST_MEMORYCHECK_COMMAND_OPTIONS', 'CTEST_MEMORYCHECK_SANITIZER_OPTIONS', - 'CTEST_MEMORYCHECK_SUPPRESSIONS_FILE', 'CTEST_MEMORYCHECK_TYPE', 'CTEST_NIGHTLY_START_TIME', - 'CTEST_P4_CLIENT', 'CTEST_P4_COMMAND', 'CTEST_P4_OPTIONS', 'CTEST_P4_UPDATE_OPTIONS', - 'CTEST_RESOURCE_SPEC_FILE', 'CTEST_RUN_CURRENT_SCRIPT', 'CTEST_SCP_COMMAND', - 'CTEST_SCRIPT_DIRECTORY', 'CTEST_SITE', 'CTEST_SOURCE_DIRECTORY', - 'CTEST_SUBMIT_INACTIVITY_TIMEOUT', 'CTEST_SUBMIT_URL', 'CTEST_SVN_COMMAND', 'CTEST_SVN_OPTIONS', - 'CTEST_SVN_UPDATE_OPTIONS', 'CTEST_TEST_LOAD', 'CTEST_TEST_TIMEOUT', 'CTEST_TRIGGER_SITE', - 
'CTEST_UPDATE_COMMAND', 'CTEST_UPDATE_OPTIONS', 'CTEST_UPDATE_VERSION_ONLY', - 'CTEST_UPDATE_VERSION_OVERRIDE', 'CTEST_USE_LAUNCHERS', - - -- Variables for CPack. - 'CPACK_ABSOLUTE_DESTINATION_FILES', 'CPACK_COMPONENT_INCLUDE_TOPLEVEL_DIRECTORY', - 'CPACK_CUSTOM_INSTALL_VARIABLES', 'CPACK_ERROR_ON_ABSOLUTE_INSTALL_DESTINATION', - 'CPACK_INCLUDE_TOPLEVEL_DIRECTORY', 'CPACK_INSTALL_DEFAULT_DIRECTORY_PERMISSIONS', - 'CPACK_PACKAGING_INSTALL_PREFIX', 'CPACK_SET_DESTDIR', - 'CPACK_WARN_ON_ABSOLUTE_INSTALL_DESTINATION' -}) - -lex:set_word_list(lexer.PREPROCESSOR, { - 'IF', 'BOOL', -- conditional - 'AND', 'OR', 'NOT', -- logical - 'STREQUAL', 'EQUAL', -- string comparison - 'VERSION_LESS', 'VERSION_GREATER', 'VERSION_EQUAL', 'VERSION_LESS_EQUAL', 'VERSION_GREATER_EQUAL', -- version comparison - 'LOWER_CASE', 'UPPER_CASE', 'MAKE_C_IDENTIFIER', -- string transformations - 'IN_LIST', 'JOIN', 'REMOVE_DUPLICATES', 'FILTER', -- list expressions - 'PATH_EQUAL', 'PATH', -- path expressions (note: cannot use ':' parts) - 'SHELL_PATH', -- shell paths - 'CONFIG', 'OUTPUT_CONFIG', 'COMMAND_CONFIG', -- configuration expressions - 'PLATFORM_ID', -- platform - 'C_COMPILER_VERSION', 'CXX_COMPILER_VERSION', 'CUDA_COMPILER_VERSION', 'OBJC_COMPILER_VERSION', - 'OBJCXX_COMPILER_VERSION', 'Fortran_COMPILER_VERSION', 'HIP_COMPILER_VERSION', - 'ISPC_COMPILER_VERSION', -- compiler version - 'C_COMPILER_ID', 'CXX_COMPILER_ID', 'CUDA_COMPILER_ID', 'OBJC_COMPILER_ID', 'OBJCXX_COMPILER_ID', - 'Fortran_COMPILER_ID', 'HIP_COMPILER_ID', 'ISPC_COMPILER_ID', 'COMPILE_LANGUAGE', - 'COMPILE_LANG_AND_ID', 'COMPILE_FEATURES', -- compile features - 'LINK_LANGUAGE', 'LINK_LANG_AND_ID', -- linker language and ID - 'LINK_LIBRARY', 'LINK_GROUP', -- link features - 'LINK_ONLY', 'DEVICE_LINK', 'HOST_LINK', -- link context - 'TARGET_EXISTS', 'TARGET_NAME_IF_EXISTS', 'TARGET_NAME', 'TARGET_PROPERTY', 'TARGET_OBJECTS', - 'TARGET_POLICY', 'TARGET_FILE', 'TARGET_FILE_BASE_NAME', 'TARGET_FILE_PREFIX', - 
'TARGET_FILE_SUFFIX', 'TARGET_FILE_NAME', 'TARGET_FILE_DIR', 'TARGET_LINKER_FILE', - 'TARGET_LINKER_FILE_BASE_NAME', 'TARGET_LINKER_FILE_PREFIX', 'TARGET_LINKER_FILE_SUFFIX', - 'TARGET_LINKER_FILE_NAME', 'TARGET_LINKER_FILE_DIR', 'TARGET_SONAME_FILE', - 'TARGET_SONAME_FILE_NAME', 'TARGET_SONAME_FILE_DIR', 'TARGET_PDB_FILE', - 'TARGET_PDB_FILE_BASE_NAME', 'TARGET_PDB_FILE_NAME', 'TARGET_PDB_FILE_DIR', 'TARGET_BUNDLE_DIR', - 'TARGET_BUNDLE_DIR_NAME', 'TARGET_BUNDLE_CONTENT_DIR', 'TARGET_RUNTIME_DLLS', -- target-dependent expressions - 'INSTALL_INTERFACE', 'BUILD_INTERFACE', 'INSTALL_PREFIX', -- export and install expressions - 'GENEX_EVAL', 'TARGET_GENEX_EVAL', -- multi-level expression evaluation - 'ANGLE-R', 'COMMA', 'SEMICOLON' -- escaped characters -}) - -lex:set_word_list(lexer.OPERATOR, { - -- Unary test. - 'EXISTS', 'COMMAND', 'DEFINED', - -- Binary test. - 'EQUAL', 'LESS', 'LESS_EQUAL', 'GREATER', 'GREATER_EQUAL', 'STREQUAL', 'STRLESS', 'STRLESS_EQUAL', - 'STRGREATER', 'STRGREATER_EQUAL', 'VERSION_EQUAL', 'VERSION_LESS', 'VERSION_LESS_EQUAL', - 'VERSION_GREATER', 'VERSION_GREATER_EQUAL', 'PATH_EQUAL', 'MATCHES', - -- Logical. - 'NOT', 'AND', 'OR' -}) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/coffeescript.lua b/share/vis/lexers/coffeescript.lua @@ -1,50 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- CoffeeScript LPeg lexer. - -local lexer = require('lexer') -local word_match = lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('coffeescript', {fold_by_indentation = true}) - --- Whitespace. -lex:add_rule('whitespace', lex:tag(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. 
-lex:add_rule('keyword', lex:tag(lexer.KEYWORD, word_match{ - 'all', 'and', 'bind', 'break', 'by', 'case', 'catch', 'class', 'const', 'continue', 'default', - 'delete', 'do', 'each', 'else', 'enum', 'export', 'extends', 'false', 'finally', 'for', - 'function', 'if', 'import', 'in', 'instanceof', 'is', 'isnt', 'let', 'loop', 'native', 'new', - 'no', 'not', 'of', 'off', 'on', 'or', 'return', 'super', 'switch', 'then', 'this', 'throw', - 'true', 'try', 'typeof', 'unless', 'until', 'var', 'void', 'when', 'while', 'with', 'yes' -})) - --- Fields: object properties and methods. -lex:add_rule('field', - lex:tag(lexer.FUNCTION, '.' * (S('_$') + lexer.alpha) * (S('_$') + lexer.alnum)^0)) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local sq_str = lexer.range("'") -local dq_str = lexer.range('"') -local string = lex:tag(lexer.STRING, sq_str + dq_str) -local regex_str = lexer.after_set('+-*%<>!=^&|?~:;,([{', lexer.range('/', true) * S('igm')^0) -local regex = lex:tag(lexer.REGEX, regex_str) -lex:add_rule('string', string + regex) - --- Comments. -local block_comment = lexer.range('###') -local line_comment = lexer.to_eol('#', true) -lex:add_rule('comment', lex:tag(lexer.COMMENT, block_comment + line_comment)) - --- Numbers. -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number)) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;,.()[]{}'))) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/container.lua b/share/vis/lexers/container.lua @@ -1,5 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Container LPeg lexer. --- This is SciTE's plain text lexer. - -return require('lexer').new('container') diff --git a/share/vis/lexers/context.lua b/share/vis/lexers/context.lua @@ -1,54 +0,0 @@ --- Copyright 2006-2024 Robert Gieseke, Lars Otter. See LICENSE. --- ConTeXt LPeg lexer. 
- -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('context') - --- TeX and ConTeXt mkiv environment definitions. -local beginend = (P('begin') + 'end') -local startstop = (P('start') + 'stop') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Comments. -lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('%'))) - --- Sections. -local wm_section = word_match{ - 'chapter', 'part', 'section', 'subject', 'subsection', 'subsubject', 'subsubsection', - 'subsubsubject', 'subsubsubsection', 'subsubsubsubject', 'title' -} -local section = token(lexer.CLASS, '\\' * startstop^-1 * wm_section) -lex:add_rule('section', section) - --- TeX and ConTeXt mkiv environments. -local environment = token(lexer.STRING, '\\' * (beginend + startstop) * lexer.alpha^1) -lex:add_rule('environment', environment) - --- Commands. -local command = token(lexer.KEYWORD, '\\' * - (lexer.alpha^1 * P('\\') * lexer.space^1 + lexer.alpha^1 + S('!"#$%&\',./;=[\\]_{|}~`^-'))) -lex:add_rule('command', command) - --- Operators. -local operator = token(lexer.OPERATOR, S('#$_[]{}~^')) -lex:add_rule('operator', operator) - --- Fold points. -lex:add_fold_point('environment', '\\start', '\\stop') -lex:add_fold_point('environment', '\\begin', '\\end') -lex:add_fold_point(lexer.OPERATOR, '{', '}') - --- Embedded Lua. -local luatex = lexer.load('lua') -local luatex_start_rule = #P('\\startluacode') * environment -local luatex_end_rule = #P('\\stopluacode') * environment -lex:embed(luatex, luatex_start_rule, luatex_end_rule) - -lexer.property['scintillua.comment'] = '%' - -return lex diff --git a/share/vis/lexers/cpp.lua b/share/vis/lexers/cpp.lua @@ -1,287 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- C++ LPeg lexer. - -local lexer = lexer -local P, S, B = lpeg.P, lpeg.S, lpeg.B - -local lex = lexer.new(...) - --- Keywords. 
-lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD))) - --- Types. -local basic_type = lex:tag(lexer.TYPE, lex:word_match(lexer.TYPE)) -local stl_type = lex:tag(lexer.TYPE .. '.stl', 'std::' * lex:word_match(lexer.TYPE .. '.stl')) -lex:add_rule('type', basic_type + stl_type * -P('::')) - --- Functions. -local non_member = -(B('.') + B('->') + B('::')) -local builtin_func = lex:tag(lexer.FUNCTION_BUILTIN, - P('std::')^-1 * lex:word_match(lexer.FUNCTION_BUILTIN)) -local stl_func = lex:tag(lexer.FUNCTION_BUILTIN .. '.stl', - 'std::' * lex:word_match(lexer.FUNCTION_BUILTIN .. '.stl')) -local func = lex:tag(lexer.FUNCTION, lexer.word) -local method = (B('.') + B('->')) * lex:tag(lexer.FUNCTION_METHOD, lexer.word) -lex:add_rule('function', - (non_member * (stl_func + builtin_func) + method + func) * #(lexer.space^0 * '(')) - --- Constants. -local const = - lex:tag(lexer.CONSTANT_BUILTIN, P('std::')^-1 * lex:word_match(lexer.CONSTANT_BUILTIN)) -local stl_const = lex:tag(lexer.CONSTANT_BUILTIN .. '.stl', - 'std::' * lex:word_match(lexer.CONSTANT_BUILTIN .. '.stl')) -lex:add_rule('constants', stl_const + const) - --- Strings. -local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"', true) -lex:add_rule('string', lex:tag(lexer.STRING, ('u8' + S('LuU'))^-1 * (sq_str + dq_str))) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) - --- Comments. -local line_comment = lexer.to_eol('//', true) -local block_comment = lexer.range('/*', '*/') -lex:add_rule('comment', lex:tag(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number_("'"))) - --- Preprocessor. 
-local include = lex:tag(lexer.PREPROCESSOR, '#' * S('\t ')^0 * 'include') * - (lex:get_rule('whitespace') * lex:tag(lexer.STRING, lexer.range('<', '>', true)))^-1 -local preproc = lex:tag(lexer.PREPROCESSOR, '#' * S('\t ')^0 * lex:word_match(lexer.PREPROCESSOR)) -lex:add_rule('preprocessor', include + preproc) - --- Attributes. -lex:add_rule('attribute', lex:tag(lexer.ATTRIBUTE, '[[' * lex:word_match(lexer.ATTRIBUTE) * ']]')) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;,.()[]{}'))) - --- Fold points. -lex:add_fold_point(lexer.PREPROCESSOR, 'if', 'endif') -lex:add_fold_point(lexer.PREPROCESSOR, 'ifdef', 'endif') -lex:add_fold_point(lexer.PREPROCESSOR, 'ifndef', 'endif') -lex:add_fold_point(lexer.OPERATOR, '{', '}') -lex:add_fold_point(lexer.COMMENT, '/*', '*/') - --- Word lists. -lex:set_word_list(lexer.KEYWORD, { - 'asm', 'auto', 'break', 'case', 'catch', 'class', 'const', 'const_cast', 'continue', 'default', - 'delete', 'do', 'dynamic_cast', 'else', 'explicit', 'export', 'extern', 'false', 'for', 'friend', - 'goto', 'if', 'inline', 'mutable', 'namespace', 'new', 'operator', 'private', 'protected', - 'public', 'register', 'reinterpret_cast', 'return', 'sizeof', 'static', 'static_cast', 'switch', - 'template', 'this', 'throw', 'true', 'try', 'typedef', 'typeid', 'typename', 'using', 'virtual', - 'volatile', 'while', - -- Operators. - 'and', 'and_eq', 'bitand', 'bitor', 'compl', 'not', 'not_eq', 'or', 'or_eq', 'xor', 'xor_eq', - -- C++11. 
- 'alignas', 'alignof', 'constexpr', 'decltype', 'final', 'noexcept', 'nullptr', 'override', - 'static_assert', 'thread_local', -- - 'consteval', 'constinit', 'co_await', 'co_return', 'co_yield', 'requires' -- C++20 -}) - -lex:set_word_list(lexer.TYPE, { - 'bool', 'char', 'double', 'enum', 'float', 'int', 'long', 'short', 'signed', 'struct', 'union', - 'unsigned', 'void', 'wchar_t', -- - 'char16_t', 'char32_t', -- C++11 - 'char8_t', -- C++20 - -- <cstddef> - 'size_t', 'ptrdiff_t', 'max_align_t', -- - 'byte', -- C++17 - -- <cstdint> - 'int8_t', 'int16_t', 'int32_t', 'int64_t', 'int_fast8_t', 'int_fast16_t', 'int_fast32_t', - 'int_fast64_t', 'int_least8_t', 'int_least16_t', 'int_least32_t', 'int_least64_t', 'intmax_t', - 'intptr_t', 'uint8_t', 'uint16_t', 'uint32_t', 'uint64_t', 'uint_fast8_t', 'uint_fast16_t', - 'uint_fast32_t', 'uint_fast64_t', 'uint_least8_t', 'uint_least16_t', 'uint_least32_t', - 'uint_least64_t', 'uintmax_t', 'uintptr_t' -}) - -lex:set_word_list(lexer.TYPE .. '.stl', { - 'any', 'bad_any_cast', -- <any> C++17 - 'array', -- <array> C++11 - 'atomic', -- <atomic> C++11 - 'barrier', -- <barrier> C++20 - 'bitset', -- <bitset> - -- <concepts> C++20 - 'same_as', 'derived_from', 'convertible_to', 'common_reference_with', 'common_with', 'integral', - 'signed_integral', 'unsigned_integral', 'floating_point', 'assignable_from', 'swappable', - 'swappable_with', 'destructible', 'constructible_from', 'default_initializable', - 'move_constructible', 'copy_constructible', 'equality_comparable', 'equality_comparable_with', - 'movable', 'copyable', 'semiregular', 'regular', 'invocable', 'regular_invocable', 'predicate', - 'relation', 'equivalence_relation', 'strict_weak_order', -- - 'complex', -- <complex> - 'deque', -- <deque> - 'exception', 'bad_exception', -- <exception> - 'forward_list', -- <forward_list> C++11 - 'function', 'hash', -- <functional> C++11 - -- <future> C++11 - 'promise', 'packaged_task', 'future', 'shared_future', 'launch', 'future_status', 
'future_error', - 'future_errc', -- - 'initializer_list', -- <initializer_list> - 'istream', 'iostream', -- <istream> - -- <iterator> - 'reverse_iterator', 'back_insert_iterator', 'front_insert_iterator', 'insert_iterator', - 'istream_iterator', 'ostream_iterator', 'istreambuf_iterator', 'ostreambuf_iterator', -- - 'move_iterator', -- C++11 - 'latch', -- <latch> C++20 - 'list', -- <list> - -- <map> - 'map', 'multimap', -- - 'unordered_set', 'unordered_map', 'unordered_multiset', 'unordered_multimap', -- C++11 - 'unique_ptr', 'shared_ptr', 'weak_ptr', -- <memory> C++11 - -- <mutex> C++11 - 'mutex', 'timed_mutex', 'recursive_mutex', 'recursive_timed_mutex', 'lock_guard', 'unique_lock', -- - 'scoped_lock', -- C++17 - 'optional', 'bad_optional_access', -- <optional> C++17 - 'ostream', -- <ostream> - 'queue', 'priority_queue', -- <queue> - -- <random> C++11 - 'linear_congruential_engine', 'mersenne_twister_engine', 'subtract_with_carry_engine', - 'discard_block_engine', 'independent_bits_engine', 'shuffle_order_engine', 'random_device', - 'uniform_int_distribution', 'uniform_real_distribution', 'bernoulli_distribution', - 'binomial_distribution', 'negative_binomial_distribution', 'geometric_distribution', - 'poisson_distribution', 'exponential_distribution', 'gamma_distribution', 'weibull_distribution', - 'extreme_value_distribution', 'normal_distribution', 'lognormal_distribution', - 'chi_squared_distribution', 'cauchy_distribution', 'fisher_f_distribution', - 'student_t_distribution', 'discrete_distibution', 'piecewise_constant_distribution', - 'piecewise_linear_distribution', 'seed_seq', -- - 'ratio', -- <ratio> C++11 - -- <regex> C++11 - 'regex', 'csub_match', 'ssub_match', 'cmatch', 'smatch', 'cregex_iterator', 'sregex_iterator', - 'cregex_token_iterator', 'sregex_token_iterator', 'regex_error', 'regex_traits', -- - 'counting_semaphore', 'binary_semaphore', -- <semaphore> C++20 - 'set', 'multiset', -- <set> - 'span', -- <span> C++20 - 'stringbuf', 'istringstream', 
'ostringstream', 'stringstream', -- <stringstream> - 'stack', -- <stack> - -- <stdexcept> - 'logic_error', 'invalid_argument', 'domain_error', 'length_error', 'out_of_range', - 'runtime_error', 'range_error', 'overflow_error', 'underflow_error', -- - 'streambuf', -- <streambuf> - -- <string> - 'string', -- - 'u16string', 'u32string', -- C++11 - 'u8string', -- C++20 - -- <string_view> C++17 - 'string_view', 'u16string_view', 'u32string_view', -- - 'u8string_view', -- C++20 - 'syncbuf', 'osyncstream', -- <syncstream> C++20 - 'thread', -- <thread> C++11 - 'tuple', 'tuple_size', 'tuple_element', -- <tuple> C++11 - 'pair', -- <utility> - 'variant', 'monostate', 'bad_variant_access', 'variant_size', 'variant_alternative', -- <variant> C++17 - 'vector' -- <vector> -}) - -lex:set_word_list(lexer.FUNCTION_BUILTIN, { - 'assert', -- <cassert> - -- <cctype> - 'isalnum', 'isalpha', 'islower', 'isupper', 'isdigit', 'isxdigit', 'iscntrl', 'isgraph', - 'isspace', 'isprint', 'ispunct', 'tolower', 'toupper', -- - 'isblank', -- C++11 - 'va_start', 'va_arg', 'va_end', -- <cstdarg> - -- <cmath> - 'abs', 'fmod', 'exp', 'log', 'log10', 'pow', 'sqrt', 'sin', 'cos', 'tan', 'asin', 'acos', 'atan', - 'atan2', 'sinh', 'cosh', 'tanh', 'ceil', 'floor', 'frexp', 'ldexp', 'modf', - -- C++11. - 'remainder', 'remquo', 'exp2', 'expm1', 'log2', 'log1p', 'cbrt', 'hypot', 'asinh', 'acosh', - 'atanh', 'erf', 'erfc', 'tgamma', 'lgamma', 'trunc', 'round', 'nearbyint', 'rint', 'scalbn', - 'ilogb', 'logb', 'nextafter', 'copysign', 'isfinite', 'isinf', 'isnan', 'isnormal', 'signbit', - 'isgreater', 'isgreaterequal', 'isless', 'islessequal', 'islessgreater', 'isunordered', -- - -- C++17. 
- 'assoc_laguerre', 'assoc_legendre', 'beta', 'comp_ellint_1', 'comp_ellint_2', 'comp_ellint_3', - 'cyl_bessel_i', 'cyl_bessel_j', 'cyl_bessel_k', 'cyl_neumann', 'ellint_1', 'ellint_2', 'ellint_3', - 'expint', 'lhermite', 'lgendre', 'laguerre', 'riemann_zeta', 'sph_bessel', 'sph_legendre', - 'sph_neumann', -- - 'lerp', -- C++20 - -- <cstring> - 'strcpy', 'strncpy', 'strcat', 'strncat', 'strxfrm', 'strlen', 'strcmp', 'strncmp', 'strcoll', - 'strchr', 'strrchr', 'strspn', 'strcspn', 'strpbrk', 'strstr', 'strtok', 'memchr', 'memcmp', - 'memset', 'memcpy', 'memmove', 'strerror' -}) - -lex:set_word_list(lexer.FUNCTION_BUILTIN .. '.stl', { - -- <algorithm> - 'for_each', 'count', 'count_if', 'mismatch', 'find', 'find_if', 'find_end', 'find_first_of', - 'adjacent_find', 'search', 'search_n', 'copy', 'copy_backward', 'fill', 'fill_n', 'transform', - 'generate', 'generate_n', 'remove', 'remove_if', 'remove_copy', 'remove_copy_if', 'replace', - 'replace_if', 'replace_copy', 'replace_copy_if', 'swap', 'swap_ranges', 'iter_swap', 'reverse', - 'reverse_copy', 'rotate', 'rotate_copy', 'unique_copy', 'partition', 'stable_partition', 'sort', - 'partial_sort', 'partial_sort_copy', 'stable_sort', 'nth_element', 'lower_bound', 'upper_bound', - 'binary_search', 'equal_range', 'merge', 'inplace_merge', 'includes', 'set_difference', - 'set_intersection', 'set_symmetric_difference', 'set_union', 'make_heap', 'push_heap', 'pop_heap', - 'sort_heap', 'max', 'max_element', 'min', 'min_element', 'equal', 'lexicographical_compare', - 'next_permutation', 'prev_permutation', -- - -- C++11. 
- 'all_of', 'any_of', 'none_of', 'find_if_not', 'copy_if', 'copy_n', 'move', 'move_backward', - 'shuffle', 'is_partitioned', 'partition_copy', 'partition_point', 'is_sorted', 'is_sorted_until', - 'is_heap', 'is_heap_until', 'minmax', 'minmax_element', 'is_permutation', -- - 'for_each_n', 'random_shuffle', 'sample', 'clamp', -- C++17 - 'shift_left', 'shift_right', 'lexicographical_compare_three_way', -- C++20 - 'make_any', 'any_cast', -- <any> C++17 - -- <bit> C++20 - 'bit_cast', 'byteswap', 'has_single_bit', 'bit_ceil', 'bit_floor', 'bit_width', 'rotl', 'rotr', - 'countl_zero', 'countl_one', 'countl_zero', 'countr_one', 'popcount', -- - 'from_chars', 'to_chars', -- <charconv> C++17 - -- <format> C++20 - 'format', 'format_to', 'format_to_n', 'formatted_size', 'vformat', 'vformat_to', - 'visit_format_arg', 'make_format_args', -- - 'async', 'future_category', -- <future> C++11 - -- <iterator> - 'front_inserter', 'back_inserter', 'inserter', -- - 'make_move_iterator', -- C++11 - 'make_reverse_iterator', -- C++14 - -- <memory> - 'make_shared', 'allocate_shared', 'static_pointer_cast', 'dynamic_pointer_cast', - 'const_pointer_cast', -- - 'make_unique', -- C++14 - 'reinterpret_pointer_cast', -- C++17 - 'try_lock', 'lock', 'call_once', -- <mutex> C++11 - -- <numeric> - 'accumulate', 'inner_product', 'adjacent_difference', 'partial_sum', -- - 'iota', -- C++11 - 'reduce', 'transform_reduce', 'inclusive_scan', 'exclusive_scan', 'gcd', 'lcm', -- C++17 - 'midpoint', -- C++20 - 'make_optional', -- <optional> C++17 - 'generate_canonical', -- <random> C++11 - 'regex_match', 'regex_search', 'regex_replace', -- <regex> C++11 - 'as_bytes', 'as_writable_bytes', -- <span> C++20 - -- <tuple> C++11 - 'make_tuple', 'tie', 'forward_as_tuple', 'tuple_cat', -- - 'apply', 'make_from_tuple', -- C++17 - -- <utility> - 'swap', 'make_pair', 'get', -- - 'forward', 'move', 'move_if_noexcept', 'declval', -- C++11 - 'exchange', -- C++14 - 'as_const', -- C++17 - -- C++20. 
- 'cmp_equal', 'cmp_not_equal', 'cmp_less', 'cmp_greater', 'cmp_less_equal', 'cmp_greater_equal', - 'in_range', -- - 'visit', 'holds_alternative', 'get_if' -- <variant> C++17 -}) - -lex:set_word_list(lexer.CONSTANT_BUILTIN .. '.stl', { - 'cin', 'cout', 'cerr', 'clog', -- <iostream> - 'endl', 'ends', 'flush', -- <ostream> - 'nullopt' -- <optional> C++17 -}) - -lex:set_word_list(lexer.PREPROCESSOR, { - 'define', 'defined', 'elif', 'else', 'endif', 'error', 'if', 'ifdef', 'ifndef', 'import', 'line', - 'pragma', 'undef', 'using', 'warning', -- - 'export', 'include', 'module' -- C++20 -}) - -lex:set_word_list(lexer.ATTRIBUTE, { - 'carries_dependency', 'noreturn', -- C++11 - 'deprecated', -- C++14 - 'fallthrough', 'maybe_unused', 'nodiscard', -- C++17 - 'likely', 'no_unique_address', 'unlikely' -- C++20 -}) - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/crystal.lua b/share/vis/lexers/crystal.lua @@ -1,98 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Copyright 2017 Michel Martens. --- Crystal LPeg lexer (based on Ruby). - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('crystal') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'alias', 'begin', 'break', 'case', 'class', 'def', 'defined?', 'do', 'else', 'elsif', 'end', - 'ensure', 'false', 'for', 'if', 'in', 'module', 'next', 'nil', 'not', 'redo', 'rescue', 'retry', - 'return', 'self', 'super', 'then', 'true', 'undef', 'unless', 'until', 'when', 'while', 'yield', - '__FILE__', '__LINE__' -})) - --- Functions. 
-lex:add_rule('function', token(lexer.FUNCTION, word_match{ - 'abort', 'at_exit', 'caller', 'delay', 'exit', 'fork', 'future', 'get_stack_top', 'gets', 'lazy', - 'loop', 'main', 'p', 'print', 'printf', 'puts', 'raise', 'rand', 'read_line', 'require', 'sleep', - 'spawn', 'sprintf', 'system', 'with_color', - -- Macros. - 'assert_responds_to', 'debugger', 'parallel', 'pp', 'record', 'redefine_main' -}) * -S('.:|')) - --- Identifiers. -local word_char = lexer.alnum + S('_!?') -local word = (lexer.alpha + '_') * word_char^0 -lex:add_rule('identifier', token(lexer.IDENTIFIER, word)) - --- Comments. -lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#', true))) - --- Strings. -local cmd_str = lexer.range('`') -local sq_str = lexer.range("'") -local dq_str = lexer.range('"') -local heredoc = '<<' * P(function(input, index) - local _, e, indented, _, delimiter = input:find('^(%-?)(["`]?)([%a_][%w_]*)%2[\n\r\f;]+', index) - if not delimiter then return end - local end_heredoc = (#indented > 0 and '[\n\r\f]+ *' or '[\n\r\f]+') - _, e = input:find(end_heredoc .. delimiter, e) - return e and e + 1 or #input + 1 -end) -local string = token(lexer.STRING, (sq_str + dq_str + heredoc + cmd_str) * S('f')^-1) --- TODO: regex_str fails with `obj.method /patt/` syntax. -local regex_str = lexer.after_set('!%^&*([{-=+|:;,?<>~', lexer.range('/', true) * S('iomx')^0) -local regex = token(lexer.REGEX, regex_str) -lex:add_rule('string', string + regex) - --- Numbers. -local numeric_literal = '?' * (lexer.any - lexer.space) * -word_char -- TODO: meta, control, etc. -lex:add_rule('number', token(lexer.NUMBER, lexer.number_('_') * S('ri')^-1 + numeric_literal)) - --- Variables. -local global_var = '$' * - (word + S('!@L+`\'=~/\\,.;<>_*"$?:') + lexer.digit + '-' * S('0FadiIKlpvw')) -local class_var = '@@' * word -local inst_var = '@' * word -lex:add_rule('variable', token(lexer.VARIABLE, global_var + class_var + inst_var)) - --- Symbols. 
-lex:add_rule('symbol', token('symbol', ':' * P(function(input, index) - if input:sub(index - 2, index - 2) ~= ':' then return true end -end) * (word_char^1 + sq_str + dq_str))) -lex:add_style('symbol', lexer.styles.constant) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('!%^&*()[]{}-=+/|:;.,?<>~'))) - --- Fold points. -local function disambiguate(text, pos, line, s) - return line:sub(1, s - 1):match('^%s*$') and not text:sub(1, pos - 1):match('\\[ \t]*\r?\n$') and - 1 or 0 -end -lex:add_fold_point(lexer.KEYWORD, 'begin', 'end') -lex:add_fold_point(lexer.KEYWORD, 'case', 'end') -lex:add_fold_point(lexer.KEYWORD, 'class', 'end') -lex:add_fold_point(lexer.KEYWORD, 'def', 'end') -lex:add_fold_point(lexer.KEYWORD, 'do', 'end') -lex:add_fold_point(lexer.KEYWORD, 'for', 'end') -lex:add_fold_point(lexer.KEYWORD, 'module', 'end') -lex:add_fold_point(lexer.KEYWORD, 'if', disambiguate) -lex:add_fold_point(lexer.KEYWORD, 'while', disambiguate) -lex:add_fold_point(lexer.KEYWORD, 'unless', disambiguate) -lex:add_fold_point(lexer.KEYWORD, 'until', disambiguate) -lex:add_fold_point(lexer.OPERATOR, '(', ')') -lex:add_fold_point(lexer.OPERATOR, '[', ']') -lex:add_fold_point(lexer.OPERATOR, '{', '}') - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/csharp.lua b/share/vis/lexers/csharp.lua @@ -1,65 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- C# LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('csharp') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. 
-lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'class', 'delegate', 'enum', 'event', 'interface', 'namespace', 'struct', 'using', 'abstract', - 'const', 'explicit', 'extern', 'fixed', 'implicit', 'internal', 'lock', 'out', 'override', - 'params', 'partial', 'private', 'protected', 'public', 'ref', 'sealed', 'static', 'readonly', - 'unsafe', 'virtual', 'volatile', 'add', 'as', 'assembly', 'base', 'break', 'case', 'catch', - 'checked', 'continue', 'default', 'do', 'else', 'finally', 'for', 'foreach', 'get', 'goto', 'if', - 'in', 'is', 'new', 'remove', 'return', 'set', 'sizeof', 'stackalloc', 'super', 'switch', 'this', - 'throw', 'try', 'typeof', 'unchecked', 'value', 'var', 'void', 'while', 'yield', 'null', 'true', - 'false' -})) - --- Types. -lex:add_rule('type', token(lexer.TYPE, word_match{ - 'bool', 'byte', 'char', 'decimal', 'double', 'float', 'int', 'long', 'object', 'operator', - 'sbyte', 'short', 'string', 'uint', 'ulong', 'ushort' -})) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Comments. -local line_comment = lexer.to_eol('//', true) -local block_comment = lexer.range('/*', '*/') -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Strings. -local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"', true) -local ml_str = P('@')^-1 * lexer.range('"', false, false) -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str + ml_str)) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number * S('lLdDfFmM')^-1)) - --- Preprocessor. -lex:add_rule('preprocessor', token(lexer.PREPROCESSOR, '#' * S('\t ')^0 * - word_match('define elif else endif error if line undef warning region endregion'))) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('~!.,:;+-*/<>=\\^|&%?()[]{}'))) - --- Fold points. 
-lex:add_fold_point(lexer.PREPROCESSOR, 'if', 'endif') -lex:add_fold_point(lexer.PREPROCESSOR, 'ifdef', 'endif') -lex:add_fold_point(lexer.PREPROCESSOR, 'ifndef', 'endif') -lex:add_fold_point(lexer.PREPROCESSOR, 'region', 'endregion') -lex:add_fold_point(lexer.OPERATOR, '{', '}') -lex:add_fold_point(lexer.COMMENT, '/*', '*/') - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/css.lua b/share/vis/lexers/css.lua @@ -1,201 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- CSS LPeg lexer. - -local lexer = lexer -local P, S, B = lpeg.P, lpeg.S, lpeg.B - -local lex = lexer.new(..., {no_user_word_lists = true}) - --- Tags. -local sel_prefix = B(S('#.')) -local sel_suffix = lexer.space^0 * S('!>+.,:[{') -lex:add_rule('tag', -sel_prefix * lex:tag(lexer.TAG, lex:word_match(lexer.TAG) * #sel_suffix)) - --- Properties. -lex:add_rule('property', lex:tag('property', lex:word_match('property')) * #P(':')) - --- Values. -lex:add_rule('value', - -sel_prefix * lex:tag(lexer.CONSTANT_BUILTIN, lex:word_match('value')) * -sel_suffix) - --- Functions. -lex:add_rule('function', lex:tag(lexer.FUNCTION_BUILTIN, lex:word_match(lexer.FUNCTION_BUILTIN))) - --- Colors. -local color_name = lex:tag(lexer.CONSTANT_BUILTIN, lex:word_match('color')) -local xdigit = lexer.xdigit -local color_value = lex:tag(lexer.NUMBER, - '#' * xdigit * xdigit * xdigit * (xdigit * xdigit * xdigit)^-1) -lex:add_rule('color', color_name + color_value) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.alpha * (lexer.alnum + S('_-'))^0)) - --- Pseudo classes and pseudo elements. -lex:add_rule('pseudoclass', ':' * lex:tag('pseudoclass', lex:word_match('pseudoclass'))) -lex:add_rule('pseudoelement', '::' * lex:tag('pseudoelement', lex:word_match('pseudoelement'))) - --- Strings. -local sq_str = lexer.range("'") -local dq_str = lexer.range('"') -lex:add_rule('string', lex:tag(lexer.STRING, sq_str + dq_str)) - --- Comments. 
-lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.range('/*', '*/'))) - --- Numbers. -local unit = lex:word_match('unit') + '%' -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number * unit^-1)) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('~!#*>+=|.,:;()[]{}'))) - --- At rule. -lex:add_rule('at_rule', lex:tag(lexer.PREPROCESSOR, '@' * lex:word_match('at_rule'))) - --- Fold points. -lex:add_fold_point(lexer.OPERATOR, '{', '}') -lex:add_fold_point(lexer.COMMENT, '/*', '*/') - --- Word lists. -lex:set_word_list(lexer.TAG, { - 'a', 'abbr', 'address', 'article', 'aside', 'audio', 'b', 'bdi', 'bdo', 'blockquote', 'body', - 'button', 'canvas', 'caption', 'cite', 'code', 'colgroup', 'content', 'data', 'datalist', 'dd', - 'decorator', 'del', 'details', 'dfn', 'div', 'dl', 'dt', 'element', 'em', 'fieldset', - 'figcaption', 'figure', 'footer', 'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'head', 'header', - 'html', 'i', 'iframe', 'ins', 'kbd', 'label', 'legend', 'li', 'main', 'map', 'mark', 'menu', - 'menuitem', 'meter', 'nav', 'noscript', 'object', 'ol', 'optgroup', 'option', 'output', 'p', - 'pre', 'progress', 'q', 'rp', 'rt', 'ruby', 's', 'samp', 'script', 'section', 'select', 'shadow', - 'small', 'spacer', 'span', 'strong', 'style', 'sub', 'summary', 'sup', 'table', 'tbody', 'td', - 'template', 'textarea', 'tfoot', 'th', 'thead', 'time', 'title', 'tr', 'u', 'ul', 'var', 'video', -- - 'area', 'base', 'br', 'col', 'command', 'embed', 'hr', 'img', 'input', 'keygen', 'link', 'meta', - 'param', 'source', 'track', 'wbr' -}) - -lex:set_word_list('property', { - -- CSS 1. 
- 'color', 'background-color', 'background-image', 'background-repeat', 'background-attachment', - 'background-position', 'background', 'font-family', 'font-style', 'font-variant', 'font-weight', - 'font-size', 'font', 'word-spacing', 'letter-spacing', 'text-decoration', 'vertical-align', - 'text-transform', 'text-align', 'text-indent', 'line-height', 'margin-top', 'margin-right', - 'margin-bottom', 'margin-left', 'margin', 'padding-top', 'padding-right', 'padding-bottom', - 'padding-left', 'padding', 'border-top-width', 'border-right-width', 'border-bottom-width', - 'border-left-width', 'border-width', 'border-top', 'border-right', 'border-bottom', 'border-left', - 'border', 'border-color', 'border-style', 'width', 'height', 'float', 'clear', 'display', - 'white-space', 'list-style-type', 'list-style-image', 'list-style-position', 'list-style', - -- CSS 2. - 'border-top-color', 'border-right-color', 'border-bottom-color', 'border-left-color', - 'border-color', 'border-top-style', 'border-right-style', 'border-bottom-style', - 'border-left-style', 'border-style', 'top', 'right', 'bottom', 'left', 'position', 'z-index', - 'direction', 'unicode-bidi', 'min-width', 'max-width', 'min-height', 'max-height', 'overflow', - 'clip', 'visibility', 'content', 'quotes', 'counter-reset', 'counter-increment', 'marker-offset', - 'size', 'marks', 'page-break-before', 'page-break-after', 'page-break-inside', 'page', 'orphans', - 'widows', 'font-stretch', 'font-size-adjust', 'unicode-range', 'units-per-em', 'src', 'panose-1', - 'stemv', 'stemh', 'slope', 'cap-height', 'x-height', 'ascent', 'descent', 'widths', 'bbox', - 'definition-src', 'baseline', 'centerline', 'mathline', 'topline', 'text-shadow', 'caption-side', - 'table-layout', 'border-collapse', 'border-spacing', 'empty-cells', 'speak-header', 'cursor', - 'outline', 'outline-width', 'outline-style', 'outline-color', 'volume', 'speak', 'pause-before', - 'pause-after', 'pause', 'cue-before', 'cue-after', 'cue', 'play-during', 
'azimuth', 'elevation', - 'speech-rate', 'voice-family', 'pitch', 'pitch-range', 'stress', 'richness', 'speak-punctuation', - 'speak-numeral', - -- CSS 3. - 'flex', 'flex-basis', 'flex-direction', 'flex-flow', 'flex-grow', 'flex-shrink', 'flex-wrap', - 'align-content', 'align-items', 'align-self', 'justify-content', 'order', 'border-radius', - 'transition', 'transform', 'box-shadow', 'filter', 'opacity', 'resize', 'word-break', 'word-wrap', - 'box-sizing', 'animation', 'text-overflow' -}) - -lex:set_word_list('value', { - -- CSS 1. - 'auto', 'none', 'normal', 'italic', 'oblique', 'small-caps', 'bold', 'bolder', 'lighter', - 'xx-small', 'x-small', 'small', 'medium', 'large', 'x-large', 'xx-large', 'larger', 'smaller', - 'transparent', 'repeat', 'repeat-x', 'repeat-y', 'no-repeat', 'scroll', 'fixed', 'top', 'bottom', - 'left', 'center', 'right', 'justify', 'both', 'underline', 'overline', 'line-through', 'blink', - 'baseline', 'sub', 'super', 'text-top', 'middle', 'text-bottom', 'capitalize', 'uppercase', - 'lowercase', 'thin', 'medium', 'thick', 'dotted', 'dashed', 'solid', 'double', 'groove', 'ridge', - 'inset', 'outset', 'block', 'inline', 'list-item', 'pre', 'no-wrap', 'inside', 'outside', 'disc', - 'circle', 'square', 'decimal', 'lower-roman', 'upper-roman', 'lower-alpha', 'upper-alpha', 'aqua', - 'black', 'blue', 'fuchsia', 'gray', 'green', 'lime', 'maroon', 'navy', 'olive', 'purple', 'red', - 'silver', 'teal', 'white', 'yellow', - -- CSS 2. 
- 'inherit', 'run-in', 'compact', 'marker', 'table', 'inline-table', 'table-row-group', - 'table-header-group', 'table-footer-group', 'table-row', 'table-column-group', 'table-column', - 'table-cell', 'table-caption', 'static', 'relative', 'absolute', 'fixed', 'ltr', 'rtl', 'embed', - 'bidi-override', 'visible', 'hidden', 'scroll', 'collapse', 'open-quote', 'close-quote', - 'no-open-quote', 'no-close-quote', 'decimal-leading-zero', 'lower-greek', 'lower-latin', - 'upper-latin', 'hebrew', 'armenian', 'georgian', 'cjk-ideographic', 'hiragana', 'katakana', - 'hiragana-iroha', 'katakana-iroha', 'landscape', 'portrait', 'crop', 'cross', 'always', 'avoid', - 'wider', 'narrower', 'ultra-condensed', 'extra-condensed', 'condensed', 'semi-condensed', - 'semi-expanded', 'expanded', 'extra-expanded', 'ultra-expanded', 'caption', 'icon', 'menu', - 'message-box', 'small-caption', 'status-bar', 'separate', 'show', 'hide', 'once', 'crosshair', - 'default', 'pointer', 'move', 'text', 'wait', 'help', 'e-resize', 'ne-resize', 'nw-resize', - 'n-resize', 'se-resize', 'sw-resize', 's-resize', 'w-resize', 'ActiveBorder', 'ActiveCaption', - 'AppWorkspace', 'Background', 'ButtonFace', 'ButtonHighlight', 'ButtonShadow', - 'InactiveCaptionText', 'ButtonText', 'CaptionText', 'GrayText', 'Highlight', 'HighlightText', - 'InactiveBorder', 'InactiveCaption', 'InfoBackground', 'InfoText', 'Menu', 'MenuText', - 'Scrollbar', 'ThreeDDarkShadow', 'ThreeDFace', 'ThreeDHighlight', 'ThreeDLightShadow', - 'ThreeDShadow', 'Window', 'WindowFrame', 'WindowText', 'silent', 'x-soft', 'soft', 'medium', - 'loud', 'x-loud', 'spell-out', 'mix', 'left-side', 'far-left', 'center-left', 'center-right', - 'far-right', 'right-side', 'behind', 'leftwards', 'rightwards', 'below', 'level', 'above', - 'higher', 'lower', 'x-slow', 'slow', 'medium', 'fast', 'x-fast', 'faster', 'slower', 'male', - 'female', 'child', 'x-low', 'low', 'high', 'x-high', 'code', 'digits', 'continous', - -- CSS 3. 
- 'flex', 'row', 'column', 'ellipsis', 'inline-block' -}) - -lex:set_word_list(lexer.FUNCTION_BUILTIN, { - 'attr', 'blackness', 'blend', 'blenda', 'blur', 'brightness', 'calc', 'circle', 'color-mod', - 'contrast', 'counter', 'cubic-bezier', 'device-cmyk', 'drop-shadow', 'ellipse', 'gray', - 'grayscale', 'hsl', 'hsla', 'hue', 'hue-rotate', 'hwb', 'image', 'inset', 'invert', 'lightness', - 'linear-gradient', 'matrix', 'matrix3d', 'opacity', 'perspective', 'polygon', 'radial-gradient', - 'rect', 'repeating-linear-gradient', 'repeating-radial-gradient', 'rgb', 'rgba', 'rotate', - 'rotate3d', 'rotateX', 'rotateY', 'rotateZ', 'saturate', 'saturation', 'scale', 'scale3d', - 'scaleX', 'scaleY', 'scaleZ', 'sepia', 'shade', 'skewX', 'skewY', 'steps', 'tint', 'toggle', - 'translate', 'translate3d', 'translateX', 'translateY', 'translateZ', 'url', 'whiteness', 'var' -}) - -lex:set_word_list('color', { - 'aliceblue', 'antiquewhite', 'aqua', 'aquamarine', 'azure', 'beige', 'bisque', 'black', - 'blanchedalmond', 'blue', 'blueviolet', 'brown', 'burlywood', 'cadetblue', 'chartreuse', - 'chocolate', 'coral', 'cornflowerblue', 'cornsilk', 'crimson', 'cyan', 'darkblue', 'darkcyan', - 'darkgoldenrod', 'darkgray', 'darkgreen', 'darkgrey', 'darkkhaki', 'darkmagenta', - 'darkolivegreen', 'darkorange', 'darkorchid', 'darkred', 'darksalmon', 'darkseagreen', - 'darkslateblue', 'darkslategray', 'darkslategrey', 'darkturquoise', 'darkviolet', 'deeppink', - 'deepskyblue', 'dimgray', 'dimgrey', 'dodgerblue', 'firebrick', 'floralwhite', 'forestgreen', - 'fuchsia', 'gainsboro', 'ghostwhite', 'gold', 'goldenrod', 'gray', 'green', 'greenyellow', 'grey', - 'honeydew', 'hotpink', 'indianred', 'indigo', 'ivory', 'khaki', 'lavender', 'lavenderblush', - 'lawngreen', 'lemonchiffon', 'lightblue', 'lightcoral', 'lightcyan', 'lightgoldenrodyellow', - 'lightgray', 'lightgreen', 'lightgrey', 'lightpink', 'lightsalmon', 'lightseagreen', - 'lightskyblue', 'lightslategray', 'lightslategrey', 'lightsteelblue', 
'lightyellow', 'lime', - 'limegreen', 'linen', 'magenta', 'maroon', 'mediumaquamarine', 'mediumblue', 'mediumorchid', - 'mediumpurple', 'mediumseagreen', 'mediumslateblue', 'mediumspringgreen', 'mediumturquoise', - 'mediumvioletred', 'midnightblue', 'mintcream', 'mistyrose', 'moccasin', 'navajowhite', 'navy', - 'oldlace', 'olive', 'olivedrab', 'orange', 'orangered', 'orchid', 'palegoldenrod', 'palegreen', - 'paleturquoise', 'palevioletred', 'papayawhip', 'peachpuff', 'peru', 'pink', 'plum', 'powderblue', - 'purple', 'rebeccapurple', 'red', 'rosybrown', 'royalblue', 'saddlebrown', 'salmon', 'sandybrown', - 'seagreen', 'seashell', 'sienna', 'silver', 'skyblue', 'slateblue', 'slategray', 'slategrey', - 'snow', 'springgreen', 'steelblue', 'tan', 'teal', 'thistle', 'tomato', 'transparent', - 'turquoise', 'violet', 'wheat', 'white', 'whitesmoke', 'yellow', 'yellowgreen' -}) - -lex:set_word_list('pseudoclass', { - 'active', 'checked', 'disabled', 'empty', 'enabled', 'first-child', 'first-of-type', 'focus', - 'hover', 'in-range', 'invalid', 'lang', 'last-child', 'last-of-type', 'link', 'not', 'nth-child', - 'nth-last-child', 'nth-last-of-type', 'nth-of-type', 'only-of-type', 'only-child', 'optional', - 'out-of-range', 'read-only', 'read-write', 'required', 'root', 'target', 'valid', 'visited' -}) - -lex:set_word_list('pseudoelement', 'after before first-letter first-line selection') - -lex:set_word_list('unit', { - 'ch', 'cm', 'deg', 'dpcm', 'dpi', 'dppx', 'em', 'ex', 'grad', 'Hz', 'in', 'kHz', 'mm', 'ms', 'pc', - 'pt', 'px', 'q', 'rad', 'rem', 's', 'turn', 'vh', 'vmax', 'vmin', 'vw' -}) - -lex:set_word_list('at_rule', 'charset font-face media page import namespace keyframes') - -lexer.property['scintillua.comment'] = '/*|*/' -lexer.property['scintillua.word.chars'] = - 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-' - -return lex diff --git a/share/vis/lexers/cuda.lua b/share/vis/lexers/cuda.lua @@ -1,62 +0,0 @@ --- Copyright 2006-2024 Mitchell. 
See LICENSE. --- CUDA LPeg lexer. - -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(..., {inherit = lexer.load('cpp')}) - --- Word lists. -lex:set_word_list(lexer.KEYWORD, '__global__ __host__ __device__ __constant__ __shared__', true) - -lex:set_word_list(lexer.TYPE, { - 'uint', 'int1', 'uint1', 'int2', 'uint2', 'int3', 'uint3', 'int4', 'uint4', 'float1', 'float2', - 'float3', 'float4', 'char1', 'char2', 'char3', 'char4', 'uchar1', 'uchar2', 'uchar3', 'uchar4', - 'short1', 'short2', 'short3', 'short4', 'dim1', 'dim2', 'dim3', 'dim4' -}, true) - -lex:set_word_list(lexer.FUNCTION_BUILTIN, { - -- Atom. - 'atomicAdd', 'atomicAnd', 'atomicCAS', 'atomicDec', 'atomicExch', 'atomicInc', 'atomicMax', - 'atomicMin', 'atomicOr', 'atomicSub', 'atomicXor', -- - -- Dev. - 'tex1D', 'tex1Dfetch', 'tex2D', '__float_as_int', '__int_as_float', '__float2int_rn', - '__float2int_rz', '__float2int_ru', '__float2int_rd', '__float2uint_rn', '__float2uint_rz', - '__float2uint_ru', '__float2uint_rd', '__int2float_rn', '__int2float_rz', '__int2float_ru', - '__int2float_rd', '__uint2float_rn', '__uint2float_rz', '__uint2float_ru', '__uint2float_rd', - '__fadd_rz', '__fmul_rz', '__fdividef', '__mul24', '__umul24', '__mulhi', '__umulhi', '__mul64hi', - '__umul64hi', 'min', 'umin', 'fminf', 'fmin', 'max', 'umax', 'fmaxf', 'fmax', 'abs', 'fabsf', - 'fabs', 'sqrtf', 'sqrt', 'sinf', '__sinf', 'sin', 'cosf', '__cosf', 'cos', 'sincosf', '__sincosf', - 'expf', '__expf', 'exp', 'logf', '__logf', 'log', -- - -- Runtime. 
- 'cudaBindTexture', 'cudaBindTextureToArray', 'cudaChooseDevice', 'cudaConfigureCall', - 'cudaCreateChannelDesc', 'cudaD3D10GetDevice', 'cudaD3D10MapResources', - 'cudaD3D10RegisterResource', 'cudaD3D10ResourceGetMappedArray', 'cudaD3D10ResourceGetMappedPitch', - 'cudaD3D10ResourceGetMappedPointer', 'cudaD3D10ResourceGetMappedSize', - 'cudaD3D10ResourceGetSurfaceDimensions', 'cudaD3D10ResourceSetMapFlags', - 'cudaD3D10SetDirect3DDevice', 'cudaD3D10UnmapResources', 'cudaD3D10UnregisterResource', - 'cudaD3D9GetDevice', 'cudaD3D9GetDirect3DDevice', 'cudaD3D9MapResources', - 'cudaD3D9RegisterResource', 'cudaD3D9ResourceGetMappedArray', 'cudaD3D9ResourceGetMappedPitch', - 'cudaD3D9ResourceGetMappedPointer', 'cudaD3D9ResourceGetMappedSize', - 'cudaD3D9ResourceGetSurfaceDimensions', 'cudaD3D9ResourceSetMapFlags', - 'cudaD3D9SetDirect3DDevice', 'cudaD3D9UnmapResources', 'cudaD3D9UnregisterResource', - 'cudaEventCreate', 'cudaEventDestroy', 'cudaEventElapsedTime', 'cudaEventQuery', - 'cudaEventRecord', 'cudaEventSynchronize', 'cudaFree', 'cudaFreeArray', 'cudaFreeHost', - 'cudaGetChannelDesc', 'cudaGetDevice', 'cudaGetDeviceCount', 'cudaGetDeviceProperties', - 'cudaGetErrorString', 'cudaGetLastError', 'cudaGetSymbolAddress', 'cudaGetSymbolSize', - 'cudaGetTextureAlignmentOffset', 'cudaGetTextureReference', 'cudaGLMapBufferObject', - 'cudaGLRegisterBufferObject', 'cudaGLSetGLDevice', 'cudaGLUnmapBufferObject', - 'cudaGLUnregisterBufferObject', 'cudaLaunch', 'cudaMalloc', 'cudaMalloc3D', 'cudaMalloc3DArray', - 'cudaMallocArray', 'cudaMallocHost', 'cudaMallocPitch', 'cudaMemcpy', 'cudaMemcpy2D', - 'cudaMemcpy2DArrayToArray', 'cudaMemcpy2DFromArray', 'cudaMemcpy2DToArray', 'cudaMemcpy3D', - 'cudaMemcpyArrayToArray', 'cudaMemcpyFromArray', 'cudaMemcpyFromSymbol', 'cudaMemcpyToArray', - 'cudaMemcpyToSymbol', 'cudaMemset', 'cudaMemset2D', 'cudaMemset3D', 'cudaSetDevice', - 'cudaSetupArgument', 'cudaStreamCreate', 'cudaStreamDestroy', 'cudaStreamQuery', - 'cudaStreamSynchronize', 
'cudaThreadExit', 'cudaThreadSynchronize', 'cudaUnbindTexture' -}, true) - -lex:set_word_list(lexer.CONSTANT_BUILTIN, 'gridDim blockIdx blockDim threadIdx', true) - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/dart.lua b/share/vis/lexers/dart.lua @@ -1,56 +0,0 @@ --- Copyright 2013-2024 Mitchell. See LICENSE. --- Dart LPeg lexer. --- Written by Brian Schott (@Hackerpilot on Github). - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('dart') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'assert', 'break', 'case', 'catch', 'class', 'const', 'continue', 'default', 'do', 'else', 'enum', - 'extends', 'false', 'final', 'finally', 'for', 'if', 'in', 'is', 'new', 'null', 'rethrow', - 'return', 'super', 'switch', 'this', 'throw', 'true', 'try', 'var', 'void', 'while', 'with' -})) - --- Built-ins. -lex:add_rule('builtin', token(lexer.CONSTANT, word_match{ - 'abstract', 'as', 'dynamic', 'export', 'external', 'factory', 'get', 'implements', 'import', - 'library', 'operator', 'part', 'set', 'static', 'typedef' -})) - --- Strings. -local sq_str = S('r')^-1 * lexer.range("'", true) -local dq_str = S('r')^-1 * lexer.range('"', true) -local tq_str = S('r')^-1 * (lexer.range("'''") + lexer.range('"""')) -lex:add_rule('string', token(lexer.STRING, tq_str + sq_str + dq_str)) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Comments. -local line_comment = lexer.to_eol('//', true) -local block_comment = lexer.range('/*', '*/', false, false, true) -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Operators. 
-lex:add_rule('operator', token(lexer.OPERATOR, S('#?=!<>+-*$/%&|^~.,;()[]{}'))) - --- Annotations. -lex:add_rule('annotation', token(lexer.ANNOTATION, '@' * lexer.word^1)) - --- Fold points. -lex:add_fold_point(lexer.OPERATOR, '{', '}') -lex:add_fold_point(lexer.COMMENT, '/*', '*/') - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/desktop.lua b/share/vis/lexers/desktop.lua @@ -1,49 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Desktop Entry LPeg lexer. - -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(...) - --- Keys. -lex:add_rule('key', lex:tag(lexer.VARIABLE_BUILTIN, lex:word_match(lexer.VARIABLE_BUILTIN))) - --- Values. -lex:add_rule('value', lex:tag(lexer.CONSTANT_BUILTIN, lexer.word_match('true false'))) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.alpha * (lexer.alnum + S('_-'))^0)) - --- Group headers. -local bracketed = lexer.range('[', ']') -lex:add_rule('header', lexer.starts_line(lex:tag(lexer.HEADING, bracketed))) - --- Locales. -lex:add_rule('locale', lex:tag(lexer.TYPE, bracketed)) - --- Strings. -lex:add_rule('string', lex:tag(lexer.STRING, lexer.range('"'))) - --- Comments. -lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.to_eol('#'))) - --- Numbers. -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number)) - --- Field codes. -lex:add_rule('code', lex:tag(lexer.CONSTANT_BUILTIN, '%' * S('fFuUdDnNickvm'))) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('='))) - --- Word lists. 
-lex:set_word_list(lexer.VARIABLE_BUILTIN, { - 'Type', 'Version', 'Name', 'GenericName', 'NoDisplay', 'Comment', 'Icon', 'Hidden', 'OnlyShowIn', - 'NotShowIn', 'TryExec', 'Exec', 'Exec', 'Path', 'Terminal', 'MimeType', 'Categories', - 'StartupNotify', 'StartupWMClass', 'URL' -}) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/diff.lua b/share/vis/lexers/diff.lua @@ -1,25 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Diff LPeg lexer. - -local lexer = lexer -local to_eol, starts_line = lexer.to_eol, lexer.starts_line -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(..., {lex_by_line = true}) - --- Text, file headers, and separators. -lex:add_rule('index', lex:tag(lexer.COMMENT, to_eol(starts_line('Index: ')))) -lex:add_rule('header', lex:tag(lexer.HEADING, to_eol(starts_line(P('*** ') + '--- ' + '+++ ')))) -lex:add_rule('separator', lex:tag(lexer.COMMENT, to_eol(starts_line(P('---') + '****' + '=')))) - --- Location. -lex:add_rule('location', lex:tag(lexer.NUMBER, to_eol(starts_line('@@' + lexer.dec_num + '****')))) - --- Additions, deletions, and changes. -lex:add_rule('addition', lex:tag('addition', to_eol(starts_line(S('>+'))))) -lex:add_rule('deletion', lex:tag('deletion', to_eol(starts_line(S('<-'))))) -lex:add_rule('change', lex:tag('change', to_eol(starts_line('!')))) - -lex:add_rule('any_line', lex:tag(lexer.DEFAULT, lexer.to_eol())) - -return lex diff --git a/share/vis/lexers/django.lua b/share/vis/lexers/django.lua @@ -1,63 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Django LPeg lexer. - -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(...) - --- Keywords. -lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD))) - --- Functions. -lex:add_rule('function', - lpeg.B('|') * lex:tag(lexer.FUNCTION_BUILTIN, lex:word_match(lexer.FUNCTION_BUILTIN))) - --- Identifiers. 
-lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) - --- Strings. -lex:add_rule('string', lex:tag(lexer.STRING, lexer.range('"', false, false))) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S(':,.|'))) - --- Embed Django in HTML. -local html = lexer.load('html') -html:add_rule('django_comment', lex:tag(lexer.COMMENT, lexer.range('{#', '#}', true))) -local django_start_rule = lex:tag(lexer.PREPROCESSOR, '{' * S('{%')) -local django_end_rule = lex:tag(lexer.PREPROCESSOR, S('%}') * '}') -html:embed(lex, django_start_rule, django_end_rule) - --- Fold points. -lex:add_fold_point(lexer.PREPROCESSOR, '{{', '}}') -lex:add_fold_point(lexer.PREPROCESSOR, '{%', '%}') - --- Word lists. -lex:set_word_list(lexer.KEYWORD, { - 'autoescape', 'endautoescape', 'block', 'endblock', 'comment', 'endcomment', 'csrf_token', - 'cycle', 'as', 'debug', 'extends', 'filter', 'endfilter', 'firstof', 'for', 'in', 'endfor', - 'empty', 'if', 'elif', 'else', 'endif', 'and', 'or', 'not', 'is', 'ifchanged', 'endifchanged', - 'include', 'load', 'lorem', 'now', 'regroup', 'resetcycle', 'spaceless', 'endspaceless', - 'templatetag', 'url', 'verbatim', 'endverbatim', 'widthratio', 'with', 'endwith', -- - 'blocktranslate', 'endblocktranslate', 'translate', 'language', 'get_available_languages', - 'get_current_language', 'get_current_language_bidi', 'get_language_info', - 'get_language_info_list', -- - 'get_static_prefix', 'get_media_prefix' -}) - -lex:set_word_list(lexer.FUNCTION_BUILTIN, { - 'add', 'addslashes', 'capfirst', 'center', 'cut', 'date', 'default', 'default_if_none', - 'dictsort', 'dictsortreversed', 'divisibleby', 'escape', 'escapejs', 'filesizeformat', 'first', - 'floatformat', 'force_escape', 'get_digit', 'iriencode', 'join', 'json_script', 'last', 'length', - 'length_is', 'linebreaks', 'linebreaksbr', 'linenumbers', 'ljust', 'lower', 'make_list', - 'phone2numeric', 'pluralize', 'pprint', 'random', 'rjust', 'safe', 'safeseq', 'slice', 'slugify', - 
'stringformat', 'striptags', 'time', 'timesince', 'timeuntil', 'title', 'truncatechars_html', - 'truncatewords', 'truncatewords_html', 'unordered_list', 'upper', 'urlencode', 'urlize', - 'urlizetrunc', 'wordcount', 'wordwrap', 'yesno', -- - 'language_name', 'language_name_local', 'language_bidi', 'language_name_translated' -}) - -lexer.property['scintillua.comment'] = '{#|#}' - -return lex diff --git a/share/vis/lexers/dmd.lua b/share/vis/lexers/dmd.lua @@ -1,141 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- D LPeg lexer. --- Heavily modified by Brian Schott (@Hackerpilot on Github). - -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(...) - --- Class names. -local ws = lex:get_rule('whitespace') -lex:add_rule('class', - lex:tag(lexer.TYPE, P('class') + 'struct') * ws^-1 * lex:tag(lexer.CLASS, lexer.word)) - --- Versions. -local open_paren = lex:tag(lexer.OPERATOR, '(') -lex:add_rule('version', lex:tag(lexer.KEYWORD, 'version') * ws^-1 * open_paren * ws^-1 * - lex:tag(lexer.CONSTANT_BUILTIN .. '.version', lex:word_match('version'))) - --- Scopes. -lex:add_rule('scope', lex:tag(lexer.KEYWORD, 'scope') * ws^-1 * open_paren * ws^-1 * - lex:tag(lexer.CONSTANT_BUILTIN .. '.scope', lexer.word_match('exit success failure'))) - --- Traits. -lex:add_rule('trait', lex:tag(lexer.KEYWORD, '__traits') * ws^-1 * open_paren * ws^-1 * - lex:tag(lexer.VARIABLE_BUILTIN .. '.traits', lex:word_match('trait'))) - --- Function names. -local func = lex:tag(lexer.FUNCTION, lexer.word) -local method = lpeg.B('.') * lex:tag(lexer.FUNCTION_METHOD, lexer.word) -lex:add_rule('function', (method + func) * #(ws^-1 * ('!' * lexer.word^-1 * ws^-1)^-1 * '(')) - --- Keywords. -lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD))) - --- Types. -lex:add_rule('type', lex:tag(lexer.TYPE, lex:word_match(lexer.TYPE))) - --- Constants. 
-lex:add_rule('constant', lex:tag(lexer.CONSTANT_BUILTIN, lex:word_match(lexer.CONSTANT_BUILTIN))) - --- Properties. -local dot = lex:tag(lexer.OPERATOR, '.') -lex:add_rule('property', lpeg.B(lexer.alnum + ')') * dot * - lex:tag(lexer.VARIABLE_BUILTIN, lex:word_match('property'))) - --- Strings. -local sq_str = lexer.range("'", true) * S('cwd')^-1 -local dq_str = lexer.range('"') * S('cwd')^-1 -local lit_str = 'r' * lexer.range('"', false, false) * S('cwd')^-1 -local bt_str = lexer.range('`', false, false) * S('cwd')^-1 -local hex_str = 'x' * lexer.range('"') * S('cwd')^-1 -local other_hex_str = '\\x' * (lexer.xdigit * lexer.xdigit)^1 -local str = sq_str + dq_str + lit_str + bt_str + hex_str + other_hex_str -for left, right in pairs{['['] = ']', ['('] = ')', ['{'] = '}', ['<'] = '>'} do - str = str + lexer.range('q"' .. left, right .. '"', false, false, true) * S('cwd')^-1 -end -lex:add_rule('string', lex:tag(lexer.STRING, str)) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) - --- Comments. -local line_comment = lexer.to_eol('//', true) -local block_comment = lexer.range('/*', '*/') -local nested_comment = lexer.range('/+', '+/', false, false, true) -lex:add_rule('comment', lex:tag(lexer.COMMENT, line_comment + block_comment + nested_comment)) - --- Numbers. -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number_('_') * S('uULdDfFi')^-1)) - --- Preprocessor. -lex:add_rule('annotation', lex:tag(lexer.ANNOTATION, '@' * lexer.word^1)) -lex:add_rule('preprocessor', lex:tag(lexer.PREPROCESSOR, lexer.to_eol('#'))) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('?=!<>+-*$/%&|^~.,;:()[]{}'))) - --- Fold points. -lex:add_fold_point(lexer.OPERATOR, '{', '}') -lex:add_fold_point(lexer.COMMENT, '/*', '*/') -lex:add_fold_point(lexer.COMMENT, '/+', '+/') - --- Word lists. 
-lex:set_word_list('version', { - 'AArch64', 'AIX', 'all', 'Alpha', 'Alpha_HardFloat', 'Alpha_SoftFloat', 'Android', 'ARM', - 'ARM_HardFloat', 'ARM_SoftFloat', 'ARM_SoftFP', 'ARM_Thumb', 'assert', 'BigEndian', 'BSD', - 'Cygwin', 'D_Coverage', 'D_Ddoc', 'D_HardFloat', 'DigitalMars', 'D_InlineAsm_X86', - 'D_InlineAsm_X86_64', 'D_LP64', 'D_NoBoundsChecks', 'D_PIC', 'DragonFlyBSD', 'D_SIMD', - 'D_SoftFloat', 'D_Version2', 'D_X32', 'FreeBSD', 'GNU', 'Haiku', 'HPPA', 'HPPA64', 'Hurd', 'IA64', - 'LDC', 'linux', 'LittleEndian', 'MIPS32', 'MIPS64', 'MIPS_EABI', 'MIPS_HardFloat', 'MIPS_N32', - 'MIPS_N64', 'MIPS_O32', 'MIPS_O64', 'MIPS_SoftFloat', 'NetBSD', 'none', 'OpenBSD', 'OSX', 'Posix', - 'PPC', 'PPC64', 'PPC_HardFloat', 'PPC_SoftFloat', 'S390', 'S390X', 'SDC', 'SH', 'SH64', 'SkyOS', - 'Solaris', 'SPARC', 'SPARC64', 'SPARC_HardFloat', 'SPARC_SoftFloat', 'SPARC_V8Plus', 'SysV3', - 'SysV4', 'unittest', 'Win32', 'Win64', 'Windows', 'X86', 'X86_64' -}) - -lex:set_word_list('trait', { - 'allMembers', 'classInstanceSize', 'compiles', 'derivedMembers', 'getAttributes', 'getMember', - 'getOverloads', 'getProtection', 'getUnitTests', 'getVirtualFunctions', 'getVirtualIndex', - 'getVirtualMethods', 'hasMember', 'identifier', 'isAbstractClass', 'isAbstractFunction', - 'isArithmetic', 'isAssociativeArray', 'isFinalClass', 'isFinalFunction', 'isFloating', - 'isIntegral', 'isLazy', 'isNested', 'isOut', 'isOverrideFunction', 'isPOD', 'isRef', 'isSame', - 'isScalar', 'isStaticArray', 'isStaticFunction', 'isUnsigned', 'isVirtualFunction', - 'isVirtualMethod', 'parent' -}) - -lex:set_word_list(lexer.KEYWORD, { - 'abstract', 'align', 'asm', 'assert', 'auto', 'body', 'break', 'case', 'cast', 'catch', 'const', - 'continue', 'debug', 'default', 'delete', 'deprecated', 'do', 'else', 'extern', 'export', 'false', - 'final', 'finally', 'for', 'foreach', 'foreach_reverse', 'goto', 'if', 'import', 'immutable', - 'in', 'inout', 'invariant', 'is', 'lazy', 'macro', 'mixin', 'new', 'nothrow', 'null', 
'out', - 'override', 'pragma', 'private', 'protected', 'public', 'pure', 'ref', 'return', 'scope', - 'shared', 'static', 'super', 'switch', 'synchronized', 'this', 'throwtrue', 'try', 'typeid', - 'typeof', 'unittest', 'version', 'virtual', 'volatile', 'while', 'with', '__gshared', '__thread', - '__traits', '__vector', '__parameters' -}) - -lex:set_word_list(lexer.TYPE, { - 'alias', 'bool', 'byte', 'cdouble', 'cent', 'cfloat', 'char', 'class', 'creal', 'dchar', - 'delegate', 'double', 'enum', 'float', 'function', 'idouble', 'ifloat', 'int', 'interface', - 'ireal', 'long', 'module', 'package', 'ptrdiff_t', 'real', 'short', 'size_t', 'struct', - 'template', 'typedef', 'ubyte', 'ucent', 'uint', 'ulong', 'union', 'ushort', 'void', 'wchar', - 'string', 'wstring', 'dstring', 'hash_t', 'equals_t' -}) - -lex:set_word_list(lexer.CONSTANT_BUILTIN, { - '__FILE__', '__LINE__', '__DATE__', '__EOF__', '__TIME__', '__TIMESTAMP__', '__VENDOR__', - '__VERSION__', '__FUNCTION__', '__PRETTY_FUNCTION__', '__MODULE__' -}) - -lex:set_word_list('property', { - 'alignof', 'dig', 'dup', 'epsilon', 'idup', 'im', 'init', 'infinity', 'keys', 'length', - 'mangleof', 'mant_dig', 'max', 'max_10_exp', 'max_exp', 'min', 'min_normal', 'min_10_exp', - 'min_exp', 'nan', 'offsetof', 'ptr', 're', 'rehash', 'reverse', 'sizeof', 'sort', 'stringof', - 'tupleof', 'values' -}) - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/dockerfile.lua b/share/vis/lexers/dockerfile.lua @@ -1,47 +0,0 @@ --- Copyright 2016-2024 Alejandro Baez (https://keybase.io/baez). See LICENSE. --- Dockerfile LPeg lexer. - -local lexer = lexer -local P, S, B = lpeg.P, lpeg.S, lpeg.B - -local lex = lexer.new(..., {fold_by_indentation = true}) - --- Keywords. -local keyword = lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD)) -lex:add_rule('keyword', keyword) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) - --- Variable. 
-lex:add_rule('variable', --B('\\') * lex:tag(lexer.OPERATOR, '$' * P('{')^-1) * lex:tag(lexer.VARIABLE, lexer.word)) - --- Strings. -local sq_str = lexer.range("'", false, false) -local dq_str = lexer.range('"') -lex:add_rule('string', lex:tag(lexer.STRING, sq_str + dq_str)) - --- Comments. -lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.to_eol('#'))) - --- Numbers. -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number)) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('\\[],=:{}'))) - -local bash = lexer.load('bash') -local start_rule = #P('RUN') * keyword * bash:get_rule('whitespace') -local end_rule = -B('\\') * #lexer.newline * lex:get_rule('whitespace') -lex:embed(bash, start_rule, end_rule) - --- Word lists. -lex:set_word_list(lexer.KEYWORD, { - 'ADD', 'ARG', 'CMD', 'COPY', 'ENTRYPOINT', 'ENV', 'EXPOSE', 'FROM', 'LABEL', 'MAINTAINER', - 'ONBUILD', 'RUN', 'STOPSIGNAL', 'USER', 'VOLUME', 'WORKDIR' -}) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/dot.lua b/share/vis/lexers/dot.lua @@ -1,57 +0,0 @@ --- Copyright 2006-2024 Brian "Sir Alaran" Schott. See LICENSE. --- Dot LPeg lexer. --- Based off of lexer code by Mitchell. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('dot') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'graph', 'node', 'edge', 'digraph', 'fontsize', 'rankdir', 'fontname', 'shape', 'label', - 'arrowhead', 'arrowtail', 'arrowsize', 'color', 'comment', 'constraint', 'decorate', 'dir', - 'headlabel', 'headport', 'headURL', 'labelangle', 'labeldistance', 'labelfloat', 'labelfontcolor', - 'labelfontname', 'labelfontsize', 'layer', 'lhead', 'ltail', 'minlen', 'samehead', 'sametail', - 'style', 'taillabel', 'tailport', 'tailURL', 'weight', 'subgraph' -})) - --- Types. 
-lex:add_rule('type', token(lexer.TYPE, word_match{ - ' box', 'polygon', 'ellipse', 'circle', 'point', 'egg', 'triangle', 'plaintext', 'diamond', - 'trapezium', 'parallelogram', 'house', 'pentagon', 'hexagon', 'septagon', 'octagon', - 'doublecircle', 'doubleoctagon', 'tripleoctagon', 'invtriangle', 'invtrapezium', 'invhouse', - 'Mdiamond', 'Msquare', 'Mcircle', 'rect', 'rectangle', 'none', 'note', 'tab', 'folder', 'box3d', - 'record' -})) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local sq_str = lexer.range("'") -local dq_str = lexer.range('"') -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) - --- Comments. -local line_comment = lexer.to_eol('//', true) -local block_comment = lexer.range('/*', '*/') -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.dec_num + lexer.float)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('->()[]{};'))) - --- Fold points. -lex:add_fold_point(lexer.OPERATOR, '{', '}') -lex:add_fold_point(lexer.COMMENT, '/*', '*/') - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/dsv.lua b/share/vis/lexers/dsv.lua @@ -1,12 +0,0 @@ --- Copyright 2016 Christian Hesse --- delimiter separated values LPeg lexer. - -local lexer = require('lexer') -local token = lexer.token -local S = lpeg.S - -local lex = lexer.new('dsv') - -lex:add_rule('operator', token(lexer.OPERATOR, S(',;:|'))) - -return lex diff --git a/share/vis/lexers/eiffel.lua b/share/vis/lexers/eiffel.lua @@ -1,59 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Eiffel LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('eiffel') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. 
-lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'alias', 'all', 'and', 'as', 'check', 'class', 'creation', 'debug', 'deferred', 'do', 'else', - 'elseif', 'end', 'ensure', 'expanded', 'export', 'external', 'feature', 'from', 'frozen', 'if', - 'implies', 'indexing', 'infix', 'inherit', 'inspect', 'invariant', 'is', 'like', 'local', 'loop', - 'not', 'obsolete', 'old', 'once', 'or', 'prefix', 'redefine', 'rename', 'require', 'rescue', - 'retry', 'select', 'separate', 'then', 'undefine', 'until', 'variant', 'when', 'xor', -- - 'current', 'false', 'precursor', 'result', 'strip', 'true', 'unique', 'void' -})) - --- Types. -lex:add_rule('type', - token(lexer.TYPE, word_match('character string bit boolean integer real none any'))) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"', true) -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) - --- Comments. -lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('--'))) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('=!<>+-/*%&|^~.,:;?()[]{}'))) - --- Fold points. 
-lex:add_fold_point(lexer.KEYWORD, 'check', 'end') -lex:add_fold_point(lexer.KEYWORD, 'debug', 'end') -lex:add_fold_point(lexer.KEYWORD, 'deferred', - function(text, pos, line, s) return line:find('deferred%s+class') and 0 or 1 end) -lex:add_fold_point(lexer.KEYWORD, 'do', 'end') -lex:add_fold_point(lexer.KEYWORD, 'from', 'end') -lex:add_fold_point(lexer.KEYWORD, 'if', 'end') -lex:add_fold_point(lexer.KEYWORD, 'inspect', 'end') -lex:add_fold_point(lexer.KEYWORD, 'once', 'end') -lex:add_fold_point(lexer.KEYWORD, 'class', - function(text, pos, line, s) return line:find('deferred%s+class') and 0 or 1 end) - -lexer.property['scintillua.comment'] = '--' - -return lex diff --git a/share/vis/lexers/elixir.lua b/share/vis/lexers/elixir.lua @@ -1,99 +0,0 @@ --- Copyright 2015-2024 Mitchell. See LICENSE. --- Contributed by Richard Philips. --- Elixir LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local B, P, S = lpeg.B, lpeg.P, lpeg.S - -local lex = lexer.new('elixir', {fold_by_indentation = true}) - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Sigils. 
-local sigil11 = '~' * S('CRSW') * lexer.range('<', '>') -local sigil12 = '~' * S('CRSW') * lexer.range('{', '}') -local sigil13 = '~' * S('CRSW') * lexer.range('[', ']') -local sigil14 = '~' * S('CRSW') * lexer.range('(', ')') -local sigil15 = '~' * S('CRSW') * lexer.range('|', false, false) -local sigil16 = '~' * S('CRSW') * lexer.range('/', false, false) -local sigil17 = '~' * S('CRSW') * lexer.range('"', false, false) -local sigil18 = '~' * S('CRSW') * lexer.range("'", false, false) -local sigil19 = '~' * S('CRSW') * lexer.range('"""') -local sigil10 = '~' * S('CRSW') * lexer.range("'''") -local sigil21 = '~' * S('crsw') * lexer.range('<', '>') -local sigil22 = '~' * S('crsw') * lexer.range('{', '}') -local sigil23 = '~' * S('crsw') * lexer.range('[', ']') -local sigil24 = '~' * S('crsw') * lexer.range('(', ')') -local sigil25 = '~' * S('crsw') * lexer.range('|') -local sigil26 = '~' * S('crsw') * lexer.range('/') -local sigil27 = '~' * S('crsw') * lexer.range('"') -local sigil28 = '~' * S('crsw') * lexer.range("'") -local sigil29 = '~' * S('crsw') * lexer.range('"""') -local sigil20 = '~' * S('crsw') * lexer.range("'''") -local sigil_token = token(lexer.REGEX, - sigil10 + sigil19 + sigil11 + sigil12 + sigil13 + sigil14 + sigil15 + sigil16 + sigil17 + sigil18 + - sigil20 + sigil29 + sigil21 + sigil22 + sigil23 + sigil24 + sigil25 + sigil26 + sigil27 + - sigil28) -local sigiladdon_token = token(lexer.EMBEDDED, lexer.alpha^0) -lex:add_rule('sigil', sigil_token * sigiladdon_token) - --- Atoms. -local atom1 = B(1 - P(':')) * ':' * lexer.range('"') -local atom2 = B(1 - P(':')) * ':' * lexer.alpha * (lexer.alnum + S('_@'))^0 * S('?!')^-1 -local atom3 = B(1 - (lexer.alnum + S('_:'))) * lexer.upper * (lexer.alnum + S('_@'))^0 * S('?!')^-1 -lex:add_rule('atom', token(lexer.CONSTANT, atom1 + atom2 + atom3)) - --- Strings. 
-local dq_str = lexer.range('"') -local triple_dq_str = lexer.range('"""') -lex:add_rule('string', token(lexer.STRING, triple_dq_str + dq_str)) - --- Comments. -lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#', true))) - --- Attributes. -lex:add_rule('attribute', token(lexer.LABEL, B(1 - (lexer.alnum + '_')) * '@' * lexer.alpha * - (lexer.alnum + '_')^0)) - --- Booleans. -lex:add_rule('boolean', token(lexer.NUMBER, P(':')^-1 * word_match('true false nil'))) - --- Functions. -lex:add_rule('function', token(lexer.FUNCTION, word_match{ - 'defstruct', 'defrecordp', 'defrecord', 'defprotocol', 'defp', 'defoverridable', 'defmodule', - 'defmacrop', 'defmacro', 'defimpl', 'defexception', 'defdelegate', 'defcallback', 'def' -})) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'is_atom', 'is_binary', 'is_bitstring', 'is_boolean', 'is_float', 'is_function', 'is_integer', - 'is_list', 'is_map', 'is_number', 'is_pid', 'is_port', 'is_record', 'is_reference', 'is_tuple', - 'is_exception', 'case', 'when', 'cond', 'for', 'if', 'unless', 'try', 'receive', 'send', 'exit', - 'raise', 'throw', 'after', 'rescue', 'catch', 'else', 'do', 'end', 'quote', 'unquote', 'super', - 'import', 'require', 'alias', 'use', 'self', 'with', 'fn' -})) - --- Operators -local operator1 = word_match('and or not when xor in') -local operator2 = P('!==') + '!=' + '!' + '=~' + '===' + '==' + '=' + '<<<' + '<<' + '<=' + '<-' + - '<' + '>>>' + '>>' + '>=' + '>' + '->' + '--' + '-' + '++' + '+' + '&&&' + '&&' + '&' + '|||' + - '||' + '|>' + '|' + '..' + '.' 
+ '^^^' + '^' + '\\\\' + '::' + '*' + '/' + '~~~' + '@' -lex:add_rule('operator', token(lexer.OPERATOR, operator1 + operator2)) - --- Identifiers -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word * S('?!')^-1)) - --- Numbers -local dec = lexer.digit * (lexer.digit + '_')^0 -local bin = '0b' * S('01')^1 -local oct = '0o' * lpeg.R('07')^1 -local integer = bin + lexer.hex_num + oct + dec -local float = lexer.digit^1 * '.' * lexer.digit^1 * S('eE') * (S('+-')^-1 * lexer.digit^1)^-1 -lex:add_rule('number', - B(1 - (lexer.alpha + '_')) * S('+-')^-1 * token(lexer.NUMBER, float + integer)) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/elm.lua b/share/vis/lexers/elm.lua @@ -1,45 +0,0 @@ --- Copyright 2020-2024 Mitchell. See LICENSE. --- Elm LPeg lexer --- Adapted from Haskell LPeg lexer by Karl Schultheisz. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('elm', {fold_by_indentation = true}) - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match( - 'if then else case of let in module import as exposing type alias port'))) - --- Types & type constructors. -local word = (lexer.alnum + S("._'#"))^0 -local op = lexer.punct - S('()[]{}') -lex:add_rule('type', token(lexer.TYPE, lexer.upper * word + ':' * (op^1 - ':'))) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, (lexer.alpha + '_') * word)) - --- Strings. -lex:add_rule('string', token(lexer.STRING, lexer.range('"'))) - --- Chars. -lex:add_rule('character', token(lexer.STRING, lexer.range("'", true))) - --- Comments. -local line_comment = lexer.to_eol('--', true) -local block_comment = lexer.range('{-', '-}', false, false, true) -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. 
-lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, op)) - -lexer.property['scintillua.comment'] = '--' - -return lex diff --git a/share/vis/lexers/erlang.lua b/share/vis/lexers/erlang.lua @@ -1,90 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Erlang LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('erlang') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'after', 'begin', 'case', 'catch', 'cond', 'end', 'fun', 'if', 'let', 'of', 'query', 'receive', - 'try', 'when', - -- Operators. - 'div', 'rem', 'or', 'xor', 'bor', 'bxor', 'bsl', 'bsr', 'and', 'band', 'not', 'bnot', 'badarg', - 'nocookie', 'orelse', 'andalso', 'false', 'true' -})) - --- Functions. -lex:add_rule('function', token(lexer.FUNCTION, word_match{ - 'abs', 'alive', 'apply', 'atom_to_list', 'binary_to_list', 'binary_to_term', 'concat_binary', - 'date', 'disconnect_node', 'element', 'erase', 'exit', 'float', 'float_to_list', 'get', - 'get_keys', 'group_leader', 'halt', 'hd', 'integer_to_list', 'is_alive', 'is_record', 'length', - 'link', 'list_to_atom', 'list_to_binary', 'list_to_float', 'list_to_integer', 'list_to_pid', - 'list_to_tuple', 'load_module', 'make_ref', 'monitor_node', 'node', 'nodes', 'now', 'open_port', - 'pid_to_list', 'process_flag', 'process_info', 'process', 'put', 'register', 'registered', - 'round', 'self', 'setelement', 'size', 'spawn', 'spawn_link', 'split_binary', 'statistics', - 'term_to_binary', 'throw', 'time', 'tl', 'trunc', 'tuple_to_list', 'unlink', 'unregister', - 'whereis', - -- Others. 
- 'any', 'atom', 'binary', 'bitstring', 'byte', 'constant', 'function', 'integer', 'list', 'map', - 'mfa', 'non_neg_integer', 'number', 'pid', 'ports', 'port_close', 'port_info', 'pos_integer', - 'reference', 'record', - -- Erlang. - 'check_process_code', 'delete_module', 'get_cookie', 'hash', 'math', 'module_loaded', 'preloaded', - 'processes', 'purge_module', 'set_cookie', 'set_node', - -- Math. - 'acos', 'asin', 'atan', 'atan2', 'cos', 'cosh', 'exp', 'log', 'log10', 'min', 'max', 'pi', 'pow', - 'power', 'sin', 'sinh', 'sqrt', 'tan', 'tanh' -})) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.lower * ('_' + lexer.alnum)^0)) - --- Variables. -lex:add_rule('variable', token(lexer.VARIABLE, P('_')^0 * lexer.upper * ('_' + lexer.alnum)^0)) - --- Directives. -lex:add_rule('directive', token(lexer.PREPROCESSOR, '-' * word_match{ - 'author', 'behaviour', 'behavior', 'compile', 'copyright', 'define', 'doc', 'else', 'endif', - 'export', 'file', 'ifdef', 'ifndef', 'import', 'include', 'include_lib', 'module', 'record', - 'spec', 'type', 'undef' -})) - --- Strings. -local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"') -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str + '$' * lexer.any * lexer.alnum^0)) - --- Comments. -lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('%'))) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('-<>.;=/|+*:,!()[]{}'))) - --- Preprocessor. -lex:add_rule('preprocessor', token(lexer.TYPE, '?' * lexer.word)) - --- Records. -lex:add_rule('type', token(lexer.TYPE, '#' * lexer.word)) - --- Fold points. 
-lex:add_fold_point(lexer.KEYWORD, 'case', 'end') -lex:add_fold_point(lexer.KEYWORD, 'fun', 'end') -lex:add_fold_point(lexer.KEYWORD, 'if', 'end') -lex:add_fold_point(lexer.KEYWORD, 'query', 'end') -lex:add_fold_point(lexer.KEYWORD, 'receive', 'end') -lex:add_fold_point(lexer.OPERATOR, '(', ')') -lex:add_fold_point(lexer.OPERATOR, '[', ']') -lex:add_fold_point(lexer.OPERATOR, '{', '}') - -lexer.property['scintillua.comment'] = '%' - -return lex diff --git a/share/vis/lexers/fantom.lua b/share/vis/lexers/fantom.lua @@ -1,84 +0,0 @@ --- Copyright 2018-2024 Simeon Maryasin (MarSoft). See LICENSE. --- Fantom LPeg lexer. --- Based on Java LPeg lexer by Mitchell and Vim's Fantom syntax. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('fantom') - --- Whitespace. -local ws = token(lexer.WHITESPACE, lexer.space^1) -lex:add_rule('whitespace', ws) - --- Classes. -local type = token(lexer.TYPE, lexer.word) -lex:add_rule('class_sequence', - token(lexer.KEYWORD, 'class') * ws * type * ( -- at most one inheritance spec - ws * token(lexer.OPERATOR, ':') * ws * type * - ( -- at least 0 (i.e. any number) of additional classes - ws^-1 * token(lexer.OPERATOR, ',') * ws^-1 * type)^0)^-1) - --- Keywords. 
-lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'using', 'native', -- external - 'goto', 'void', 'serializable', 'volatile', -- error - 'if', 'else', 'switch', -- conditional - 'do', 'while', 'for', 'foreach', 'each', -- repeat - 'true', 'false', -- boolean - 'null', -- constant - 'this', 'super', -- typedef - 'new', 'is', 'isnot', 'as', -- operator - 'plus', 'minus', 'mult', 'div', 'mod', 'get', 'set', 'slice', 'lshift', 'rshift', 'and', 'or', - 'xor', 'inverse', 'negate', -- - 'increment', 'decrement', 'equals', 'compare', -- long operator - 'return', -- stmt - 'static', 'const', 'final', -- storage class - 'virtual', 'override', 'once', -- slot - 'readonly', -- field - 'throw', 'try', 'catch', 'finally', -- exceptions - 'assert', -- assert - 'class', 'enum', 'mixin', -- typedef - 'break', 'continue', -- branch - 'default', 'case', -- labels - 'public', 'internal', 'protected', 'private', 'abstract' -- scope decl -})) - --- Types. -lex:add_rule('type', token(lexer.TYPE, word_match( - 'Void Bool Int Float Decimal Str Duration Uri Type Range List Map Obj Err Env'))) - --- Functions. --- lex:add_rule('function', token(lexer.FUNCTION, lexer.word) * #P('(')) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"', true) -local bq_str = lexer.range('`', true) -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str + bq_str)) - --- Comments. -local line_comment = lexer.to_eol('//', true) -local block_comment = lexer.range('/*', '*/') -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number * S('LlFfDd')^-1)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;.()[]{}#'))) - --- Annotations. -lex:add_rule('facet', token(lexer.ANNOTATION, '@' * lexer.word)) - --- Fold points. 
-lex:add_fold_point(lexer.OPERATOR, '{', '}') -lex:add_fold_point(lexer.COMMENT, '/*', '*/') - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/faust.lua b/share/vis/lexers/faust.lua @@ -1,46 +0,0 @@ --- Copyright 2015-2024 David B. Lamkins <david@lamkins.net>. See LICENSE. --- Faust LPeg lexer, see http://faust.grame.fr/ - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('faust') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'declare', 'import', 'mdoctags', 'dependencies', 'distributed', 'inputs', 'outputs', 'par', 'seq', - 'sum', 'prod', 'xor', 'with', 'environment', 'library', 'component', 'ffunction', 'fvariable', - 'fconstant', 'int', 'float', 'case', 'waveform', 'h:', 'v:', 't:' -})) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. -lex:add_rule('string', token(lexer.STRING, lexer.range('"', true))) - --- Comments. -local line_comment = lexer.to_eol('//') -local block_comment = lexer.range('/*', '*/') -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -local int = lexer.digit^1 -local rad = P('.') -local exp = (P('e') * S('+-')^-1 * int)^-1 -local flt = int * (rad * int)^-1 * exp + int^-1 * rad * int * exp -lex:add_rule('number', token(lexer.NUMBER, flt + int)) - --- Pragmas. -lex:add_rule('pragma', token(lexer.PREPROCESSOR, lexer.range('<mdoc>', '</mdoc>'))) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>~!=^&|?~:;,.()[]{}@#$`\\\''))) - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/fennel.lua b/share/vis/lexers/fennel.lua @@ -1,45 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Fennel LPeg lexer. --- Contributed by Momohime Honda. 
- -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('fennel', {inherit = lexer.load('lua')}) - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:modify_rule('keyword', token(lexer.KEYWORD, word_match{ - '#', '%', '*', '+', '-', '->>', '->', '-?>>', '-?>', '..', '.', '//', '/', ':', '<=', '<', '=', - '>=', '>', '?.', '^', '~=', 'λ', 'accumulate', 'and', 'band', 'bnot', 'bor', 'bxor', 'collect', - 'comment', 'do', 'doto', 'each', 'eval-compiler', 'fn', 'for', 'global', 'hashfn', 'icollect', - 'if', 'import-macros', 'include', 'lambda', 'length', 'let', 'local', 'lshift', 'lua', 'macro', - 'macrodebug', 'macros', 'match', 'not', 'not=', 'or', 'partial', 'pick-args', 'pick-values', - 'quote', 'require-macros', 'rshift', 'set', 'set-forcibly!', 'tset', 'values', 'var', 'when', - 'while', 'with-open' -})) - --- Identifiers. -local initial = lexer.alpha + S('|$%&#*+-/<=>?~^_λ!') -local subsequent = initial + lexer.digit -lex:modify_rule('identifier', token(lexer.IDENTIFIER, initial * subsequent^0 * P('#')^-1)) - --- Strings. -local dq_str = lexer.range('"') -local kw_str = lpeg.B(1 - subsequent) * ':' * subsequent^1 -lex:modify_rule('string', token(lexer.STRING, dq_str + kw_str)) - --- Comments. -lex:modify_rule('comment', token(lexer.COMMENT, lexer.to_eol(';'))) - --- Ignore these rules. --- lex:modify_rule('longstring', P(false)) -lex:modify_rule('label', P(false)) -lex:modify_rule('operator', P(false)) - -lexer.property['scintillua.comment'] = ';' - -return lex diff --git a/share/vis/lexers/fish.lua b/share/vis/lexers/fish.lua @@ -1,58 +0,0 @@ --- Copyright 2015-2024 Jason Schindler. See LICENSE. --- Fish (http://fishshell.com/) script LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('fish') - --- Whitespace. 
-lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'alias', 'and', 'begin', 'bg', 'bind', 'block', 'break', 'breakpoint', 'builtin', 'case', 'cd', - 'command', 'commandline', 'complete', 'contains', 'continue', 'count', 'dirh', 'dirs', 'echo', - 'else', 'emit', 'end', 'eval', 'exec', 'exit', 'fg', 'fish', 'fish_config', 'fishd', - 'fish_indent', 'fish_pager', 'fish_prompt', 'fish_right_prompt', 'fish_update_completions', 'for', - 'funced', 'funcsave', 'function', 'functions', 'help', 'history', 'if', 'in', 'isatty', 'jobs', - 'math', 'mimedb', 'nextd', 'not', 'open', 'or', 'popd', 'prevd', 'psub', 'pushd', 'pwd', 'random', - 'read', 'return', 'set', 'set_color', 'source', 'status', 'switch', 'test', 'trap', 'type', - 'ulimit', 'umask', 'vared', 'while' -})) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Variables. -lex:add_rule('variable', token(lexer.VARIABLE, '$' * (lexer.word + lexer.range('{', '}', true)))) - --- Strings. -local sq_str = lexer.range("'", false, false) -local dq_str = lexer.range('"') -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) - --- Shebang. -lex:add_rule('shebang', token(lexer.COMMENT, lexer.to_eol('#!/'))) - --- Comments. -lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#'))) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('=!<>+-/*^&|~.,:;?()[]{}'))) - --- Fold points. 
-lex:add_fold_point(lexer.KEYWORD, 'begin', 'end') -lex:add_fold_point(lexer.KEYWORD, 'for', 'end') -lex:add_fold_point(lexer.KEYWORD, 'function', 'end') -lex:add_fold_point(lexer.KEYWORD, 'if', 'end') -lex:add_fold_point(lexer.KEYWORD, 'switch', 'end') -lex:add_fold_point(lexer.KEYWORD, 'while', 'end') - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/forth.lua b/share/vis/lexers/forth.lua @@ -1,58 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Forth LPeg lexer. --- Contributions from Joseph Eib. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('forth') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Strings. -local c_str = 'c' * lexer.range('"', true, false) -local s_str = 's' * lexer.range('"', true, false) -local s_bs_str = 's\\' * lexer.range('"', true) -local dot_str = '.' * lexer.range('"', true, false) -local dot_paren_str = '.' * lexer.range('(', ')', true) -local abort_str = 'abort' * lexer.range('"', true, false) -lex:add_rule('string', - token(lexer.STRING, c_str + s_str + s_bs_str + dot_str + dot_paren_str + abort_str)) - --- Keywords. 
-lex:add_rule('keyword', token(lexer.KEYWORD, word_match({ - '#>', '#s', '*/', '*/mod', '+loop', ',', '.', '.r', '/mod', '0<', '0<>', '0>', '0=', '1+', '1-', - '2!', '2*', '2/', '2>r', '2@', '2drop', '2dup', '2over', '2r>', '2r@', '2swap', ':noname', '<#', - '<>', '>body', '>in', '>number', '>r', '?do', '?dup', '@', 'abort', 'abs', 'accept', 'action-of', - 'again', 'align', 'aligned', 'allot', 'and', 'base', 'begin', 'bl', 'buffer:', 'c!', 'c,', 'c@', - 'case', 'cell+', 'cells', 'char', 'char+', 'chars', 'compile,', 'constant,', 'count', 'cr', - 'create', 'decimal', 'defer', 'defer!', 'defer@', 'depth', 'do', 'does>', 'drop', 'dup', 'else', - 'emit', 'endcase', 'endof', 'environment?', 'erase', 'evaluate', 'execute', 'exit', 'false', - 'fill', 'find', 'fm/mod', 'here', 'hex', 'hold', 'holds', 'i', 'if', 'immediate', 'invert', 'is', - 'j', 'key', 'leave', 'literal', 'loop', 'lshift', 'm*', 'marker', 'max', 'min', 'mod', 'move', - 'negate', 'nip', 'of', 'or', 'over', 'pad', 'parse', 'parse-name', 'pick', 'postpone', 'quit', - 'r>', 'r@', 'recurse', 'refill', 'restore-input', 'roll', 'rot', 'rshift', 's>d', 'save-input', - 'sign', 'sm/rem', 'source', 'source-id', 'space', 'spaces', 'state', 'swap', 'to', 'then', 'true', - 'tuck', 'type', 'u.', 'u.r', 'u>', 'u<', 'um*', 'um/mod', 'unloop', 'until', 'unused', 'value', - 'variable', 'while', 'within', 'word', 'xor', "[']", '[char]', '[compile]' -}, true))) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, (lexer.alnum + S('+-*=<>.?/\'%,_$#'))^1)) - --- Comments. -local line_comment = lexer.to_eol(S('|\\')) -local block_comment = lexer.range('(', ')') -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, P('-')^-1 * lexer.digit^1 * (S('./') * lexer.digit^1)^-1)) - --- Operators. 
-lex:add_rule('operator', token(lexer.OPERATOR, S(':;<>+*-/[]#'))) - -lexer.property['scintillua.comment'] = '\\' - -return lex diff --git a/share/vis/lexers/fortran.lua b/share/vis/lexers/fortran.lua @@ -1,87 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Fortran LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('fortran') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Comments. -local line_comment = lexer.to_eol(lexer.starts_line(S('CcDd!*')) + '!') -lex:add_rule('comment', token(lexer.COMMENT, line_comment)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match({ - 'include', 'interface', 'program', 'module', 'subroutine', 'function', 'contains', 'use', 'call', - 'return', - -- Statements. - 'case', 'select', 'default', 'continue', 'cycle', 'do', 'while', 'else', 'if', 'elseif', 'then', - 'elsewhere', 'end', 'endif', 'enddo', 'equivalence', 'external', 'forall', 'where', 'exit', - 'goto', 'pause', 'save', 'stop', - -- Operators. - '.not.', '.and.', '.or.', '.xor.', '.eqv.', '.neqv.', '.eq.', '.ne.', '.gt.', '.ge.', '.lt.', - '.le.', - -- Logical. - '.false.', '.true.', - -- Attributes and other keywords. - 'access', 'action', 'advance', 'assignment', 'block', 'entry', 'in', 'inout', 'intent', 'only', - 'out', 'optional', 'pointer', 'precision', 'procedure', 'recursive', 'result', 'sequence', 'size', - 'stat', 'target', 'type' -}, true))) - --- Functions. -lex:add_rule('function', token(lexer.FUNCTION, word_match({ - -- I/O. - 'backspace', 'close', 'endfile', 'inquire', 'open', 'print', 'read', 'rewind', 'write', 'format', - -- Type conversion utility and math. 
- 'aimag', 'aint', 'amax0', 'amin0', 'anint', 'ceiling', 'cmplx', 'conjg', 'dble', 'dcmplx', - 'dfloat', 'dim', 'dprod', 'float', 'floor', 'ifix', 'imag', 'int', 'logical', 'modulo', 'nint', - 'real', 'sign', 'sngl', 'transfer', 'zext', 'abs', 'acos', 'aimag', 'aint', 'alog', 'alog10', - 'amax0', 'amax1', 'amin0', 'amin1', 'amod', 'anint', 'asin', 'atan', 'atan2', 'cabs', 'ccos', - 'char', 'clog', 'cmplx', 'conjg', 'cos', 'cosh', 'csin', 'csqrt', 'dabs', 'dacos', 'dasin', - 'datan', 'datan2', 'dble', 'dcos', 'dcosh', 'ddim', 'dexp', 'dim', 'dint', 'dlog', 'dlog10', - 'dmax1', 'dmin1', 'dmod', 'dnint', 'dprod', 'dreal', 'dsign', 'dsin', 'dsinh', 'dsqrt', 'dtan', - 'dtanh', 'exp', 'float', 'iabs', 'ichar', 'idim', 'idint', 'idnint', 'ifix', 'index', 'int', - 'isign', 'len', 'lge', 'lgt', 'lle', 'llt', 'log', 'log10', 'max', 'max0', 'max1', 'min', 'min0', - 'min1', 'mod', 'nint', 'real', 'sign', 'sin', 'sinh', 'sngl', 'sqrt', 'tan', 'tanh', - -- Matrix math. - 'matmul', 'transpose', 'reshape', - -- Other frequently used built-in statements. - 'assign', 'nullify', - -- ISO C binding from Fortran 2003. - 'c_sizeof', 'c_f_pointer', 'c_associated' -}, true))) - --- Types. -lex:add_rule('type', token(lexer.TYPE, word_match({ - 'implicit', 'explicit', 'none', 'data', 'parameter', 'allocate', 'allocatable', 'allocated', - 'deallocate', 'integer', 'real', 'double', 'precision', 'complex', 'logical', 'character', - 'dimension', 'kind', - -- ISO C binding from Fortran 2003 - 'bind', 'c_int', 'c_short', 'c_long', 'c_long_long', 'c_signed_char', 'c_size_t', 'c_int8_t', - 'c_int16_t', 'c_int32_t', 'c_int64_t', 'c_int128_t', 'c_intptr_t', 'c_float', 'c_double', - 'c_long_double', 'c_float128', 'c_float_complex', 'c_double_complex', 'c_long_double_complex', - 'c_float128_complex', 'c_bool', 'c_char', 'c_null_char', 'c_new_line', 'c_null_ptr', 'c_funptr' -}, true))) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number * -lexer.alpha)) - --- Identifiers. 
-lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.alnum^1)) - --- Strings. -local sq_str = lexer.range("'", true, false) -local dq_str = lexer.range('"', true, false) -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('<>=&+-/*,()'))) - -lexer.property['scintillua.comment'] = '!' - -return lex diff --git a/share/vis/lexers/fsharp.lua b/share/vis/lexers/fsharp.lua @@ -1,59 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- F# LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('fsharp', {fold_by_indentation = true}) - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'abstract', 'and', 'as', 'assert', 'asr', 'begin', 'class', 'default', 'delegate', 'do', 'done', - 'downcast', 'downto', 'else', 'end', 'enum', 'exception', 'false', 'finaly', 'for', 'fun', - 'function', 'if', 'in', 'iherit', 'interface', 'land', 'lazy', 'let', 'lor', 'lsl', 'lsr', 'lxor', - 'match', 'member', 'mod', 'module', 'mutable', 'namespace', 'new', 'null', 'of', 'open', 'or', - 'override', 'sig', 'static', 'struct', 'then', 'to', 'true', 'try', 'type', 'val', 'when', - 'inline', 'upcast', 'while', 'with', 'async', 'atomic', 'break', 'checked', 'component', 'const', - 'constructor', 'continue', 'eager', 'event', 'external', 'fixed', 'functor', 'include', 'method', - 'mixin', 'process', 'property', 'protected', 'public', 'pure', 'readonly', 'return', 'sealed', - 'switch', 'virtual', 'void', 'volatile', 'where', - -- Booleans. - 'true', 'false' -})) - --- Types. 
-lex:add_rule('type', token(lexer.TYPE, word_match{ - 'bool', 'byte', 'sbyte', 'int16', 'uint16', 'int', 'uint32', 'int64', 'uint64', 'nativeint', - 'unativeint', 'char', 'string', 'decimal', 'unit', 'void', 'float32', 'single', 'float', 'double' -})) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"', true) -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) - --- Comments. -local line_comment = lexer.to_eol('//') -local block_comment = lexer.range('(*', '*)', false, false, true) -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.integer * S('uUlL')^-1)) - --- Preprocessor. -lex:add_rule('preproc', token(lexer.PREPROCESSOR, lexer.starts_line('#') * S('\t ')^0 * - word_match('else endif endregion if ifdef ifndef light region'))) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('=<>+-*/^.,:;~!@#%^&|?[](){}'))) - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/fstab.lua b/share/vis/lexers/fstab.lua @@ -1,127 +0,0 @@ --- Copyright 2016-2024 Christian Hesse. See LICENSE. --- fstab LPeg lexer. - -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(..., {lex_by_line = true}) - --- Keywords. -lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD))) - --- Numbers. -local uuid = lexer.xdigit^8 * ('-' * lexer.xdigit^4)^-3 * '-' * lexer.xdigit^12 -local integer = S('+-')^-1 * (lexer.hex_num + lexer.oct_num_('_') + lexer.dec_num_('_')) -lex:add_rule('number', lex:tag(lexer.NUMBER, uuid + lexer.float + integer)) - --- Identifiers. -lex:add_rule('identifier', - lex:tag(lexer.IDENTIFIER, (lexer.alpha + '_') * (lexer.alnum + S('_.'))^0)) - --- Comments. 
-lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.starts_line(lexer.to_eol('#')))) - --- Directories. -lex:add_rule('directory', lex:tag(lexer.VARIABLE, '/' * (1 - lexer.space)^0)) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('=,'))) - -lex:set_word_list(lexer.KEYWORD, { - -- Basic filesystem-independent mount options. - 'async', 'atime', 'auto', 'comment', 'context', 'defaults', 'defcontext', 'dev', 'dirsync', - 'exec', 'fscontext', 'group', 'iversion', 'lazytime', 'loud', 'mand', '_netdev', 'noatime', - 'noauto', 'nodev', 'nodiratime', 'noexec', 'nofail', 'noiversion', 'nolazytime', 'nomand', - 'norelatime', 'nostrictatime', 'nosuid', 'nouser', 'owner', 'relatime', 'remount', 'ro', - 'rootcontext', 'rw', 'silent', 'strictatime', 'suid', 'sync', 'user', 'users', - -- Mount options for systemd see systemd.mount(5). - 'x-systemd.automount', 'x-systemd.device-timeout', 'x-systemd.idle-timeout', - 'x-systemd.mount-timeout', 'x-systemd.requires', 'x-systemd.requires-mounts-for', - 'x-initrd.mount', - -- Mount options for adfs. - 'uid', 'gid', 'ownmask', 'othmask', - -- Mount options for affs. - 'uid', 'gid', 'setuid', 'setgid', 'mode', 'protect', 'usemp', 'verbose', 'prefix', 'volume', - 'reserved', 'root', 'bs', 'grpquota', 'noquota', 'quota', 'usrquota', - -- Mount options for btrfs. - 'alloc_start', 'autodefrag', 'check_int', 'check_int_data', 'check_int_print_mask', 'commit', - 'compress', 'zlib', 'lzo', 'no', 'compress-force', 'degraded', 'device', 'discard', - 'enospc_debug', 'fatal_errors', 'bug', 'panic', 'flushoncommit', 'inode_cache', 'max_inline', - 'metadata_ratio', 'noacl', 'nobarrier', 'nodatacow', 'nodatasum', 'notreelog', 'recovery', - 'rescan_uuid_tree', 'skip_balance', 'space_cache', 'nospace_cache', 'clear_cache', 'ssd', 'nossd', - 'ssd_spread', 'subvol', 'subvolid', 'subvolrootid', 'thread_pool', 'user_subvol_rm_allowed', - -- Mount options for devpts. 
- 'uid', 'gid', 'mode', 'newinstance', 'ptmxmode', - -- Mount options for ext2. - 'acl', 'noacl', 'bsddf', 'minixdf', 'check', 'nocheck', 'debug', 'errors', 'continue', - 'remount-ro', 'panic', 'grpid', 'bsdgroups', 'nogrpid', 'sysvgroups', 'grpquota', 'noquota', - 'quota', 'usrquota', 'nouid32', 'oldalloc', 'orlov', 'resgid', 'resuid', 'sb', 'user_xattr', - 'nouser_xattr', - -- Mount options for ext3. - 'journal', 'update', 'journal_dev', 'journal_path', 'norecoverynoload', 'data', 'journal', - 'ordered', 'writeback', 'data_err', 'ignore', 'abort', 'barrier', 'commit', 'user_xattr', 'acl', - 'usrjquota', 'grpjquota', 'jqfmt', - -- Mount options for ext4. - 'journal_checksum', 'journal_async_commit', 'barrier', 'nobarrier', 'inode_readahead_blks', - 'stripe', 'delalloc', 'nodelalloc', 'max_batch_time', 'min_batch_time', 'journal_ioprio', 'abort', - 'auto_da_alloc', 'noauto_da_alloc', 'noinit_itable', 'init_itable', 'discard', 'nodiscard', - 'nouid32', 'block_validity', 'noblock_validity', 'dioread_lock', 'dioread_nolock', - 'max_dir_size_kb', 'i_version', - -- Mount options for fat (common part of msdos umsdos and vfat). - 'blocksize', 'uid', 'gid', 'umask', 'dmask', 'fmask', 'allow_utime', 'check', 'relaxed', 'normal', - 'strict', 'codepage', 'conv', 'binary', 'text', 'auto', 'cvf_format', 'cvf_option', 'debug', - 'discard', 'dos1xfloppy', 'errors', 'panic', 'continue', 'remount-ro', 'fat', 'iocharset', 'nfs', - 'stale_rw', 'nostale_ro', 'tz', 'time_offset', 'quiet', 'rodir', 'showexec', 'sys_immutable', - 'flush', 'usefree', 'dots', 'nodots', 'dotsOK', - -- Mount options for hfs. - 'creator', 'type', 'uid', 'gid', 'dir_umask', 'file_umask', 'umask', 'session', 'part', 'quiet', - -- Mount options for hpfs. - 'uid', 'gid', 'umask', 'case', 'lower', 'asis', 'conv', 'binary', 'text', 'auto', 'nocheck', - -- Mount options for iso9660. 
- 'norock', 'nojoliet', 'check', 'relaxed', 'strict', 'uid', 'gid', 'map', 'normal', 'offacorn', - 'mode', 'unhide', 'block', 'conv', 'auto', 'binary', 'mtext', 'text', 'cruft', 'session', - 'sbsector', 'iocharset', 'utf8', - -- Mount options for jfs. - 'iocharset', 'resize', 'nointegrity', 'integrity', 'errors', 'continue', 'remount-ro', 'panic', - 'noquota', 'quota', 'usrquota', 'grpquota', - -- Mount options for ntfs. - 'iocharset', 'nls', 'utf8', 'uni_xlate', 'posix', 'uid', 'gid', 'umask', - -- Mount options for overlay. - 'lowerdir', 'upperdir', 'workdir', - -- Mount options for reiserfs. - 'conv', 'hash', 'rupasov', 'tea', 'r5', 'detect', 'hashed_relocation', 'no_unhashed_relocation', - 'noborder', 'nolog', 'notail', 'replayonly', 'resize', 'user_xattr', 'acl', 'barrier', 'none', - 'flush', - -- Mount options for tmpfs. - 'size', 'nr_blocks', 'nr_inodes', 'mode', 'uid', 'gid', 'mpol', 'default', 'prefer', 'bind', - 'interleave', - -- Mount options for ubifs. - 'bulk_read', 'no_bulk_read', 'chk_data_crc', 'no_chk_data_crc.', 'compr', 'none', 'lzo', 'zlib', - -- Mount options for udf. - 'gid', 'umask', 'uid', 'unhide', 'undelete', 'nostrict', 'iocharset', 'bs', 'novrs', 'session', - 'anchor', 'volume', 'partition', 'lastblock', 'fileset', 'rootdir', - -- Mount options for ufs. - 'ufstype', 'old', '44bsd', 'ufs2', '5xbsd', 'sun', 'sunx86', 'hp', 'nextstep', 'nextstep-cd', - 'openstep', 'onerror', 'lock', 'umount', 'repair', - -- Mount options for vfat. - 'uni_xlate', 'posix', 'nonumtail', 'utf8', 'shortname', 'lower', 'win95', 'winnt', 'mixed', - -- Mount options for usbfs. - 'devuid', 'devgid', 'devmode', 'busuid', 'busgid', 'busmode', 'listuid', 'listgid', 'listmode', - -- Mount options for proc. - 'hidepid', - -- Filesystems. 
- 'adfs', 'ados', 'affs', 'anon_inodefs', 'atfs', 'audiofs', 'auto', 'autofs', 'bdev', 'befs', - 'bfs', 'btrfs', 'binfmt_misc', 'cd9660', 'cfs', 'cgroup', 'cifs', 'coda', 'configfs', 'cpuset', - 'cramfs', 'devfs', 'devpts', 'devtmpfs', 'e2compr', 'efs', 'ext2', 'ext2fs', 'ext3', 'ext4', - 'fdesc', 'ffs', 'filecore', 'fuse', 'fuseblk', 'fusectl', 'hfs', 'hpfs', 'hugetlbfs', 'iso9660', - 'jffs', 'jffs2', 'jfs', 'kernfs', 'lfs', 'linprocfs', 'mfs', 'minix', 'mqueue', 'msdos', 'ncpfs', - 'nfs', 'nfsd', 'nilfs2', 'none', 'ntfs', 'null', 'nwfs', 'overlay', 'ovlfs', 'pipefs', 'portal', - 'proc', 'procfs', 'pstore', 'ptyfs', 'qnx4', 'reiserfs', 'ramfs', 'romfs', 'securityfs', 'shm', - 'smbfs', 'squashfs', 'sockfs', 'sshfs', 'std', 'subfs', 'swap', 'sysfs', 'sysv', 'tcfs', 'tmpfs', - 'udf', 'ufs', 'umap', 'umsdos', 'union', 'usbfs', 'userfs', 'vfat', 'vs3fs', 'vxfs', 'wrapfs', - 'wvfs', 'xenfs', 'xfs', 'zisofs' -}) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/gap.lua b/share/vis/lexers/gap.lua @@ -1,45 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Gap LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('gap') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'and', 'break', 'continue', 'do', 'elif', 'else', 'end', 'fail', 'false', 'fi', 'for', 'function', - 'if', 'in', 'infinity', 'local', 'not', 'od', 'or', 'rec', 'repeat', 'return', 'then', 'true', - 'until', 'while' -})) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"', true) -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) - --- Comments. -lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#'))) - --- Numbers. 
-lex:add_rule('number', token(lexer.NUMBER, lexer.dec_num * -lexer.alpha)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('*+-,./:;<=>~^#()[]{}'))) - --- Fold points. -lex:add_fold_point(lexer.KEYWORD, 'function', 'end') -lex:add_fold_point(lexer.KEYWORD, 'do', 'od') -lex:add_fold_point(lexer.KEYWORD, 'if', 'fi') -lex:add_fold_point(lexer.KEYWORD, 'repeat', 'until') - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/gemini.lua b/share/vis/lexers/gemini.lua @@ -1,23 +0,0 @@ --- Copyright 2020-2024 Haelwenn (lanodan) Monnier <contact+gemini.lua@hacktivis.me>. See LICENSE. --- Gemini / Gemtext LPeg lexer. --- See https://gemini.circumlunar.space/docs/specification.html - -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(...) - -local header = lex:tag(lexer.HEADING .. '.h3', lexer.to_eol(lexer.starts_line('###'))) + - lex:tag(lexer.HEADING .. '.h2', lexer.to_eol(lexer.starts_line('##'))) + - lex:tag(lexer.HEADING .. '.h1', lexer.to_eol(lexer.starts_line('#'))) -lex:add_rule('header', header) - -lex:add_rule('list', lex:tag(lexer.LIST, lexer.to_eol(lexer.starts_line('*')))) - -lex:add_rule('blockquote', lex:tag(lexer.STRING, lexer.to_eol(lexer.starts_line('>')))) - -lex:add_rule('pre', lex:tag(lexer.CODE, lexer.to_eol(lexer.range('```', false, true)))) - -lex:add_rule('link', lex:tag(lexer.LINK, lexer.to_eol(lexer.starts_line('=>')))) - -return lex diff --git a/share/vis/lexers/gettext.lua b/share/vis/lexers/gettext.lua @@ -1,31 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Gettext LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('gettext') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. 
-lex:add_rule('keyword', token(lexer.KEYWORD, word_match( - 'msgid msgid_plural msgstr fuzzy c-format no-c-format', true))) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Variables. -lex:add_rule('variable', token(lexer.VARIABLE, S('%$@') * lexer.word)) - --- Strings. -lex:add_rule('string', token(lexer.STRING, lexer.range('"', true))) - --- Comments. -lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#' * S(': .~')))) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/gherkin.lua b/share/vis/lexers/gherkin.lua @@ -1,39 +0,0 @@ --- Copyright 2015-2024 Jason Schindler. See LICENSE. --- Gherkin (https://github.com/cucumber/cucumber/wiki/Gherkin) LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('gherkin', {fold_by_indentation = true}) - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match( - 'And Background But Examples Feature Given Outline Scenario Scenarios Then When'))) - --- Strings. -local doc_str = lexer.range('"""') -local dq_str = lexer.range('"') -lex:add_rule('string', token(lexer.STRING, doc_str + dq_str)) - --- Comments. -lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#'))) - --- Numbers. --- lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Tags. -lex:add_rule('tag', token(lexer.LABEL, '@' * lexer.word^0)) - --- Placeholders. -lex:add_rule('placeholder', token(lexer.VARIABLE, lexer.range('<', '>', false, false, true))) - --- Examples. -lex:add_rule('example', token(lexer.DEFAULT, lexer.to_eol('|'))) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/git-rebase.lua b/share/vis/lexers/git-rebase.lua @@ -1,39 +0,0 @@ --- Copyright 2017-2024 Marc André Tanner. See LICENSE. 
--- git-rebase(1) LPeg lexer. - -local lexer = lexer -local P, R = lpeg.P, lpeg.R - -local lex = lexer.new(..., {lex_by_line = true}) - --- Comments. -lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.to_eol(lexer.starts_line('#')))) - --- Keywords. -lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lexer.starts_line(lex:word_match(lexer.KEYWORD)))) - --- Commit SHA1. -local function patn(pat, min, max) - return -pat^(max + 1) * pat^min -end - -lex:add_rule('commit', lex:tag(lexer.NUMBER, patn(R('09', 'af'), 7, 40))) - -lex:add_rule('message', lex:tag('message', lexer.to_eol())) - --- Word lists. -lex:set_word_list(lexer.KEYWORD, [[ - p pick - r reword - e edit - s squash - f fixup - x exec - d drop - b break - l label - t reset - m merge -]]) - -return lex diff --git a/share/vis/lexers/gleam.lua b/share/vis/lexers/gleam.lua @@ -1,120 +0,0 @@ --- Copyright 2021-2024 Mitchell. See LICENSE. --- Gleam LPeg lexer --- https://gleam.run/ --- Contributed by Tynan Beatty - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local KEY, OP = lexer.KEYWORD, lexer.OPERATOR - -local lex = lexer.new('gleam') - --- Whitespace. -local gleam_ws = token(lexer.WHITESPACE, lexer.space^1) -lex:add_rule('whitespace', gleam_ws) - --- Types. -local typ_tok = token(lexer.TYPE, lexer.upper * lexer.alnum^0) -lex:add_rule('type', typ_tok) - --- Modules. 
-local name = (lexer.lower + '_') * (lexer.lower + lexer.digit + '_')^0 -local fn_name = token(lexer.FUNCTION, name) -local mod_name = token('module', name) -local typ_or_fn = typ_tok + fn_name -local function mod_tok(ws) - return token(KEY, 'import') * ws^1 * mod_name * (ws^0 * token(OP, '/') * ws^0 * mod_name)^0 * - (ws^1 * token(KEY, 'as') * ws^1 * mod_name)^-1 * - (ws^0 * token(OP, '.') * ws^0 * token(OP, '{') * ws^0 * typ_or_fn * - (ws^0 * token(OP, ',') * ws^0 * typ_or_fn)^0 * ws^0 * token(OP, '}'))^-1 -end -lex:add_rule('module', mod_tok(gleam_ws)) -lex:add_style('module', lexer.styles.constant) - --- Keywords. -local key_tok = token(KEY, word_match( - 'as assert case const external fn if import let opaque pub todo try tuple type')) -lex:add_rule('keyword', key_tok) - --- Functions. -local function fn_tok(ws) - local mod_name_op = mod_name * ws^0 * token(OP, '.') - local fn_def_call = mod_name_op^-1 * ws^0 * fn_name * ws^0 * #P('(') - local fn_pipe = token(OP, '|>') * ws^0 * (token(KEY, 'fn') + mod_name_op^-1 * fn_name) - return fn_def_call + fn_pipe -end -lex:add_rule('function', fn_tok(gleam_ws)) - --- Labels. -local id = token(lexer.IDENTIFIER, name) -local function lab_tok(ws) - return token(OP, S('(,')) * ws^0 * token(lexer.LABEL, name) * #(ws^1 * id) -end -lex:add_rule('label', lab_tok(gleam_ws)) - --- Identifiers. -local discard_id = token('discard', '_' * name) -local id_tok = discard_id + id -lex:add_rule('identifier', id_tok) -lex:add_style('discard', lexer.styles.comment) - --- Strings. -local str_tok = token(lexer.STRING, lexer.range('"')) -lex:add_rule('string', str_tok) - --- Comments. -local com_tok = token(lexer.COMMENT, lexer.to_eol('//')) -lex:add_rule('comment', com_tok) - --- Numbers. -local function can_neg(patt) return (lpeg.B(lexer.space + S('+-/*%<>=&|:,.')) * '-')^-1 * patt end -local function can_sep(patt) return (P('_')^-1 * patt^1)^1 end -local dec = lexer.digit * can_sep(lexer.digit)^0 -local float = dec * '.' 
* dec^0 -local bin = '0' * S('bB') * can_sep(S('01')) * -lexer.xdigit -local oct = '0' * S('oO') * can_sep(lpeg.R('07')) -local hex = '0' * S('xX') * can_sep(lexer.xdigit) -local num_tok = token(lexer.NUMBER, can_neg(float) + bin + oct + hex + can_neg(dec)) -lex:add_rule('number', num_tok) - --- Operators. -local op_tok = token(OP, S('+-*/%#!=<>&|.,:;{}[]()')) -lex:add_rule('operator', op_tok) - --- Errors. -local err_tok = token(lexer.ERROR, lexer.any) -lex:add_rule('error', err_tok) - --- Fold points. -lex:add_fold_point(lexer.OPERATOR, '{', '}') -lex:add_fold_point(lexer.OPERATOR, '[', ']') -lex:add_fold_point(lexer.OPERATOR, '(', ')') - --- Embedded Bit Strings. --- Mimic lexer.load() by creating a bitstring-specific whitespace style. -local bitstring = lexer.new(lex._name .. '_bitstring') -local bitstring_ws = token(bitstring._name .. '_whitespace', lexer.space^1) -bitstring:add_rule('whitespace', bitstring_ws) -bitstring:add_style(bitstring._name .. '_whitespace', lexer.styles.whitespace) -bitstring:add_rule('type', typ_tok) -bitstring:add_rule('module', mod_tok(bitstring_ws)) -bitstring:add_rule('keyword', key_tok + token(KEY, word_match{ - 'binary', 'bytes', 'int', 'float', 'bit_string', 'bits', 'utf8', 'utf16', 'utf32', - 'utf8_codepoint', 'utf16_codepoint', 'utf32_codepoint', 'signed', 'unsigned', 'big', 'little', - 'native', 'unit', 'size' -})) -bitstring:add_rule('function', fn_tok(bitstring_ws)) -bitstring:add_rule('label', lab_tok(bitstring_ws)) -bitstring:add_rule('identifier', id_tok) -bitstring:add_rule('string', str_tok) -bitstring:add_rule('comment', com_tok) -bitstring:add_rule('number', num_tok) -bitstring:add_rule('operator', op_tok) -bitstring:add_rule('error', err_tok) -lex:embed(bitstring, token(OP, '<<'), token(OP, '>>')) - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/glsl.lua b/share/vis/lexers/glsl.lua @@ -1,117 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- GLSL LPeg lexer. 
- -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(..., {inherit = lexer.load('ansi_c')}) - --- Word lists. -lex:set_word_list(lexer.KEYWORD, { - 'attribute', 'const', 'uniform', 'varying', 'buffer', 'shared', 'coherent', 'volatile', - 'restrict', 'readonly', 'writeonly', 'layout', 'centroid', 'flat', 'smooth', 'noperspective', - 'patch', 'sample', 'break', 'continue', 'do', 'for', 'while', 'switch', 'case', 'default', 'if', - 'else', 'subroutine', 'in', 'inout', 'out', 'true', 'false', 'invariant', 'precise', 'discard', - 'return', 'lowp', 'mediump', 'highp', 'precision', 'struct', -- - -- Reserved. - 'common', 'partition', 'active', 'asm', 'class', 'union', 'enum', 'typedef', 'template', 'this', - 'resource', 'goto', 'inline', 'noinline', 'public', 'static', 'extern', 'external', 'interface', - 'superp', 'input', 'output', 'filter', 'sizeof', 'cast', 'namespace', 'using' -}) - -lex:set_word_list(lexer.TYPE, { - 'atomic_uint', 'float', 'double', 'int', 'void', 'bool', 'mat2', 'mat3', 'mat4', 'dmat2', 'dmat3', - 'dmat4', 'mat2x2', 'mat2x3', 'mat2x4', 'dmat2x2', 'dmat2x3', 'dmat2x4', 'mat3x2', 'mat3x3', - 'mat3x4', 'dmat3x2', 'dmat3x3', 'dmat3x4', 'mat4x2', 'mat4x3', 'mat4x4', 'dmat4x2', 'dmat4x3', - 'dmat4x4', 'vec2', 'vec3', 'vec4', 'ivec2', 'ivec3', 'ivec4', 'bvec2', 'bvec3', 'bvec4', 'dvec2', - 'dvec3', 'dvec4', 'uint', 'uvec2', 'uvec3', 'uvec4', 'sampler1D', 'sampler2D', 'sampler3D', - 'samplerCube', 'sampler1DShadow', 'sampler2DShadow', 'samplerCubeShadow', 'sampler1DArray', - 'sampler2DArray', 'sampler1DArrayShadow', 'sampler2DArrayShadow', 'isampler1D', 'isampler2D', - 'isampler3D', 'isamplerCube', 'isampler1DArray', 'isampler2DArray', 'usampler1D', 'usampler2D', - 'usampler3D', 'usamplerCube', 'usampler1DArray', 'usampler2DArray', 'sampler2DRect', - 'sampler2DRectShadow', 'isampler2DRect', 'usampler2DRect', 'samplerBuffer', 'isamplerBuffer', - 'usamplerBuffer', 'sampler2DMS', 'isampler2DMS', 'usampler2DMS', 'sampler2DMSArray', - 
'isampler2DMSArray', 'usampler2DMSArray', 'samplerCubeArray', 'samplerCubeArrayShadow', - 'isamplerCubeArray', 'usamplerCubeArray', 'image1D', 'iimage1D', 'uimage1D', 'image2D', - 'iimage2D', 'uimage2D', 'image3D', 'iimage3D', 'uimage3D', 'image2DRect', 'iimage2DRect', - 'uimage2DRect', 'imageCube', 'iimageCube', 'uimageCube', 'imageBuffer', 'iimageBuffer', - 'uimageBuffer', 'image1DArray', 'iimage1DArray', 'uimage1DArray', 'image2DArray', 'iimage2DArray', - 'uimage2DArray', 'imageCubeArray', 'iimageCubeArray', 'uimageCubeArray', 'image2DMS', - 'iimage2DMS', 'uimage2DMS', 'image2DMSArray', 'iimage2DMSArray', 'uimage2DMSArray', - -- Reserved. - 'long', 'short', 'half', 'fixed', 'unsigned', 'hvec2', 'hvec3', 'hvec4', 'fvec2', 'fvec3', - 'fvec4', 'sampler3DRect' -}) - -lex:set_word_list(lexer.FUNCTION_BUILTIN, { - 'radians', 'degrees', 'sin', 'cos', 'tan', 'asin', 'acos', 'atan', 'sinh', 'cosh', 'tanh', - 'asinh', 'acosh', 'atanh', 'pow', 'exp', 'log', 'exp2', 'log2', 'sqrt', 'inversesqrt', 'abs', - 'sign', 'floor', 'trunc', 'round', 'roundEven', 'ceil', 'fract', 'mod', 'modf', 'min', 'max', - 'clamp', 'mix', 'step', 'smoothstep', 'isnan', 'isinf', 'floatBitsToInt', 'floatBitsToUint', - 'intBitsToFloat', 'uintBitsToFloat', 'fma', 'frexp', 'ldexp', 'packUnorm2x16', 'packUnorm4x8', - 'packSnorm4x8', 'unpackUnorm2x16', 'unpackUnorm4x8', 'unpackSnorm4x8', 'packDouble2x32', - 'unpackDouble2x32', 'length', 'distance', 'dot', 'cross', 'normalize', 'ftransform', - 'faceforward', 'reflect', 'refract', 'matrixCompMult', 'outerProduct', 'transpose', 'determinant', - 'inverse', 'lessThan', 'lessThanEqual', 'greaterThan', 'greaterThanEqual', 'equal', 'notEqual', - 'any', 'all', 'not', 'uaddCarry', 'usubBorrow', 'umulExtended', 'imulExtended', 'bitfieldExtract', - 'bitfildInsert', 'bitfieldReverse', 'bitCount', 'findLSB', 'findMSB', 'textureSize', - 'textureQueryLOD', 'texture', 'textureProj', 'textureLod', 'textureOffset', 'texelFetch', - 'texelFetchOffset', 'textureProjOffset', 
'textureLodOffset', 'textureProjLod', - 'textureProjLodOffset', 'textureGrad', 'textureGradOffset', 'textureProjGrad', - 'textureProjGradOffset', 'textureGather', 'textureGatherOffset', 'texture1D', 'texture2D', - 'texture3D', 'texture1DProj', 'texture2DProj', 'texture3DProj', 'texture1DLod', 'texture2DLod', - 'texture3DLod', 'texture1DProjLod', 'texture2DProjLod', 'texture3DProjLod', 'textureCube', - 'textureCubeLod', 'shadow1D', 'shadow2D', 'shadow1DProj', 'shadow2DProj', 'shadow1DLod', - 'shadow2DLod', 'shadow1DProjLod', 'shadow2DProjLod', 'dFdx', 'dFdy', 'fwidth', - 'interpolateAtCentroid', 'interpolateAtSample', 'interpolateAtOffset', 'noise1', 'noise2', - 'noise3', 'noise4', 'EmitStreamVertex', 'EndStreamPrimitive', 'EmitVertex', 'EndPrimitive', - 'barrier' -}) - -lex:set_word_list(lexer.VARIABLE, { - 'gl_VertexID', 'gl_InstanceID', 'gl_Position', 'gl_PointSize', 'gl_ClipDistance', - 'gl_PrimitiveIDIn', 'gl_InvocationID', 'gl_PrimitiveID', 'gl_Layer', 'gl_PatchVerticesIn', - 'gl_TessLevelOuter', 'gl_TessLevelInner', 'gl_TessCoord', 'gl_FragCoord', 'gl_FrontFacing', - 'gl_PointCoord', 'gl_SampleID', 'gl_SamplePosition', 'gl_FragColor', 'gl_FragData', - 'gl_FragDepth', 'gl_SampleMask', 'gl_ClipVertex', 'gl_FrontColor', 'gl_BackColor', - 'gl_FrontSecondaryColor', 'gl_BackSecondaryColor', 'gl_TexCoord', 'gl_FogFragCoord', 'gl_Color', - 'gl_SecondaryColor', 'gl_Normal', 'gl_Vertex', 'gl_MultiTexCoord0', 'gl_MultiTexCoord1', - 'gl_MultiTexCoord2', 'gl_MultiTexCoord3', 'gl_MultiTexCoord4', 'gl_MultiTexCoord5', - 'gl_MultiTexCoord6', 'gl_MultiTexCoord7', 'gl_FogCoord' -}) - -lex:set_word_list(lexer.CONSTANT_BUILTIN, { - '__LINE__', '__FILE__', '__VERSION__', -- - 'gl_MaxVertexAttribs', 'gl_MaxVertexUniformComponents', 'gl_MaxVaryingFloats', - 'gl_MaxVaryingComponents', 'gl_MaxVertexOutputComponents', 'gl_MaxGeometryInputComponents', - 'gl_MaxGeometryOutputComponents', 'gl_MaxFragmentInputComponents', - 'gl_MaxVertexTextureImageUnits', 
'gl_MaxCombinedTextureImageUnits', 'gl_MaxTextureImageUnits', - 'gl_MaxFragmentUniformComponents', 'gl_MaxDrawBuffers', 'gl_MaxClipDistances', - 'gl_MaxGeometryTextureImageUnits', 'gl_MaxGeometryOutputVertices', - 'gl_MaxGeometryTotalOutputComponents', 'gl_MaxGeometryUniformComponents', - 'gl_MaxGeometryVaryingComponents', 'gl_MaxTessControlInputComponents', - 'gl_MaxTessControlOutputComponents', 'gl_MaxTessControlTextureImageUnits', - 'gl_MaxTessControlUniformComponents', 'gl_MaxTessControlTotalOutputComponents', - 'gl_MaxTessEvaluationInputComponents', 'gl_MaxTessEvaluationOutputComponents', - 'gl_MaxTessEvaluationTextureImageUnits', 'gl_MaxTessEvaluationUniformComponents', - 'gl_MaxTessPatchComponents', 'gl_MaxPatchVertices', 'gl_MaxTessGenLevel', 'gl_MaxTextureUnits', - 'gl_MaxTextureCoords', 'gl_MaxClipPlanes', -- - 'gl_DepthRange', 'gl_ModelViewMatrix', 'gl_ProjectionMatrix', 'gl_ModelViewProjectionMatrix', - 'gl_TextureMatrix', 'gl_NormalMatrix', 'gl_ModelViewMatrixInverse', 'gl_ProjectionMatrixInverse', - 'gl_ModelViewProjectionMatrixInverse', 'gl_TextureMatrixInverse', 'gl_ModelViewMatrixTranspose', - 'gl_ProjectionMatrixTranspose', 'gl_ModelViewProjectionMatrixTranspose', - 'gl_TextureMatrixTranspose', 'gl_ModelViewMatrixInverseTranspose', - 'gl_ProjectionMatrixInverseTranspose', 'gl_ModelViewProjectionMatrixInverseTranspose', - 'gl_TextureMatrixInverseTranspose', 'gl_NormalScale', 'gl_ClipPlane', 'gl_Point', - 'gl_FrontMaterial', 'gl_BackMaterial', 'gl_LightSource', 'gl_LightModel', - 'gl_FrontLightModelProduct', 'gl_BackLightModelProduct', 'gl_FrontLightProduct', - 'gl_BackLightProduct', 'gl_TextureEnvColor', 'gl_EyePlaneS', 'gl_EyePlaneT', 'gl_EyePlaneR', - 'gl_EyePlaneQ', 'gl_ObjectPlaneS', 'gl_ObjectPlaneT', 'gl_ObjectPlaneR', 'gl_ObjectPlaneQ', - 'gl_Fog' -}) - -lex:set_word_list(lexer.PREPROCESSOR, 'extension version', true) - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/gnuplot.lua 
b/share/vis/lexers/gnuplot.lua @@ -1,70 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Gnuplot LPeg lexer. - -local lexer = lexer -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(...) - --- Keywords. -lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD))) - --- Functions. -lex:add_rule('function', lex:tag(lexer.FUNCTION_BUILTIN, lex:word_match(lexer.FUNCTION_BUILTIN))) - --- Variables. -lex:add_rule('variable', lex:tag(lexer.VARIABLE_BUILTIN, lex:word_match(lexer.VARIABLE_BUILTIN))) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local sq_str = lexer.range("'") -local dq_str = lexer.range('"') -local br_str = lexer.range('[', ']', true) + lexer.range('{', '}', true) -lex:add_rule('string', lex:tag(lexer.STRING, sq_str + dq_str + br_str)) - --- Comments. -lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.to_eol('#'))) - --- Numbers. -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number)) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('-+~!$*%=<>&|^?:;()'))) - --- Word lists. 
-lex:set_word_list(lexer.KEYWORD, { - 'cd', 'call', 'clear', 'exit', 'fit', 'help', 'history', 'if', 'load', 'pause', 'plot', 'using', - 'with', 'index', 'every', 'smooth', 'thru', 'print', 'pwd', 'quit', 'replot', 'reread', 'reset', - 'save', 'set', 'show', 'unset', 'shell', 'splot', 'system', 'test', 'unset', 'update' -}) - -lex:set_word_list(lexer.FUNCTION_BUILTIN, { - 'abs', 'acos', 'acosh', 'arg', 'asin', 'asinh', 'atan', 'atan2', 'atanh', 'besj0', 'besj1', - 'besy0', 'besy1', 'ceil', 'cos', 'cosh', 'erf', 'erfc', 'exp', 'floor', 'gamma', 'ibeta', - 'inverf', 'igamma', 'imag', 'invnorm', 'int', 'lambertw', 'lgamma', 'log', 'log10', 'norm', - 'rand', 'real', 'sgn', 'sin', 'sinh', 'sqrt', 'tan', 'tanh', 'column', 'defined', 'tm_hour', - 'tm_mday', 'tm_min', 'tm_mon', 'tm_sec', 'tm_wday', 'tm_yday', 'tm_year', 'valid' -}) - -lex:set_word_list(lexer.VARIABLE_BUILTIN, { - 'angles', 'arrow', 'autoscale', 'bars', 'bmargin', 'border', 'boxwidth', 'clabel', 'clip', - 'cntrparam', 'colorbox', 'contour', 'datafile', 'decimalsign', 'dgrid3d', 'dummy', 'encoding', - 'fit', 'fontpath', 'format', 'functions', 'function', 'grid', 'hidden3d', 'historysize', - 'isosamples', 'key', 'label', 'lmargin', 'loadpath', 'locale', 'logscale', 'mapping', 'margin', - 'mouse', 'multiplot', 'mx2tics', 'mxtics', 'my2tics', 'mytics', 'mztics', 'offsets', 'origin', - 'output', 'parametric', 'plot', 'pm3d', 'palette', 'pointsize', 'polar', 'print', 'rmargin', - 'rrange', 'samples', 'size', 'style', 'surface', 'terminal', 'tics', 'ticslevel', 'ticscale', - 'timestamp', 'timefmt', 'title', 'tmargin', 'trange', 'urange', 'variables', 'version', 'view', - 'vrange', 'x2data', 'x2dtics', 'x2label', 'x2mtics', 'x2range', 'x2tics', 'x2zeroaxis', 'xdata', - 'xdtics', 'xlabel', 'xmtics', 'xrange', 'xtics', 'xzeroaxis', 'y2data', 'y2dtics', 'y2label', - 'y2mtics', 'y2range', 'y2tics', 'y2zeroaxis', 'ydata', 'ydtics', 'ylabel', 'ymtics', 'yrange', - 'ytics', 'yzeroaxis', 'zdata', 'zdtics', 'cbdata', 
'cbdtics', 'zero', 'zeroaxis', 'zlabel', - 'zmtics', 'zrange', 'ztics', 'cblabel', 'cbmtics', 'cbrange', 'cbtics' -}) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/go.lua b/share/vis/lexers/go.lua @@ -1,71 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Go LPeg lexer. - -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(...) - --- Keywords. -lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD))) - --- Constants. -lex:add_rule('constant', lex:tag(lexer.CONSTANT_BUILTIN, lex:word_match(lexer.CONSTANT_BUILTIN))) - --- Types. -lex:add_rule('type', lex:tag(lexer.TYPE, lex:word_match(lexer.TYPE))) - --- Functions. -local builtin_func = -lpeg.B('.') * - lex:tag(lexer.FUNCTION_BUILTIN, lex:word_match(lexer.FUNCTION_BUILTIN)) -local func = lex:tag(lexer.FUNCTION, lexer.word) -local method = lpeg.B('.') * lex:tag(lexer.FUNCTION_METHOD, lexer.word) -lex:add_rule('function', (builtin_func + method + func) * #(lexer.space^0 * '(')) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"', true) -local raw_str = lexer.range('`', false, false) -lex:add_rule('string', lex:tag(lexer.STRING, sq_str + dq_str + raw_str)) - --- Comments. -local line_comment = lexer.to_eol('//') -local block_comment = lexer.range('/*', '*/') -lex:add_rule('comment', lex:tag(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number * P('i')^-1)) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('+-*/%&|^<>=!~:;.,()[]{}'))) - --- Fold points. -lex:add_fold_point(lexer.OPERATOR, '{', '}') -lex:add_fold_point(lexer.COMMENT, '/*', '*/') - --- Word lists. 
-lex:set_word_list(lexer.KEYWORD, { - 'break', 'case', 'chan', 'const', 'continue', 'default', 'defer', 'else', 'fallthrough', 'for', - 'func', 'go', 'goto', 'if', 'import', 'interface', 'map', 'package', 'range', 'return', 'select', - 'struct', 'switch', 'type', 'var' -}) - -lex:set_word_list(lexer.CONSTANT_BUILTIN, 'true false iota nil') - -lex:set_word_list(lexer.TYPE, { - 'any', 'bool', 'byte', 'comparable', 'complex64', 'complex128', 'error', 'float32', 'float64', - 'int', 'int8', 'int16', 'int32', 'int64', 'rune', 'string', 'uint', 'uint8', 'uint16', 'uint32', - 'uint64', 'uintptr' -}) - -lex:set_word_list(lexer.FUNCTION_BUILTIN, { - 'append', 'cap', 'close', 'complex', 'copy', 'delete', 'imag', 'len', 'make', 'new', 'panic', - 'print', 'println', 'real', 'recover' -}) - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/groovy.lua b/share/vis/lexers/groovy.lua @@ -1,68 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Groovy LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('groovy') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'abstract', 'break', 'case', 'catch', 'continue', 'default', 'do', 'else', 'extends', 'final', - 'finally', 'for', 'if', 'implements', 'instanceof', 'native', 'new', 'private', 'protected', - 'public', 'return', 'static', 'switch', 'synchronized', 'throw', 'throws', 'transient', 'try', - 'volatile', 'while', 'strictfp', 'package', 'import', 'as', 'assert', 'def', 'mixin', 'property', - 'test', 'using', 'in', 'false', 'null', 'super', 'this', 'true', 'it' -})) - --- Functions. 
-lex:add_rule('function', token(lexer.FUNCTION, word_match{ - 'abs', 'any', 'append', 'asList', 'asWritable', 'call', 'collect', 'compareTo', 'count', 'div', - 'dump', 'each', 'eachByte', 'eachFile', 'eachLine', 'every', 'find', 'findAll', 'flatten', - 'getAt', 'getErr', 'getIn', 'getOut', 'getText', 'grep', 'immutable', 'inject', 'inspect', - 'intersect', 'invokeMethods', 'isCase', 'join', 'leftShift', 'minus', 'multiply', - 'newInputStream', 'newOutputStream', 'newPrintWriter', 'newReader', 'newWriter', 'next', 'plus', - 'pop', 'power', 'previous', 'print', 'println', 'push', 'putAt', 'read', 'readBytes', 'readLines', - 'reverse', 'reverseEach', 'round', 'size', 'sort', 'splitEachLine', 'step', 'subMap', 'times', - 'toInteger', 'toList', 'tokenize', 'upto', 'waitForOrKill', 'withPrintWriter', 'withReader', - 'withStream', 'withWriter', 'withWriterAppend', 'write', 'writeLine' -})) - --- Types. -lex:add_rule('type', token(lexer.TYPE, word_match( - 'boolean byte char class double float int interface long short void'))) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Comments. -local line_comment = lexer.to_eol('//', true) -local block_comment = lexer.range('/*', '*/') -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Strings. -local sq_str = lexer.range("'") -local dq_str = lexer.range('"') -local tq_str = lexer.range("'''") + lexer.range('"""') -local string = token(lexer.STRING, tq_str + sq_str + dq_str) -local regex_str = lexer.after_set('=~|!<>+-*?&,:;([{', lexer.range('/', true)) -local regex = token(lexer.REGEX, regex_str) -lex:add_rule('string', string + regex) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('=~|!<>+-/*?&.,:;()[]{}'))) - --- Fold points. 
-lex:add_fold_point(lexer.OPERATOR, '{', '}') -lex:add_fold_point(lexer.COMMENT, '/*', '*/') - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/gtkrc.lua b/share/vis/lexers/gtkrc.lua @@ -1,57 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Gtkrc LPeg lexer. - -local lexer = lexer -local word_match = lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(...) - --- Keywords. -lex:add_rule('keyword', lex:tag(lexer.KEYWORD, word_match( - 'binding class include module_path pixmap_path im_module_file style widget widget_class'))) - --- Variables. -lex:add_rule('variable', lex:tag(lexer.VARIABLE_BUILTIN, lex:word_match(lexer.VARIABLE_BUILTIN))) - --- States. -lex:add_rule('state', lex:tag(lexer.CONSTANT_BUILTIN, lex:word_match(lexer.CONSTANT_BUILTIN))) - --- Functions. -lex:add_rule('function', lex:tag(lexer.FUNCTION_BUILTIN, lex:word_match(lexer.FUNCTION_BUILTIN))) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.alpha * (lexer.alnum + S('_-'))^0)) - --- Strings. -local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"', true) -lex:add_rule('string', lex:tag(lexer.STRING, sq_str + dq_str)) - --- Comments. -lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.to_eol('#'))) - --- Numbers. -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.digit^1 * ('.' * lexer.digit^1)^-1)) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S(':=,*()[]{}'))) - --- Fold points. -lex:add_fold_point(lexer.OPERATOR, '{', '}') - --- Word lists. 
-lex:set_word_list(lexer.VARIABLE_BUILTIN, { - 'bg', 'fg', 'base', 'text', 'xthickness', 'ythickness', 'bg_pixmap', 'font', 'fontset', - 'font_name', 'stock', 'color', 'engine' -}) - -lex:set_word_list(lexer.CONSTANT_BUILTIN, { - 'ACTIVE', 'SELECTED', 'NORMAL', 'PRELIGHT', 'INSENSITIVE', 'TRUE', 'FALSE' -}) - -lex:set_word_list(lexer.FUNCTION_BUILTIN, {'mix', 'shade', 'lighter', 'darker'}) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/hare.lua b/share/vis/lexers/hare.lua @@ -1,81 +0,0 @@ --- Copyright 2021-2024 Mitchell. See LICENSE. --- Hare LPeg lexer - -local lexer = lexer -local P, R, S = lpeg.P, lpeg.R, lpeg.S - -local lex = lexer.new(...) - --- Keywords. -lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD))) - --- Types. -lex:add_rule('type', lex:tag(lexer.TYPE, lex:word_match(lexer.TYPE))) - --- Functions. -local builtin_func = lex:tag(lexer.FUNCTION_BUILTIN, - lex:word_match(lexer.FUNCTION_BUILTIN) + 'size' * #(lexer.space^0 * '(')) -local func = lex:tag(lexer.FUNCTION, lex:tag(lexer.FUNCTION, lexer.word * ('::' * lexer.word)^0 * - #(lexer.space^0 * '('))) -lex:add_rule('function', builtin_func + func) - --- Constants. -lex:add_rule('constant', lex:tag(lexer.CONSTANT_BUILTIN, lex:word_match(lexer.CONSTANT_BUILTIN))) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"') -local raw_str = lexer.range('`', false, false) -lex:add_rule('string', lex:tag(lexer.STRING, sq_str + dq_str + raw_str)) - --- Comments. -lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.to_eol('//'))) - --- Numbers. 
-local bin_num = '0b' * R('01')^1 * -lexer.xdigit -local oct_num = '0o' * R('07')^1 * -lexer.xdigit -local hex_num = '0x' * lexer.xdigit^1 -local int_suffix = lexer.word_match('i u z i8 i16 i32 i64 u8 u16 u32 u64') -local float_suffix = lexer.word_match('f32 f64') -local suffix = int_suffix + float_suffix -local integer = S('+-')^-1 * - ((hex_num + oct_num + bin_num) * int_suffix^-1 + lexer.dec_num * suffix^-1) -local float = lexer.float * float_suffix^-1 -lex:add_rule('number', lex:tag(lexer.NUMBER, integer + float)) - --- Error assertions -lex:add_rule('error_assert', lex:tag(lexer.ERROR .. '.assert', lpeg.B(')') * P('!'))) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('+-/*%^!=&|?~:;,.()[]{}<>'))) - --- Attributes. -lex:add_rule('attribute', lex:tag(lexer.ANNOTATION, '@' * lexer.word)) - --- Fold points. -lex:add_fold_point(lexer.OPERATOR, '{', '}') - --- Word lists. -lex:set_word_list(lexer.KEYWORD, { - 'as', 'break', 'case', 'const', 'continue', 'def', 'defer', 'else', 'export', 'fn', 'for', 'if', - 'is', 'let', 'match', 'nullable', 'return', 'static', 'switch', 'type', 'use', 'yield', '_' -}) - -lex:set_word_list(lexer.TYPE, { - 'bool', 'enum', 'f32', 'f64', 'i16', 'i32', 'i64', 'i8', 'int', 'opaque', 'never', 'rune', 'size', - 'str', 'struct', 'u16', 'u32', 'u64', 'u8', 'uint', 'uintptr', 'union', 'valist' -}) - -lex:set_word_list(lexer.FUNCTION_BUILTIN, { - 'abort', 'align', 'alloc', 'append', 'assert', 'delete', 'free', 'insert', 'len', 'offset', - 'vaarg', 'vaend', 'vastart' -}) - -lex:set_word_list(lexer.CONSTANT_BUILTIN, 'false null true void') - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/haskell.lua b/share/vis/lexers/haskell.lua @@ -1,47 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Haskell LPeg lexer. --- Modified by Alex Suraci. 
- -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('haskell', {fold_by_indentation = true}) - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'case', 'class', 'data', 'default', 'deriving', 'do', 'else', 'if', 'import', 'in', 'infix', - 'infixl', 'infixr', 'instance', 'let', 'module', 'newtype', 'of', 'then', 'type', 'where', '_', - 'as', 'qualified', 'hiding' -})) - --- Types & type constructors. -local word = (lexer.alnum + S("._'#"))^0 -local op = lexer.punct - S('()[]{}') -lex:add_rule('type', token(lexer.TYPE, (lexer.upper * word) + (':' * (op^1 - ':')))) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, (lexer.alpha + '_') * word)) - --- Strings. -local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"') -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) - --- Comments. -local line_comment = lexer.to_eol('--', true) -local block_comment = lexer.range('{-', '-}') -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, op)) - -lexer.property['scintillua.comment'] = '--' - -return lex diff --git a/share/vis/lexers/html.lua b/share/vis/lexers/html.lua @@ -1,152 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- HTML LPeg lexer. - -local lexer = lexer -local word_match = lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(..., {no_user_word_lists = true}) - --- Comments. -lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.range('<!--', '-->'))) - --- Doctype. -lex:add_rule('doctype', - lex:tag(lexer.TAG .. '.doctype', lexer.range('<!' * word_match('doctype', true), '>'))) - --- Tags. 
-local paired_tag = lex:tag(lexer.TAG, lex:word_match(lexer.TAG, true)) -local single_tag = lex:tag(lexer.TAG .. '.single', lex:word_match(lexer.TAG .. '.single', true)) -local known_tag = paired_tag + single_tag -local unknown_tag = lex:tag(lexer.TAG .. '.unknown', (lexer.alnum + '-')^1) -local tag = lex:tag(lexer.TAG .. '.chars', '<' * P('/')^-1) * (known_tag + unknown_tag) * -P(':') -lex:add_rule('tag', tag) - --- Closing tags. -local tag_close = lex:tag(lexer.TAG .. '.chars', P('/')^-1 * '>') -lex:add_rule('tag_close', tag_close) - --- Equals. --- TODO: performance is terrible on large files. -local in_tag = P(function(input, index) - local before = input:sub(1, index - 1) - local s, e = before:find('<[^>]-$'), before:find('>[^<]-$') - if s and e then return s > e end - if s then return true end - return input:find('^[^<]->', index) ~= nil -end) - -local equals = lex:tag(lexer.OPERATOR, '=') -- * in_tag --- lex:add_rule('equals', equals) - --- Attributes. -local known_attribute = lex:tag(lexer.ATTRIBUTE, lex:word_match(lexer.ATTRIBUTE, true) + - ((P('data-') + 'aria-') * (lexer.alnum + '-')^1)) -local unknown_attribute = lex:tag(lexer.ATTRIBUTE .. '.unknown', (lexer.alnum + '-')^1) -local ws = lex:get_rule('whitespace') -local attribute_eq = (known_attribute + unknown_attribute) * ws^-1 * equals -lex:add_rule('attribute', attribute_eq) - --- Strings. -local string = lex:tag(lexer.STRING, lexer.after_set('=', lexer.range("'") + lexer.range('"'))) -lex:add_rule('string', string) - --- Numbers. -local number = lex:tag(lexer.NUMBER, lexer.dec_num * P('%')^-1) -lex:add_rule('number', lexer.after_set('=', number)) -- *in_tag) - --- Entities. -lex:add_rule('entity', lex:tag(lexer.CONSTANT_BUILTIN .. '.entity', - '&' * (lexer.any - lexer.space - ';')^1 * ';')) - --- Fold points. 
-local function disambiguate_lt(text, pos, line, s) - if line:find('/>', s) then - return 0 - elseif line:find('^</', s) then - return -1 - else - return 1 - end -end -lex:add_fold_point(lexer.TAG .. '.chars', '<', disambiguate_lt) -lex:add_fold_point(lexer.COMMENT, '<!--', '-->') - --- Tags that start embedded languages. --- Export these patterns for proxy lexers (e.g. ASP) that need them. -lex.embed_start_tag = tag * (ws * attribute_eq * ws^-1 * string)^0 * ws^-1 * tag_close -lex.embed_end_tag = tag * tag_close - --- Embedded CSS (<style type="text/css"> ... </style>). -local css = lexer.load('css') -local style_tag = word_match('style', true) -local css_start_rule = #('<' * style_tag * ('>' + P(function(input, index) - if input:find('^%s+type%s*=%s*(["\'])text/css%1', index) then return true end -end))) * lex.embed_start_tag -local css_end_rule = #('</' * style_tag * '>') * lex.embed_end_tag -lex:embed(css, css_start_rule, css_end_rule) --- Embedded CSS in style="" attribute. -local style = lexer.load('css', 'css.style') -css_start_rule = #(P('style') * lexer.space^0 * '=') * attribute_eq * ws^-1 * - lex:tag(lexer.STRING, '"') -css_end_rule = lex:tag(lexer.STRING, '"') -lex:embed(style, css_start_rule, css_end_rule) -- only double-quotes for now - --- Embedded JavaScript (<script type="text/javascript"> ... </script>). -local js = lexer.load('javascript') -local script_tag = word_match('script', true) -local js_start_rule = #('<' * script_tag * ('>' + P(function(input, index) - if input:find('^%s+type%s*=%s*(["\'])text/javascript%1', index) then return true end -end))) * lex.embed_start_tag -local js_end_rule = #('</' * script_tag * '>') * lex.embed_end_tag -lex:embed(js, js_start_rule, js_end_rule) - --- Embedded CoffeeScript (<script type="text/coffeescript"> ... </script>). 
-local cs = lexer.load('coffeescript') -script_tag = word_match('script', true) -local cs_start_rule = #('<' * script_tag * P(function(input, index) - if input:find('^[^>]+type%s*=%s*(["\'])text/coffeescript%1', index) then return true end -end)) * lex.embed_start_tag -local cs_end_rule = #('</' * script_tag * '>') * lex.embed_end_tag -lex:embed(cs, cs_start_rule, cs_end_rule) - --- Word lists. -lex:set_word_list(lexer.TAG .. '.single', { - 'area', 'base', 'br', 'col', 'command', 'embed', 'hr', 'img', 'input', 'keygen', 'link', 'meta', - 'param', 'source', 'track', 'wbr' -}) - -lex:set_word_list(lexer.TAG, { - 'a', 'abbr', 'address', 'article', 'aside', 'audio', 'b', 'bdi', 'bdo', 'blockquote', 'body', - 'button', 'canvas', 'caption', 'cite', 'code', 'colgroup', 'content', 'data', 'datalist', 'dd', - 'decorator', 'del', 'details', 'dfn', 'div', 'dl', 'dt', 'element', 'em', 'fieldset', - 'figcaption', 'figure', 'footer', 'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'head', 'header', - 'html', 'i', 'iframe', 'ins', 'kbd', 'label', 'legend', 'li', 'main', 'map', 'mark', 'menu', - 'menuitem', 'meter', 'nav', 'noscript', 'object', 'ol', 'optgroup', 'option', 'output', 'p', - 'pre', 'progress', 'q', 'rp', 'rt', 'ruby', 's', 'samp', 'script', 'section', 'select', 'shadow', - 'small', 'spacer', 'span', 'strong', 'style', 'sub', 'summary', 'sup', 'table', 'tbody', 'td', - 'template', 'textarea', 'tfoot', 'th', 'thead', 'time', 'title', 'tr', 'u', 'ul', 'var', 'video' -}) - -lex:set_word_list(lexer.ATTRIBUTE, { - 'accept', 'accept-charset', 'accesskey', 'action', 'align', 'alt', 'async', 'autocomplete', - 'autofocus', 'autoplay', 'bgcolor', 'border', 'buffered', 'challenge', 'charset', 'checked', - 'cite', 'class', 'code', 'codebase', 'color', 'cols', 'colspan', 'content', 'contenteditable', - 'contextmenu', 'controls', 'coords', 'data', 'data-', 'datetime', 'default', 'defer', 'dir', - 'dirname', 'disabled', 'download', 'draggable', 'dropzone', 'enctype', 'for', 'form', 
'headers', - 'height', 'hidden', 'high', 'href', 'hreflang', 'http-equiv', 'icon', 'id', 'ismap', 'itemprop', - 'keytype', 'kind', 'label', 'lang', 'language', 'list', 'loop', 'low', 'manifest', 'max', - 'maxlength', 'media', 'method', 'min', 'multiple', 'name', 'novalidate', 'open', 'optimum', - 'pattern', 'ping', 'placeholder', 'poster', 'preload', 'pubdate', 'radiogroup', 'readonly', 'rel', - 'required', 'reversed', 'role', 'rows', 'rowspan', 'sandbox', 'scope', 'scoped', 'seamless', - 'selected', 'shape', 'size', 'sizes', 'span', 'spellcheck', 'src', 'srcdoc', 'srclang', 'start', - 'step', 'style', 'summary', 'tabindex', 'target', 'title', 'type', 'usemap', 'value', 'width', - 'wrap' -}) - -lexer.property['scintillua.comment'] = '<!--|-->' -lexer.property['scintillua.angle.braces'] = '1' -lexer.property['scintillua.word.chars'] = - 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-' - -return lex diff --git a/share/vis/lexers/icon.lua b/share/vis/lexers/icon.lua @@ -1,61 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- LPeg lexer for the Icon programming language. --- http://www.cs.arizona.edu/icon --- Contributed by Carl Sturtivant. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('icon') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'break', 'by', 'case', 'create', 'default', 'do', 'else', 'end', 'every', 'fail', 'global', 'if', - 'initial', 'invocable', 'link', 'local', 'next', 'not', 'of', 'procedure', 'record', 'repeat', - 'return', 'static', 'suspend', 'then', 'to', 'until', 'while' -})) - --- Icon Keywords: unique to Icon. 
-lex:add_rule('special_keyword', token('special_keyword', '&' * word_match{ - 'allocated', 'ascii', 'clock', 'collections', 'cset', 'current', 'date', 'dateline', 'digits', - 'dump', 'e', 'error', 'errornumber', 'errortext', 'errorvalue', 'errout', 'fail', 'features', - 'file', 'host', 'input', 'lcase', 'letters', 'level', 'line', 'main', 'null', 'output', 'phi', - 'pi', 'pos', 'progname', 'random', 'regions', 'source', 'storage', 'subject', 'time', 'trace', - 'ucase', 'version' -})) -lex:add_style('special_keyword', lexer.styles.type) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local sq_str = lexer.range("'") -local dq_str = lexer.range('"') -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) - --- Comments. -lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#', true))) - --- Numbers. -local radix_literal = P('-')^-1 * lexer.dec_num * S('rR') * lexer.alnum^1 -lex:add_rule('number', token(lexer.NUMBER, radix_literal + lexer.number)) - --- Preprocessor. -lex:add_rule('preproc', token(lexer.PREPROCESSOR, '$' * - word_match('define else endif error ifdef ifndef include line undef'))) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>~!=^&|?~@:;,.()[]{}'))) - --- Fold points. -lex:add_fold_point(lexer.PREPROCESSOR, 'ifdef', 'endif') -lex:add_fold_point(lexer.PREPROCESSOR, 'ifndef', 'endif') -lex:add_fold_point(lexer.KEYWORD, 'procedure', 'end') - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/idl.lua b/share/vis/lexers/idl.lua @@ -1,52 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- IDL LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('idl') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. 
-lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'abstract', 'attribute', 'case', 'const', 'context', 'custom', 'default', 'enum', 'exception', - 'factory', 'FALSE', 'in', 'inout', 'interface', 'local', 'module', 'native', 'oneway', 'out', - 'private', 'public', 'raises', 'readonly', 'struct', 'support', 'switch', 'TRUE', 'truncatable', - 'typedef', 'union', 'valuetype' -})) - --- Types. -lex:add_rule('type', token(lexer.TYPE, word_match{ - 'any', 'boolean', 'char', 'double', 'fixed', 'float', 'long', 'Object', 'octet', 'sequence', - 'short', 'string', 'unsigned', 'ValueBase', 'void', 'wchar', 'wstring' -})) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"', true) -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) - --- Comments. -local line_comment = lexer.to_eol('//', true) -local block_comment = lexer.range('/*', '*/') -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Preprocessor. -lex:add_rule('preproc', token(lexer.PREPROCESSOR, lexer.starts_line('#') * - word_match('define undef ifdef ifndef if elif else endif include warning pragma'))) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('!<>=+-/*%&|^~.,:;?()[]{}'))) - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/inform.lua b/share/vis/lexers/inform.lua @@ -1,76 +0,0 @@ --- Copyright 2010-2024 Jeff Stone. See LICENSE. --- Inform 6 LPeg lexer for Scintillua. --- JMS 2010-04-25. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('inform') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. 
-lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'Abbreviate', 'Array', 'Attribute', 'Class', 'Constant', 'Default', 'End', 'Endif', 'Extend', - 'Global', 'Ifdef', 'Iffalse', 'Ifndef', 'Ifnot', 'Iftrue', 'Import', 'Include', 'Link', - 'Lowstring', 'Message', 'Object', 'Property', 'Release', 'Replace', 'Serial', 'StartDaemon', - 'Statusline', 'StopDaemon', 'Switches', 'Verb', -- - 'absent', 'action', 'actor', 'add_to_scope', 'address', 'additive', 'after', 'and', 'animate', - 'article', 'articles', 'before', 'bold', 'box', 'break', 'cant_go', 'capacity', 'char', 'class', - 'child', 'children', 'clothing', 'concealed', 'container', 'continue', 'creature', 'daemon', - 'deadflag', 'default', 'describe', 'description', 'do', 'door', 'door_dir', 'door_to', 'd_to', - 'd_obj', 'e_to', 'e_obj', 'each_turn', 'edible', 'else', 'enterable', 'false', 'female', 'first', - 'font', 'for', 'found_in', 'general', 'give', 'grammar', 'has', 'hasnt', 'held', 'if', 'in', - 'in_to', 'in_obj', 'initial', 'inside_description', 'invent', 'jump', 'last', 'life', 'light', - 'list_together', 'location', 'lockable', 'locked', 'male', 'move', 'moved', 'multi', - 'multiexcept', 'multiheld', 'multiinside', 'n_to', 'n_obj', 'ne_to', 'ne_obj', 'nw_to', 'nw_obj', - 'name', 'neuter', 'new_line', 'nothing', 'notin', 'noun', 'number', 'objectloop', 'ofclass', - 'off', 'on', 'only', 'open', 'openable', 'or', 'orders', 'out_to', 'out_obj', 'parent', - 'parse_name', 'player', 'plural', 'pluralname', 'print', 'print_ret', 'private', 'proper', - 'provides', 'random', 'react_after', 'react_before', 'remove', 'replace', 'return', 'reverse', - 'rfalseroman', 'rtrue', 's_to', 's_obj', 'se_to', 'se_obj', 'sw_to', 'sw_obj', 'scenery', 'scope', - 'score', 'scored', 'second', 'self', 'short_name', 'short_name_indef', 'sibling', 'spaces', - 'static', 'string', 'style', 'supporter', 'switch', 'switchable', 'talkable', 'thedark', - 'time_left', 'time_out', 'to', 'topic', 'transparent', 'true', 'underline', 
'u_to', 'u_obj', - 'visited', 'w_to', 'w_obj', 'when_closed', 'when_off', 'when_on', 'when_open', 'while', 'with', - 'with_key', 'workflag', 'worn' -})) - --- Library actions. -lex:add_rule('action', token(lexer.FUNCTION_BUILTIN, word_match{ - 'Answer', 'Ask', 'AskFor', 'Attack', 'Blow', 'Burn', 'Buy', 'Climb', 'Close', 'Consult', 'Cut', - 'Dig', 'Disrobe', 'Drink', 'Drop', 'Eat', 'Empty', 'EmptyT', 'Enter', 'Examine', 'Exit', 'Fill', - 'FullScore', 'GetOff', 'Give', 'Go', 'GoIn', 'Insert', 'Inv', 'InvTall', 'InvWide', 'Jump', - 'JumpOver', 'Kiss', 'LetGo', 'Listen', 'LMode1', 'LMode2', 'LMode3', 'Lock', 'Look', 'LookUnder', - 'Mild', 'No', 'NotifyOff', 'NotifyOn', 'Objects', 'Open', 'Order', 'Places', 'Pray', 'Pronouns', - 'Pull', 'Push', 'PushDir', 'PutOn', 'Quit', 'Receive', 'Remove', 'Restart', 'Restore', 'Rub', - 'Save', 'Score', 'ScriptOff', 'ScriptOn', 'Search', 'Set', 'SetTo', 'Show', 'Sing', 'Sleep', - 'Smell', 'Sorry', 'Squeeze', 'Strong', 'Swim', 'Swing', 'SwitchOff', 'SwitchOn', 'Take', 'Taste', - 'Tell', 'Think', 'ThrowAt', 'ThrownAt', 'Tie', 'Touch', 'Transfer', 'Turn', 'Unlock', 'VagueGo', - 'Verify', 'Version', 'Wait', 'Wake', 'WakeOther', 'Wave', 'WaveHands', 'Wear', 'Yes' -})) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local sq_str = lexer.range("'") -local dq_str = lexer.range('"') -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) - --- Comments. -lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('!'))) - --- Numbers. -local inform_hex = '$' * lexer.xdigit^1 -local inform_bin = '$$' * S('01')^1 -lex:add_rule('number', token(lexer.NUMBER, lexer.integer + inform_hex + inform_bin)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('@~=+-*/%^#=<>;:,.{}[]()&|?'))) - -lexer.property['scintillua.comment'] = '!' - -return lex diff --git a/share/vis/lexers/ini.lua b/share/vis/lexers/ini.lua @@ -1,39 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. 
--- Ini LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('ini') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match('true false on off yes no'))) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, (lexer.alpha + '_') * (lexer.alnum + S('_.'))^0)) - --- Strings. -local sq_str = lexer.range("'") -local dq_str = lexer.range('"') -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) - --- Labels. -lex:add_rule('label', token(lexer.LABEL, lexer.range('[', ']', true))) - --- Comments. -lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol(lexer.starts_line(S(';#'))))) - --- Numbers. -local integer = S('+-')^-1 * (lexer.hex_num + lexer.oct_num_('_') + lexer.dec_num_('_')) -lex:add_rule('number', token(lexer.NUMBER, lexer.float + integer)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, '=')) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/io_lang.lua b/share/vis/lexers/io_lang.lua @@ -1,51 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Io LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('io_lang') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'block', 'method', 'while', 'foreach', 'if', 'else', 'do', 'super', 'self', 'clone', 'proto', - 'setSlot', 'hasSlot', 'type', 'write', 'print', 'forward' -})) - --- Types. 
-lex:add_rule('type', token(lexer.TYPE, word_match{ - 'Block', 'Buffer', 'CFunction', 'Date', 'Duration', 'File', 'Future', 'LinkedList', 'List', 'Map', - 'Message', 'Nil', 'Nop', 'Number', 'Object', 'String', 'WeakLink' -})) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local sq_str = lexer.range("'") -local dq_str = lexer.range('"') -local tq_str = lexer.range('"""') -lex:add_rule('string', token(lexer.STRING, tq_str + sq_str + dq_str)) - --- Comments. -local line_comment = lexer.to_eol(P('#') + '//') -local block_comment = lexer.range('/*', '*/') -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('`~@$%^&*-+/=\\<>?.,:;()[]{}'))) - --- Fold points. -lex:add_fold_point(lexer.OPERATOR, '(', ')') -lex:add_fold_point(lexer.COMMENT, '/*', '*/') - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/java.lua b/share/vis/lexers/java.lua @@ -1,142 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Java LPeg lexer. --- Modified by Brian Schott. - -local lexer = require('lexer') -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(...) - --- Classes. -lex:add_rule('classdef', lex:tag(lexer.KEYWORD, 'class') * lex:get_rule('whitespace') * - lex:tag(lexer.CLASS, lexer.word)) - --- Keywords. -lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD))) - --- Functions. -local builtin_func = lex:tag(lexer.FUNCTION_BUILTIN, lex:word_match(lexer.FUNCTION_BUILTIN)) -local func = lex:tag(lexer.FUNCTION, lexer.word) -local method = lpeg.B('.') * lex:tag(lexer.FUNCTION_METHOD, lexer.word) -lex:add_rule('function', (builtin_func + method + func) * #(lexer.space^0 * '(')) - --- Constants. -lex:add_rule('constant', lex:tag(lexer.CONSTANT_BUILTIN, lex:word_match(lexer.CONSTANT_BUILTIN) + - 'System.' 
* lexer.word_match('err in out'))) - --- Types. -lex:add_rule('type', lex:tag(lexer.TYPE, lex:word_match(lexer.TYPE))) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"', true) -lex:add_rule('string', lex:tag(lexer.STRING, sq_str + dq_str)) - --- Comments. -local line_comment = lexer.to_eol('//', true) -local block_comment = lexer.range('/*', '*/') -lex:add_rule('comment', lex:tag(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number * S('LlFfDd')^-1)) - --- Annotations. -lex:add_rule('annotation', lex:tag(lexer.ANNOTATION, '@' * lexer.word)) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;.()[]{}'))) - --- Fold points. -lex:add_fold_point(lexer.OPERATOR, '{', '}') -lex:add_fold_point(lexer.COMMENT, '/*', '*/') - --- Word lists. -lex:set_word_list(lexer.KEYWORD, { - 'abstract', 'assert', 'break', 'case', 'catch', 'class', 'const', 'continue', 'default', 'do', - 'else', 'enum', 'extends', 'final', 'finally', 'for', 'goto', 'if', 'implements', 'import', - 'instanceof', 'interface', 'native', 'new', 'package', 'private', 'protected', 'public', 'return', - 'static', 'strictfp', 'super', 'switch', 'synchronized', 'this', 'throw', 'throws', 'transient', - 'try', 'while', 'volatile', -- - 'true', 'false', 'null' -- literals -}) - -lex:set_word_list(lexer.FUNCTION_BUILTIN, { - 'clone', 'equals', 'finalize', 'getClass', 'hashCode', 'notify', 'notifyAll', 'toString', 'wait', -- - 'Boolean.compare', 'Boolean.getBoolean', 'Boolean.parseBoolean', 'Boolean.valueOf', -- - 'Byte.compare', 'Byte.decode', 'Byte.parseByte', 'Byte.valueOf', -- - 'Character.charCount', 'Character.codePointAt', 'Character.codePointBefore', - 'Character.codePointCount', 'Character.compare', 'Character.digit', 'Character.forDigit', - 'Character.getName', 'Character.getNumericValue', 
'Character.getType', 'Character.isAlphabetic', - 'Character.isDefined', 'Character.isDigit', 'Character.isIdentifierIgnorable', - 'Character.isIdeographic', 'Character.isISOControl', 'Character.isJavaIdentifierPart', - 'Character.isJavaIdentifierStart', 'Character.isLetter', 'Character.isLetterOrDigit', - 'Character.isLowerCase', 'Character.isMirrored', 'Character.isSpaceChar', - 'Character.isSupplementaryCodePoint', 'Character.isSurrogate', 'Character.isSurrogatePair', - 'Character.isTitleCase', 'Character.isUnicodeIdentifierPart', - 'Character.isUnicodeIdentifierStart', 'Character.isUpperCase', 'Character.isValidCodePoint', - 'Character.isWhitespace', 'Character.offsetByCodePoints', 'Character.reverseBytes', - 'Character.toChars', 'Character.toCodePoint', 'Character.toLowerCase', 'Character.toTitleCase', - 'Character.toUpperCase', 'Character.valueOf', -- - 'Double.compare', 'Double.doubleToLongBits', 'Double.doubleToRawLongBits', 'Double.isInfinite', - 'Double.longBitsToDouble', 'Double.parseDouble', 'Double.toHexString', 'Double.valueOf', -- - 'Integer.bitCount', 'Integer.compare', 'Integer.decode', 'Integer.getInteger', - 'Integer.highestOneBit', 'Integer.lowestOneBit', 'Integer.numberOfLeadingZeros', - 'Integer.numberOfTrailingZeros', 'Integer.parseInt', 'Integer.reverse', 'Integer.reverseBytes', - 'Integer.rotateLeft', 'Integer.rotateRight', 'Integer.signum', 'Integer.toBinaryString', - 'Integer.toHexString', 'Integer.toOctalString', 'Integer.valueOf', -- - 'Math.abs', 'Math.acos', 'Math.asin', 'Math.atan', 'Math.atan2', 'Math.cbrt', 'Math.ceil', - 'Math.copySign', 'Math.cos', 'Math.cosh', 'Math.exp', 'Math.expm1', 'Math.floor', - 'Math.getExponent', 'Math.hypot', 'Math.IEEEremainder', 'Math.log', 'Math.log10', 'Math.log1p', - 'Math.max', 'Math.min', 'Math.nextAfter', 'Math.nextUp', 'Math.pow', 'Math.random', 'Math.rint', - 'Math.round', 'Math.scalb', 'Math.signum', 'Math.sin', 'Math.sinh', 'Math.sqrt', 'Math.tan', - 'Math.tanh', 'Math.toDegrees', 
'Math.toRadians', 'Math.ulp', -- - 'Runtime.getRuntime', -- - 'String.copyValueOf', 'String.format', 'String.valueOf', -- - 'System.arraycopy', 'System.clearProperty', 'System.console', 'System.currentTimeMillis', - 'System.exit', 'System.gc', 'System.getenv', 'System.getProperties', 'System.getProperty', - 'System.getSecurityManager', 'System.identityHashCode', 'System.inheritedChannel', - 'System.lineSeparator', 'System.load', 'System.loadLibrary', 'System.mapLibraryName', - 'System.nanoTime', 'System.runFinalization', 'System.setErr', 'System.setIn', 'System.setOut', - 'System.setProperties', 'System.setProperty', 'System.setSecurityManager', -- - 'Thread.activeCount', 'Thread.currentThread', 'Thread.dumpStack', 'Thread.enumerate', - 'Thread.getAllStackTraces', 'Thread.getDefaultUncaughtExceptionHandler', 'Thread.holdsLock', - 'Thread.interrupted', 'Thread.setDefaultUncaughtExceptionHandler', 'Thread.sleep', 'Thread.yield' -- -}) - -lex:set_word_list(lexer.CONSTANT_BUILTIN, { - 'Double.MAX_EXPONENT', 'Double.MAX_VALUE', 'Double.MIN_EXPONENT', 'Double.MIN_NORMAL', - 'Double.MIN_VALUE', 'Double.NaN', 'Double.NEGATIVE_INFINITY', 'Double.POSITIVE_INFINITY', -- - 'Integer.MAX_VALUE', 'Integer.MIN_VALUE', -- - 'Math.E', 'Math.PI', -- - 'Thread.MAX_PRIORITY', 'Thread.MIN_PRIORITY', 'Thread.NORM_PRIORITY' -}) - -lex:set_word_list(lexer.TYPE, { - 'boolean', 'byte', 'char', 'double', 'float', 'int', 'long', 'short', 'void', 'Boolean', 'Byte', - 'Character', 'Class', 'Double', 'Enum', 'Float', 'Integer', 'Long', 'Object', 'Process', - 'Runtime', 'Short', 'String', 'StringBuffer', 'StringBuilder', 'Thread', 'Throwable', 'Void', - -- Exceptions. 
- 'ArithmeticException', 'ArrayIndexOutOfBoundsException', 'ArrayStoreException', - 'ClassCastException', 'ClassNotFoundException', 'CloneNotSupportedException', - 'EnumConstantNotPresentException', 'Exception', 'IllegalAccessException', - 'IllegalArgumentException', 'IllegalMonitorStateException', 'IllegalStateException', - 'IllegalThreadStateException', 'IndexOutOfBoundsException', 'InstantiationException', - 'InterruptedException', 'NegativeArraySizeException', 'NoSuchFieldException', - 'NoSuchMethodException', 'NullPointerException', 'NumberFormatException', - 'ReflectiveOperationException', 'RuntimeException', 'SecurityException', - 'StringIndexOutOfBoundsException', 'TypeNotPresentException', 'UnsupportedOperationException', - -- Errors. - 'AbstractMethodError', 'AssertionError', 'BootstrapMethodError', 'ClassCircularityError', - 'ClassFormatError', 'Error', 'ExceptionInInitializerError', 'IllegalAccessError', - 'IncompatibleClassChangeError', 'InstantiationError', 'InternalError', 'LinkageError', - 'NoClassDefFoundError', 'NoSuchFieldError', 'NoSuchMethodError', 'OutOfMemoryError', - 'StackOverflowError', 'ThreadDeath', 'UnknownError', 'UnsatisfiedLinkError', - 'UnsupportedClassVersionError', 'VerifyError', 'VirtualMachineError' -}) - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/javascript.lua b/share/vis/lexers/javascript.lua @@ -1,98 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- JavaScript LPeg lexer. - -local lexer = lexer -local P, S, B = lpeg.P, lpeg.S, lpeg.B - -local lex = lexer.new(...) - --- Keywords. -lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD))) - --- Types. -lex:add_rule('type', lex:tag(lexer.TYPE, lex:word_match(lexer.TYPE))) - --- Functions. 
-local builtin_func = -B('.') * - lex:tag(lexer.FUNCTION_BUILTIN, lex:word_match(lexer.FUNCTION_BUILTIN)) -local func = lex:tag(lexer.FUNCTION, lexer.word) -local method = B('.') * lex:tag(lexer.FUNCTION_METHOD, lexer.word) -lex:add_rule('function', (builtin_func + method + func) * #(lexer.space^0 * '(')) - --- Constants. -lex:add_rule('constant', lex:tag(lexer.CONSTANT_BUILTIN, lex:word_match(lexer.CONSTANT_BUILTIN))) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) - --- Comments. -local line_comment = lexer.to_eol('//', true) -local block_comment = lexer.range('/*', '*/') -lex:add_rule('comment', lex:tag(lexer.COMMENT, line_comment + block_comment)) - --- Strings. -local sq_str = lexer.range("'") -local dq_str = lexer.range('"') -local bq_str = lexer.range('`') -local string = lex:tag(lexer.STRING, sq_str + dq_str + bq_str) -local regex_str = lexer.after_set('+-*%^!=&|?:;,([{<>', lexer.range('/', true) * S('igm')^0) -local regex = lex:tag(lexer.REGEX, regex_str) -lex:add_rule('string', string + regex) - --- Numbers. -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number)) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('+-/*%^!=&|?:;,.()[]{}<>'))) - --- Fold points. -lex:add_fold_point(lexer.OPERATOR, '{', '}') -lex:add_fold_point(lexer.COMMENT, '/*', '*/') - --- Word lists. 
-lex:set_word_list(lexer.KEYWORD, { - 'abstract', 'async', 'await', 'boolean', 'break', 'byte', 'case', 'catch', 'char', 'class', - 'const', 'continue', 'debugger', 'default', 'delete', 'do', 'double', 'else', 'enum', 'export', - 'extends', 'false', 'final', 'finally', 'float', 'for', 'function', 'get', 'goto', 'if', - 'implements', 'import', 'in', 'instanceof', 'int', 'interface', 'let', 'long', 'native', 'new', - 'null', 'of', 'package', 'private', 'protected', 'public', 'return', 'set', 'short', 'static', - 'super', 'switch', 'synchronized', 'this', 'throw', 'throws', 'transient', 'true', 'try', - 'typeof', 'var', 'void', 'volatile', 'while', 'with', 'yield' -}) - -lex:set_word_list(lexer.TYPE, { - -- Fundamental objects. - 'Object', 'Function', 'Boolean', 'Symbol', - -- Error Objects. - 'Error', 'AggregateError', 'EvalError', 'InternalError', 'RangeError', 'ReferenceError', - 'SyntaxError', 'TypeError', 'URIError', - -- Numbers and dates. - 'Number', 'BigInt', 'Math', 'Date', - -- Text Processing. - 'String', 'RegExp', - -- Indexed collections. - 'Array', 'Int8Array', 'Uint8Array', 'Uint8ClampedArray', 'Int16Array', 'Uint16Array', - 'Int32Array', 'Uint32Array', 'Float32Array', 'Float64Array', 'BigInt64Array', 'BigUint64Array', - -- Keyed collections. - 'Map', 'Set', 'WeakMap', 'WeakSet', - -- Structured data. - 'ArrayBuffer', 'SharedArrayBuffer', 'Atomics', 'DataView', 'JSON', - -- Control abstraction objects. - 'GeneratorFunction', 'AsyncGeneratorFunction', 'Generator', 'AsyncGenerator', 'AsyncFunction', - 'Promise', - -- Reflection. - 'Reflect', 'Proxy', - -- Other. 
- 'Intl', 'WebAssembly' -}) - -lex:set_word_list(lexer.FUNCTION_BUILTIN, { - 'eval', 'isFinite', 'isNaN', 'parseFloat', 'parseInt', 'decodeURI', 'decodeURIComponent', - 'encodeURI', 'encodeURIComponent' -}) - -lex:set_word_list(lexer.CONSTANT_BUILTIN, 'Infinity NaN undefined globalThis arguments') - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/jq.lua b/share/vis/lexers/jq.lua @@ -1,84 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- jq 1.6 Lua lexer -- https://stedolan.github.io/jq/wiki --- Anonymously contributed. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('jq') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - -- keywords not listed by jq's "builtins", minus operators 'and' and 'or', plus the '?' shorthand - 'as', 'break', 'catch', 'def', 'elif', 'else', 'end', 'foreach', 'if', 'import', 'include', - 'label', 'module', 'reduce', 'then', 'try' -} + '?')) - --- Functions. 
-lex:add_rule('function', token(lexer.FUNCTION, word_match{ - -- jq 1.6 built-in functions (SQL in upper caisse) - 'acos', 'acosh', 'add', 'all', 'any', 'arrays', 'ascii_downcase', 'ascii_upcase', 'asin', 'asinh', - 'atan', 'atan2', 'atanh', 'booleans', 'bsearch', 'builtins', 'capture', 'cbrt', 'ceil', - 'combinations', 'contains', 'copysign', 'cos', 'cosh', 'debug', 'del', 'delpaths', 'drem', - 'empty', 'endswith', 'env', 'erf', 'erfc', 'error', 'exp', 'exp10', 'exp2', 'explode', 'expm1', - 'fabs', 'fdim', 'finites', 'first', 'flatten', 'floor', 'fma', 'fmax', 'fmin', 'fmod', 'format', - 'frexp', 'from_entries', 'fromdate', 'fromdateiso8601', 'fromjson', 'fromstream', 'gamma', - 'get_jq_origin', 'get_prog_origin', 'get_search_list', 'getpath', 'gmtime', 'group_by', 'gsub', - 'halt', 'halt_error', 'has', 'hypot', 'implode', 'IN', 'in', 'INDEX', 'index', 'indices', - 'infinite', 'input', 'input_filename', 'input_line_number', 'inputs', 'inside', 'isempty', - 'isfinite', 'isinfinite', 'isnan', 'isnormal', 'iterables', 'j0', 'j1', 'jn', 'JOIN', 'join', - 'keys', 'keys_unsorted', 'last', 'ldexp', 'leaf_paths', 'length', 'lgamma', 'lgamma_r', 'limit', - 'localtime', 'log', 'log10', 'log1p', 'log2', 'logb', 'ltrimstr', 'map', 'map_values', 'match', - 'max', 'max_by', 'min', 'min_by', 'mktime', 'modf', 'modulemeta', 'nan', 'nearbyint', 'nextafter', - 'nexttoward', 'normals', 'not', 'now', 'nth', 'nulls', 'numbers', 'objects', 'path', 'paths', - 'pow', 'pow10', 'range', 'recurse', 'recurse_down', 'remainder', 'repeat', 'reverse', 'rindex', - 'rint', 'round', 'rtrimstr', 'scalars', 'scalars_or_empty', 'scalb', 'scalbln', 'scan', 'select', - 'setpath', 'significand', 'sin', 'sinh', 'sort', 'sort_by', 'split', 'splits', 'sqrt', - 'startswith', 'stderr', 'strflocaltime', 'strftime', 'strings', 'strptime', 'sub', 'tan', 'tanh', - 'test', 'tgamma', 'to_entries', 'todate', 'todateiso8601', 'tojson', 'tonumber', 'tostream', - 'tostring', 'transpose', 'trunc', 'truncate_stream', 
'type', 'unique', 'unique_by', 'until', - 'utf8bytelength', 'values', 'walk', 'while', 'with_entries', 'y0', 'y1', 'yn' -})) - --- Strings. -local string = token(lexer.STRING, lexer.range('"', true)) -local literal = token(lexer.STRING, word_match('null false true')) -lex:add_rule('string', string + literal) - --- Operators. --- 'not' isn't an operator but a function (filter) -lex:add_rule('operator', token(lexer.OPERATOR, - P('.[]') + '?//' + '//=' + 'and' + '[]' + '//' + '==' + '!=' + '>=' + '<=' + '|=' + '+=' + '-=' + - '*=' + '/=' + '%=' + 'or' + S('=+-*/%<>()[]{}.,') + '|' + ';')) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Comments. -lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#'))) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Formats. -lex:add_rule('format', - token('format', '@' * word_match('text json html uri csv tsv sh base64 base64d'))) -lex:add_style('format', lexer.styles.constant) - --- Variables. -lex:add_rule('sysvar', token('sysvar', '$' * word_match('ENV ORIGIN __loc__'))) -lex:add_style('sysvar', lexer.styles.constant .. {bold = true}) -lex:add_rule('variable', token(lexer.VARIABLE, '$' * lexer.word)) - --- Fold points. -lex:add_fold_point(lexer.KEYWORD, 'if', 'end') -lex:add_fold_point(lexer.OPERATOR, '[', ']') -lex:add_fold_point(lexer.OPERATOR, '{', '}') - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/json.lua b/share/vis/lexers/json.lua @@ -1,28 +0,0 @@ --- Copyright 2006-2024 Brian "Sir Alaran" Schott. See LICENSE. --- JSON LPeg lexer. --- Based off of lexer code by Mitchell. - -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(...) - --- Strings. -local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"', true) -lex:add_rule('string', lex:tag(lexer.STRING, sq_str + dq_str)) - --- Keywords. 
-lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lexer.word_match('true false null'))) - --- Numbers. -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number)) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('[]{}:,'))) - --- Fold points. -lex:add_fold_point(lexer.OPERATOR, '[', ']') -lex:add_fold_point(lexer.OPERATOR, '{', '}') - -return lex diff --git a/share/vis/lexers/jsp.lua b/share/vis/lexers/jsp.lua @@ -1,20 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- JSP LPeg lexer. - -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(..., {inherit = lexer.load('html')}) - --- Embedded Java. -local java = lexer.load('java') -local java_start_rule = lex:tag(lexer.PREPROCESSOR, '<%' * P('=')^-1) -local java_end_rule = lex:tag(lexer.PREPROCESSOR, '%>') -lex:embed(java, java_start_rule, java_end_rule, true) - --- Fold points. -lex:add_fold_point(lexer.PREPROCESSOR, '<%', '%>') - -lexer.property['scintillua.comment'] = '<!--|-->' - -return lex diff --git a/share/vis/lexers/julia.lua b/share/vis/lexers/julia.lua @@ -1,112 +0,0 @@ --- Copyright 2020-2024 Tobias Frilling. See LICENSE. --- Julia lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local B, P, S = lpeg.B, lpeg.P, lpeg.S - -local lex = lexer.new('julia') - --- Whitespace. 
-lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - -local id = lexer.word * P('!')^0 - --- Keyword -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'baremodule', 'begin', 'break', 'catch', 'const', 'continue', 'do', 'else', 'elseif', 'end', - 'export', 'finally', 'for', 'function', 'global', 'if', 'in', 'isa', 'import', 'let', 'local', - 'macro', 'module', 'quote', 'return', 'struct', 'try', 'using', 'where', 'while' -} + 'abstract type' + 'mutable struct' + 'primitive type')) - --- Constant -local const_bool = word_match('true false') -local const_numerical = (P('Inf') + 'NaN') * (P('16') + '32' + '64')^-1 * -lexer.alnum -local const_special = word_match('nothing undef missing') -local const_env = word_match('ARGS ENV ENDIAN_BOM LOAD_PATH VERSION PROGRAM_FILE DEPOT_PATH') -local const_io = word_match('stdout stdin stderr devnull') -lex:add_rule('constant', token(lexer.CONSTANT, - const_bool + const_numerical + const_special + const_env + const_io)) - --- Type -local type_annotated = (B('::') + B(':: ')) * id -local type_para = id * #P('{') -local type_subtyping = id * #(lexer.space^0 * '<:') + (B('<:') + B('<: ')) * id -local type_struct = B('struct ') * id --- LuaFormatter off -local type_builtin_numerical = ((P('Abstract') + 'Big') * 'Float' + - 'Float' * (P('16') + '32' + '64') + - P('U')^-1 * 'Int' * (P('8') + '16' + '32' + '64' + '128')^-1 + - P('Abstract')^-1 * 'Irrational' -) * -lexer.alnum + word_match('Number Complex Real Integer Bool Signed Unsigned Rational') --- LuaFormatter on -local type_builtin_range = ((P('Lin') + 'Ordinal' + (P('Abstract')^-1 * P('Unit')^-1)) * 'Range' + - 'StepRange' * P('Len')^-1 - 'Range' -) * -lexer.alnum -local type_builtin_array = ((P('Abstract') + 'Bit' + 'Dense' + 'PermutedDims' + 'Sub')^-1 * - word_match('Array Vector Matrix VecOrMat') + - (P('Abstract') + 'Sym' + (P('Unit')^-1 * (P('Lower') + 'Upper')))^-1 * 'Triangular' -) * -lexer.alnum + - word_match('Adjoint Bidiagonal Diagonal Hermitian 
LQPackedQ Symmetric Transpose UpperHessenberg') -lex:add_rule('type', token(lexer.TYPE, - type_para + type_annotated + type_subtyping + type_struct + type_builtin_numerical + - type_builtin_range + type_builtin_array)) - --- Macro -lex:add_rule('macro', token(lexer.PREPROCESSOR, '@' * (id + '.'))) - --- Symbol -lex:add_rule('symbol', token('symbol', -B(P(':') + '<') * ':' * id)) -lex:add_style('symbol', lexer.styles.string) - --- Function -lex:add_rule('function', token(lexer.FUNCTION, id * #(P('.')^-1 * '('))) - --- Identifier -lex:add_rule('identifier', token(lexer.IDENTIFIER, id)) - --- Comment -local line_comment = lexer.to_eol('#') -local block_comment = lexer.range('#=', '=#') -lex:add_rule('comment', token(lexer.COMMENT, block_comment + line_comment)) - --- Number -local decimal = lexer.digit^1 * ('_' * lexer.digit^1)^0 -local hex_digits = lexer.xdigit^1 * ('_' * lexer.xdigit^1)^0 -local hexadecimal = '0x' * hex_digits -local binary = '0b' * S('01')^1 * ('_' * S('01')^1)^0 -local integer = binary + hexadecimal + decimal - -local float_dec_coeff = decimal^0 * '.' * decimal + decimal * '.' * decimal^0 -local float_dec_expon = S('eEf') * S('+-')^-1 * lexer.digit^1 -local float_dec = float_dec_coeff * float_dec_expon^-1 + decimal * float_dec_expon - -local float_hex_coeff = '0x' * (hex_digits^0 * '.' * hex_digits + hex_digits * '.' 
* hex_digits^0) -local float_hex_expon = 'p' * S('+-')^-1 * lexer.digit^1 -local float_hex = float_hex_coeff * float_hex_expon^-1 + hexadecimal * float_hex_expon - -local float = float_dec + float_hex - -local imaginary = (float_dec + decimal) * 'im' - -lex:add_rule('number', - token(lexer.NUMBER, S('+-')^-1 * (imaginary + float + integer) * -lexer.alpha)) - --- String & Character -local doc_str = lexer.range('"""') -local str = lexer.range('"') -lex:add_rule('string', token(lexer.STRING, doc_str + str)) - -local c_esc = '\\' * S('\\"\'nrbtfav') -local unicode = '\\' * S('uU') * lexer.xdigit^1 -local char = "'" * (lexer.alnum + c_esc + unicode) * "'" -lex:add_rule('character', token('character', char)) -lex:add_style('character', lexer.styles.constant) - --- Operator -lex:add_rule('operator', token(lexer.OPERATOR, S('+-*/<>=!%^&|~\\\':?.') + '÷' + '≠' + '≈' + - '≤' + '≥' + '⊻' + '√')) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/latex.lua b/share/vis/lexers/latex.lua @@ -1,44 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Latex LPeg lexer. --- Modified by Brian Schott. --- Modified by Robert Gieseke. - -local lexer = lexer -local word_match = lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(...) - --- Comments. -local line_comment = lexer.to_eol('%') -local block_comment = lexer.range('\\begin' * P(' ')^0 * '{comment}', - '\\end' * P(' ')^0 * '{comment}') -lex:add_rule('comment', lex:tag(lexer.COMMENT, line_comment + block_comment)) - --- Math environments. -local math_word = word_match('align displaymath eqnarray equation gather math multline') -local math_begin_end = (P('begin') + P('end')) * P(' ')^0 * '{' * math_word * P('*')^-1 * '}' -lex:add_rule('math', lex:tag('environment.math', '$' + '\\' * (S('[]()') + math_begin_end))) - --- LaTeX environments. 
-lex:add_rule('environment', lex:tag('environment', '\\' * (P('begin') + 'end') * P(' ')^0 * '{' * - lexer.word * P('*')^-1 * '}')) - --- Sections. -lex:add_rule('section', lex:tag('command.section', '\\' * - word_match('part chapter section subsection subsubsection paragraph subparagraph') * P('*')^-1)) - --- Commands. -lex:add_rule('command', lex:tag('command', '\\' * (lexer.alpha^1 + S('#$&~_^%{}\\')))) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('&#{}[]'))) - --- Fold points. -lex:add_fold_point(lexer.COMMENT, '\\begin', '\\end') -lex:add_fold_point('environment', '\\begin', '\\end') -lex:add_fold_point(lexer.OPERATOR, '{', '}') - -lexer.property['scintillua.comment'] = '%' - -return lex diff --git a/share/vis/lexers/ledger.lua b/share/vis/lexers/ledger.lua @@ -1,45 +0,0 @@ --- Copyright 2015-2024 Charles Lehner. See LICENSE. --- ledger journal LPeg lexer, see http://www.ledger-cli.org/ - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('ledger', {lex_by_line = true}) - -local delim = P('\t') + P(' ') - --- Account. -lex:add_rule('account', token(lexer.VARIABLE, lexer.starts_line(S(' \t')^1 * lexer.graph^1))) - --- Amount. -lex:add_rule('amount', token(lexer.NUMBER, delim * (1 - S(';\r\n'))^1)) - --- Comments. -lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol(S(';#')))) - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Strings. -local sq_str = lexer.range("'") -local dq_str = lexer.range('"') -local label = lexer.range('[', ']', true) -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str + label)) - --- Date. -lex:add_rule('date', token(lexer.CONSTANT, lexer.starts_line((lexer.digit + S('/-'))^1))) - --- Automated transactions. -lex:add_rule('auto_tx', token(lexer.PREPROCESSOR, lexer.to_eol(lexer.starts_line(S('=~'))))) - --- Directives. 
-local directive_word = word_match{ - ' account', 'alias', 'assert', 'bucket', 'capture', 'check', 'comment', 'commodity', 'define', - 'end', 'fixed', 'endfixed', 'include', 'payee', 'apply', 'tag', 'test', 'year' -} + S('AYNDCIiOobh') -lex:add_rule('directive', token(lexer.KEYWORD, lexer.starts_line(S('!@')^-1 * directive_word))) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/less.lua b/share/vis/lexers/less.lua @@ -1,19 +0,0 @@ --- Copyright 2006-2024 Robert Gieseke. See LICENSE. --- Less CSS LPeg lexer. --- http://lesscss.org - -local lexer = require('lexer') -local token = lexer.token -local S = lpeg.S - -local lex = lexer.new('less', {inherit = lexer.load('css')}) - --- Line comments. -lex:add_rule('line_comment', token(lexer.COMMENT, lexer.to_eol('//'))) - --- Variables. -lex:add_rule('variable', token(lexer.VARIABLE, '@' * (lexer.alnum + S('_-{}'))^1)) - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/lexer.lua b/share/vis/lexers/lexer.lua @@ -1,1989 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. - ---- Lexes Scintilla documents and source code with Lua and LPeg. --- --- ### Writing Lua Lexers --- --- Lexers recognize and tag elements of source code for syntax highlighting. Scintilla (the --- editing component behind [Textadept][] and [SciTE][]) traditionally uses static, compiled C++ --- lexers which are notoriously difficult to create and/or extend. On the other hand, Lua makes --- it easy to to rapidly create new lexers, extend existing ones, and embed lexers within one --- another. Lua lexers tend to be more readable than C++ lexers too. --- --- While lexers can be written in plain Lua, Scintillua prefers using Parsing Expression --- Grammars, or PEGs, composed with the Lua [LPeg library][]. As a result, this document is --- devoted to writing LPeg lexers. 
The following table comes from the LPeg documentation and --- summarizes all you need to know about constructing basic LPeg patterns. This module provides --- convenience functions for creating and working with other more advanced patterns and concepts. --- --- Operator | Description --- -|- --- `lpeg.P(string)` | Matches `string` literally. --- `lpeg.P(`_`n`_`)` | Matches exactly _`n`_ number of characters. --- `lpeg.S(string)` | Matches any character in set `string`. --- `lpeg.R("`_`xy`_`")`| Matches any character between range `x` and `y`. --- `patt^`_`n`_ | Matches at least _`n`_ repetitions of `patt`. --- `patt^-`_`n`_ | Matches at most _`n`_ repetitions of `patt`. --- `patt1 * patt2` | Matches `patt1` followed by `patt2`. --- `patt1 + patt2` | Matches `patt1` or `patt2` (ordered choice). --- `patt1 - patt2` | Matches `patt1` if `patt2` does not also match. --- `-patt` | Matches if `patt` does not match, consuming no input. --- `#patt` | Matches `patt` but consumes no input. --- --- The first part of this document deals with rapidly constructing a simple lexer. The next part --- deals with more advanced techniques, such as embedding lexers within one another. Following --- that is a discussion about code folding, or being able to tell Scintilla which code blocks --- are "foldable" (temporarily hideable from view). After that are instructions on how to use --- Lua lexers with the aforementioned Textadept and SciTE editors. Finally there are comments --- on lexer performance and limitations. --- --- [LPeg library]: http://www.inf.puc-rio.br/~roberto/lpeg/lpeg.html --- [Textadept]: https://orbitalquark.github.io/textadept --- [SciTE]: https://scintilla.org/SciTE.html --- --- ### Lexer Basics --- --- The *lexers/* directory contains all of Scintillua's Lua lexers, including any new ones you --- write. Before attempting to write one from scratch though, first determine if your programming --- language is similar to any of the 100+ languages supported. 
If so, you may be able to copy --- and modify, or inherit from that lexer, saving some time and effort. The filename of your --- lexer should be the name of your programming language in lower case followed by a *.lua* --- extension. For example, a new Lua lexer has the name *lua.lua*. --- --- Note: Try to refrain from using one-character language names like "c", "d", or "r". For --- example, Scintillua uses "ansi_c", "dmd", and "rstats", respectively. --- --- #### New Lexer Template --- --- There is a *lexers/template.txt* file that contains a simple template for a new lexer. Feel --- free to use it, replacing the '?' with the name of your lexer. Consider this snippet from --- the template: --- --- -- ? LPeg lexer. --- --- local lexer = lexer --- local P, S = lpeg.P, lpeg.S --- --- local lex = lexer.new(...) --- --- [... lexer rules ...] --- --- -- Identifier. --- local identifier = lex:tag(lexer.IDENTIFIER, lexer.word) --- lex:add_rule('identifier', identifier) --- --- [... more lexer rules ...] --- --- return lex --- --- The first line of code is a Lua convention to store a global variable into a local variable --- for quick access. The second line simply defines often used convenience variables. The third --- and last lines [define](#lexer.new) and return the lexer object Scintillua uses; they are --- very important and must be part of every lexer. Note the `...` passed to `lexer.new()` is --- literal: the lexer will assume the name of its filename or an alternative name specified --- by `lexer.load()` in embedded lexer applications. The fourth line uses something called a --- "tag", an essential component of lexers. You will learn about tags shortly. The fifth line --- defines a lexer grammar rule, which you will learn about later. (Be aware that it is common --- practice to combine these two lines for short rules.) Note, however, the `local` prefix in --- front of variables, which is needed so-as not to affect Lua's global environment. 
All in all, --- this is a minimal, working lexer that you can build on. --- --- #### Tags --- --- Take a moment to think about your programming language's structure. What kind of key elements --- does it have? Most languages have elements like keywords, strings, and comments. The --- lexer's job is to break down source code into these elements and "tag" them for syntax --- highlighting. Therefore, tags are an essential component of lexers. It is up to you how --- specific your lexer is when it comes to tagging elements. Perhaps only distinguishing between --- keywords and identifiers is necessary, or maybe recognizing constants and built-in functions, --- methods, or libraries is desirable. The Lua lexer, for example, tags the following elements: --- keywords, functions, constants, identifiers, strings, comments, numbers, labels, attributes, --- and operators. Even though functions and constants are subsets of identifiers, Lua programmers --- find it helpful for the lexer to distinguish between them all. It is perfectly acceptable --- to just recognize keywords and identifiers. --- --- In a lexer, LPeg patterns that match particular sequences of characters are tagged with a --- tag name using the the `lexer.tag()` function. Let us examine the "identifier" tag used in --- the template shown earlier: --- --- local identifier = lex:tag(lexer.IDENTIFIER, lexer.word) --- --- At first glance, the first argument does not appear to be a string name and the second --- argument does not appear to be an LPeg pattern. Perhaps you expected something like: --- --- lex:tag('identifier', (lpeg.R('AZ', 'az') + '_') * (lpeg.R('AZ', 'az', '09') + '_')^0) --- --- The `lexer` module actually provides a convenient list of common tag names and common LPeg --- patterns for you to use. 
Tag names for programming languages include (but are not limited --- to) `lexer.DEFAULT`, `lexer.COMMENT`, `lexer.STRING`, `lexer.NUMBER`, `lexer.KEYWORD`, --- `lexer.IDENTIFIER`, `lexer.OPERATOR`, `lexer.ERROR`, `lexer.PREPROCESSOR`, `lexer.CONSTANT`, --- `lexer.CONSTANT_BUILTIN`, `lexer.VARIABLE`, `lexer.VARIABLE_BUILTIN`, `lexer.FUNCTION`, --- `lexer.FUNCTION_BUILTIN`, `lexer.FUNCTION_METHOD`, `lexer.CLASS`, `lexer.TYPE`, `lexer.LABEL`, --- `lexer.REGEX`, `lexer.EMBEDDED`, and `lexer.ANNOTATION`. Tag names for markup languages include --- (but are not limited to) `lexer.TAG`, `lexer.ATTRIBUTE`, `lexer.HEADING`, `lexer.BOLD`, --- `lexer.ITALIC`, `lexer.UNDERLINE`, `lexer.CODE`, `lexer.LINK`, `lexer.REFERENCE`, and --- `lexer.LIST`. Patterns include `lexer.any`, `lexer.alpha`, `lexer.digit`, `lexer.alnum`, --- `lexer.lower`, `lexer.upper`, `lexer.xdigit`, `lexer.graph`, `lexer.punct`, `lexer.space`, --- `lexer.newline`, `lexer.nonnewline`, `lexer.dec_num`, `lexer.hex_num`, `lexer.oct_num`, --- `lexer.bin_num`, `lexer.integer`, `lexer.float`, `lexer.number`, and `lexer.word`. You may --- use your own tag names if none of the above fit your language, but an advantage to using --- predefined tag names is that the language elements your lexer recognizes will inherit any --- universal syntax highlighting color theme that your editor uses. You can also "subclass" --- existing tag names by appending a '.*subclass*' string to them. For example, the HTML lexer --- tags unknown tags as `lexer.TAG .. '.unknown'`. This gives editors the opportunity to style --- those subclassed tags in a different way than normal tags, or fall back to styling them as --- normal tags. --- --- ##### Example Tags --- --- So, how might you recognize and tag elements like keywords, comments, and strings? Here are --- some examples. --- --- **Keywords** --- --- Instead of matching _n_ keywords with _n_ `P('keyword_`_`n`_`')` ordered choices, use one --- of of the following methods: --- --- 1. 
Use the convenience function `lexer.word_match()` optionally coupled with --- `lexer.set_word_list()`. It is much easier and more efficient to write word matches like: --- --- local keyword = lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD)) --- [...] --- lex:set_word_list(lexer.KEYWORD, { --- 'keyword_1', 'keyword_2', ..., 'keyword_n' --- }) --- --- local case_insensitive_word = lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD, true)) --- [...] --- lex:set_word_list(lexer.KEYWORD, { --- 'KEYWORD_1', 'keyword_2', ..., 'KEYword_n' --- }) --- --- local hyphenated_keyword = lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD)) --- [...] --- lex:set_word_list(lexer.KEYWORD, { --- 'keyword-1', 'keyword-2', ..., 'keyword-n' --- }) --- --- The benefit of using this method is that other lexers that inherit from, embed, or embed --- themselves into your lexer can set, replace, or extend these word lists. For example, --- the TypeScript lexer inherits from JavaScript, but extends JavaScript's keyword and type --- lists with more options. --- --- This method also allows applications that use your lexer to extend or replace your word --- lists. For example, the Lua lexer includes keywords and functions for the latest version --- of Lua (5.4 at the time of writing). However, editors using that lexer might want to use --- keywords from Lua version 5.1, which is still quite popular. --- --- Note that calling `lex:set_word_list()` is completely optional. Your lexer is allowed to --- expect the editor using it to supply word lists. Scintilla-based editors can do so via --- Scintilla's `ILexer5` interface. --- --- 2. 
Use the lexer-agnostic form of `lexer.word_match()`: --- --- local keyword = lex:tag(lexer.KEYWORD, lexer.word_match{ --- 'keyword_1', 'keyword_2', ..., 'keyword_n' --- }) --- --- local case_insensitive_keyword = lex:tag(lexer.KEYWORD, lexer.word_match({ --- 'KEYWORD_1', 'keyword_2', ..., 'KEYword_n' --- }, true)) --- --- local hyphened_keyword = lex:tag(lexer.KEYWORD, lexer.word_match{ --- 'keyword-1', 'keyword-2', ..., 'keyword-n' --- }) --- --- For short keyword lists, you can use a single string of words. For example: --- --- local keyword = lex:tag(lexer.KEYWORD, lexer.word_match('key_1 key_2 ... key_n')) --- --- You can use this method for static word lists that do not change, or where it does not --- make sense to allow applications or other lexers to extend or replace a word list. --- --- **Comments** --- --- Line-style comments with a prefix character(s) are easy to express: --- --- local shell_comment = lex:tag(lexer.COMMENT, lexer.to_eol('#')) --- local c_line_comment = lex:tag(lexer.COMMENT, lexer.to_eol('//', true)) --- --- The comments above start with a '#' or "//" and go to the end of the line (EOL). The second --- comment recognizes the next line also as a comment if the current line ends with a '\' --- escape character. --- --- C-style "block" comments with a start and end delimiter are also easy to express: --- --- local c_comment = lex:tag(lexer.COMMENT, lexer.range('/*', '*/')) --- --- This comment starts with a "/\*" sequence and contains anything up to and including an ending --- "\*/" sequence. The ending "\*/" is optional so the lexer can recognize unfinished comments --- as comments and highlight them properly. --- --- **Strings** --- --- Most programming languages allow escape sequences in strings such that a sequence like --- "\\&quot;" in a double-quoted string indicates that the '&quot;' is not the end of the --- string. `lexer.range()` handles escapes inherently. 
--- --- local dq_str = lexer.range('"') --- local sq_str = lexer.range("'") --- local string = lex:tag(lexer.STRING, dq_str + sq_str) --- --- In this case, the lexer treats '\' as an escape character in a string sequence. --- --- **Numbers** --- --- Most programming languages have the same format for integers and floats, so it might be as --- simple as using a predefined LPeg pattern: --- --- local number = lex:tag(lexer.NUMBER, lexer.number) --- --- However, some languages allow postfix characters on integers. --- --- local integer = P('-')^-1 * (lexer.dec_num * S('lL')^-1) --- local number = lex:tag(lexer.NUMBER, lexer.float + lexer.hex_num + integer) --- --- Other languages allow separaters within numbers for better readability. --- --- local number = lex:tag(lexer.NUMBER, lexer.number_('_')) -- recognize 1_000_000 --- --- Your language may need other tweaks, but it is up to you how fine-grained you want your --- highlighting to be. After all, you are not writing a compiler or interpreter! --- --- #### Rules --- --- Programming languages have grammars, which specify valid syntactic structure. For example, --- comments usually cannot appear within a string, and valid identifiers (like variable names) --- cannot be keywords. In Lua lexers, grammars consist of LPeg pattern rules, many of which --- are tagged. Recall from the lexer template the `lexer.add_rule()` call, which adds a rule --- to the lexer's grammar: --- --- lex:add_rule('identifier', identifier) --- --- Each rule has an associated name, but rule names are completely arbitrary and serve only to --- identify and distinguish between different rules. Rule order is important: if text does not --- match the first rule added to the grammar, the lexer tries to match the second rule added, and --- so on. Right now this lexer simply matches identifiers under a rule named "identifier". 
--- --- To illustrate the importance of rule order, here is an example of a simplified Lua lexer: --- --- lex:add_rule('keyword', lex:tag(lexer.KEYWORD, ...)) --- lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, ...)) --- lex:add_rule('string', lex:tag(lexer.STRING, ...)) --- lex:add_rule('comment', lex:tag(lexer.COMMENT, ...)) --- lex:add_rule('number', lex:tag(lexer.NUMBER, ...)) --- lex:add_rule('label', lex:tag(lexer.LABEL, ...)) --- lex:add_rule('operator', lex:tag(lexer.OPERATOR, ...)) --- --- Notice how identifiers come _after_ keywords. In Lua, as with most programming languages, --- the characters allowed in keywords and identifiers are in the same set (alphanumerics plus --- underscores). If the lexer added the "identifier" rule before the "keyword" rule, all keywords --- would match identifiers and thus would be incorrectly tagged (and likewise incorrectly --- highlighted) as identifiers instead of keywords. The same idea applies to function names, --- constants, etc. that you may want to distinguish between: their rules should come before --- identifiers. --- --- So what about text that does not match any rules? For example in Lua, the '!' character is --- meaningless outside a string or comment. Normally the lexer skips over such text. If instead --- you want to highlight these "syntax errors", add a final rule: --- --- lex:add_rule('keyword', keyword) --- ... --- lex:add_rule('error', lex:tag(lexer.ERROR, lexer.any)) --- --- This identifies and tags any character not matched by an existing rule as a `lexer.ERROR`. --- --- Even though the rules defined in the examples above contain a single tagged pattern, rules may --- consist of multiple tagged patterns. For example, the rule for an HTML tag could consist of a --- tagged tag followed by an arbitrary number of tagged attributes, separated by whitespace. This --- allows the lexer to produce all tags separately, but in a single, convenient rule. 
That rule --- might look something like this: --- --- local ws = lex:get_rule('whitespace') -- predefined rule for all lexers --- lex:add_rule('tag', tag_start * (ws * attributes)^0 * tag_end^-1) --- --- Note however that lexers with complex rules like these are more prone to lose track of their --- state, especially if they span multiple lines. --- --- #### Summary --- --- Lexers primarily consist of tagged patterns and grammar rules. These patterns match language --- elements like keywords, comments, and strings, and rules dictate the order in which patterns --- are matched. At your disposal are a number of convenience patterns and functions for rapidly --- creating a lexer. If you choose to use predefined tag names (or perhaps even subclassed --- names) for your patterns, you do not have to update your editor's theme to specify how to --- syntax-highlight those patterns. Your language's elements will inherit the default syntax --- highlighting color theme your editor uses. --- --- ### Advanced Techniques --- --- #### Line Lexers --- --- By default, lexers match the arbitrary chunks of text passed to them by Scintilla. These --- chunks may be a full document, only the visible part of a document, or even just portions --- of lines. Some lexers need to match whole lines. For example, a lexer for the output of a --- file "diff" needs to know if the line started with a '+' or '-' and then style the entire --- line accordingly. To indicate that your lexer matches by line, create the lexer with an --- extra parameter: --- --- local lex = lexer.new(..., {lex_by_line = true}) --- --- Now the input text for the lexer is a single line at a time. Keep in mind that line lexers --- do not have the ability to look ahead to subsequent lines. --- --- #### Embedded Lexers --- --- Scintillua lexers embed within one another very easily, requiring minimal effort. 
In the --- following sections, the lexer being embedded is called the "child" lexer and the lexer a child --- is being embedded in is called the "parent". For example, consider an HTML lexer and a CSS --- lexer. Either lexer stands alone for styling their respective HTML and CSS files. However, CSS --- can be embedded inside HTML. In this specific case, the CSS lexer is the "child" lexer with --- the HTML lexer being the "parent". Now consider an HTML lexer and a PHP lexer. This sounds --- a lot like the case with CSS, but there is a subtle difference: PHP _embeds itself into_ --- HTML while CSS is _embedded in_ HTML. This fundamental difference results in two types of --- embedded lexers: a parent lexer that embeds other child lexers in it (like HTML embedding CSS), --- and a child lexer that embeds itself into a parent lexer (like PHP embedding itself in HTML). --- --- ##### Parent Lexer --- --- Before embedding a child lexer into a parent lexer, the parent lexer needs to load the child --- lexer. This is done with the `lexer.load()` function. For example, loading the CSS lexer --- within the HTML lexer looks like: --- --- local css = lexer.load('css') --- --- The next part of the embedding process is telling the parent lexer when to switch over --- to the child lexer and when to switch back. The lexer refers to these indications as the --- "start rule" and "end rule", respectively, and are just LPeg patterns. Continuing with the --- HTML/CSS example, the transition from HTML to CSS is when the lexer encounters a "style" --- tag with a "type" attribute whose value is "text/css": --- --- local css_tag = P('<style') * P(function(input, index) --- if input:find('^[^>]+type="text/css"', index) then return true end --- end) --- --- This pattern looks for the beginning of a "style" tag and searches its attribute list for --- the text "`type="text/css"`". 
(In this simplified example, the Lua pattern does not consider --- whitespace between the '=' nor does it consider that using single quotes is valid.) If there --- is a match, the functional pattern returns `true`. However, we ultimately want to style the --- "style" tag as an HTML tag, so the actual start rule looks like this: --- --- local css_start_rule = #css_tag * tag --- --- Now that the parent knows when to switch to the child, it needs to know when to switch --- back. In the case of HTML/CSS, the switch back occurs when the lexer encounters an ending --- "style" tag, though the lexer should still style the tag as an HTML tag: --- --- local css_end_rule = #P('</style>') * tag --- --- Once the parent loads the child lexer and defines the child's start and end rules, it embeds --- the child with the `lexer.embed()` function: --- --- lex:embed(css, css_start_rule, css_end_rule) --- --- ##### Child Lexer --- --- The process for instructing a child lexer to embed itself into a parent is very similar to --- embedding a child into a parent: first, load the parent lexer into the child lexer with the --- `lexer.load()` function and then create start and end rules for the child lexer. However, --- in this case, call `lexer.embed()` with switched arguments. For example, in the PHP lexer: --- --- local html = lexer.load('html') --- local php_start_rule = lex:tag('php_tag', '<?php' * lexer.space) --- local php_end_rule = lex:tag('php_tag', '?>') --- html:embed(lex, php_start_rule, php_end_rule) --- --- Note that the use of a 'php_tag' tag will require the editor using the lexer to specify how --- to highlight text with that tag. In order to avoid this, you could use the `lexer.PREPROCESSOR` --- tag instead. --- --- #### Lexers with Complex State --- --- A vast majority of lexers are not stateful and can operate on any chunk of text in a --- document. However, there may be rare cases where a lexer does need to keep track of some --- sort of persistent state. 
Rather than using `lpeg.P` function patterns that set state --- variables, it is recommended to make use of Scintilla's built-in, per-line state integers via --- `lexer.line_state`. It was designed to accommodate up to 32 bit-flags for tracking state. --- `lexer.line_from_position()` will return the line for any position given to an `lpeg.P` --- function pattern. (Any positions derived from that position argument will also work.) --- --- Writing stateful lexers is beyond the scope of this document. --- --- ### Code Folding --- --- When reading source code, it is occasionally helpful to temporarily hide blocks of code like --- functions, classes, comments, etc. This is the concept of "folding". In the Textadept and --- SciTE editors for example, little indicators in the editor margins appear next to code that --- can be folded at places called "fold points". When the user clicks an indicator, the editor --- hides the code associated with the indicator until the user clicks the indicator again. The --- lexer specifies these fold points and what code exactly to fold. --- --- The fold points for most languages occur on keywords or character sequences. Examples of --- fold keywords are "if" and "end" in Lua and examples of fold character sequences are '{', --- '}', "/\*", and "\*/" in C for code block and comment delimiters, respectively. However, --- these fold points cannot occur just anywhere. For example, lexers should not recognize fold --- keywords that appear within strings or comments. The `lexer.add_fold_point()` function allows --- you to conveniently define fold points with such granularity. For example, consider C: --- --- lex:add_fold_point(lexer.OPERATOR, '{', '}') --- lex:add_fold_point(lexer.COMMENT, '/*', '*/') --- --- The first assignment states that any '{' or '}' that the lexer tagged as an `lexer.OPERATOR` --- is a fold point. 
Likewise, the second assignment states that any "/\*" or "\*/" that the --- lexer tagged as part of a `lexer.COMMENT` is a fold point. The lexer does not consider any --- occurrences of these characters outside their tagged elements (such as in a string) as fold --- points. How do you specify fold keywords? Here is an example for Lua: --- --- lex:add_fold_point(lexer.KEYWORD, 'if', 'end') --- lex:add_fold_point(lexer.KEYWORD, 'do', 'end') --- lex:add_fold_point(lexer.KEYWORD, 'function', 'end') --- lex:add_fold_point(lexer.KEYWORD, 'repeat', 'until') --- --- If your lexer has case-insensitive keywords as fold points, simply add a --- `case_insensitive_fold_points = true` option to `lexer.new()`, and specify keywords in --- lower case. --- --- If your lexer needs to do some additional processing in order to determine if a tagged element --- is a fold point, pass a function to `lex:add_fold_point()` that returns an integer. A return --- value of `1` indicates the element is a beginning fold point and a return value of `-1` --- indicates the element is an ending fold point. A return value of `0` indicates the element --- is not a fold point. For example: --- --- local function fold_strange_element(text, pos, line, s, symbol) --- if ... then --- return 1 -- beginning fold point --- elseif ... then --- return -1 -- ending fold point --- end --- return 0 --- end --- --- lex:add_fold_point('strange_element', '|', fold_strange_element) --- --- Any time the lexer encounters a '|' that is tagged as a "strange_element", it calls the --- `fold_strange_element` function to determine if '|' is a fold point. The lexer calls these --- functions with the following arguments: the text to identify fold points in, the beginning --- position of the current line in the text to fold, the current line's text, the position in --- the current line the fold point text starts at, and the fold point text itself. 
--- --- #### Fold by Indentation --- --- Some languages have significant whitespace and/or no delimiters that indicate fold points. If --- your lexer falls into this category and you would like to mark fold points based on changes --- in indentation, create the lexer with a `fold_by_indentation = true` option: --- --- local lex = lexer.new(..., {fold_by_indentation = true}) --- --- ### Using Lexers --- --- **Textadept** --- --- Place your lexer in your *~/.textadept/lexers/* directory so you do not overwrite it when --- upgrading Textadept. Also, lexers in this directory override default lexers. Thus, Textadept --- loads a user *lua* lexer instead of the default *lua* lexer. This is convenient for tweaking --- a default lexer to your liking. Then add a [file extension](#lexer.detect_extensions) for --- your lexer if necessary. --- --- **SciTE** --- --- Create a *.properties* file for your lexer and `import` it in either your *SciTEUser.properties* --- or *SciTEGlobal.properties*. The contents of the *.properties* file should contain: --- --- file.patterns.[lexer_name]=[file_patterns] --- lexer.$(file.patterns.[lexer_name])=scintillua.[lexer_name] --- keywords.$(file.patterns.[lexer_name])=scintillua --- keywords2.$(file.patterns.[lexer_name])=scintillua --- ... --- keywords9.$(file.patterns.[lexer_name])=scintillua --- --- where `[lexer_name]` is the name of your lexer (minus the *.lua* extension) and --- `[file_patterns]` is a set of file extensions to use your lexer for. The `keyword` settings are --- only needed if another SciTE properties file has defined keyword sets for `[file_patterns]`. --- The `scintillua` keyword setting instructs Scintillua to use the keyword sets defined within --- the lexer. You can override a lexer's keyword set(s) by specifying your own in the same order --- that the lexer calls `lex:set_word_list()`. 
For example, the Lua lexer's first set of keywords --- is for reserved words, the second is for built-in global functions, the third is for library --- functions, the fourth is for built-in global constants, and the fifth is for library constants. --- --- SciTE assigns styles to tag names in order to perform syntax highlighting. Since the set of --- tag names used for a given language changes, your *.properties* file should specify styles --- for tag names instead of style numbers. For example: --- --- scintillua.styles.my_tag=$(scintillua.styles.keyword),bold --- --- ### Migrating Legacy Lexers --- --- Legacy lexers are of the form: --- --- local lexer = require('lexer') --- local token, word_match = lexer.token, lexer.word_match --- local P, S = lpeg.P, lpeg.S --- --- local lex = lexer.new('?') --- --- -- Whitespace. --- lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) --- --- -- Keywords. --- lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ --- [...] --- })) --- --- [... other rule definitions ...] --- --- -- Custom. --- lex:add_rule('custom_rule', token('custom_token', ...)) --- lex:add_style('custom_token', lexer.styles.keyword .. {bold = true}) --- --- -- Fold points. --- lex:add_fold_point(lexer.OPERATOR, '{', '}') --- --- return lex --- --- While Scintillua will mostly handle such legacy lexers just fine without any changes, it is --- recommended that you migrate yours. The migration process is fairly straightforward: --- --- 1. `lexer` exists in the default lexer environment, so `require('lexer')` should be replaced --- by simply `lexer`. (Keep in mind `local lexer = lexer` is a Lua idiom.) --- 2. Every lexer created using `lexer.new()` should no longer specify a lexer name by string, --- but should instead use `...` (three dots), which evaluates to the lexer's filename or --- alternative name in embedded lexer applications. --- 3. Every lexer created using `lexer.new()` now includes a rule to match whitespace. 
Unless --- your lexer has significant whitespace, you can remove your legacy lexer's whitespace --- token and rule. Otherwise, your defined whitespace rule will replace the default one. --- 4. The concept of tokens has been replaced with tags. Instead of calling a `token()` function, --- call [`lex:tag()`](#lexer.tag) instead. --- 5. Lexers now support replaceable word lists. Instead of calling `lexer.word_match()` with --- large word lists, call it as an instance method with an identifier string (typically --- something like `lexer.KEYWORD`). Then at the end of the lexer (before `return lex`), call --- [`lex:set_word_list()`](#lexer.set_word_list) with the same identifier and the usual --- list of words to match. This allows users of your lexer to call `lex:set_word_list()` --- with their own set of words should they wish to. --- 6. Lexers no longer specify styling information. Remove any calls to `lex:add_style()`. You --- may need to add styling information for custom tags to your editor's theme. --- 7. `lexer.last_char_includes()` has been deprecated in favor of the new `lexer.after_set()`. --- Use the character set and pattern as arguments to that new function. --- --- As an example, consider the following sample legacy lexer: --- --- local lexer = require('lexer') --- local token, word_match = lexer.token, lexer.word_match --- local P, S = lpeg.P, lpeg.S --- --- local lex = lexer.new('legacy') --- --- lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) --- lex:add_rule('keyword', token(lexer.KEYWORD, word_match('foo bar baz'))) --- lex:add_rule('custom', token('custom', 'quux')) --- lex:add_style('custom', lexer.styles.keyword .. 
{bold = true}) --- lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) --- lex:add_rule('string', token(lexer.STRING, lexer.range('"'))) --- lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#'))) --- lex:add_rule('number', token(lexer.NUMBER, lexer.number)) --- lex:add_rule('operator', token(lexer.OPERATOR, S('+-*/%^=<>,.()[]{}'))) --- --- lex:add_fold_point(lexer.OPERATOR, '{', '}') --- --- return lex --- --- Following the migration steps would yield: --- --- local lexer = lexer --- local P, S = lpeg.P, lpeg.S --- --- local lex = lexer.new(...) --- --- lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD))) --- lex:add_rule('custom', lex:tag('custom', 'quux')) --- lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) --- lex:add_rule('string', lex:tag(lexer.STRING, lexer.range('"'))) --- lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.to_eol('#'))) --- lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number)) --- lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('+-*/%^=<>,.()[]{}'))) --- --- lex:add_fold_point(lexer.OPERATOR, '{', '}') --- --- lex:set_word_list(lexer.KEYWORD, {'foo', 'bar', 'baz'}) --- --- return lex --- --- Any editors using this lexer would have to add a style for the 'custom' tag. --- --- ### Considerations --- --- #### Performance --- --- There might be some slight overhead when initializing a lexer, but loading a file from disk --- into Scintilla is usually more expensive. Actually painting the syntax highlighted text to --- the screen is often more expensive than the lexing operation. On modern computer systems, --- I see no difference in speed between Lua lexers and Scintilla's C++ ones. Optimize lexers for --- speed by re-arranging `lexer.add_rule()` calls so that the most common rules match first. Do --- keep in mind that order matters for similar rules. 
--- --- In some cases, folding may be far more expensive than lexing, particularly in lexers with a --- lot of potential fold points. If your lexer is exhibiting signs of slowness, try disabling --- folding in your text editor first. If that speeds things up, you can try reducing the number --- of fold points you added, overriding `lexer.fold()` with your own implementation, or simply --- eliminating folding support from your lexer. --- --- #### Limitations --- --- Embedded preprocessor languages like PHP cannot completely embed themselves into their parent --- languages because the parent's tagged patterns do not support start and end rules. This --- mostly goes unnoticed, but code like --- --- <div id="<?php echo $id; ?>"> --- --- will not style correctly. Also, these types of languages cannot currently embed themselves --- into their parent's child languages either. --- --- A language cannot embed itself into something like an interpolated string because it is --- possible that if lexing starts within the embedded entity, it will not be detected as such, --- so a child to parent transition cannot happen. For example, the following Ruby code will --- not style correctly: --- --- sum = "1 + 2 = #{1 + 2}" --- --- Also, there is the potential for recursion for languages embedding themselves within themselves. --- --- #### Troubleshooting --- --- Errors in lexers can be tricky to debug. Lexers print Lua errors to `io.stderr` and `_G.print()` --- statements to `io.stdout`. Running your editor from a terminal is the easiest way to see --- errors as they occur. --- --- #### Risks --- --- Poorly written lexers have the ability to crash Scintilla (and thus its containing application), --- so unsaved data might be lost. However, I have only observed these crashes in early lexer --- development, when syntax errors or pattern errors are present. 
Once the lexer actually --- starts processing and tagging text (either correctly or incorrectly, it does not matter), --- I have not observed any crashes. --- --- #### Acknowledgements --- --- Thanks to Peter Odding for his [lexer post][] on the Lua mailing list that provided inspiration, --- and thanks to Roberto Ierusalimschy for LPeg. --- --- [lexer post]: http://lua-users.org/lists/lua-l/2007-04/msg00116.html --- @module lexer -local M = {} - ---- The tag name for default elements. --- @field DEFAULT - ---- The tag name for comment elements. --- @field COMMENT - ---- The tag name for string elements. --- @field STRING - ---- The tag name for number elements. --- @field NUMBER - ---- The tag name for keyword elements. --- @field KEYWORD - ---- The tag name for identifier elements. --- @field IDENTIFIER - ---- The tag name for operator elements. --- @field OPERATOR - ---- The tag name for error elements. --- @field ERROR - ---- The tag name for preprocessor elements. --- @field PREPROCESSOR - ---- The tag name for constant elements. --- @field CONSTANT - ---- The tag name for variable elements. --- @field VARIABLE - ---- The tag name for function elements. --- @field FUNCTION - ---- The tag name for class elements. --- @field CLASS - ---- The tag name for type elements. --- @field TYPE - ---- The tag name for label elements. --- @field LABEL - ---- The tag name for regex elements. --- @field REGEX - ---- The tag name for embedded elements. --- @field EMBEDDED - ---- The tag name for builtin function elements. --- @field FUNCTION_BUILTIN - ---- The tag name for builtin constant elements. --- @field CONSTANT_BUILTIN - ---- The tag name for function method elements. --- @field FUNCTION_METHOD - ---- The tag name for function tag elements, typically in markup. --- @field TAG - ---- The tag name for function attribute elements, typically in markup. --- @field ATTRIBUTE - ---- The tag name for builtin variable elements. 
--- @field VARIABLE_BUILTIN - ---- The tag name for heading elements, typically in markup. --- @field HEADING - ---- The tag name for bold elements, typically in markup. --- @field BOLD - ---- The tag name for italic elements, typically in markup. --- @field ITALIC - ---- The tag name for underlined elements, typically in markup. --- @field UNDERLINE - ---- The tag name for code elements, typically in markup. --- @field CODE - ---- The tag name for link elements, typically in markup. --- @field LINK - ---- The tag name for reference elements, typically in markup. --- @field REFERENCE - ---- The tag name for annotation elements. --- @field ANNOTATION - ---- The tag name for list item elements, typically in markup. --- @field LIST - ---- The initial (root) fold level. --- @field FOLD_BASE - ---- Flag indicating that the line is blank. --- @field FOLD_BLANK - ---- Flag indicating the line is a fold point. --- @field FOLD_HEADER - --- This comment is needed for LDoc to process the previous field. - -if not lpeg then lpeg = require('lpeg') end -- Scintillua's Lua environment defines _G.lpeg -local lpeg = lpeg -local P, R, S, V, B = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.B -local Ct, Cc, Cp, Cmt, C = lpeg.Ct, lpeg.Cc, lpeg.Cp, lpeg.Cmt, lpeg.C - ---- Default tags. -local default = { - 'whitespace', 'comment', 'string', 'number', 'keyword', 'identifier', 'operator', 'error', - 'preprocessor', 'constant', 'variable', 'function', 'class', 'type', 'label', 'regex', 'embedded', - 'function.builtin', 'constant.builtin', 'function.method', 'tag', 'attribute', 'variable.builtin', - 'heading', 'bold', 'italic', 'underline', 'code', 'link', 'reference', 'annotation', 'list' -} -for _, name in ipairs(default) do M[name:upper():gsub('%.', '_')] = name end ---- Names for predefined Scintilla styles. --- Having these here simplifies style number handling between Scintillua and Scintilla. 
-local predefined = { - 'default', 'line.number', 'brace.light', 'brace.bad', 'control.char', 'indent.guide', 'call.tip', - 'fold.display.text' -} -for _, name in ipairs(predefined) do M[name:upper():gsub('%.', '_')] = name end - ---- Creates and returns a pattern that tags pattern *patt* with name *name* in lexer *lexer*. --- If *name* is not a predefined tag name, its Scintilla style will likely need to be defined --- by the editor or theme using this lexer. --- @param lexer The lexer to tag the given pattern in. --- @param name The name to use. --- @param patt The LPeg pattern to tag. --- @return pattern --- @usage local number = lex:tag(lexer.NUMBER, lexer.number) --- @usage local addition = lex:tag('addition', '+' * lexer.word) -function M.tag(lexer, name, patt) - if not lexer._TAGS then - -- Create the initial maps for tag names to style numbers and styles. - local tags = {} - for i, name in ipairs(default) do tags[name], tags[i] = i, name end - for i, name in ipairs(predefined) do tags[name], tags[i + 32] = i + 32, name end - lexer._TAGS, lexer._num_styles = tags, #default + 1 - lexer._extra_tags = {} - end - if not assert(lexer._TAGS, 'not a lexer instance')[name] then - local num_styles = lexer._num_styles - if num_styles == 33 then num_styles = num_styles + 8 end -- skip predefined - assert(num_styles <= 256, 'too many styles defined (256 MAX)') - lexer._TAGS[name], lexer._TAGS[num_styles], lexer._num_styles = num_styles, name, num_styles + 1 - lexer._extra_tags[name] = true - -- If the lexer is a proxy or a child that embedded itself, make this tag name known to - -- the parent lexer. - if lexer._lexer then lexer._lexer:tag(name, false) end - end - return Cc(name) * (P(patt) / 0) * Cp() -end - ---- Returns a unique grammar rule name for the given lexer's i-th word list. -local function word_list_id(lexer, i) return lexer._name .. '_wordlist' .. 
i end - ---- Either returns a pattern for lexer *lexer* (if given) that matches one word in the word list --- identified by string *word_list*, ignoring case if *case_insensitive* is `true`, or, if *lexer* --- is not given, creates and returns a pattern that matches any single word in list or string --- *word_list*, ignoring case if *case_insensitive* is `true`. --- This is a convenience function for simplifying a set of ordered choice word patterns and --- potentially allowing downstream users to configure word lists. --- If there is ultimately no word list set via `set_word_list()`, no error will be raised, --- but the returned pattern will not match anything. --- @param[opt] lexer Optional lexer to match a word in a wordlist for. This parameter may be --- omitted for lexer-agnostic matching. --- @param word_list Either a string name of the word list to match from if *lexer* is given, --- or, if *lexer* is omitted, a list of words or a string list of words separated by spaces. --- @param[opt] case_insensitive Optional boolean flag indicating whether or not the word match --- is case-insensitive. The default value is `false`. --- @return pattern --- @usage lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD))) --- @usage local keyword = lex:tag(lexer.KEYWORD, lexer.word_match{'foo', 'bar', 'baz'}) --- @usage local keyword = lex:tag(lexer.KEYWORD, lexer.word_match({'foo-bar', 'foo-baz', --- 'bar-foo', 'bar-baz', 'baz-foo', 'baz-bar'}, true)) --- @usage local keyword = lex:tag(lexer.KEYWORD, lexer.word_match('foo bar baz')) -function M.word_match(lexer, word_list, case_insensitive) - if type(lexer) == 'table' and getmetatable(lexer) then - if lexer._lexer then - -- If this lexer is a proxy (e.g. rails), get the true parent (ruby) in order to get the - -- parent's word list. If this lexer is a child embedding itself (e.g. php), continue - -- getting its word list, not the parent's (html). 
- local parent = lexer._lexer - if not parent._CHILDREN or not parent._CHILDREN[lexer] then lexer = parent end - end - - if not lexer._WORDLISTS then lexer._WORDLISTS = {case_insensitive = {}} end - local i = lexer._WORDLISTS[word_list] or #lexer._WORDLISTS + 1 - lexer._WORDLISTS[word_list], lexer._WORDLISTS[i] = i, '' -- empty placeholder word list - lexer._WORDLISTS.case_insensitive[i] = case_insensitive - return V(word_list_id(lexer, i)) - end - - -- Lexer-agnostic word match. - word_list, case_insensitive = lexer, word_list - - if type(word_list) == 'string' then - local words = word_list -- space-separated list of words - word_list = {} - for word in words:gmatch('%S+') do word_list[#word_list + 1] = word end - end - - local word_chars = M.alnum + '_' - local extra_chars = '' - for _, word in ipairs(word_list) do - word_list[case_insensitive and word:lower() or word] = true - for char in word:gmatch('[^%w_%s]') do - if not extra_chars:find(char, 1, true) then extra_chars = extra_chars .. char end - end - end - if extra_chars ~= '' then word_chars = word_chars + S(extra_chars) end - - -- Optimize small word sets as ordered choice. "Small" is arbitrary. - if #word_list <= 6 and not case_insensitive then - local choice = P(false) - for _, word in ipairs(word_list) do choice = choice + word:match('%S+') end - return choice * -word_chars - end - - return Cmt(word_chars^1, function(input, index, word) - if case_insensitive then word = word:lower() end - return word_list[word] - end) -end - ---- Sets in lexer *lexer* the word list identified by string or number *name* to string or --- list *word_list*, appending to any existing word list if *append* is `true`. --- This only has an effect if *lexer* uses `word_match()` to reference the given list. --- Case-insensitivity is specified by `word_match()`. --- @param lexer The lexer to add the given word list to. --- @param name The string name or number of the word list to set. 
--- @param word_list A list of words or a string list of words separated by spaces. --- @param append Whether or not to append *word_list* to the existing word list (if any). The --- default value is `false`. -function M.set_word_list(lexer, name, word_list, append) - if word_list == 'scintillua' then return end -- for SciTE - if lexer._lexer then - -- If this lexer is a proxy (e.g. rails), get the true parent (ruby) in order to set the - -- parent's word list. If this lexer is a child embedding itself (e.g. php), continue - -- setting its word list, not the parent's (html). - local parent = lexer._lexer - if not parent._CHILDREN or not parent._CHILDREN[lexer] then lexer = parent end - end - - assert(lexer._WORDLISTS, 'lexer has no word lists') - local i = tonumber(lexer._WORDLISTS[name]) or name -- lexer._WORDLISTS[name] --> i - if type(i) ~= 'number' or i > #lexer._WORDLISTS then return end -- silently return - - if type(word_list) == 'string' then - local list = {} - for word in word_list:gmatch('%S+') do list[#list + 1] = word end - word_list = list - end - - if not append or lexer._WORDLISTS[i] == '' then - lexer._WORDLISTS[i] = word_list - else - local list = lexer._WORDLISTS[i] - for _, word in ipairs(word_list) do list[#list + 1] = word end - end - - lexer._grammar_table = nil -- invalidate -end - ---- Adds pattern *rule* identified by string *id* to the ordered list of rules for lexer *lexer*. --- @param lexer The lexer to add the given rule to. --- @param id The id associated with this rule. It does not have to be the same as the name --- passed to `tag()`. --- @param rule The LPeg pattern of the rule. 
--- @see modify_rule -function M.add_rule(lexer, id, rule) - if lexer._lexer then lexer = lexer._lexer end -- proxy; get true parent - if not lexer._rules then lexer._rules = {} end - if id == 'whitespace' and lexer._rules[id] then -- legacy - lexer:modify_rule(id, rule) - return - end - lexer._rules[#lexer._rules + 1], lexer._rules[id] = id, rule - lexer._grammar_table = nil -- invalidate -end - ---- Replaces in lexer *lexer* the existing rule identified by string *id* with pattern *rule*. --- @param lexer The lexer to modify. --- @param id The id associated with this rule. --- @param rule The LPeg pattern of the rule. -function M.modify_rule(lexer, id, rule) - if lexer._lexer then lexer = lexer._lexer end -- proxy; get true parent - assert(lexer._rules[id], 'rule does not exist') - lexer._rules[id] = rule - lexer._grammar_table = nil -- invalidate -end - ---- Returns a unique grammar rule name for the given lexer's rule name. -local function rule_id(lexer, name) return lexer._name .. '.' .. name end - ---- Returns the rule identified by string *id*. --- @param lexer The lexer to fetch a rule from. --- @param id The id of the rule to fetch. --- @return pattern -function M.get_rule(lexer, id) - if lexer._lexer then lexer = lexer._lexer end -- proxy; get true parent - if id == 'whitespace' then return V(rule_id(lexer, id)) end -- special case - return assert(lexer._rules[id], 'rule does not exist') -end - ---- Embeds child lexer *child* in parent lexer *lexer* using patterns *start_rule* and *end_rule*, --- which signal the beginning and end of the embedded lexer, respectively. --- @param lexer The parent lexer. --- @param child The child lexer. --- @param start_rule The pattern that signals the beginning of the embedded lexer. --- @param end_rule The pattern that signals the end of the embedded lexer. 
--- @usage html:embed(css, css_start_rule, css_end_rule) --- @usage html:embed(lex, php_start_rule, php_end_rule) -- from php lexer -function M.embed(lexer, child, start_rule, end_rule) - if lexer._lexer then lexer = lexer._lexer end -- proxy; get true parent - - -- Add child rules. - assert(child._rules, 'cannot embed lexer with no rules') - if not child._start_rules then child._start_rules = {} end - if not child._end_rules then child._end_rules = {} end - child._start_rules[lexer], child._end_rules[lexer] = start_rule, end_rule - if not lexer._CHILDREN then lexer._CHILDREN = {} end - lexer._CHILDREN[#lexer._CHILDREN + 1], lexer._CHILDREN[child] = child, true - - -- Add child tags. - for name in pairs(child._extra_tags) do lexer:tag(name, true) end - - -- Add child fold symbols. - if child._fold_points then - for tag_name, symbols in pairs(child._fold_points) do - if tag_name ~= '_symbols' then - for symbol, v in pairs(symbols) do lexer:add_fold_point(tag_name, symbol, v) end - end - end - end - - -- Add child word lists. - if child._WORDLISTS then - for name, i in pairs(child._WORDLISTS) do - if type(name) == 'string' and type(i) == 'number' then - name = child._name .. '.' .. name - lexer:word_match(name) -- for side effects - lexer:set_word_list(name, child._WORDLISTS[i]) - end - end - end - - child._lexer = lexer -- use parent's rules if child is embedding itself -end - ---- Adds to lexer *lexer* a fold point whose beginning and end points are tagged with string --- *tag_name* tags and have string content *start_symbol* and *end_symbol*, respectively. --- In the event that *start_symbol* may or may not be a fold point depending on context, and that --- additional processing is required, *end_symbol* may be a function that ultimately returns --- `1` (indicating a beginning fold point), `-1` (indicating an ending fold point), or `0` --- (indicating no fold point). 
That function is passed the following arguments: --- --- - `text`: The text being processed for fold points. --- - `pos`: The position in *text* of the beginning of the line currently being processed. --- - `line`: The text of the line currently being processed. --- - `s`: The position of *start_symbol* in *line*. --- - `symbol`: *start_symbol* itself. --- @param lexer The lexer to add a fold point to. --- @param tag_name The tag name for text that indicates a fold point. --- @param start_symbol The text that indicates the beginning of a fold point. --- @param end_symbol Either the text that indicates the end of a fold point, or a function that --- returns whether or not *start_symbol* is a beginning fold point (1), an ending fold point --- (-1), or not a fold point at all (0). --- @usage lex:add_fold_point(lexer.OPERATOR, '{', '}') --- @usage lex:add_fold_point(lexer.KEYWORD, 'if', 'end') --- @usage lex:add_fold_point('custom', function(text, pos, line, s, symbol) ... end) -function M.add_fold_point(lexer, tag_name, start_symbol, end_symbol) - if not start_symbol and not end_symbol then return end -- from legacy fold_consecutive_lines() - if not lexer._fold_points then lexer._fold_points = {_symbols = {}} end - local symbols = lexer._fold_points._symbols - if not lexer._fold_points[tag_name] then lexer._fold_points[tag_name] = {} end - if lexer._case_insensitive_fold_points then - start_symbol = start_symbol:lower() - if type(end_symbol) == 'string' then end_symbol = end_symbol:lower() end - end - - if type(end_symbol) == 'string' then - if not symbols[end_symbol] then symbols[#symbols + 1], symbols[end_symbol] = end_symbol, true end - lexer._fold_points[tag_name][start_symbol] = 1 - lexer._fold_points[tag_name][end_symbol] = -1 - else - lexer._fold_points[tag_name][start_symbol] = end_symbol -- function or int - end - if not symbols[start_symbol] then - symbols[#symbols + 1], symbols[start_symbol] = start_symbol, true - end - - -- If the lexer is a proxy or a 
child that embedded itself, copy this fold point to the - -- parent lexer. - if lexer._lexer then lexer._lexer:add_fold_point(tag_name, start_symbol, end_symbol) end -end - ---- Recursively adds the rules for the given lexer and its children to the given grammar. --- @param g The grammar to add rules to. --- @param lexer The lexer whose rules to add. -local function add_lexer(g, lexer) - local rule = P(false) - - -- Add this lexer's rules. - for _, name in ipairs(lexer._rules) do - local id = rule_id(lexer, name) - g[id] = lexer._rules[name] -- ['lua.keyword'] = keyword_patt - rule = rule + V(id) -- V('lua.keyword') + V('lua.function') + V('lua.constant') + ... - end - local any_id = lexer._name .. '_fallback' - g[any_id] = lexer:tag(M.DEFAULT, M.any) -- ['lua_fallback'] = any_char - rule = rule + V(any_id) -- ... + V('lua.operator') + V('lua_fallback') - - -- Add this lexer's word lists. - if lexer._WORDLISTS then - for i = 1, #lexer._WORDLISTS do - local id = word_list_id(lexer, i) - local list, case_insensitive = lexer._WORDLISTS[i], lexer._WORDLISTS.case_insensitive[i] - local patt = list ~= '' and M.word_match(list, case_insensitive) or P(false) - g[id] = patt -- ['lua_wordlist.1'] = word_match_patt or P(false) - end - end - - -- Add this child lexer's end rules. - if lexer._end_rules then - for parent, end_rule in pairs(lexer._end_rules) do - local back_id = lexer._name .. '_to_' .. parent._name - g[back_id] = end_rule -- ['css_to_html'] = css_end_rule - rule = rule - V(back_id) + -- (V('css.property') + ... + V('css_fallback')) - V('css_to_html') - V(back_id) * V(parent._name) -- V('css_to_html') * V('html') - end - end - - -- Add this child lexer's start rules. - if lexer._start_rules then - for parent, start_rule in pairs(lexer._start_rules) do - local to_id = parent._name .. '_to_' .. lexer._name - g[to_id] = start_rule * V(lexer._name) -- ['html_to_css'] = css_start_rule * V('css') - end - end - - -- Finish adding this lexer's rules. 
- local rule_id = lexer._name .. '_rule' - g[rule_id] = rule -- ['lua_rule'] = V('lua.keyword') + ... + V('lua_fallback') - g[lexer._name] = V(rule_id)^0 -- ['lua'] = V('lua_rule')^0 - - -- Add this lexer's children's rules. - -- TODO: preprocessor languages like PHP should also embed themselves into their parent's - -- children like HTML's CSS and Javascript. - if not lexer._CHILDREN then return end - for _, child in ipairs(lexer._CHILDREN) do - add_lexer(g, child) - local to_id = lexer._name .. '_to_' .. child._name - g[rule_id] = V(to_id) + g[rule_id] -- ['html_rule'] = V('html_to_css') + V('html.comment') + ... - - -- Add a child's inherited parent's rules (e.g. rhtml parent with rails child inheriting ruby). - if child._parent_name then - local name = child._name - child._name = child._parent_name -- ensure parent and transition rule names are correct - add_lexer(g, child) - child._name = name -- restore - local to_id = lexer._name .. '_to_' .. child._parent_name - g[rule_id] = V(to_id) + g[rule_id] -- ['html_rule'] = V('html_to_ruby') + V('html.comment') + ... - end - end -end - ---- Returns a grammar for the given lexer and initial rule, (re)constructing it if necessary. --- @param lexer The lexer to build a grammar for. --- @param init_style The current style. Multiple-language lexers use this to determine which --- language to start lexing in. -local function build_grammar(lexer, init_style) - if not lexer._rules then return end - if not lexer._initial_rule then lexer._initial_rule = lexer._parent_name or lexer._name end - if not lexer._grammar_table then - local grammar = {lexer._initial_rule} - if not lexer._parent_name then - add_lexer(grammar, lexer) - -- {'lua', - -- ['lua.keyword'] = patt, ['lua.function'] = patt, ..., - -- ['lua_wordlist.1'] = patt, ['lua_wordlist.2'] = patt, ..., - -- ['lua_rule'] = V('lua.keyword') + ... 
+ V('lua_fallback'), - -- ['lua'] = V('lua_rule')^0 - -- } - -- {'html' - -- ['html.comment'] = patt, ['html.doctype'] = patt, ..., - -- ['html_wordlist.1'] = patt, ['html_wordlist.2'] = patt, ..., - -- ['html_rule'] = V('html_to_css') * V('css') + V('html.comment') + ... + V('html_fallback'), - -- ['html'] = V('html')^0, - -- ['css.property'] = patt, ['css.value'] = patt, ..., - -- ['css_wordlist.1'] = patt, ['css_wordlist.2'] = patt, ..., - -- ['css_to_html'] = patt, - -- ['css_rule'] = ((V('css.property') + ... + V('css_fallback')) - V('css_to_html')) + - -- V('css_to_html') * V('html'), - -- ['html_to_css'] = patt, - -- ['css'] = V('css_rule')^0 - -- } - else - local name = lexer._name - lexer._name = lexer._parent_name -- ensure parent and transition rule names are correct - add_lexer(grammar, lexer) - lexer._name = name -- restore - -- {'html', - -- ... - -- ['html_rule'] = V('html_to_php') * V('php') + V('html_to_css') * V('css') + - -- V('html.comment') + ... + V('html_fallback'), - -- ... - -- ['php.keyword'] = patt, ['php.type'] = patt, ..., - -- ['php_wordlist.1'] = patt, ['php_wordlist.2'] = patt, ..., - -- ['php_to_html'] = patt, - -- ['php_rule'] = ((V('php.keyword') + ... + V('php_fallback')) - V('php_to_html')) + - -- V('php_to_html') * V('html') - -- ['html_to_php'] = patt, - -- ['php'] = V('php_rule')^0 - -- } - end - lexer._grammar, lexer._grammar_table = Ct(P(grammar)), grammar - end - - -- For multilang lexers, build a new grammar whose initial rule is the current language - -- if necessary. LPeg does not allow a variable initial rule. 
- if lexer._CHILDREN then - for style_num, tag in ipairs(lexer._TAGS) do - if style_num == init_style then - local lexer_name = tag:match('^whitespace%.(.+)$') or lexer._parent_name or lexer._name - if lexer._initial_rule == lexer_name then break end - if not lexer._grammar_table[lexer_name] then - -- For proxy lexers like RHTML, the 'whitespace.rhtml' tag would produce the 'rhtml' - -- lexer name, but there is no 'rhtml' rule. It should be the 'html' rule (parent) - -- instead. - lexer_name = lexer._parent_name or lexer._name - end - lexer._initial_rule = lexer_name - lexer._grammar_table[1] = lexer._initial_rule - lexer._grammar = Ct(P(lexer._grammar_table)) - return lexer._grammar - end - end - end - - return lexer._grammar -end - ---- Lexes a chunk of text *text* (that has an initial style number of *init_style*) using lexer --- *lexer*, returning a list of tag names and positions. --- @param lexer The lexer to lex text with. --- @param text The text in the buffer to lex. --- @param init_style The current style. Multiple-language lexers use this to determine which --- language to start lexing in. --- @return list of tag names and positions. -function M.lex(lexer, text, init_style) - local grammar = build_grammar(lexer, init_style) - if not grammar then return {M.DEFAULT, #text + 1} end - if M._standalone then M._text, M.line_state = text, {} end - - if lexer._lex_by_line then - local line_from_position = M.line_from_position - local function append(tags, line_tags, offset) - for i = 1, #line_tags, 2 do - tags[#tags + 1], tags[#tags + 2] = line_tags[i], line_tags[i + 1] + offset - end - end - local tags = {} - local offset = 0 - rawset(M, 'line_from_position', function(pos) return line_from_position(pos + offset) end) - for line in text:gmatch('[^\r\n]*\r?\n?') do - local line_tags = grammar:match(line) - if line_tags then append(tags, line_tags, offset) end - offset = offset + #line - -- Use the default tag to the end of the line if none was specified. 
- if tags[#tags] ~= offset + 1 then - tags[#tags + 1], tags[#tags + 2] = 'default', offset + 1 - end - end - rawset(M, 'line_from_position', line_from_position) - return tags - end - - return grammar:match(text) -end - ---- Determines fold points in a chunk of text *text* using lexer *lexer*, returning a table of --- fold levels associated with line numbers. --- *text* starts on line number *start_line* with a beginning fold level of *start_level* --- in the buffer. --- @param lexer The lexer to fold text with. --- @param text The text in the buffer to fold. --- @param start_line The line number *text* starts on, counting from 1. --- @param start_level The fold level *text* starts on. --- @return table of fold levels associated with line numbers. -function M.fold(lexer, text, start_line, start_level) - local folds = {} - if text == '' then return folds end - local fold = M.property_int['fold'] > 0 - local FOLD_BASE = M.FOLD_BASE or 0x400 - local FOLD_HEADER, FOLD_BLANK = M.FOLD_HEADER or 0x2000, M.FOLD_BLANK or 0x1000 - if M._standalone then M._text, M.line_state = text, {} end - if fold and lexer._fold_points then - local lines = {} - for p, l in (text .. 
'\n'):gmatch('()(.-)\r?\n') do lines[#lines + 1] = {p, l} end - local fold_zero_sum_lines = M.property_int['fold.scintillua.on.zero.sum.lines'] > 0 - local fold_compact = M.property_int['fold.scintillua.compact'] > 0 - local fold_points = lexer._fold_points - local fold_point_symbols = fold_points._symbols - local style_at, fold_level = M.style_at, M.fold_level - local line_num, prev_level = start_line, start_level - local current_level = prev_level - for _, captures in ipairs(lines) do - local pos, line = captures[1], captures[2] - if line ~= '' then - if lexer._case_insensitive_fold_points then line = line:lower() end - local ranges = {} - local function is_valid_range(s, e) - if not s or not e then return false end - for i = 1, #ranges - 1, 2 do - local range_s, range_e = ranges[i], ranges[i + 1] - if s >= range_s and s <= range_e or e >= range_s and e <= range_e then - return false - end - end - ranges[#ranges + 1] = s - ranges[#ranges + 1] = e - return true - end - local level_decreased = false - for _, symbol in ipairs(fold_point_symbols) do - local word = not symbol:find('[^%w_]') - local s, e = line:find(symbol, 1, true) - while is_valid_range(s, e) do - -- if not word or line:find('^%f[%w_]' .. symbol .. '%f[^%w_]', s) then - local word_before = s > 1 and line:find('^[%w_]', s - 1) - local word_after = line:find('^[%w_]', e + 1) - if not word or not (word_before or word_after) then - local style_name = style_at[pos + s - 1] - local symbols = fold_points[style_name] - if not symbols and style_name:find('%.') then - symbols = fold_points[style_name:match('^[^.]+')] - end - local level = symbols and symbols[symbol] - if type(level) == 'function' then - level = level(text, pos, line, s, symbol) - end - if type(level) == 'number' then - current_level = current_level + level - if level < 0 and current_level < prev_level then - -- Potential zero-sum line. If the level were to go back up on the same line, - -- the line may be marked as a fold header. 
- level_decreased = true - end - end - end - s, e = line:find(symbol, s + 1, true) - end - end - folds[line_num] = prev_level - if current_level > prev_level then - folds[line_num] = prev_level + FOLD_HEADER - elseif level_decreased and current_level == prev_level and fold_zero_sum_lines then - if line_num > start_line then - folds[line_num] = prev_level - 1 + FOLD_HEADER - else - -- Typing within a zero-sum line. - local level = fold_level[line_num] - 1 - if level > FOLD_HEADER then level = level - FOLD_HEADER end - if level > FOLD_BLANK then level = level - FOLD_BLANK end - folds[line_num] = level + FOLD_HEADER - current_level = current_level + 1 - end - end - if current_level < FOLD_BASE then current_level = FOLD_BASE end - prev_level = current_level - else - folds[line_num] = prev_level + (fold_compact and FOLD_BLANK or 0) - end - line_num = line_num + 1 - end - elseif fold and - (lexer._fold_by_indentation or M.property_int['fold.scintillua.by.indentation'] > 0) then - -- Indentation based folding. - -- Calculate indentation per line. - local indentation = {} - for indent, line in (text .. '\n'):gmatch('([\t ]*)([^\r\n]*)\r?\n') do - indentation[#indentation + 1] = line ~= '' and #indent - end - -- Find the first non-blank line before start_line. If the current line is indented, make - -- that previous line a header and update the levels of any blank lines inbetween. If the - -- current line is blank, match the level of the previous non-blank line. 
- local current_level = start_level - for i = start_line, 1, -1 do - local level = M.fold_level[i] - if level >= FOLD_HEADER then level = level - FOLD_HEADER end - if level < FOLD_BLANK then - local indent = M.indent_amount[i] - if indentation[1] and indentation[1] > indent then - folds[i] = FOLD_BASE + indent + FOLD_HEADER - for j = i + 1, start_line - 1 do folds[j] = start_level + FOLD_BLANK end - elseif not indentation[1] then - current_level = FOLD_BASE + indent - end - break - end - end - -- Iterate over lines, setting fold numbers and fold flags. - for i = 1, #indentation do - if indentation[i] then - current_level = FOLD_BASE + indentation[i] - folds[start_line + i - 1] = current_level - for j = i + 1, #indentation do - if indentation[j] then - if FOLD_BASE + indentation[j] > current_level then - folds[start_line + i - 1] = current_level + FOLD_HEADER - current_level = FOLD_BASE + indentation[j] -- for any blanks below - end - break - end - end - else - folds[start_line + i - 1] = current_level + FOLD_BLANK - end - end - else - -- No folding, reset fold levels if necessary. - local current_line = start_line - for _ in text:gmatch('\r?\n') do - folds[current_line] = start_level - current_line = current_line + 1 - end - end - return folds -end - ---- Creates a returns a new lexer with the given name. --- @param name The lexer's name. --- @param opts Table of lexer options. Options currently supported: --- - `lex_by_line`: Whether or not the lexer only processes whole lines of text (instead of --- arbitrary chunks of text) at a time. Line lexers cannot look ahead to subsequent lines. --- The default value is `false`. --- - `fold_by_indentation`: Whether or not the lexer does not define any fold points and that --- fold points should be calculated based on changes in line indentation. The default value --- is `false`. --- - `case_insensitive_fold_points`: Whether or not fold points added via --- `lexer.add_fold_point()` ignore case. The default value is `false`. 
--- - `no_user_word_lists`: Does not automatically allocate word lists that can be set by --- users. This should really only be set by non-programming languages like markup languages. --- - `inherit`: Lexer to inherit from. The default value is `nil`. --- @usage lexer.new('rhtml', {inherit = lexer.load('html')}) -function M.new(name, opts) - local lexer = setmetatable({ - _name = assert(name, 'lexer name expected'), _lex_by_line = opts and opts['lex_by_line'], - _fold_by_indentation = opts and opts['fold_by_indentation'], - _case_insensitive_fold_points = opts and opts['case_insensitive_fold_points'], - _no_user_word_lists = opts and opts['no_user_word_lists'], _lexer = opts and opts['inherit'] - }, { - __index = { - tag = M.tag, word_match = M.word_match, set_word_list = M.set_word_list, - add_rule = M.add_rule, modify_rule = M.modify_rule, get_rule = M.get_rule, - add_fold_point = M.add_fold_point, embed = M.embed, lex = M.lex, fold = M.fold, -- - add_style = function() end -- legacy - } - }) - - -- Add initial whitespace rule. - -- Use a unique whitespace tag name since embedded lexing relies on these unique names. - lexer:add_rule('whitespace', lexer:tag('whitespace.' .. name, M.space^1)) - - return lexer -end - ---- Creates a substitute for some Scintilla tables and functions that Scintillua depends on --- when using it as a standalone module. 
-local function initialize_standalone_library() - M.property = setmetatable({['scintillua.lexers'] = package.path:gsub('/%?%.lua', '/lexers')}, { - __index = function() return '' end, __newindex = function(t, k, v) rawset(t, k, tostring(v)) end - }) - - M.line_from_position = function(pos) - local line = 1 - for s in M._text:gmatch('[^\n]*()') do - if pos <= s then return line end - line = line + 1 - end - return line - 1 -- should not get to here - end - - M.indent_amount = setmetatable({}, { - __index = function(_, line) - local current_line = 1 - for s in M._text:gmatch('()[^\n]*') do - if current_line == line then - return #M._text:match('^[ \t]*', s):gsub('\t', string.rep(' ', 8)) - end - current_line = current_line + 1 - end - end - }) - - M._standalone = true -end - ---- Searches for the given *name* in the given *path*. --- This is a safe implementation of Lua 5.2's `package.searchpath()` function that does not --- require the package module to be loaded. -local function searchpath(name, path) - local tried = {} - for part in path:gmatch('[^;]+') do - local filename = part:gsub('%?', name) - local ok, errmsg = loadfile(filename) - if ok or not errmsg:find('cannot open') then return filename end - tried[#tried + 1] = string.format("no file '%s'", filename) - end - return nil, table.concat(tried, '\n') -end - ---- Initializes or loads and then returns the lexer of string name *name*. --- Scintilla calls this function in order to load a lexer. Parent lexers also call this function --- in order to load child lexers and vice-versa. The user calls this function in order to load --- a lexer when using Scintillua as a Lua library. --- @param name The name of the lexing language. --- @param[opt] alt_name Optional alternate name of the lexing language. This is useful for --- embedding the same child lexer with multiple sets of start and end tags. 
--- @return lexer object -function M.load(name, alt_name) - assert(name, 'no lexer given') - if not M.property then initialize_standalone_library() end - if not M.property_int then - -- Separate from initialize_standalone_library() so applications that choose to define - -- M.property do not also have to define this. - M.property_int = setmetatable({}, { - __index = function(t, k) return tonumber(M.property[k]) or 0 end, - __newindex = function() error('read-only property') end - }) - end - - -- Load the language lexer with its rules, tags, etc. - local path = M.property['scintillua.lexers']:gsub(';', '/?.lua;') .. '/?.lua' - local ro_lexer = setmetatable({ - WHITESPACE = 'whitespace.' .. (alt_name or name) -- legacy - }, {__index = M}) - local env = { - 'assert', 'error', 'ipairs', 'math', 'next', 'pairs', 'print', 'select', 'string', 'table', - 'tonumber', 'tostring', 'type', 'utf8', '_VERSION', lexer = ro_lexer, lpeg = lpeg, -- - require = function() return ro_lexer end -- legacy - } - for _, name in ipairs(env) do env[name] = _G[name] end - local lexer = assert(loadfile(assert(searchpath(name, path)), 't', env))(alt_name or name) - assert(lexer, string.format("'%s.lua' did not return a lexer", name)) - - -- If the lexer is a proxy or a child that embedded itself, set the parent to be the main - -- lexer. Keep a reference to the old parent name since embedded child start and end rules - -- reference and use that name. - if lexer._lexer then - lexer = lexer._lexer - lexer._parent_name, lexer._name = lexer._name, alt_name or name - end - - M.property['scintillua.comment.' .. (alt_name or name)] = M.property['scintillua.comment'] - - return lexer -end - ---- Returns a list of all known lexer names. --- This function is not available to lexers and requires the LuaFileSystem (`lfs`) module to --- be available. --- @param[opt] path Optional ';'-delimited list of directories to search for lexers in. The --- default value is Scintillua's configured lexer path. 
--- @return lexer name list -function M.names(path) - local lfs = require('lfs') - if not path then path = M.property and M.property['scintillua.lexers'] end - if not path or path == '' then - for part in package.path:gmatch('[^;]+') do - local dir = part:match('^(.-[/\\]?lexers)[/\\]%?%.lua$') - if dir then - path = dir - break - end - end - end - local lexers = {} - for dir in assert(path, 'lexer path not configured or found'):gmatch('[^;]+') do - if lfs.attributes(dir, 'mode') == 'directory' then - for file in lfs.dir(dir) do - local name = file:match('^(.+)%.lua$') - if name and name ~= 'lexer' and not lexers[name] then - lexers[#lexers + 1], lexers[name] = name, true - end - end - end - end - table.sort(lexers) - return lexers -end - ---- Map of file extensions, without the '.' prefix, to their associated lexer names. --- This map has precedence over Scintillua's built-in map. --- @see detect -M.detect_extensions = {} - ---- Map of line patterns to their associated lexer names. --- These are Lua string patterns, not LPeg patterns. --- This map has precedence over Scintillua's built-in map. --- @see detect -M.detect_patterns = {} - ---- Returns the name of the lexer often associated with filename *filename* and/or content --- line *line*. --- @param[opt] filename Optional string filename. The default value is read from the --- 'lexer.scintillua.filename' property. --- @param[opt] line Optional string first content line, such as a shebang line. The default --- value is read from the 'lexer.scintillua.line' property. --- @return string lexer name to pass to `load()`, or `nil` if none was detected --- @see detect_extensions --- @see detect_patterns -function M.detect(filename, line) - if not filename then filename = M.property and M.property['lexer.scintillua.filename'] or '' end - if not line then line = M.property and M.property['lexer.scintillua.line'] or '' end - - -- Locally scoped in order to avoid persistence in memory. 
- local extensions = { - as = 'actionscript', asc = 'actionscript', -- - adb = 'ada', ads = 'ada', -- - g = 'antlr', g4 = 'antlr', -- - ans = 'apdl', inp = 'apdl', mac = 'apdl', -- - apl = 'apl', -- - applescript = 'applescript', -- - asm = 'asm', ASM = 'asm', s = 'asm', S = 'asm', -- - asa = 'asp', asp = 'asp', hta = 'asp', -- - ahk = 'autohotkey', -- - au3 = 'autoit', a3x = 'autoit', -- - awk = 'awk', -- - bat = 'batch', cmd = 'batch', -- - bib = 'bibtex', -- - boo = 'boo', -- - cs = 'csharp', -- - c = 'ansi_c', C = 'ansi_c', cc = 'cpp', cpp = 'cpp', cxx = 'cpp', ['c++'] = 'cpp', h = 'cpp', - hh = 'cpp', hpp = 'cpp', hxx = 'cpp', ['h++'] = 'cpp', -- - ck = 'chuck', -- - clj = 'clojure', cljs = 'clojure', cljc = 'clojure', edn = 'clojure', -- - ['CMakeLists.txt'] = 'cmake', cmake = 'cmake', ['cmake.in'] = 'cmake', ctest = 'cmake', - ['ctest.in'] = 'cmake', -- - coffee = 'coffeescript', -- - cr = 'crystal', -- - css = 'css', -- - cu = 'cuda', cuh = 'cuda', -- - d = 'dmd', di = 'dmd', -- - dart = 'dart', -- - desktop = 'desktop', -- - diff = 'diff', patch = 'diff', -- - Dockerfile = 'dockerfile', -- - dot = 'dot', -- - e = 'eiffel', eif = 'eiffel', -- - ex = 'elixir', exs = 'elixir', -- - elm = 'elm', -- - erl = 'erlang', hrl = 'erlang', -- - fs = 'fsharp', -- - fan = 'fantom', -- - dsp = 'faust', -- - fnl = 'fennel', -- - fish = 'fish', -- - forth = 'forth', frt = 'forth', -- - f = 'fortran', ['for'] = 'fortran', ftn = 'fortran', fpp = 'fortran', f77 = 'fortran', - f90 = 'fortran', f95 = 'fortran', f03 = 'fortran', f08 = 'fortran', -- - fstab = 'fstab', -- - gd = 'gap', gi = 'gap', gap = 'gap', -- - gmi = 'gemini', -- - po = 'gettext', pot = 'gettext', -- - feature = 'gherkin', -- - gleam = 'gleam', -- - glslf = 'glsl', glslv = 'glsl', -- - dem = 'gnuplot', plt = 'gnuplot', -- - go = 'go', -- - groovy = 'groovy', gvy = 'groovy', -- - gtkrc = 'gtkrc', -- - ha = 'hare', -- - hs = 'haskell', -- - htm = 'html', html = 'html', shtm = 'html', shtml = 'html', xhtml = 
'html', vue = 'html', -- - icn = 'icon', -- - idl = 'idl', odl = 'idl', -- - ni = 'inform', -- - cfg = 'ini', cnf = 'ini', inf = 'ini', ini = 'ini', reg = 'ini', -- - io = 'io_lang', -- - bsh = 'java', java = 'java', -- - js = 'javascript', jsfl = 'javascript', -- - jq = 'jq', -- - json = 'json', -- - jsp = 'jsp', -- - jl = 'julia', -- - bbl = 'latex', dtx = 'latex', ins = 'latex', ltx = 'latex', tex = 'latex', sty = 'latex', -- - ledger = 'ledger', journal = 'ledger', -- - less = 'less', -- - lily = 'lilypond', ly = 'lilypond', -- - cl = 'lisp', el = 'lisp', lisp = 'lisp', lsp = 'lisp', -- - litcoffee = 'litcoffee', -- - lgt = 'logtalk', -- - lua = 'lua', -- - GNUmakefile = 'makefile', iface = 'makefile', mak = 'makefile', makefile = 'makefile', - Makefile = 'makefile', -- - md = 'markdown', -- - ['meson.build'] = 'meson', -- - moon = 'moonscript', -- - myr = 'myrddin', -- - n = 'nemerle', -- - link = 'networkd', network = 'networkd', netdev = 'networkd', -- - nim = 'nim', -- - nsh = 'nsis', nsi = 'nsis', nsis = 'nsis', -- - obs = 'objeck', -- - m = 'objective_c', mm = 'objective_c', objc = 'objective_c', -- - caml = 'caml', ml = 'caml', mli = 'caml', mll = 'caml', mly = 'caml', -- - dpk = 'pascal', dpr = 'pascal', p = 'pascal', pas = 'pascal', -- - al = 'perl', perl = 'perl', pl = 'perl', pm = 'perl', pod = 'perl', -- - inc = 'php', php = 'php', php3 = 'php', php4 = 'php', phtml = 'php', -- - p8 = 'pico8', -- - pike = 'pike', pmod = 'pike', -- - PKGBUILD = 'pkgbuild', -- - pony = 'pony', -- - eps = 'ps', ps = 'ps', -- - ps1 = 'powershell', -- - prolog = 'prolog', -- - props = 'props', properties = 'props', -- - proto = 'protobuf', -- - pure = 'pure', -- - sc = 'python', py = 'python', pyw = 'python', -- - R = 'rstats', Rout = 'rstats', Rhistory = 'rstats', Rt = 'rstats', ['Rout.save'] = 'rstats', - ['Rout.fail'] = 'rstats', -- - re = 'reason', -- - r = 'rebol', reb = 'rebol', -- - rst = 'rest', -- - orx = 'rexx', rex = 'rexx', -- - erb = 'rhtml', rhtml = 'rhtml', 
-- - rsc = 'routeros', -- - spec = 'rpmspec', -- - Rakefile = 'ruby', rake = 'ruby', rb = 'ruby', rbw = 'ruby', -- - rs = 'rust', -- - sass = 'sass', scss = 'sass', -- - scala = 'scala', -- - sch = 'scheme', scm = 'scheme', -- - bash = 'bash', bashrc = 'bash', bash_profile = 'bash', configure = 'bash', csh = 'bash', - ksh = 'bash', mksh = 'bash', sh = 'bash', zsh = 'bash', -- - changes = 'smalltalk', st = 'smalltalk', sources = 'smalltalk', -- - sml = 'sml', fun = 'sml', sig = 'sml', -- - sno = 'snobol4', SNO = 'snobol4', -- - spin = 'spin', -- - ddl = 'sql', sql = 'sql', -- - automount = 'systemd', device = 'systemd', mount = 'systemd', path = 'systemd', - scope = 'systemd', service = 'systemd', slice = 'systemd', socket = 'systemd', swap = 'systemd', - target = 'systemd', timer = 'systemd', -- - taskpaper = 'taskpaper', -- - tcl = 'tcl', tk = 'tcl', -- - texi = 'texinfo', -- - toml = 'toml', -- - ['1'] = 'troff', ['2'] = 'troff', ['3'] = 'troff', ['4'] = 'troff', ['5'] = 'troff', - ['6'] = 'troff', ['7'] = 'troff', ['8'] = 'troff', ['9'] = 'troff', ['1x'] = 'troff', - ['2x'] = 'troff', ['3x'] = 'troff', ['4x'] = 'troff', ['5x'] = 'troff', ['6x'] = 'troff', - ['7x'] = 'troff', ['8x'] = 'troff', ['9x'] = 'troff', -- - t2t = 'txt2tags', -- - ts = 'typescript', -- - vala = 'vala', -- - vcf = 'vcard', vcard = 'vcard', -- - v = 'verilog', ver = 'verilog', -- - vh = 'vhdl', vhd = 'vhdl', vhdl = 'vhdl', -- - bas = 'vb', cls = 'vb', ctl = 'vb', dob = 'vb', dsm = 'vb', dsr = 'vb', frm = 'vb', pag = 'vb', - vb = 'vb', vba = 'vb', vbs = 'vb', -- - wsf = 'wsf', -- - dtd = 'xml', svg = 'xml', xml = 'xml', xsd = 'xml', xsl = 'xml', xslt = 'xml', xul = 'xml', -- - xs = 'xs', xsin = 'xs', xsrc = 'xs', -- - xtend = 'xtend', -- - yaml = 'yaml', yml = 'yaml', -- - zig = 'zig' - } - local patterns = { - ['^#!.+[/ ][gm]?awk'] = 'awk', ['^#!.+[/ ]lua'] = 'lua', ['^#!.+[/ ]octave'] = 'matlab', - ['^#!.+[/ ]perl'] = 'perl', ['^#!.+[/ ]php'] = 'php', ['^#!.+[/ ]python'] = 'python', - 
['^#!.+[/ ]ruby'] = 'ruby', ['^#!.+[/ ]bash'] = 'bash', ['^#!.+/m?ksh'] = 'bash', - ['^#!.+/sh'] = 'bash', ['^%s*class%s+%S+%s*<%s*ApplicationController'] = 'rails', - ['^%s*class%s+%S+%s*<%s*ActionController::Base'] = 'rails', - ['^%s*class%s+%S+%s*<%s*ActiveRecord::Base'] = 'rails', - ['^%s*class%s+%S+%s*<%s*ActiveRecord::Migration'] = 'rails', ['^%s*<%?xml%s'] = 'xml', - ['^#cloud%-config'] = 'yaml' - } - - for patt, name in pairs(M.detect_patterns) do if line:find(patt) then return name end end - for patt, name in pairs(patterns) do if line:find(patt) then return name end end - local name, ext = filename:match('[^/\\]+$'), filename:match('[^.]*$') - return M.detect_extensions[name] or extensions[name] or M.detect_extensions[ext] or - extensions[ext] -end - --- The following are utility functions lexers will have access to. - --- Common patterns. - ---- A pattern that matches any single character. -M.any = P(1) ---- A pattern that matches any alphabetic character ('A'-'Z', 'a'-'z'). -M.alpha = R('AZ', 'az') ---- A pattern that matches any digit ('0'-'9'). -M.digit = R('09') ---- A pattern that matches any alphanumeric character ('A'-'Z', 'a'-'z', '0'-'9'). -M.alnum = R('AZ', 'az', '09') ---- A pattern that matches any lower case character ('a'-'z'). -M.lower = R('az') ---- A pattern that matches any upper case character ('A'-'Z'). -M.upper = R('AZ') ---- A pattern that matches any hexadecimal digit ('0'-'9', 'A'-'F', 'a'-'f'). -M.xdigit = R('09', 'AF', 'af') ---- A pattern that matches any graphical character ('!' to '~'). -M.graph = R('!~') ---- A pattern that matches any punctuation character ('!' to '/', ':' to '@', '[' to ''', '{' --- to '~'). -M.punct = R('!/', ':@', '[\'', '{~') ---- A pattern that matches any whitespace character ('\t', '\v', '\f', '\n', '\r', space). -M.space = S('\t\v\f\n\r ') - ---- A pattern that matches a sequence of end of line characters. -M.newline = P('\r')^-1 * '\n' ---- A pattern that matches any single, non-newline character. 
-M.nonnewline = 1 - M.newline - ---- Returns a pattern that matches a decimal number, whose digits may be separated by character --- *c*. -function M.dec_num_(c) return M.digit * (P(c)^-1 * M.digit)^0 end ---- Returns a pattern that matches a hexadecimal number, whose digits may be separated by --- character *c*. -function M.hex_num_(c) return '0' * S('xX') * (P(c)^-1 * M.xdigit)^1 end ---- Returns a pattern that matches an octal number, whose digits may be separated by character *c*. -function M.oct_num_(c) return '0' * (P(c)^-1 * R('07'))^1 * -M.xdigit end ---- Returns a pattern that matches a binary number, whose digits may be separated by character *c*. -function M.bin_num_(c) return '0' * S('bB') * (P(c)^-1 * S('01'))^1 * -M.xdigit end ---- Returns a pattern that matches either a decimal, hexadecimal, octal, or binary number, --- whose digits may be separated by character *c*. -function M.integer_(c) - return S('+-')^-1 * (M.hex_num_(c) + M.bin_num_(c) + M.oct_num_(c) + M.dec_num_(c)) -end -local function exp_(c) return S('eE') * S('+-')^-1 * M.digit * (P(c)^-1 * M.digit)^0 end ---- Returns a pattern that matches a floating point number, whose digits may be separated by --- character *c*. -function M.float_(c) - return S('+-')^-1 * - ((M.dec_num_(c)^-1 * '.' * M.dec_num_(c) + M.dec_num_(c) * '.' * M.dec_num_(c)^-1 * -P('.')) * - exp_(c)^-1 + (M.dec_num_(c) * exp_(c))) -end ---- Returns a pattern that matches a typical number, either a floating point, decimal, hexadecimal, --- octal, or binary number, and whose digits may be separated by character *c*. -function M.number_(c) return M.float_(c) + M.integer_(c) end - ---- A pattern that matches a decimal number. -M.dec_num = M.dec_num_(false) ---- A pattern that matches a hexadecimal number. -M.hex_num = M.hex_num_(false) ---- A pattern that matches an octal number. -M.oct_num = M.oct_num_(false) ---- A pattern that matches a binary number. 
-M.bin_num = M.bin_num_(false) ---- A pattern that matches either a decimal, hexadecimal, octal, or binary number. -M.integer = M.integer_(false) ---- A pattern that matches a floating point number. -M.float = M.float_(false) ---- A pattern that matches a typical number, either a floating point, decimal, hexadecimal, --- octal, or binary number. -M.number = M.number_(false) - ---- A pattern that matches a typical word. Words begin with a letter or underscore and consist --- of alphanumeric and underscore characters. -M.word = (M.alpha + '_') * (M.alnum + '_')^0 - ---- Creates and returns a pattern that matches from string or pattern *prefix* until the end of --- the line. --- *escape* indicates whether the end of the line can be escaped with a '\' character. --- @param[opt] prefix Optional string or pattern prefix to start matching at. The default value --- is any non-newline character. --- @param[opt] escape Optional flag indicating whether or not newlines can be escaped by a '\' --- character. The default value is `false`. --- @return pattern --- @usage local line_comment = lexer.to_eol('//') --- @usage local line_comment = lexer.to_eol(S('#;')) -function M.to_eol(prefix, escape) - return (prefix or M.nonnewline) * - (not escape and M.nonnewline or 1 - (M.newline + '\\') + '\\' * M.any)^0 -end - ---- Creates and returns a pattern that matches a range of text bounded by strings or patterns *s* --- and *e*. --- This is a convenience function for matching more complicated ranges like strings with escape --- characters, balanced parentheses, and block comments (nested or not). *e* is optional and --- defaults to *s*. *single_line* indicates whether or not the range must be on a single line; --- *escapes* indicates whether or not to allow '\' as an escape character; and *balanced* --- indicates whether or not to handle balanced ranges like parentheses, and requires *s* and *e* --- to be different. --- @param s String or pattern start of a range. 
--- @param[opt] e Optional string or pattern end of a range. The default value is *s*. --- @param[opt] single_line Optional flag indicating whether or not the range must be on a single --- line. The default value is `false`. --- @param[opt] escapes Optional flag indicating whether or not the range end may be escaped --- by a '\' character. The default value is `false` unless *s* and *e* are identical, --- single-character strings. In that case, the default value is `true`. --- @param[opt] balanced Optional flag indicating whether or not to match a balanced range, --- like the "%b" Lua pattern. This flag only applies if *s* and *e* are different. --- @return pattern --- @usage local dq_str_escapes = lexer.range('"') --- @usage local dq_str_noescapes = lexer.range('"', false, false) --- @usage local unbalanced_parens = lexer.range('(', ')') --- @usage local balanced_parens = lexer.range('(', ')', false, false, true) -function M.range(s, e, single_line, escapes, balanced) - if type(e) ~= 'string' and type(e) ~= 'userdata' then - e, single_line, escapes, balanced = s, e, single_line, escapes - end - local any = M.any - e - if single_line then any = any - '\n' end - if balanced then any = any - s end - -- Only allow escapes by default for ranges with identical, single-character string delimiters. - if escapes == nil then escapes = type(s) == 'string' and #s == 1 and s == e end - if escapes then any = any - '\\' + '\\' * M.any end - if balanced and s ~= e then return P{s * (any + V(1))^0 * P(e)^-1} end - return s * any^0 * P(e)^-1 -end - ---- Creates and returns a pattern that matches pattern *patt* only when it comes after one of --- the characters in string *set* (or when there are no characters behind *patt*), skipping --- over any characters in string *skip*, which is whitespace by default. --- @param set String character set like one passed to `lpeg.S()`. --- @param patt The LPeg pattern to match after a set character. 
--- @param skip String character set to skip over. The default value is ' \t\r\n\v\f' (whitespace). --- @usage local regex = lexer.after_set('+-*!%^&|=,([{', lexer.range('/')) -function M.after_set(set, patt, skip) - if not skip then skip = ' \t\r\n\v\f' end - local set_chars, skip_chars = {}, {} - -- Note: cannot use utf8.codes() because Lua 5.1 is still supported. - for char in set:gmatch('.') do set_chars[string.byte(char)] = true end - for char in skip:gmatch('.') do skip_chars[string.byte(char)] = true end - return (B(S(set)) + -B(1)) * patt + Cmt(C(patt), function(input, index, match, ...) - local pos = index - #match - if #skip > 0 then while pos > 1 and skip_chars[input:byte(pos - 1)] do pos = pos - 1 end end - if pos == 1 or set_chars[input:byte(pos - 1)] then return index, ... end - return nil - end) -end - ---- Creates and returns a pattern that matches pattern *patt* only at the beginning of a line, --- or after any line indentation if *allow_indent* is `true`. --- @param patt The LPeg pattern to match on the beginning of a line. --- @param allow_indent Whether or not to consider line indentation as the start of a line. The --- default value is `false`. --- @return pattern --- @usage local preproc = lex:tag(lexer.PREPROCESSOR, lexer.starts_line(lexer.to_eol('#'))) -function M.starts_line(patt, allow_indent) - return M.after_set('\r\n\v\f', patt, allow_indent and ' \t' or '') -end - -M.colors = {} -- legacy -M.styles = setmetatable({}, { -- legacy - __index = function() return setmetatable({}, {__concat = function() return nil end}) end, - __newindex = function() end -}) -M.property_expanded = setmetatable({}, {__index = function() return '' end}) -- legacy - --- Legacy function for creates and returns a token pattern with token name *name* and pattern --- *patt*. --- Use `tag()` instead. --- @param name The name of token. --- @param patt The LPeg pattern associated with the token. 
--- @return pattern --- @usage local number = token(lexer.NUMBER, lexer.number) --- @usage local addition = token('addition', '+' * lexer.word) -function M.token(name, patt) return Cc(name) * (P(patt) / 0) * Cp() end - --- Legacy function that creates and returns a pattern that verifies the first non-whitespace --- character behind the current match position is in string set *s*. --- @param s String character set like one passed to `lpeg.S()`. --- @return pattern --- @usage local regex = #P('/') * lexer.last_char_includes('+-*!%^&|=,([{') * lexer.range('/') -function M.last_char_includes(s) return M.after_set(s, true) end - -function M.fold_consecutive_lines() end -- legacy - --- The functions and fields below were defined in C. - ---- Table of fold level bit-masks for line numbers starting from 1. (Read-only) --- Fold level masks are composed of an integer level combined with any of the following bits: --- --- - `lexer.FOLD_BASE` --- The initial fold level. --- - `lexer.FOLD_BLANK` --- The line is blank. --- - `lexer.FOLD_HEADER` --- The line is a header, or fold point. --- @table fold_level - ---- Table of indentation amounts in character columns, for line numbers starting from --- 1. (Read-only) --- @table indent_amount - ---- Table of integer line states for line numbers starting from 1. --- Line states can be used by lexers for keeping track of persistent states. For example, --- the output lexer uses this to mark lines that have warnings or errors. --- @table line_state - ---- Map of key-value string pairs. --- @table property - ---- Map of key-value pairs with values interpreted as numbers, or `0` if not found. (Read-only) --- @table property_int - ---- Table of style names at positions in the buffer starting from 1. (Read-only) --- @table style_at - ---- Returns the line number (starting from 1) of the line that contains position *pos*, which --- starts from 1. --- @param pos The position to get the line number of. 
--- @return number --- @function line_from_position - -return M diff --git a/share/vis/lexers/lilypond.lua b/share/vis/lexers/lilypond.lua @@ -1,32 +0,0 @@ --- Copyright 2006-2024 Robert Gieseke. See LICENSE. --- Lilypond LPeg lexer. --- TODO Embed Scheme; Notes?, Numbers? - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('lilypond') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords, commands. -lex:add_rule('keyword', token(lexer.KEYWORD, '\\' * lexer.word)) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. -lex:add_rule('string', token(lexer.STRING, lexer.range('"', false, false))) - --- Comments. --- TODO: block comment. -lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('%'))) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S("{}'~<>|"))) - -lexer.property['scintillua.comment'] = '%' - -return lex diff --git a/share/vis/lexers/lisp.lua b/share/vis/lexers/lisp.lua @@ -1,59 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Lisp LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('lisp') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. 
-lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'defclass', 'defconstant', 'defgeneric', 'define-compiler-macro', 'define-condition', - 'define-method-combination', 'define-modify-macro', 'define-setf-expander', 'define-symbol-macro', - 'defmacro', 'defmethod', 'defpackage', 'defparameter', 'defsetf', 'defstruct', 'deftype', 'defun', - 'defvar', -- - 'abort', 'assert', 'block', 'break', 'case', 'catch', 'ccase', 'cerror', 'cond', 'ctypecase', - 'declaim', 'declare', 'do', 'do*', 'do-all-symbols', 'do-external-symbols', 'do-symbols', - 'dolist', 'dotimes', 'ecase', 'error', 'etypecase', 'eval-when', 'flet', 'handler-bind', - 'handler-case', 'if', 'ignore-errors', 'in-package', 'labels', 'lambda', 'let', 'let*', 'locally', - 'loop', 'macrolet', 'multiple-value-bind', 'proclaim', 'prog', 'prog*', 'prog1', 'prog2', 'progn', - 'progv', 'provide', 'require', 'restart-bind', 'restart-case', 'restart-name', 'return', - 'return-from', 'signal', 'symbol-macrolet', 'tagbody', 'the', 'throw', 'typecase', 'unless', - 'unwind-protect', 'when', 'with-accessors', 'with-compilation-unit', 'with-condition-restarts', - 'with-hash-table-iterator', 'with-input-from-string', 'with-open-file', 'with-open-stream', - 'with-output-to-string', 'with-package-iterator', 'with-simple-restart', 'with-slots', - 'with-standard-io-syntax', -- - 't', 'nil' -})) - --- Identifiers. -local word = lexer.alpha * (lexer.alnum + S('_-'))^0 -lex:add_rule('identifier', token(lexer.IDENTIFIER, word)) - --- Strings. -lex:add_rule('string', token(lexer.STRING, "'" * word + lexer.range('"') + '#\\' * lexer.any)) - --- Comments. -local line_comment = lexer.to_eol(';') -local block_comment = lexer.range('#|', '|#') -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, P('-')^-1 * lexer.digit^1 * (S('./') * lexer.digit^1)^-1)) - --- Operators. 
-lex:add_rule('operator', token(lexer.OPERATOR, S('<>=*/+-`@%()'))) - --- Fold points. -lex:add_fold_point(lexer.OPERATOR, '(', ')') -lex:add_fold_point(lexer.OPERATOR, '[', ']') -lex:add_fold_point(lexer.OPERATOR, '{', '}') -lex:add_fold_point(lexer.COMMENT, '#|', '|#') - -lexer.property['scintillua.comment'] = ';' - -return lex diff --git a/share/vis/lexers/litcoffee.lua b/share/vis/lexers/litcoffee.lua @@ -1,21 +0,0 @@ --- Copyright 2006-2024 Robert Gieseke. See LICENSE. --- Literate CoffeeScript LPeg lexer. --- http://coffeescript.org/#literate - -local lexer = require('lexer') -local token = lexer.token -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('litcoffee', {inherit = lexer.load('markdown')}) - --- Embedded CoffeeScript. -local coffeescript = lexer.load('coffeescript') -local coffee_start_rule = token(lexer.EMBEDDED, (P(' ')^4 + P('\t'))) -local coffee_end_rule = token(lexer.EMBEDDED, lexer.newline) -lex:embed(coffeescript, coffee_start_rule, coffee_end_rule) - --- Use 'markdown_whitespace' instead of lexer.WHITESPACE since the latter would expand to --- 'litcoffee_whitespace'. -lex:modify_rule('whitespace', token('markdown_whitespace', S(' \t')^1 + S('\r\n')^1)) - -return lex diff --git a/share/vis/lexers/logtalk.lua b/share/vis/lexers/logtalk.lua @@ -1,64 +0,0 @@ --- Copyright © 2017-2024 Michael T. Richter <ttmrichter@gmail.com>. See LICENSE. --- Logtalk LPeg lexer. 
- -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('logtalk', {inherit = lexer.load('prolog')}) - --- add logtalk keywords to prolog ones -local directives = { - 'set_logtalk_flag', 'object', 'info', 'built_in', 'threaded', 'uses', 'alias', 'use_module', - 'coinductive', 'export', 'reexport', 'public', 'metapredicate', 'mode', 'meta_non_terminal', - 'protected', 'synchronized', 'private', 'module', 'if', 'elif', 'else', 'endif', 'category', - 'protocol', 'end_object', 'end_category', 'end_protocol', 'meta_predicate' -} -local indent = token(lexer.WHITESPACE, lexer.starts_line(S(' \t')^1))^-1 -lex:modify_rule('directive', - (indent * token(lexer.OPERATOR, ':-') * token(lexer.WHITESPACE, S(' \t')^0) * - token(lexer.PREPROCESSOR, word_match(directives)) -) + lex:get_rule('directive')) - --- Whitespace. -lex:modify_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - -local zero_arity_keywords = { - -- extracted from test document in logtalk distribution - 'comment', 'argnames', 'arguments', 'author', 'version', 'date', 'parameters', 'parnames', - 'copyright', 'license', 'remarks', 'see_also', 'as', 'logtalk_make', 'instantiation_error', - 'system_error' -} -local one_plus_arity_keywords = { - -- extracted from test document in logtalk distribution - 'implements', 'imports', 'extends', 'instantiates', 'specializes', 'number_chars', 'number_code', - 'current_category', 'current_object', 'current_protocol', 'create_category', 'create_object', - 'create_protocol', 'abolish_category', 'abolish_object', 'abolish_protocol', 'category_property', - 'object_property', 'protocol_property', 'extends_category', 'extends_object', 'extends_protocol', - 'implements_protocol', 'imports_category', 'instantiates_class', 'specializes_class', - 'complements_object', 'conforms_to_protocol', 'abolish_events', 'current_event', 'define_events', - 'threaded', 'threaded_call', 'threaded_call', 
'threaded_once', 'threaded_ignore', 'threaded_exit', - 'threaded_peek', 'threaded_cancel', 'threaded_wait', 'threaded_notify', 'threaded_engine', - 'threaded_engine_create', 'threaded_engine_destroy', 'threaded_engine_self', - 'threaded_engine_next', 'threaded_engine_next_reified', 'threaded_engine_yield', - 'threaded_engine_post', 'threaded_engine_fetch', 'logtalk_compile', 'logtalk_load', - 'logtalk_library_path', 'logtalk_load_context', 'logtalk_make_target_action', - 'current_logtalk_flag', 'set_logtalk_flag', 'create_logtalk_flag', 'context', 'parameter', 'self', - 'sender', 'this', 'type_error', 'domain_error', 'existence_error', 'permission_error', - 'representation_error', 'evaluation_error', 'resource_error', 'syntax_error', 'bagof', 'findall', - 'forall', 'setof', 'before', 'after', 'forward', 'phrase', 'expand_term', 'expand_goal', - 'term_expansion', 'goal_expansion', 'numbervars', 'put_code', 'put_byte', 'current_op', 'op', - 'ignore', 'repeat', 'number_codes', 'current_prolog_flag', 'set_prolog_flag', 'keysort', 'sort' -} -local keyword = word_match(zero_arity_keywords) + (word_match(one_plus_arity_keywords) * #P('(')) -lex:modify_rule('keyword', token(lexer.KEYWORD, keyword) + lex:get_rule('keyword')) - -local operators = { - -- extracted from test document in logtalk distribution - 'as' -} -lex:modify_rule('operator', token(lexer.OPERATOR, word_match(operators)) + lex:get_rule('operator')) - -lexer.property['scintillua.comment'] = '%' - -return lex diff --git a/share/vis/lexers/lua.lua b/share/vis/lexers/lua.lua @@ -1,144 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Lua LPeg lexer. --- Original written by Peter Odding, 2007/04/04. - -local lexer = lexer -local B, P, S = lpeg.B, lpeg.P, lpeg.S - -local lex = lexer.new(...) - --- Keywords. -lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD))) - --- Functions. 
-local non_field = -B('.') + B('_G.') + B('..') -local builtin_func = lex:word_match(lexer.FUNCTION_BUILTIN) -local lib_func = lex:word_match(lexer.FUNCTION_BUILTIN .. '.library') -local func = lex:tag(lexer.FUNCTION, lexer.word) -local method = B(':') * lex:tag(lexer.FUNCTION_METHOD, lexer.word) -lex:add_rule('function', - method + ((non_field * lex:tag(lexer.FUNCTION_BUILTIN, builtin_func + lib_func)) + func) * - #(lexer.space^0 * S('({\'"'))) - --- Constants. -local builtin_const = lex:word_match(lexer.CONSTANT_BUILTIN) -local lib_const = lex:word_match(lexer.CONSTANT_BUILTIN .. '.library') -lex:add_rule('constant', non_field * lex:tag(lexer.CONSTANT_BUILTIN, builtin_const + lib_const)) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local sq_str = lexer.range("'") -local dq_str = lexer.range('"') -local longstring = lpeg.Cmt('[' * lpeg.C(P('=')^0) * '[', function(input, index, eq) - local _, e = input:find(']' .. eq .. ']', index, true) - return (e or #input) + 1 -end) -lex:add_rule('string', lex:tag(lexer.STRING, sq_str + dq_str) + - lex:tag(lexer.STRING .. '.longstring', longstring)) - --- Comments. -local line_comment = lexer.to_eol('--') -local block_comment = '--' * longstring -lex:add_rule('comment', lex:tag(lexer.COMMENT, block_comment + line_comment)) - --- Numbers. -local lua_integer = P('-')^-1 * (lexer.hex_num + lexer.dec_num) -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.float + lua_integer)) - --- Labels. -lex:add_rule('label', lex:tag(lexer.LABEL, '::' * lexer.word * '::')) - --- Attributes. -lex:add_rule('attribute', lex:tag(lexer.ATTRIBUTE, '<' * lexer.space^0 * - lexer.word_match('const close') * lexer.space^0 * '>')) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, '..' + S('+-*/%^#=<>&|~;:,.{}[]()'))) - --- Fold points. 
-local function fold_longcomment(text, pos, line, s, symbol) - if symbol == '[' then - if line:find('^%[=*%[', s) then return 1 end - elseif symbol == ']' then - if line:find('^%]=*%]', s) then return -1 end - end - return 0 -end -lex:add_fold_point(lexer.KEYWORD, 'if', 'end') -lex:add_fold_point(lexer.KEYWORD, 'do', 'end') -lex:add_fold_point(lexer.KEYWORD, 'function', 'end') -lex:add_fold_point(lexer.KEYWORD, 'repeat', 'until') -lex:add_fold_point(lexer.COMMENT, '[', fold_longcomment) -lex:add_fold_point(lexer.COMMENT, ']', fold_longcomment) -lex:add_fold_point(lexer.FUNCTION .. '.longstring', '[', ']') -lex:add_fold_point(lexer.OPERATOR, '(', ')') -lex:add_fold_point(lexer.OPERATOR, '[', ']') -lex:add_fold_point(lexer.OPERATOR, '{', '}') - --- Word lists. -lex:set_word_list(lexer.KEYWORD, { - 'and', 'break', 'do', 'else', 'elseif', 'end', 'false', 'for', 'function', 'if', 'in', 'local', - 'or', 'nil', 'not', 'repeat', 'return', 'then', 'true', 'until', 'while', -- - 'goto' -- 5.2 -}) - -lex:set_word_list(lexer.FUNCTION_BUILTIN, { - 'assert', 'collectgarbage', 'dofile', 'error', 'getmetatable', 'ipairs', 'load', 'loadfile', - 'next', 'pairs', 'pcall', 'print', 'rawequal', 'rawget', 'rawset', 'require', 'select', - 'setmetatable', 'tonumber', 'tostring', 'type', 'xpcall', -- - 'rawlen', -- 5.2 - 'warn' -- 5.4 -}) - -lex:set_word_list(lexer.FUNCTION_BUILTIN .. 
'.library', { - 'coroutine.create', 'coroutine.resume', 'coroutine.running', 'coroutine.status', 'coroutine.wrap', - 'coroutine.yield', -- - 'coroutine.isyieldable', -- 5.3 - 'coroutine.close', -- 5.4 - 'package.loadlib', -- - 'package.searchpath', -- 5.2 - 'utf8.char', 'utf8.codepoint', 'utf8.codes', 'utf8.len', 'utf8.offset', -- 5.3 - 'string.byte', 'string.char', 'string.dump', 'string.find', 'string.format', 'string.gmatch', - 'string.gsub', 'string.len', 'string.lower', 'string.match', 'string.rep', 'string.reverse', - 'string.sub', 'string.upper', -- - 'string.pack', 'string.packsize', 'string.unpack', -- 5.3 - 'table.concat', 'table.insert', 'table.remove', 'table.sort', -- - 'table.pack', 'table.unpack', -- 5.2 - 'table.move', -- 5.3 - 'math.abs', 'math.acos', 'math.asin', 'math.atan', 'math.ceil', 'math.cos', 'math.deg', - 'math.exp', 'math.floor', 'math.fmod', 'math.log', 'math.max', 'math.min', 'math.modf', - 'math.rad', 'math.random', 'math.randomseed', 'math.sin', 'math.sqrt', 'math.tan', -- - 'math.tointeger', 'math.type', 'math.ult', -- 5.3 - 'io.close', 'io.flush', 'io.input', 'io.lines', 'io.open', 'io.output', 'io.popen', 'io.read', - 'io.tmpfile', 'io.type', 'io.write', -- - 'os.clock', 'os.date', 'os.difftime', 'os.execute', 'os.exit', 'os.getenv', 'os.remove', - 'os.rename', 'os.setlocale', 'os.time', 'os.tmpname', -- - 'debug', 'debug.debug', 'debug.gethook', 'debug.getinfo', 'debug.getlocal', 'debug.getmetatable', - 'debug.getregistry', 'debug.getupvalue', 'debug.sethook', 'debug.setlocal', 'debug.setmetatable', - 'debug.setupvalue', 'debug.traceback', -- - 'debug.getuservalue', 'debug.setuservalue', 'debug.upvalueid', 'debug.upvaluejoin' -- 5.2 -}) - -lex:set_word_list(lexer.CONSTANT_BUILTIN, { - '_G', '_VERSION', -- - '_ENV' -- 5.2 -}) - -lex:set_word_list(lexer.CONSTANT_BUILTIN .. 
'.library', { - 'coroutine', -- - 'package', 'package.cpath', 'package.loaded', 'package.path', 'package.preload', -- - 'package.config', 'package.searchers', -- 5.2 - 'utf8', 'utf8.charpattern', -- 5.3 - 'string', -- - 'table', -- - 'math', 'math.huge', 'math.pi', -- - 'math.maxinteger', 'math.mininteger', -- 5.3 - 'io', 'io.stderr', 'io.stdin', 'io.stdout', -- - 'os' -}) - -lexer.property['scintillua.comment'] = '--' - -return lex diff --git a/share/vis/lexers/makefile.lua b/share/vis/lexers/makefile.lua @@ -1,121 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Makefile LPeg lexer. - -local lexer = lexer -local P, S, B = lpeg.P, lpeg.S, lpeg.B - -local lex = lexer.new(..., {lex_by_line = true}) - --- Function definition. -local word = (lexer.any - lexer.space - S('$:,#=(){}'))^1 -local func_name = lex:tag(lexer.FUNCTION, word) -local ws = lex:get_rule('whitespace') -local eq = lex:tag(lexer.OPERATOR, '=') -lex:add_rule('function_def', - lex:tag(lexer.KEYWORD, lexer.word_match('define')) * ws * func_name * ws^-1 * eq) - --- Keywords. -lex:add_rule('keyword', lex:tag(lexer.KEYWORD, P('!')^-1 * lex:word_match(lexer.KEYWORD, true))) - --- Targets. -local special_target = lex:tag(lexer.CONSTANT_BUILTIN, '.' * lex:word_match('special_targets')) --- local normal_target = lex:tag('target', (lexer.any - lexer.space - S(':+?!=#'))^1) -local target = special_target -- + normal_target * (ws * normal_target)^0 -lex:add_rule('target', lexer.starts_line(target * ws^-1 * #(':' * lexer.space))) - --- Variable and function assignments. 
-local func_assign = func_name * ws^-1 * eq * - #P(function(input, index) return input:find('%$%(%d%)', index) end) -local builtin_var = lex:tag(lexer.VARIABLE_BUILTIN, lex:word_match(lexer.VARIABLE_BUILTIN)) -local var_name = lex:tag(lexer.VARIABLE, word) -local var_assign = (builtin_var + var_name) * ws^-1 * - lex:tag(lexer.OPERATOR, S(':+?!')^-1 * '=' + '::=') -lex:add_rule('assign', lexer.starts_line(func_assign + var_assign, true) + B(': ') * var_assign) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S(':(){}|'))) - --- Strings. -lex:add_rule('string', lexer.range("'", true) + lexer.range('"', true)) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, word)) - --- Functions. -local builtin_func = lex:tag(lexer.FUNCTION_BUILTIN, lex:word_match(lexer.FUNCTION_BUILTIN)) -local call_func = lex:tag(lexer.FUNCTION_BUILTIN, 'call') * ws * func_name -local func = lex:tag(lexer.OPERATOR, '$' * S('({')) * (call_func + builtin_func) -lex:add_rule('function', func) - --- Variables. -local auto_var = lex:tag(lexer.OPERATOR, '$') * lex:tag(lexer.VARIABLE_BUILTIN, S('@%<?^+|*')) + - lex:tag(lexer.OPERATOR, '$(') * lex:tag(lexer.VARIABLE_BUILTIN, S('@%<?^+*') * S('DF')) -local var_ref = lex:tag(lexer.OPERATOR, P('$(') + '${') * (builtin_var + var_name) -local var = auto_var + var_ref -lex:add_rule('variable', var) - --- Comments. -lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.to_eol('#'))) - --- Embedded Bash in target rules. -local bash = lexer.load('bash') -bash:modify_rule('variable', - lex:tag(lexer.VARIABLE, '$$' * word) + func + var + bash:get_rule('variable')) -local bash_start_rule = lex:tag(lexer.WHITESPACE, '\t') + lex:tag(lexer.OPERATOR, ';') -local bash_end_rule = lex:tag(lexer.WHITESPACE, '\n') -lex:embed(bash, bash_start_rule, bash_end_rule) --- Embedded Bash in $(shell ...) calls. 
-local shell = lexer.load('bash', 'bash.shell') -bash_start_rule = #P('$(shell') * func -bash_end_rule = -B('\\') * lex:tag(lexer.OPERATOR, ')') -lex:embed(shell, bash_start_rule, bash_end_rule) - --- Word lists. -lex:set_word_list(lexer.KEYWORD, { - 'define', 'endef', -- multi-line - 'else', 'endif', 'ifdef', 'ifeq', 'ifndef', 'ifneq', -- conditionals - 'export', 'include', 'load', 'override', 'undefine', 'unexport', 'vpath', -- directives - 'private', -- - 'if', 'elseif', 'elseifdef', 'elseifndef' -- non-Make conditionals -}) - -lex:set_word_list('special_targets', { - 'DEFAULT', 'DELETE_ON_ERROR', 'EXPORT_ALL_VARIABLES', 'IGNORE', 'INTERMEDIATE', - 'LOW_RESOLUTION_TIME', 'NOTPARALLEL', 'ONESHELL', 'PHONY', 'POSIX', 'PRECIOUS', 'SECONDARY', - 'SECONDEXPANSION', 'SILENT', 'SUFFIXES' -}) - -lex:set_word_list(lexer.VARIABLE_BUILTIN, { - -- Special. - '.DEFAULT_GOAL', '.FEATURES', '.INCLUDE_DIRS', '.LIBPATTERNS', '.LOADED', '.RECIPEPREFIX', - '.SHELLFLAGS', '.SHELLSTATUS', '.VARIABLES', -- - 'COMSPEC', 'MAKESHELL', 'SHELL', -- choosing the shell - 'GPATH', 'VPATH', -- search - -- Make. - 'MAKE', 'MAKECMDGOALS', 'MAKEFILES', 'MAKEFILE_LIST', 'MAKEFLAGS', 'MAKELEVEL', 'MAKEOVERRIDES', - 'MAKE_RESTARTS', 'MAKE_TERMERR', 'MAKE_TERMOUT', 'MFLAGS', - -- Other. - 'CURDIR', 'OUTPUT_OPTION', 'SUFFIXES', - -- Implicit. - 'AR', 'ARFLAGS', 'AS', 'ASFLAGS', 'CC', 'CFLAGS', 'CO', 'COFLAGS', 'CPP', 'CPPFLAGS', 'CTANGLE', - 'CWEAVE', 'CXX', 'CXXFLAGS', 'FC', 'FFLAGS', 'GET', 'GFLAGS', 'LDFLAGS', 'LDLIBS', 'LEX', - 'LFLAGS', 'LINT', 'LINTFLAGS', 'M2C', 'MAKEINFO', 'PC', 'PFLAGS', 'RFLAGS', 'RM', 'TANGLE', 'TEX', - 'TEXI2DVI', 'WEAVE', 'YACC', 'YFLAGS', -- - 'bindir', 'DESTDIR', 'exec_prefix', 'libexecdir', 'prefix', 'sbindir' -- directory -}) - -lex:set_word_list(lexer.FUNCTION_BUILTIN, { - -- Filename. 
- 'abspath', 'addprefix', 'addsuffix', 'basename', 'dir', 'join', 'notdir', 'realpath', 'suffix', - 'wildcard', -- - 'and', 'if', 'or', -- conditional - 'error', 'info', 'warning', -- control - 'filter', 'filter-out', 'findstring', 'firstword', 'lastword', 'patsubst', 'sort', 'strip', - -- Text. - 'subst', 'word', 'wordlist', 'words', -- - 'call', 'eval', 'file', 'flavor', 'foreach', 'origin', 'shell', 'value' -- other -}) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/man.lua b/share/vis/lexers/man.lua @@ -1,22 +0,0 @@ --- Copyright 2015-2022 David B. Lamkins <david@lamkins.net>. See LICENSE. --- man/roff LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('man') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Markup. -lex:add_rule('rule1', token(lexer.STRING, '.' * lexer.to_eol('B' * P('R')^-1 + 'I' * P('PR')^-1))) -lex:add_rule('rule2', token(lexer.NUMBER, lexer.to_eol('.' * S('ST') * 'H'))) -lex:add_rule('rule3', token(lexer.KEYWORD, P('.br') + '.DS' + '.RS' + '.RE' + '.PD')) -lex:add_rule('rule4', token(lexer.LABEL, '.' * (S('ST') * 'H' + '.TP'))) -lex:add_rule('rule5', token(lexer.VARIABLE, '.B' * P('R')^-1 + '.I' * S('PR')^-1 + '.PP')) -lex:add_rule('rule6', token(lexer.TYPE, '\\f' * S('BIPR'))) -lex:add_rule('rule7', token(lexer.PREPROCESSOR, lexer.starts_line('.') * lexer.alpha^1)) - -return lex diff --git a/share/vis/lexers/markdown.lua b/share/vis/lexers/markdown.lua @@ -1,95 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Markdown LPeg lexer. - -local lexer = lexer -local P, S, B = lpeg.P, lpeg.S, lpeg.B - -local lex = lexer.new(..., {no_user_word_lists = true}) - --- Distinguish between horizontal and vertical space so html start rule has a chance to match. 
-lex:modify_rule('whitespace', lex:tag(lexer.WHITESPACE, S(' \t')^1 + S('\r\n')^1)) - --- Block elements. -local function h(n) - return lex:tag(string.format('%s.h%s', lexer.HEADING, n), - lexer.to_eol(lexer.starts_line(string.rep('#', n)))) -end -lex:add_rule('header', h(6) + h(5) + h(4) + h(3) + h(2) + h(1)) - -lex:add_rule('hr', - lex:tag('hr', lpeg.Cmt(lexer.starts_line(lpeg.C(S('*-_')), true), function(input, index, c) - local line = input:match('[^\r\n]*', index):gsub('[ \t]', '') - if line:find('[^' .. c .. ']') or #line < 2 then return nil end - return (select(2, input:find('\r?\n', index)) or #input) + 1 -- include \n for eolfilled styles - end))) - -lex:add_rule('list', lex:tag(lexer.LIST, - lexer.starts_line(lexer.digit^1 * '.' + S('*+-'), true) * S(' \t'))) - -local hspace = lexer.space - '\n' -local blank_line = '\n' * hspace^0 * ('\n' + P(-1)) - -local code_line = lexer.starts_line((B(' ') + B('\t')) * lexer.to_eol(), true) -local code_block = - lexer.range(lexer.starts_line('```', true), '\n```' * hspace^0 * ('\n' + P(-1))) + - lexer.range(lexer.starts_line('~~~', true), '\n~~~' * hspace^0 * ('\n' + P(-1))) -local code_inline = lpeg.Cmt(lpeg.C(P('`')^1), function(input, index, bt) - -- `foo`, ``foo``, ``foo`bar``, `foo``bar` are all allowed. - local _, e = input:find('[^`]' .. bt .. '%f[^`]', index) - return (e or #input) + 1 -end) -lex:add_rule('block_code', lex:tag(lexer.CODE, code_line + code_block + code_inline)) - -lex:add_rule('blockquote', - lex:tag(lexer.STRING, lpeg.Cmt(lexer.starts_line('>', true), function(input, index) - local _, e = input:find('\n[ \t]*\r?\n', index) -- the next blank line (possibly with indentation) - return (e or #input) + 1 - end))) - --- Span elements. 
-lex:add_rule('escape', lex:tag(lexer.DEFAULT, P('\\') * 1)) - -local link_text = lexer.range('[', ']', true) -local link_target = - '(' * (lexer.any - S(') \t'))^0 * (S(' \t')^1 * lexer.range('"', false, false))^-1 * ')' -local link_url = 'http' * P('s')^-1 * '://' * (lexer.any - lexer.space)^1 + - ('<' * lexer.alpha^2 * ':' * (lexer.any - lexer.space - '>')^1 * '>') -lex:add_rule('link', lex:tag(lexer.LINK, P('!')^-1 * link_text * link_target + link_url)) - -local link_ref = lex:tag(lexer.REFERENCE, link_text * S(' \t')^0 * lexer.range('[', ']', true)) -local ref_link_label = lex:tag(lexer.REFERENCE, lexer.range('[', ']', true) * ':') -local ws = lex:get_rule('whitespace') -local ref_link_url = lex:tag(lexer.LINK, (lexer.any - lexer.space)^1) -local ref_link_title = lex:tag(lexer.STRING, lexer.range('"', true, false) + - lexer.range("'", true, false) + lexer.range('(', ')', true)) -lex:add_rule('link_ref', link_ref + ref_link_label * ws * ref_link_url * (ws * ref_link_title)^-1) - -local punct_space = lexer.punct + lexer.space - --- Handles flanking delimiters as described in --- https://github.github.com/gfm/#emphasis-and-strong-emphasis in the cases where simple --- delimited ranges are not sufficient. 
-local function flanked_range(s, not_inword) - local fl_char = lexer.any - s - lexer.space - local left_fl = B(punct_space - s) * s * #fl_char + s * #(fl_char - lexer.punct) - local right_fl = B(lexer.punct) * s * #(punct_space - s) + B(fl_char) * s - return left_fl * (lexer.any - blank_line - (not_inword and s * #punct_space or s))^0 * right_fl -end - -local asterisk_strong = flanked_range('**') -local underscore_strong = (B(punct_space) + #lexer.starts_line('_')) * flanked_range('__', true) * - #(punct_space + -1) -lex:add_rule('strong', lex:tag(lexer.BOLD, asterisk_strong + underscore_strong)) - -local asterisk_em = flanked_range('*') -local underscore_em = (B(punct_space) + #lexer.starts_line('_')) * flanked_range('_', true) * - #(punct_space + -1) -lex:add_rule('em', lex:tag(lexer.ITALIC, asterisk_em + underscore_em)) - --- Embedded HTML. -local html = lexer.load('html') -local start_rule = lexer.starts_line(P(' ')^-3) * #P('<') * html:get_rule('tag') -- P(' ')^4 starts code_line -local end_rule = #blank_line * ws -lex:embed(html, start_rule, end_rule) - -return lex diff --git a/share/vis/lexers/matlab.lua b/share/vis/lexers/matlab.lua @@ -1,90 +0,0 @@ --- Copyright 2006-2024 Martin Morawetz. See LICENSE. --- Matlab LPeg lexer. --- Based off of lexer code by Mitchell. - -local lexer = lexer -local P, B, S = lpeg.P, lpeg.B, lpeg.S - -local lex = lexer.new(...) - --- Keywords. -lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD))) - --- Functions. --- Note: cannot tag normal functions because indexing variables uses the same syntax. -local builtin_func = lex:tag(lexer.FUNCTION_BUILTIN, lex:word_match(lexer.FUNCTION_BUILTIN)) -lex:add_rule('function', builtin_func * #(lexer.space^0 * S('('))) - --- Variable. -lex:add_rule('variable', lex:tag(lexer.VARIABLE_BUILTIN, lex:word_match(lexer.VARIABLE_BUILTIN))) - --- Constants. -lex:add_rule('constant', lex:tag(lexer.CONSTANT_BUILTIN, lex:word_match(lexer.CONSTANT_BUILTIN))) - --- Identifiers. 
-lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"') -local bq_str = lexer.range('`') -lex:add_rule('string', lex:tag(lexer.STRING, sq_str + dq_str + bq_str)) - --- Comments. -local line_comment = lexer.to_eol(S('%#')) -local block_comment = lexer.range('%{', '%}') -lex:add_rule('comment', lex:tag(lexer.COMMENT, block_comment + line_comment)) - --- Numbers. -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number)) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('!%^&*()[]{}-=+/\\|:;.,?<>~`´'))) - --- Fold points. -lex:add_fold_point(lexer.KEYWORD, 'if', 'end') -lex:add_fold_point(lexer.KEYWORD, 'for', 'end') -lex:add_fold_point(lexer.KEYWORD, 'while', 'end') -lex:add_fold_point(lexer.KEYWORD, 'switch', 'end') -lex:add_fold_point(lexer.OPERATOR, '(', ')') -lex:add_fold_point(lexer.OPERATOR, '[', ']') -lex:add_fold_point(lexer.COMMENT, '%{', '%}') - --- Word lists -lex:set_word_list(lexer.KEYWORD, { - 'break', 'case', 'catch', 'continue', 'do', 'else', 'elseif', 'end', 'end_try_catch', - 'end_unwind_protect', 'endfor', 'endif', 'endswitch', 'endwhile', 'for', 'function', - 'endfunction', 'global', 'if', 'otherwise', 'persistent', 'replot', 'return', 'static', 'switch', - 'try', 'until', 'unwind_protect', 'unwind_protect_cleanup', 'varargin', 'varargout', 'while' -}) - -lex:set_word_list(lexer.FUNCTION_BUILTIN, { - 'abs', 'any', 'argvatan2', 'axes', 'axis', 'ceil', 'cla', 'clear', 'clf', 'columns', 'cos', - 'delete', 'diff', 'disp', 'doc', 'double', 'drawnow', 'exp', 'figure', 'find', 'fix', 'floor', - 'fprintf', 'gca', 'gcf', 'get', 'grid', 'help', 'hist', 'hold', 'isempty', 'isnull', 'length', - 'load', 'log', 'log10', 'loglog', 'max', 'mean', 'median', 'min', 'mod', 'ndims', 'numel', - 'num2str', 'ones', 'pause', 'plot', 'printf', 'quit', 'rand', 'randn', 'rectangle', 'rem', - 'repmat', 'reshape', 'round', 'rows', 'save', 'semilogx', 
'semilogy', 'set', 'sign', 'sin', - 'size', 'sizeof', 'size_equal', 'sort', 'sprintf', 'squeeze', 'sqrt', 'std', 'strcmp', 'subplot', - 'sum', 'tan', 'tic', 'title', 'toc', 'uicontrol', 'who', 'xlabel', 'ylabel', 'zeros' -}) - -lex:set_word_list(lexer.VARIABLE_BUILTIN, { - 'ans', 'automatic_replot', 'default_return_value', 'do_fortran_indexing', - 'define_all_return_values', 'empty_list_elements_ok', 'eps', 'gnuplot_binary', - 'ignore_function_time_stamp', 'implicit_str_to_num_ok', 'ok_to_lose_imaginary_part', - 'output_max_field_width', 'output_precision', 'page_screen_output', 'prefer_column_vectors', - 'prefer_zero_one_indexing', 'print_answer_id_name', 'print_empty_dimensions', - 'resize_on_range_error', 'return_last_computed_value', 'save_precision', 'silent_functions', - 'split_long_rows', 'suppress_verbose_help_message', 'treat_neg_dim_as_zero', - 'warn_assign_as_truth_value', 'warn_comma_in_global_decl', 'warn_divide_by_zero', - 'warn_function_name_clash', 'whitespace_in_literal_matrix' -}) - -lex:set_word_list(lexer.CONSTANT_BUILTIN, { - 'false', 'Inf', 'inf', 'NaN', 'nan', 'pi', 'realmax', 'realmin', 'true' -}) - -lexer.property['scintillua.comment'] = '%' - -return lex diff --git a/share/vis/lexers/mediawiki.lua b/share/vis/lexers/mediawiki.lua @@ -1,44 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- MediaWiki LPeg lexer. --- Contributed by Alexander Misel. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S, B = lpeg.P, lpeg.S, lpeg.B - -local lex = lexer.new('mediawiki') - --- Comments. 
-lex:add_rule('comment', token(lexer.COMMENT, lexer.range('<!--', '-->'))) - --- HTML-like tags -local tag_start = token(lexer.TAG, '<' * P('/')^-1 * lexer.alnum^1 * lexer.space^0) -local dq_str = '"' * ((lexer.any - S('>"\\')) + ('\\' * lexer.any))^0 * '"' -local tag_attr = token(lexer.ATTRIBUTE, lexer.alpha^1 * lexer.space^0 * - ('=' * lexer.space^0 * (dq_str + (lexer.any - lexer.space - '>')^0)^-1)^0 * lexer.space^0) -local tag_end = token(lexer.TAG, P('/')^-1 * '>') -lex:add_rule('tag', tag_start * tag_attr^0 * tag_end) - --- Link -lex:add_rule('link', token(lexer.STRING, S('[]'))) -lex:add_rule('internal_link', B('[[') * token(lexer.LINK, (lexer.any - '|' - ']]')^1)) - --- Templates and parser functions. -lex:add_rule('template', token(lexer.OPERATOR, S('{}'))) -lex:add_rule('parser_func', - B('{{') * token(lexer.FUNCTION, '#' * lexer.alpha^1 + lexer.upper^1 * ':')) -lex:add_rule('template_name', B('{{') * token(lexer.LINK, (lexer.any - S('{}|'))^1)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('-=|#~!'))) - --- Behavior switches -local start_pat = P(function(_, pos) return pos == 1 end) -lex:add_rule('behavior_switch', (B(lexer.space) + start_pat) * token('behavior_switch', word_match( - '__TOC__ __FORCETOC__ __NOTOC__ __NOEDITSECTION__ __NOCC__ __NOINDEX__')) * #lexer.space) -lex:add_style('behavior_switch', lexer.styles.keyword) - -lexer.property['scintillua.comment'] = '<!--|-->' -lexer.property['scintillua.angle.braces'] = '1' - -return lex diff --git a/share/vis/lexers/meson.lua b/share/vis/lexers/meson.lua @@ -1,129 +0,0 @@ --- Copyright 2020-2024 Florian Fischer. See LICENSE. --- Meson file LPeg lexer. - -local lexer = lexer -local S = lpeg.S - -local lex = lexer.new(..., {fold_by_indentation = true}) - --- Keywords. -lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD))) - --- Functions. 
--- https://mesonbuild.com/Reference-manual.html#builtin-objects --- https://mesonbuild.com/Reference-manual.html#returned-objects -local method = lex:tag(lexer.FUNCTION_METHOD, lex:word_match(lexer.FUNCTION_METHOD)) --- https://mesonbuild.com/Reference-manual.html#functions -local func = lex:tag(lexer.FUNCTION_BUILTIN, lex:word_match(lexer.FUNCTION_BUILTIN)) --- A function call must be followed by an opening parenthesis. The matching of function calls --- instead of just their names is needed to not falsely highlight function names which can also --- be keyword arguments. For example 'include_directories' can be a function call itself or a --- keyword argument of an 'executable' or 'library' function call. -lex:add_rule('function', (method + func) * #(lexer.space^0 * '(')) - --- Builtin objects. --- https://mesonbuild.com/Reference-manual.html#builtin-objects -lex:add_rule('object', lex:tag(lexer.VARIABLE_BUILTIN, lex:word_match(lexer.VARIABLE_BUILTIN))) - --- Constants. -lex:add_rule('constant', lex:tag(lexer.CONSTANT_BUILTIN, lex:word_match(lexer.CONSTANT_BUILTIN))) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local str = lexer.range("'", true) -local multiline_str = lexer.range("'''") -lex:add_rule('string', lex:tag(lexer.STRING, multiline_str + str)) - --- Comments. -lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.to_eol('#', true))) - --- Numbers. -local oct_num = '0o' * lpeg.R('07') -local integer = S('+-')^-1 * (lexer.hex_num + lexer.bin_num + oct_num + lexer.dec_num) -lex:add_rule('number', lex:tag(lexer.NUMBER, integer)) - --- Operators. 
-lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('()[]{}-=+/%:.,?<>'))) - --- Word lists -lex:set_word_list(lexer.KEYWORD, { - 'and', 'or', 'not', 'if', 'elif', 'else', 'endif', 'foreach', 'break', 'continue', 'endforeach' -}) - -lex:set_word_list(lexer.FUNCTION_METHOD, { - -- array -- - 'contains', 'get', 'length', - -- boolean -- - 'to_int', 'to_string', - -- dictionary -- - 'has_key', 'get', 'keys', - -- disabler -- - 'found', - -- integer -- - 'is_even', 'is_odd', - -- string -- - 'contains', 'endswith', 'format', 'join', 'split', 'startswith', 'substring', 'strip', 'to_int', - 'to_lower', 'to_upper', 'underscorify', 'version_compare', - -- meson object -- - 'add_dist_script', 'add_install_script', 'add_postconf_script', 'backend', 'build_root', - 'source_root', 'project_build_root', 'project_source_root', 'current_build_dir', - 'current_source_dir', 'get_compiler', 'get_cross_property', 'get_external_property', - 'can_run_host_binaries', 'has_exe_wrapper', 'install_dependency_manifest', 'is_cross_build', - 'is_subproject', 'is_unity', 'override_find_program', 'override_dependency', 'project_version', - 'project_license', 'project_name', 'version', - -- *_machine object -- - 'cpu_family', 'cpu', 'system', 'endian', - -- compiler object -- - 'alignment', 'cmd_array', 'compiles', 'compute_int', 'find_library', 'first_supported_argument', - 'first_supported_link_argument', 'get_define', 'get_id', 'get_argument_syntax', 'get_linker_id', - 'get_supported_arguments', 'get_supported_link_arguments', 'has_argument', 'has_link_argument', - 'has_function', 'check_header', 'has_header', 'has_header_symbol', 'has_member', 'has_members', - 'has_multi_arguments', 'has_multi_link_arguments', 'has_type', 'links', 'run', - 'symbols_have_underscore_prefix', 'sizeof', 'version', 'has_function_attribute', - 'get_supported_function_attributes', - -- build target object -- - 'extract_all_objects', 'extract_objects', 'full_path', 'private_dir_include', 'name', - -- configuration 
data object -- - 'get', 'get_unquoted', 'has', 'keys', 'merge_from', 'set', 'set10', 'set_quoted', - -- custom target object -- - 'full_path', 'to_list', - -- dependency object -- - 'found', 'name', 'get_pkgconfig_variable', 'get_configtool_variable', 'type_name', 'version', - 'include_type', 'as_system', 'as_link_whole', 'partial_dependency', 'found', - -- external program object -- - 'found', 'path', 'full_path', - -- environment object -- - 'append', 'prepend', 'set', - -- external library object -- - 'found', 'type_name', 'partial_dependency', 'enabled', 'disabled', 'auto', - -- generator object -- - 'process', - -- subproject object -- - 'found', 'get_variable', - -- run result object -- - 'compiled', 'returncode', 'stderr', 'stdout' -}) - -lex:set_word_list(lexer.FUNCTION_BUILTIN, { - 'add_global_arguments', 'add_global_link_arguments', 'add_languages', 'add_project_arguments', - 'add_project_link_arguments', 'add_test_setup', 'alias_targ', 'assert', 'benchmark', - 'both_libraries', 'build_target', 'configuration_data', 'configure_file', 'custom_target', - 'declare_dependency', 'dependency', 'disabler', 'error', 'environment', 'executable', - 'find_library', 'find_program', 'files', 'generator', 'get_option', 'get_variable', 'import', - 'include_directories', 'install_data', 'install_headers', 'install_man', 'install_subdir', - 'is_disabler', 'is_variable', 'jar', 'join_paths', 'library', 'message', 'warning', 'summary', - 'project', 'run_command', 'run_targ', 'set_variable', 'shared_library', 'shared_module', - 'static_library', 'subdir', 'subdir_done', 'subproject', 'test', 'vcs_tag' -}) - -lex:set_word_list(lexer.VARIABLE_BUILTIN, { - 'meson', 'build_machine', 'host_machine', 'target_machine' -}) - -lex:set_word_list(lexer.CONSTANT_BUILTIN, {'false', 'true'}) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/moonscript.lua b/share/vis/lexers/moonscript.lua @@ -1,144 +0,0 @@ --- Copyright 2016-2024 Alejandro Baez 
(https://keybase.io/baez). See LICENSE. --- Moonscript LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('moonscript', {fold_by_indentation = true}) - --- Whitespace. -lex:add_rule('whitspace', token(lexer.WHITESPACE, lexer.space^1)) - --- Table keys. -lex:add_rule('tbl_key', token('tbl_key', lexer.word * ':' + ':' * lexer.word)) -lex:add_style('tbl_key', lexer.styles.regex) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - -- Lua. - 'and', 'break', 'do', 'else', 'elseif', 'false', 'for', 'if', 'in', 'local', 'nil', 'not', 'or', - 'return', 'then', 'true', 'while', - -- Moonscript. - 'continue', 'class', 'export', 'extends', 'from', 'import', 'super', 'switch', 'unless', 'using', - 'when', 'with' -})) - --- Error words. -lex:add_rule('error', token(lexer.ERROR, word_match('function end'))) - --- Self reference. -lex:add_rule('self_ref', token('self_ref', '@' * lexer.word^-1 + 'self')) -lex:add_style('self_ref', lexer.styles.label) - --- Functions. -lex:add_rule('function', token(lexer.FUNCTION, word_match{ - 'assert', 'collectgarbage', 'dofile', 'error', 'getmetatable', 'ipairs', 'load', 'loadfile', - 'next', 'pairs', 'pcall', 'print', 'rawequal', 'rawget', 'rawset', 'require', 'select', - 'setmetatable', 'tonumber', 'tostring', 'type', 'xpcall', - -- Added in 5.2. - 'rawlen' -})) - --- Constants. -lex:add_rule('constant', token(lexer.CONSTANT, word_match{ - '_G', '_VERSION', - -- Added in 5.2. - '_ENV' -})) - --- Libraries. -lex:add_rule('library', token(lexer.FUNCTION_BUILTIN, word_match{ - -- Coroutine. - 'coroutine', 'coroutine.create', 'coroutine.resume', 'coroutine.running', 'coroutine.status', - 'coroutine.wrap', 'coroutine.yield', - -- Coroutine added in 5.3. - 'coroutine.isyieldable', - -- Module. - 'package', 'package.cpath', 'package.loaded', 'package.loadlib', 'package.path', - 'package.preload', - -- Module added in 5.2. 
- 'package.config', 'package.searchers', 'package.searchpath', - -- UTF-8 added in 5.3. - 'utf8', 'utf8.char', 'utf8.charpattern', 'utf8.codepoint', 'utf8.codes', 'utf8.len', - 'utf8.offset', - -- String. - 'string', 'string.byte', 'string.char', 'string.dump', 'string.find', 'string.format', - 'string.gmatch', 'string.gsub', 'string.len', 'string.lower', 'string.match', 'string.rep', - 'string.reverse', 'string.sub', 'string.upper', - -- String added in 5.3. - 'string.pack', 'string.packsize', 'string.unpack', - -- Table. - 'table', 'table.concat', 'table.insert', 'table.remove', 'table.sort', - -- Table added in 5.2. - 'table.pack', 'table.unpack', - -- Table added in 5.3. - 'table.move', - -- Math. - 'math', 'math.abs', 'math.acos', 'math.asin', 'math.atan', 'math.ceil', 'math.cos', 'math.deg', - 'math.exp', 'math.floor', 'math.fmod', 'math.huge', 'math.log', 'math.max', 'math.min', - 'math.modf', 'math.pi', 'math.rad', 'math.random', 'math.randomseed', 'math.sin', 'math.sqrt', - 'math.tan', - -- Math added in 5.3. - 'math.maxinteger', 'math.mininteger', 'math.tointeger', 'math.type', 'math.ult', - -- IO. - 'io', 'io.close', 'io.flush', 'io.input', 'io.lines', 'io.open', 'io.output', 'io.popen', - 'io.read', 'io.stderr', 'io.stdin', 'io.stdout', 'io.tmpfile', 'io.type', 'io.write', - -- OS. - 'os', 'os.clock', 'os.date', 'os.difftime', 'os.execute', 'os.exit', 'os.getenv', 'os.remove', - 'os.rename', 'os.setlocale', 'os.time', 'os.tmpname', - -- Debug. - 'debug', 'debug.debug', 'debug.gethook', 'debug.getinfo', 'debug.getlocal', 'debug.getmetatable', - 'debug.getregistry', 'debug.getupvalue', 'debug.sethook', 'debug.setlocal', 'debug.setmetatable', - 'debug.setupvalue', 'debug.traceback', - -- Debug added in 5.2. - 'debug.getuservalue', 'debug.setuservalue', 'debug.upvalueid', 'debug.upvaluejoin', - - -- MoonScript 0.3.1 standard library. - -- Printing functions. - 'p', - -- Table functions. 
- 'run_with_scope', 'defaultbl', 'extend', 'copy', - -- Class/object functions. - 'is_object', 'bind_methods', 'mixin', 'mixin_object', 'mixin_table', - -- Misc functions. - 'fold', - -- Debug functions. - 'debug.upvalue' -})) - --- Identifiers. -local identifier = token(lexer.IDENTIFIER, lexer.word) -local proper_ident = token('proper_ident', lexer.upper * lexer.word) -lex:add_rule('identifier', proper_ident + identifier) -lex:add_style('proper_ident', lexer.styles.class) - --- Strings. -local sq_str = lexer.range("'", false, false) -local dq_str = lexer.range('"', false, false) -local longstring = lpeg.Cmt('[' * lpeg.C(P('=')^0) * '[', function(input, index, eq) - local _, e = input:find(']' .. eq .. ']', index, true) - return (e or #input) + 1 -end) -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str) + token(lexer.STRING, longstring)) - --- Comments. -local line_comment = lexer.to_eol('--') -local block_comment = '--' * longstring -lex:add_rule('comment', token(lexer.COMMENT, block_comment + line_comment)) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Function definition. -lex:add_rule('fndef', token('fndef', P('->') + '=>')) -lex:add_style('fndef', lexer.styles.preprocessor) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('+-*!\\/%^#=<>;:,.'))) -lex:add_rule('symbol', token('symbol', S('(){}[]'))) -lex:add_style('symbol', lexer.styles.embedded) - -lexer.property['scintillua.comment'] = '--' - -return lex diff --git a/share/vis/lexers/myrddin.lua b/share/vis/lexers/myrddin.lua @@ -1,54 +0,0 @@ --- Copyright 2017-2024 Michael Forney. See LICENSE --- Myrddin LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('myrddin') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. 
-lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'break', 'const', 'continue', 'elif', 'else', 'extern', 'false', 'for', 'generic', 'goto', 'if', - 'impl', 'in', 'match', 'pkg', 'pkglocal', 'sizeof', 'struct', 'trait', 'true', 'type', 'union', - 'use', 'var', 'while' -})) - --- Types. -lex:add_rule('type', token(lexer.TYPE, word_match{ - 'void', 'bool', 'char', 'byte', 'int', 'uint', 'int8', 'uint8', 'int16', 'uint16', 'int32', - 'uint32', 'int64', 'uint64', 'flt32', 'flt64' -} + '@' * lexer.word)) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Comments. -local line_comment = lexer.to_eol('//', true) -local block_comment = lexer.range('/*', '*/', false, false, true) -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Strings. -local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"', true) -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) - --- Numbers. -local digit = lexer.digit + '_' -local bdigit = S('01') + '_' -local xdigit = lexer.xdigit + '_' -local odigit = lpeg.R('07') + '_' -local integer = '0x' * xdigit^1 + '0o' * odigit^1 + '0b' * bdigit^1 + digit^1 -local float = digit^1 * ((('.' * digit^1) * (S('eE') * S('+-')^-1 * digit^1)^-1) + - (('.' * digit^1)^-1 * S('eE') * S('+-')^-1 * digit^1)) -lex:add_rule('number', token(lexer.NUMBER, float + integer)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('`#_+-/*%<>~!=^&|~:;,.()[]{}'))) - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/nemerle.lua b/share/vis/lexers/nemerle.lua @@ -1,66 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Nemerle LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('nemerle') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. 
-lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - '_', 'abstract', 'and', 'array', 'as', 'base', 'catch', 'class', 'def', 'do', 'else', 'extends', - 'extern', 'finally', 'foreach', 'for', 'fun', 'if', 'implements', 'in', 'interface', 'internal', - 'lock', 'macro', 'match', 'module', 'mutable', 'namespace', 'new', 'out', 'override', 'params', - 'private', 'protected', 'public', 'ref', 'repeat', 'sealed', 'static', 'struct', 'syntax', 'this', - 'throw', 'try', 'type', 'typeof', 'unless', 'until', 'using', 'variant', 'virtual', 'when', - 'where', 'while', - -- Values. - 'null', 'true', 'false' -})) - --- Types. -lex:add_rule('type', token(lexer.TYPE, word_match{ - 'bool', 'byte', 'char', 'decimal', 'double', 'float', 'int', 'list', 'long', 'object', 'sbyte', - 'short', 'string', 'uint', 'ulong', 'ushort', 'void' -})) - --- Strings. -local sq_str = P('L')^-1 * lexer.range("'", true) -local dq_str = P('L')^-1 * lexer.range('"', true) -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Comments. -local line_comment = lexer.to_eol('//', true) -local block_comment = lexer.range('/*', '*/') -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Preprocessor. -lex:add_rule('preproc', token(lexer.PREPROCESSOR, lexer.starts_line('#') * S('\t ')^0 * word_match{ - 'define', 'elif', 'else', 'endif', 'endregion', 'error', 'if', 'ifdef', 'ifndef', 'line', - 'pragma', 'region', 'undef', 'using', 'warning' -})) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;.()[]{}'))) - --- Fold points. 
-lex:add_fold_point(lexer.PREPROCESSOR, 'region', 'endregion') -lex:add_fold_point(lexer.PREPROCESSOR, 'if', 'endif') -lex:add_fold_point(lexer.PREPROCESSOR, 'ifdef', 'endif') -lex:add_fold_point(lexer.PREPROCESSOR, 'ifndef', 'endif') -lex:add_fold_point(lexer.OPERATOR, '{', '}') -lex:add_fold_point(lexer.COMMENT, '/*', '*/') - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/networkd.lua b/share/vis/lexers/networkd.lua @@ -1,101 +0,0 @@ --- Copyright 2016-2024 Christian Hesse. See LICENSE. --- systemd networkd file LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('networkd', {lex_by_line = true}) - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - -- Boolean values. - 'true', 'false', 'on', 'off', 'yes', 'no' -})) - --- Options. -lex:add_rule('option', token(lexer.PREPROCESSOR, word_match{ - -- Match section. - 'MACAddress', 'OriginalName', 'Path', 'Driver', 'Type', 'Host', 'Name', 'Virtualization', - 'KernelCommandLine', 'Architecture', - -- Link section. - 'Description', 'Alias', 'MACAddressPolicy', 'MACAddress', 'NamePolicy', 'Name', 'MTUBytes', - 'BitsPerSecond', 'Duplex', 'WakeOnLan', - -- Network section. - 'Description', 'DHCP', 'DHCPServer', 'LinkLocalAddressing', 'IPv4LLRoute', 'IPv6Token', 'LLMNR', - 'MulticastDNS', 'DNSSEC', 'DNSSECNegativeTrustAnchors', 'LLDP', 'BindCarrier', 'Address', - 'Gateway', 'DNS', 'Domains', 'NTP', 'IPForward', 'IPMasquerade', 'IPv6PrivacyExtensions', - 'IPv6AcceptRouterAdvertisements', 'IPv6DuplicateAddressDetection', 'IPv6HopLimit', 'Bridge', - 'Bond', 'VLAN', 'MACVLAN', 'VXLAN', 'Tunnel', - -- Address section. - 'Address', 'Peer', 'Broadcast', 'Label', - -- Route section. - 'Gateway', 'Destination', 'Source', 'Metric', 'Scope', 'PreferredSource', - -- DHCP section. 
- 'UseDNS', 'UseNTP', 'UseMTU', 'SendHostname', 'UseHostname', 'Hostname', 'UseDomains', - 'UseRoutes', 'UseTimezone', 'CriticalConnection', 'ClientIdentifier', 'VendorClassIdentifier', - 'RequestBroadcast', 'RouteMetric', - -- DHCPServer section. - 'PoolOffset', 'PoolSize', 'DefaultLeaseTimeSec', 'MaxLeaseTimeSec', 'EmitDNS', 'DNS', 'EmitNTP', - 'NTP', 'EmitTimezone', 'Timezone', - -- Bridge section. - 'UnicastFlood', 'HairPin', 'UseBPDU', 'FastLeave', 'AllowPortToBeRoot', 'Cost', - -- BridgeFDP section. - 'MACAddress', 'VLANId', - -- NetDev section. - 'Description', 'Name', 'Kind', 'MTUBytes', 'MACAddress', - -- Bridge (netdev) section. - 'HelloTimeSec', 'MaxAgeSec', 'ForwardDelaySec', - -- VLAN section. - 'Id', - -- MACVLAN MACVTAP and IPVLAN section. - 'Mode', - -- VXLAN section. - 'Id', 'Group', 'TOS', 'TTL', 'MacLearning', 'FDBAgeingSec', 'MaximumFDBEntries', 'ARPProxy', - 'L2MissNotification', 'L3MissNotification', 'RouteShortCircuit', 'UDPCheckSum', - 'UDP6ZeroChecksumTx', 'UDP6ZeroCheckSumRx', 'GroupPolicyExtension', 'DestinationPort', - 'PortRange', - -- Tunnel section. - 'Local', 'Remote', 'TOS', 'TTL', 'DiscoverPathMTU', 'IPv6FlowLabel', 'CopyDSCP', - 'EncapsulationLimit', 'Mode', - -- Peer section. - 'Name', 'MACAddress', - -- Tun and Tap section. - 'OneQueue', 'MultiQueue', 'PacketInfo', 'VNetHeader', 'User', 'Group', - -- Bond section. - 'Mode', 'TransmitHashPolicy', 'LACPTransmitRate', 'MIIMonitorSec', 'UpDelaySec', 'DownDelaySec', - 'LearnPacketIntervalSec', 'AdSelect', 'FailOverMACPolicy', 'ARPValidate', 'ARPIntervalSec', - 'ARPIPTargets', 'ARPAllTargets', 'PrimaryReselectPolicy', 'ResendIGMP', 'PacketsPerSlave', - 'GratuitousARP', 'AllSlavesActive', 'MinLinks' -})) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, (lexer.alpha + '_') * (lexer.alnum + S('_.'))^0)) - --- Strings. -local sq_str = lexer.range("'") -local dq_str = lexer.range('"') -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) - --- Sections. 
-lex:add_rule('section', token(lexer.LABEL, '[' * word_match{ - 'Address', 'Link', 'Match', 'Network', 'Route', 'DHCP', 'DHCPServer', 'Bridge', 'BridgeFDB', - 'NetDev', 'VLAN', 'MACVLAN', 'MACVTAP', 'IPVLAN', 'VXLAN', 'Tunnel', 'Peer', 'Tun', 'Tap', 'Bond' -} * ']')) - --- Comments. -lex:add_rule('comment', token(lexer.COMMENT, lexer.starts_line(lexer.to_eol(S(';#'))))) - --- Numbers. -local integer = S('+-')^-1 * (lexer.hex_num + lexer.oct_num_('_') + lexer.dec_num_('_')) -lex:add_rule('number', token(lexer.NUMBER, lexer.float + integer)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, '=')) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/nim.lua b/share/vis/lexers/nim.lua @@ -1,97 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Nim LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('nim', {fold_by_indentation = true}) - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match({ - 'addr', 'and', 'as', 'asm', 'atomic', 'bind', 'block', 'break', 'case', 'cast', 'const', - 'continue', 'converter', 'discard', 'distinct', 'div', 'do', 'elif', 'else', 'end', 'enum', - 'except', 'export', 'finally', 'for', 'from', 'generic', 'if', 'import', 'in', 'include', - 'interface', 'is', 'isnot', 'iterator', 'lambda', 'let', 'macro', 'method', 'mixin', 'mod', 'nil', - 'not', 'notin', 'object', 'of', 'or', 'out', 'proc', 'ptr', 'raise', 'ref', 'return', 'shared', - 'shl', 'static', 'template', 'try', 'tuple', 'type', 'var', 'when', 'while', 'with', 'without', - 'xor', 'yield' -}, true))) - --- Functions. -lex:add_rule('function', token(lexer.FUNCTION, word_match({ - -- Procs. 
- 'defined', 'definedInScope', 'new', 'unsafeNew', 'internalNew', 'reset', 'high', 'low', 'sizeof', - 'succ', 'pred', 'inc', 'dec', 'newSeq', 'len', 'incl', 'excl', 'card', 'ord', 'chr', 'ze', 'ze64', - 'toU8', 'toU16', 'toU32', 'abs', 'min', 'max', 'contains', 'cmp', 'setLen', 'newString', - 'newStringOfCap', 'add', 'compileOption', 'quit', 'shallowCopy', 'del', 'delete', 'insert', - 'repr', 'toFloat', 'toBiggestFloat', 'toInt', 'toBiggestInt', 'addQuitProc', 'substr', 'zeroMem', - 'copyMem', 'moveMem', 'equalMem', 'swap', 'getRefcount', 'clamp', 'isNil', 'find', 'contains', - 'pop', 'each', 'map', 'GC_ref', 'GC_unref', 'echo', 'debugEcho', 'getTypeInfo', 'Open', 'repopen', - 'Close', 'EndOfFile', 'readChar', 'FlushFile', 'readAll', 'readFile', 'writeFile', 'write', - 'readLine', 'writeln', 'getFileSize', 'ReadBytes', 'ReadChars', 'readBuffer', 'writeBytes', - 'writeChars', 'writeBuffer', 'setFilePos', 'getFilePos', 'fileHandle', 'cstringArrayToSeq', - 'allocCStringArray', 'deallocCStringArray', 'atomicInc', 'atomicDec', 'compareAndSwap', - 'setControlCHook', 'writeStackTrace', 'getStackTrace', 'alloc', 'alloc0', 'dealloc', 'realloc', - 'getFreeMem', 'getTotalMem', 'getOccupiedMem', 'allocShared', 'allocShared0', 'deallocShared', - 'reallocShared', 'IsOnStack', 'GC_addCycleRoot', 'GC_disable', 'GC_enable', 'GC_setStrategy', - 'GC_enableMarkAndSweep', 'GC_disableMarkAndSweep', 'GC_fullCollect', 'GC_getStatistics', - 'nimDestroyRange', 'getCurrentException', 'getCurrentExceptionMsg', 'onRaise', 'likely', - 'unlikely', 'rawProc', 'rawEnv', 'finished', 'slurp', 'staticRead', 'gorge', 'staticExec', 'rand', - 'astToStr', 'InstatiationInfo', 'raiseAssert', 'shallow', 'compiles', 'safeAdd', 'locals', - -- Iterators. - 'countdown', 'countup', 'items', 'pairs', 'fields', 'fieldPairs', 'lines', - -- Templates. - 'accumulateResult', 'newException', 'CurrentSourcePath', 'assert', 'doAssert', 'onFailedAssert', - 'eval', - -- Threads. 
- 'running', 'joinThread', 'joinThreads', 'createThread', 'threadId', 'myThreadId', - -- Channels. - 'send', 'recv', 'peek', 'ready' -}, true))) - --- Types. -lex:add_rule('type', token(lexer.TYPE, word_match({ - 'int', 'int8', 'int16', 'int32', 'int64', 'uint', 'uint8', 'uint16', 'uint32', 'uint64', 'float', - 'float32', 'float64', 'bool', 'char', 'string', 'cstring', 'pointer', 'Ordinal', 'auto', 'any', - 'TSignedInt', 'TUnsignedInt', 'TInteger', 'TOrdinal', 'TReal', 'TNumber', 'range', 'array', - 'openarray', 'varargs', 'seq', 'set', 'TSlice', 'TThread', 'TChannel', - -- Meta Types. - 'expr', 'stmt', 'typeDesc', 'void' -}, true))) - --- Constants. -lex:add_rule('constant', token(lexer.CONSTANT, word_match{ - 'on', 'off', 'isMainModule', 'CompileDate', 'CompileTime', 'NimVersion', 'NimMajor', 'NimMinor', - 'NimPatch', 'cpuEndian', 'hostOS', 'hostCPU', 'appType', 'QuitSuccess', 'QuitFailure', 'inf', - 'neginf', 'nan' -})) - --- Strings. -local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"', true) -local tq_str = lexer.range('"""') -local raw_str = 'r' * lexer.range('"', false, false) -lex:add_rule('string', token(lexer.STRING, tq_str + sq_str + dq_str + raw_str)) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Comments. -local line_comment = lexer.to_eol('#', true) -local block_comment = lexer.range('#[', ']#') -lex:add_rule('comment', token(lexer.COMMENT, block_comment + line_comment)) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.float_('_') + lexer.integer_('_') * - ("'" * S('iIuUfF') * (P('8') + '16' + '32' + '64'))^-1)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('=+-*/<>@$~&%|!?^.:\\`()[]{},;'))) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/nsis.lua b/share/vis/lexers/nsis.lua @@ -1,150 +0,0 @@ --- Copyright 2006-2024 Robert Gieseke. See LICENSE. 
--- NSIS LPeg lexer --- Based on NSIS 2.46 docs: http://nsis.sourceforge.net/Docs/. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('nsis') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Comments (4.1). -local line_comment = lexer.to_eol(S(';#')) -local block_comment = lexer.range('/*', '*/') -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Strings. -local sq_str = lexer.range("'") -local dq_str = lexer.range('"') -local bq_str = lexer.range('`') -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str + bq_str)) - --- Constants (4.2.3). -lex:add_rule('constant', token(lexer.CONSTANT, word_match{ - '$PROGRAMFILES', '$PROGRAMFILES32', '$PROGRAMFILES64', '$COMMONFILES', '$COMMONFILES32', - '$COMMONFILES64', '$DESKTOP', '$EXEDIR', '$EXEFILE', '$EXEPATH', '${NSISDIR}', '$WINDIR', - '$SYSDIR', '$TEMP', '$STARTMENU', '$SMPROGRAMS', '$SMSTARTUP', '$QUICKLAUNCH$DOCUMENTS', - '$SENDTO', '$RECENT', '$FAVORITES', '$MUSIC', '$PICTURES', '$VIDEOS', '$NETHOOD', '$FONTS', - '$TEMPLATES', '$APPDATA', '$LOCALAPPDATA', '$PRINTHOOD', '$INTERNET_CACHE', '$COOKIES', - '$HISTORY', '$PROFILE', '$ADMINTOOLS', '$RESOURCES', '$RESOURCES_LOCALIZED', '$CDBURN_AREA', - '$HWNDPARENT', '$PLUGINSDIR' -})) --- TODO? Constants used in strings: $$ $\r $\n $\t - --- Variables (4.2). -lex:add_rule('variable', token(lexer.VARIABLE, word_match{ - '$0', '$1', '$2', '$3', '$4', '$5', '$6', '$7', '$8', '$9', '$R0', '$R1', '$R2', '$R3', '$R4', - '$R5', '$R6', '$R7', '$R8', '$R9', '$INSTDIR', '$OUTDIR', '$CMDLINE', '$LANGUAGE', 'Var', - '/GLOBAL' -} + '$' * lexer.word)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - -- Pages (4.5). - 'Page', 'UninstPage', 'PageEx', 'PageEnd', 'PageExEnd', - -- Section commands (4.6). 
- 'AddSize', 'Section', 'SectionEnd', 'SectionIn', 'SectionGroup', 'SectionGroupEnd', - -- Functions (4.7). - 'Function', 'FunctionEnd', - -- Callbacks (4.7.2). - '.onGUIInit', '.onInit', '.onInstFailed', '.onInstSuccess', '.onGUIEnd', '.onMouseOverSection', - '.onRebootFailed', '.onSelChange', '.onUserAbort', '.onVerifyInstDir', 'un.onGUIInit', - 'un.onInit', 'un.onUninstFailed', 'un.onUninstSuccess', 'un.onGUIEnd', 'un.onRebootFailed', - 'un.onSelChange', 'un.onUserAbort', - -- General Attributes (4.8.1). - 'AddBrandingImage', 'AllowRootDirInstall', 'AutoCloseWindow', 'BGFont', 'BGFont', 'BrandingText', - '/TRIMLEFT', '/TRIMRIGHT', '/TRIMCENTER', 'Caption', 'ChangeUI', 'CheckBitmap', 'CompletedText', - 'ComponentText', 'CRCCheck', 'DetailsButtonText', 'DirText', 'DirVar', 'DirVerify', - 'FileErrorText', 'Icon', 'InstallButtonText', 'InstallColors', 'InstallDir', 'InstallDirRegKey', - 'InstProgressFlags', 'InstType', 'LicenseBkColor', 'LicenseData', 'LicenseForceSelection', - 'LicenseText', 'MiscButtonText', 'Name', 'OutFile', 'RequestExecutionLevel', 'SetFont', - 'ShowInstDetails', 'ShowUninstDetails', 'SilentInstall', 'SilentUnInstall', 'SpaceTexts', - 'SubCaption', 'UninstallButtonText', 'UninstallCaption', 'UninstallIcon', 'UninstallSubCaption', - 'UninstallText', 'WindowIcon', 'XPStyle', 'admin', 'auto', 'bottom', 'checkbox', 'false', 'force', - 'height', 'hide', 'highest', 'leave', 'left', 'nevershow', 'none', 'normal', 'off', 'on', - 'radiobuttons', 'right', 'show', 'silent', 'silentlog', 'top', 'true', 'user', 'width', - -- Compiler Flags (4.8.2). - 'AllowSkipFiles', 'FileBufSize', 'SetCompress', 'SetCompressor', '/SOLID', '/FINAL', 'zlib', - 'bzip2', 'lzma', 'SetCompressorDictSize', 'SetDatablockOptimize', 'SetDateSave', 'SetOverwrite', - 'ifnewer', 'ifdiff', 'lastused', 'try', - -- Version Information (4.8.3). 
- 'VIAddVersionKey', 'VIProductVersion', '/LANG', 'ProductName', 'Comments', 'CompanyName', - 'LegalCopyright', 'FileDescription', 'FileVersion', 'ProductVersion', 'InternalName', - 'LegalTrademarks', 'OriginalFilename', 'PrivateBuild', 'SpecialBuild', - -- Basic Instructions (4.9.1). - 'Delete', '/REBOOTOK', 'Exec', 'ExecShell', 'ExecShell', 'File', '/nonfatal', 'Rename', - 'ReserveFile', 'RMDir', 'SetOutPath', - -- Registry INI File Instructions (4.9.2). - 'DeleteINISec', 'DeleteINIStr', 'DeleteRegKey', '/ifempty', 'DeleteRegValue', 'EnumRegKey', - 'EnumRegValue', 'ExpandEnvStrings', 'FlushINI', 'ReadEnvStr', 'ReadINIStr', 'ReadRegDWORD', - 'ReadRegStr', 'WriteINIStr', 'WriteRegBin', 'WriteRegDWORD', 'WriteRegStr', 'WriteRegExpandStr', - 'HKCR', 'HKEY_CLASSES_ROOT', 'HKLM', 'HKEY_LOCAL_MACHINE', 'HKCU', 'HKEY_CURRENT_USER', 'HKU', - 'HKEY_USERS', 'HKCC', 'HKEY_CURRENT_CONFIG', 'HKDD', 'HKEY_DYN_DATA', 'HKPD', - 'HKEY_PERFORMANCE_DATA', 'SHCTX', 'SHELL_CONTEXT', - -- General Purpose Instructions (4.9.3). - 'CallInstDLL', 'CopyFiles', '/SILENT', '/FILESONLY', 'CreateDirectory', 'CreateShortCut', - 'GetDLLVersion', 'GetDLLVersionLocal', 'GetFileTime', 'GetFileTimeLocal', 'GetFullPathName', - '/SHORT', 'GetTempFileName', 'SearchPath', 'SetFileAttributes', 'RegDLL', 'UnRegDLL', - -- Flow Control Instructions (4.9.4). 
- 'Abort', 'Call', 'ClearErrors', 'GetCurrentAddress', 'GetFunctionAddress', 'GetLabelAddress', - 'Goto', 'IfAbort', 'IfErrors', 'IfFileExists', 'IfRebootFlag', 'IfSilent', 'IntCmp', 'IntCmpU', - 'MessageBox', 'MB_OK', 'MB_OKCANCEL', 'MB_ABORTRETRYIGNORE', 'MB_RETRYCANCEL', 'MB_YESNO', - 'MB_YESNOCANCEL', 'MB_ICONEXCLAMATION', 'MB_ICONINFORMATION', 'MB_ICONQUESTION', 'MB_ICONSTOP', - 'MB_USERICON', 'MB_TOPMOST', 'MB_SETFOREGROUND', 'MB_RIGHT', 'MB_RTLREADING', 'MB_DEFBUTTON1', - 'MB_DEFBUTTON2', 'MB_DEFBUTTON3', 'MB_DEFBUTTON4', 'IDABORT', 'IDCANCEL', 'IDIGNORE', 'IDNO', - 'IDOK', 'IDRETRY', 'IDYES', 'Return', 'Quit', 'SetErrors', 'StrCmp', 'StrCmpS', - -- File Instructions (4.9.5). - 'FileClose', 'FileOpen', 'FileRead', 'FileReadByte', 'FileSeek', 'FileWrite', 'FileWriteByte', - 'FindClose', 'FindFirst', 'FindNext', - -- Uninstaller Instructions (4.9.6). - 'WriteUninstaller', - -- Miscellaneous Instructions (4.9.7). - 'GetErrorLevel', 'GetInstDirError', 'InitPluginsDir', 'Nop', 'SetErrorLevel', 'SetRegView', - 'SetShellVarContext', 'all', 'current', 'Sleep', - -- String Manipulation Instructions (4.9.8). - 'StrCpy', 'StrLen', - -- Stack Support (4.9.9). - 'Exch', 'Pop', 'Push', - -- Integer Support (4.9.10). - 'IntFmt', 'IntOp', - -- Reboot Instructions (4.9.11). - 'Reboot', 'SetRebootFlag', - -- Install Logging Instructions (4.9.12). - 'LogSet', 'LogText', - -- Section Management (4.9.13). - 'SectionSetFlags', 'SectionGetFlags', 'SectionGetFlags', 'SectionSetText', 'SectionGetText', - 'SectionSetInstTypes', 'SectionGetInstTypes', 'SectionSetSize', 'SectionGetSize', - 'SetCurInstType', 'GetCurInstType', 'InstTypeSetText', 'InstTypeGetText', - -- User Interface Instructions (4.9.14). 
- 'BringToFront', 'CreateFont', 'DetailPrint', 'EnableWindow', 'FindWindow', 'GetDlgItem', - 'HideWindow', 'IsWindow', 'LockWindow', 'SendMessage', 'SetAutoClose', 'SetBrandingImage', - 'SetDetailsView', 'SetDetailsPrint', 'listonlytextonly', 'both', 'SetCtlColors', '/BRANDING', - 'SetSilent', 'ShowWindow', - -- Multiple Languages Instructions (4.9.15). - 'LoadLanguageFile', 'LangString', 'LicenseLangString', - -- Compile time commands (5). - '!include', '!addincludedir', '!addplugindir', '!appendfile', '!cd', '!delfile', '!echo', - '!error', '!execute', '!packhdr', '!system', '!tempfile', '!warning', '!verbose', '{__FILE__}', - '{__LINE__}', '{__DATE__}', '{__TIME__}', '{__TIMESTAMP__}', '{NSIS_VERSION}', '!define', - '!undef', '!ifdef', '!ifndef', '!if', '!ifmacrodef', '!ifmacrondef', '!else', '!endif', - '!insertmacro', '!macro', '!macroend', '!searchparse', '!searchreplace' -})) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.integer)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('+-*/%|&^~!<>'))) - --- Labels (4.3). -lex:add_rule('label', token(lexer.LABEL, lexer.word * ':')) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/null.lua b/share/vis/lexers/null.lua @@ -1,4 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Null LPeg lexer. - -return require('lexer').new('null') diff --git a/share/vis/lexers/objeck.lua b/share/vis/lexers/objeck.lua @@ -1,59 +0,0 @@ --- Copyright 2023-2024 Mitchell. See LICENSE. --- Objeck LPeg lexer. - -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(...) - --- Keywords. -lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD))) - --- Types. -lex:add_rule('type', lex:tag(lexer.TYPE, lex:word_match(lexer.TYPE))) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) - --- Class variables. 
-lex:add_rule('variable', lex:tag(lexer.VARIABLE, '@' * lexer.word)) - --- Comments. -local line_comment = lexer.to_eol('#', true) -local block_comment = lexer.range('#~', '~#') -lex:add_rule('comment', lex:tag(lexer.COMMENT, block_comment + line_comment)) - --- Strings. -local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"', true) -local ml_str = lexer.range('"', false, false) -lex:add_rule('string', lex:tag(lexer.STRING, sq_str + ml_str + dq_str)) - --- Numbers. -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number)) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('~!.,:;+-*/<>=\\^|&%?()[]{}'))) - --- Fold points. -lex:add_fold_point(lexer.OPERATOR, '{', '}') -lex:add_fold_point(lexer.COMMENT, '#~', '~#') - --- Word lists. -lex:set_word_list(lexer.KEYWORD, { - 'class', 'method', 'function', 'public', 'abstract', 'private', 'static', 'native', 'virtual', - 'Parent', 'As', 'from', 'implements', 'interface', 'enum', 'alias', 'consts', 'bundle', 'use', - 'leaving', 'if', 'else', 'do', 'while', 'select', 'break', 'continue', 'other', 'for', 'each', - 'reverse', 'label', 'return', 'critical', 'New', 'and', 'or', 'xor', 'true', 'false' -- , 'Nil' -}) - -lex:set_word_list(lexer.TYPE, { - 'Nil', 'Byte', 'ByteHolder', 'Int', 'IntHolder', 'Float', 'FloatHolder', 'Char', 'CharHolder', - 'Bool', 'BoolHolder', 'String', 'BaseArrayHolder', 'BoolArrayHolder', 'ByteArrayHolder', - 'CharArrayHolder', 'FloatArrayHolder', 'IntArrayHolder', 'StringArrayHolder', 'Func2Holder', - 'Func3Holder', 'Func4Holder', 'FuncHolder' -}) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/objective_c.lua b/share/vis/lexers/objective_c.lua @@ -1,69 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Objective C LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('objective_c') - --- Whitespace. 
-lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - -- From C. - 'asm', 'auto', 'break', 'case', 'const', 'continue', 'default', 'do', 'else', 'extern', 'false', - 'for', 'goto', 'if', 'inline', 'register', 'return', 'sizeof', 'static', 'switch', 'true', - 'typedef', 'void', 'volatile', 'while', 'restrict', '_Bool', '_Complex', '_Pragma', '_Imaginary', - -- Objective C. - 'oneway', 'in', 'out', 'inout', 'bycopy', 'byref', 'self', 'super', - -- Preprocessor directives. - '@interface', '@implementation', '@protocol', '@end', '@private', '@protected', '@public', - '@class', '@selector', '@encode', '@defs', '@synchronized', '@try', '@throw', '@catch', - '@finally', - -- Constants. - 'TRUE', 'FALSE', 'YES', 'NO', 'NULL', 'nil', 'Nil', 'METHOD_NULL' -})) - --- Types. -lex:add_rule('type', token(lexer.TYPE, word_match( - 'apply_t id Class MetaClass Object Protocol retval_t SEL STR IMP BOOL TypedStream'))) - --- Strings. -local sq_str = P('L')^-1 * lexer.range("'", true) -local dq_str = P('L')^-1 * lexer.range('"', true) -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Comments. -local line_comment = lexer.to_eol('//', true) -local block_comment = lexer.range('/*', '*/') -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Preprocessor. -lex:add_rule('preprocessor', - #lexer.starts_line('#') * token(lexer.PREPROCESSOR, '#' * S('\t ')^0 * word_match{ - 'define', 'elif', 'else', 'endif', 'error', 'if', 'ifdef', 'ifndef', 'import', 'include', - 'line', 'pragma', 'undef', 'warning' - })) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;.()[]{}'))) - --- Fold symbols. 
-lex:add_fold_point(lexer.PREPROCESSOR, 'region', 'endregion') -lex:add_fold_point(lexer.PREPROCESSOR, 'if', 'endif') -lex:add_fold_point(lexer.PREPROCESSOR, 'ifdef', 'endif') -lex:add_fold_point(lexer.PREPROCESSOR, 'ifndef', 'endif') -lex:add_fold_point(lexer.OPERATOR, '{', '}') -lex:add_fold_point(lexer.COMMENT, '/*', '*/') - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/output.lua b/share/vis/lexers/output.lua @@ -1,97 +0,0 @@ --- Copyright 2022-2024 Mitchell. See LICENSE. --- LPeg lexer for tool output. --- If a warning or error is recognized, tags its filename, line, column (if available), --- and message, and sets the line state to 1 for an error (first bit), and 2 for a warning --- (second bit). --- This is similar to Lexilla's errorlist lexer. - -local lexer = lexer -local starts_line = lexer.starts_line -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(..., {lex_by_line = true}) - --- Tags a pattern as plain text. -local function text(patt) return lex:tag(lexer.DEFAULT, patt) end - --- Tags a pattern as a filename. -local function filename(patt) return lex:tag('filename', patt) end - --- Typical line and column number patterns. -local line = text('line ')^-1 * lex:tag('line', lexer.dec_num) -local column = lex:tag('column', lexer.dec_num) - --- Tags a pattern as an error/warning/etc. message. -local function message(patt) return lex:tag('message', patt) end - --- Immediately marks the current line as an error. --- This should only be specified at the end of a rule, or else LPeg may backtrack and mistakenly --- mark a non-error line. -local function mark_error(_, pos) - lexer.line_state[lexer.line_from_position(pos)] = 1 - return true -end - --- Immediately marks the current line as a warning. --- This should only be specified at the end of a rule, or else LPeg may backtrack and mistakenly --- mark a non-warning line. 
-local function mark_warning(_, pos) - lexer.line_state[lexer.line_from_position(pos)] = 2 - return true -end - --- filename:line: message (ruby) --- filename:line:col: message (c, cpp, go, ...) --- filename: line X: message (bash) -local c_filename = filename((lexer.nonnewline - ':')^1) -local colon = text(':' * P(' ')^-1) -local warning = message(lexer.to_eol('warning: ')) * mark_warning -local note = message(lexer.to_eol('note: ')) -- do not mark -local error = message(lexer.to_eol()) * mark_error -lex:add_rule('common', starts_line(c_filename) * colon * line * colon * (column * colon)^-1 * - (warning + note + error)) - --- prog: filename:line: message (awk, lua) -lex:add_rule('prog', starts_line(text(lexer.word)) * colon * c_filename * colon * line * colon * - (warning + error)) - --- File "filename", line X (python) -local py_filename = filename((lexer.nonnewline - '"')^1) -lex:add_rule('python', - starts_line(text('File "'), true) * py_filename * text('", ') * line * mark_error) - --- filename(line): error: message (d, cuda) -local lparen, rparen = text('('), text(')') -local d_filename = filename((lexer.nonnewline - '(')^1) -local d_error = message(lexer.to_eol(S('Ee') * 'rror')) * mark_error -lex:add_rule('dmd', starts_line(d_filename) * lparen * line * rparen * colon * d_error) - --- "filename" line X: message (gnuplot) -local gp_filename = filename((lexer.nonnewline - '"')^1) -lex:add_rule('gnuplot', starts_line(text('"')) * gp_filename * text('" ') * line * colon * error) - --- at com.path(filename:line) (java) -lex:add_rule('java', - starts_line(text('at ' * (lexer.nonnewline - '(')^1), true) * lparen * c_filename * colon * line * - rparen * mark_error) - --- message in filename on line X (php) -lex:add_rule('php', starts_line(message((lexer.nonnewline - ' in ')^1)) * text(' in ') * - filename((lexer.nonnewline - ' on ')^1) * text(' on ') * line * mark_error) - --- filename(line, col): message (vb, csharp, fsharp, ...) 
-lex:add_rule('vb', - starts_line(filename((lexer.nonnewline - '(')^1)) * lparen * line * text(', ') * column * rparen * - colon * error) - --- message at filename line X (perl) -lex:add_rule('perl', starts_line(message((lexer.nonnewline - ' at ')^1)) * text(' at ') * - filename((lexer.nonnewline - ' line ')^1) * text(' line ') * line * mark_error) - --- CMake Error at filename:line: (cmake) -lex:add_rule('cmake', - starts_line(text('CMake Error at ')) * c_filename * colon * line * colon * mark_error) - -lex:add_rule('any_line', lex:tag(lexer.DEFAULT, lexer.to_eol())) - -return lex diff --git a/share/vis/lexers/pascal.lua b/share/vis/lexers/pascal.lua @@ -1,64 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Pascal LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('pascal') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. 
-lex:add_rule('keyword', token(lexer.KEYWORD, word_match({ - 'and', 'array', 'as', 'at', 'asm', 'begin', 'case', 'class', 'const', 'constructor', 'destructor', - 'dispinterface', 'div', 'do', 'downto', 'else', 'end', 'except', 'exports', 'file', 'final', - 'finalization', 'finally', 'for', 'function', 'goto', 'if', 'implementation', 'in', 'inherited', - 'initialization', 'inline', 'interface', 'is', 'label', 'mod', 'not', 'object', 'of', 'on', 'or', - 'out', 'packed', 'procedure', 'program', 'property', 'raise', 'record', 'repeat', - 'resourcestring', 'set', 'sealed', 'shl', 'shr', 'static', 'string', 'then', 'threadvar', 'to', - 'try', 'type', 'unit', 'unsafe', 'until', 'uses', 'var', 'while', 'with', 'xor', 'absolute', - 'abstract', 'assembler', 'automated', 'cdecl', 'contains', 'default', 'deprecated', 'dispid', - 'dynamic', 'export', 'external', 'far', 'forward', 'implements', 'index', 'library', 'local', - 'message', 'name', 'namespaces', 'near', 'nodefault', 'overload', 'override', 'package', 'pascal', - 'platform', 'private', 'protected', 'public', 'published', 'read', 'readonly', 'register', - 'reintroduce', 'requires', 'resident', 'safecall', 'stdcall', 'stored', 'varargs', 'virtual', - 'write', 'writeonly', -- - 'false', 'nil', 'self', 'true' -}, true))) - --- Functions. -lex:add_rule('function', token(lexer.FUNCTION, word_match({ - 'chr', 'ord', 'succ', 'pred', 'abs', 'round', 'trunc', 'sqr', 'sqrt', 'arctan', 'cos', 'sin', - 'exp', 'ln', 'odd', 'eof', 'eoln' -}, true))) - --- Types. -lex:add_rule('type', token(lexer.TYPE, word_match({ - 'shortint', 'byte', 'char', 'smallint', 'integer', 'word', 'longint', 'cardinal', 'boolean', - 'bytebool', 'wordbool', 'longbool', 'real', 'single', 'double', 'extended', 'comp', 'currency', - 'pointer' -}, true))) - --- Strings. -lex:add_rule('string', token(lexer.STRING, S('uUrR')^-1 * lexer.range("'", true, false))) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Comments. 
-local line_comment = lexer.to_eol('//', true) -local bblock_comment = lexer.range('{', '}') -local pblock_comment = lexer.range('(*', '*)') -lex:add_rule('comment', token(lexer.COMMENT, line_comment + bblock_comment + pblock_comment)) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number * S('LlDdFf')^-1)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('.,;^@:=<>+-/*()[]'))) - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/perl.lua b/share/vis/lexers/perl.lua @@ -1,161 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Perl LPeg lexer. - -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(...) - --- Keywords. -lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD))) - --- Markers. -lex:add_rule('marker', lex:tag(lexer.COMMENT, lexer.word_match('__DATA__ __END__') * lexer.any^0)) - --- Strings. -local delimiter_matches = {['('] = ')', ['['] = ']', ['{'] = '}', ['<'] = '>'} -local literal_delimited = P(function(input, index) -- for single delimiter sets - local delimiter = input:sub(index, index) - if not delimiter:find('%w') then -- only non alpha-numerics - local patt - if delimiter_matches[delimiter] then - -- Handle nested delimiter/matches in strings. - local s, e = delimiter, delimiter_matches[delimiter] - patt = lexer.range(s, e, false, true, true) - else - patt = lexer.range(delimiter) - end - local match_pos = lpeg.match(patt, input, index) - return match_pos or #input + 1 - end -end) -local literal_delimited2 = P(function(input, index) -- for 2 delimiter sets - local delimiter = input:sub(index, index) - -- Only consider non-alpha-numerics and non-spaces as delimiters. The non-spaces are used to - -- ignore operators like "-s". - if not delimiter:find('[%w ]') then - local patt - if delimiter_matches[delimiter] then - -- Handle nested delimiter/matches in strings. 
- local s, e = delimiter, delimiter_matches[delimiter] - patt = lexer.range(s, e, false, true, true) - else - patt = lexer.range(delimiter) - end - local first_match_pos = lpeg.match(patt, input, index) - local final_match_pos = lpeg.match(patt, input, first_match_pos - 1) - if not final_match_pos then -- using (), [], {}, or <> notation - final_match_pos = lpeg.match(lexer.space^0 * patt, input, first_match_pos) - end - if final_match_pos and final_match_pos < index then final_match_pos = index end - return final_match_pos or #input + 1 - end -end) - -local sq_str = lexer.range("'") -local dq_str = lexer.range('"') -local cmd_str = lexer.range('`') -local heredoc = '<<' * P(function(input, index) - local s, e, delimiter = input:find('([%a_][%w_]*)[\n\r\f;]+', index) - if s == index and delimiter then - local end_heredoc = '[\n\r\f]+' - e = select(2, input:find(end_heredoc .. delimiter, e)) - return e and e + 1 or #input + 1 - end -end) -local lit_str = 'q' * P('q')^-1 * literal_delimited -local lit_array = 'qw' * literal_delimited -local lit_cmd = 'qx' * literal_delimited -local string = lex:tag(lexer.STRING, - sq_str + dq_str + cmd_str + heredoc + lit_str + lit_array + lit_cmd) -local regex_str = lexer.after_set('-<>+*!~\\=%&|^?:;([{', lexer.range('/', true) * S('imosx')^0) -local lit_regex = 'qr' * literal_delimited * S('imosx')^0 -local lit_match = 'm' * literal_delimited * S('cgimosx')^0 -local lit_sub = 's' * literal_delimited2 * S('ecgimosx')^0 -local lit_tr = (P('tr') + 'y') * literal_delimited2 * S('cds')^0 -local regex = lex:tag(lexer.REGEX, regex_str + lit_regex + lit_match + lit_sub + lit_tr) -lex:add_rule('string', string + regex) - --- Functions. 
-lex:add_rule('function_builtin', - lex:tag(lexer.FUNCTION_BUILTIN, lex:word_match(lexer.FUNCTION_BUILTIN)) * - #(lexer.space^0 * P('(')^-1)) -local func = lex:tag(lexer.FUNCTION, lexer.word) -local method = lpeg.B('->') * lex:tag(lexer.FUNCTION_METHOD, lexer.word) -lex:add_rule('function', (method + func) * #(lexer.space^0 * '(')) - --- Constants. -lex:add_rule('constant', lex:tag(lexer.CONSTANT_BUILTIN, lex:word_match(lexer.CONSTANT_BUILTIN))) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) - --- Comments. -local line_comment = lexer.to_eol('#', true) -local block_comment = lexer.range(lexer.starts_line('=' * lexer.alpha), lexer.starts_line('=cut')) -lex:add_rule('comment', lex:tag(lexer.COMMENT, block_comment + line_comment)) - --- Numbers. -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number_('_'))) - --- Variables. -local builtin_var_s = '$' * - (lpeg.R('09') + S('!"$%&\'()+,-./:;<=>?@\\]_`|~') + '^' * S('ACDEFHILMNOPRSTVWX')^-1 + 'ARGV') -local builtin_var_a = '@' * (S('+-_F') + 'ARGV' + 'INC' + 'ISA') -local builtin_var_h = '%' * (S('+-!') + '^' * S('H')^-1 + 'ENV' + 'INC' + 'SIG') -lex:add_rule('variable_builtin', - lex:tag(lexer.VARIABLE_BUILTIN, builtin_var_s + builtin_var_a + builtin_var_h)) -local special_var = '$' * - ('^' * S('ADEFHILMOPSTWX')^-1 + S('\\"[]\'&`+*.,;=%~?@<>(|/!-') + ':' * (lexer.any - ':') + - (P('$') * -lexer.word) + lexer.digit^1) -local plain_var = ('$#' + S('$@%')) * P('$')^0 * lexer.word + '$#' -lex:add_rule('variable', lex:tag(lexer.VARIABLE, special_var + plain_var)) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('-<>+*!~\\=/%&|^.,?:;()[]{}'))) - --- Fold points. -lex:add_fold_point(lexer.OPERATOR, '[', ']') -lex:add_fold_point(lexer.OPERATOR, '{', '}') - --- Word lists. 
-lex:set_word_list(lexer.KEYWORD, { - 'STDIN', 'STDOUT', 'STDERR', 'BEGIN', 'END', 'CHECK', 'INIT', -- - 'require', 'use', -- - 'break', 'continue', 'do', 'each', 'else', 'elsif', 'foreach', 'for', 'if', 'last', 'local', 'my', - 'next', 'our', 'package', 'return', 'sub', 'unless', 'until', 'while', '__FILE__', '__LINE__', - '__PACKAGE__', -- - 'and', 'or', 'not', 'eq', 'ne', 'lt', 'gt', 'le', 'ge' -}) - -lex:set_word_list(lexer.FUNCTION_BUILTIN, { - 'abs', 'accept', 'alarm', 'atan2', 'bind', 'binmode', 'bless', 'caller', 'chdir', 'chmod', - 'chomp', 'chop', 'chown', 'chr', 'chroot', 'closedir', 'close', 'connect', 'cos', 'crypt', - 'dbmclose', 'dbmopen', 'defined', 'delete', 'die', 'dump', 'each', 'endgrent', 'endhostent', - 'endnetent', 'endprotoent', 'endpwent', 'endservent', 'eof', 'eval', 'exec', 'exists', 'exit', - 'exp', 'fcntl', 'fileno', 'flock', 'fork', 'format', 'formline', 'getc', 'getgrent', 'getgrgid', - 'getgrnam', 'gethostbyaddr', 'gethostbyname', 'gethostent', 'getlogin', 'getnetbyaddr', - 'getnetbyname', 'getnetent', 'getpeername', 'getpgrp', 'getppid', 'getpriority', 'getprotobyname', - 'getprotobynumber', 'getprotoent', 'getpwent', 'getpwnam', 'getpwuid', 'getservbyname', - 'getservbyport', 'getservent', 'getsockname', 'getsockopt', 'glob', 'gmtime', 'goto', 'grep', - 'hex', 'import', 'index', 'int', 'ioctl', 'join', 'keys', 'kill', 'lcfirst', 'lc', 'length', - 'link', 'listen', 'localtime', 'log', 'lstat', 'map', 'mkdir', 'msgctl', 'msgget', 'msgrcv', - 'msgsnd', 'new', 'oct', 'opendir', 'open', 'ord', 'pack', 'pipe', 'pop', 'pos', 'printf', 'print', - 'prototype', 'push', 'quotemeta', 'rand', 'readdir', 'read', 'readlink', 'recv', 'redo', 'ref', - 'rename', 'reset', 'reverse', 'rewinddir', 'rindex', 'rmdir', 'scalar', 'seekdir', 'seek', - 'select', 'semctl', 'semget', 'semop', 'send', 'setgrent', 'sethostent', 'setnetent', 'setpgrp', - 'setpriority', 'setprotoent', 'setpwent', 'setservent', 'setsockopt', 'shift', 'shmctl', 'shmget', - 
'shmread', 'shmwrite', 'shutdown', 'sin', 'sleep', 'socket', 'socketpair', 'sort', 'splice', - 'split', 'sprintf', 'sqrt', 'srand', 'stat', 'study', 'substr', 'symlink', 'syscall', 'sysread', - 'sysseek', 'system', 'syswrite', 'telldir', 'tell', 'tied', 'tie', 'time', 'times', 'truncate', - 'ucfirst', 'uc', 'umask', 'undef', 'unlink', 'unpack', 'unshift', 'untie', 'utime', 'values', - 'vec', 'wait', 'waitpid', 'wantarray', 'warn', 'write' -}) - -lex:set_word_list(lexer.CONSTANT_BUILTIN, { - 'ARGV', 'ARGVOUT', 'DATA', 'ENV', 'INC', 'SIG', 'STDERR', 'STDIN', 'STDOUT' -}) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/php.lua b/share/vis/lexers/php.lua @@ -1,107 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- PHP LPeg lexer. - -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(...) - --- Keywords. -lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD))) - --- Types. -lex:add_rule('type', lex:tag(lexer.TYPE, lex:word_match(lexer.TYPE))) - --- Functions. -local word = (lexer.alpha + '_' + lpeg.R('\127\255')) * (lexer.alnum + '_' + lpeg.R('\127\255'))^0 -local func = lex:tag(lexer.FUNCTION, word) -local method = lpeg.B('->') * lex:tag(lexer.FUNCTION_METHOD, word) -lex:add_rule('function', (method + func) * #(lexer.space^0 * '(')) - --- Constants. -lex:add_rule('constant', lex:tag(lexer.CONSTANT_BUILTIN, lex:word_match(lexer.CONSTANT_BUILTIN))) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, word)) - --- Variables. -lex:add_rule('variable', lex:tag(lexer.VARIABLE, '$' * word)) - --- Strings. -local sq_str = lexer.range("'") -local dq_str = lexer.range('"') -local bq_str = lexer.range('`') -local heredoc = '<<<' * P(function(input, index) - local _, e, delimiter = input:find('([%a_][%w_]*)[\n\r\f]+', index) - if delimiter then - _, e = input:find('[\n\r\f]+' .. 
delimiter, e) - return e and e + 1 - end -end) -lex:add_rule('string', lex:tag(lexer.STRING, sq_str + dq_str + bq_str + heredoc)) --- TODO: interpolated code. - --- Comments. -local line_comment = lexer.to_eol(P('//') + '#') -local block_comment = lexer.range('/*', '*/') -lex:add_rule('comment', lex:tag(lexer.COMMENT, block_comment + line_comment)) - --- Numbers. -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number)) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('!@%^*&()-+=|/?.,;:<>[]{}'))) - --- Embedded in HTML. -local html = lexer.load('html') - --- Embedded PHP. -local php_start_rule = lex:tag(lexer.PREPROCESSOR, '<?' * ('php' * lexer.space)^-1) -local php_end_rule = lex:tag(lexer.PREPROCESSOR, '?>') -html:embed(lex, php_start_rule, php_end_rule) - --- Fold points. -lex:add_fold_point(lexer.PREPROCESSOR, '<?', '?>') -lex:add_fold_point(lexer.COMMENT, '/*', '*/') -lex:add_fold_point(lexer.OPERATOR, '{', '}') -lex:add_fold_point(lexer.OPERATOR, '(', ')') - --- Word lists. 
-lex:set_word_list(lexer.KEYWORD, { - -- Reserved words (http://php.net/manual/en/reserved.keywords.php) - '__halt_compiler', 'abstract', 'and', 'array', 'as', 'break', 'callable', 'case', 'catch', - 'class', 'clone', 'const', 'continue', 'declare', 'default', 'die', 'do', 'echo', 'else', - 'elseif', 'empty', 'enddeclare', 'endfor', 'endforeach', 'endif', 'endswitch', 'endwhile', 'eval', - 'exit', 'extends', 'final', 'finally', 'fn', 'for', 'foreach', 'function', 'global', 'goto', 'if', - 'implements', 'include', 'include_once', 'instanceof', 'insteadof', 'interface', 'isset', 'list', - 'namespace', 'new', 'or', 'print', 'private', 'protected', 'public', 'require', 'require_once', - 'return', 'static', 'switch', 'throw', 'trait', 'try', 'unset', 'use', 'var', 'while', 'xor', - 'yield', 'from', - -- Reserved classes (http://php.net/manual/en/reserved.classes.php) - 'Directory', 'stdClass', '__PHP_Incomplete_Class', 'Exception', 'ErrorException', - 'php_user_filter', 'Closure', 'Generator', 'ArithmeticError', 'AssertionError', - 'DivisionByZeroError', 'Error', 'Throwable', 'ParseError', 'TypeError', 'self', 'static', 'parent' -}) - -lex:set_word_list(lexer.TYPE, 'int float bool string true false null void iterable object') - -lex:set_word_list(lexer.CONSTANT_BUILTIN, { - -- Compile-time (https://www.php.net/manual/en/reserved.keywords.php) - '__CLASS__', '__DIR__', '__FILE__', '__FUNCTION__', '__LINE__', '__METHOD__', '__NAMESPACE__', - '__TRAIT__', - -- Reserved (https://www.php.net/manual/en/reserved.constants.php) - 'PHP_VERSION', 'PHP_MAJOR_VERSION', 'PHP_MINOR_VERSION', 'PHP_RELEASE_VERSION', 'PHP_VERSION_ID', - 'PHP_EXTRA_VERSION', 'PHP_ZTS', 'PHP_DEBUG', 'PHP_MAXPATHLEN', 'PHP_OS', 'PHP_OS_FAMILY', - 'PHP_SAPI', 'PHP_EOL', 'PHP_INT_MAX', 'PHP_INT_MIN', 'PHP_INT_SIZE', 'PHP_FLOAT_DIG', - 'PHP_FLOAT_EPSILON', 'PHP_FLOAT_MIN', 'PHP_FLOAT_MAX', 'DEFAULT_INCLUDE_PATH', 'PEAR_INSTALL_DIR', - 'PEAR_EXTENSION_DIR', 'PHP_EXTENSION_DIR', 'PHP_PREFIX', 'PHP_BINDIR', 
'PHP_BINARY', 'PHP_MANDIR', - 'PHP_LIBDIR', 'PHP_DATADIR', 'PHP_SYSCONFDIR', 'PHP_LOCALSTATEDIR', 'PHP_CONFIG_FILE_PATH', - 'PHP_CONFIG_FILE_SCAN_DIR', 'PHP_SHLIB_SUFFIX', 'PHP_FD_SETSIZE', 'E_ERROR', 'E_WARNING', - 'E_PARSE', 'E_NOTICE', 'E_CORE_ERROR', 'E_CORE_WARNING', 'E_COMPILE_ERROR', 'E_USER_ERROR', - 'E_USER_WARNING', 'E_USER_NOTICE', 'E_DEPRECATED', 'E_DEPRECATED', 'E_USER_DEPRECATED', 'E_ALL', - 'E_STRICT', '__COMPILER_HALT_OFFSET__' -}) - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/pico8.lua b/share/vis/lexers/pico8.lua @@ -1,35 +0,0 @@ --- Copyright 2016-2024 Alejandro Baez (https://keybase.io/baez). See LICENSE. --- PICO-8 lexer. --- http://www.lexaloffle.com/pico-8.php - -local lexer = lexer -local word_match = lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(...) - --- Keywords -lex:add_rule('keyword', - lex:tag(lexer.KEYWORD, lexer.word_match('__gff__ __map__ __sfx__ __music__'))) - --- Identifiers -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) - --- Comments -lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.to_eol('//', true))) - --- Numbers -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.integer)) - --- Operators -lex:add_rule('operator', lex:tag(lexer.OPERATOR, '_')) - --- Embed Lua into PICO-8. -local lua = lexer.load('lua') -local lua_start_rule = lex:tag(lexer.KEYWORD, word_match('__lua__')) -local lua_end_rule = lex:tag(lexer.KEYWORD, word_match('__gfx__')) -lex:embed(lua, lua_start_rule, lua_end_rule) - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/pike.lua b/share/vis/lexers/pike.lua @@ -1,54 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Pike LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('pike') - --- Whitespace. 
-lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'break', 'case', 'catch', 'continue', 'default', 'do', 'else', 'for', 'foreach', 'gauge', 'if', - 'lambda', 'return', 'sscanf', 'switch', 'while', 'import', 'inherit', - -- Type modifiers. - 'constant', 'extern', 'final', 'inline', 'local', 'nomask', 'optional', 'private', 'protected', - 'public', 'static', 'variant' -})) - --- Types. -lex:add_rule('type', token(lexer.TYPE, word_match( - 'array class float function int mapping mixed multiset object program string void'))) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local sq_str = lexer.range("'", true) -local dq_str = P('#')^-1 * lexer.range('"', true) -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) - --- Comments. -local line_comment = lexer.to_eol('//', true) -local block_comment = lexer.range('/*', '*/', false, false, true) -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number * S('lLdDfF')^-1)) - --- Preprocessors. -lex:add_rule('preprocessor', token(lexer.PREPROCESSOR, lexer.to_eol(lexer.starts_line('#')))) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('<>=!+-/*%&|^~@`.,:;()[]{}'))) - --- Fold points. -lex:add_fold_point(lexer.OPERATOR, '{', '}') -lex:add_fold_point(lexer.COMMENT, '/*', '*/') - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/pkgbuild.lua b/share/vis/lexers/pkgbuild.lua @@ -1,79 +0,0 @@ --- Copyright 2006-2024 gwash. See LICENSE. --- Archlinux PKGBUILD LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('pkgbuild') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Comments. 
-lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#'))) - --- Strings. -local sq_str = lexer.range("'", false, false) -local dq_str = lexer.range('"') -local ex_str = lexer.range('`') -local heredoc = '<<' * P(function(input, index) - local s, e, _, delimiter = input:find('(["\']?)([%a_][%w_]*)%1[\n\r\f;]+', index) - if s == index and delimiter then - e = select(2, input:find('[\n\r\f]+' .. delimiter, e)) - return e and e + 1 or #input + 1 - end -end) -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str + ex_str + heredoc)) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'patch', 'cd', 'make', 'patch', 'mkdir', 'cp', 'sed', 'install', 'rm', 'if', 'then', 'elif', - 'else', 'fi', 'case', 'in', 'esac', 'while', 'for', 'do', 'done', 'continue', 'local', 'return', - 'git', 'svn', 'co', 'clone', 'gconf-merge-schema', 'msg', 'echo', 'ln', - -- Operators. - '-a', '-b', '-c', '-d', '-e', '-f', '-g', '-h', '-k', '-p', '-r', '-s', '-t', '-u', '-w', '-x', - '-O', '-G', '-L', '-S', '-N', '-nt', '-ot', '-ef', '-o', '-z', '-n', '-eq', '-ne', '-lt', '-le', - '-gt', '-ge', '-Np', '-i' -})) - --- Functions. -lex:add_rule('function', - token(lexer.FUNCTION, word_match('build check package pkgver prepare') * '()')) - --- Constants. -lex:add_rule('constant', token(lexer.CONSTANT, word_match{ - -- We do *not* list pkgver srcdir and startdir here. - -- These are defined by makepkg but user should not alter them. - 'arch', 'backup', 'changelog', 'checkdepends', 'conflicts', 'depends', 'epoch', 'groups', - 'install', 'license', 'makedepends', 'md5sums', 'noextract', 'optdepends', 'options', 'pkgbase', - 'pkgdesc', 'pkgname', 'pkgrel', 'pkgver', 'provides', 'replaces', 'sha1sums', 'sha256sums', - 'sha384sums', 'sha512sums', 'source', 'url', 'validpgpkeys' -})) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Variables. 
-local symbol = S('!#?*@$') -local parens = lexer.range('(', ')', true) -local brackets = lexer.range('[', ']', true) -local braces = lexer.range('{', '}', true) -local backticks = lexer.range('`', true, false) -local number = lexer.dec_num -lex:add_rule('variable', token(lexer.VARIABLE, '$' * - (symbol + parens + brackets + braces + backticks + number + lexer.word))) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('=!<>+-/*^~.,:;?()[]{}'))) - --- Fold points. -lex:add_fold_point(lexer.OPERATOR, '(', ')') -lex:add_fold_point(lexer.OPERATOR, '{', '}') - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/pony.lua b/share/vis/lexers/pony.lua @@ -1,96 +0,0 @@ --- Copyright 2017-2024 Murray Calavera. See LICENSE. --- Pony LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('pony') - --- Whitespace. -local ws = token(lexer.WHITESPACE, lexer.space^1) -lex:add_rule('whitespace', ws) - --- Capabilities. -local capability = token(lexer.LABEL, word_match('box iso ref tag trn val')) -lex:add_rule('capability', capability) - --- Annotations. -local annotation = token(lexer.PREPROCESSOR, lexer.range('\\', false, false)) -lex:add_rule('annotation', annotation) - --- Functions. --- Highlight functions with syntax sugar at declaration. 
-lex:add_rule('function', - token(lexer.KEYWORD, word_match('fun new be')) * ws^-1 * annotation^-1 * ws^-1 * capability^-1 * - ws^-1 * token(lexer.FUNCTION, word_match{ - 'create', 'dispose', '_final', 'apply', 'update', 'add', 'sub', 'mul', 'div', 'mod', - 'add_unsafe', 'sub_unsafe', 'mul_unsafe', 'div_unsafe', 'mod_unsafe', 'shl', 'shr', - 'shl_unsafe', 'shr_unsafe', 'op_and', 'op_or', 'op_xor', 'eq', 'ne', 'lt', 'le', 'ge', 'gt', - 'eq_unsafe', 'ne_unsafe', 'lt_unsafe', 'le_unsafe', 'ge_unsafe', 'gt_unsafe', 'neg', - 'neg_unsafe', 'op_not', -- - 'has_next', 'next', -- - '_serialise_space', '_serialise', '_deserialise' - })) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'actor', 'as', 'be', 'break', 'class', 'compile_error', 'compile_intrinsic', 'continue', - 'consume', 'do', 'else', 'elseif', 'embed', 'end', 'error', 'for', 'fun', 'if', 'ifdef', 'iftype', - 'in', 'interface', 'is', 'isnt', 'lambda', 'let', 'match', 'new', 'object', 'primitive', - 'recover', 'repeat', 'return', 'struct', 'then', 'this', 'trait', 'try', 'type', 'until', 'use', - 'var', 'where', 'while', 'with' -})) - --- Constants. -lex:add_rule('constant', token(lexer.CONSTANT, word_match('true false'))) - --- Operators. -local ops = { - ['+'] = true, ['-'] = true, ['*'] = true, ['/'] = true, ['%'] = true, ['+~'] = true, - ['-~'] = true, ['*~'] = true, ['/~'] = true, ['%~'] = true, ['<<'] = true, ['>>'] = true, - ['<<~'] = true, ['>>~'] = true, ['=='] = true, ['!='] = true, ['<'] = true, ['<='] = true, - ['>='] = true, ['>'] = true, ['==~'] = true, ['!=~'] = true, ['<~'] = true, ['<=~'] = true, - ['>=~'] = true, ['>~'] = true -} -lex:add_rule('operator', token(lexer.OPERATOR, word_match('and or xor not addressof digestof') + - lpeg.Cmt(S('+-*/%<>=!~')^1, function(input, index, op) return ops[op] and index or nil end))) - --- Identifiers. 
-local id_suffix = (lexer.alnum + "'" + '_')^0 -lex:add_rule('type', token(lexer.TYPE, P('_')^-1 * lexer.upper * id_suffix)) -lex:add_rule('identifier', token(lexer.IDENTIFIER, P('_')^-1 * lexer.lower * id_suffix)) -lex:add_rule('lookup', token(lexer.IDENTIFIER, '_' * lexer.digit^1)) - --- Strings. -local sq_str = lexer.range("'") -local dq_str = lexer.range('"') -local tq_str = lexer.range('"""') -lex:add_rule('string', token(lexer.STRING, sq_str + tq_str + dq_str)) - --- Numbers. -local function num(digit) return digit * (digit^0 * '_')^0 * digit^1 + digit end -local int = num(lexer.digit) -local frac = '.' * int -local exp = S('eE') * (P('-') + '+')^-1 * int -local hex = '0x' * num(lexer.xdigit) -local bin = '0b' * num(S('01')) -local float = int * frac^-1 * exp^-1 -lex:add_rule('number', token(lexer.NUMBER, hex + bin + float)) - --- Comments. -local line_comment = lexer.to_eol('//') -local block_comment = lexer.range('/*', '*/', false, false, true) -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Punctuation. --- There is no suitable token name for this, change this if ever one is added. -lex:add_rule('punctuation', - token(lexer.OPERATOR, P('=>') + '.>' + '<:' + '->' + S('=.,:;()[]{}!?~^&|_@'))) - --- Qualifiers. -lex:add_rule('qualifier', token(lexer.LABEL, '#' * word_match('read send share any alias'))) - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/powershell.lua b/share/vis/lexers/powershell.lua @@ -1,62 +0,0 @@ --- Copyright 2015-2024 Mitchell. See LICENSE. --- PowerShell LPeg lexer. --- Contributed by Jeff Stone. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('powershell') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Comments. -lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#'))) - --- Keywords. 
-lex:add_rule('keyword', token(lexer.KEYWORD, word_match({ - 'Begin', 'Break', 'Continue', 'Do', 'Else', 'End', 'Exit', 'For', 'ForEach', 'ForEach-Object', - 'Get-Date', 'Get-Random', 'If', 'Param', 'Pause', 'Powershell', 'Process', 'Read-Host', 'Return', - 'Switch', 'While', 'Write-Host' -}, true))) - --- Comparison Operators. -lex:add_rule('comparison', token(lexer.KEYWORD, '-' * word_match({ - 'and', 'as', 'band', 'bor', 'contains', 'eq', 'ge', 'gt', 'is', 'isnot', 'le', 'like', 'lt', - 'match', 'ne', 'nomatch', 'not', 'notcontains', 'notlike', 'or', 'replace' -}, true))) - --- Parameters. -lex:add_rule('parameter', token(lexer.KEYWORD, '-' * - word_match('Confirm Debug ErrorAction ErrorVariable OutBuffer OutVariable Verbose WhatIf', true))) - --- Properties. -lex:add_rule('property', token(lexer.KEYWORD, '.' * - word_match('day dayofweek dayofyear hour millisecond minute month second timeofday year', true))) - --- Types. -lex:add_rule('type', token(lexer.KEYWORD, '[' * word_match({ - 'array', 'boolean', 'byte', 'char', 'datetime', 'decimal', 'double', 'hashtable', 'int', 'long', - 'single', 'string', 'xml' -}, true) * ']')) - --- Variables. -lex:add_rule('variable', token(lexer.VARIABLE, - '$' * (lexer.digit^1 + lexer.word + lexer.range('{', '}', true)))) - --- Strings. -lex:add_rule('string', token(lexer.STRING, lexer.range('"', true))) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('=!<>+-/*^&|~.,:;?()[]{}%`'))) - --- Fold points. -lex:add_fold_point(lexer.OPERATOR, '{', '}') - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/prolog.lua b/share/vis/lexers/prolog.lua @@ -1,354 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Lexer enhanced to conform to the realities of Prologs on the ground by --- Michael T. Richter. Copyright is explicitly assigned back to Mitchell. --- Prolog LPeg lexer. 
- ---[[ - Prologs are notoriously fractious with many barely-compatible dialects. To - make Textadept more useful for these cases, directives and keywords are - grouped by dialect. Selecting a dialect is a simple matter of setting the - buffer/lexer property "prolog.dialect" in init.lua. Dialects currently in - the lexer file are: - - 'iso': the generic ISO standard without modules. - - 'gprolog': GNU Prolog. - - 'swipl': SWI-Prolog. - - The default dialect is 'iso' if none is defined. (You probably don't want - this.) - - Note that there will be undoubtedly duplicated entries in various categories - because of the flexibility of Prolog and the automated tools used to gather - most information. This is not an issue, however, because directives override - arity-0 predicates which override arity-1+ predicates which override bifs - which override operators. -]] - -local lexer = require('lexer') - -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('prolog') - -local dialect = lexer.property['prolog.dialect'] -if dialect ~= 'gprolog' and dialog ~= 'swipl' then dialect = 'iso' end - --- Directives. -local directives = {} -directives.iso = [[ - -- Gathered by inspection of GNU Prolog documentation. - dynamic multifile discontiguous include ensure_loaded op char_conversion - set_prolog_flag initialization -]] -directives.gprolog = directives.iso .. [[ - -- Gathered by inspection of GNU Prolog documentation. - public ensure_linked built_in if else endif elif foreign -]] -directives.swipl = directives.iso .. [[ - -- Gathered by liberal use of grep on the SWI source and libraries. 
- coinductive current_predicate_option expects_dialect http_handler listen - module multifile use_foreign_library use_module dynamic http_handler - initialization json_object multifile record use_module abolish - arithmetic_function asserta at_halt begin_tests chr_constraint chr_option - chr_type clear_cache constraints consult create_prolog_flag - current_prolog_flag debug discontiguous dynamic elif else encoding end_tests - endif expects_dialect export forall format format_predicate html_meta - html_resource http_handler http_request_expansion if include - init_color_term_flag init_options initialization json_object - lazy_list_iterator license listen load_extensions load_files - load_foreign_library meta_predicate mode module module_transparent multifile - noprofile op pce_begin_class pce_end_class pce_global pce_group persistent - pop_operators pred predicate_options print_message prolog_load_context prompt - public push_hprolog_library push_ifprolog_library, push_operators - push_sicstus_library push_xsb_library push_yap_library, quasi_quotation_syntax - record redefine_system_predicate reexport register_iri_scheme residual_goals - retract set_module set_prolog_flag set_script_dir set_test_options setenv - setting style_check table thread_local thread_local message type - use_class_template use_foreign_library use_module utter volatile build_schema - chr_constraint chr_option chr_type cql_option determinate discontiguous - dynamic endif format_predicate if initialization license meta_predicate mode - module multifile op reexport thread_local use_module volatile -]] -lex:add_rule('directive', - token(lexer.WHITESPACE, lexer.starts_line(S(' \t'))^0) * - token(lexer.OPERATOR, ':-') * - token(lexer.WHITESPACE, S(' \t')^0) * - token(lexer.PREPROCESSOR, word_match(directives[dialect]))) - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. 
-local zero_arity_keywords = {} -zero_arity_keywords.iso = [[ - -- eyeballed from GNU Prolog documentation - true fail pi float_overflow int_overflow int_underflow undefined asserta - assertz retract retractall clause abolish current_predicate findall bagof - setof at_end_of_stream flush_output nl halt false -]] -zero_arity_keywords.gprolog = [[ - -- Collected automatically via current_predicate/1 with some cleanup. - at_end_of_stream wam_debug listing flush_output fail told false top_level - shell trace debugging seen repeat abort nl statistics halt notrace randomize - true nospyall nodebug debug stop break -]] -zero_arity_keywords.swipl = [[ - -- Collected automatically via current_predicate/1 with some cleanup. - noprotocol compiling ttyflush true abort license known_licenses - print_toplevel_variables initialize mutex_statistics break reset_profiler - win_has_menu version prolog abolish_nonincremental_tables false halt undefined - abolish_all_tables reload_library_index garbage_collect repeat nospyall - tracing trace notrace trim_stacks garbage_collect_clauses - garbage_collect_atoms mutex_unlock_all seen told nl debugging fail - at_end_of_stream attach_packs flush_output true -]] -local one_plus_arity_keywords = {} -one_plus_arity_keywords.iso = [[ - -- eyeballed from GNU Prolog documentation - call catch throw var nonvar atom integer float number atomic compound - callable ground unify_with_occurs_check compare functor arg copy_term - term_variables subsumes_term acyclic_term predicate_property current_input - current_output set_input set_output open close current_stream stream_property - set_stream_position get_char get_code is peek_char peek_code put_char putcode - get_byte peek_byte read_term read write_term write writeq write_canonical - char_conversion current_char_conversion call once repeat atom_length - atom_concat sub_atom char_code atom_chars atom_codes -]] -one_plus_arity_keywords.gprolog = [[ - -- Collected automatically via current_predicate/1 
with some cleanup. - abolish absolute_file_name acyclic_term add_linedit_completion - add_stream_alias add_stream_mirror append architecture arg argument_counter - argument_list argument_value asserta assertz at_end_of_stream atom atom_chars - atom_codes atom_concat atom_length atom_property atomic bagof between - bind_variables call call_det call_with_args callable catch change_directory - char_code char_conversion character_count clause close close_input_atom_stream - close_input_chars_stream close_input_codes_stream close_output_atom_stream - close_output_chars_stream close_output_codes_stream compare compound consult - copy_term cpu_time create_pipe current_alias current_atom current_bip_name - current_char_conversion current_input current_mirror current_op current_output - current_predicate current_prolog_flag current_stream date_time - decompose_file_name delete delete_directory delete_file directory_files - display display_to_atom display_to_chars display_to_codes environ exec - expand_term fd_all_different fd_at_least_one fd_at_most_one fd_atleast - fd_atmost fd_cardinality fd_dom fd_domain fd_domain_bool fd_element - fd_element_var fd_exactly fd_has_extra_cstr fd_has_vector fd_labeling - fd_labelingff fd_max fd_max_integer fd_maximize fd_min fd_minimize - fd_not_prime fd_only_one fd_prime fd_reified_in fd_relation fd_relationc - fd_set_vector_max fd_size fd_use_vector fd_var fd_vector_max file_exists - file_permission file_property find_linedit_completion findall flatten float - flush_output for forall fork_prolog format format_to_atom format_to_chars - format_to_codes functor g_array_size g_assign g_assignb g_dec g_deco g_inc - g_inco g_link g_read g_reset_bit g_set_bit g_test_reset_bit g_test_set_bit - generic_var get get_byte get_char get_code get_key get_key_no_echo - get_linedit_prompt get_print_stream get_seed get0 ground halt host_name - hostname_address integer is_absolute_file_name is_list is_relative_file_name - keysort last 
last_read_start_line_column leash length line_count line_position - list list_or_partial_list listing load lower_upper make_directory maplist - max_list member memberchk min_list msort name name_query_vars - name_singleton_vars new_atom nl non_fd_var non_generic_var nonvar nospy nth - nth0 nth1 number number_atom number_chars number_codes numbervars once op open - open_input_atom_stream open_input_chars_stream open_input_codes_stream - open_output_atom_stream open_output_chars_stream open_output_codes_stream - os_version partial_list peek_byte peek_char peek_code permutation phrase popen - portray_clause predicate_property prefix print print_to_atom print_to_chars - print_to_codes prolog_file_name prolog_pid put put_byte put_char put_code - random read read_atom read_from_atom read_from_chars read_from_codes - read_integer read_number read_pl_state_file read_term read_term_from_atom - read_term_from_chars read_term_from_codes read_token read_token_from_atom - read_token_from_chars read_token_from_codes real_time remove_stream_mirror - rename_file retract retractall reverse see seeing seek select send_signal - set_bip_name set_input set_linedit_prompt set_output set_prolog_flag set_seed - set_stream_buffering set_stream_eof_action set_stream_line_column - set_stream_position set_stream_type setarg setof shell skip sleep socket - socket_accept socket_bind socket_close socket_connect socket_listen sort spawn - spy spypoint_condition sr_change_options sr_close sr_current_descriptor - sr_error_from_exception sr_get_error_counters sr_get_file_name - sr_get_include_list sr_get_include_stream_list sr_get_module sr_get_position - sr_get_size_counters sr_get_stream sr_new_pass sr_open sr_read_term - sr_set_error_counters sr_write_error sr_write_message statistics - stream_line_column stream_position stream_property sub_atom sublist - subsumes_term subtract succ suffix sum_list syntax_error_info system - system_time tab tell telling temporary_file temporary_name term_hash 
term_ref - term_variables throw unget_byte unget_char unget_code unify_with_occurs_check - unlink user_time var wait working_directory write write_canonical - write_canonical_to_atom write_canonical_to_chars write_canonical_to_codes - write_pl_state_file write_term write_term_to_atom write_term_to_chars - write_term_to_codes write_to_atom write_to_chars write_to_codes writeq - writeq_to_atom writeq_to_chars writeq_to_codes -]] -one_plus_arity_keywords.swipl = [[ - -- Collected automatically via current_predicate/1 with some cleanup. - prolog_exception_hook term_expansion expand_answer message_property resource - help goal_expansion file_search_path prolog_clause_name thread_message_hook - prolog_file_type goal_expansion prolog_predicate_name exception writeln - term_expansion expand_query url_path message_hook library_directory resource - portray prolog_load_file prolog_list_goal ansi_format source_file_property - asserta call_dcg source_location wait_for_input locale_destroy set_locale - read_pending_codes thread_join open_dde_conversation win_folder protocol - copy_stream_data current_locale read_pending_chars win_add_dll_directory - protocola thread_property win_shell goal_expansion phrase gc_file_search_cache - dcg_translate_rule protocolling win_registry_get_value term_expansion - dcg_translate_rule assert copy_stream_data once bagof prompt1 tnot assertz - phrase sort ignore thread_statistics assert locale_create - win_remove_dll_directory term_expansion read_term asserta clause assertz - predicate_option_type is_thread get_single_char set_prolog_IO expand_goal - ground message_queue_create locale_property close_dde_conversation - goal_expansion clause zipper_open_new_file_in_zip term_to_atom with_output_to - module expand_term redefine_system_predicate thread_detach dde_execute - term_string read_clause compile_predicates predicate_option_mode noprofile - read_term_from_atom cancel_halt non_terminal atom_to_term line_position frozen - dde_request findnsols 
prolog_skip_level prolog_current_choice get get_attrs - license var_property nb_delete unwrap_predicate zipper_open_current put_attrs - dde_poke set_stream read_term zip_file_info_ memberchk seek expand_goal get0 - call var integer attach_packs byte_count zipper_goto findnsols character_count - expand_term get_flag atom line_count set_flag atomic tab create_prolog_flag - copy_term import_module verbose_expansion b_setval duplicate_term - prolog_load_context attach_packs prolog_listen b_getval prolog_frame_attribute - prompt copy_term_nat nb_linkval tab prolog_choice_attribute set_prolog_flag - nb_getval prolog_skip_frame del_attrs skip sort license open_null_stream - nb_current prolog_listen msort is_list is_stream get keysort win_shell - prolog_unlisten notrace get0 add_import_module wildcard_match profiler - delete_directory trie_gen_compiled expand_file_name file_name_extension - delete_file writeq win_module_file call write get_dict win_exec - directory_files trie_insert make_directory engine_next_reified del_dict sleep - getenv call_continuation trie_gen_compiled prolog_to_os_filename - is_absolute_file_name trie_insert engine_fetch engine_create strip_module call - delete_import_module write_canonical compile_aux_clauses setenv callable - is_engine write_term call set_module call halt catch findall trie_gen - trie_destroy rename_file shift unify_with_occurs_check engine_yield forall - unsetenv trie_term file_directory_name version current_engine file_base_name - engine_self import trie_gen trie_lookup write_term trie_update freeze - engine_post export put_dict same_file trie_new call trie_delete start_tabling - is_trie residual_goals thread_peek_message thread_get_message dict_pairs - set_end_of_stream call_cleanup current_predicate arg dict_create - thread_setconcurrency read_link is_dict at_halt tmp_file not put_dict - setup_call_cleanup abolish_nonincremental_tables time_file - start_subsumptive_tabling char_conversion compound sub_atom access_file call - 
call_cleanup abolish nonvar current_functor abolish_module_tables - subsumes_term engine_post call retractall compare engine_next prolog_cut_to - size_file current_char_conversion predicate_property nonground engine_destroy - message_queue_property format abolish qcompile thread_send_message stream_pair - message_queue_create same_term number select_dict catch_with_backtrace - thread_get_message thread_send_message win_insert_menu_item message_queue_set - <meta-call> exists_directory copy_term nb_set_dict prolog_nodebug functor - current_table cyclic_term untable read exists_file thread_peek_message - b_set_dict engine_create prolog_debug acyclic_term writeln get_dict - compound_name_arity abolish_table_subgoals start_tabling trie_insert - nb_link_dict message_queue_destroy thread_get_message is_dict nth_clause - absolute_file_name term_singletons make_library_index set_output retract - context_module current_trie term_attvars load_files get_char ensure_loaded - current_input prolog_current_frame make_library_index term_variables - compound_name_arguments reexport autoload_path get_code set_input flag - thread_create use_module findall thread_join call_with_inference_limit - var_number dwim_match consult peek_code close nospy print_message - term_variables trie_property read_history get_byte default_module get_byte - print on_signal get_char call_residue_vars dwim_match atom_prefix unifiable - use_module numbervars load_files get_code open format_time - copy_predicate_clauses reexport leash current_output sub_string close - format_time atom_codes stamp_date_time require name open_shared_object open - atom_chars current_predicate format tmp_file_stream term_hash rational - source_file reset atom_concat atom_length current_prolog_flag rational - dwim_predicate date_time_stamp stream_property string_upper setlocale format - writeln current_module normalize_space writeq current_flag shell upcase_atom - qcompile char_code atomic_concat read string_lower write 
term_string - numbervars working_directory number_codes set_prolog_gc_thread downcase_atom - format_predicate number_string open_shared_object style_check char_type print - stream_position_data code_type write_canonical number_chars length - current_arithmetic_function atomic_list_concat del_attr read_string zip_unlock - open_resource string_length zip_lock see erase open_resource setof - atomic_list_concat current_format_predicate current_resource with_mutex - atomics_to_string term_hash absolute_file_name deterministic current_atom - thread_create collation_key get_attr variant_hash string_concat atom_number - put put_attr variant_sha1 thread_signal mutex_unlock tty_size current_key - mutex_create fill_buffer expand_file_search_path blob shell - register_iri_scheme skip fast_read divmod mutex_trylock thread_self put - mutex_property fast_write mutex_lock current_blob sub_atom_icasechk - mutex_destroy fast_term_serialized split_string set_stream_position recorda - telling setarg thread_exit zip_open_stream instance mutex_create statistics - append get_time zip_close_ tell atomics_to_string clause_property attvar - zip_clone seeing nth_integer_root_and_remainder recorda put_byte string_chars - spy recordz print_message_lines current_op put_char nl source_file - string_codes op setup_call_catcher_cleanup nb_linkarg recorded put_code - peek_byte apply module_property atom_string nb_setarg succ recordz - message_to_string close_shared_object peek_char between recorded visible plus - call_shared_object_function peek_code peek_byte set_prolog_stack float throw - at_end_of_stream get_string_code call_with_depth_limit random_property - flush_output peek_string open_xterm peek_char open_string string_code - set_random prolog_stack_property put_char unload_file nb_setval put_byte - current_signal put_code write_length string read_string text_to_string -]] -lex:add_rule('keyword', token(lexer.KEYWORD, - word_match(zero_arity_keywords[dialect]) + - 
word_match(one_plus_arity_keywords[dialect]) * #P('('))) - --- BIFs. -local bifs = {} -bifs.iso = [[ - -- eyeballed from GNU Prolog documentation - xor abs sign min max sqrt tan atan atan2 cos acos sin asin exp log float - ceiling floor round truncate float_fractional_part float_integer_part rem div - mod -]] -bifs.gprolog = bifs.iso .. [[ - -- eyeballed from GNU Prolog documentation - inc dec lsb msb popcount gcd tanh atanh cosh acosh sinh asinh log10 rnd -]] -bifs.swipl = [[ - -- Collected automatically via current_arithmetic_function/1 with some - -- cleanup. - abs acos acosh asinh atan atan atanh atan2 ceil ceiling copysign cos cosh - cputime div getbit e epsilon erf erfc eval exp float float_fractional_part - float_integer_part floor gcd inf integer lgamma log log10 lsb max min mod msb - nan pi popcount powm random random_float rational rationalize rdiv rem round - sign sin sinh sqrt tan tanh truncate xor -]] -lex:add_rule('bif', token(lexer.FUNCTION, word_match(bifs[dialect]) * #P('('))) - --- Numbers. -local decimal_group = S('+-')^-1 * (lexer.digit + '_')^1 -local binary_number = '0b' * (S('01') + '_')^1 -local character_code = '0\'' * S('\\')^-1 * lexer.graph -local decimal_number = decimal_group * ('.' * decimal_group)^-1 * - ('e' * decimal_group)^-1 -local hexadecimal_number = '0x' * (lexer.xdigit + '_')^1 -local octal_number = '0o' * (S('01234567') + '_')^1 -lex:add_rule('number', token(lexer.NUMBER, character_code + binary_number + - hexadecimal_number + octal_number + decimal_number)) - --- Comments. -local line_comment = lexer.to_eol('%') -local block_comment = lexer.range('/*', '*/') -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Operators. -local operators = {} -operators.iso = [[ - -- Collected automatically via current_op/3 with some cleanup and comparison - -- to docs. 
- rem div mod is -]] -operators.gprolog = operators.iso -- GNU Prolog's textual operators are the same -operators.swipl = [[ - -- Collected automatically via current_op/3 with some cleanup. - is as volatile mod discontiguous div rdiv meta_predicate public xor - module_transparent multifile table dynamic thread_initialization thread_local - initialization rem -]] -lex:add_rule('operator', token(lexer.OPERATOR, word_match(operators[dialect]) + - S('-!+\\|=:;&<>()[]{}/*^@?.'))) - --- Variables. -lex:add_rule('variable', token(lexer.VARIABLE, (lexer.upper + '_') * - (lexer.word^1 + lexer.digit^1 + P('_')^1)^0)) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"', true) -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) - -lexer.property['scintillua.comment'] = '%' - -return lex diff --git a/share/vis/lexers/props.lua b/share/vis/lexers/props.lua @@ -1,36 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Props LPeg lexer. - -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(..., {lex_by_line = true}) - --- Identifiers. -lex:add_rule('identifier', - lex:tag(lexer.IDENTIFIER, (lexer.alpha + S('.-_')) * (lexer.alnum + S('.-_')^0))) - --- Colors. -local xdigit = lexer.xdigit -lex:add_rule('color', - lex:tag(lexer.NUMBER, '#' * xdigit * xdigit * xdigit * xdigit * xdigit * xdigit)) - --- Comments. -lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.to_eol('#'))) - --- Equals. -lex:add_rule('equals', lex:tag(lexer.OPERATOR, '=')) - --- Strings. -local sq_str = lexer.range("'") -local dq_str = lexer.range('"') -lex:add_rule('string', lex:tag(lexer.STRING, sq_str + dq_str)) - --- Variables. 
-lex:add_rule('variable', - lex:tag(lexer.OPERATOR, '$(') * lex:tag(lexer.VARIABLE, (lexer.nonnewline - lexer.space - ')')^0) * - lex:tag(lexer.OPERATOR, ')')) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/protobuf.lua b/share/vis/lexers/protobuf.lua @@ -1,48 +0,0 @@ --- Copyright 2016-2024 David B. Lamkins <david@lamkins.net>. See LICENSE. --- Protocol Buffer IDL LPeg lexer. --- <https://developers.google.com/protocol-buffers/> - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('protobuf') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'contained', 'syntax', 'import', 'option', 'package', 'message', 'group', 'oneof', 'optional', - 'required', 'repeated', 'default', 'extend', 'extensions', 'to', 'max', 'reserved', 'service', - 'rpc', 'returns' -})) - --- Types. -lex:add_rule('type', token(lexer.TYPE, word_match{ - 'int32', 'int64', 'uint32', 'uint64', 'sint32', 'sint64', 'fixed32', 'fixed64', 'sfixed32', - 'sfixed64', 'float', 'double', 'bool', 'string', 'bytes', 'enum', 'true', 'false' -})) - --- Strings. -local sq_str = P('L')^-1 * lexer.range("'", true) -local dq_str = P('L')^-1 * lexer.range('"', true) -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Comments. -local line_comment = lexer.to_eol('//', true) -local block_comment = lexer.range('/*', '*/') -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Operators. 
-lex:add_rule('operator', token(lexer.OPERATOR, S('<>=|;,.()[]{}'))) - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/ps.lua b/share/vis/lexers/ps.lua @@ -1,49 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Postscript LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('ps') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'pop', 'exch', 'dup', 'copy', 'roll', 'clear', 'count', 'mark', 'cleartomark', 'counttomark', - 'exec', 'if', 'ifelse', 'for', 'repeat', 'loop', 'exit', 'stop', 'stopped', 'countexecstack', - 'execstack', 'quit', 'start', 'true', 'false', 'NULL' -})) - --- Functions. -lex:add_rule('function', token(lexer.FUNCTION, word_match{ - 'add', 'div', 'idiv', 'mod', 'mul', 'sub', 'abs', 'ned', 'ceiling', 'floor', 'round', 'truncate', - 'sqrt', 'atan', 'cos', 'sin', 'exp', 'ln', 'log', 'rand', 'srand', 'rrand' -})) - --- Identifiers. -local word = (lexer.alpha + '-') * (lexer.alnum + '-')^0 -lex:add_rule('identifier', token(lexer.IDENTIFIER, word)) - --- Strings. -local arrow_string = lexer.range('<', '>') -local nested_string = lexer.range('(', ')', false, false, true) -lex:add_rule('string', token(lexer.STRING, arrow_string + nested_string)) - --- Comments. -lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('%'))) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Labels. -lex:add_rule('label', token(lexer.LABEL, '/' * word)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('[]{}'))) - -lexer.property['scintillua.comment'] = '%' - -return lex diff --git a/share/vis/lexers/pure.lua b/share/vis/lexers/pure.lua @@ -1,50 +0,0 @@ --- Copyright 2015-2024 David B. Lamkins <david@lamkins.net>. See LICENSE. 
--- pure LPeg lexer, see http://purelang.bitbucket.org/ - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('pure') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'namespace', 'with', 'end', 'using', 'interface', 'extern', 'let', 'const', 'def', 'type', - 'public', 'private', 'nonfix', 'outfix', 'infix', 'infixl', 'infixr', 'prefix', 'postfix', 'if', - 'otherwise', 'when', 'case', 'of', 'then', 'else' -})) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. -lex:add_rule('string', token(lexer.STRING, lexer.range('"', true))) - --- Comments. -local line_comment = lexer.to_eol('//') -local block_comment = lexer.range('/*', '*/') -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -local bin = '0' * S('Bb') * S('01')^1 -local hex = lexer.hex_num -local dec = lexer.dec_num -local int = (bin + hex + dec) * P('L')^-1 -local rad = P('.') - '..' -local exp = (S('Ee') * S('+-')^-1 * int)^-1 -local flt = int * (rad * dec)^-1 * exp + int^-1 * rad * dec * exp -lex:add_rule('number', token(lexer.NUMBER, flt + int)) - --- Pragmas. -local hashbang = lexer.starts_line('#!') * (lexer.nonnewline - '//')^0 -lex:add_rule('pragma', token(lexer.PREPROCESSOR, hashbang)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, '..' + S('+-/*%<>~!=^&|?~:;,.()[]{}@#$`\\\''))) - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/python.lua b/share/vis/lexers/python.lua @@ -1,128 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Python LPeg lexer. - -local lexer = lexer -local token, word_match = lexer.token, lexer.word_match -local P, S, B = lpeg.P, lpeg.S, lpeg.B - -local lex = lexer.new(..., {fold_by_indentation = true}) - --- Classes. 
-lex:add_rule('classdef', lex:tag(lexer.KEYWORD, 'class') * lex:get_rule('whitespace') * - lex:tag(lexer.CLASS, lexer.word)) - --- Keywords. -lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD)) + - lex:tag(lexer.KEYWORD .. '.soft', lex:word_match(lexer.KEYWORD .. '.soft'))) - --- Functions. -local builtin_func = -B('.') * - lex:tag(lexer.FUNCTION_BUILTIN, lex:word_match(lexer.FUNCTION_BUILTIN)) -local special_func = lex:tag(lexer.FUNCTION_BUILTIN .. '.special', - lex:word_match(lexer.FUNCTION_BUILTIN .. '.special')) -local func = lex:tag(lexer.FUNCTION, lexer.word) -local method = B('.') * lex:tag(lexer.FUNCTION_METHOD, lexer.word) -lex:add_rule('function', (builtin_func + special_func + method + func) * #(lexer.space^0 * '(')) - --- Constants. -local builtin_const = lex:tag(lexer.CONSTANT_BUILTIN, lex:word_match(lexer.CONSTANT_BUILTIN)) -local attr = lex:tag(lexer.ATTRIBUTE, B('.') * lex:word_match(lexer.ATTRIBUTE) + '__name__') -lex:add_rule('constant', builtin_const + attr) - --- Strings. -local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"', true) -local tq_str = lexer.range("'''") + lexer.range('"""') -lex:add_rule('string', lex:tag(lexer.STRING, (S('fFrRbBrR') * S('rRfFrRbB') + S('ruRUfFbB'))^-1 * - (tq_str + sq_str + dq_str))) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) - --- Comments. -lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.to_eol('#', true))) - --- Numbers. -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number_('_') * S('jJ')^-1)) - --- Decorators. -lex:add_rule('decorator', lex:tag(lexer.ANNOTATION, '@' * lexer.word)) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('!@%^&*()[]{}-=+/|:;.,<>~'))) - --- Word lists. 
-lex:set_word_list(lexer.KEYWORD, { - 'and', 'as', 'assert', 'async', 'await', 'break', 'class', 'continue', 'def', 'del', 'elif', - 'else', 'except', 'False', 'finally', 'for', 'from', 'global', 'if', 'import', 'in', 'is', - 'lambda', 'None', 'nonlocal', 'not', 'or', 'pass', 'raise', 'return', 'True', 'try', 'while', - 'with', 'yield' -}) - -lex:set_word_list(lexer.KEYWORD .. '.soft', '_ case match') - -lex:set_word_list(lexer.FUNCTION_BUILTIN, { - 'abs', 'aiter', 'all', 'any', 'anext', 'ascii', 'bin', 'bool', 'breakpoint', 'bytearray', 'bytes', - 'callable', 'chr', 'classmethod', 'compile', 'complex', 'delattr', 'dict', 'dir', 'divmod', - 'enumerate', 'eval', 'exec', 'filter', 'float', 'format', 'frozenset', 'getattr', 'globals', - 'hasattr', 'hash', 'help', 'hex', 'id', 'input', 'int', 'isinstance', 'issubclass', 'iter', 'len', - 'list', 'locals', 'map', 'max', 'memoryview', 'min', 'next', 'object', 'oct', 'open', 'ord', - 'pow', 'print', 'property', 'range', 'repr', 'reversed', 'round', 'set', 'setattr', 'slice', - 'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple', 'type', 'vars', 'zip', '__import__' -}) - -lex:set_word_list(lexer.FUNCTION_BUILTIN .. 
'.special', { - '__new__', '__init__', '__del__', '__repr__', '__str__', '__bytes', '__format__', '__lt__', - '__le__', '__eq__', '__ne__', '__gt__', '__ge__', '__hash__', '__bool__', -- - '__getattr__', '__getattribute__', '__setattr__', '__delattr__', '__dir__', -- - '__get__', '__set__', '__delete__', '__slots__', -- - '__init_subclass__', '__set_name__', -- - '__instancecheck__', '__subclasscheck__', -- - '__class_getitem__', -- - '__call__', -- - '__len__', '__length_hint', '__getitem__', '__setitem__', '__delitem__', '__missing__', - '__iter__', '__reversed__', '__contains__', -- - '__add__', '__sub__', '__mul__', '__matmul__', '__truediv__', '__floordiv__', '__mod__', - '__divmod__', '__pow__', '__lshift__', '__rshift__', '__and__', '__xor__', '__or__', -- - '__radd__', '__rsub__', '__rmul__', '__rmatmul__', '__rtruediv__', '__rfloordiv__', '__rmod__', - '__rdivmod__', '__rpow__', '__rlshift__', '__rrshift__', '__rand__', '__rxor__', '__ror__', -- - '__iadd__', '__isub__', '__imul__', '__imatmul__', '__itruediv__', '__ifloordiv__', '__imod__', - '__idivmod__', '__ipow__', '__ilshift__', '__irshift__', '__iand__', '__ixor__', '__ior__', -- - '__neg__', '__pos__', '__abs__', '__invert__', '__complex__', '__int__', '__float__', '__index__', - '__round__', '__trunc__', '__floor__', '__ceil__', -- - '__enter__', '__exit__', -- - '__match_args__', -- - '__await__', -- - '__aiter__', '__anext__', '__aenter__', '__aexit__' -- -}) - -lex:set_word_list(lexer.CONSTANT_BUILTIN, { - 'BaseException', 'Exception', 'Exception', 'ArithmeticError', 'BufferError', 'LookupError', -- - 'AssertionError', 'AttributeError', 'EOFError', 'FloatingPointError', 'GeneratorExit', - 'ImportError', 'ModuleNotFoundError', 'IndexError', 'KeyError', 'KeyboardInterrupt', - 'MemoryError', 'NameError', 'NotImplementedError', 'OSError', 'OverflowError', 'RecursionError', - 'ReferenceError', 'RuntimeError', 'StopIteration', 'StopAsyncIteration', 'SyntaxError', - 'IndentationError', 'TabError', 
'SystemError', 'SystemExit', 'TypeError', 'UnboundLocalError', - 'UnicodeError', 'UnicodeEncodeError', 'UnicodeDecodeError', 'UnicodeTranslateError', 'ValueError', - 'ZeroDivisionError', -- - 'EnvironmentError', 'IOError', 'WindowsError', -- - 'BlockingIOError', 'ChildProcessError', 'ConnectionError', 'BrokenPipeError', - 'ConnectionAbortedError', 'ConnectionRefusedError', 'FileExistsError', 'FileNotFoundError', - 'InterruptedError', 'IsADirectoryError', 'NotADirectoryError', 'PermissionError', - 'ProcessLookupError', 'TimeoutError', -- - 'Warning', 'UserWarning', 'DeprecationWarning', 'PendingDeprecationWarning', 'SyntaxWarning', - 'RuntimeWarning', 'FutureWarning', 'ImportWarning', 'UnicodeWarning', 'BytesWarning', - 'ResourceWarning' -}) - -lex:set_word_list(lexer.ATTRIBUTE, { - '__doc__', '__name__', '__qualname__', '__module__', '__defaults__', '__code__', '__globals__', - '__dict__', '__closure__', '__annotations__', '__kwdefaults__', -- - '__file__', '__bases__', -- - '__class__', -- - '__self__', '__func__' -- -}) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/rails.lua b/share/vis/lexers/rails.lua @@ -1,41 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Ruby on Rails LPeg lexer. - -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(..., {inherit = lexer.load('ruby')}) - --- Word lists. -lex:set_word_list(lexer.FUNCTION_BUILTIN, { - -- ActionPack. - 'before_filter', 'skip_before_filter', 'skip_after_filter', 'after_filter', 'around_filter', - 'filter', 'filter_parameter_logging', 'layout', 'require_dependency', 'render', 'render_action', - 'render_text', 'render_file', 'render_template', 'render_nothing', 'render_component', - 'render_without_layout', 'rescue_from', 'url_for', 'redirect_to', 'redirect_to_path', - 'redirect_to_url', 'respond_to', 'helper', 'helper_method', 'model', 'service', 'observer', - 'serialize', 'scaffold', 'verify', 'hide_action', - -- View helpers. 
- 'check_box', 'content_for', 'error_messages_for', 'form_for', 'fields_for', 'file_field', - 'hidden_field', 'image_submit_tag', 'label', 'link_to', 'password_field', 'radio_button', - 'submit', 'text_field', 'text_area', - -- ActiveRecord. - 'after_create', 'after_destroy', 'after_save', 'after_update', 'after_validation', - 'after_validation_on_create', 'after_validation_on_update', 'before_create', 'before_destroy', - 'before_save', 'before_update', 'before_validation', 'before_validation_on_create', - 'before_validation_on_update', 'composed_of', 'belongs_to', 'has_one', 'has_many', - 'has_and_belongs_to_many', 'validate', 'validates', 'validate_on_create', - 'validates_numericality_of', 'validate_on_update', 'validates_acceptance_of', - 'validates_associated', 'validates_confirmation_of', 'validates_each', 'validates_format_of', - 'validates_inclusion_of', 'validates_exclusion_of', 'validates_length_of', - 'validates_presence_of', 'validates_size_of', 'validates_uniqueness_of', -- - 'attr_protected', 'attr_accessible', 'attr_readonly', 'accepts_nested_attributes_for', - 'default_scope', 'scope', - -- ActiveSupport. - 'alias_method_chain', 'alias_attribute', 'delegate', 'cattr_accessor', 'mattr_accessor', - 'returning', 'memoize' -}, true) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/rc.lua b/share/vis/lexers/rc.lua @@ -1,52 +0,0 @@ --- Copyright 2017-2024 Michael Forney. See LICENSE. --- rc LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('rc') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'for', 'in', 'while', 'if', 'not', 'switch', 'case', 'fn', 'builtin', 'cd', 'eval', 'exec', - 'exit', 'flag', 'rfork', 'shift', 'ulimit', 'umask', 'wait', 'whatis', '.', '~' -})) - --- Identifiers. 
-lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local str = lexer.range("'", false, false) -local heredoc = '<<' * P(function(input, index) - local s, e, _, delimiter = input:find('[ \t]*(["\']?)([%w!"%%+,-./:?@_~]+)%1', index) - if s == index and delimiter then - delimiter = delimiter:gsub('[%%+-.?]', '%%%1') - e = select(2, input:find('[\n\r]' .. delimiter .. '[\n\r]', e)) - return e and e + 1 or #input + 1 - end -end) -lex:add_rule('string', token(lexer.STRING, str + heredoc)) - --- Comments. -lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#'))) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Variables. -lex:add_rule('variable', - token(lexer.VARIABLE, '$' * S('"#')^-1 * ('*' + lexer.digit^1 + lexer.word))) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('@`=!<>*&^|;?()[]{}') + '\\\n')) - --- Fold points. -lex:add_fold_point(lexer.OPERATOR, '{', '}') - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/reason.lua b/share/vis/lexers/reason.lua @@ -1,67 +0,0 @@ --- Copyright 2018-2024 Hugo O. Rivera. See LICENSE. --- Reason (https://reasonml.github.io/) LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('reason') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. 
-lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'and', 'as', 'asr', 'begin', 'class', 'closed', 'constraint', 'do', 'done', 'downto', 'else', - 'end', 'exception', 'external', 'failwith', 'false', 'flush', 'for', 'fun', 'function', 'functor', - 'if', 'in', 'include', 'inherit', 'incr', 'land', 'let', 'load', 'los', 'lsl', 'lsr', 'lxor', - 'method', 'mod', 'module', 'mutable', 'new', 'not', 'of', 'open', 'option', 'or', 'parser', - 'private', 'ref', 'rec', 'raise', 'regexp', 'sig', 'struct', 'stdout', 'stdin', 'stderr', - 'switch', 'then', 'to', 'true', 'try', 'type', 'val', 'virtual', 'when', 'while', 'with' -})) - --- Types. -lex:add_rule('type', token(lexer.TYPE, word_match('int float bool char string unit'))) - --- Functions. -lex:add_rule('function', token(lexer.FUNCTION, word_match{ - 'raise', 'invalid_arg', 'failwith', 'compare', 'min', 'max', 'succ', 'pred', 'mod', 'abs', - 'max_int', 'min_int', 'sqrt', 'exp', 'log', 'log10', 'cos', 'sin', 'tan', 'acos', 'asin', 'atan', - 'atan2', 'cosh', 'sinh', 'tanh', 'ceil', 'floor', 'abs_float', 'mod_float', 'frexp', 'ldexp', - 'modf', 'float', 'float_of_int', 'truncate', 'int_of_float', 'infinity', 'nan', 'max_float', - 'min_float', 'epsilon_float', 'classify_float', 'int_of_char', 'char_of_int', 'ignore', - 'string_of_bool', 'bool_of_string', 'string_of_int', 'int_of_string', 'string_of_float', - 'float_of_string', 'fst', 'snd', 'stdin', 'stdout', 'stderr', 'print_char', 'print_string', - 'print_int', 'print_float', 'print_endline', 'print_newline', 'prerr_char', 'prerr_string', - 'prerr_int', 'prerr_float', 'prerr_endline', 'prerr_newline', 'read_line', 'read_int', - 'read_float', 'open_out', 'open_out_bin', 'open_out_gen', 'flush', 'flush_all', 'output_char', - 'output_string', 'output', 'output_byte', 'output_binary_int', 'output_value', 'seek_out', - 'pos_out', 'out_channel_length', 'close_out', 'close_out_noerr', 'set_binary_mode_out', 'open_in', - 'open_in_bin', 'open_in_gen', 'input_char', 
'input_line', 'input', 'really_input', 'input_byte', - 'input_binary_int', 'input_value', 'seek_in', 'pos_in', 'in_channel_length', 'close_in', - 'close_in_noerr', 'set_binary_mode_in', 'incr', 'decr', 'string_of_format', 'format_of_string', - 'exit', 'at_exit' -})) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"', true) -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) - --- Comments. -local line_comment = lexer.to_eol('//') -local block_comment = lexer.range('/*', '*/', false, false, true) -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('=<>+-*/.,:;~!#%^&|?[](){}'))) - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/rebol.lua b/share/vis/lexers/rebol.lua @@ -1,103 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Rebol LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('rebol') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Comments. -local line_comment = lexer.to_eol(';') -local block_comment = 'comment' * P(' ')^-1 * lexer.range('{', '}') -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Keywords. 
-lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'abs', 'absolute', 'add', 'and~', 'at', 'back', 'change', 'clear', 'complement', 'copy', 'cp', - 'divide', 'fifth', 'find', 'first', 'fourth', 'head', 'insert', 'last', 'make', 'max', 'maximum', - 'min', 'minimum', 'multiply', 'negate', 'next', 'or~', 'pick', 'poke', 'power', 'random', - 'remainder', 'remove', 'second', 'select', 'skip', 'sort', 'subtract', 'tail', 'third', 'to', - 'trim', 'xor~', -- - 'alias', 'all', 'any', 'arccosine', 'arcsine', 'arctangent', 'bind', 'break', 'browse', 'call', - 'caret-to-offset', 'catch', 'checksum', 'close', 'comment', 'compose', 'compress', 'cosine', - 'debase', 'decompress', 'dehex', 'detab', 'dh-compute-key', 'dh-generate-key', 'dh-make-key', - 'difference', 'disarm', 'do', 'dsa-generate-key', 'dsa-make-key', 'dsa-make-signature', - 'dsa-verify-signature', 'either', 'else', 'enbase', 'entab', 'exclude', 'exit', 'exp', 'foreach', - 'form', 'free', 'get', 'get-modes', 'halt', 'hide', 'if', 'in', 'intersect', 'load', 'log-10', - 'log-2', 'log-e', 'loop', 'lowercase', 'maximum-of', 'minimum-of', 'mold', 'not', 'now', - 'offset-to-caret', 'open', 'parse', 'prin', 'print', 'protect', 'q', 'query', 'quit', 'read', - 'read-io', 'recycle', 'reduce', 'repeat', 'return', 'reverse', 'rsa-encrypt', 'rsa-generate-key', - 'rsa-make-key', 'save', 'secure', 'set', 'set-modes', 'show', 'sine', 'size-text', 'square-root', - 'tangent', 'textinfo', 'throw', 'to-hex', 'to-local-file', 'to-rebol-file', 'trace', 'try', - 'union', 'unique', 'unprotect', 'unset', 'until', 'update', 'uppercase', 'use', 'wait', 'while', - 'write', 'write-io', -- - 'basic-syntax-header', 'crlf', 'font-fixed', 'font-sans-serif', 'font-serif', 'list-words', - 'outstr', 'val', 'value', -- - 'about', 'alert', 'alter', 'append', 'array', 'ask', 'boot-prefs', 'build-tag', 'center-face', - 'change-dir', 'charset', 'choose', 'clean-path', 'clear-fields', 'confine', 'confirm', 'context', - 'cvs-date', 'cvs-version', 
'decode-cgi', 'decode-url', 'deflag-face', 'delete', 'demo', 'desktop', - 'dirize', 'dispatch', 'do-boot', 'do-events', 'do-face', 'do-face-alt', 'does', 'dump-face', - 'dump-pane', 'echo', 'editor', 'emailer', 'emit', 'extract', 'find-by-type', 'find-key-face', - 'find-window', 'flag-face', 'flash', 'focus', 'for', 'forall', 'forever', 'forskip', 'func', - 'function', 'get-net-info', 'get-style', 'has', 'help', 'hide-popup', 'import-email', 'inform', - 'input', 'insert-event-func', 'join', 'launch', 'launch-thru', 'layout', 'license', 'list-dir', - 'load-image', 'load-prefs', 'load-thru', 'make-dir', 'make-face', 'net-error', 'open-events', - 'parse-email-addrs', 'parse-header', 'parse-header-date', 'parse-xml', 'path-thru', 'probe', - 'protect-system', 'read-net', 'read-thru', 'reboot', 'reform', 'rejoin', 'remold', - 'remove-event-func', 'rename', 'repend', 'replace', 'request', 'request-color', 'request-date', - 'request-download', 'request-file', 'request-list', 'request-pass', 'request-text', 'resend', - 'save-prefs', 'save-user', 'scroll-para', 'send', 'set-font', 'set-net', 'set-para', 'set-style', - 'set-user', 'set-user-name', 'show-popup', 'source', 'split-path', 'stylize', 'switch', - 'throw-on-error', 'to-binary', 'to-bitset', 'to-block', 'to-char', 'to-date', 'to-decimal', - 'to-email', 'to-event', 'to-file', 'to-get-word', 'to-hash', 'to-idate', 'to-image', 'to-integer', - 'to-issue', 'to-list', 'to-lit-path', 'to-lit-word', 'to-logic', 'to-money', 'to-none', 'to-pair', - 'to-paren', 'to-path', 'to-refinement', 'to-set-path', 'to-set-word', 'to-string', 'to-tag', - 'to-time', 'to-tuple', 'to-url', 'to-word', 'unfocus', 'uninstall', 'unview', 'upgrade', 'Usage', - 'vbug', 'view', 'view-install', 'view-prefs', 'what', 'what-dir', 'write-user', 'return', 'at', - 'space', 'pad', 'across', 'below', 'origin', 'guide', 'tabs', 'indent', 'style', 'styles', 'size', - 'sense', 'backcolor', 'do', 'none', -- - 'action?', 'any-block?', 'any-function?', 
'any-string?', 'any-type?', 'any-word?', 'binary?', - 'bitset?', 'block?', 'char?', 'datatype?', 'date?', 'decimal?', 'email?', 'empty?', 'equal?', - 'error?', 'even?', 'event?', 'file?', 'function?', 'get-word?', 'greater-or-equal?', 'greater?', - 'hash?', 'head?', 'image?', 'index?', 'integer?', 'issue?', 'length?', 'lesser-or-equal?', - 'lesser?', 'library?', 'list?', 'lit-path?', 'lit-word?', 'logic?', 'money?', 'native?', - 'negative?', 'none?', 'not-equal?', 'number?', 'object?', 'odd?', 'op?', 'pair?', 'paren?', - 'path?', 'port?', 'positive?', 'refinement?', 'routine?', 'same?', 'series?', 'set-path?', - 'set-word?', 'strict-equal?', 'strict-not-equal?', 'string?', 'struct?', 'tag?', 'tail?', 'time?', - 'tuple?', 'unset?', 'url?', 'word?', 'zero?', 'connected?', 'crypt-strength?', 'exists-key?', - 'input?', 'script?', 'type?', 'value?', '?', '??', 'dir?', 'exists-thru?', 'exists?', - 'flag-face?', 'found?', 'in-window?', 'info?', 'inside?', 'link-app?', 'link?', 'modified?', - 'offset?', 'outside?', 'screen-offset?', 'size?', 'span?', 'view?', 'viewed?', 'win-offset?', - 'within?', 'action!', 'any-block!', 'any-function!', 'any-string!', 'any-type!', 'any-word!', - 'binary!', 'bitset!', 'block!', 'char!', 'datatype!', 'date!', 'decimal!', 'email!', 'error!', - 'event!', 'file!', 'function!', 'get-word!', 'hash!', 'image!', 'integer!', 'issue!', 'library!', - 'list!', 'lit-path!', 'lit-word!', 'logic!', 'money!', 'native!', 'none!', 'number!', 'object!', - 'op!', 'pair!', 'paren!', 'path!', 'port!', 'refinement!', 'routine!', 'series!', 'set-path!', - 'set-word!', 'string!', 'struct!', 'symbol!', 'tag!', 'time!', 'tuple!', 'unset!', 'url!', - 'word!', -- - 'true', 'false', 'self' -})) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, (lexer.alpha + '-') * (lexer.alnum + '-')^0)) - --- Strings. 
-local dq_str = lexer.range('"', true) -local br_str = lexer.range('{', '}', false, false, true) -local word_str = "'" * lexer.word -lex:add_rule('string', token(lexer.STRING, dq_str + br_str + word_str)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('=<>+/*:()[]'))) - --- Fold points. -lex:add_fold_point(lexer.COMMENT, '{', '}') -lex:add_fold_point(lexer.OPERATOR, '[', ']') - -lexer.property['scintillua.comment'] = ';' - -return lex diff --git a/share/vis/lexers/rest.lua b/share/vis/lexers/rest.lua @@ -1,215 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- reStructuredText LPeg lexer. - -local lexer = require('lexer') -local token, word_match, starts_line = lexer.token, lexer.word_match, lexer.starts_line -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('rest') - --- Literal block. -local block = '::' * (lexer.newline + -1) * function(input, index) - local rest = input:sub(index) - local level, quote = #rest:match('^([ \t]*)') - for pos, indent, line in rest:gmatch('()[ \t]*()([^\r\n]+)') do - local no_indent = (indent - pos < level and line ~= ' ' or level == 0) - local quoted = no_indent and line:find(quote or '^%s*%W') - if quoted and not quote then quote = '^%s*%' .. line:match('^%s*(%W)') end - if no_indent and not quoted and pos > 1 then return index + pos - 1 end - end - return #input + 1 -end -lex:add_rule('literal_block', token('literal_block', block)) -lex:add_style('literal_block', lexer.styles.embedded .. {eolfilled = true}) - --- Lists. 
-local option_word = lexer.alnum * (lexer.alnum + '-')^0 -local option = S('-/') * option_word * (' ' * option_word)^-1 + - ('--' * option_word * ('=' * option_word)^-1) -local option_list = option * (',' * lexer.space^1 * option)^-1 -local bullet_list = S('*+-') -- TODO: '•‣⁃', as lpeg does not support UTF-8 -local enum_list = P('(')^-1 * (lexer.digit^1 + S('ivxlcmIVXLCM')^1 + lexer.alnum + '#') * S('.)') -local field_list = ':' * (lexer.any - ':')^1 * P(':')^-1 -lex:add_rule('list', #(lexer.space^0 * (S('*+-:/') + enum_list)) * - starts_line(token(lexer.LIST, - lexer.space^0 * (option_list + bullet_list + enum_list + field_list) * lexer.space))) - -local any_indent = S(' \t')^0 -local word = lexer.alpha * (lexer.alnum + S('-.+'))^0 -local prefix = any_indent * '.. ' - --- Explicit markup blocks. -local footnote_label = '[' * (lexer.digit^1 + '#' * word^-1 + '*') * ']' -local footnote = token('footnote_block', prefix * footnote_label * lexer.space) -local citation_label = '[' * word * ']' -local citation = token('citation_block', prefix * citation_label * lexer.space) -local link = token('link_block', prefix * '_' * - (lexer.range('`') + (P('\\') * 1 + lexer.nonnewline - ':')^1) * ':' * lexer.space) -lex:add_rule('markup_block', #prefix * starts_line(footnote + citation + link)) -lex:add_style('footnote_block', lexer.styles.label) -lex:add_style('citation_block', lexer.styles.label) -lex:add_style('link_block', lexer.styles.label) - --- Sphinx code block. 
-local indented_block = function(input, index) - local rest = input:sub(index) - local level = #rest:match('^([ \t]*)') - for pos, indent, line in rest:gmatch('()[ \t]*()([^\r\n]+)') do - if indent - pos < level and line ~= ' ' or level == 0 and pos > 1 then return index + pos - 1 end - end - return #input + 1 -end -local code_block = - prefix * 'code-block::' * S(' \t')^1 * lexer.nonnewline^0 * (lexer.newline + -1) * indented_block -lex:add_rule('code_block', #prefix * token('code_block', starts_line(code_block))) -lex:add_style('code_block', lexer.styles.embedded .. {eolfilled = true}) - --- Directives. -local known_directive = token('directive', prefix * word_match{ - -- Admonitions - 'attention', 'caution', 'danger', 'error', 'hint', 'important', 'note', 'tip', 'warning', - 'admonition', - -- Images - 'image', 'figure', - -- Body elements - 'topic', 'sidebar', 'line-block', 'parsed-literal', 'code', 'math', 'rubric', 'epigraph', - 'highlights', 'pull-quote', 'compound', 'container', - -- Table - 'table', 'csv-table', 'list-table', - -- Document parts - 'contents', 'sectnum', 'section-autonumbering', 'header', 'footer', - -- References - 'target-notes', 'footnotes', 'citations', - -- HTML-specific - 'meta', - -- Directives for substitution definitions - 'replace', 'unicode', 'date', - -- Miscellaneous - 'include', 'raw', 'class', 'role', 'default-role', 'title', 'restructuredtext-test-directive' -} * '::' * lexer.space) -local sphinx_directive = token('sphinx_directive', prefix * word_match{ - -- The TOC tree. - 'toctree', - -- Paragraph-level markup. - 'note', 'warning', 'versionadded', 'versionchanged', 'deprecated', 'seealso', 'rubric', - 'centered', 'hlist', 'glossary', 'productionlist', - -- Showing code examples. 
- 'highlight', 'literalinclude', - -- Miscellaneous - 'sectionauthor', 'index', 'only', 'tabularcolumns' -} * '::' * lexer.space) -local unknown_directive = token('unknown_directive', prefix * word * '::' * lexer.space) -lex:add_rule('directive', - #prefix * starts_line(known_directive + sphinx_directive + unknown_directive)) -lex:add_style('directive', lexer.styles.keyword) -lex:add_style('sphinx_directive', lexer.styles.keyword .. {bold = true}) -lex:add_style('unknown_directive', lexer.styles.keyword .. {italics = true}) - --- Substitution definitions. -lex:add_rule('substitution', #prefix * token('substitution', starts_line(prefix * lexer.range('|') * - lexer.space^1 * word * '::' * lexer.space))) -lex:add_style('substitution', lexer.styles.variable) - --- Comments. -local line_comment = lexer.to_eol(prefix) -local bprefix = any_indent * '..' -local block_comment = bprefix * lexer.newline * indented_block -lex:add_rule('comment', #bprefix * token(lexer.COMMENT, starts_line(line_comment + block_comment))) - --- Section titles (2 or more characters). -local adornment_chars = lpeg.C(S('!"#$%&\'()*+,-./:;<=>?@[\\]^_`{|}~')) -local adornment = lpeg.C(adornment_chars^2 * any_indent) * (lexer.newline + -1) -local overline = lpeg.Cmt(starts_line(adornment), function(input, index, adm, c) - if not adm:find('^%' .. c .. '+%s*$') then return nil end - local rest = input:sub(index) - local lines = 1 - for line, e in rest:gmatch('([^\r\n]+)()') do - if lines > 1 and line:match('^(%' .. c .. '+)%s*$') == adm then return index + e - 1 end - if lines > 3 or #line > #adm then return nil end - lines = lines + 1 - end - return #input + 1 -end) -local underline = lpeg.Cmt(starts_line(adornment), function(_, index, adm, c) - local pos = adm:match('^%' .. c .. '+%s*()$') - return pos and index - #adm + pos - 1 or nil -end) --- Token needs to be a predefined one in order for folder to work. -lex:add_rule('title', token(lexer.HEADING, overline + underline)) - --- Line block. 
-lex:add_rule('line_block_char', token(lexer.OPERATOR, starts_line(any_indent * '|'))) - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, S(' \t')^1 + lexer.newline^1)) - --- Inline markup. -local strong = token(lexer.BOLD, lexer.range('**')) -local em = token(lexer.ITALIC, lexer.range('*')) -local inline_literal = token('inline_literal', lexer.range('``')) -local postfix_link = (word + lexer.range('`')) * '_' * P('_')^-1 -local prefix_link = '_' * lexer.range('`') -local link_ref = token(lexer.LINK, postfix_link + prefix_link) -local role = token('role', ':' * word * ':' * (word * ':')^-1) -local interpreted = role^-1 * token('interpreted', lexer.range('`')) * role^-1 -local footnote_ref = token(lexer.REFERENCE, footnote_label * '_') -local citation_ref = token(lexer.REFERENCE, citation_label * '_') -local substitution_ref = token('substitution', lexer.range('|', true) * ('_' * P('_')^-1)^-1) -local link = token(lexer.LINK, - lexer.alpha * (lexer.alnum + S('-.'))^1 * ':' * (lexer.alnum + S('/.+-%@'))^1) -lex:add_rule('inline_markup', - (strong + em + inline_literal + link_ref + interpreted + footnote_ref + citation_ref + - substitution_ref + link) * -lexer.alnum) -lex:add_style('inline_literal', lexer.styles.embedded) -lex:add_style('role', lexer.styles.class) -lex:add_style('interpreted', lexer.styles.string) - --- Other. -lex:add_rule('non_space', token(lexer.DEFAULT, lexer.alnum * (lexer.any - lexer.space)^0)) -lex:add_rule('escape', token(lexer.DEFAULT, '\\' * lexer.any)) - --- Section-based folding. -local sphinx_levels = { - ['#'] = 0, ['*'] = 1, ['='] = 2, ['-'] = 3, ['^'] = 4, ['"'] = 5 -} - -function lex:fold(text, start_pos, start_line, start_level) - local folds, line_starts = {}, {} - for pos in (text .. 
'\n'):gmatch('().-\r?\n') do line_starts[#line_starts + 1] = pos end - local style_at, CONSTANT, level = lexer.style_at, lexer.CONSTANT, start_level - local sphinx = lexer.property_int['fold.scintillua.rest.by.sphinx.convention'] > 0 - local FOLD_BASE = lexer.FOLD_BASE - local FOLD_HEADER, FOLD_BLANK = lexer.FOLD_HEADER, lexer.FOLD_BLANK - for i = 1, #line_starts do - local pos, next_pos = line_starts[i], line_starts[i + 1] - local c = text:sub(pos, pos) - local line_num = start_line + i - 1 - folds[line_num] = level - if style_at[start_pos + pos - 1] == CONSTANT and c:find('^[^%w%s]') then - local sphinx_level = FOLD_BASE + (sphinx_levels[c] or #sphinx_levels) - level = not sphinx and level - 1 or sphinx_level - if level < FOLD_BASE then level = FOLD_BASE end - folds[line_num - 1], folds[line_num] = level, level + FOLD_HEADER - level = (not sphinx and level or sphinx_level) + 1 - elseif c == '\r' or c == '\n' then - folds[line_num] = level + FOLD_BLANK - end - end - return folds -end - --- lexer.property['fold.by.sphinx.convention'] = '0' - ---[[ Embedded languages. -local bash = lexer.load('bash') -local bash_indent_level -local start_rule = - #(prefix * 'code-block' * '::' * lexer.space^1 * 'bash' * (lexer.newline + -1)) * - sphinx_directive * token('bash_begin', P(function(input, index) - bash_indent_level = #input:match('^([ \t]*)', index) - return index - end))]] - -lexer.property['scintillua.comment'] = '.. ' - -return lex diff --git a/share/vis/lexers/rexx.lua b/share/vis/lexers/rexx.lua @@ -1,78 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Rexx LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('rexx') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. 
-lex:add_rule('keyword', token(lexer.KEYWORD, word_match({ - 'address', 'arg', 'by', 'call', 'class', 'do', 'drop', 'else', 'end', 'exit', 'expose', 'forever', - 'forward', 'guard', 'if', 'interpret', 'iterate', 'leave', 'method', 'nop', 'numeric', - 'otherwise', 'parse', 'procedure', 'pull', 'push', 'queue', 'raise', 'reply', 'requires', - 'return', 'routine', 'result', 'rc', 'say', 'select', 'self', 'sigl', 'signal', 'super', 'then', - 'to', 'trace', 'use', 'when', 'while', 'until' -}, true))) - --- Functions. -lex:add_rule('function', token(lexer.FUNCTION, word_match({ - 'abbrev', 'abs', 'address', 'arg', 'beep', 'bitand', 'bitor', 'bitxor', 'b2x', 'center', - 'changestr', 'charin', 'charout', 'chars', 'compare', 'consition', 'copies', 'countstr', 'c2d', - 'c2x', 'datatype', 'date', 'delstr', 'delword', 'digits', 'directory', 'd2c', 'd2x', 'errortext', - 'filespec', 'form', 'format', 'fuzz', 'insert', 'lastpos', 'left', 'length', 'linein', 'lineout', - 'lines', 'max', 'min', 'overlay', 'pos', 'queued', 'random', 'reverse', 'right', 'sign', - 'sourceline', 'space', 'stream', 'strip', 'substr', 'subword', 'symbol', 'time', 'trace', - 'translate', 'trunc', 'value', 'var', 'verify', 'word', 'wordindex', 'wordlength', 'wordpos', - 'words', 'xrange', 'x2b', 'x2c', 'x2d', -- - 'rxfuncadd', 'rxfuncdrop', 'rxfuncquery', 'rxmessagebox', 'rxwinexec', 'sysaddrexxmacro', - 'sysbootdrive', 'sysclearrexxmacrospace', 'syscloseeventsem', 'sysclosemutexsem', 'syscls', - 'syscreateeventsem', 'syscreatemutexsem', 'syscurpos', 'syscurstate', 'sysdriveinfo', - 'sysdrivemap', 'sysdropfuncs', 'sysdroprexxmacro', 'sysdumpvariables', 'sysfiledelete', - 'sysfilesearch', 'sysfilesystemtype', 'sysfiletree', 'sysfromunicode', 'systounicode', - 'sysgeterrortext', 'sysgetfiledatetime', 'sysgetkey', 'sysini', 'sysloadfuncs', - 'sysloadrexxmacrospace', 'sysmkdir', 'sysopeneventsem', 'sysopenmutexsem', 'sysposteventsem', - 'syspulseeventsem', 'sysqueryprocess', 'sysqueryrexxmacro', 
'sysreleasemutexsem', - 'sysreorderrexxmacro', 'sysrequestmutexsem', 'sysreseteventsem', 'sysrmdir', - 'syssaverexxmacrospace', 'syssearchpath', 'syssetfiledatetime', 'syssetpriority', 'syssleep', - 'sysstemcopy', 'sysstemdelete', 'syssteminsert', 'sysstemsort', 'sysswitchsession', - 'syssystemdirectory', 'systempfilename', 'systextscreenread', 'systextscreensize', - 'sysutilversion', 'sysversion', 'sysvolumelabel', 'syswaiteventsem', 'syswaitnamedpipe', - 'syswindecryptfile', 'syswinencryptfile', 'syswinver' -}, true))) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.alpha * (lexer.alnum + S('@#$\\.!?_'))^0)) - --- Strings. -local sq_str = lexer.range("'", true, false) -local dq_str = lexer.range('"', true, false) -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) - --- Comments. -local line_comment = lexer.to_eol('--', true) -local block_comment = lexer.range('/*', '*/', false, false, true) -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Preprocessor. -lex:add_rule('preprocessor', token(lexer.PREPROCESSOR, lexer.to_eol(lexer.starts_line('#')))) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('=!<>+-/\\*%&|^~.,:;(){}'))) - --- Fold points -lex:add_fold_point(lexer.KEYWORD, 'do', 'end') -lex:add_fold_point(lexer.KEYWORD, 'select', 'return') -lex:add_fold_point(lexer.COMMENT, '/*', '*/') --- lex:add_fold_point(lexer.OPERATOR, ':', ?) - -lexer.property['scintillua.comment'] = '--' - -return lex diff --git a/share/vis/lexers/rhtml.lua b/share/vis/lexers/rhtml.lua @@ -1,20 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- RHTML LPeg lexer. - -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(..., {inherit = lexer.load('html')}) - --- Embedded Ruby. 
-local ruby = lexer.load('rails') -local ruby_start_rule = lex:tag(lexer.PREPROCESSOR, '<%' * P('=')^-1) -local ruby_end_rule = lex:tag(lexer.PREPROCESSOR, '%>') -lex:embed(ruby, ruby_start_rule, ruby_end_rule) - --- Fold points. -lex:add_fold_point(lexer.PREPROCESSOR, '<%', '%>') - -lexer.property['scintillua.comment'] = '<!--|-->' - -return lex diff --git a/share/vis/lexers/routeros.lua b/share/vis/lexers/routeros.lua @@ -1,60 +0,0 @@ --- Copyright 2020-2024 Christian Hesse. See LICENSE. --- Mikrotik RouterOS script LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('routeros') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - -- Control. - ':delay', ':do', 'on-error', 'while', ':error', ':foreach', 'in', 'do', ':for', 'from', 'to', - 'step', ':if', 'do', 'else', ':return', ':while', 'do', - -- Menu specific commands. - 'add', 'disable', 'edit', 'enable', 'export', 'find', 'get', 'info', 'monitor', 'print', 'append', - 'as-value', 'brief', 'count-only', 'detail', 'file', 'follow', 'follow-only', 'from', 'interval', - 'terse', 'value-list', 'where', 'without-paging', 'remove', 'set', - -- Output & string handling. - ':beep', ':blink', ':environment', ':execute', ':find', ':len', ':log', 'alert', 'critical', - 'debug', 'emergency', 'error', 'info', 'notice', 'warning', ':parse', ':pick', ':put', - ':terminal', ':time', ':typeof', - -- Variable declaration. - ':global', ':local', ':set', - -- Variable casting. - ':toarray', ':tobool', ':toid', ':toip', ':toip6', ':tonum', ':tostr', ':totime', - -- Boolean values and logical operators. - 'false', 'no', 'true', 'yes', 'and', 'in', 'or', - -- Networking. - ':ping', ':resolve' -})) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Comments. 
-lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#'))) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Strings. -lex:add_rule('string', token(lexer.STRING, lexer.range('"'))) - --- Variables. -lex:add_rule('variable', token(lexer.VARIABLE, '$' * - (S('!#?*@$') + lexer.digit^1 + lexer.word + lexer.range('{', '}', true, false, true)))) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('=!%<>+-/*&|~.,;()[]{}'))) - --- Fold points. -lex:add_fold_point(lexer.OPERATOR, '{', '}') - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/rpmspec.lua b/share/vis/lexers/rpmspec.lua @@ -1,33 +0,0 @@ --- Copyright 2022-2024 Matej Cepl mcepl.att.cepl.eu. See LICENSE. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('rpmspec') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Comments. -lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#'))) - --- Strings. -lex:add_rule('string', token(lexer.STRING, lexer.range('"'))) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'Prereq', 'Summary', 'Name', 'Version', 'Packager', 'Requires', 'Recommends', 'Suggests', - 'Supplements', 'Enhances', 'Icon', 'URL', 'Source', 'Patch', 'Prefix', 'Packager', 'Group', - 'License', 'Release', 'BuildRoot', 'Distribution', 'Vendor', 'Provides', 'ExclusiveArch', - 'ExcludeArch', 'ExclusiveOS', 'Obsoletes', 'BuildArch', 'BuildArchitectures', 'BuildRequires', - 'BuildConflicts', 'BuildPreReq', 'Conflicts', 'AutoRequires', 'AutoReq', 'AutoReqProv', - 'AutoProv', 'Epoch' -})) - --- Macros -lex:add_rule('command', token(lexer.FUNCTION, '%' * lexer.word)) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/rstats.lua b/share/vis/lexers/rstats.lua @@ -1,52 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. 
--- R LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('rstats') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'break', 'else', 'for', 'if', 'in', 'next', 'repeat', 'return', 'switch', 'try', 'while', -- - 'Inf', 'NA', 'NaN', 'NULL', 'FALSE', 'TRUE', 'F', 'T', - -- Frequently used operators. - '|>', '%%', '%*%', '%/%', '%in%', '%o%', '%x%' -})) - --- Types. -lex:add_rule('type', token(lexer.TYPE, word_match{ - 'array', 'character', 'closure', 'complex', 'data.frame', 'double', 'environment', 'expression', - 'externalptr', 'factor', 'function', 'integer', 'list', 'logical', 'matrix', 'numeric', - 'pairlist', 'promise', 'raw', 'symbol', 'vector' -})) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"', true) -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) - --- Comments. -lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#'))) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, (lexer.number * P('i')^-1) * P('L')^-1)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('<->+*/^=.,:;|$()[]{}'))) - --- Folding -lex:add_fold_point(lexer.OPERATOR, '(', ')') -lex:add_fold_point(lexer.OPERATOR, '[', ']') -lex:add_fold_point(lexer.OPERATOR, '{', '}') - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/ruby.lua b/share/vis/lexers/ruby.lua @@ -1,127 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Ruby LPeg lexer. - -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(...) - --- Keywords. -lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD))) - --- Functions. 
-local builtin_func = lex:tag(lexer.FUNCTION_BUILTIN, lex:word_match(lexer.FUNCTION_BUILTIN)) -lex:add_rule('function', -lpeg.B('.') * builtin_func * -S('.:|')) - --- Identifiers. -local word_char = lexer.alnum + S('_!?') -local word = (lexer.alpha + '_') * word_char^0 -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, word)) - --- Comments. -local line_comment = lexer.to_eol('#', true) -local block_comment = lexer.range(lexer.starts_line('=begin'), lexer.starts_line('=end')) -lex:add_rule('comment', lex:tag(lexer.COMMENT, block_comment + line_comment)) - --- Strings. -local delimiter_matches = {['('] = ')', ['['] = ']', ['{'] = '}'} -local literal_delimited = P(function(input, index) - local delimiter = input:sub(index, index) - if not delimiter:find('[%w\r\n\f\t ]') then -- only non alpha-numerics - local match_pos, patt - if delimiter_matches[delimiter] then - -- Handle nested delimiter/matches in strings. - local s, e = delimiter, delimiter_matches[delimiter] - patt = lexer.range(s, e, false, true, true) - else - patt = lexer.range(delimiter) - end - match_pos = lpeg.match(patt, input, index) - return match_pos or #input + 1 - end -end) - -local cmd_str = lexer.range('`') -local lit_cmd = '%x' * literal_delimited -local lit_array = '%w' * literal_delimited -local sq_str = lexer.range("'") -local dq_str = lexer.range('"') -local lit_str = '%' * S('qQ')^-1 * literal_delimited -local heredoc = '<<' * P(function(input, index) - local s, e, indented, _, delimiter = input:find('([%-~]?)(["`]?)([%a_][%w_]*)%2[\n\r\f;]+', index) - if s == index and delimiter then - local end_heredoc = (#indented > 0 and '[\n\r\f]+ *' or '[\n\r\f]+') - s, e = input:find(end_heredoc .. delimiter, e) - return e and e + 1 or #input + 1 - end -end) -local string = lex:tag(lexer.STRING, (sq_str + dq_str + lit_str + heredoc + cmd_str + lit_cmd + - lit_array) * S('f')^-1) --- TODO: regex_str fails with `obj.method /patt/` syntax. 
-local regex_str = lexer.after_set('!%^&*([{-=+|:;,?<>~', lexer.range('/', true) * S('iomx')^0) -local lit_regex = '%r' * literal_delimited * S('iomx')^0 -local regex = lex:tag(lexer.REGEX, regex_str + lit_regex) -lex:add_rule('string', string + regex) - --- Numbers. -local numeric_literal = '?' * (lexer.any - lexer.space) * -word_char -- TODO: meta, control, etc. -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number_('_') * S('ri')^-1 + numeric_literal)) - --- Variables. -local global_var = '$' * - (word + S('!@L+`\'=~/\\,.;<>_*"$?:') + lexer.digit + '-' * S('0FadiIKlpvw')) -local class_var = '@@' * word -local inst_var = '@' * word -lex:add_rule('variable', lex:tag(lexer.VARIABLE, global_var + class_var + inst_var)) - --- Symbols. -lex:add_rule('symbol', lex:tag(lexer.STRING .. '.symbol', ':' * P(function(input, index) - if input:sub(index - 2, index - 2) ~= ':' then return true end -end) * (word_char^1 + sq_str + dq_str))) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('!%^&*()[]{}-=+/|:;.,?<>~'))) - --- Fold points. 
-local function disambiguate(text, pos, line, s) - return line:sub(1, s - 1):match('^%s*$') and not text:sub(1, pos - 1):match('\\[ \t]*\r?\n$') and - 1 or 0 -end -lex:add_fold_point(lexer.KEYWORD, 'begin', 'end') -lex:add_fold_point(lexer.KEYWORD, 'class', 'end') -lex:add_fold_point(lexer.KEYWORD, 'def', 'end') -lex:add_fold_point(lexer.KEYWORD, 'do', 'end') -lex:add_fold_point(lexer.KEYWORD, 'for', 'end') -lex:add_fold_point(lexer.KEYWORD, 'module', 'end') -lex:add_fold_point(lexer.KEYWORD, 'case', 'end') -lex:add_fold_point(lexer.KEYWORD, 'if', disambiguate) -lex:add_fold_point(lexer.KEYWORD, 'while', disambiguate) -lex:add_fold_point(lexer.KEYWORD, 'unless', disambiguate) -lex:add_fold_point(lexer.KEYWORD, 'until', disambiguate) -lex:add_fold_point(lexer.OPERATOR, '(', ')') -lex:add_fold_point(lexer.OPERATOR, '[', ']') -lex:add_fold_point(lexer.OPERATOR, '{', '}') -lex:add_fold_point(lexer.COMMENT, '=begin', '=end') - --- Word lists. -lex:set_word_list(lexer.KEYWORD, { - 'BEGIN', 'END', 'alias', 'and', 'begin', 'break', 'case', 'class', 'def', 'defined?', 'do', - 'else', 'elsif', 'end', 'ensure', 'false', 'for', 'if', 'in', 'module', 'next', 'nil', 'not', - 'or', 'redo', 'rescue', 'retry', 'return', 'self', 'super', 'then', 'true', 'undef', 'unless', - 'until', 'when', 'while', 'yield', '__FILE__', '__LINE__' -}) - -lex:set_word_list(lexer.FUNCTION_BUILTIN, { - 'at_exit', 'autoload', 'binding', 'caller', 'catch', 'chop', 'chop!', 'chomp', 'chomp!', 'eval', - 'exec', 'exit', 'exit!', 'extend', 'fail', 'fork', 'format', 'gets', 'global_variables', 'gsub', - 'gsub!', 'include', 'iterator?', 'lambda', 'load', 'local_variables', 'loop', 'module_function', - 'open', 'p', 'print', 'printf', 'proc', 'putc', 'puts', 'raise', 'rand', 'readline', 'readlines', - 'require', 'require_relative', 'select', 'sleep', 'split', 'sprintf', 'srand', 'sub', 'sub!', - 'syscall', 'system', 'test', 'trace_var', 'trap', 'untrace_var' -}) - -lexer.property['scintillua.comment'] = '#' 
-lexer.property['scintillua.word.chars'] = - 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_?!' - -return lex diff --git a/share/vis/lexers/rust.lua b/share/vis/lexers/rust.lua @@ -1,90 +0,0 @@ --- Copyright 2015-2024 Alejandro Baez (https://keybase.io/baez). See LICENSE. --- Rust LPeg lexer. - -local lexer = lexer -local P, S = lpeg.P, lpeg.S -local C, Cmt = lpeg.C, lpeg.Cmt - -local lex = lexer.new(...) - --- Keywords. -lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD))) - --- Library types. -lex:add_rule('library', lex:tag(lexer.TYPE, lexer.upper * (lexer.lower + lexer.dec_num)^1)) - --- Types. -lex:add_rule('type', lex:tag(lexer.TYPE, lex:word_match(lexer.TYPE))) - --- Lifetime annotation. -lex:add_rule('lifetime', lex:tag(lexer.OPERATOR, S('<&') * P("'"))) - --- Strings. -local sq_str = P('b')^-1 * lexer.range("'", true) -local dq_str = P('b')^-1 * lexer.range('"') -local raw_str = Cmt(P('b')^-1 * P('r') * C(P('#')^0) * '"', function(input, index, hashes) - local _, e = input:find('"' .. hashes, index, true) - return (e or #input) + 1 -end) -lex:add_rule('string', lex:tag(lexer.STRING, sq_str + dq_str + raw_str)) - --- Functions. -local builtin_macros = lex:tag(lexer.FUNCTION_BUILTIN, lex:word_match(lexer.FUNCTION_BUILTIN) * '!') -local macros = lex:tag(lexer.FUNCTION, lexer.word * '!') -local func = lex:tag(lexer.FUNCTION, lexer.word) -lex:add_rule('function', (builtin_macros + macros + func) * #(lexer.space^0 * '(')) - --- Identifiers. -local identifier = P('r#')^-1 * lexer.word -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, identifier)) - --- Comments. -local line_comment = lexer.to_eol('//', true) -local block_comment = lexer.range('/*', '*/', false, false, true) -lex:add_rule('comment', lex:tag(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number_('_'))) - --- Attributes. 
-lex:add_rule('preprocessor', lex:tag(lexer.PREPROCESSOR, '#' * lexer.range('[', ']', true))) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('+-/*%<>!=`^~@&|?#~:;,.()[]{}'))) - --- Fold points. -lex:add_fold_point(lexer.COMMENT, '/*', '*/') -lex:add_fold_point(lexer.OPERATOR, '(', ')') -lex:add_fold_point(lexer.OPERATOR, '{', '}') - --- https://doc.rust-lang.org/std/#keywords -lex:set_word_list(lexer.KEYWORD, { - 'SelfTy', 'as', 'async', 'await', 'break', 'const', 'continue', 'crate', 'dyn', 'else', 'enum', - 'extern', 'false', 'fn', 'for', 'if', 'impl', 'in', 'let', 'loop', 'match', 'mod', 'move', 'mut', - 'pub', 'ref', 'return', 'self', 'static', 'struct', 'super', 'trait', 'true', 'type', 'union', - 'unsafe', 'use', 'where', 'while' -}) - --- https://doc.rust-lang.org/std/#primitives -lex:set_word_list(lexer.TYPE, { - 'never', 'array', 'bool', 'char', 'f32', 'f64', 'fn', 'i8', 'i16', 'i32', 'i64', 'i128', 'isize', - 'pointer', 'reference', 'slice', 'str', 'tuple', 'u8', 'u16', 'u32', 'u64', 'u128', 'unit', - 'usize' -}) - -lex:set_word_list(lexer.FUNCTION_BUILTIN, { - 'assert', 'assert_eq', 'assert_ne', 'cfg', 'column', 'compile_error', 'concat', 'dbg', - 'debug_assert', 'debug_assert_eq', 'debug_assert_ne', 'env', 'eprint', 'eprintln', 'file', - 'format', 'format_args', 'include', 'include_bytes', 'include_str', 'line', 'matches', - 'module_path', 'option_env', 'panic', 'print', 'println', 'stringify', 'thread_local', 'todo', - 'unimplemented', 'unreachable', 'vec', 'write', 'writeln', - -- Experimental - 'concat_bytes', 'concat_idents', 'const_format_args', 'format_args_nl', 'log_syntax', - 'trace_macros', - -- Deprecated - 'try' -}) - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/sass.lua b/share/vis/lexers/sass.lua @@ -1,21 +0,0 @@ --- Copyright 2006-2024 Robert Gieseke. See LICENSE. --- Sass CSS preprocessor LPeg lexer. 
--- http://sass-lang.com - -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(..., {inherit = lexer.load('css')}) - --- Line comments. -lex:add_rule('line_comment', lex:tag(lexer.COMMENT, lexer.to_eol('//'))) - --- Variables. -lex:add_rule('variable', lex:tag(lexer.VARIABLE, '$' * (lexer.alnum + S('_-'))^1)) - --- Mixins. -lex:add_rule('mixin', lex:tag(lexer.PREPROCESSOR, '@' * lexer.word)) - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/scala.lua b/share/vis/lexers/scala.lua @@ -1,61 +0,0 @@ --- Copyright 2006-2024 JMS. See LICENSE. --- Scala LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('scala') - --- Whitespace. -local ws = token(lexer.WHITESPACE, lexer.space^1) -lex:add_rule('whitespace', ws) - --- Classes. -lex:add_rule('class', token(lexer.KEYWORD, 'class') * ws^1 * token(lexer.CLASS, lexer.word)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'abstract', 'case', 'catch', 'class', 'def', 'do', 'else', 'extends', 'false', 'final', 'finally', - 'for', 'forSome', 'if', 'implicit', 'import', 'lazy', 'match', 'new', 'null', 'object', - 'override', 'package', 'private', 'protected', 'return', 'sealed', 'super', 'this', 'throw', - 'trait', 'try', 'true', 'type', 'val', 'var', 'while', 'with', 'yield' -})) - --- Types. -lex:add_rule('type', token(lexer.TYPE, word_match{ - 'Array', 'Boolean', 'Buffer', 'Byte', 'Char', 'Collection', 'Double', 'Float', 'Int', 'Iterator', - 'LinkedList', 'List', 'Long', 'Map', 'None', 'Option', 'Set', 'Short', 'SortedMap', 'SortedSet', - 'String', 'TreeMap', 'TreeSet' -})) - --- Functions. -lex:add_rule('function', token(lexer.FUNCTION, lexer.word) * #P('(')) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. 
-local symbol = "'" * lexer.word -local dq_str = lexer.range('"', true) -local tq_str = lexer.range('"""') -lex:add_rule('string', token(lexer.STRING, tq_str + symbol + dq_str)) - --- Comments. -local line_comment = lexer.to_eol('//', true) -local block_comment = lexer.range('/*', '*/') -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number * S('LlFfDd')^-1)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;.()[]{}'))) - --- Fold points. -lex:add_fold_point(lexer.OPERATOR, '{', '}') -lex:add_fold_point(lexer.COMMENT, '/*', '*/') - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/scheme.lua b/share/vis/lexers/scheme.lua @@ -1,175 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Scheme LPeg lexer. --- Contributions by Murray Calavera. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('scheme') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'and', 'or', 'not', 'else', - -- - 'library', 'define-library', 'export', 'include-library-declarations', 'cond-expand', 'import', - 'rename', 'only', 'except', 'prefix', 'include', 'include-ci', - -- - 'begin', 'case', 'case-lambda', 'cond', 'define', 'define-record-type', 'define-syntax', - 'define-values', 'delay', 'delay-force', 'do', 'if', 'guard', 'lambda', 'let', 'let*', - 'let*-values', 'let-syntax', 'let-values', 'letrec', 'letrec*', 'letrec-syntax', 'parameterize', - 'quasiquote', 'quote', 'set!', 'unless', 'unquote', 'unquote-splicing', 'when', - -- - 'define-macro', 'fluid-let' -})) - --- Functions. 
-lex:add_rule('function', token(lexer.FUNCTION, word_match{ - '*', '+', '-', '/', '<', '<=', '=', '=>', '>', '>=', 'abs', 'append', 'apply', 'assoc', 'assq', - 'assv', 'binary-port?', 'boolean=?', 'boolean?', 'bytevector', 'bytevector-append', - 'bytevector-copy', 'bytevector-copy!', 'bytevector-length', 'bytevector-u8-ref', - 'bytevector-u8-set!', 'bytevector?', 'caar', 'cadr', 'call-with-current-continuation', - 'call-with-port', 'call-with-values', 'call/cc', 'car', 'cdar', 'cddr', 'cdr', 'ceiling', - 'char->integer', 'char-ready?', 'char<=?', 'char<?', 'char=?', 'char>=?', 'char>?', 'char?', - 'close-input-port', 'close-output-port', 'close-port', 'complex?', 'cons', 'current-error-port', - 'current-input-port', 'current-output-port', 'denominator', 'dynamic-wind', 'eof-object', - 'eof-object?', 'eq?', 'equal?', 'eqv?', 'error', 'error-object-irritants', 'error-object-message', - 'error-object?', 'even?', 'exact', 'exact-integer-sqrt', 'exact-integer?', 'exact?', 'expt', - 'features', 'file-error?', 'floor', 'floor-quotient', 'floor-remainder', 'floor/', - 'flush-output-port', 'for-each', 'gcd', 'get-output-bytevector', 'get-output-string', 'inexact', - 'inexact?', 'input-port-open?', 'input-port?', 'integer->char', 'integer?', 'lcm', 'length', - 'list', 'list->string', 'list->vector', 'list-copy', 'list-ref', 'list-set!', 'list-tail', - 'list?', 'make-bytevector', 'make-list', 'make-parameter', 'make-string', 'make-vector', 'map', - 'max', 'member', 'memq', 'memv', 'min', 'modulo', 'negative?', 'newline', 'null?', - 'number->string', 'number?', 'numerator', 'odd?', 'open-input-bytevector', 'open-input-string', - 'open-output-bytevector', 'open-output-string', 'output-port-open?', 'output-port?', 'pair?', - 'peek-char', 'peek-u8', 'port?', 'positive?', 'procedure?', 'quotient', 'raise', - 'raise-continuable', 'rational?', 'rationalize', 'read-bytevector', 'read-bytevector!', - 'read-char', 'read-error?', 'read-line', 'read-string', 'read-u8', 'real?', 
'remainder', - 'reverse', 'round', 'set-car!', 'set-cdr!', 'square', 'string', 'string->list', 'string->number', - 'string->symbol', 'string->utf8', 'string->vector', 'string-append', 'string-copy', - 'string-copy!', 'string-fill!', 'string-for-each', 'string-length', 'string-map', 'string-ref', - 'string-set!', 'string<=?', 'string<?', 'string=?', 'string>=?', 'string>?', 'string?', - 'substring', 'symbol->string', 'symbol=?', 'symbol?', 'syntax-error', 'syntax-rules', - 'textual-port?', 'truncate', 'truncate-quotient', 'truncate-remainder', 'truncate/', 'u8-ready?', - 'utf8->string', 'values', 'vector', 'vector->list', 'vector->string', 'vector-append', - 'vector-copy', 'vector-copy!', 'vector-fill!', 'vector-for-each', 'vector-length', 'vector-map', - 'vector-ref', 'vector-set!', 'vector?', 'with-exception-handler', 'write-bytevector', - 'write-char', 'write-string', 'write-u8', 'zero?', - -- - 'char-alphabetic?', 'char-ci<=?', 'char-ci<?', 'char-ci=?', 'char-ci>=?', 'char-ci>?', - 'char-downcase', 'char-foldcase', 'char-lower-case?', 'char-numeric?', 'char-upcase', - 'char-upper-case?', 'char-whitespace?', 'digit-value', 'string-ci<=?', 'string-ci<?', - 'string-ci=?', 'string-ci>=?', 'string-ci>?', 'string-downcase', 'string-foldcase', - 'string-upcase', - -- - 'angle', 'imag-part', 'magnitude', 'make-polar', 'make-rectangular', 'real-part', - -- - 'caaaar', 'caaadr', 'caaar', 'caadar', 'caaddr', 'caadr', 'cadaar', 'cadadr', 'cadar', 'caddar', - 'cadddr', 'caddr', 'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr', 'cdadr', 'cddaar', 'cddadr', - 'cddar', 'cdddar', 'cddddr', 'cdddr', - -- - 'environment', 'eval', - -- - 'call-with-input-file', 'call-with-output-file', 'delete-file', 'file-exists?', - 'open-binary-input-file', 'open-binary-output-file', 'open-input-file', 'open-output-file', - 'with-input-from-file', 'with-output-to-file', - -- - 'acos', 'asin', 'atan', 'cos', 'exp', 'finite?', 'infinite?', 'log', 'nan?', 'sin', 'sqrt', 'tan', - -- - 'force', 
'make-promise', 'promise?', - -- - 'load', - -- - 'command-line', 'emergency-exit', 'exit', 'get-environment-variable', 'get-environment-variables', - -- - 'read', - -- - 'interaction-environment', - -- - 'current-jiffy', 'current-second', 'jiffies-per-second', - -- - 'display', 'write', 'write-shared', 'write-simple', - -- - 'syntax-case', 'er-macro-transformer', 'sc-macro-transformer', 'rsc-macro-transformer' -})) - --- Identifiers and symbols. -local explicit_sign = S('+-') -local initial = lexer.alpha + S('!$%&*/:<=>?@^_~') -local subsequent = initial + lexer.digit + explicit_sign + '.' -local sign_subsequent = initial + explicit_sign -local dot_subsequent = sign_subsequent + '.' --- LuaFormatter off -local peculiar_identifier = - explicit_sign * '.' * dot_subsequent * subsequent^0 + - explicit_sign * sign_subsequent * subsequent^0 + - '.' * dot_subsequent * subsequent^0 + - explicit_sign --- LuaFormatter on -local ident = lexer.range('|') + initial * subsequent^0 + peculiar_identifier -lex:add_rule('identifier', token(lexer.IDENTIFIER, ident)) -lex:add_rule('symbol', token(lexer.CLASS, "'" * ident)) - --- Strings. -local character = '#\\' * - (word_match('alarm backspace delete escape newline null return space tab') + 'x' * lexer.xdigit^1 + - lexer.any) -local dq_str = lexer.range('"') -lex:add_rule('string', token(lexer.STRING, character + dq_str)) - --- Constants. -lex:add_rule('constant', token(lexer.CONSTANT, word_match('#t #f #true #false'))) - --- Directives. -lex:add_rule('directive', token(lexer.PREPROCESSOR, P('#!fold-case') + '#!no-fold-case')) - --- Comments. -local line_comment = lexer.to_eol(';') -local block_comment = lexer.range('#|', '|#', false, false, true) -local datum_comment = '#;' * lexer.space^0 * lexer.range('(', ')', false, true, true) * - (lexer.any - lexer.space)^0 -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment + datum_comment)) - --- Numbers. 
-local radixes = {[2] = P('#b'), [8] = P('#o'), [10] = P('#d')^-1, [16] = P('#x')} -local digits = {[2] = S('01'), [8] = lpeg.R('07'), [10] = lexer.digit, [16] = lexer.xdigit} -local function num(r) - local exactness = (P('#i') + '#e')^-1 - local radix, digit = radixes[r], digits[r] - local prefix = radix * exactness + exactness * radix - local suffix = ('e' * S('+-')^-1 * lexer.digit^1)^-1 - local infnan = S('+-') * word_match[[inf nan]] * '.0' - -- LuaFormatter off - local decimal = lexer.digit^1 * suffix + - '.' * lexer.digit^1 * suffix + - lexer.digit^1 * '.' * lexer.digit^0 * suffix - local ureal = digit^1 * '/' * digit^1 + - (r == 10 and decimal or P(false)) + - digit^1 - local real = S('+-')^-1 * ureal + infnan - local i = P('i') - local complex = real * '@' * real + - real * S('+-') * ureal^-1 * i + - real * infnan * i + - infnan * i + - real + - S('+-') * ureal^-1 * i - -- LuaFormatter on - return prefix * complex -end -lex:add_rule('number', token(lexer.NUMBER, num(2) + num(8) + num(10) + num(16))) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, P('#u8') + ',@' + S(".`'#(),"))) - --- Fold points. -lex:add_fold_point(lexer.OPERATOR, '(', ')') -lex:add_fold_point(lexer.COMMENT, '#|', '|#') - -lexer.property['scintillua.comment'] = ';' - -return lex diff --git a/share/vis/lexers/smalltalk.lua b/share/vis/lexers/smalltalk.lua @@ -1,46 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Smalltalk LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('smalltalk') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match( - 'true false nil self super isNil not Smalltalk Transcript'))) - --- Types. 
-lex:add_rule('type', token(lexer.TYPE, word_match( - 'Date Time Boolean True False Character String Array Symbol Integer Object'))) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local sq_str = lexer.range("'") -local word_str = '$' * lexer.word -lex:add_rule('string', token(lexer.STRING, sq_str + word_str)) - --- Comments. -lex:add_rule('comment', token(lexer.COMMENT, lexer.range('"', false, false))) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S(':=_<>+-/*!()[]'))) - --- Labels. -lex:add_rule('label', token(lexer.LABEL, '#' * lexer.word)) - --- Fold points. -lex:add_fold_point(lexer.OPERATOR, '[', ']') - -lexer.property['scintillua.comment'] = '"|"' - -return lex diff --git a/share/vis/lexers/sml.lua b/share/vis/lexers/sml.lua @@ -1,93 +0,0 @@ --- Copyright 2017-2024 Murray Calavera. See LICENSE. --- Standard ML LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('sml') - --- Whitespace. -local ws = token(lexer.WHITESPACE, lexer.space^1) -lex:add_rule('whitespace', ws) - --- Structures. -local id = (lexer.alnum + "'" + '_')^0 -local aid = lexer.alpha * id -local longid = (aid * '.')^0 * aid -local struct_dec = token(lexer.KEYWORD, 'structure') * ws * token(lexer.CLASS, aid) * ws * - token(lexer.OPERATOR, '=') * ws -lex:add_rule('struct_new', struct_dec * token(lexer.KEYWORD, 'struct')) -lex:add_rule('struct_alias', struct_dec * token(lexer.CLASS, longid)) -lex:add_rule('structure', token(lexer.CLASS, aid * '.')) - --- Open. -lex:add_rule('open', token(lexer.KEYWORD, word_match('open structure functor')) * ws * - token(lexer.CLASS, longid)) - --- Keywords. 
-lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'abstype', 'and', 'andalso', 'as', 'case', 'do', 'datatype', 'else', 'end', 'exception', 'fn', - 'fun', 'handle', 'if', 'in', 'infix', 'infixr', 'let', 'local', 'nonfix', 'of', 'op', 'orelse', - 'raise', 'rec', 'then', 'type', 'val', 'with', 'withtype', 'while', -- - 'eqtype', 'functor', 'include', 'sharing', 'sig', 'signature', 'struct', 'structure' -})) - --- Types. -lex:add_rule('type', token(lexer.TYPE, word_match{ - 'int', 'real', 'word', 'bool', 'char', 'string', 'unit', 'array', 'exn', 'list', 'option', - 'order', 'ref', 'substring', 'vector' -})) - --- Functions. --- `real`, `vector` and `substring` are a problem. -lex:add_rule('function', token(lexer.FUNCTION, word_match{ - 'app', 'before', 'ceil', 'chr', 'concat', 'exnMessage', 'exnName', 'explode', 'floor', 'foldl', - 'foldr', 'getOpt', 'hd', 'ignore', 'implode', 'isSome', 'length', 'map', 'not', 'null', 'ord', - 'print', 'real', 'rev', 'round', 'size', 'str', 'substring', 'tl', 'trunc', 'valOf', 'vector', - 'o', 'abs', 'mod', 'div' -})) - --- Constants. -lex:add_rule('constant', token(lexer.CONSTANT, word_match('true false nil') + lexer.upper * id)) - --- Indentifiers (non-symbolic). -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.lower * id)) - --- Strings. -lex:add_rule('string', token(lexer.STRING, P('#')^-1 * lexer.range('"', true))) - --- Comments. -local line_comment = lexer.to_eol('(*)') -local block_comment = lexer.range('(*', '*)', false, false, true) -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -local function num(digit) return digit * (digit^0 * '_')^0 * digit^1 + digit end -local int = num(lexer.digit) -local frac = '.' 
* int -local minus = lpeg.P('~')^-1 -local exp = lpeg.S('eE') * minus * int -local real = int * frac^-1 * exp + int * frac * exp^-1 -local hex = num(lexer.xdigit) -local bin = num(lpeg.S('01')) --- LuaFormatter off -lex:add_rule('number', token(lexer.NUMBER, - '0w' * int + - (P('0wx') + '0xw') * hex + - (P('0wb') + '0bw') * bin + - minus * '0x' * hex + - minus * '0b' * bin + - minus * real + - minus * int)) --- LuaFormatter on - --- Type variables. -lex:add_rule('typevar', token(lexer.VARIABLE, "'" * id)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('!*/+-^:@=<>()[]{},;._|#%&$?~`\\'))) - -lexer.property['scintillua.comment'] = '(*)' - -return lex diff --git a/share/vis/lexers/snobol4.lua b/share/vis/lexers/snobol4.lua @@ -1,71 +0,0 @@ --- Copyright 2013-2024 Michael T. Richter. See LICENSE. --- SNOBOL4 lexer. --- This lexer works with classic SNOBOL4 as well as the CSNOBOL4 extensions. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local B, P, S = lpeg.B, lpeg.P, lpeg.S - -local lex = lexer.new('snobol4') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match({ - 'ABORT', 'ARRAY', 'CONTINUE', 'DEFINE', 'END', 'FRETURN', 'INPUT', 'NRETURN', 'OUTPUT', 'PUNCH', - 'RETURN', 'SCONTINUE', 'TABLE' -}, true) + '&' * lexer.word)) - --- Helper patterns. -local dotted_id = lexer.word * ('.' * lexer.word)^0 - --- Labels. -lex:add_rule('label', token(lexer.LABEL, lexer.starts_line(dotted_id))) - --- Targets. -local branch = B(lexer.space * ':(') * dotted_id * #P(')') -local sbranch = B(lexer.space * ':' * S('SsFf') * '(') * dotted_id * #P(')') -local sbranchx = B(')' * S('SsFf') * '(') * dotted_id * #P(')') -lex:add_rule('target', token(lexer.LABEL, branch + sbranch + sbranchx)) - --- Patterns. -lex:add_rule('pattern', lexer.token(lexer.CLASS, word_match({ - -- Keep distinct. 
- 'ABORT', 'ANY', 'ARB', 'ARBNO', 'BAL', 'BREAK', 'BREAKX', 'FAIL', 'FENCE', 'LEN', 'NOTANY', 'POS', - 'REM', 'RPOS', 'RTAB', 'SPAN', 'SUCCEED', 'TAB' -}, true) * #P('('))) - --- Token definitions. -lex:add_rule('built-in', token(lexer.FUNCTION, word_match({ - 'APPLY', 'ARRAY', 'CHAR', 'CONVERT', 'COPY', 'DATA', 'DATE', 'DIFFER', 'DUPL', 'EQ', 'EVAL', - 'FILE_ABSPATH', 'FILE_ISDIR', 'FREEZE', 'FUNCTION', 'GE', 'GT', 'HOST', 'IDENT', 'INTEGER', - 'IO_FINDUNIT', 'ITEM', 'LABEL', 'LOAD', 'LPAD', 'LE', 'LGT', 'LT', 'NE', 'OPSYN', 'ORD', - 'PROTOTYPE', 'REMDR', 'REPLACE', 'REVERSE', 'RPAD', 'RSORT', 'SERV_LISTEN', 'SET', 'SETEXIT', - 'SIZE', 'SORT', 'SQRT', 'SSET', 'SUBSTR', 'TABLE', 'THAW', 'TIME', 'TRACE', 'TRIM', 'UNLOAD', - 'VALUE', 'VDIFFER' -}, true) * #P('('))) - --- Identifiers. -lex:add_rule('identifier', token(lexer.DEFAULT, dotted_id)) - --- Strings. -local dq_str = lexer.range('"', true, false) -local sq_str = lexer.range("'", true, false) -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) - --- Comments. -lex:add_rule('comment', token(lexer.COMMENT, lexer.starts_line(lexer.to_eol(S('*#|;!'))))) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Control. -lex:add_rule('control', token(lexer.PREPROCESSOR, lexer.starts_line('-' * lexer.word))) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S'¬?$.!%*/#+-@⊥&^~\\=')) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/spin.lua b/share/vis/lexers/spin.lua @@ -1,70 +0,0 @@ --- Copyright 2017-2024 David B. Lamkins <david@lamkins.net>. See LICENSE. --- Spin LPeg lexer, see https://www.parallax.com/microcontrollers/propeller. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, R, S = lpeg.P, lpeg.R, lpeg.S - -local lex = lexer.new('spin') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. 
-lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - '_clkfreq', '_clkmode', '_free', '_stack', '_xinfreq', 'abort', 'abs', 'absneg', 'add', 'addabs', - 'adds', 'addsx', 'addx', 'and', 'andn', 'byte', 'bytefill', 'bytemove', 'call', 'case', 'chipver', - 'clkfreq', 'clkmode', 'clkset', 'cmp', 'cmps', 'cmpsub', 'cmpsx', 'cmpx', 'cnt', 'cogid', - 'coginit', 'cognew', 'cogstop', 'con', 'constant', 'ctra', 'ctrb', 'dat', 'dira', 'dirb', 'djnz', - 'else', 'elseif', 'elseifnot', 'enc', 'false', 'file', 'fit', 'float', 'from', 'frqa', 'frqb', - 'hubop', 'if', 'ifnot', 'if_a', 'if_ae', 'if_always', 'if_b', 'if_be', 'if_c', 'if_c_and_nz', - 'if_c_and_z', 'if_c_eq_z', 'if_c_ne_z', 'if_c_or_nz', 'if_c_or_z', 'if_e', 'if_nc', - 'if_nc_and_nz', 'if_nc_and_z', 'if_nc_or_nz', 'if_nc_or_z', 'if_ne', 'if_never', 'if_nz', - 'if_nz_and_c', 'if_nz_and_nc', 'if_nz_or_c', 'if_nz_or_nc', 'if_z', 'if_z_and_c', 'if_z_and_nc', - 'if_z_eq_c', 'if_z_ne_c', 'if_z_or_c', 'if_z_or_nc', 'ina', 'inb', 'jmp', 'jmpret', 'lockclr', - 'locknew', 'lockret', 'lockset', 'long', 'longfill', 'longmove', 'lookdown', 'lookdownz', - 'lookup', 'lookupz', 'max', 'maxs', 'min', 'mins', 'mov', 'movd', 'movi', 'movs', 'mul', 'muls', - 'muxc', 'muxnc', 'muxnz', 'muxz', 'neg', 'negc', 'negnc', 'negnz', 'negx', 'negz', 'next', 'nop', - 'not', 'nr', 'obj', 'ones', 'or', 'org', 'other', 'outa', 'outb', 'par', 'phsa', 'phsb', 'pi', - 'pll1x', 'pll2x', 'pll4x', 'pll8x', 'pll16x', 'posx', 'pri', 'pub', 'quit', 'rcfast', 'rcl', - 'rcr', 'rcslow', 'rdbyte', 'rdlong', 'rdword', 'reboot', 'repeat', 'res', 'result', 'ret', - 'return', 'rev', 'rol', 'ror', 'round', 'sar', 'shl', 'shr', 'spr', 'step', 'strcomp', 'string', - 'strsize', 'sub', 'subabs', 'subs', 'subsx', 'subx', 'sumc', 'sumnc', 'sumnz', 'sumz', 'test', - 'testn', 'tjnz', 'tjz', 'to', 'true', 'trunc', 'until', 'var', 'vcfg', 'vscl', 'waitcnt', - 'waitpeq', 'waitpne', 'waitvid', 'wc', 'while', 'word', 'wordfill', 'wordmove', 'wr', 'wrbyte', - 'wrlong', 'wz', 
'xinput', 'xor', 'xtal1', 'xtal2', 'xtal3' -})) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. -lex:add_rule('string', token(lexer.STRING, lexer.range('"', true))) - --- Comments. -local line_comment = lexer.to_eol(P("''") + "'") -local block_comment = lexer.range('{', '}') -local block_doc_comment = lexer.range('{{', '}}') -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_doc_comment + block_comment)) - --- Numbers. -local bin = '%' * S('01_')^1 -local ter = '%%' * (R('03') + '_')^1 -local hex = '$' * (lexer.xdigit + '_')^1 -local dec = (lexer.digit + '_')^1 -local int = bin + ter + dec + hex -local rad = P('.') - '..' -local exp = (S('Ee') * S('+-')^-1 * int)^-1 -local flt = dec * (rad * dec)^-1 * exp + dec^-1 * rad * dec * exp -lex:add_rule('number', token(lexer.NUMBER, flt + int)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, - P('--') + '++' + '^^' + '||' + '~~' + '|<' + '>|' + '@@' + ':=' + '+=' + '-=' + '*=' + '/=' + '**' + - '**=' + '//' + '//=' + '#>' + '#>=' + '<#' + '<#=' + '~>' + '~>=' + '<<' + '<<=' + '>>' + '>>=' + - '<-' + '<-=' + '->' + '->=' + '><' + '><=' + '&=' + '|=' + 'and=' + 'or=' + '==' + '===' + '<>' + - '<>=' + '<=' + '>=' + '=<' + '=<=' + '=>' + '=>=' + '..' + S('+-/*<>~!&=^|?:.()[]@#\\'))) - -lexer.property['scintillua.comment'] = "'" - -return lex diff --git a/share/vis/lexers/sql.lua b/share/vis/lexers/sql.lua @@ -1,64 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- SQL LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('sql') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. 
-lex:add_rule('keyword', token(lexer.KEYWORD, word_match({ - 'add', 'all', 'alter', 'analyze', 'and', 'as', 'asc', 'asensitive', 'before', 'between', 'bigint', - 'binary', 'blob', 'both', 'by', 'call', 'cascade', 'case', 'change', 'char', 'character', 'check', - 'collate', 'column', 'condition', 'connection', 'constraint', 'continue', 'convert', 'create', - 'cross', 'current_date', 'current_time', 'current_timestamp', 'current_user', 'cursor', - 'database', 'databases', 'day_hour', 'day_microsecond', 'day_minute', 'day_second', 'dec', - 'decimal', 'declare', 'default', 'delayed', 'delete', 'desc', 'describe', 'deterministic', - 'distinct', 'distinctrow', 'div', 'double', 'drop', 'dual', 'each', 'else', 'elseif', 'enclosed', - 'escaped', 'exists', 'exit', 'explain', 'false', 'fetch', 'float', 'for', 'force', 'foreign', - 'from', 'fulltext', 'goto', 'grant', 'group', 'having', 'high_priority', 'hour_microsecond', - 'hour_minute', 'hour_second', 'if', 'ignore', 'in', 'index', 'infile', 'inner', 'inout', - 'insensitive', 'insert', 'int', 'integer', 'interval', 'into', 'is', 'iterate', 'join', 'key', - 'keys', 'kill', 'leading', 'leave', 'left', 'like', 'limit', 'lines', 'load', 'localtime', - 'localtimestamp', 'lock', 'long', 'longblob', 'longtext', 'loop', 'low_priority', 'match', - 'mediumblob', 'mediumint', 'mediumtext', 'middleint', 'minute_microsecond', 'minute_second', - 'mod', 'modifies', 'natural', 'not', 'no_write_to_binlog', 'null', 'numeric', 'on', 'optimize', - 'option', 'optionally', 'or', 'order', 'out', 'outer', 'outfile', 'precision', 'primary', - 'procedure', 'purge', 'read', 'reads', 'real', 'references', 'regexp', 'rename', 'repeat', - 'replace', 'require', 'restrict', 'return', 'revoke', 'right', 'rlike', 'schema', 'schemas', - 'second_microsecond', 'select', 'sensitive', 'separator', 'set', 'show', 'smallint', 'soname', - 'spatial', 'specific', 'sql', 'sqlexception', 'sqlstate', 'sqlwarning', 'sql_big_result', - 'sql_calc_found_rows', 
'sql_small_result', 'ssl', 'starting', 'straight_join', 'table', - 'terminated', 'text', 'then', 'tinyblob', 'tinyint', 'tinytext', 'to', 'trailing', 'trigger', - 'true', 'undo', 'union', 'unique', 'unlock', 'unsigned', 'update', 'usage', 'use', 'using', - 'utc_date', 'utc_time', 'utc_timestamp', 'values', 'varbinary', 'varchar', 'varcharacter', - 'varying', 'when', 'where', 'while', 'with', 'write', 'xor', 'year_month', 'zerofill' -}, true))) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local sq_str = lexer.range("'") -local dq_str = lexer.range('"') -local bq_str = lexer.range('`') -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str + bq_str)) - --- Comments. -local line_comment = lexer.to_eol(P('--') + '#') -local block_comment = lexer.range('/*', '*/') -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S(',()'))) - -lexer.property['scintillua.comment'] = '--' - -return lex diff --git a/share/vis/lexers/strace.lua b/share/vis/lexers/strace.lua @@ -1,31 +0,0 @@ --- Copyright 2017-2024 Marc André Tanner. See LICENSE. 
--- strace(1) output lexer - -local lexer = lexer -local S, B = lpeg.S, lpeg.B - -local lex = lexer.new(..., {lex_by_line = true}) - --- Syscall -lex:add_rule('syscall', lex:tag(lexer.FUNCTION, lexer.starts_line(lexer.word))) - --- Upper case constants -lex:add_rule('constant', - lex:tag(lexer.CONSTANT, (lexer.upper + '_') * (lexer.upper + lexer.digit + '_')^0)) - --- Single and double quoted strings -local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"', true) -lex:add_rule('string', lex:tag(lexer.STRING, sq_str + dq_str)) - --- Comments and text in parentheses at the line end -local comment = lexer.range('/*', '*/') -local description = lexer.range('(', ')') * lexer.newline -lex:add_rule('comment', lex:tag(lexer.COMMENT, comment + description)) - -lex:add_rule('result', lex:tag(lexer.TYPE, B(' = ') * lexer.integer)) -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.float + lexer.integer)) -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('+-/*%<>~!=^&|?~:;,.()[]{}'))) - -return lex diff --git a/share/vis/lexers/systemd.lua b/share/vis/lexers/systemd.lua @@ -1,133 +0,0 @@ --- Copyright 2016-2024 Christian Hesse. See LICENSE. --- systemd unit file LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('systemd', {lex_by_line = true}) - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - -- Boolean values. - 'true', 'false', 'on', 'off', 'yes', 'no', - -- Service types. - 'forking', 'simple', 'oneshot', 'dbus', 'notify', 'idle', - -- Special system units. 
- 'basic.target', 'ctrl-alt-del.target', 'cryptsetup.target', 'dbus.service', 'dbus.socket', - 'default.target', 'display-manager.service', 'emergency.target', 'exit.target', 'final.target', - 'getty.target', 'graphical.target', 'hibernate.target', 'hybrid-sleep.target', 'halt.target', - 'initrd-fs.target', 'kbrequest.target', 'kexec.target', 'local-fs.target', 'multi-user.target', - 'network-online.target', 'paths.target', 'poweroff.target', 'reboot.target', 'remote-fs.target', - 'rescue.target', 'initrd-root-fs.target', 'runlevel2.target', 'runlevel3.target', - 'runlevel4.target', 'runlevel5.target', 'shutdown.target', 'sigpwr.target', 'sleep.target', - 'slices.target', 'sockets.target', 'suspend.target', 'swap.target', 'sysinit.target', - 'syslog.socket', 'system-update.target', 'timers.target', 'umount.target', - -- Special system units for devices. - 'bluetooth.target', 'printer.target', 'smartcard.target', 'sound.target', - -- Special passive system units. - 'cryptsetup-pre.target', 'local-fs-pre.target', 'network.target', 'network-pre.target', - 'nss-lookup.target', 'nss-user-lookup.target', 'remote-fs-pre.target', 'rpcbind.target', - 'time-sync.target', - -- Specail slice units. - '-.slice', 'system.slice', 'user.slice', 'machine.slice', - -- Environment variables. - 'PATH', 'LANG', 'USER', 'LOGNAME', 'HOME', 'SHELL', 'XDG_RUNTIME_DIR', 'XDG_SESSION_ID', - 'XDG_SEAT', 'XDG_VTNR', 'MAINPID', 'MANAGERPID', 'LISTEN_FDS', 'LISTEN_PID', 'LISTEN_FDNAMES', - 'NOTIFY_SOCKET', 'WATCHDOG_PID', 'WATCHDOG_USEC', 'TERM' -})) - --- Options. -lex:add_rule('option', token(lexer.PREPROCESSOR, word_match{ - -- Unit section. 
- 'Description', 'Documentation', 'Requires', 'Requisite', 'Wants', 'BindsTo', 'PartOf', - 'Conflicts', 'Before', 'After', 'OnFailure', 'PropagatesReloadTo', 'ReloadPropagatedFrom', - 'JoinsNamespaceOf', 'RequiresMountsFor', 'OnFailureJobMode', 'IgnoreOnIsolate', - 'StopWhenUnneeded', 'RefuseManualStart', 'RefuseManualStop', 'AllowIsolate', - 'DefaultDependencies', 'JobTimeoutSec', 'JobTimeoutAction', 'JobTimeoutRebootArgument', - 'StartLimitInterval', 'StartLimitBurst', 'StartLimitAction', 'RebootArgument', - 'ConditionArchitecture', 'ConditionVirtualization', 'ConditionHost', 'ConditionKernelCommandLine', - 'ConditionSecurity', 'ConditionCapability', 'ConditionACPower', 'ConditionNeedsUpdate', - 'ConditionFirstBoot', 'ConditionPathExists', 'ConditionPathExistsGlob', - 'ConditionPathIsDirectory', 'ConditionPathIsSymbolicLink', 'ConditionPathIsMountPoint', - 'ConditionPathIsReadWrite', 'ConditionDirectoryNotEmpty', 'ConditionFileNotEmpty', - 'ConditionFileIsExecutable', 'AssertArchitecture', 'AssertVirtualization', 'AssertHost', - 'AssertKernelCommandLine', 'AssertSecurity', 'AssertCapability', 'AssertACPower', - 'AssertNeedsUpdate', 'AssertFirstBoot', 'AssertPathExists', 'AssertPathExistsGlob', - 'AssertPathIsDirectory', 'AssertPathIsSymbolicLink', 'AssertPathIsMountPoint', - 'AssertPathIsReadWrite', 'AssertDirectoryNotEmpty', 'AssertFileNotEmpty', - 'AssertFileIsExecutable', 'SourcePath', - -- Install section. - 'Alias', 'WantedBy', 'RequiredBy', 'Also', 'DefaultInstance', - -- Service section. 
- 'Type', 'RemainAfterExit', 'GuessMainPID', 'PIDFile', 'BusName', 'BusPolicy', 'ExecStart', - 'ExecStartPre', 'ExecStartPost', 'ExecReload', 'ExecStop', 'ExecStopPost', 'RestartSec', - 'TimeoutStartSec', 'TimeoutStopSec', 'TimeoutSec', 'RuntimeMaxSec', 'WatchdogSec', 'Restart', - 'SuccessExitStatus', 'RestartPreventExitStatus', 'RestartForceExitStatus', 'PermissionsStartOnly', - 'RootDirectoryStartOnly', 'NonBlocking', 'NotifyAccess', 'Sockets', 'FailureAction', - 'FileDescriptorStoreMax', 'USBFunctionDescriptors', 'USBFunctionStrings', - -- Socket section. - 'ListenStream', 'ListenDatagram', 'ListenSequentialPacket', 'ListenFIFO', 'ListenSpecial', - 'ListenNetlink', 'ListenMessageQueue', 'ListenUSBFunction', 'SocketProtocol', 'BindIPv6Only', - 'Backlog', 'BindToDevice', 'SocketUser', 'SocketGroup', 'SocketMode', 'DirectoryMode', 'Accept', - 'Writable', 'MaxConnections', 'KeepAlive', 'KeepAliveTimeSec', 'KeepAliveIntervalSec', - 'KeepAliveProbes', 'NoDelay', 'Priority', 'DeferAcceptSec', 'ReceiveBuffer', 'SendBuffer', - 'IPTOS', 'IPTTL', 'Mark', 'ReusePort', 'SmackLabel', 'SmackLabelIPIn', 'SmackLabelIPOut', - 'SELinuxContextFromNet', 'PipeSize', 'MessageQueueMaxMessages', 'MessageQueueMessageSize', - 'FreeBind', 'Transparent', 'Broadcast', 'PassCredentials', 'PassSecurity', 'TCPCongestion', - 'ExecStartPre', 'ExecStartPost', 'ExecStopPre', 'ExecStopPost', 'TimeoutSec', 'Service', - 'RemoveOnStop', 'Symlinks', 'FileDescriptorName', - -- Mount section. - 'What', 'Where', 'Type', 'Options', 'SloppyOptions', 'DirectoryMode', 'TimeoutSec', - -- Path section. - 'PathExists', 'PathExistsGlob', 'PathChanged', 'PathModified', 'DirectoryNotEmpty', 'Unit', - 'MakeDirectory', 'DirectoryMode', - -- Timer section. - 'OnActiveSec', 'OnBootSec', 'OnStartupSec', 'OnUnitActiveSec', 'OnUnitInactiveSec', 'OnCalendar', - 'AccuracySec', 'RandomizedDelaySec', 'Unit', 'Persistent', 'WakeSystem', 'RemainAfterElapse', - -- Exec section. 
- 'WorkingDirectory', 'RootDirectory', 'User', 'Group', 'SupplementaryGroups', 'Nice', - 'OOMScoreAdjust', 'IOSchedulingClass', 'IOSchedulingPriority', 'CPUSchedulingPolicy', - 'CPUSchedulingPriority', 'CPUSchedulingResetOnFork', 'CPUAffinity', 'UMask', 'Environment', - 'EnvironmentFile', 'PassEnvironment', 'StandardInput', 'StandardOutput', 'StandardError', - 'TTYPath', 'TTYReset', 'TTYVHangup', 'TTYVTDisallocate', 'SyslogIdentifier', 'SyslogFacility', - 'SyslogLevel', 'SyslogLevelPrefix', 'TimerSlackNSec', 'LimitCPU', 'LimitFSIZE', 'LimitDATA', - 'LimitSTACK', 'LimitCORE', 'LimitRSS', 'LimitNOFILE', 'LimitAS', 'LimitNPROC', 'LimitMEMLOCK', - 'LimitLOCKS', 'LimitSIGPENDING', 'LimitMSGQUEUE', 'LimitNICE', 'LimitRTPRIO', 'LimitRTTIME', - 'PAMName', 'CapabilityBoundingSet', 'AmbientCapabilities', 'SecureBits', 'Capabilities', - 'ReadWriteDirectories', 'ReadOnlyDirectories', 'InaccessibleDirectories', 'PrivateTmp', - 'PrivateDevices', 'PrivateNetwork', 'ProtectSystem', 'ProtectHome', 'MountFlags', - 'UtmpIdentifier', 'UtmpMode', 'SELinuxContext', 'AppArmorProfile', 'SmackProcessLabel', - 'IgnoreSIGPIPE', 'NoNewPrivileges', 'SystemCallFilter', 'SystemCallErrorNumber', - 'SystemCallArchitectures', 'RestrictAddressFamilies', 'Personality', 'RuntimeDirectory', - 'RuntimeDirectoryMode' -})) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, (lexer.alpha + '_') * (lexer.alnum + S('_.'))^0)) - --- Strings. -local sq_str = lexer.range("'") -local dq_str = lexer.range('"') -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) - --- Sections. -lex:add_rule('section', token(lexer.LABEL, '[' * - word_match('Automount BusName Install Mount Path Service Service Socket Timer Unit') * ']')) - --- Comments. -lex:add_rule('comment', token(lexer.COMMENT, lexer.starts_line(lexer.to_eol(S(';#'))))) - --- Numbers. 
-local integer = S('+-')^-1 * (lexer.hex_num + lexer.oct_num_('_') + lexer.dec_num_('_')) -lex:add_rule('number', token(lexer.NUMBER, lexer.float + integer)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, '=')) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/taskpaper.lua b/share/vis/lexers/taskpaper.lua @@ -1,27 +0,0 @@ --- Copyright (c) 2016-2024 Larry Hynes. See LICENSE. --- Taskpaper LPeg lexer - -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(..., {lex_by_line = true}) - --- Notes. -local delimiter = lpeg.B(' ') + lpeg.B('\t') -lex:add_rule('note', delimiter * lex:tag('note', lexer.to_eol(lexer.alnum))) - --- Tasks. -lex:add_rule('task', delimiter * lex:tag(lexer.LIST, '-')) - --- Projects. -lex:add_rule('project', lex:tag(lexer.HEADING, - lexer.range(lexer.starts_line(lexer.alnum), ':') * lexer.newline)) - --- Tags. -lex:add_rule('extended_tag', lex:tag(lexer.TAG .. '.extended', '@' * lexer.word * '(' * - (lexer.word + lexer.digit + '-')^1 * ')')) -lex:add_rule('day_tag', lex:tag(lexer.TAG .. '.day', (P('@today') + '@tomorrow'))) -lex:add_rule('overdue_tag', lex:tag(lexer.TAG .. '.overdue', '@overdue')) -lex:add_rule('plain_tag', lex:tag(lexer.TAG .. '.plain', '@' * lexer.word)) - -return lex diff --git a/share/vis/lexers/tcl.lua b/share/vis/lexers/tcl.lua @@ -1,47 +0,0 @@ --- Copyright 2014-2024 Joshua Krämer. See LICENSE. --- Tcl LPeg lexer. --- This lexer follows the TCL dodekalogue (http://wiki.tcl.tk/10259). --- It is based on the previous lexer by Mitchell. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('tcl') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Comment. 
-lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#' * P(function(input, index) - local i = index - 2 - while i > 0 and input:find('^[ \t]', i) do i = i - 1 end - if i < 1 or input:find('^[\r\n;]', i) then return true end -end)))) - --- Separator (semicolon). -lex:add_rule('separator', token(lexer.CLASS, ';')) - --- Argument expander. -lex:add_rule('expander', token(lexer.LABEL, '{*}')) - --- Delimiters. -lex:add_rule('braces', token(lexer.KEYWORD, S('{}'))) -lex:add_rule('quotes', token(lexer.FUNCTION, '"')) -lex:add_rule('brackets', token(lexer.VARIABLE, S('[]'))) - --- Variable substitution. -lex:add_rule('variable', token(lexer.STRING, '$' * (lexer.alnum + '_' + P(':')^2)^0)) - --- Backslash substitution. -local oct = lexer.digit * lexer.digit^-2 -local hex = 'x' * lexer.xdigit^1 -local unicode = 'u' * lexer.xdigit * lexer.xdigit^-3 -lex:add_rule('backslash', token(lexer.TYPE, '\\' * (oct + hex + unicode + 1))) - --- Fold points. -lex:add_fold_point(lexer.KEYWORD, '{', '}') - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/tex.lua b/share/vis/lexers/tex.lua @@ -1,28 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Plain TeX LPeg lexer. --- Modified by Robert Gieseke. - -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(...) - --- Comments. -lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.to_eol('%'))) - --- TeX environments. -lex:add_rule('environment', lex:tag('environment', '\\' * (P('begin') + 'end') * lexer.word)) - --- Commands. -lex:add_rule('command', lex:tag('command', '\\' * (lexer.alpha^1 + S('#$&~_^%{}')))) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('$&#{}[]'))) - --- Fold points. 
-lex:add_fold_point('environment', '\\begin', '\\end') -lex:add_fold_point(lexer.OPERATOR, '{', '}') - -lexer.property['scintillua.comment'] = '%' - -return lex diff --git a/share/vis/lexers/texinfo.lua b/share/vis/lexers/texinfo.lua @@ -1,207 +0,0 @@ --- Copyright 2014-2024 stef@ailleurs.land. See LICENSE. --- Plain Texinfo version 5.2 LPeg lexer --- Freely inspired from Mitchell work and valuable help from him too ! - --- Directives are processed (more or less) in the Reference Card Texinfo order Reference Card --- page for each directive group is in comment for reference - ---[[ -Note: Improving Fold Points use with Texinfo - -At the very beginning of your Texinfo file, it could be wised to insert theses alias : - -@alias startchapter = comment -@alias endchapter = comment - -Then use this to begin each chapter : - -@endchapter -------------------------------------------------------------------- -@chapter CHAPTER TITLE -@startchapter ------------------------------------------------------------------ - -With the use of Scintilla's `SCI_FOLDALL(SC_FOLDACTION_TOGGLE)` or Textadept's -`buffer:fold_all(buffer.FOLDACTION_TOGGLE)`, you have then a nice chapter folding, useful with -large documents. -]] - -local lexer = lexer -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(...) - --- Directives. -lex:add_rule('directive', - lex:tag('command', ('@end' * lexer.space^1 + '@') * lex:word_match('directive', true))) - --- Chapters. -lex:add_rule('chapter', lex:tag('command.section', - ('@end' * lexer.space^1 + '@') * lex:word_match('chapter', true))) - --- Common keywords. 
-lex:add_rule('keyword', lex:tag(lexer.KEYWORD, ('@end' * lexer.space^1 + '@') * - lex:word_match(lexer.KEYWORD, true))) - --- Italics -local nested_braces = lexer.range('{', '}', false, false, true) -lex:add_rule('emph', lex:tag(lexer.ITALIC, '@emph' * nested_braces)) - --- Bold -lex:add_rule('strong', lex:tag(lexer.BOLD, '@strong' * nested_braces)) - --- Identifiers -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) - --- Strings. -lex:add_rule('string', lex:tag(lexer.STRING, nested_braces)) - --- Numbers. -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number)) - --- Comments. -local line_comment = lexer.to_eol('@c', true) --- local line_comment_long = lexer.to_eol('@comment', true) -local block_comment = lexer.range('@ignore', '@end ignore') -lex:add_rule('comment', lex:tag(lexer.COMMENT, line_comment + block_comment)) - --- Fold points. -lex:add_fold_point('command', '@titlepage', '@end titlepage') -lex:add_fold_point('command', '@copying', '@end copying') -lex:add_fold_point('command', '@ifset', '@end ifset') -lex:add_fold_point('command', '@tex', '@end tex') -lex:add_fold_point('command', '@itemize', '@end itemize') -lex:add_fold_point('command', '@enumerate', '@end enumerate') -lex:add_fold_point('command', '@multitable', '@end multitable') -lex:add_fold_point('command', '@example', '@end example') -lex:add_fold_point('command', '@smallexample', '@end smallexample') -lex:add_fold_point('command', '@cartouche', '@end cartouche') -lex:add_fold_point('command', '@startchapter', '@end startchapter') - --- Word lists. 
-lex:set_word_list('directive', { - 'end', - -- Custom keywords for chapter folding - 'startchapter', 'endchapter', - -- List and tables (page 2, column 2) - 'itemize', 'enumerate', - -- Beginning a Texinfo document (page 1, column 1) - 'titlepage', 'copying', - -- Block environments (page 2, column 1) - 'cartouche', - -- Block environments > Displays using fixed-width fonts (page 2, column 2) - 'example', 'smallexample', - -- List and tables (page 2, column 2) - 'multitable', - -- Floating Displays (page 2, column 3) - 'float', 'listoffloats', 'caption', 'shortcaption', 'image', - -- Floating Displays > Footnotes (page 2, column 3) - 'footnote', 'footnotestyle', - -- Conditionally (in)visible text > Output formats (page 3, column 3) - 'ifdocbook', 'ifhtml', 'ifinfo', 'ifplaintext', 'iftex', 'ifxml', 'ifnotdocbook', 'ifnothtml', - 'ifnotplaintext', 'ifnottex', 'ifnotxml', 'ifnotinfo', 'inlinefmt', 'inlinefmtifelse', - -- Conditionally (in)visible text > Raw formatter text (page 4, column 1) - 'docbook', 'html', 'tex', 'xml', 'inlineraw', - -- Conditionally (in)visible text > Documents variables (page 4, column 1) - 'set', 'clear', 'value', 'ifset', 'ifclear', 'inlineifset', 'inlineifclear', - -- Conditionally (in)visible text > Testing for commands (page 4, column 1) - 'ifcommanddefined', 'ifcommandnotdefined', 'end', - -- Defining new Texinfo commands (page 4, column 1) - 'alias', 'macro', 'unmacro', 'definfounclose', - -- File inclusion (page 4, column 1) - 'include', 'verbatiminclude', - -- Formatting and headers footers for TeX (page 4, column 1) - 'allowcodebreaks', 'finalout', 'fonttextsize', - -- Formatting and headers footers for TeX > paper size (page 4, column 2) - 'smallbook', 'afourpaper', 'afivepaper', 'afourlatex', 'afourwide', 'pagesizes', - -- Formatting and headers footers for TeX > Page headers and footers (page 4, column 2) - -- not implemented - -- Document preferences (page 4, column 2) - -- not implemented - -- Ending a Texinfo document (page 
4, column 2) - 'bye' -}) - -lex:set_word_list('chapter', { - -- Chapter structuring (page 1, column 2) - 'lowersections', 'raisesections', 'part', - -- Chapter structuring > Numbered, included in contents (page 1, column 2) - 'chapter', 'centerchap', - -- Chapter structuring > Context-dependent, included in contents (page 1, column 2) - 'section', 'subsection', 'subsubsection', - -- Chapter structuring > Unumbered, included in contents (page 1, column 2) - 'unnumbered', 'unnumberedsec', 'unnumberedsubsec', 'unnumberedsubsection', 'unnumberedsubsubsec', - 'unnumberedsubsubsection', - -- Chapter structuring > Letter and numbered, included in contents (page 1, column 2) - 'appendix', 'appendixsec', 'appendixsection', 'appendixsubsec', 'appendixsubsection', - 'appendixsubsubsec', 'appendixsubsubsection', - -- Chapter structuring > Unumbered, not included in contents, no new page (page 1, column 3) - 'chapheading', 'majorheading', 'heading', 'subheading', 'subsubheading' -}) - -lex:set_word_list(lexer.KEYWORD, { - 'end', - -- Beginning a Texinfo document (page 1, column 1) - 'setfilename', 'settitle', 'insertcopying', - -- Beginning a Texinfo document > Internationlization (page 1, column 1) - 'documentencoding', 'documentlanguage', 'frenchspacing', - -- Beginning a Texinfo document > Info directory specification and HTML document description - -- (page 1, column 1) - 'dircategory', 'direntry', 'documentdescription', - -- Beginning a Texinfo document > Titre pages (page 1, column 1) - 'shorttitlepage', 'center', 'titlefont', 'title', 'subtitle', 'author', - -- Beginning a Texinfo document > Tables of contents (page 1, column 2) - 'shortcontents', 'summarycontents', 'contents', 'setcontentsaftertitlepage', - 'setshortcontentsaftertitlepage', - -- Nodes (page 1, column 2) - 'node', 'top', 'anchor', 'novalidate', - -- Menus (page 1, column 2) - 'menu', 'detailmenu', - -- Cross references > Within the Info system (page 1, column 3) - 'xref', 'pxref', 'ref', 'inforef', 
'xrefautomaticsectiontitle', - -- Cross references > Outside of info (page 1, column 3) - 'url', 'cite', - -- Marking text > Markup for regular text (page 1, column 3) - 'var', 'dfn', 'acronym', 'abbr', - -- Marking text > Markup for litteral text (page 1, column 3) - 'code', 'file', 'command', 'env', 'option', 'kbd', 'key', 'email', 'indicateurl', 'samp', 'verb', - -- Marking text > GUI sequences (page 2, column 1) - 'clicksequence', 'click', 'clickstyle', 'arrow', - -- Marking text > Math (page 2, column 1) - 'math', 'minus', 'geq', 'leq', - -- Marking text > Explicit font selection (page 2, column 1) - 'sc', 'r', 'i', 'slanted', 'b', 'sansserif', 't', - -- Block environments (page 2, column 1) - 'noindent', 'indent', 'exdent', - -- Block environments > Normally filled displays using regular text fonts (page 2, column 1) - 'quotation', 'smallquotation', 'indentedblock', 'smallindentedblock', 'raggedright', - -- Block environments > Line-for-line displays using regular test fonts (page 2, column 2) - 'format', 'smallformat', 'display', 'smalldisplay', 'flushleft', 'flushright', - -- Block environments > Displays using fixed-width fonts (page 2, column 2) - 'lisp', 'smalllisp', 'verbatim', - -- List and tables (page 2, column 2) - 'table', 'ftable', 'vtable', 'tab', 'item', 'itemx', 'headitem', 'headitemfont', 'asis', - -- Indices (page 2, column 3) - 'cindex', 'findex', 'vindex', 'kindex', 'pindex', 'tindex', 'defcodeindex', 'syncodeindex', - 'synindex', 'printindex', - -- Insertions within a paragraph > Characters special to Texinfo (page 2, column 3) - '@', '{', '}', 'backslashcar', 'comma', 'hashcar', ':', '.', '?', '!', 'dmn', - -- Insertions within a paragraph > Accents (page 3, column 1) - -- not implemented - -- Insertions within a paragraph > Non-English characters (page 3, column 1) - -- not implemented - -- Insertions within a paragraph > Other text characters an logos (page 3, column 1) - 'bullet', 'dots', 'enddots', 'euro', 'pounds', 'textdegree', 
'copyright', 'registeredsymbol', - 'TeX', 'LaTeX', 'today', 'guillemetleft', 'guillementright', 'guillemotleft', 'guillemotright', - -- Insertions within a paragraph > Glyphs for code examples (page 3, column 2) - 'equiv', 'error', 'expansion', 'point', 'print', 'result', - -- Making and preventing breaks (page 3, column 2) - '*', '/', '-', 'hyphenation', 'tie', 'w', 'refill', - -- Vertical space (page 3, column 2) - 'sp', 'page', 'need', 'group', 'vskip' - -- Definition commands (page 3, column 2) - -- not implemented -}) - -lexer.property['scintillua.comment'] = '@c' - -return lex diff --git a/share/vis/lexers/text.lua b/share/vis/lexers/text.lua @@ -1,10 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Text LPeg lexer. - -local lexer = require('lexer') - -local lex = lexer.new('text') - -lex:add_rule('whitespace', lexer.token(lexer.WHITESPACE, lexer.space^1)) - -return lex diff --git a/share/vis/lexers/toml.lua b/share/vis/lexers/toml.lua @@ -1,44 +0,0 @@ --- Copyright 2015-2024 Alejandro Baez (https://keybase.io/baez). See LICENSE. --- TOML LPeg lexer. - -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(..., {fold_by_indentation = true}) - --- Keywords. -lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lexer.word_match('true false'))) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local sq_str = lexer.range("'") -local dq_str = lexer.range('"') -lex:add_rule('string', lex:tag(lexer.STRING, sq_str + dq_str)) - --- Comments. -lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.to_eol('#'))) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('=+-,.{}[]()'))) - --- Datetime. 
-local year = lexer.digit * lexer.digit * lexer.digit * lexer.digit -local month = lexer.digit * lexer.digit^-1 -local day = lexer.digit * lexer.digit^-1 -local date = year * '-' * month * '-' * day -local hours = lexer.digit * lexer.digit^-1 -local minutes = lexer.digit * lexer.digit -local seconds = lexer.digit * lexer.digit -local fraction = '.' * lexer.digit^0 -local time = hours * ':' * minutes * ':' * seconds * fraction^-1 -local zone = 'Z' + S(' \t')^0 * S('-+') * hours * (':' * minutes)^-1 -lex:add_rule('datetime', lex:tag(lexer.NUMBER .. '.timestamp', date * (S('tT \t') * time * zone^-1))) - --- Numbers. -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number)) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/troff.lua b/share/vis/lexers/troff.lua @@ -1,42 +0,0 @@ --- Copyright 2023-2024 Mitchell. See LICENSE. --- troff/man LPeg lexer. --- Based on original Man lexer by David B. Lamkins and modified by Eolien55. - -local lexer = lexer -local P, R, S = lpeg.P, lpeg.R, lpeg.S - -local lex = lexer.new(...) - --- Registers and groff's structured programming. -lex:add_rule('keywords', lex:tag(lexer.KEYWORD, (lexer.starts_line('.') * (lexer.space - '\n')^0 * - (P('while') + 'break' + 'continue' + 'nr' + 'rr' + 'rnn' + 'aln' + '\\}')) + '\\{')) - --- Markup. 
-lex:add_rule('escape_sequences', lex:tag(lexer.VARIABLE, - '\\' * (('s' * S('+-')^-1) + S('*fgmnYV'))^-1 * (P('(') * 2 + lexer.range('[', ']') + 1))) - -lex:add_rule('headings', lex:tag(lexer.NUMBER, - lexer.starts_line('.') * (lexer.space - '\n')^0 * (S('STN') * 'H') * (lexer.space - '\n') * - lexer.nonnewline^0)) -lex:add_rule('man_alignment', lex:tag(lexer.KEYWORD, - lexer.starts_line('.') * (lexer.space - '\n')^0 * (P('br') + 'DS' + 'RS' + 'RE' + 'PD' + 'PP') * - lexer.space)) -lex:add_rule('font', lex:tag(lexer.VARIABLE, - lexer.starts_line('.') * (lexer.space - '\n')^0 * ('B' * P('R')^-1 + 'I' * S('PR')^-1) * - lexer.space)) - --- Lowercase troff macros are plain macros (like .so or .nr). -lex:add_rule('troff_plain_macros', lex:tag(lexer.VARIABLE, lexer.starts_line('.') * - (lexer.space - '\n')^0 * lexer.lower^1)) -lex:add_rule('any_macro', lex:tag(lexer.PREPROCESSOR, - lexer.starts_line('.') * (lexer.space - '\n')^0 * (lexer.any - lexer.space)^0)) -lex:add_rule('comment', lex:tag(lexer.COMMENT, - (lexer.starts_line('.\\"') + '\\"' + '\\#') * lexer.nonnewline^0)) -lex:add_rule('string', lex:tag(lexer.STRING, lexer.range('"', true))) - --- Usually used by eqn, and mandoc in some way. -lex:add_rule('in_dollars', lex:tag(lexer.EMBEDDED, lexer.range('$', false, false))) - --- TODO: a lexer for each preprocessor? - -return lex diff --git a/share/vis/lexers/txt2tags.lua b/share/vis/lexers/txt2tags.lua @@ -1,131 +0,0 @@ --- Copyright 2019-2024 Julien L. See LICENSE. --- txt2tags LPeg lexer. --- (developed and tested with Txt2tags Markup Rules --- [https://txt2tags.org/doc/english/rules.t2t]) --- Contributed by Julien L. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S -local nonspace = lexer.any - lexer.space - -local lex = lexer.new('txt2tags') - --- Whitespace. 
-local ws = token(lexer.WHITESPACE, (lexer.space - lexer.newline)^1) - --- Titles -local alphanumeric = lexer.alnum + S('_-') -local header_label = token('header_label_start', '[') * token(lexer.LABEL, alphanumeric^1) * - token('header_label_end', ']') -local function h(level) - local equal = string.rep('=', level) * (lexer.nonnewline - '=')^1 * string.rep('=', level) - local plus = string.rep('+', level) * (lexer.nonnewline - '+')^1 * string.rep('+', level) - return token('h' .. level, equal + plus) * header_label^-1 -end -local header = h(5) + h(4) + h(3) + h(2) + h(1) - --- Comments. -local line_comment = lexer.to_eol(lexer.starts_line('%')) -local block_comment = lexer.range(lexer.starts_line('%%%')) -local comment = token(lexer.COMMENT, block_comment + line_comment) - --- Inline. -local function span(name, delimiter) - return token(name, (delimiter * nonspace * delimiter * S(delimiter)^0) + - (delimiter * nonspace * (lexer.nonnewline - nonspace * delimiter)^0 * nonspace * delimiter * - S(delimiter)^0)) -end -local bold = span(lexer.BOLD, '**') -local italic = span(lexer.ITALIC, '//') -local underline = span(lexer.UNDERLINE, '__') -local strike = span('strike', '--') -local mono = span(lexer.CODE, '``') -local raw = span(lexer.DEFAULT, '""') -local tagged = span('tagged', "''") -local inline = bold + italic + underline + strike + mono + raw + tagged - --- Link. -local email = token(lexer.LINK, - (nonspace - '@')^1 * '@' * (nonspace - '.')^1 * ('.' * (nonspace - S('.?'))^1)^1 * - ('?' * nonspace^1)^-1) -local host = token(lexer.LINK, - word_match('www ftp', true) * (nonspace - '.')^0 * '.' * (nonspace - '.')^1 * '.' * - (nonspace - S(',.'))^1) -local url = token(lexer.LINK, - (nonspace - '://')^1 * '://' * (nonspace - ',' - '.')^1 * ('.' * (nonspace - S(',./?#'))^1)^1 * - ('/' * (nonspace - S('./?#'))^0 * ('.' * (nonspace - S(',.?#'))^1)^0)^0 * - ('?' 
* (nonspace - '#')^1)^-1 * ('#' * nonspace^0)^-1) -local label_with_address = token(lexer.LABEL, '[') * lexer.space^0 * - token(lexer.LABEL, ((nonspace - ']')^1 * lexer.space^1)^1) * token(lexer.LINK, (nonspace - ']')^1) * - token(lexer.LABEL, ']') -local link = label_with_address + url + host + email - --- Line. -local line = token('line', S('-=_')^20) - --- Image. -local image_only = token('image_start', '[') * token('image', (nonspace - ']')^1) * - token('image_end', ']') -local image_link = token('image_link_start', '[') * image_only * - token('image_link_sep', lexer.space^1) * token(lexer.LINK, (nonspace - ']')^1) * - token('image_link_end', ']') -local image = image_link + image_only - --- Macro. -local macro = token(lexer.PREPROCESSOR, '%%' * (nonspace - '(')^1 * lexer.range('(', ')', true)^-1) - --- Verbatim. -local verbatim_line = lexer.to_eol(lexer.starts_line('```') * S(' \t')) -local verbatim_block = lexer.range(lexer.starts_line('```')) -local verbatim_area = token(lexer.CODE, verbatim_block + verbatim_line) - --- Raw. -local raw_line = lexer.to_eol(lexer.starts_line('"""') * S(' \t')) -local raw_block = lexer.range(lexer.starts_line('"""')) -local raw_area = token(lexer.DEFAULT, raw_block + raw_line) - --- Tagged. -local tagged_line = lexer.to_eol(lexer.starts_line('\'\'\'') * S(' \t')) -local tagged_block = lexer.range(lexer.starts_line('\'\'\'')) -local tagged_area = token('tagged_area', tagged_block + tagged_line) - --- Table. 
-local table_sep = token('table_sep', '|') -local cell_content = inline + link + image + macro + token('cell_content', lexer.nonnewline - ' |') -local header_cell_content = token('header_cell_content', lexer.nonnewline - ' |') -local field_sep = ' ' * table_sep^1 * ' ' -local table_row_end = P(' ')^0 * table_sep^0 -local table_row = lexer.starts_line(P(' ')^0 * table_sep) * cell_content^0 * - (field_sep * cell_content^0)^0 * table_row_end -local table_row_header = - lexer.starts_line(P(' ')^0 * table_sep * table_sep) * header_cell_content^0 * - (field_sep * header_cell_content^0)^0 * table_row_end -local table = table_row_header + table_row - -lex:add_rule('table', table) -lex:add_rule('link', link) -lex:add_rule('line', line) -lex:add_rule('header', header) -lex:add_rule('comment', comment) -lex:add_rule('whitespace', ws) -lex:add_rule('image', image) -lex:add_rule('macro', macro) -lex:add_rule('inline', inline) -lex:add_rule('verbatim_area', verbatim_area) -lex:add_rule('raw_area', raw_area) -lex:add_rule('tagged_area', tagged_area) - -lex:add_style('line', {bold = true}) -local font_size = tonumber(lexer.property_expanded['style.default']:match('size:(%d+)')) or 10 -for n = 5, 1, -1 do - lex:add_style('h' .. n, {fore = lexer.colors.red, size = font_size + (6 - n)}) -end -lex:add_style('image', {fore = lexer.colors.green}) -lex:add_style('strike', {italics = true}) -- a strike style is not available -lex:add_style('tagged', lexer.styles.embedded) -lex:add_style('tagged_area', lexer.styles.embedded) -- in consistency with tagged -lex:add_style('table_sep', {fore = lexer.colors.green}) -lex:add_style('header_cell_content', {fore = lexer.colors.green}) - -return lex diff --git a/share/vis/lexers/typescript.lua b/share/vis/lexers/typescript.lua @@ -1,18 +0,0 @@ --- Copyright 2021-2024 Mitchell. See LICENSE. --- TypeScript LPeg lexer. 
- -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(..., {inherit = lexer.load('javascript')}) - --- Word lists. -lex:set_word_list(lexer.KEYWORD, 'abstract as constructor declare is module namespace require type', - true) - -lex:set_word_list(lexer.TYPE, 'boolean number bigint string unknown any void never symbol object', - true) - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/vala.lua b/share/vis/lexers/vala.lua @@ -1,62 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Vala LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('vala') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'class', 'delegate', 'enum', 'errordomain', 'interface', 'namespace', 'signal', 'struct', 'using', - -- Modifiers. - 'abstract', 'const', 'dynamic', 'extern', 'inline', 'out', 'override', 'private', 'protected', - 'public', 'ref', 'static', 'virtual', 'volatile', 'weak', - -- Other. - 'as', 'base', 'break', 'case', 'catch', 'construct', 'continue', 'default', 'delete', 'do', - 'else', 'ensures', 'finally', 'for', 'foreach', 'get', 'if', 'in', 'is', 'lock', 'new', - 'requires', 'return', 'set', 'sizeof', 'switch', 'this', 'throw', 'throws', 'try', 'typeof', - 'value', 'var', 'void', 'while', - -- Etc. - 'null', 'true', 'false' -})) - --- Types. -lex:add_rule('type', token(lexer.TYPE, word_match{ - 'bool', 'char', 'double', 'float', 'int', 'int8', 'int16', 'int32', 'int64', 'long', 'short', - 'size_t', 'ssize_t', 'string', 'uchar', 'uint', 'uint8', 'uint16', 'uint32', 'uint64', 'ulong', - 'unichar', 'ushort' -})) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. 
-local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"', true) -local tq_str = lexer.range('"""') -local ml_str = '@' * lexer.range('"', false, false) -lex:add_rule('string', token(lexer.STRING, tq_str + sq_str + dq_str + ml_str)) - --- Comments. -local line_comment = lexer.to_eol('//', true) -local block_comment = lexer.range('/*', '*/') -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number * S('uUlLfFdDmM')^-1)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;.()[]{}'))) - --- Fold points. -lex:add_fold_point(lexer.OPERATOR, '{', '}') -lex:add_fold_point(lexer.COMMENT, '/*', '*/') - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/vb.lua b/share/vis/lexers/vb.lua @@ -1,68 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- VisualBasic LPeg lexer. - -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(..., {case_insensitive_fold_points = true}) - --- Keywords. -lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD, true))) - --- Types. -lex:add_rule('type', lex:tag(lexer.TYPE, lex:word_match(lexer.TYPE, true))) - --- Comments. -lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.to_eol("'" + lexer.word_match('rem', true)))) - --- Identifiers. -lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) - --- Strings. -lex:add_rule('string', lex:tag(lexer.STRING, lexer.range('"', true, false))) - --- Numbers. -lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number * S('LlUuFf')^-2)) - --- Operators. -lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('=><+-*^&:.,_()'))) - --- Fold points. 
-lex:add_fold_point(lexer.KEYWORD, 'If', 'End If') -lex:add_fold_point(lexer.KEYWORD, 'Select', 'End Select') -lex:add_fold_point(lexer.KEYWORD, 'For', 'Next') -lex:add_fold_point(lexer.KEYWORD, 'While', 'End While') -lex:add_fold_point(lexer.KEYWORD, 'While', 'Wend') -lex:add_fold_point(lexer.KEYWORD, 'Do', 'Loop') -lex:add_fold_point(lexer.KEYWORD, 'With', 'End With') -lex:add_fold_point(lexer.KEYWORD, 'Sub', 'End Sub') -lex:add_fold_point(lexer.KEYWORD, 'Function', 'End Function') -lex:add_fold_point(lexer.KEYWORD, 'Property', 'End Property') -lex:add_fold_point(lexer.KEYWORD, 'Module', 'End Module') -lex:add_fold_point(lexer.KEYWORD, 'Class', 'End Class') -lex:add_fold_point(lexer.KEYWORD, 'Try', 'End Try') - --- Word lists. -lex:set_word_list(lexer.KEYWORD, { - -- Control. - 'If', 'Then', 'Else', 'ElseIf', 'While', 'Wend', 'For', 'To', 'Each', 'In', 'Step', 'Case', - 'Select', 'Return', 'Continue', 'Do', 'Until', 'Loop', 'Next', 'With', 'Exit', - -- Operators. - 'Mod', 'And', 'Not', 'Or', 'Xor', 'Is', - -- Storage types. - 'Call', 'Class', 'Const', 'Dim', 'ReDim', 'Preserve', 'Function', 'Sub', 'Property', 'End', 'Set', - 'Let', 'Get', 'New', 'Randomize', 'Option', 'Explicit', 'On', 'Error', 'Execute', 'Module', - -- Storage modifiers. - 'Private', 'Public', 'Default', - -- Constants. - 'Empty', 'False', 'Nothing', 'Null', 'True' -}) - -lex:set_word_list(lexer.TYPE, { - 'Boolean', 'Byte', 'Char', 'Date', 'Decimal', 'Double', 'Long', 'Object', 'Short', 'Single', - 'String' -}) - -lexer.property['scintillua.comment'] = "'" - -return lex diff --git a/share/vis/lexers/vbscript.lua b/share/vis/lexers/vbscript.lua @@ -1,63 +0,0 @@ --- Copyright 2006-2017 Mitchell mitchell.att.foicica.com. See LICENSE. --- VisualBasic LPeg lexer. - -local l = require('lexer') -local token, word_match = l.token, l.word_match -local P, R, S = lpeg.P, lpeg.R, lpeg.S - -local M = {_NAME = 'vbscript'} - --- Whitespace. -local ws = token(l.WHITESPACE, l.space^1) - --- Comments. 
-local comment = token(l.COMMENT, (P("'") + word_match({'rem'}, nil, true)) * l.nonnewline^0) - --- Strings. -local string = token(l.STRING, l.range('"', true, true)) - --- Numbers. -local number = token(l.NUMBER, (l.float + l.integer) * S('LlUuFf')^-2) - --- Keywords. -local keyword = token(l.KEYWORD, word_match({ - -- Control. - 'If', 'Then', 'Else', 'ElseIf', 'While', 'Wend', 'For', 'To', 'Each', - 'In', 'Step', 'Case', 'Select', 'Return', 'Continue', 'Do', - 'Until', 'Loop', 'Next', 'With', 'Exit', - -- Operators. - 'Mod', 'And', 'Not', 'Or', 'Xor', 'Is', - -- Storage types. - 'Call', 'Class', 'Const', 'Dim', 'ReDim', 'Preserve', 'Function', 'Sub', - 'Property', 'End', 'Set', 'Let', 'Get', 'New', 'Randomize', 'Option', - 'Explicit', 'On', 'Error', 'Execute', - -- Storage modifiers. - 'Private', 'Public', 'Default', - -- Constants. - 'Empty', 'False', 'Nothing', 'Null', 'True' -}, nil, true)) - --- Types. -local type = token(l.TYPE, word_match({ - 'Boolean', 'Byte', 'Char', 'Date', 'Decimal', 'Double', 'Long', 'Object', - 'Short', 'Single', 'String' -}, nil, true)) - --- Identifiers. -local identifier = token(l.IDENTIFIER, l.word) - --- Operators. -local operator = token(l.OPERATOR, S('=><+-*^&:.,_()')) - -M._rules = { - {'whitespace', ws}, - {'keyword', keyword}, - {'type', type}, - {'comment', comment}, - {'identifier', identifier}, - {'string', string}, - {'number', number}, - {'operator', operator}, -} - -return M diff --git a/share/vis/lexers/vcard.lua b/share/vis/lexers/vcard.lua @@ -1,71 +0,0 @@ --- Copyright (c) 2015-2024 Piotr Orzechowski [drzewo.org]. See LICENSE. --- vCard 2.1, 3.0 and 4.0 LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('vcard') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Begin vCard, end vCard. 
-lex:add_rule('begin_sequence', token(lexer.KEYWORD, 'BEGIN') * token(lexer.OPERATOR, ':') * - token(lexer.COMMENT, 'VCARD')) -lex:add_rule('end_sequence', token(lexer.KEYWORD, 'END') * token(lexer.OPERATOR, ':') * - token(lexer.COMMENT, 'VCARD')) - --- vCard version (in v3.0 and v4.0 must appear immediately after BEGIN:VCARD). -lex:add_rule('version_sequence', token(lexer.KEYWORD, 'VERSION') * token(lexer.OPERATOR, ':') * - token(lexer.CONSTANT, lexer.digit^1 * ('.' * lexer.digit^1)^-1)) - --- Required properties. -local required_property = token(lexer.KEYWORD, word_match({ - 'BEGIN', 'END', 'FN', 'VERSION', -- - 'N' -- Not required in v4.0. -}, true)) * #P(':') -lex:add_rule('required_property', required_property) - --- Supported properties. -local supported_property = token(lexer.TYPE, word_match({ - 'ADR', 'BDAY', 'CATEGORIES', 'EMAIL', 'END', 'GEO', 'KEY', 'LOGO', 'NOTE', 'ORG', 'PHOTO', 'REV', - 'ROLE', 'SOUND', 'SOURCE', 'TEL', 'TITLE', 'TZ', 'UID', 'URL', - -- Supported in v4.0 only. - 'ANNIVERSARY', 'CALADRURI', 'CALURI', 'CLIENTPIDMAP', 'FBURL', 'GENDER', 'KIND', 'LANG', 'MEMBER', - 'RELATED', 'XML', - -- Not supported in v4.0. - 'AGENT', 'LABEL', 'MAILER', 'PROFILE', 'SORT-STRING', - -- Supported in v3.0 only. - 'CLASS', 'NAME', - -- Not supported in v2.1. - 'IMPP', 'NICKNAME', 'PRODID' -}, true)) * #S(':;') -lex:add_rule('supported_property', supported_property) - --- Group and property. -local identifier = lexer.alpha^1 * lexer.digit^0 * ('-' * lexer.alnum^1)^0 -local property = required_property + supported_property + - lexer.token(lexer.TYPE, S('xX') * '-' * identifier) * #S(':;') -lex:add_rule('group_sequence', token(lexer.CONSTANT, lexer.starts_line(identifier)) * - token(lexer.OPERATOR, '.') * property) - --- Extension. -lex:add_rule('extension', - token(lexer.TYPE, lexer.starts_line(S('xX') * '-' * identifier * #S(':;')))) - --- Parameter. 
-local parameter = (token(lexer.IDENTIFIER, lexer.starts_line(identifier)) + - token(lexer.STRING, identifier)) * #S(':=') -lex:add_rule('parameter', parameter) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('.:;='))) - --- Data. -lex:add_rule('data', token(lexer.IDENTIFIER, lexer.any)) - --- Fold points. -lex:add_fold_point(lexer.KEYWORD, 'BEGIN', 'END') - -return lex diff --git a/share/vis/lexers/verilog.lua b/share/vis/lexers/verilog.lua @@ -1,89 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- Verilog LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('verilog') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'always', 'assign', 'begin', 'case', 'casex', 'casez', 'default', 'deassign', 'disable', 'else', - 'end', 'endcase', 'endfunction', 'endgenerate', 'endmodule', 'endprimitive', 'endspecify', - 'endtable', 'endtask', 'for', 'force', 'forever', 'fork', 'function', 'generate', 'if', 'initial', - 'join', 'macromodule', 'module', 'negedge', 'posedge', 'primitive', 'repeat', 'release', - 'specify', 'table', 'task', 'wait', 'while', - -- Compiler directives. - '`include', '`define', '`undef', '`ifdef', '`ifndef', '`else', '`endif', '`timescale', - '`resetall', '`signed', '`unsigned', '`celldefine', '`endcelldefine', '`default_nettype', - '`unconnected_drive', '`nounconnected_drive', '`protect', '`endprotect', '`protected', - '`endprotected', '`remove_gatename', '`noremove_gatename', '`remove_netname', '`noremove_netname', - '`expand_vectornets', '`noexpand_vectornets', '`autoexpand_vectornets', - -- Signal strengths. - 'strong0', 'strong1', 'pull0', 'pull1', 'weak0', 'weak1', 'highz0', 'highz1', 'small', 'medium', - 'large' -})) - --- Function. 
-lex:add_rule('function', token(lexer.FUNCTION, word_match{ - '$stop', '$finish', '$time', '$stime', '$realtime', '$settrace', '$cleartrace', '$showscopes', - '$showvars', '$monitoron', '$monitoroff', '$random', '$printtimescale', '$timeformat', '$display', - -- Built-in primitives. - 'and', 'nand', 'or', 'nor', 'xor', 'xnor', 'buf', 'bufif0', 'bufif1', 'not', 'notif0', 'notif1', - 'nmos', 'pmos', 'cmos', 'rnmos', 'rpmos', 'rcmos', 'tran', 'tranif0', 'tranif1', 'rtran', - 'rtranif0', 'rtranif1', 'pullup', 'pulldown' -})) - --- Types. -lex:add_rule('type', token(lexer.TYPE, word_match{ - 'integer', 'reg', 'time', 'realtime', 'defparam', 'parameter', 'event', 'wire', 'wand', 'wor', - 'tri', 'triand', 'trior', 'tri0', 'tri1', 'trireg', 'vectored', 'scalared', 'input', 'output', - 'inout', 'supply0', 'supply1' -})) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. -lex:add_rule('string', token(lexer.STRING, lexer.range('"'))) - --- Comments. -local line_comment = lexer.to_eol('//') -local block_comment = lexer.range('/*', '*/') -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -local bin_suffix = S('bB') * S('01_xXzZ')^1 * -lexer.xdigit -local oct_suffix = S('oO') * S('01234567_xXzZ')^1 -local dec_suffix = S('dD') * S('0123456789_xXzZ')^1 -local hex_suffix = S('hH') * S('0123456789abcdefABCDEF_xXzZ')^1 -lex:add_rule('number', token(lexer.NUMBER, (lexer.digit + '_')^1 + "'" * - (bin_suffix + oct_suffix + dec_suffix + hex_suffix))) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('=~+-/*<>%&|^~,:;()[]{}'))) - --- Fold points. 
-lex:add_fold_point(lexer.KEYWORD, 'case', 'endcase') -lex:add_fold_point(lexer.KEYWORD, 'casex', 'endcase') -lex:add_fold_point(lexer.KEYWORD, 'casez', 'endcase') -lex:add_fold_point(lexer.KEYWORD, 'function', 'endfunction') -lex:add_fold_point(lexer.KEYWORD, 'fork', 'join') -lex:add_fold_point(lexer.KEYWORD, 'table', 'endtable') -lex:add_fold_point(lexer.KEYWORD, 'task', 'endtask') -lex:add_fold_point(lexer.KEYWORD, 'generate', 'endgenerate') -lex:add_fold_point(lexer.KEYWORD, 'specify', 'endspecify') -lex:add_fold_point(lexer.KEYWORD, 'primitive', 'endprimitive') -lex:add_fold_point(lexer.KEYWORD, 'module', 'endmodule') -lex:add_fold_point(lexer.KEYWORD, 'begin', 'end') -lex:add_fold_point(lexer.OPERATOR, '(', ')') -lex:add_fold_point(lexer.OPERATOR, '{', '}') -lex:add_fold_point(lexer.COMMENT, '/*', '*/') - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/vhdl.lua b/share/vis/lexers/vhdl.lua @@ -1,72 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- VHDL LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('vhdl') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. 
-lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'access', 'after', 'alias', 'all', 'architecture', 'array', 'assert', 'attribute', 'begin', - 'block', 'body', 'buffer', 'bus', 'case', 'component', 'configuration', 'constant', 'disconnect', - 'downto', 'else', 'elsif', 'end', 'entity', 'exit', 'file', 'for', 'function', 'generate', - 'generic', 'group', 'guarded', 'if', 'impure', 'in', 'inertial', 'inout', 'is', 'label', - 'library', 'linkage', 'literal', 'loop', 'map', 'new', 'next', 'null', 'of', 'on', 'open', - 'others', 'out', 'package', 'port', 'postponed', 'procedure', 'process', 'pure', 'range', - 'record', 'register', 'reject', 'report', 'return', 'select', 'severity', 'signal', 'shared', - 'subtype', 'then', 'to', 'transport', 'type', 'unaffected', 'units', 'until', 'use', 'variable', - 'wait', 'when', 'while', 'with', -- - 'note', 'warning', 'error', 'failure', -- - 'and', 'nand', 'or', 'nor', 'xor', 'xnor', 'rol', 'ror', 'sla', 'sll', 'sra', 'srl', 'mod', 'rem', -- - 'abs', 'not', 'false', 'true' -})) - --- Functions. -lex:add_rule('function', token(lexer.FUNCTION, word_match{ - 'rising_edge', 'shift_left', 'shift_right', 'rotate_left', 'rotate_right', 'resize', 'std_match', - 'to_integer', 'to_unsigned', 'to_signed', 'unsigned', 'signed', 'to_bit', 'to_bitvector', - 'to_stdulogic', 'to_stdlogicvector', 'to_stdulogicvector' -})) - --- Types. -lex:add_rule('type', token(lexer.TYPE, word_match{ - 'bit', 'bit_vector', 'character', 'boolean', 'integer', 'real', 'time', 'string', - 'severity_level', 'positive', 'natural', 'signed', 'unsigned', 'line', 'text', 'std_logic', - 'std_logic_vector', 'std_ulogic', 'std_ulogic_vector', 'qsim_state', 'qsim_state_vector', - 'qsim_12state', 'qsim_12state_vector', 'qsim_strength', 'mux_bit', 'mux_vectory', 'reg_bit', - 'reg_vector', 'wor_bit', 'wor_vector' -})) - --- Constants. 
-lex:add_rule('constant', token(lexer.CONSTANT, word_match{ - 'EVENT', 'BASE', 'LEFT', 'RIGHT', 'LOW', 'HIGH', 'ASCENDING', 'IMAGE', 'VALUE', 'POS', 'VAL', - 'SUCC', 'VAL', 'POS', 'PRED', 'VAL', 'POS', 'LEFTOF', 'RIGHTOF', 'LEFT', 'RIGHT', 'LOW', 'HIGH', - 'RANGE', 'REVERSE', 'LENGTH', 'ASCENDING', 'DELAYED', 'STABLE', 'QUIET', 'TRANSACTION', 'EVENT', - 'ACTIVE', 'LAST', 'LAST', 'LAST', 'DRIVING', 'DRIVING', 'SIMPLE', 'INSTANCE', 'PATH' -})) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, (lexer.alpha + "'") * (lexer.alnum + S("_'"))^1)) - --- Strings. -local sq_str = lexer.range("'", true, false) -local dq_str = lexer.range('"', true) -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) - --- Comments. -lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('--'))) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('=/!:;<>+-/*%&|^~()'))) - -lexer.property['scintillua.comment'] = '--' - -return lex diff --git a/share/vis/lexers/wsf.lua b/share/vis/lexers/wsf.lua @@ -1,87 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- WSF LPeg lexer (based on XML). --- Contributed by Jeff Stone. - -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(...) - --- Comments. -lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.range('<!--', '-->'))) - --- Elements. -local identifier = (lexer.alpha + S('_-')) * (lexer.alnum + S('_-'))^0 -local tag = lex:tag(lexer.TAG, '<' * P('/')^-1 * identifier) -lex:add_rule('tag', tag) - --- Closing tags. -local tag_close = lex:tag(lexer.TAG, P('/')^-1 * '>') -lex:add_rule('tag_close', tag_close) - --- Equals. --- TODO: performance is terrible on large files. 
-local in_tag = P(function(input, index) - local before = input:sub(1, index - 1) - local s, e = before:find('<[^>]-$'), before:find('>[^<]-$') - if s and e then return s > e end - if s then return true end - return input:find('^[^<]->', index) ~= nil -end) - -local equals = lex:tag(lexer.OPERATOR, '=') -- * in_tag --- lex:add_rule('equals', equals) - --- Attributes. -local ws = lex:get_rule('whitespace') -local attribute_eq = lex:tag(lexer.ATTRIBUTE, identifier) * ws^-1 * equals -lex:add_rule('attribute', attribute_eq) - --- Strings. -local sq_str = lexer.range("'", false, false) -local dq_str = lexer.range('"', false, false) -local string = lex:tag(lexer.STRING, lexer.after_set('=', sq_str + dq_str)) -lex:add_rule('string', string) - --- Numbers. -local number = lex:tag(lexer.NUMBER, lexer.dec_num * P('%')^-1) -lex:add_rule('number', lexer.after_set('=', number)) -- * in_tag) - --- Entities. -local predefined = lex:tag(lexer.CONSTANT_BUILTIN .. '.entity', - '&' * lexer.word_match('lt gt amp apos quot') * ';') -local general = lex:tag(lexer.CONSTANT .. '.entity', '&' * identifier * ';') -lex:add_rule('entity', predefined + general) - --- Fold points. -local function disambiguate_lt(text, pos, line, s) return not line:find('^</', s) and 1 or -1 end -lex:add_fold_point(lexer.TAG, '<', disambiguate_lt) -lex:add_fold_point(lexer.TAG, '/>', -1) -lex:add_fold_point(lexer.COMMENT, '<!--', '-->') - --- Finally, add JavaScript and VBScript as embedded languages - --- Tags that start embedded languages. -local embed_start_tag = tag * (ws * attribute_eq * ws^-1 * string)^0 * ws^-1 * tag_close -local embed_end_tag = tag * tag_close - --- Embedded JavaScript. 
-local js = lexer.load('javascript') -local js_start_rule = #(P('<script') * (P(function(input, index) - if input:find('^%s+language%s*=%s*(["\'])[jJ][ava]*[sS]cript%1', index) then return true end -end) + '>')) * embed_start_tag -- <script language="javascript"> -local js_end_rule = #P('</script>') * embed_end_tag -- </script> -lex:embed(js, js_start_rule, js_end_rule) - --- Embedded VBScript. -local vbs = lexer.load('vb', 'vbscript') -local vbs_start_rule = #(P('<script') * (P(function(input, index) - if input:find('^%s+language%s*=%s*(["\'])[vV][bB][sS]cript%1', index) then return true end -end) + '>')) * embed_start_tag -- <script language="vbscript"> -local vbs_end_rule = #P('</script>') * embed_end_tag -- </script> -lex:embed(vbs, vbs_start_rule, vbs_end_rule) - -lexer.property['scintillua.comment'] = '<!--|-->' -lexer.property['scintillua.angle.braces'] = '1' - -return lex diff --git a/share/vis/lexers/xml.lua b/share/vis/lexers/xml.lua @@ -1,75 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- XML LPeg lexer. - -local lexer = lexer -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new(...) - --- Comments and CDATA. -lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.range('<!--', '-->'))) -lex:add_rule('cdata', lex:tag('cdata', lexer.range('<![CDATA[', ']]>'))) - --- Doctype. -local ws = lex:get_rule('whitespace') -local identifier = (lexer.alpha + S('_-')) * (lexer.alnum + S('_-'))^0 -local doctype = lex:tag(lexer.TAG .. '.doctype', '<!DOCTYPE') * ws * - lex:tag(lexer.TAG .. '.doctype', identifier) * (ws * identifier)^-1 * (1 - P('>'))^0 * - lex:tag(lexer.TAG .. '.doctype', '>') -lex:add_rule('doctype', doctype) - --- Processing instructions. -lex:add_rule('proc_insn', lex:tag(lexer.TAG .. '.pi', '<?' * identifier + '?>')) - --- Tags. -local namespace = lex:tag(lexer.OPERATOR, ':') * lex:tag(lexer.LABEL, identifier) -lex:add_rule('element', lex:tag(lexer.TAG, '<' * P('/')^-1 * identifier) * namespace^-1) - --- Closing tags. 
-lex:add_rule('close_tag', lex:tag(lexer.TAG, P('/')^-1 * '>')) - --- Equals. --- TODO: performance is terrible on large files. -local in_tag = P(function(input, index) - local before = input:sub(1, index - 1) - local s, e = before:find('<[^>]-$'), before:find('>[^<]-$') - if s and e then return s > e end - if s then return true end - return input:find('^[^<]->', index) ~= nil -end) - -local equals = lex:tag(lexer.OPERATOR, '=') -- * in_tag --- lex:add_rule('equal', equals) - --- Attributes. -local attribute_eq = lex:tag(lexer.ATTRIBUTE, identifier) * namespace^-1 * ws^-1 * equals -lex:add_rule('attribute', attribute_eq) - --- Strings. -local sq_str = lexer.range("'", false, false) -local dq_str = lexer.range('"', false, false) -lex:add_rule('string', lex:tag(lexer.STRING, lexer.after_set('=', sq_str + dq_str))) - --- Numbers. -local number = lex:tag(lexer.NUMBER, lexer.dec_num * P('%')^-1) -lex:add_rule('number', lexer.after_set('=', number)) -- *in_tag) - --- Entities. -local predefined = lex:tag(lexer.CONSTANT_BUILTIN .. '.entity', - '&' * lexer.word_match('lt gt amp apos quot') * ';') -local general = lex:tag(lexer.CONSTANT .. '.entity', '&' * identifier * ';') -lex:add_rule('entity', predefined + general) - --- Fold Points. -local function disambiguate_lt(text, pos, line, s) return not line:find('^</', s) and 1 or -1 end -lex:add_fold_point(lexer.TAG, '<', disambiguate_lt) -lex:add_fold_point(lexer.TAG, '/>', -1) -lex:add_fold_point(lexer.COMMENT, '<!--', '-->') -lex:add_fold_point('cdata', '<![CDATA[', ']]>') - -lexer.property['scintillua.comment'] = '<!--|-->' -lexer.property['scintillua.angle.braces'] = '1' -lexer.property['scintillua.word.chars'] = - 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-' - -return lex diff --git a/share/vis/lexers/xs.lua b/share/vis/lexers/xs.lua @@ -1,60 +0,0 @@ --- Copyright 2017-2024 David B. Lamkins. See LICENSE. --- xs LPeg lexer. --- Adapted from rc lexer by Michael Forney. 
- -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('xs') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - 'access', 'alias', 'catch', 'cd', 'dirs', 'echo', 'else', 'escape', 'eval', 'exec', 'exit', - 'false', 'fn-', 'fn', 'for', 'forever', 'fork', 'history', 'if', 'jobs', 'let', 'limit', 'local', - 'map', 'omap', 'popd', 'printf', 'pushd', 'read', 'result', 'set-', 'switch', 'throw', 'time', - 'true', 'umask', 'until', 'unwind-protect', 'var', 'vars', 'wait', 'whats', 'while', ':lt', ':le', - ':gt', ':ge', ':eq', ':ne', '~', '~~', '...', '.' -})) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Strings. -local str = lexer.range("'", false, true) -local herestr = '<<<' * str -local heredoc = '<<' * P(function(input, index) - local s, e, _, delimiter = input:find('[ \t]*(["\']?)([%w!"%%+,-./:?@_~]+)%1', index) - if s == index and delimiter then - delimiter = delimiter:gsub('[%%+-.?]', '%%%1') - e = select(2, input:find('[\n\r]' .. delimiter .. '[\n\r]', e)) - return e and e + 1 or #input + 1 - end -end) -lex:add_rule('string', token(lexer.STRING, str + herestr + heredoc)) - --- Comments. -lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#'))) - --- Numbers. --- lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Constants. -lex:add_rule('constant', token(lexer.CONSTANT, '$&' * lexer.word)) - --- Variables. -lex:add_rule('variable', - token(lexer.VARIABLE, '$' * S('"#')^-1 * ('*' + lexer.digit^1 + lexer.word))) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('@`=!<>*&^|;?()[]{}') + '\\\n')) - --- Fold points. 
-lex:add_fold_point(lexer.OPERATOR, '{', '}') - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/xtend.lua b/share/vis/lexers/xtend.lua @@ -1,88 +0,0 @@ --- Copyright (c) 2014-2024 Piotr Orzechowski [drzewo.org]. See LICENSE. --- Xtend LPeg lexer. - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('xtend') - --- Whitespace. -local ws = token(lexer.WHITESPACE, lexer.space^1) -lex:add_rule('whitespace', ws) - --- Classes. -lex:add_rule('class', token(lexer.KEYWORD, 'class') * ws^1 * token(lexer.CLASS, lexer.word)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - -- General. - 'abstract', 'annotation', 'as', 'case', 'catch', 'class', 'create', 'def', 'default', 'dispatch', - 'do', 'else', 'enum', 'extends', 'extension', 'final', 'finally', 'for', 'if', 'implements', - 'import', 'interface', 'instanceof', 'it', 'new', 'override', 'package', 'private', 'protected', - 'public', 'return', 'self', 'static', 'super', 'switch', 'synchronized', 'this', 'throw', - 'throws', 'try', 'typeof', 'val', 'var', 'while', - -- Templates. - 'AFTER', 'BEFORE', 'ENDFOR', 'ENDIF', 'FOR', 'IF', 'SEPARATOR', - -- Literals. - 'true', 'false', 'null' -})) - --- Types. -lex:add_rule('type', token(lexer.TYPE, word_match{ - 'boolean', 'byte', 'char', 'double', 'float', 'int', 'long', 'short', 'void', 'Boolean', 'Byte', - 'Character', 'Double', 'Float', 'Integer', 'Long', 'Short', 'String' -})) - --- Functions. -lex:add_rule('function', token(lexer.FUNCTION, lexer.word) * #P('(')) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Templates. -lex:add_rule('template', token(lexer.EMBEDDED, lexer.range("'''"))) - --- Strings. -local sq_str = lexer.range("'", true) -local dq_str = lexer.range('"', true) -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) - --- Comments. 
-local line_comment = lexer.to_eol('//', true) -local block_comment = lexer.range('/*', '*/') -lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) - --- Numbers. -local small_suff = S('lL') -local med_suff = S('bB') * S('iI') -local large_suff = S('dD') + S('fF') + S('bB') * S('dD') -local exp = S('eE') * lexer.digit^1 - -local dec_inf = ('_' * lexer.digit^1)^0 -local hex_inf = ('_' * lexer.xdigit^1)^0 -local float_pref = lexer.digit^1 * '.' * lexer.digit^1 -local float_suff = exp^-1 * med_suff^-1 * large_suff^-1 - -local dec = lexer.digit * dec_inf * (small_suff^-1 + float_suff) -local hex = lexer.hex_num * hex_inf * P('#' * (small_suff + med_suff))^-1 -local float = float_pref * dec_inf * float_suff - -lex:add_rule('number', token(lexer.NUMBER, float + hex + dec)) - --- Annotations. -lex:add_rule('annotation', token(lexer.ANNOTATION, '@' * lexer.word)) - --- Operators. -lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;.()[]{}#'))) - --- Error. -lex:add_rule('error', token(lexer.ERROR, lexer.any)) - --- Fold points. -lex:add_fold_point(lexer.OPERATOR, '{', '}') -lex:add_fold_point(lexer.COMMENT, '/*', '*/') - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/lexers/yaml.lua b/share/vis/lexers/yaml.lua @@ -1,106 +0,0 @@ --- Copyright 2006-2024 Mitchell. See LICENSE. --- YAML LPeg lexer. --- It does not keep track of indentation perfectly. - -local lexer = lexer -local word_match = lexer.word_match -local P, S, B = lpeg.P, lpeg.S, lpeg.B - -local lex = lexer.new(..., {fold_by_indentation = true}) - --- Distinguish between horizontal and vertical space so indenting tabs can be marked as errors. -local tab_indent = lex:tag(lexer.ERROR .. '.indent', lexer.starts_line('\t', true)) -lex:modify_rule('whitespace', tab_indent + lex:tag(lexer.WHITESPACE, S(' \r\n')^1 + P('\t')^1)) - --- Document boundaries. 
-lex:add_rule('doc_bounds', lex:tag(lexer.OPERATOR, lexer.starts_line(P('---') + '...'))) - --- Keys. -local word = (lexer.alnum + '-')^1 -lex:add_rule('key', -P('- ') * lex:tag(lexer.STRING, word * (S(' \t_')^1 * word^-1)^0) * - #P(':' * lexer.space)) - --- Collections. -lex:add_rule('collection', lex:tag(lexer.OPERATOR, - lexer.after_set('?-:\n', S('?-') * #P(' '), ' \t') + ':' * #P(lexer.space) + S('[]{}') + ',' * - #P(' '))) - --- Alias indicators. -local anchor = lex:tag(lexer.OPERATOR, '&') * lex:tag(lexer.LABEL, word) -local alias = lex:tag(lexer.OPERATOR, '*') * lex:tag(lexer.LABEL, word) -lex:add_rule('alias', anchor + alias) - --- Tags. -local explicit_tag = '!!' * word_match{ - 'map', 'omap', 'pairs', 'set', 'seq', -- collection - 'binary', 'bool', 'float', 'int', 'merge', 'null', 'str', 'timestamp', 'value', 'yaml' -- scalar -} -local verbatim_tag = '!' * lexer.range('<', '>', true) -local short_tag = '!' * word * ('!' * (1 - lexer.space)^1)^-1 -lex:add_rule('tag', lex:tag(lexer.TYPE, explicit_tag + verbatim_tag + short_tag)) - --- Comments. -lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.to_eol('#'))) - --- Reserved. -lex:add_rule('reserved', - B(S(':,') * ' ') * lex:tag(lexer.ERROR, S('@`') + lexer.starts_line(S('@`')))) - --- Constants. -local scalar_end = #(S(' \t')^0 * lexer.newline + S(',]}') + -1) -lex:add_rule('constant', - lex:tag(lexer.CONSTANT_BUILTIN, word_match('null true false', true)) * scalar_end) - --- Strings. -local sq_str = lexer.range("'") -local dq_str = lexer.range('"') -lex:add_rule('string', lex:tag(lexer.STRING, sq_str + dq_str) * (scalar_end + #P(':' * lexer.space))) - --- Timestamps. 
-local year = lexer.digit * lexer.digit * lexer.digit * lexer.digit -local month = lexer.digit * lexer.digit^-1 -local day = lexer.digit * lexer.digit^-1 -local date = year * '-' * month * '-' * day -local hours = lexer.digit * lexer.digit^-1 -local minutes = lexer.digit * lexer.digit -local seconds = lexer.digit * lexer.digit -local fraction = '.' * lexer.digit^0 -local time = hours * ':' * minutes * ':' * seconds * fraction^-1 -local zone = 'Z' + S(' \t')^-1 * S('-+') * hours * (':' * minutes)^-1 -lex:add_rule('timestamp', lex:tag(lexer.NUMBER .. '.timestamp', - date * (S('tT \t') * time * zone^-1)^-1) * scalar_end) - --- Numbers. -local special_num = S('+-')^-1 * '.' * word_match('inf nan', true) -local number = lexer.number + special_num -lex:add_rule('number', (B(lexer.alnum) * lex:tag(lexer.DEFAULT, number) + - lex:tag(lexer.NUMBER, number)) * scalar_end) - --- Scalars. -local block_indicator = S('|>') * (S('-+') * lexer.digit^-1 + lexer.digit * S('-+')^-1)^-1 -local block = lpeg.Cmt(lpeg.C(block_indicator * lexer.newline), function(input, index, indicator) - local indent = lexer.indent_amount[lexer.line_from_position(index - #indicator)] - for s, i, j in input:gmatch('()\n()[ \t]*()[^ \t\r\n]', index) do -- ignore blank lines - if s >= index then -- compatibility for Lua < 5.4, which doesn't have init for string.gmatch() - if j - i <= indent then return s end - end - end - return #input + 1 -end) -local seq = B('- ') * lexer.nonnewline^1 -local csv = B(', ') * (lexer.nonnewline - S(',]}'))^1 -local stop_chars, LF = {[string.byte('{')] = true, [string.byte('\n')] = true}, string.byte('\n') -local map = B(': ') * lexer.nonnewline * P(function(input, index) - local pos = index - while pos > 1 and not stop_chars[input:byte(pos)] do pos = pos - 1 end - local s = input:find(input:byte(pos) ~= LF and '[\n,}]' or '\n', index) - return s or #input + 1 -end) -lex:add_rule('scalar', lex:tag(lexer.DEFAULT, block + seq + csv + map)) - --- Directives 
-lex:add_rule('directive', lex:tag(lexer.PREPROCESSOR, lexer.starts_line(lexer.to_eol('%')))) - -lexer.property['scintillua.comment'] = '#' - -return lex diff --git a/share/vis/lexers/zig.lua b/share/vis/lexers/zig.lua @@ -1,93 +0,0 @@ --- Copyright 2020-2024 Karchnu karchnu@karchnu.fr. See LICENSE. --- Zig LPeg lexer. --- (Based on the C++ LPeg lexer from Mitchell.) - -local lexer = require('lexer') -local token, word_match = lexer.token, lexer.word_match -local P, S = lpeg.P, lpeg.S - -local lex = lexer.new('zig') - --- Whitespace. -lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1)) - --- Keywords. -lex:add_rule('keyword', token(lexer.KEYWORD, word_match{ - -- Keywords. - 'inline', 'pub', 'fn', 'comptime', 'const', 'extern', 'return', 'var', 'usingnamespace', - -- Defering code blocks. - 'defer', 'errdefer', - -- Functions and structures related keywords. - 'align', 'allowzero', 'noalias', 'noinline', 'callconv', 'packed', 'linksection', 'unreachable', - 'test', 'asm', 'volatile', - -- Parallelism and concurrency related keywords. - 'async', 'await', 'noasync', 'suspend', 'nosuspend', 'resume', 'threadlocalanyframe', - -- Control flow: conditions and loops. - 'if', 'else', 'orelse', 'or', 'and', 'while', 'for', 'switch', 'continue', 'break', 'catch', - 'try', - -- Not keyword but overly used variable name with always the same semantic. - 'self' -})) - --- Types. -lex:add_rule('type', token(lexer.TYPE, word_match{ - 'enum', 'struct', 'union', -- - 'i8', 'u8', 'i16', 'u16', 'i32', 'u32', 'i64', 'u64', 'i128', 'u128', -- - 'isize', 'usize', -- - 'c_short', 'c_ushort', 'c_int', 'c_uint', -- - 'c_long', 'c_ulong', 'c_longlong', 'c_ulonglong', 'c_longdouble', -- - 'c_void', -- - 'f16', 'f32', 'f64', 'f128', -- - 'bool', 'void', 'noreturn', 'type', 'anytype', 'error', 'anyerror', -- - 'comptime_int', 'comptime_float' -})) - --- Constants. -lex:add_rule('constant', token(lexer.CONSTANT, word_match{ - -- Special values. 
- 'false', 'true', 'null', 'undefined' -})) - --- Built-in functions. -lex:add_rule('function', token(lexer.FUNCTION, '@' * word_match{ - 'addWithOverflow', 'alignCast', 'alignOf', 'as', 'asyncCall', 'atomicLoad', 'atomicRmw', - 'atomicStore', 'bitCast', 'bitOffsetOf', 'boolToInt', 'bitSizeOf', 'breakpoint', 'mulAdd', - 'byteSwap', 'bitReverse', 'byteOffsetOf', 'call', 'cDefine', 'cImport', 'cInclude', 'clz', - 'cmpxchgStrong', 'cmpxchgWeak', 'compileError', 'compileLog', 'ctz', 'cUndef', 'divExact', - 'divFloor', 'divTrunc', 'embedFile', 'enumToInt', 'errorName', 'errorReturnTrace', 'errorToInt', - 'errSetCast', 'export', 'fence', 'field', 'fieldParentPtr', 'floatCast', 'floatToInt', 'frame', - 'Frame', 'frameAddress', 'frameSize', 'hasDecl', 'hasField', 'import', 'intCast', 'intToEnum', - 'intToError', 'intToFloat', 'intToPtr', 'memcpy', 'memset', 'wasmMemorySize', 'wasmMemoryGrow', - 'mod', 'mulWithOverflow', 'panic', 'popCount', 'ptrCast', 'ptrToInt', 'rem', 'returnAddress', - 'setAlignStack', 'setCold', 'setEvalBranchQuota', 'setFloatMode', 'setRuntimeSafety', 'shlExact', - 'shlWithOverflow', 'shrExact', 'shuffle', 'sizeOf', 'splat', 'reduce', 'src', 'sqrt', 'sin', - 'cos', 'exp', 'exp2', 'log', 'log2', 'log10', 'fabs', 'floor', 'ceil', 'trunc', 'round', - 'subWithOverflow', 'tagName', 'TagType', 'This', 'truncate', 'Type', 'typeInfo', 'typeName', - 'TypeOf', 'unionInit' -})) - --- Strings. -local sq_str = P('L')^-1 * lexer.range("'", true) -local dq_str = P('L')^-1 * lexer.range('"', true) -lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) - --- Identifiers. -lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) - --- Comments. -local doc_comment = lexer.to_eol('///', true) -local comment = lexer.to_eol('//', true) -lex:add_rule('comment', token(lexer.COMMENT, doc_comment + comment)) - --- Numbers. -lex:add_rule('number', token(lexer.NUMBER, lexer.number)) - --- Operators. 
-lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;,.()[]{}'))) - --- Fold points. -lex:add_fold_point(lexer.OPERATOR, '{', '}') - -lexer.property['scintillua.comment'] = '//' - -return lex diff --git a/share/vis/plugins/complete-filename.lua b/share/vis/plugins/complete-filename.lua @@ -1,51 +0,0 @@ -local complete_filename = function(expand) - local win = vis.win - local file = win.file - local pos = win.selection.pos - if not pos then return end - - -- TODO do something clever here - local range = file:text_object_longword(pos > 0 and pos-1 or pos); - if not range then return end - if range.finish > pos then range.finish = pos end - - local prefix = file:content(range) - if not prefix then return end - - -- Strip leading delimiters for some programming languages - local _, j = prefix:find(".*[{[(<'\"]+") - if not expand and j then prefix = prefix:sub(j + 1) end - - if prefix:match("^%s*$") then - prefix = "" - range.start = pos - range.finish = pos - end - - local cmdfmt = "vis-complete --file '%s'" - if expand then cmdfmt = "vis-open -- '%s'*" end - local status, out, err = vis:pipe(cmdfmt:format(prefix:gsub("'", "'\\''"))) - if status ~= 0 or not out then - if err then vis:info(err) end - return - end - out = out:gsub("\n$", "") - - if expand then - file:delete(range) - pos = range.start - end - - file:insert(pos, out) - win.selection.pos = pos + #out -end - --- complete file path at primary selection location using vis-complete(1) -vis:map(vis.modes.INSERT, "<C-x><C-f>", function() - complete_filename(false); -end, "Complete file name") - --- complete file path at primary selection location using vis-open(1) -vis:map(vis.modes.INSERT, "<C-x><C-o>", function() - complete_filename(true); -end, "Complete file name (expands path)") diff --git a/share/vis/plugins/complete-word.lua b/share/vis/plugins/complete-word.lua @@ -1,38 +0,0 @@ --- complete word at primary selection location using vis-complete(1) - -vis:map(vis.modes.INSERT, "<C-n>", 
function() - local win = vis.win - local file = win.file - local pos = win.selection.pos - if not pos then return end - - local range = file:text_object_word(pos > 0 and pos-1 or pos); - if not range then return end - if range.finish > pos then range.finish = pos end - if range.start == range.finish then return end - local prefix = file:content(range) - if not prefix then return end - - vis:feedkeys("<vis-selections-save><Escape><Escape>") - -- collect words starting with prefix - vis:command("x/\\b" .. prefix .. "\\w+/") - local candidates = {} - for sel in win:selections_iterator() do - table.insert(candidates, file:content(sel.range)) - end - vis:feedkeys("<Escape><Escape><vis-selections-restore>") - if #candidates == 1 and candidates[1] == "\n" then return end - candidates = table.concat(candidates, "\n") - - local cmd = "printf '" .. candidates .. "' | sort -u | vis-menu" - local status, out, err = vis:pipe(cmd) - if status ~= 0 or not out then - if err then vis:info(err) end - return - end - out = out:sub(#prefix + 1, #out - 1) - file:insert(pos, out) - win.selection.pos = pos + #out - -- restore mode to what it was on entry - vis.mode = vis.modes.INSERT -end, "Complete word in file") diff --git a/share/vis/plugins/digraph.lua b/share/vis/plugins/digraph.lua @@ -1,23 +0,0 @@ --- insert digraphs using vis-digraph(1) - -vis:map(vis.modes.INSERT, "<C-k>", function(keys) - if #keys < 2 then - return -1 -- need more input - end - local file = io.popen(string.format("vis-digraph '%s' 2>&1", keys:gsub("'", "'\\''"))) - local output = file:read('*all') - local success, msg, status = file:close() - if success then - if vis.mode == vis.modes.INSERT then - vis:insert(output) - elseif vis.mode == vis.modes.REPLACE then - vis:replace(output) - end - elseif msg == 'exit' then - if status == 2 then - return -1 -- prefix need more input - end - vis:info(output) - end - return #keys -end, "Insert digraph") diff --git a/share/vis/plugins/filetype.lua 
b/share/vis/plugins/filetype.lua @@ -1,638 +0,0 @@ -vis.ftdetect = {} - -vis.ftdetect.ignoresuffixes = { - "~+$", "%.orig$", "%.bak$", "%.old$", "%.new$" -} - -vis.ftdetect.filetypes = { - actionscript = { - ext = { "%.as$", "%.asc$" }, - }, - ada = { - ext = { "%.adb$", "%.ads$" }, - }, - ansi_c = { - ext = { "%.c$", "%.C$", "%.h$" }, - mime = { "text/x-c" }, - }, - antlr = { - ext = { "%.g$", "%.g4$" }, - }, - apdl = { - ext = { "%.ans$", "%.inp$", "%.mac$" }, - }, - apl = { - ext = { "%.apl$" } - }, - applescript = { - ext = { "%.applescript$" }, - }, - asm = { - ext = { "%.asm$", "%.ASM$", "%.s$", "%.S$" }, - }, - asp = { - ext = { "%.asa$", "%.asp$", "%.hta$" }, - }, - autoit = { - ext = { "%.au3$", "%.a3x$" }, - }, - awk = { - hashbang = { "^/usr/bin/[mng]awk%s+%-f" }, - utility = { "^[mgn]?awk$", "^goawk$" }, - ext = { "%.awk$" }, - }, - bash = { - utility = { "^[db]ash$", "^sh$","^t?csh$","^zsh$" }, - ext = { "%.bash$", "%.csh$", "%.sh$", "%.zsh$" ,"^APKBUILD$", "%.ebuild$", "^.bashrc$", "^.bash_profile$" }, - mime = { "text/x-shellscript", "application/x-shellscript" }, - }, - batch = { - ext = { "%.bat$", "%.cmd$" }, - }, - bibtex = { - ext = { "%.bib$" }, - }, - boo = { - ext = { "%.boo$" }, - }, - caml = { - ext = { "%.caml$", "%.ml$", "%.mli$", "%.mll$", "%.mly$" }, - }, - chuck = { - ext = { "%.ck$" }, - }, - clojure = { - ext = { "%.clj$", "%.cljc$", "%.cljs$", "%.edn$" } - }, - cmake = { - ext = { "%.cmake$", "%.cmake.in$", "%.ctest$", "%.ctest.in$" }, - }, - coffeescript = { - ext = { "%.coffee$" }, - mime = { "text/x-coffee" }, - }, - cpp = { - ext = { "%.cpp$", "%.cxx$", "%.c++$", "%.cc$", "%.hh$", "%.hpp$", "%.hxx$", "%.h++$" }, - mime = { "text/x-c++" }, - }, - crontab = { - ext = { "^crontab.*$" }, - cmd = { "set savemethod inplace" }, - }, - crystal = { - ext = { "%.cr$" }, - }, - csharp = { - ext = { "%.cs$" }, - }, - css = { - ext = { "%.css$" }, - mime = { "text/x-css" }, - }, - cuda = { - ext = { "%.cu$", "%.cuh$" }, - }, - dart = { - ext 
= { "%.dart$" }, - }, - desktop = { - ext = { "%.desktop$" }, - }, - diff = { - ext = { "%.diff$", "%.patch$", "%.rej$" }, - }, - dmd = { - ext = { "%.d$", "%.di$" }, - }, - dockerfile = { - ext = { "^Dockerfile$", "%.Dockerfile$" }, - }, - dot = { - ext = { "%.dot$" }, - }, - dsv = { - ext = { "^group$", "^gshadow$", "^passwd$", "^shadow$" }, - }, - eiffel = { - ext = { "%.e$", "%.eif$" }, - }, - elixir = { - ext = { "%.ex$", "%.exs$" }, - }, - elm = { - ext = { "%.elm$" }, - }, - mail = { - ext = { "%.eml$" }, - }, - erlang = { - ext = { "%.erl$", "%.hrl$" }, - }, - fantom = { - ext = { "%.fan$" }, - }, - faust = { - ext = { "%.dsp$" }, - }, - fennel = { - ext = { "%.fnl$" }, - }, - fish = { - utility = { "^fish$" }, - ext = { "%.fish$" }, - }, - forth = { - ext = { "%.forth$", "%.frt$", "%.fs$", "%.fth$" }, - }, - fortran = { - ext = { "%.f$", "%.for$", "%.ftn$", "%.fpp$", "%.f77$", "%.f90$", "%.f95$", "%.f03$", "%.f08$" }, - }, - fsharp = { - ext = { "%.fs$" }, - }, - fstab = { - ext = { "^fstab$" }, - }, - gap = { - ext = { "%.g$", "%.gd$", "%.gi$", "%.gap$" }, - }, - gemini = { - ext = { "%.gmi" }, - mime = { "text/gemini" }, - }, - gettext = { - ext = { "%.po$", "%.pot$" }, - }, - gherkin = { - ext = { "%.feature$" }, - }, - ['git-commit'] = { - alt_name = "diff", - ext = { "^COMMIT_EDITMSG$" }, - cmd = { "set colorcolumn 72" }, - }, - ['git-rebase'] = { - ext = { "git%-rebase%-todo" }, - }, - gleam = { - ext = { "%.gleam$" }, - }, - glsl = { - ext = { "%.glsl[fv]?$" }, - }, - gnuplot = { - ext = { "%.dem$", "%.plt$" }, - }, - go = { - ext = { "%.go$" }, - }, - groovy = { - ext = { "%.groovy$", "%.gvy$", "^Jenkinsfile$" }, - }, - gtkrc = { - ext = { "%.gtkrc$" }, - }, - hare = { - ext = { "%.ha$" } - }, - haskell = { - ext = { "%.hs$" }, - mime = { "text/x-haskell" }, - }, - html = { - ext = { "%.[sx]?htm[l]?$" }, - mime = { "text/x-html" }, - }, - icon = { - ext = { "%.icn$" }, - }, - idl = { - ext = { "%.idl$", "%.odl$" }, - }, - inform = { - ext = { 
"%.inf$", "%.ni$" }, - }, - ini = { - ext = { "%.cfg$", "%.cnf$", "%.conf$", "%.inf$", "%.ini$", "%.reg$" }, - }, - io_lang = { - ext = { "%.io$" }, - }, - java = { - ext = { "%.bsh$", "%.java$" }, - }, - javascript = { - ext = { "%.cjs$", "%.js$", "%.jsfl$", "%.mjs$", "%.jsx$" }, - }, - jq = { - ext = { "%.jq$" }, - }, - json = { - ext = { "%.json$" }, - mime = { "text/x-json" }, - }, - jsp = { - ext = { "%.jsp$" }, - }, - julia = { - ext = { "%.jl$" }, - }, - latex = { - ext = { "%.bbl$", "%.cls$", "%.dtx$", "%.ins$", "%.ltx$", "%.tex$", "%.sty$" }, - mime = { "text/x-tex" }, - }, - ledger = { - ext = { "%.ledger$", "%.journal$" }, - }, - less = { - ext = { "%.less$" }, - }, - lilypond = { - ext = { "%.ily$", "%.ly$" }, - }, - lisp = { - ext = { "%.cl$", "%.el$", "%.lisp$", "%.lsp$" }, - mime = { "text/x-lisp" }, - }, - litcoffee = { - ext = { "%.litcoffee$" }, - }, - logtalk = { - ext = { "%.lgt$" }, - }, - lua = { - utility = {"^lua%-?5?%d?$", "^lua%-?5%.%d$" }, - ext = { "%.lua$" }, - mime = { "text/x-lua" }, - }, - makefile = { - hashbang = {"^#!/usr/bin/make"}, - utility = {"^make$"}, - ext = { "%.iface$", "%.mak$", "%.mk$", "^GNUmakefile$", "^makefile$", "^Makefile$" }, - mime = { "text/x-makefile" }, - }, - man = { - ext = { "%.[1-9][xp]?$", "%.ms$", "%.me$", "%.mom$", "%.mm$", "%.tmac$" }, - }, - markdown = { - ext = { "%.md$", "%.markdown$" }, - mime = { "text/x-markdown" }, - }, - meson = { - ext = { "^meson%.build$" }, - }, - modula2 = { - ext = { "%.mod$", "%.def$" }, - }, - modula3 = { - ext = { "%.mg$", "%.ig$", "%.i3$", "%.m3$" }, - }, - moonscript = { - ext = { "%.moon$" }, - mime = { "text/x-moon" }, - }, - myrddin = { - ext = { "%.myr$" }, - }, - nemerle = { - ext = { "%.n$" }, - }, - networkd = { - ext = { "%.link$", "%.network$", "%.netdev$" }, - }, - nim = { - ext = { "%.nim$" }, - }, - nsis = { - ext = { "%.nsh$", "%.nsi$", "%.nsis$" }, - }, - objective_c = { - ext = { "%.m$", "%.mm$", "%.objc$" }, - mime = { "text/x-objc" }, - }, - pascal = 
{ - ext = { "%.dpk$", "%.dpr$", "%.p$", "%.pas$" }, - }, - perl = { - ext = { "%.al$", "%.perl$", "%.pl$", "%.pm$", "%.pod$" }, - mime = { "text/x-perl" }, - }, - php = { - ext = { "%.inc$", "%.php$", "%.php3$", "%.php4$", "%.phtml$" }, - }, - pico8 = { - ext = { "%.p8$" }, - }, - pike = { - ext = { "%.pike$", "%.pmod$" }, - }, - pkgbuild = { - ext = { "^PKGBUILD$", "%.PKGBUILD$" }, - }, - pony = { - ext = { "%.pony$" }, - }, - powershell = { - ext = { "%.ps1$", "%.psm1$" }, - }, - prolog = { - ext = { "%.pl$", "%.pro$", "%.prolog$" }, - }, - props = { - ext = { "%.props$", "%.properties$" }, - }, - protobuf = { - ext = { "%.proto$" }, - }, - ps = { - ext = { "%.eps$", "%.ps$" }, - }, - pure = { - ext = { "%.pure$" }, - }, - python = { - utility = { "^python%d?" }, - ext = { "%.sc$", "%.py[iw]?$" }, - mime = { "text/x-python", "text/x-script.python" }, - }, - reason = { - ext = { "%.re$" }, - }, - rc = { - utility = {"^rc$"}, - ext = { "%.rc$", "%.es$" }, - }, - rebol = { - ext = { "%.r$", "%.reb$" }, - }, - rest = { - ext = { "%.rst$" }, - }, - rexx = { - ext = { "%.orx$", "%.rex$" }, - }, - rhtml = { - ext = { "%.erb$", "%.rhtml$" }, - }, - routeros = { - ext = { "%.rsc" }, - detect = function(_, data) - return data:match("^#.* by RouterOS") - end - }, - rpmspec = { - ext = { "%.spec$" }, - }, - rstats = { - ext = { "%.R$", "%.Rout$", "%.Rhistory$", "%.Rt$", "Rout.save", "Rout.fail" }, - }, - ruby = { - ext = { "%.Rakefile$", "%.rake$", "%.rb$", "%.rbw$", "^Vagrantfile$" }, - mime = { "text/x-ruby" }, - }, - rust = { - ext = { "%.rs$" }, - mime = { "text/x-rust" }, - }, - sass = { - ext = { "%.sass$", "%.scss$" }, - mime = { "text/x-sass", "text/x-scss" }, - }, - scala = { - ext = { "%.scala$" }, - mime = { "text/x-scala" }, - }, - scheme = { - ext = { "%.rkt$", "%.sch$", "%.scm$", "%.sld$", "%.sls$", "%.ss$" }, - }, - smalltalk = { - ext = { "%.changes$", "%.st$", "%.sources$" }, - }, - sml = { - ext = { "%.sml$", "%.fun$", "%.sig$" }, - }, - snobol4 = { - ext = 
{ "%.sno$", "%.SNO$" }, - }, - spin = { - ext = { "%.spin$" } - }, - sql= { - ext = { "%.ddl$", "%.sql$" }, - }, - strace = { - detect = function(_, data) - return data:match("^execve%(") - end - }, - systemd = { - ext = { - "%.automount$", "%.device$", "%.mount$", "%.path$", - "%.scope$", "%.service$", "%.slice$", "%.socket$", - "%.swap$", "%.target$", "%.timer$" - }, - }, - taskpaper = { - ext = { "%.taskpaper$" }, - }, - tcl = { - utility = {"^tclsh$", "^jimsh$" }, - ext = { "%.tcl$", "%.tk$" }, - }, - texinfo = { - ext = { "%.texi$" }, - }, - text = { - ext = { "%.txt$" }, - -- Do *not* list mime "text/plain" here, it is covered below, - -- see 'try text lexer as a last resort' - }, - toml = { - ext = { "%.toml$" }, - }, - typescript = { - ext = { "%.ts$", "%.tsx$" }, - }, - typst = { - ext = { "%.typ$", "%.typst$" }, - }, - vala = { - ext = { "%.vala$" } - }, - vb = { - ext = { - "%.asa$", "%.bas$", "%.ctl$", "%.dob$", - "%.dsm$", "%.dsr$", "%.frm$", "%.pag$", "%.vb$", - "%.vba$", "%.vbs$" - }, - }, - vcard = { - ext = { "%.vcf$", "%.vcard$" }, - }, - verilog = { - ext = { "%.v$", "%.ver$", "%.sv$" }, - }, - vhdl = { - ext = { "%.vh$", "%.vhd$", "%.vhdl$" }, - }, - wsf = { - ext = { "%.wsf$" }, - }, - xs = { - ext = { "%.xs$", "^%.xsin$", "^%.xsrc$" }, - }, - xml = { - ext = { - "%.dtd$", "%.glif$", "%.plist$", "%.svg$", "%.xml$", - "%.xsd$", "%.xsl$", "%.xslt$", "%.xul$" - }, - mime = { "text/xml" }, - }, - xtend = { - ext = {"%.xtend$" }, - }, - yaml = { - ext = { "%.yaml$", "%.yml$" }, - mime = { "text/x-yaml" }, - }, - zig = { - ext = { "%.zig$" }, - }, -} - -vis.events.subscribe(vis.events.WIN_OPEN, function(win) - - local set_filetype = function(syntax, filetype) - for _, cmd in pairs(filetype.cmd or {}) do - vis:command(cmd) - end - if not vis.lexers.property then return end - local path = vis.lexers.property['scintillua.lexers']:gsub(';', '/?.lua;') - local lexname = filetype.alt_name or syntax - local lexpath = package.searchpath(lexname, path) - if 
lexpath ~= nil then - win:set_syntax(lexname) - else - win:set_syntax(nil) - end - end - - local path = win.file.name -- filepath - local mime - - if path and #path > 0 then - local name = path:match("[^/]+$") -- filename - if name then - local unchanged - while #name > 0 and name ~= unchanged do - unchanged = name - for _, pattern in ipairs(vis.ftdetect.ignoresuffixes) do - name = name:gsub(pattern, "") - end - end - end - - if name and #name > 0 then - -- detect filetype by filename ending with a configured extension - for lang, ft in pairs(vis.ftdetect.filetypes) do - for _, pattern in pairs(ft.ext or {}) do - if name:match(pattern) then - set_filetype(lang, ft) - return - end - end - end - end - - -- run file(1) to determine mime type - local file = io.popen(string.format("file -bL --mime-type -- '%s'", path:gsub("'", "'\\''"))) - if file then - mime = file:read('*all') - file:close() - if mime then - mime = mime:gsub('%s*$', '') - end - if mime and #mime > 0 then - for lang, ft in pairs(vis.ftdetect.filetypes) do - for _, ft_mime in pairs(ft.mime or {}) do - if mime == ft_mime then - set_filetype(lang, ft) - return - end - end - end - end - end - end - - -- pass first few bytes of file to custom file type detector functions - local file = win.file - local data = file:content(0, 256) - if data and #data > 0 then - for lang, ft in pairs(vis.ftdetect.filetypes) do - if type(ft.detect) == 'function' and ft.detect(file, data) then - set_filetype(lang, ft) - return - end - end - ---[[ hashbang check - hashbangs only have command <SPACE> argument - if /env, find utility in args - discard first arg if /-[^S]*S/; and all subsequent /=/ - NOTE: this means you can't have a command with /^-|=/ - return first field, which should be the utility. 
- NOTE: long-options unsupported ---]] - local fullhb, utility = data:match"^#![ \t]*(/+[^/\n]+[^\n]*)" - if fullhb then - local i, field = 1, {} - for m in fullhb:gmatch"%g+" do field[i],i = m,i+1 end - -- NOTE: executables should not have a space (or =, see below) - if field[1]:match"/env$" then - table.remove(field,1) - -- it is assumed that the first argument are short options, with -S inside - if string.match(field[1] or "", "^%-[^S-]*S") then -- -S found - table.remove(field,1) - -- skip all name=value - while string.match(field[1] or "","=") do - table.remove(field,1) - end - -- (hopefully) whatever is left in field[1] should be the utility or nil - end - end - utility = string.match(field[1] or "", "[^/]+$") -- remove filepath - end - - local function searcher(tbl, subject) - for _, pattern in ipairs(tbl or {}) do - if string.match(subject, pattern) then - return true - end - end - return false - end - - if utility or fullhb then - for lang, ft in pairs(vis.ftdetect.filetypes) do - if - utility and searcher(ft.utility, utility) - or - fullhb and searcher(ft.hashbang, fullhb) - then - set_filetype(lang, ft) - return - end - end - end - end - - -- try text lexer as a last resort - if (mime or 'text/plain'):match('^text/.+$') then - set_filetype('text', vis.ftdetect.filetypes.text) - return - end - - win:set_syntax(nil) -end) - diff --git a/share/vis/plugins/number-inc-dec.lua b/share/vis/plugins/number-inc-dec.lua @@ -1,59 +0,0 @@ --- increment/decrement number in dec/hex/oct format -local lexer = vis.lexers -local lpeg = vis.lpeg -if not lexer.load or not lpeg then return end - -local Cp = lpeg.Cp() -local dec_num = lpeg.S('+-')^-1 * lexer.dec_num -local pattern = lpeg.P{ Cp * (lexer.hex_num + lexer.oct_num + dec_num) * Cp + 1 * lpeg.V(1) } - -local change = function(delta) - - local win = vis.win - local file = win.file - local count = vis.count - if not count then count = 1 end - vis.count = nil -- reset count, otherwise it affects next motion - - for 
selection in win:selections_iterator() do - local pos = selection.pos - if not pos then goto continue end - local word = file:text_object_word(pos); - if not word then goto continue end - local data = file:content(word.start, 1024) - if not data then goto continue end - local s, e = pattern:match(data) - if not s then goto continue end - data = string.sub(data, s, e-1) - if #data == 0 then goto continue end - -- align start and end for fileindex - s = word.start + s - 1 - e = word.start + e - 1 - local base, format, padding = 10, 'd', 0 - if lexer.oct_num:match(data) then - base = 8 - format = 'o' - padding = #data - elseif lexer.hex_num:match(data) then - base = 16 - format = 'x' - padding = #data - #"0x" - end - local number = tonumber(data, base == 8 and 8 or nil) - if not number then goto continue end - number = number + delta * count - -- string.format does not support negative hex/oct values - if base ~= 10 and number < 0 then number = 0 end - number = string.format((base == 16 and "0x" or "") .. "%0"..padding..format, number) - if base == 8 and string.sub(number, 0, 1) ~= "0" then - number = '0' .. number - end - file:delete(s, e - s) - file:insert(s, number) - selection.pos = s - ::continue:: - end -end - -vis:map(vis.modes.NORMAL, "<C-a>", function() change( 1) end, "Increment number") -vis:map(vis.modes.NORMAL, "<C-x>", function() change(-1) end, "Decrement number") diff --git a/share/vis/plugins/textobject-lexer.lua b/share/vis/plugins/textobject-lexer.lua @@ -1,31 +0,0 @@ --- text object matching a lexer token - -local MAX_CONTEXT = 32768 - -vis:textobject_new("ii", function(win, pos) - - if not win.syntax or not vis.lexers.load then - return nil - end - - local before, after = pos - MAX_CONTEXT, pos + MAX_CONTEXT - if before < 0 then - before = 0 - end - -- TODO make sure we start at a line boundary? 
- - local lexer = vis.lexers.load(win.syntax, nil, true) - local data = win.file:content(before, after - before) - local tokens = lexer:lex(data) - local cur = before - - for i = 1, #tokens, 2 do - local token_next = before + tokens[i+1] - 1 - if cur <= pos and pos < token_next then - return cur, token_next - end - cur = token_next - end - - return nil -end, "Current lexer token") diff --git a/share/vis/themes/base-16.lua b/share/vis/themes/base-16.lua @@ -1,156 +0,0 @@ --- Eight-color scheme -local lexers = vis.lexers -lexers.STYLE_DEFAULT ='' -lexers.STYLE_NOTHING = '' -lexers.STYLE_ATTRIBUTE = 'fore:green,bold' -lexers.STYLE_CLASS = 'fore:yellow,bold' -lexers.STYLE_COMMENT = 'fore:blue,bold' -lexers.STYLE_CONSTANT = 'fore:cyan,bold' -lexers.STYLE_DEFINITION = 'fore:blue,bold' -lexers.STYLE_ERROR = 'fore:red,italics' -lexers.STYLE_FUNCTION = 'fore:blue,bold' -lexers.STYLE_HEADING = 'fore:magenta' -lexers.STYLE_KEYWORD = 'fore:yellow,bold' -lexers.STYLE_LABEL = 'fore:green,bold' -lexers.STYLE_NUMBER = 'fore:red,bold' -lexers.STYLE_OPERATOR = 'fore:cyan,bold' -lexers.STYLE_REGEX = 'fore:green,bold' -lexers.STYLE_STRING = 'fore:red,bold' -lexers.STYLE_PREPROCESSOR = 'fore:magenta,bold' -lexers.STYLE_TAG = 'fore:red,bold' -lexers.STYLE_TYPE = 'fore:green,bold' -lexers.STYLE_VARIABLE = 'fore:blue,bold' -lexers.STYLE_WHITESPACE = '' -lexers.STYLE_EMBEDDED = 'back:blue,bold' -lexers.STYLE_IDENTIFIER = '' - -lexers.STYLE_LINENUMBER = '' -lexers.STYLE_LINENUMBER_CURSOR = lexers.STYLE_LINENUMBER -lexers.STYLE_CURSOR = 'back:white,fore:black' -lexers.STYLE_CURSOR_PRIMARY = lexers.STYLE_CURSOR..',fore:yellow' -lexers.STYLE_CURSOR_LINE = 'underlined' -lexers.STYLE_COLOR_COLUMN = 'back:red' -lexers.STYLE_SELECTION = 'back:white,bold' -lexers.STYLE_STATUS = 'reverse' -lexers.STYLE_STATUS_FOCUSED = 'reverse,bold' -lexers.STYLE_SEPARATOR = lexers.STYLE_DEFAULT -lexers.STYLE_INFO = 'bold' -lexers.STYLE_EOF = '' - --- lexer specific styles - --- Diff -lexers.STYLE_ADDITION = 
'fore:green' -lexers.STYLE_DELETION = 'fore:red' -lexers.STYLE_CHANGE = 'fore:yellow' - --- CSS -lexers.STYLE_PROPERTY = lexers.STYLE_ATTRIBUTE -lexers.STYLE_PSEUDOCLASS = '' -lexers.STYLE_PSEUDOELEMENT = '' - --- HTML -lexers.STYLE_TAG_UNKNOWN = lexers.STYLE_TAG .. ',italics' -lexers.STYLE_ATTRIBUTE_UNKNOWN = lexers.STYLE_ATTRIBUTE .. ',italics' - --- Latex, TeX, and Texinfo -lexers.STYLE_COMMAND = lexers.STYLE_KEYWORD -lexers.STYLE_COMMAND_SECTION = lexers.STYLE_CLASS -lexers.STYLE_ENVIRONMENT = lexers.STYLE_TYPE -lexers.STYLE_ENVIRONMENT_MATH = lexers.STYLE_NUMBER - --- Makefile -lexers.STYLE_TARGET = '' - --- Markdown -lexers.STYLE_HR = '' -for i = 1,6 do lexers['STYLE_HEADING_H'..i] = lexers.STYLE_HEADING end -lexers.STYLE_BOLD = 'bold' -lexers.STYLE_ITALIC = 'italics' -lexers.STYLE_LIST = lexers.STYLE_KEYWORD -lexers.STYLE_LINK = lexers.STYLE_KEYWORD -lexers.STYLE_REFERENCE = lexers.STYLE_KEYWORD -lexers.STYLE_CODE = lexers.STYLE_EMBEDDED - --- Output -lexers.STYE_FILENAME = '' -lexers.STYLE_LINE = '' -lexers.STYLE_COLUMN = '' -lexers.STYLE_MESSAGE = '' - --- Python -lexers.STYLE_KEYWORD_SOFT = '' - --- Taskpaper -lexers.STYLE_NOTE = '' -lexers.STYLE_TAG_EXTENDED = '' -lexers.STYLE_TAG_DAY = 'fore:yellow' -lexers.STYLE_TAG_OVERDUE = 'fore:red' -lexers.STYLE_TAG_PLAIN = '' - --- XML -lexers.STYLE_CDATA = '' - --- YAML -lexers.STYLE_ERROR_INDENT = 'back:red' - --- The following are temporary styles until their legacy lexers are migrated. - --- Antlr -lexers.STYLE_ACTION = '' - --- Clojure -lexers.STYLE_CLOJURE_KEYWORD = lexers.STYLE_TYPE -lexers.STYLE_CLOJURE_SYMBOL = lexers.STYLE_TYPE .. ',bold' - --- Crystal ---lexers.STYLE_SYMBOL = lexers.STYLE_STRING - --- Gleam -lexers.STYLE_MODULE = lexers.STYLE_CONSTANT -lexers.STYLE_DISCARD = lexers.STYLE_COMMENT - --- Icon -lexers.STYLE_SPECIAL_KEYWORD = lexers.STYLE_TYPE - --- jq -lexers.STYLE_FORMAT = lexers.STYLE_CONSTANT -lexers.STYLE_SYSVAR = lexers.STYLE_CONSTANT .. 
',bold' - --- Julia --- lexers.STYLE_SYMBOL = lexers.STYLE_STRING -lexers.STYLE_CHARACTER = lexers.STYLE_CONSTANT - --- Mediawiki -lexers.STYLE_BEHAVIOR_SWITCH = lexers.STYLE_KEYWORD - --- Moonscript -lexers.STYLE_TBL_KEY = lexers.STYLE_REGEX -lexers.STYLE_SELF_REF = lexers.STYLE_LABEL -lexers.STYLE_PROPER_IDENT = lexers.STYLE_CLASS -lexers.STYLE_FNDEF = lexers.STYLE_PREPROCESSOR --- lexers.STYLE_SYMBOL = lexers.STYLE_EMBEDDED - --- reST -lexers.STYLE_LITERAL_BLOCK = lexers.STYLE_EMBEDDED -lexers.STYLE_FOOTNOTE_BLOCK = lexers.STYLE_LABEL -lexers.STYLE_CITATION_BLOCK = lexers.STYLE_LABEL -lexers.STYLE_LINK_BLOCK = lexers.STYLE_LABEL -lexers.STYLE_CODE_BLOCK = lexers.STYLE_CODE -lexers.STYLE_DIRECTIVE = lexers.STYLE_KEYWORD -lexers.STYLE_SPHINX_DIRECTIVE = lexers.STYLE_KEYWORD -lexers.STYLE_UNKNOWN_DIRECTIVE = lexers.STYLE_KEYWORD -lexers.STYLE_SUBSTITUTION = lexers.STYLE_VARIABLE -lexers.STYLE_INLINE_LITERAL = lexers.STYLE_EMBEDDED -lexers.STYLE_ROLE = lexers.STYLE_CLASS -lexers.STYLE_INTERPRETED = lexers.STYLE_STRING - --- txt2tags -lexers.STYLE_LINE = 'bold' -for i = 1,5 do lexers['STYLE_H'..i] = lexers.STYLE_HEADING end -lexers.STYLE_IMAGE = 'fore:green' -lexers.STYLE_STRIKE = 'italics' -lexers.STYLE_TAGGED = lexers.STYLE_EMBEDDED -lexers.STYLE_TAGGED_AREA = lexers.STYLE_EMBEDDED -lexers.STYLE_TABLE_SEP = 'fore:green' -lexers.STYLE_HEADER_CELL_CONTENT = 'fore:green' diff --git a/share/vis/themes/default.lua b/share/vis/themes/default.lua @@ -1 +0,0 @@ -base-16.lua -\ No newline at end of file diff --git a/share/vis/themes/solarized.lua b/share/vis/themes/solarized.lua @@ -1,69 +0,0 @@ --- Solarized color codes Copyright (c) 2011 Ethan Schoonover -local lexers = vis.lexers - -local colors = { - base03 = '#002b36', - base02 = '#073642', - base01 = '#586e75', - base00 = '#657b83', - base0 = '#839496', - base1 = '#93a1a1', - base2 = '#eee8d5', - base3 = '#fdf6e3', - yellow = '#b58900', - orange = '#cb4b16', - red = '#dc322f', - magenta = '#d33682', - violet = 
'#6c71c4', - blue = '#268bd2', - cyan = '#2aa198', - green = '#859900', -} - -lexers.colors = colors --- dark -local fg = ',fore:'..colors.base0..',' -local bg = ',back:'..colors.base03..',' --- light --- local fg = ',fore:'..colors.base03..',' --- local bg = ',back:'..colors.base3..',' --- solarized term --- local fg = ',fore:default,' --- local bg = ',back:default,' - - -lexers.STYLE_DEFAULT = bg..fg -lexers.STYLE_NOTHING = bg -lexers.STYLE_CLASS = 'fore:yellow' -lexers.STYLE_COMMENT = 'fore:'..colors.base01 -lexers.STYLE_CONSTANT = 'fore:'..colors.cyan -lexers.STYLE_DEFINITION = 'fore:'..colors.blue -lexers.STYLE_ERROR = 'fore:'..colors.red..',italics' -lexers.STYLE_FUNCTION = 'fore:'..colors.blue -lexers.STYLE_KEYWORD = 'fore:'..colors.green -lexers.STYLE_LABEL = 'fore:'..colors.green -lexers.STYLE_NUMBER = 'fore:'..colors.cyan -lexers.STYLE_OPERATOR = 'fore:'..colors.green -lexers.STYLE_REGEX = 'fore:green' -lexers.STYLE_STRING = 'fore:'..colors.cyan -lexers.STYLE_PREPROCESSOR = 'fore:'..colors.orange -lexers.STYLE_TAG = 'fore:'..colors.red -lexers.STYLE_TYPE = 'fore:'..colors.yellow -lexers.STYLE_VARIABLE = 'fore:'..colors.blue -lexers.STYLE_WHITESPACE = 'fore:'..colors.base01 -lexers.STYLE_EMBEDDED = 'back:blue' -lexers.STYLE_IDENTIFIER = fg - -lexers.STYLE_LINENUMBER = 'fore:'..colors.base00..',back:'..colors.base02 -lexers.STYLE_LINENUMBER_CURSOR = 'back:'..colors.base00..',fore:'..colors.base02 -lexers.STYLE_CURSOR = 'fore:'..colors.base03..',back:'..colors.base0 -lexers.STYLE_CURSOR_PRIMARY = lexers.STYLE_CURSOR..',back:yellow' -lexers.STYLE_CURSOR_LINE = 'back:'..colors.base02 -lexers.STYLE_COLOR_COLUMN = 'back:'..colors.base02 --- lexers.STYLE_SELECTION = 'back:'..colors.base02 -lexers.STYLE_SELECTION = 'back:white' -lexers.STYLE_STATUS = 'back:'..colors.base00..',fore:'..colors.base02 -lexers.STYLE_STATUS_FOCUSED = 'back:'..colors.base1..',fore:'..colors.base02 -lexers.STYLE_SEPARATOR = lexers.STYLE_DEFAULT -lexers.STYLE_INFO = 
'fore:default,back:default,bold' -lexers.STYLE_EOF = 'fore:'..colors.base01 diff --git a/share/vis/themes/zenburn.lua b/share/vis/themes/zenburn.lua @@ -1,39 +0,0 @@ --- A poor imitation of the original Vim colourscheme which can be --- found at https://github.com/jnurmine/Zenburn - -local lexers = vis.lexers - -lexers.STYLE_DEFAULT = 'fore:#d7d7d7,back:#3a3a3a' -lexers.STYLE_NOTHING = '' -lexers.STYLE_CLASS = 'fore:#dfdfbf' -lexers.STYLE_COMMENT = 'fore:#87af87' -lexers.STYLE_CONSTANT = 'fore:#d7afaf,bold' -lexers.STYLE_DEFINITION = 'fore:#ffd7af,bold' -lexers.STYLE_ERROR = 'fore:#87d7af,back:#303030,bold' -lexers.STYLE_FUNCTION = 'fore:#d7d7af' -lexers.STYLE_KEYWORD = 'fore:#afaf87,bold' -lexers.STYLE_LABEL = 'fore:#d7d7af' -lexers.STYLE_NUMBER = 'fore:#87d7d7' -lexers.STYLE_OPERATOR = 'fore:#ffffd7' -lexers.STYLE_REGEX = 'fore:#f0dfaf' -lexers.STYLE_STRING = 'fore:#d78787' -lexers.STYLE_PREPROCESSOR = 'fore:#ffd7af,bold' -lexers.STYLE_TAG = 'fore:#d7afaf,bold' -lexers.STYLE_TYPE = 'fore:#d7d7af,bold' -lexers.STYLE_VARIABLE = 'fore:#efdcbc' -lexers.STYLE_WHITESPACE = '' -lexers.STYLE_EMBEDDED = 'fore:#cc9393' -lexers.STYLE_IDENTIFIER = '' - -lexers.STYLE_LINENUMBER = 'fore:#585858' -lexers.STYLE_LINENUMBER_CURSOR = 'fore:#666666' -lexers.STYLE_CURSOR = 'back:#585858' -lexers.STYLE_CURSOR_PRIMARY = 'fore:#1c1c1c,back:#87afaf,bold' -lexers.STYLE_CURSOR_LINE = 'back:#444444' -lexers.STYLE_COLOR_COLUMN = 'back:#444444' -lexers.STYLE_SELECTION = 'back:#5f875f' -lexers.STYLE_STATUS = 'back:#262626,fore:#87af87' -lexers.STYLE_STATUS_FOCUSED = 'back:#303030,fore:#afaf87,bold' -lexers.STYLE_SEPARATOR = '' -lexers.STYLE_INFO = '' -lexers.STYLE_EOF = 'fore:#585858' diff --git a/share/vis/vis-std.lua b/share/vis/vis-std.lua @@ -1,143 +0,0 @@ --- standard vis event handlers - -vis.events.subscribe(vis.events.INIT, function() - if os.getenv("TERM_PROGRAM") == "Apple_Terminal" then - vis:command("set change256colors false") - end - vis:command("set theme default") -end) - 
-vis:option_register("theme", "string", function(name) - if name ~= nil then - local theme = 'themes/'..name - package.loaded[theme] = nil - require(theme) - end - - local lexers = vis.lexers - lexers.lexers = {} - - if not lexers.load then return false end - if not lexers.property then lexers.load("text") end - local colors = lexers.colors - local default_colors = { "black", "red", "green", "yellow", "blue", "magenta", "cyan", "white" } - for _, c in ipairs(default_colors) do - if not colors[c] or colors[c] == '' then - colors[c] = c - end - end - - for win in vis:windows() do - win:set_syntax(win.syntax) - end - return true -end, "Color theme to use, filename without extension") - -vis:option_register("syntax", "string", function(name) - if not vis.win then return false end - if not vis.win:set_syntax(name) then - vis:info(string.format("Unknown syntax definition: `%s'", name)) - return false - end - return true -end, "Syntax highlighting lexer to use") - -vis:option_register("horizon", "number", function(horizon) - if not vis.win then return false end - vis.win.horizon = horizon - return true -end, "Number of bytes to consider for syntax highlighting") - -vis.events.subscribe(vis.events.WIN_HIGHLIGHT, function(win) - if not win.syntax or not vis.lexers.load then return end - local lexer = vis.lexers.load(win.syntax, nil, true) - if not lexer then return end - - -- TODO: improve heuristic for initial style - local viewport = win.viewport.bytes - if not viewport then return end - local horizon_max = win.horizon or 32768 - local horizon = viewport.start < horizon_max and viewport.start or horizon_max - local view_start = viewport.start - local lex_start = viewport.start - horizon - viewport.start = lex_start - local data = win.file:content(viewport) - local token_styles = lexer._TAGS - local tokens = lexer:lex(data, 1) - local token_end = lex_start + (tokens[#tokens] or 1) - 1 - - for i = #tokens - 1, 1, -2 do - local token_start = lex_start + (tokens[i-1] or 1) - 
1 - if token_end < view_start then - break - end - local name = tokens[i] - local style = token_styles[name] - if style ~= nil then - win:style(style, token_start, token_end) - end - token_end = token_start - 1 - end -end) - -local modes = { - [vis.modes.NORMAL] = '', - [vis.modes.OPERATOR_PENDING] = '', - [vis.modes.VISUAL] = 'VISUAL', - [vis.modes.VISUAL_LINE] = 'VISUAL-LINE', - [vis.modes.INSERT] = 'INSERT', - [vis.modes.REPLACE] = 'REPLACE', -} - -vis.events.subscribe(vis.events.WIN_STATUS, function(win) - local left_parts = {} - local right_parts = {} - local file = win.file - local selection = win.selection - - local mode = modes[vis.mode] - if mode ~= '' and vis.win == win then - table.insert(left_parts, mode) - end - - table.insert(left_parts, (file.name or '[No Name]') .. - (file.modified and ' [+]' or '') .. (vis.recording and ' @' or '')) - - local count = vis.count - local keys = vis.input_queue - if keys ~= '' then - table.insert(right_parts, keys) - elseif count then - table.insert(right_parts, count) - end - - if #win.selections > 1 then - table.insert(right_parts, selection.number..'/'..#win.selections) - end - - local size = file.size - local pos = selection.pos - if not pos then pos = 0 end - table.insert(right_parts, (size == 0 and "0" or math.ceil(pos/size*100)).."%") - - if not win.large then - local col = selection.col - table.insert(right_parts, selection.line..', '..col) - if size > 33554432 or col > 65536 then - win.large = true - end - end - - local left = ' ' .. table.concat(left_parts, " » ") .. ' ' - local right = ' ' .. table.concat(right_parts, " « ") .. 
' ' - win:status(left, right); -end) - --- default plugins - -require('plugins/filetype') -require('plugins/textobject-lexer') -require('plugins/digraph') -require('plugins/number-inc-dec') -require('plugins/complete-word') -require('plugins/complete-filename') diff --git a/share/vis/vis.lua b/share/vis/vis.lua @@ -1,335 +0,0 @@ ---- --- Vis Lua plugin API standard library --- @module vis - ---- --- @type Vis - ---- Map a new operator. --- --- Sets up a mapping in normal, visual and operator pending mode. --- The operator function will receive the @{File}, @{Range} and position --- to operate on and is expected to return the new cursor position. --- --- @tparam string key the key to associate with the new operator --- @tparam function operator the operator logic implemented as Lua function --- @tparam[opt] string help the single line help text as displayed in `:help` --- @treturn bool whether the new operator could be installed --- @usage --- vis:operator_new("gq", function(file, range, pos) --- local status, out, err = vis:pipe(file, range, "fmt") --- if status ~= 0 then --- vis:info(err) --- else --- file:delete(range) --- file:insert(range.start, out) --- end --- return range.start -- new cursor location --- end, "Formatting operator, filter range through fmt(1)") --- -vis.operator_new = function(vis, key, operator, help) - local id = vis:operator_register(operator) - if id < 0 then - return false - end - local binding = function() - vis:operator(id) - end - vis:map(vis.modes.NORMAL, key, binding, help) - vis:map(vis.modes.VISUAL, key, binding, help) - vis:map(vis.modes.OPERATOR_PENDING, key, binding, help) - return true -end - ---- Map a new motion. --- --- Sets up a mapping in normal, visual and operator pending mode. --- The motion function will receive the @{Window} and an initial position --- (in bytes from the start of the file) as argument and is expected to --- return the resulting position. 
--- @tparam string key the key to associate with the new option --- @tparam function motion the motion logic implemented as Lua function --- @tparam[opt] string help the single line help text as displayed in `:help` --- @treturn bool whether the new motion could be installed --- @usage --- vis:motion_new("<C-l>", function(win, pos) --- return pos+1 --- end, "Advance to next byte") --- -vis.motion_new = function(vis, key, motion, help) - local id = vis:motion_register(motion) - if id < 0 then - return false - end - local binding = function() - vis:motion(id) - end - vis:map(vis.modes.NORMAL, key, binding, help) - vis:map(vis.modes.VISUAL, key, binding, help) - vis:map(vis.modes.OPERATOR_PENDING, key, binding, help) - return true -end - ---- Map a new text object. --- --- Sets up a mapping in visual and operator pending mode. --- The text object function will receive the @{Window} and an initial --- position(in bytes from the start of the file) as argument and is --- expected to return the resulting range or `nil`. --- @tparam string key the key associated with the new text object --- @tparam function textobject the text object logic implemented as Lua function --- @tparam[opt] string help the single line help text as displayed in `:help` --- @treturn bool whether the new text object could be installed --- @usage --- vis:textobject_new("<C-l>", function(win, pos) --- return pos, pos+1 --- end, "Single byte text object") --- -vis.textobject_new = function(vis, key, textobject, help) - local id = vis:textobject_register(textobject) - if id < 0 then - return false - end - local binding = function() - vis:textobject(id) - end - vis:map(vis.modes.VISUAL, key, binding, help) - vis:map(vis.modes.OPERATOR_PENDING, key, binding, help) - return true -end - ---- Check whether a Lua module exists --- --- Checks whether a subsequent @{require} call will succeed. 
--- @tparam string name the module name to check --- @treturn bool whether the module was found -vis.module_exist = function(_, name) - for _, searcher in ipairs(package.searchers or package.loaders) do - local loader = searcher(name) - if type(loader) == 'function' then - return true - end - end - return false -end - -vis.lexers = {} - -if not vis:module_exist('lpeg') then - vis:info('WARNING: could not find lpeg module') -elseif not vis:module_exist('lexer') then - vis:info('WARNING: could not find lexer module') -else - vis.lexers = require('lexer') - - --- Cache of loaded lexers - -- - -- Caching lexers causes lexer tables to be constructed once and reused - -- during each HIGHLIGHT event. Additionally it allows to modify the lexer - -- used for syntax highlighting from Lua code. - local lexers = {} - local load_lexer = vis.lexers.load - vis.lexers.load = function (name, alt_name, cache) - if cache and lexers[alt_name or name] then return lexers[alt_name or name] end - local lexer = load_lexer(name, alt_name) - if cache then lexers[alt_name or name] = lexer end - return lexer - end - vis.lpeg = require('lpeg') -end - ---- Events. --- --- User scripts can subscribe Lua functions to certain events. Multiple functions --- can be associated with the same event. They will be called in the order they were --- registered. The first function which returns a non `nil` value terminates event --- propagation. The remaining event handler will not be called. --- --- Keep in mind that the editor is blocked while the event handlers --- are being executed, avoid long running tasks. --- --- @section Events - ---- Event names. 
---- @table events -local events = { - FILE_CLOSE = "Event::FILE_CLOSE", -- see @{file_close} - FILE_OPEN = "Event::FILE_OPEN", -- see @{file_open} - FILE_SAVE_POST = "Event::FILE_SAVE_POST", -- see @{file_save_post} - FILE_SAVE_PRE = "Event::FILE_SAVE_PRE", -- see @{file_save_pre} - INIT = "Event::INIT", -- see @{init} - INPUT = "Event::INPUT", -- see @{input} - QUIT = "Event::QUIT", -- see @{quit} - START = "Event::START", -- see @{start} - WIN_CLOSE = "Event::WIN_CLOSE", -- see @{win_close} - WIN_HIGHLIGHT = "Event::WIN_HIGHLIGHT", -- see @{win_highlight} - WIN_OPEN = "Event::WIN_OPEN", -- see @{win_open} - WIN_STATUS = "Event::WIN_STATUS", -- see @{win_status} - TERM_CSI = "Event::TERM_CSI", -- see @{term_csi} - PROCESS_RESPONSE = "Event::PROCESS_RESPONSE", -- see @{process_response} - UI_DRAW = "Event::UI_DRAW", -- see @{ui_draw} -} - -events.file_close = function(...) events.emit(events.FILE_CLOSE, ...) end -events.file_open = function(...) events.emit(events.FILE_OPEN, ...) end -events.file_save_post = function(...) events.emit(events.FILE_SAVE_POST, ...) end -events.file_save_pre = function(...) return events.emit(events.FILE_SAVE_PRE, ...) end -events.init = function(...) events.emit(events.INIT, ...) end -events.input = function(...) return events.emit(events.INPUT, ...) end -events.quit = function(...) events.emit(events.QUIT, ...) end -events.start = function(...) events.emit(events.START, ...) end -events.win_close = function(...) events.emit(events.WIN_CLOSE, ...) end -events.win_highlight = function(...) events.emit(events.WIN_HIGHLIGHT, ...) end -events.win_open = function(...) events.emit(events.WIN_OPEN, ...) end -events.win_status = function(...) events.emit(events.WIN_STATUS, ...) end -events.term_csi = function(...) events.emit(events.TERM_CSI, ...) end -events.process_response = function(...) events.emit(events.PROCESS_RESPONSE, ...) end -events.ui_draw = function(...) events.emit(events.UI_DRAW, ...) 
end - -local handlers = {} - ---- Subscribe to an event. --- --- Register an event handler. --- @tparam string event the event name --- @tparam function handler the event handler --- @tparam[opt] int index the index at which to insert the handler (1 is the highest priority) --- @usage --- vis.events.subscribe(vis.events.FILE_SAVE_PRE, function(file, path) --- -- do something useful --- return true --- end) -events.subscribe = function(event, handler, index) - if not event then error("Invalid event name") end - if type(handler) ~= 'function' then error("Invalid event handler") end - if not handlers[event] then handlers[event] = {} end - events.unsubscribe(event, handler) - table.insert(handlers[event], index or #handlers[event]+1, handler) -end - ---- Unsubscribe from an event. --- --- Remove a registered event handler. --- @tparam string event the event name --- @tparam function handler the event handler to unsubscribe --- @treturn bool whether the handler was successfully removed -events.unsubscribe = function(event, handler) - local h = handlers[event] - if not h then return end - for i = 1, #h do - if h[i] == handler then - table.remove(h, i) - return true - end - end - return false -end - ---- Generate event. --- --- Invokes all event handlers in the order they were registered. --- Passes all arguments to the handler. The first handler which returns a non `nil` --- value terminates the event propagation. The other handlers will not be called. --- --- @tparam string event the event name --- @tparam ... ... the remaining parameters are passed on to the handler -events.emit = function(event, ...) - local h = handlers[event] - if not h then return end - for i = 1, #h do - local ret = h[i](...) - if type(ret) ~= 'nil' then return ret end - end -end - -vis.events = events - ---- --- @type Window - ---- The file type associated with this window. 
--- @tfield string syntax the syntax lexer name or `nil` if unset - ---- Change syntax lexer to use for this window --- @function set_syntax --- @tparam string syntax the syntax lexer name or `nil` to disable syntax highlighting --- @treturn bool whether the lexer could be changed -vis.types.window.set_syntax = function(win, syntax) - - local lexers = vis.lexers - - win:style_define(win.STYLE_DEFAULT, lexers.STYLE_DEFAULT or '') - win:style_define(win.STYLE_CURSOR, lexers.STYLE_CURSOR or '') - win:style_define(win.STYLE_CURSOR_PRIMARY, lexers.STYLE_CURSOR_PRIMARY or '') - win:style_define(win.STYLE_CURSOR_LINE, lexers.STYLE_CURSOR_LINE or '') - win:style_define(win.STYLE_SELECTION, lexers.STYLE_SELECTION or '') - win:style_define(win.STYLE_LINENUMBER, lexers.STYLE_LINENUMBER or '') - win:style_define(win.STYLE_LINENUMBER_CURSOR, lexers.STYLE_LINENUMBER_CURSOR or '') - win:style_define(win.STYLE_COLOR_COLUMN, lexers.STYLE_COLOR_COLUMN or '') - win:style_define(win.STYLE_STATUS, lexers.STYLE_STATUS or '') - win:style_define(win.STYLE_STATUS_FOCUSED, lexers.STYLE_STATUS_FOCUSED or '') - win:style_define(win.STYLE_SEPARATOR, lexers.STYLE_SEPARATOR or '') - win:style_define(win.STYLE_INFO, lexers.STYLE_INFO or '') - win:style_define(win.STYLE_EOF, lexers.STYLE_EOF or '') - - if syntax == nil or syntax == 'off' then - win.syntax = nil - return true - end - - if not lexers.load then return false end - local lexer = lexers.load(syntax) - if not lexer then return false end - - for id, token_name in ipairs(lexer._TAGS) do - local style = lexers['STYLE_' .. 
token_name:upper():gsub("%.", "_")] or '' - if type(style) == 'table' then - local s = '' - if style.attr then - s = string.format("%s,%s", s, attr) - elseif style.fore then - s = string.format("%s,fore:%s", s, style.fore) - elseif style.back then - s = string.format("%s,back:%s", s, style.back) - end - style = s - end - if style ~= nil then win:style_define(id, style) end - end - - win.syntax = syntax - return true -end - ---- --- @type File - ---- Check whether LPeg pattern matches at a given file position. --- @function match_at --- @param pattern the LPeg pattern --- @tparam int pos the absolute file position which should be tested for a match --- @tparam[opt] int horizon the number of bytes around `pos` to consider (defaults to 1K) --- @treturn int start,end the range of the matched region or `nil` -vis.types.file.match_at = function(file, pattern, pos, horizon) - horizon = horizon or 1024 - local lpeg = vis.lpeg - if not lpeg then return nil end - local before, after = pos - horizon, pos + horizon - if before < 0 then before = 0 end - local data = file:content(before, after - before) - local string_pos = pos - before + 1 - - local I = lpeg.Cp() - local p = lpeg.P{ I * pattern * I + 1 * lpeg.V(1) } - local s, e = 1 - while true do - s, e = p:match(data, s) - if not s then return nil end - if s <= string_pos and string_pos < e then - return before + s - 1, before + e - 1 - end - s = e - end -end - -require('vis-std') diff --git a/share/vis/visrc.lua b/share/vis/visrc.lua @@ -1,11 +0,0 @@ --- load standard vis module, providing parts of the Lua API -require('vis') - -vis.events.subscribe(vis.events.INIT, function() - -- Your global configuration options -end) - -vis.events.subscribe(vis.events.WIN_OPEN, function(win) -- luacheck: no unused args - -- Your per window configuration options e.g. - -- vis:command('set number') -end)