logo

oasis-root

Compiled tree of Oasis Linux, based on our own branch at <https://hacktivis.me/git/oasis/>. Clone with: git clone https://anongit.hacktivis.me/git/oasis-root.git

jq.lua (4112B)


  1. -- Copyright 2006-2024 Mitchell. See LICENSE.
  2. -- jq 1.6 Lua lexer -- https://stedolan.github.io/jq/wiki
  3. -- Anonymously contributed.
  4. local lexer = require('lexer')
  5. local token, word_match = lexer.token, lexer.word_match
  6. local P, S = lpeg.P, lpeg.S
  7. local lex = lexer.new('jq')
  8. -- Whitespace.
  9. lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
  10. -- Keywords.
  11. lex:add_rule('keyword', token(lexer.KEYWORD, word_match{
  12. -- keywords not listed by jq's "builtins", minus operators 'and' and 'or', plus the '?' shorthand
  13. 'as', 'break', 'catch', 'def', 'elif', 'else', 'end', 'foreach', 'if', 'import', 'include',
  14. 'label', 'module', 'reduce', 'then', 'try'
  15. } + '?'))
  16. -- Functions.
  17. lex:add_rule('function', token(lexer.FUNCTION, word_match{
  18. -- jq 1.6 built-in functions (SQL in upper caisse)
  19. 'acos', 'acosh', 'add', 'all', 'any', 'arrays', 'ascii_downcase', 'ascii_upcase', 'asin', 'asinh',
  20. 'atan', 'atan2', 'atanh', 'booleans', 'bsearch', 'builtins', 'capture', 'cbrt', 'ceil',
  21. 'combinations', 'contains', 'copysign', 'cos', 'cosh', 'debug', 'del', 'delpaths', 'drem',
  22. 'empty', 'endswith', 'env', 'erf', 'erfc', 'error', 'exp', 'exp10', 'exp2', 'explode', 'expm1',
  23. 'fabs', 'fdim', 'finites', 'first', 'flatten', 'floor', 'fma', 'fmax', 'fmin', 'fmod', 'format',
  24. 'frexp', 'from_entries', 'fromdate', 'fromdateiso8601', 'fromjson', 'fromstream', 'gamma',
  25. 'get_jq_origin', 'get_prog_origin', 'get_search_list', 'getpath', 'gmtime', 'group_by', 'gsub',
  26. 'halt', 'halt_error', 'has', 'hypot', 'implode', 'IN', 'in', 'INDEX', 'index', 'indices',
  27. 'infinite', 'input', 'input_filename', 'input_line_number', 'inputs', 'inside', 'isempty',
  28. 'isfinite', 'isinfinite', 'isnan', 'isnormal', 'iterables', 'j0', 'j1', 'jn', 'JOIN', 'join',
  29. 'keys', 'keys_unsorted', 'last', 'ldexp', 'leaf_paths', 'length', 'lgamma', 'lgamma_r', 'limit',
  30. 'localtime', 'log', 'log10', 'log1p', 'log2', 'logb', 'ltrimstr', 'map', 'map_values', 'match',
  31. 'max', 'max_by', 'min', 'min_by', 'mktime', 'modf', 'modulemeta', 'nan', 'nearbyint', 'nextafter',
  32. 'nexttoward', 'normals', 'not', 'now', 'nth', 'nulls', 'numbers', 'objects', 'path', 'paths',
  33. 'pow', 'pow10', 'range', 'recurse', 'recurse_down', 'remainder', 'repeat', 'reverse', 'rindex',
  34. 'rint', 'round', 'rtrimstr', 'scalars', 'scalars_or_empty', 'scalb', 'scalbln', 'scan', 'select',
  35. 'setpath', 'significand', 'sin', 'sinh', 'sort', 'sort_by', 'split', 'splits', 'sqrt',
  36. 'startswith', 'stderr', 'strflocaltime', 'strftime', 'strings', 'strptime', 'sub', 'tan', 'tanh',
  37. 'test', 'tgamma', 'to_entries', 'todate', 'todateiso8601', 'tojson', 'tonumber', 'tostream',
  38. 'tostring', 'transpose', 'trunc', 'truncate_stream', 'type', 'unique', 'unique_by', 'until',
  39. 'utf8bytelength', 'values', 'walk', 'while', 'with_entries', 'y0', 'y1', 'yn'
  40. }))
  41. -- Strings.
  42. local string = token(lexer.STRING, lexer.range('"', true))
  43. local literal = token(lexer.STRING, word_match('null false true'))
  44. lex:add_rule('string', string + literal)
  45. -- Operators.
  46. -- 'not' isn't an operator but a function (filter)
  47. lex:add_rule('operator', token(lexer.OPERATOR,
  48. P('.[]') + '?//' + '//=' + 'and' + '[]' + '//' + '==' + '!=' + '>=' + '<=' + '|=' + '+=' + '-=' +
  49. '*=' + '/=' + '%=' + 'or' + S('=+-*/%<>()[]{}.,') + '|' + ';'))
  50. -- Identifiers.
  51. lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
  52. -- Comments.
  53. lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#')))
  54. -- Numbers.
  55. lex:add_rule('number', token(lexer.NUMBER, lexer.number))
  56. -- Formats.
  57. lex:add_rule('format',
  58. token('format', '@' * word_match('text json html uri csv tsv sh base64 base64d')))
  59. lex:add_style('format', lexer.styles.constant)
  60. -- Variables.
  61. lex:add_rule('sysvar', token('sysvar', '$' * word_match('ENV ORIGIN __loc__')))
  62. lex:add_style('sysvar', lexer.styles.constant .. {bold = true})
  63. lex:add_rule('variable', token(lexer.VARIABLE, '$' * lexer.word))
  64. -- Fold points.
  65. lex:add_fold_point(lexer.KEYWORD, 'if', 'end')
  66. lex:add_fold_point(lexer.OPERATOR, '[', ']')
  67. lex:add_fold_point(lexer.OPERATOR, '{', '}')
  68. lexer.property['scintillua.comment'] = '#'
  69. return lex