logo

oasis-root

Compiled tree of Oasis Linux, based on my own branch at <https://hacktivis.me/git/oasis/>. Clone with: `git clone https://anongit.hacktivis.me/git/oasis-root.git`

pony.lua (3917B)


  1. -- Copyright 2017-2024 Murray Calavera. See LICENSE.
  2. -- Pony LPeg lexer.
  3. local lexer = require('lexer')
  4. local token, word_match = lexer.token, lexer.word_match
  5. local P, S = lpeg.P, lpeg.S
  6. local lex = lexer.new('pony')
  7. -- Whitespace.
  8. local ws = token(lexer.WHITESPACE, lexer.space^1)
  9. lex:add_rule('whitespace', ws)
  10. -- Capabilities.
  11. local capability = token(lexer.LABEL, word_match('box iso ref tag trn val'))
  12. lex:add_rule('capability', capability)
  13. -- Annotations.
  14. local annotation = token(lexer.PREPROCESSOR, lexer.range('\\', false, false))
  15. lex:add_rule('annotation', annotation)
  16. -- Functions.
  17. -- Highlight functions with syntax sugar at declaration.
  18. lex:add_rule('function',
  19. token(lexer.KEYWORD, word_match('fun new be')) * ws^-1 * annotation^-1 * ws^-1 * capability^-1 *
  20. ws^-1 * token(lexer.FUNCTION, word_match{
  21. 'create', 'dispose', '_final', 'apply', 'update', 'add', 'sub', 'mul', 'div', 'mod',
  22. 'add_unsafe', 'sub_unsafe', 'mul_unsafe', 'div_unsafe', 'mod_unsafe', 'shl', 'shr',
  23. 'shl_unsafe', 'shr_unsafe', 'op_and', 'op_or', 'op_xor', 'eq', 'ne', 'lt', 'le', 'ge', 'gt',
  24. 'eq_unsafe', 'ne_unsafe', 'lt_unsafe', 'le_unsafe', 'ge_unsafe', 'gt_unsafe', 'neg',
  25. 'neg_unsafe', 'op_not', --
  26. 'has_next', 'next', --
  27. '_serialise_space', '_serialise', '_deserialise'
  28. }))
  29. -- Keywords.
  30. lex:add_rule('keyword', token(lexer.KEYWORD, word_match{
  31. 'actor', 'as', 'be', 'break', 'class', 'compile_error', 'compile_intrinsic', 'continue',
  32. 'consume', 'do', 'else', 'elseif', 'embed', 'end', 'error', 'for', 'fun', 'if', 'ifdef', 'iftype',
  33. 'in', 'interface', 'is', 'isnt', 'lambda', 'let', 'match', 'new', 'object', 'primitive',
  34. 'recover', 'repeat', 'return', 'struct', 'then', 'this', 'trait', 'try', 'type', 'until', 'use',
  35. 'var', 'where', 'while', 'with'
  36. }))
  37. -- Constants.
  38. lex:add_rule('constant', token(lexer.CONSTANT, word_match('true false')))
  39. -- Operators.
  40. local ops = {
  41. ['+'] = true, ['-'] = true, ['*'] = true, ['/'] = true, ['%'] = true, ['+~'] = true,
  42. ['-~'] = true, ['*~'] = true, ['/~'] = true, ['%~'] = true, ['<<'] = true, ['>>'] = true,
  43. ['<<~'] = true, ['>>~'] = true, ['=='] = true, ['!='] = true, ['<'] = true, ['<='] = true,
  44. ['>='] = true, ['>'] = true, ['==~'] = true, ['!=~'] = true, ['<~'] = true, ['<=~'] = true,
  45. ['>=~'] = true, ['>~'] = true
  46. }
  47. lex:add_rule('operator', token(lexer.OPERATOR, word_match('and or xor not addressof digestof') +
  48. lpeg.Cmt(S('+-*/%<>=!~')^1, function(input, index, op) return ops[op] and index or nil end)))
  49. -- Identifiers.
  50. local id_suffix = (lexer.alnum + "'" + '_')^0
  51. lex:add_rule('type', token(lexer.TYPE, P('_')^-1 * lexer.upper * id_suffix))
  52. lex:add_rule('identifier', token(lexer.IDENTIFIER, P('_')^-1 * lexer.lower * id_suffix))
  53. lex:add_rule('lookup', token(lexer.IDENTIFIER, '_' * lexer.digit^1))
  54. -- Strings.
  55. local sq_str = lexer.range("'")
  56. local dq_str = lexer.range('"')
  57. local tq_str = lexer.range('"""')
  58. lex:add_rule('string', token(lexer.STRING, sq_str + tq_str + dq_str))
  59. -- Numbers.
  60. local function num(digit) return digit * (digit^0 * '_')^0 * digit^1 + digit end
  61. local int = num(lexer.digit)
  62. local frac = '.' * int
  63. local exp = S('eE') * (P('-') + '+')^-1 * int
  64. local hex = '0x' * num(lexer.xdigit)
  65. local bin = '0b' * num(S('01'))
  66. local float = int * frac^-1 * exp^-1
  67. lex:add_rule('number', token(lexer.NUMBER, hex + bin + float))
  68. -- Comments.
  69. local line_comment = lexer.to_eol('//')
  70. local block_comment = lexer.range('/*', '*/', false, false, true)
  71. lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
  72. -- Punctuation.
  73. -- There is no suitable token name for this, change this if ever one is added.
  74. lex:add_rule('punctuation',
  75. token(lexer.OPERATOR, P('=>') + '.>' + '<:' + '->' + S('=.,:;()[]{}!?~^&|_@')))
  76. -- Qualifiers.
  77. lex:add_rule('qualifier', token(lexer.LABEL, '#' * word_match('read send share any alias')))
  78. lexer.property['scintillua.comment'] = '//'
  79. return lex