
oasis-root

Compiled tree of Oasis Linux, based on its own branch at <https://hacktivis.me/git/oasis/>.

git clone https://anongit.hacktivis.me/git/oasis-root.git

yaml.lua (4302B)


-- Copyright 2006-2024 Mitchell. See LICENSE.
-- YAML LPeg lexer.
-- It does not keep track of indentation perfectly.
local lexer = lexer
local word_match = lexer.word_match
local P, S, B = lpeg.P, lpeg.S, lpeg.B
local lex = lexer.new(..., {fold_by_indentation = true})
-- Distinguish between horizontal and vertical space so indenting tabs can be marked as errors.
local tab_indent = lex:tag(lexer.ERROR .. '.indent', lexer.starts_line('\t', true))
lex:modify_rule('whitespace', tab_indent + lex:tag(lexer.WHITESPACE, S(' \r\n')^1 + P('\t')^1))
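-- e.g. a tab used to indent a line is tagged 'error.indent'; space indentation stays plain whitespace.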
-- Document boundaries.
lex:add_rule('doc_bounds', lex:tag(lexer.OPERATOR, lexer.starts_line(P('---') + '...')))
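-- e.g. the '---' that starts a document and the '...' that ends one, at the beginning of a line.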
-- Keys.
local word = (lexer.alnum + '-')^1
lex:add_rule('key', -P('- ') * lex:tag(lexer.STRING, word * (S(' \t_')^1 * word^-1)^0) *
  #P(':' * lexer.space))
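-- e.g. 'first name' in "first name: John" is tagged as a string key; the lookahead requires a following ':'.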
-- Collections.
lex:add_rule('collection', lex:tag(lexer.OPERATOR,
  lexer.after_set('?-:\n', S('?-') * #P(' '), ' \t') + ':' * #P(lexer.space) + S('[]{}') + ',' *
    #P(' ')))
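-- e.g. the '-' in "- item", the ':' in "key: value", and the '[', ',', ']' in "[a, b]".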
-- Alias indicators.
local anchor = lex:tag(lexer.OPERATOR, '&') * lex:tag(lexer.LABEL, word)
local alias = lex:tag(lexer.OPERATOR, '*') * lex:tag(lexer.LABEL, word)
lex:add_rule('alias', anchor + alias)
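-- e.g. '&defaults' defines an anchor and '*defaults' aliases it elsewhere in the document.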
-- Tags.
local explicit_tag = '!!' * word_match{
  'map', 'omap', 'pairs', 'set', 'seq', -- collection
  'binary', 'bool', 'float', 'int', 'merge', 'null', 'str', 'timestamp', 'value', 'yaml' -- scalar
}
local verbatim_tag = '!' * lexer.range('<', '>', true)
local short_tag = '!' * word * ('!' * (1 - lexer.space)^1)^-1
lex:add_rule('tag', lex:tag(lexer.TYPE, explicit_tag + verbatim_tag + short_tag))
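-- e.g. '!!str', the verbatim '!<tag:yaml.org,2002:str>', or a named handle such as '!local'.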
-- Comments.
lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.to_eol('#')))
-- Reserved.
lex:add_rule('reserved',
  B(S(':,') * ' ') * lex:tag(lexer.ERROR, S('@`') + lexer.starts_line(S('@`'))))
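-- '@' and '`' are reserved indicators in YAML; e.g. the '@' in "key: @value" is flagged as an error.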
-- Constants.
local scalar_end = #(S(' \t')^0 * lexer.newline + S(',]}') + -1)
lex:add_rule('constant',
  lex:tag(lexer.CONSTANT_BUILTIN, word_match('null true false', true)) * scalar_end)
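-- e.g. 'null', 'True', and 'FALSE' (matched case-insensitively), but only when they form a whole scalar.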
-- Strings.
local sq_str = lexer.range("'")
local dq_str = lexer.range('"')
lex:add_rule('string', lex:tag(lexer.STRING, sq_str + dq_str) * (scalar_end + #P(':' * lexer.space)))
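-- e.g. 'single quoted' and "double quoted" scalars, including quoted keys such as "a key": 1.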
-- Timestamps.
local year = lexer.digit * lexer.digit * lexer.digit * lexer.digit
local month = lexer.digit * lexer.digit^-1
local day = lexer.digit * lexer.digit^-1
local date = year * '-' * month * '-' * day
local hours = lexer.digit * lexer.digit^-1
local minutes = lexer.digit * lexer.digit
local seconds = lexer.digit * lexer.digit
local fraction = '.' * lexer.digit^0
local time = hours * ':' * minutes * ':' * seconds * fraction^-1
local zone = 'Z' + S(' \t')^-1 * S('-+') * hours * (':' * minutes)^-1
lex:add_rule('timestamp', lex:tag(lexer.NUMBER .. '.timestamp',
  date * (S('tT \t') * time * zone^-1)^-1) * scalar_end)
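-- e.g. '2001-12-14' or '2001-12-14T21:59:43.10-05:00' are tagged 'number.timestamp'.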
-- Numbers.
local special_num = S('+-')^-1 * '.' * word_match('inf nan', true)
local number = lexer.number + special_num
lex:add_rule('number', (B(lexer.alnum) * lex:tag(lexer.DEFAULT, number) +
  lex:tag(lexer.NUMBER, number)) * scalar_end)
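-- e.g. 42, 3.14, -.inf, and .NaN; digits immediately following a word are left as default text.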
-- Scalars.
local block_indicator = S('|>') * (S('-+') * lexer.digit^-1 + lexer.digit * S('-+')^-1)^-1
local block = lpeg.Cmt(lpeg.C(block_indicator * lexer.newline), function(input, index, indicator)
  local indent = lexer.indent_amount[lexer.line_from_position(index - #indicator)]
  for s, i, j in input:gmatch('()\n()[ \t]*()[^ \t\r\n]', index) do -- ignore blank lines
    if s >= index then -- compatibility for Lua < 5.4, which doesn't have init for string.gmatch()
      if j - i <= indent then return s end
    end
  end
  return #input + 1
end)
local seq = B('- ') * lexer.nonnewline^1
local csv = B(', ') * (lexer.nonnewline - S(',]}'))^1
local stop_chars, LF = {[string.byte('{')] = true, [string.byte('\n')] = true}, string.byte('\n')
local map = B(': ') * lexer.nonnewline * P(function(input, index)
  local pos = index
  while pos > 1 and not stop_chars[input:byte(pos)] do pos = pos - 1 end
  local s = input:find(input:byte(pos) ~= LF and '[\n,}]' or '\n', index)
  return s or #input + 1
end)
lex:add_rule('scalar', lex:tag(lexer.DEFAULT, block + seq + csv + map))
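-- e.g. the lines under 'description: |' up to the next line indented no deeper than the key,
-- plus plain scalars following '- ', ', ', and ': '.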
-- Directives
lex:add_rule('directive', lex:tag(lexer.PREPROCESSOR, lexer.starts_line(lexer.to_eol('%'))))
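-- e.g. '%YAML 1.2' or a '%TAG' directive at the start of a line.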
lexer.property['scintillua.comment'] = '#'
return lex
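
For reference, a minimal sketch of driving this lexer standalone, assuming Scintillua's lexer module and LPeg are on the Lua path; lexer.load() and lexer.lex() are part of Scintillua's documented Lua API, while the sample text and output handling below are illustrative only and not part of oasis-root.

local lexer = require('lexer')     -- Scintillua's lexer.lua, assumed importable
local yaml = lexer.load('yaml')    -- loads the lexer defined above
local text = '---\nkey: &base\n  list: [1, 2.5, .inf]\nalias: *base\ndate: 2001-12-14\n...\n'
local tags = lexer.lex(yaml, text) -- alternating tag names and positions
for i = 1, #tags, 2 do print(tags[i], tags[i + 1]) end

Running it should print runs such as 'operator' for '---', 'string' for 'key', and 'number.timestamp' for the date, one pair per tagged span.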