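"""
Lexer specification for a small example language, written with Langkit's lexer
DSL: token kinds are declared on a LexerToken subclass, and the matching rules
are registered on a Lexer instance via add_rules.
"""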
from __future__ import absolute_import, division, print_function

from langkit.lexer import (
    Eof, Ignore, Lexer, LexerToken, Literal, Pattern, WithSymbol, WithText
)


class Token(LexerToken):
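    """
    Token kinds for this lexer. Most kinds are declared with WithText;
    Identifier uses WithSymbol so that its text is treated as a symbol.
    """
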
    Example = WithText()
    Null = WithText()

    Comma = WithText()
    LPar = WithText()
    RPar = WithText()
    LBrace = WithText()
    RBrace = WithText()
    Plus = WithText()

    Number = WithText()
    Identifier = WithSymbol()


foo_lexer = Lexer(Token)
foo_lexer.add_rules(
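    # Each rule pairs a matcher (Pattern, Literal or Eof) with the token kind
    # it emits; Ignore() discards the matched input instead of emitting a
    # token, and Eof() maps to the Termination kind inherited from LexerToken.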
    (Pattern(r'[ \n\r\t]+'), Ignore()),
    (Eof(), Token.Termination),

    (Literal("example"), Token.Example),
    (Literal("null"), Token.Null),

    (Literal(','), Token.Comma),
    (Literal('('), Token.LPar),
    (Literal(')'), Token.RPar),
    (Literal('{'), Token.LBrace),
    (Literal('}'), Token.RBrace),
    (Literal('+'), Token.Plus),
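
    # Numbers and identifiers are matched with regular expression patterns.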
    (Pattern('[0-9]+'), Token.Number),
    (Pattern('[a-zA-Z_][a-zA-Z0-9_]*'), Token.Identifier),
)