This repo contains code to mirror other repos. It also contains the code that is getting mirrored.
You can't add more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

185 rindas
6.6 KiB

  1. from __future__ import absolute_import
  2. import os
  3. import time
  4. from collections import defaultdict
  5. from .utils import STRING_TYPE
  6. from .load_grammar import load_grammar
  7. from .tree import Tree
  8. from .common import LexerConf, ParserConf
  9. from .lexer import Lexer
  10. from .parse_tree_builder import ParseTreeBuilder
  11. from .parser_frontends import get_frontend
  12. class LarkOptions(object):
  13. """Specifies the options for Lark
  14. """
  15. OPTIONS_DOC = """
  16. parser - Decides which parser engine to use, "earley" or "lalr". (Default: "earley")
  17. Note: "lalr" requires a lexer
  18. lexer - Decides whether or not to use a lexer stage
  19. None: Don't use a lexer (scanless, only works with parser="earley")
  20. "standard": Use a standard lexer
  21. "contextual": Stronger lexer (only works with parser="lalr")
  22. "auto" (default): Choose for me based on grammar and parser
  23. transformer - Applies the transformer to every parse tree
  24. debug - Affects verbosity (default: False)
  25. keep_all_tokens - Don't automagically remove "punctuation" tokens (default: False)
  26. cache_grammar - Cache the Lark grammar (Default: False)
  27. postlex - Lexer post-processing (Default: None)
  28. start - The start symbol (Default: start)
  29. profile - Measure run-time usage in Lark. Read results from the profiler proprety (Default: False)
  30. """
  31. __doc__ += OPTIONS_DOC
  32. def __init__(self, options_dict):
  33. o = dict(options_dict)
  34. self.debug = bool(o.pop('debug', False))
  35. self.keep_all_tokens = bool(o.pop('keep_all_tokens', False))
  36. self.tree_class = o.pop('tree_class', Tree)
  37. self.cache_grammar = o.pop('cache_grammar', False)
  38. self.postlex = o.pop('postlex', None)
  39. self.parser = o.pop('parser', 'earley')
  40. self.lexer = o.pop('lexer', 'auto')
  41. self.transformer = o.pop('transformer', None)
  42. self.start = o.pop('start', 'start')
  43. self.profile = o.pop('profile', False)
  44. assert self.parser in ('earley', 'lalr', None)
  45. if self.parser == 'earley' and self.transformer:
  46. raise ValueError('Cannot specify an auto-transformer when using the Earley algorithm.'
  47. 'Please use your transformer on the resulting parse tree, or use a different algorithm (i.e. lalr)')
  48. if self.keep_all_tokens:
  49. raise NotImplementedError("keep_all_tokens: Not implemented yet!")
  50. if o:
  51. raise ValueError("Unknown options: %s" % o.keys())
  52. class Profiler:
  53. def __init__(self):
  54. self.total_time = defaultdict(float)
  55. self.cur_section = '__init__'
  56. self.last_enter_time = time.time()
  57. def enter_section(self, name):
  58. cur_time = time.time()
  59. self.total_time[self.cur_section] += cur_time - self.last_enter_time
  60. self.last_enter_time = cur_time
  61. self.cur_section = name
  62. def make_wrapper(self, name, f):
  63. def wrapper(*args, **kwargs):
  64. last_section = self.cur_section
  65. self.enter_section(name)
  66. try:
  67. return f(*args, **kwargs)
  68. finally:
  69. self.enter_section(last_section)
  70. return wrapper
class Lark:
    # Main interface: compiles a grammar and builds the lexer/parser pipeline
    # chosen by LarkOptions.
    def __init__(self, grammar, **options):
        """
        grammar : a string or file-object containing the grammar spec (using Lark's ebnf syntax)
        options : a dictionary controlling various aspects of Lark.
        """
        self.options = LarkOptions(options)

        # Some, but not all file-like objects have a 'name' attribute
        try:
            source = grammar.name
        except AttributeError:
            source = '<string>'
            cache_file = "larkcache_%s" % str(hash(grammar)%(2**32))
        else:
            cache_file = "larkcache_%s" % os.path.basename(source)
        # NOTE(review): cache_file is computed but never used below —
        # presumably reserved for the unimplemented cache_grammar feature.

        # Drain file-like objects to get their contents
        try:
            read = grammar.read
        except AttributeError:
            pass
        else:
            grammar = read()

        assert isinstance(grammar, STRING_TYPE)

        # Both features raise in LarkOptions already for keep_all_tokens;
        # cache_grammar is declared but not implemented either.
        if self.options.cache_grammar or self.options.keep_all_tokens:
            raise NotImplementedError("Not available yet")

        assert not self.options.profile, "Feature temporarily disabled"
        self.profiler = Profiler() if self.options.profile else None

        # Resolve lexer='auto' based on the selected parser:
        # lalr needs a standard lexer; earley runs scannerless (no lexer).
        lexer = self.options.lexer
        if lexer == 'auto':
            if self.options.parser == 'lalr':
                lexer = 'standard'
            elif self.options.parser == 'earley':
                lexer = None
        self.options.lexer = lexer

        self.grammar = load_grammar(grammar, source)
        tokens, self.rules, self.grammar_extra = self.grammar.compile(lexer=bool(lexer), start=self.options.start)
        self.ignore_tokens = self.grammar.extra['ignore']

        self.lexer_conf = LexerConf(tokens, self.ignore_tokens, self.options.postlex)

        # A parser frontend owns its own lexer; only build a standalone
        # lexer when no parser was requested.
        if self.options.parser:
            self.parser = self._build_parser()
        elif lexer:
            self.lexer = self._build_lexer()

        if self.profiler: self.profiler.enter_section('outside_lark')

    # Append the option reference to the constructor's docstring.
    __init__.__doc__ += "\nOPTIONS:" + LarkOptions.OPTIONS_DOC

    def _build_lexer(self):
        # Standalone lexer from the already-compiled token definitions.
        return Lexer(self.lexer_conf.tokens, ignore=self.lexer_conf.ignore)

    def _build_parser(self):
        # Pick the frontend matching the (parser, lexer) combination and
        # wire in the tree-building callbacks (optionally profiled).
        self.parser_class = get_frontend(self.options.parser, self.options.lexer)
        self.parse_tree_builder = ParseTreeBuilder(self.options.tree_class)
        rules, callback = self.parse_tree_builder.create_tree_builder(self.rules, self.options.transformer)
        if self.profiler:
            # Wrap every public callback attribute so time spent in the
            # transformer is charged to the 'transformer' section.
            for f in dir(callback):
                if not (f.startswith('__') and f.endswith('__')):
                    setattr(callback, f, self.profiler.make_wrapper('transformer', getattr(callback, f)))
        parser_conf = ParserConf(rules, callback, self.options.start)
        return self.parser_class(self.lexer_conf, parser_conf)

    def lex(self, text):
        # Tokenize *text*; builds the lexer lazily on first use
        # (it is only pre-built when no parser was requested).
        if not hasattr(self, 'lexer'):
            self.lexer = self._build_lexer()
        stream = self.lexer.lex(text)
        if self.options.postlex:
            return self.options.postlex.process(stream)
        else:
            return stream

    def parse(self, text):
        # Delegate to the parser frontend built in __init__.
        return self.parser.parse(text)

        # if self.profiler:
        #     self.profiler.enter_section('lex')
        #     l = list(self.lex(text))
        #     self.profiler.enter_section('parse')
        #     try:
        #         return self.parser.parse(l)
        #     finally:
        #         self.profiler.enter_section('outside_lark')
        # else:
        #     l = list(self.lex(text))
        #     return self.parser.parse(l)