
Refactor: Eradicated inheritance from object

remotes/origin/gm/2021-09-23T00Z/github.com--lark-parser-lark/master
Erez Sh, 3 years ago
commit 81fa039872
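
The change is mechanical: in Python 3 every class is a new-style class, so class Foo: and class Foo(object): define exactly the same thing; the explicit base only mattered on Python 2, which lark no longer supports. A quick demonstration (not part of the diff):

    # Both spellings produce a new-style class on Python 3,
    # with the same method resolution order.
    class Explicit(object):
        pass

    class Implicit:
        pass

    assert Explicit.__mro__ == (Explicit, object)
    assert Implicit.__mro__ == (Implicit, object)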
12 changed files with 29 additions and 42 deletions
  1. lark/ast_utils.py (+3 -3)
  2. lark/lexer.py (+2 -2)
  3. lark/load_grammar.py (+1 -1)
  4. lark/parsers/cyk.py (+5 -5)
  5. lark/parsers/earley_common.py (+3 -16)
  6. lark/parsers/earley_forest.py (+2 -3)
  7. lark/parsers/grammar_analysis.py (+3 -3)
  8. lark/parsers/lalr_interactive_parser.py (+1 -1)
  9. lark/parsers/lalr_parser.py (+3 -3)
  10. lark/tree.py (+1 -1)
  11. lark/utils.py (+1 -1)
  12. tests/test_parser.py (+4 -3)

lark/ast_utils.py (+3 -3)

@@ -8,20 +8,20 @@ from typing import Optional, Callable

from lark import Transformer, v_args

-class Ast(object):
+class Ast:
"""Abstract class

Subclasses will be collected by `create_transformer()`
"""
pass

-class AsList(object):
+class AsList:
"""Abstract class

Subclasses will be instanciated with the parse results as a single list, instead of as arguments.
"""

-class WithMeta(object):
+class WithMeta:
"""Abstract class

Subclasses will be instanciated with the Meta instance of the tree. (see ``v_args`` for more detail)
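
These three marker classes are the whole surface touched here. For context, a hedged usage sketch (not from this commit; the Name dataclass is made up for illustration): create_transformer() scans a module for Ast subclasses and wires each one to the grammar rule matching its snake-cased name.

    import sys
    from dataclasses import dataclass
    from lark import ast_utils, Transformer

    class _Ast(ast_utils.Ast):
        # Underscore-prefixed helpers are skipped by create_transformer().
        pass

    @dataclass
    class Name(_Ast):          # instantiated with the results of rule "name"
        value: str

    # Collects every Ast subclass defined in this module.
    transformer = ast_utils.create_transformer(sys.modules[__name__], Transformer())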


lark/lexer.py (+2 -2)

@@ -331,7 +331,7 @@ def _regexp_has_newline(r: str):
return '\n' in r or '\\n' in r or '\\s' in r or '[^' in r or ('(?s' in r and '.' in r)


-class LexerState(object):
+class LexerState:
__slots__ = 'text', 'line_ctr', 'last_token'

def __init__(self, text, line_ctr, last_token=None):
@@ -521,7 +521,7 @@ class ContextualLexer(Lexer):
except UnexpectedCharacters:
raise e # Raise the original UnexpectedCharacters. The root lexer raises it with the wrong expected set.

-class LexerThread(object):
+class LexerThread:
"""A thread that ties a lexer instance and a lexer state, to be used by the parser"""

def __init__(self, lexer, text):
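
One footnote on the classes above: LexerState relies on __slots__, and on Python 2 __slots__ only took effect in new-style (object-derived) classes. On Python 3 the behavior is identical either way, so dropping the base is safe. A quick check, outside the diff:

    class S:                   # no explicit object base
        __slots__ = ('x',)

    s = S()
    s.x = 1
    try:
        s.y = 2                # rejected: 'y' is not in __slots__
    except AttributeError:
        print('blocked, as expected')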


lark/load_grammar.py (+1 -1)

@@ -791,7 +791,7 @@ class Grammar:
PackageResource = namedtuple('PackageResource', 'pkg_name path')


-class FromPackageLoader(object):
+class FromPackageLoader:
"""
Provides a simple way of creating custom import loaders that load from packages via ``pkgutil.get_data`` instead of using `open`.
This allows them to be compatible even from within zip files.
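
A hedged usage sketch (not from this diff; the package and directory names are hypothetical): the loader is passed via the import_paths option and is consulted when the grammar contains %import statements.

    from lark import Lark
    from lark.load_grammar import FromPackageLoader

    # Resolve %import statements against my_pkg/grammars/ inside the package.
    loader = FromPackageLoader('my_pkg', ('grammars',))
    parser = Lark('start: "a"', import_paths=[loader])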


lark/parsers/cyk.py (+5 -5)

@@ -23,7 +23,7 @@ def match(t, s):
return t.name == s.type


-class Rule(object):
+class Rule:
"""Context-free grammar rule."""

def __init__(self, lhs, rhs, weight, alias):
@@ -51,7 +51,7 @@ class Rule(object):
return not (self == other)


-class Grammar(object):
+class Grammar:
"""Context-free grammar."""

def __init__(self, rules):
@@ -68,7 +68,7 @@ class Grammar(object):


# Parse tree data structures
-class RuleNode(object):
+class RuleNode:
"""A node in the parse tree, which also contains the full rhs rule."""

def __init__(self, rule, children, weight=0):
@@ -81,7 +81,7 @@ class RuleNode(object):



-class Parser(object):
+class Parser:
"""Parser wrapper."""

def __init__(self, rules):
@@ -186,7 +186,7 @@ def _parse(s, g):
# * Empty rules (epsilon rules)


-class CnfWrapper(object):
+class CnfWrapper:
"""CNF wrapper for grammar.

Validates that the input grammar is CNF and provides helper data structures.


lark/parsers/earley_common.py (+3 -16)

@@ -1,21 +1,8 @@
-"This module implements an Earley Parser"
+"""This module implements useful building blocks for the Earley parser
+"""

-# The parser uses a parse-forest to keep track of derivations and ambiguations.
-# When the parse ends successfully, a disambiguation stage resolves all ambiguity
-# (right now ambiguity resolution is not developed beyond the needs of lark)
-# Afterwards the parse tree is reduced (transformed) according to user callbacks.
-# I use the no-recursion version of Transformer, because the tree might be
-# deeper than Python's recursion limit (a bit absurd, but that's life)
-#
-# The algorithm keeps track of each state set, using a corresponding Column instance.
-# Column keeps track of new items using NewsList instances.
-#
-# Author: Erez Shinan (2017)
-# Email : erezshin@gmail.com

from ..grammar import NonTerminal, Terminal

-class Item(object):
+class Item:
"An Earley Item, the atom of the algorithm."

__slots__ = ('s', 'rule', 'ptr', 'start', 'is_complete', 'expect', 'previous', 'node', '_hash')


lark/parsers/earley_forest.py (+2 -3)

@@ -8,7 +8,6 @@ http://www.bramvandersanden.com/post/2014/06/shared-packed-parse-forest/
"""

from random import randint
-from math import isinf
from collections import deque
from operator import attrgetter
from importlib import import_module
@@ -20,7 +19,7 @@ from ..lexer import Token
from ..utils import logger
from ..tree import Tree

-class ForestNode(object):
+class ForestNode:
pass

class SymbolNode(ForestNode):
@@ -173,7 +172,7 @@ class PackedNode(ForestNode):
symbol = self.s.name
return "({}, {}, {}, {})".format(symbol, self.start, self.priority, self.rule.order)

-class ForestVisitor(object):
+class ForestVisitor:
"""
An abstract base class for building forest visitors.



lark/parsers/grammar_analysis.py (+3 -3)

@@ -5,7 +5,7 @@ from ..exceptions import GrammarError
from ..grammar import Rule, Terminal, NonTerminal


-class RulePtr(object):
+class RulePtr:
__slots__ = ('rule', 'index')

def __init__(self, rule, index):
@@ -38,7 +38,7 @@ class RulePtr(object):


# state generation ensures no duplicate LR0ItemSets
-class LR0ItemSet(object):
+class LR0ItemSet:
__slots__ = ('kernel', 'closure', 'transitions', 'lookaheads')

def __init__(self, kernel, closure):
@@ -121,7 +121,7 @@ def calculate_sets(rules):
return FIRST, FOLLOW, NULLABLE


-class GrammarAnalyzer(object):
+class GrammarAnalyzer:
def __init__(self, parser_conf, debug=False):
self.debug = debug



lark/parsers/lalr_interactive_parser.py (+1 -1)

@@ -6,7 +6,7 @@ from .. import Token
from ..exceptions import UnexpectedToken


-class InteractiveParser(object):
+class InteractiveParser:
"""InteractiveParser gives you advanced control over parsing and error handling when parsing with LALR.

For a simpler interface, see the ``on_error`` argument to ``Lark.parse()``.
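
A hedged sketch of the flow this class enables (not part of the diff): Lark.parse_interactive() returns an InteractiveParser that can be stepped, inspected, and resumed.

    from lark import Lark

    parser = Lark('start: "a" "b"', parser='lalr')
    ip = parser.parse_interactive('ab')
    print(ip.accepts())        # token types the parser could accept next
    ip.exhaust_lexer()         # feed every remaining token from the lexer
    tree = ip.feed_eof()       # close the parse and collect the result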


lark/parsers/lalr_parser.py (+3 -3)

@@ -69,7 +69,7 @@ class LALR_Parser(Serialize):
e = e2


-class ParseConf(object):
+class ParseConf:
__slots__ = 'parse_table', 'callbacks', 'start', 'start_state', 'end_state', 'states'

def __init__(self, parse_table, callbacks, start):
@@ -83,7 +83,7 @@ class ParseConf(object):
self.start = start


-class ParserState(object):
+class ParserState:
__slots__ = 'parse_conf', 'lexer', 'state_stack', 'value_stack'

def __init__(self, parse_conf, lexer, state_stack=None, value_stack=None):
@@ -157,7 +157,7 @@ class ParserState(object):
if is_end and state_stack[-1] == end_state:
return value_stack[-1]

-class _Parser(object):
+class _Parser:
def __init__(self, parse_table, callbacks, debug=False):
self.parse_table = parse_table
self.callbacks = callbacks


lark/tree.py (+1 -1)

@@ -35,7 +35,7 @@ class Meta:
self.empty = True


-class Tree(object):
+class Tree:
"""The main tree class.

Creates a new tree, and stores "data" and "children" in attributes of the same name.
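
For reference, the class is a plain container; a tiny illustration (not part of the diff):

    from lark import Tree, Token

    t = Tree('add', [Token('NUMBER', '1'), Token('NUMBER', '2')])
    print(t.data)        # 'add'
    print(t.children)    # [Token('NUMBER', '1'), Token('NUMBER', '2')]
    print(t.pretty())    # indented, human-readable rendering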


lark/utils.py (+1 -1)

@@ -41,7 +41,7 @@ def _deserialize(data, namespace, memo):
return data


-class Serialize(object):
+class Serialize:
"""Safe-ish serialization interface that doesn't rely on Pickle

Attributes:


tests/test_parser.py (+4 -3)

@@ -918,7 +918,7 @@ class CustomLexerNew(Lexer):
so it uses the traditionalparser as implementation without custom lexing behaviour.
"""
def __init__(self, lexer_conf):
-self.lexer = TraditionalLexer(copy(lexer_conf))
+self.lexer = BasicLexer(copy(lexer_conf))
def lex(self, lexer_state, parser_state):
return self.lexer.lex(lexer_state, parser_state)

@@ -930,7 +930,7 @@ class CustomLexerOld(Lexer):
so it uses the traditionalparser as implementation without custom lexing behaviour.
"""
def __init__(self, lexer_conf):
-self.lexer = TraditionalLexer(copy(lexer_conf))
+self.lexer = BasicLexer(copy(lexer_conf))
def lex(self, text):
ls = self.lexer.make_lexer_state(text)
return self.lexer.lex(ls, None)
@@ -1736,7 +1736,8 @@ def _make_parser_test(LEXER, PARSER):
self.assertEqual(len(tree.children), 2)


-# @unittest.skipIf(LEXER != 'basic', "Only basic lexers care about token priority")
+# TODO: Remove after merging priority for Dynamic Earley
+@unittest.skipIf(LEXER != 'basic', "Only basic lexers care about token priority")
def test_lexer_prioritization(self):
"Tests effect of priority on result"


