diff --git a/demystify/demystify.py b/demystify/demystify.py index 1449d22..b2d3f54 100755 --- a/demystify/demystify.py +++ b/demystify/demystify.py @@ -30,7 +30,7 @@ _stdout.setFormatter(logging.Formatter(fmt='%(levelname)s: %(message)s')) plog.addHandler(_stdout) -import antlr3 +import antlr4 import card import data @@ -56,11 +56,11 @@ def get_cards(): def _token_stream(name, text): """ Helper method for generating a token stream from text. """ - char_stream = antlr3.ANTLRStringStream(text) + char_stream = antlr4.InputStream(text) lexer = DemystifyLexer.DemystifyLexer(char_stream) lexer.card = name # tokenizes completely and logs on errors - return antlr3.CommonTokenStream(lexer) + return antlr4.CommonTokenStream(lexer) def _lex(c): try: @@ -95,7 +95,7 @@ def pprint_tokens(tokens): return tlen = max(len(t.text) for t in tokens) for t in tokens: - if t.channel != antlr3.HIDDEN_CHANNEL: + if t.channel != antlr4.Token.HIDDEN_CHANNEL: print('{0.line:>2} {0.charPositionInLine:>4} {0.index:>3} ' '{0.text:{tlen}} {0.typeName}' .format(t, tlen=tlen)) @@ -162,7 +162,7 @@ def _crawl_tree_for_errors(name, lineno, text, tree): n = queue.pop(0) if n.children: queue.extend(n.children) - if isinstance(n, antlr3.tree.CommonErrorNode): + if isinstance(n, antlr4.tree.Tree.ErrorNode): mstart = n.trappedException.token.start mend = text.find(',', mstart) if mend < 0: diff --git a/demystify/grammar/Demystify.g b/demystify/grammar/Demystify.g index d88bdea..6638da1 100644 --- a/demystify/grammar/Demystify.g +++ b/demystify/grammar/Demystify.g @@ -1,20 +1,20 @@ grammar Demystify; // This file is part of Demystify. -// +// // Demystify: a Magic: The Gathering parser // Copyright (C) 2012 Benjamin S Wolf -// +// // Demystify is free software; you can redistribute it and/or modify // it under the terms of the GNU Lesser General Public License as published // by the Free Software Foundation; either version 3 of the License, // or (at your option) any later version. 
-// +// // Demystify is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Lesser General Public License for more details. -// +// // You should have received a copy of the GNU Lesser General Public License // along with Demystify. If not, see . @@ -34,45 +34,45 @@ options { import Words, events, keywords, costs, subsets, math, properties, counters, zones, players, misc, objects, raw_keywords, macro; tokens { - ADD_COUNTERS; - ATTACHED_TO; - CMC; - COINFLIP; - CONDITION; - COUNTER_GROUP; - COUNTER_SET; - ENERGY; - EVENT; - GEQ; - GT; - HAS_COUNTERS; - INT; - KEYWORDS; - LEQ; - LINKED; - LT; - MAX; - MOVE_TO; - MULT; - NONCOMBAT; - NTH; - PAY_COST; - PAY_LIFE; - PAY_PER; - PER; - PLAYER_GROUP; - POSS; - PROPERTIES; - PT; - REMOVE_COUNTERS; - SUBSET; - SUBTYPES; - SUPERTYPES; - TYPECYCLING; - TYPELINE; - TYPES; - VAR; - ZONE_SET; + ADD_COUNTERS, + ATTACHED_TO, + CMC, + COINFLIP, + CONDITION, + COUNTER_GROUP, + COUNTER_SET, + ENERGY, + EVENT, + GEQ, + GT, + HAS_COUNTERS, + INT, + KEYWORDS, + LEQ, + LINKED, + LT, + MAX, + MOVE_TO, + MULT, + NONCOMBAT, + NTH, + PAY_COST, + PAY_LIFE, + PAY_PER, + PER, + PLAYER_GROUP, + POSS, + PROPERTIES, + PT, + REMOVE_COUNTERS, + SUBSET, + SUBTYPES, + SUPERTYPES, + TYPECYCLING, + TYPELINE, + TYPES, + VAR, + ZONE_SET } @lexer::header { @@ -182,7 +182,9 @@ tokens { def setCardState(self, name): self._state.card = name } - +// The line below throws: +// error(50): /home/robert/magictg/demystify/demystify/grammar/Demystify.g:186:28: syntax error: '->' came as a complete surprise to me while looking for rule element +// error(50): /home/robert/magictg/demystify/demystify/grammar/Demystify.g:186:31: syntax error: '^' came as a complete surprise to me card_mana_cost : mc_symbols -> ^( COST mc_symbols ); // Literals used in parsing rules don't have to be declared, diff --git a/demystify/test.py 
b/demystify/test.py index d3b0aac..aa998ab 100644 --- a/demystify/test.py +++ b/demystify/test.py @@ -24,7 +24,7 @@ import re import unittest -import antlr3 +import antlr4 from grammar import DemystifyLexer, DemystifyParser @@ -63,11 +63,11 @@ def compare(self, a, b): def _token_stream(name, text): """ Helper method for generating a token stream from text. """ - char_stream = antlr3.ANTLRStringStream(text) + char_stream = antlr4.InputStream(text) lexer = DemystifyLexer.DemystifyLexer(char_stream) lexer.card = name # tokenizes completely and logs on errors - return antlr3.CommonTokenStream(lexer) + return antlr4.CommonTokenStream(lexer) def parse_text(name, rule, text): ts = _token_stream(name, text)