 import time
 from abc import abstractmethod
-from typing import Optional, List
+from typing import Optional, List, Union
 
 from antlr4 import CommonTokenStream, InputStream, Lexer, Parser, ParserATNSimulator, ParserRuleContext, \
     PredictionContextCache, Recognizer, Token, TokenStream
@@ -22,8 +22,8 @@ class PylasuANTLRParser:
     def __init__(self):
         self.prediction_context_cache = PredictionContextCache()
 
-    def parse(self, input_stream: InputStream, consider_range: bool = True, measure_lexing_time: bool = False,
-              source: Optional[Source] = None):
+    def parse(self, input_stream: Union[InputStream, str], consider_range: bool = True,
+              measure_lexing_time: bool = False, source: Optional[Source] = None):
         """Parses source code, returning a result that includes an AST and a collection of parse issues
         (errors, warnings).
         The parsing is done in accordance to the StarLasu methodology i.e. a first-stage parser builds a parse tree
@@ -34,6 +34,8 @@ def parse(self, input_stream: InputStream, consider_range: bool = True, measure_
         @param measureLexingTime if true, the result will include a measurement of the time spent in lexing i.e.
         breaking the input stream into tokens."""
         start = time.time_ns()
+        if type(input_stream) is str:
+            input_stream = InputStream(input_stream)
         first_stage = self.parse_first_stage(input_stream, measure_lexing_time)
         issues = first_stage.issues
         ast = self.parse_tree_to_ast(first_stage.root, consider_range, issues, source)
@@ -120,12 +122,12 @@ def create_token_stream(self, lexer: Lexer) -> TokenStream:
         return CommonTokenStream(lexer)
 
     @abstractmethod
-    def create_antlr_lexer(self, input_stream: InputStream):
+    def create_antlr_lexer(self, input_stream: InputStream) -> Lexer:
         """Creates the lexer."""
         pass
 
     @abstractmethod
-    def create_antlr_parser(self, token_stream: TokenStream):
+    def create_antlr_parser(self, token_stream: TokenStream) -> Parser:
         """Creates the first-stage parser."""
         pass
 
@@ -147,4 +149,5 @@ def syntaxError(self, recognizer, offending_symbol, line, column, msg, e):
         end_point = start_point
         if isinstance(offending_symbol, Token):
             end_point = token_end_point(offending_symbol)
-        self.issues.append(Issue(IssueType.SYNTACTIC, msg or "unspecified", position=Position(start_point, end_point)))
+        msg = (msg or "unspecified").capitalize()
+        self.issues.append(Issue(IssueType.SYNTACTIC, msg, position=Position(start_point, end_point)))
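
For readers trying this change out, a minimal usage sketch follows. The subclass and the MyLexer/MyParser names are hypothetical stand-ins for ANTLR-generated classes and are not part of this commit; the point is only that parse() now also accepts a plain string and wraps it in an InputStream itself before first-stage parsing.

    from antlr4 import InputStream, Lexer, Parser, TokenStream

    class MyPylasuParser(PylasuANTLRParser):
        def create_antlr_lexer(self, input_stream: InputStream) -> Lexer:
            return MyLexer(input_stream)    # hypothetical ANTLR-generated lexer

        def create_antlr_parser(self, token_stream: TokenStream) -> Parser:
            return MyParser(token_stream)   # hypothetical ANTLR-generated parser

    parser = MyPylasuParser()
    # Both calls are now equivalent: the string form is converted to an
    # InputStream inside parse(); the result bundles the AST and any issues.
    result_from_str = parser.parse("1 + 2")
    result_from_stream = parser.parse(InputStream("1 + 2"))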