@@ -57,6 +57,26 @@ def [](byteindex)
       end
     end

+    # This represents all of the tokens coming back from the lexer. It
+    # replaces a simple array so that it can keep track of the last token
+    # deleted from the list, which allows for better error messages.
+    class TokenList < SimpleDelegator
+      attr_reader :last_deleted
+
+      def initialize(object)
+        super
+        @last_deleted = nil
+      end
+
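+      # Array#delete returns the object it removed, or nil when the value is
+      # not found, so the previous last_deleted is kept in that case.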
+      def delete(value)
+        @last_deleted = super || @last_deleted
+      end
+
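+      # Array#delete_at returns the object removed from the given index, or
+      # nil when the index is out of range.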
+      def delete_at(index)
+        @last_deleted = super
+      end
+    end
+
     # [String] the source being parsed
     attr_reader :source

@@ -124,7 +144,7 @@ def initialize(source, *)
       # Most of the time, when a parser event consumes one of these events, it
       # will be deleted from the list. So ideally, this list stays pretty short
       # over the course of parsing a source string.
-      @tokens = []
+      @tokens = TokenList.new([])

       # Here we're going to build up a list of SingleByteString or
       # MultiByteString objects. They're each going to represent a string in the
@@ -174,6 +194,33 @@ def current_column
       line[column].to_i - line.start
     end

+    # Returns the current location being looked at by the parser, as a
+    # [lineno, column] pair, for the purpose of locating the error.
+    def find_token_error(location)
+      if location
+        # If we explicitly passed a location into this find_token_error
+        # method, that means that's the source of the error, so we'll use
+        # that information for our error object.
+        lineno = location.start_line
+        [lineno, location.start_char - line_counts[lineno - 1].start]
+      elsif lineno && column
+        # If there is a line number associated with the current ripper state,
+        # then we'll use that information to generate the error.
+        [lineno, column]
+      elsif (location = tokens.last_deleted&.location)
+        # If we've already deleted a token from the list of tokens that we
+        # are consuming, then we'll fall back to that token's location.
+        lineno = location.start_line
+        [lineno, location.start_char - line_counts[lineno - 1].start]
+      else
+        # Finally, it's possible that when we hit this error the parsing
+        # thread for ripper has died. In that case, lineno and column both
+        # return nil. So we're just going to set it to line 1, column 0 in
+        # the hopes that that makes any sense.
+        [1, 0]
+      end
+    end
+
     # As we build up a list of tokens, we'll periodically need to go backwards
     # and find the ones that we've already hit in order to determine the
     # location information for nodes that use them. For example, if you have a
@@ -201,14 +248,7 @@ def find_token(type, value = :any, consume: true, location: nil)
       unless index
         token = value == :any ? type.name.split("::", 2).last : value
         message = "Cannot find expected #{token}"
-
-        if location
-          lineno = location.start_line
-          column = location.start_char - line_counts[lineno - 1].start
-          raise ParseError.new(message, lineno, column)
-        else
-          raise ParseError.new(message, lineno, column)
-        end
+        raise ParseError.new(message, *find_token_error(location))
       end

       tokens.delete_at(index)
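
For illustration only (not part of the diff): a minimal sketch of how the new TokenList behaves, assuming the class above is in scope and using a hypothetical Token struct as a stand-in for real lexer tokens.

    require "delegate"

    # Hypothetical stand-in for the token objects the parser stores.
    Token = Struct.new(:location)

    tokens = TokenList.new([])
    tokens << Token.new("1:0")   # ordinary Array calls pass through SimpleDelegator
    tokens.delete_at(0)          # removes the token and records it as last_deleted
    tokens.last_deleted          # => the removed token, used as a fallback by find_token_error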