 * @class TokenStreamBase
 * @namespace parserlib.util
 * @constructor
- * @param {String|StringReader} input The text to tokenize or a reader from
+ * @param {String|StringReader} input The text to tokenize or a reader from
 * which to read the input.
 */
function TokenStreamBase(input, tokenData){
@@ -17,39 +17,39 @@ function TokenStreamBase(input, tokenData){
     * @private
     */
    this._reader = input ? new StringReader(input.toString()) : null;
-
+
    /**
     * Token object for the last consumed token.
     * @type Token
     * @property _token
     * @private
     */
-    this._token = null;
-
+    this._token = null;
+
    /**
     * The array of token information.
     * @type Array
     * @property _tokenData
     * @private
     */
    this._tokenData = tokenData;
-
+
    /**
     * Lookahead token buffer.
     * @type Array
     * @property _lt
     * @private
     */
    this._lt = [];
-
+
    /**
     * Lookahead token buffer index.
     * @type int
     * @property _ltIndex
     * @private
     */
    this._ltIndex = 0;
-
+
    this._ltIndexCache = [];
}
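// Usage sketch (illustrative, not from this file): TokenStreamBase is meant to be
// subclassed; the subclass provides a _getToken() method that reads characters from
// this._reader and returns a token object with at least a numeric type plus position
// fields such as startLine/startCol. MyTokenStream and its token list are hypothetical.
function MyTokenStream(input){
    TokenStreamBase.call(this, input, MyTokenStream.tokenData);
}
MyTokenStream.tokenData = TokenStreamBase.createTokenData([
    { name: "IDENT" },
    { name: "COMMA", text: "," }
]);
MyTokenStream.prototype = new TokenStreamBase();
MyTokenStream.prototype._getToken = function(){
    // read from this._reader and build the next token object here
};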
@@ -69,7 +69,7 @@ TokenStreamBase.createTokenData = function(tokens){
        tokenData = tokens.concat([]),
        i = 0,
        len = tokenData.length + 1;
-
+
    tokenData.UNKNOWN = -1;
    tokenData.unshift({ name: "EOF" });
@@ -80,27 +80,27 @@ TokenStreamBase.createTokenData = function(tokens){
            typeMap[tokenData[i].text] = i;
        }
    }
-
+
    tokenData.name = function(tt){
        return nameMap[tt];
    };
-
+
    tokenData.type = function(c){
        return typeMap[c];
    };
-
+
    return tokenData;
};
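// Usage sketch (illustrative): createTokenData() copies the token list, adds an EOF
// entry at index 0, and attaches name()/type() lookup helpers plus an UNKNOWN (-1)
// constant. The token names below are hypothetical.
var tokenData = TokenStreamBase.createTokenData([
    { name: "IDENT" },
    { name: "COMMA", text: "," }
]);
tokenData.name(1);   // "IDENT" (index 0 is the generated EOF entry)
tokenData.type(","); // 2, resolved through the text-to-type map
tokenData.UNKNOWN;   // -1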
TokenStreamBase.prototype = {

    //restore constructor
-    constructor: TokenStreamBase,
-
+    constructor: TokenStreamBase,
+
    //-------------------------------------------------------------------------
    // Matching methods
    //-------------------------------------------------------------------------
-
+
    /**
     * Determines if the next token matches the given token type.
     * If so, that token is consumed; if not, the token is placed
@@ -116,27 +116,27 @@ TokenStreamBase.prototype = {
     * @method match
     */
    match: function(tokenTypes, channel){
-
+
        //always convert to an array, makes things easier
        if (!(tokenTypes instanceof Array)){
            tokenTypes = [tokenTypes];
        }
-
+
        var tt = this.get(channel),
            i = 0,
            len = tokenTypes.length;
-
+
        while (i < len){
            if (tt == tokenTypes[i++]){
                return true;
            }
        }
-
+
        //no match found, put the token back
        this.unget();
        return false;
-    },
-
+    },
+
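// Usage sketch (illustrative): match() consumes the next token when it is one of the
// listed types and returns true; otherwise it puts the token back and returns false.
// tokenStream and the Tokens.* constants are hypothetical.
if (tokenStream.match([Tokens.COMMA, Tokens.S])) {
    // separator consumed, keep parsing the next value
} else {
    // nothing consumed; the same token is still the next one in the stream
}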
    /**
     * Determines if the next token matches the given token type.
     * If so, that token is consumed; if not, an error is thrown.
@@ -147,7 +147,7 @@ TokenStreamBase.prototype = {
     * provided, reads from the default (unnamed) channel.
     * @return {void}
     * @method mustMatch
-     */
+     */
    mustMatch: function(tokenTypes, channel){

        var token;
@@ -157,17 +157,17 @@ TokenStreamBase.prototype = {
            tokenTypes = [tokenTypes];
        }

-        if (!this.match.apply(this, arguments)){
+        if (!this.match.apply(this, arguments)){
            token = this.LT(1);
-            throw new SyntaxError("Expected " + this._tokenData[tokenTypes[0]].name +
+            throw new SyntaxError("Expected " + this._tokenData[tokenTypes[0]].name +
                " at line " + token.startLine + ", col " + token.startCol + ".", token.startLine, token.startCol);
        }
    },
-
+
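// Usage sketch (illustrative): mustMatch() throws when the expected type is not next,
// embedding the token's name and position in the message, so callers usually let the
// error propagate or report it. Tokens.RBRACE is a hypothetical constant.
try {
    tokenStream.mustMatch(Tokens.RBRACE);
} catch (ex) {
    console.error(ex.message); // e.g. "Expected RBRACE at line 3, col 12."
}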
    //-------------------------------------------------------------------------
    // Consuming methods
    //-------------------------------------------------------------------------
-
+
    /**
     * Keeps reading from the token stream until either one of the specified
     * token types is found or until the end of the input is reached.
@@ -180,21 +180,21 @@ TokenStreamBase.prototype = {
     * @method advance
     */
    advance: function(tokenTypes, channel){
-
+
        while (this.LA(0) !== 0 && !this.match(tokenTypes, channel)){
            this.get();
        }

-        return this.LA(0);
+        return this.LA(0);
    },
-
+
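// Usage sketch (illustrative): advance() is handy for error recovery — it discards
// tokens until one of the given types is consumed or the end of input (type 0) is hit,
// and returns the type it stopped on. The Tokens.* constants are hypothetical.
var tt = tokenStream.advance([Tokens.SEMICOLON, Tokens.RBRACE]);
if (tt === 0) {
    // reached EOF without finding a recovery point
}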
    /**
-     * Consumes the next token from the token stream.
+     * Consumes the next token from the token stream.
     * @return {int} The token type of the token that was just consumed.
     * @method get
-     */
+     */
    get: function(channel){
-
+
        var tokenInfo = this._tokenData,
            reader = this._reader,
            value,
@@ -203,76 +203,76 @@ TokenStreamBase.prototype = {
            found = false,
            token,
            info;
-
+
        //check the lookahead buffer first
-        if (this._lt.length && this._ltIndex >= 0 && this._ltIndex < this._lt.length){
-
+        if (this._lt.length && this._ltIndex >= 0 && this._ltIndex < this._lt.length){
+
            i++;
            this._token = this._lt[this._ltIndex++];
            info = tokenInfo[this._token.type];
-
+
            //obey channels logic
            while((info.channel !== undefined && channel !== info.channel) &&
                    this._ltIndex < this._lt.length){
                this._token = this._lt[this._ltIndex++];
                info = tokenInfo[this._token.type];
                i++;
            }
-
+
            //here be dragons
            if ((info.channel === undefined || channel === info.channel) &&
                    this._ltIndex <= this._lt.length){
                this._ltIndexCache.push(i);
                return this._token.type;
            }
        }
-
+
        //call token retriever method
        token = this._getToken();

        //if it should be hidden, don't save a token
        if (token.type > -1 && !tokenInfo[token.type].hide){
-
+
            //apply token channel
            token.channel = tokenInfo[token.type].channel;
-
+
            //save for later
            this._token = token;
            this._lt.push(token);

            //save space that will be moved (must be done before array is truncated)
-            this._ltIndexCache.push(this._lt.length - this._ltIndex + i);
-
+            this._ltIndexCache.push(this._lt.length - this._ltIndex + i);
+
            //keep the buffer under 5 items
            if (this._lt.length > 5){
-                this._lt.shift();
+                this._lt.shift();
            }
-
+
            //also keep the shift buffer under 5 items
            if (this._ltIndexCache.length > 5){
                this._ltIndexCache.shift();
            }
-
+
            //update lookahead index
            this._ltIndex = this._lt.length;
        }
-
+
        /*
         * Skip to the next token if:
         * 1. The token type is marked as hidden.
         * 2. The token type has a channel specified and it isn't the current channel.
         */
        info = tokenInfo[token.type];
-        if (info &&
-                (info.hide ||
+        if (info &&
+                (info.hide ||
                (info.channel !== undefined && channel !== info.channel))){
            return this.get(channel);
        } else {
            //return just the type
            return token.type;
        }
    },
-
+
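// Usage sketch (illustrative): get() returns only the numeric token type; the full
// token object that was just consumed is available afterwards through token().
// Hidden token types are skipped, and channel-tagged types are skipped unless the
// matching channel is passed in. COMMENT_CHANNEL is a hypothetical channel value.
var tt  = tokenStream.get();            // next token type from the default channel
var tok = tokenStream.token();          // object with type, startLine, startCol, ...
// tokenStream.get(COMMENT_CHANNEL);    // would also return tokens routed to that channel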
    /**
     * Looks ahead a certain number of tokens and returns the token type at
     * that position. This will throw an error if you lookahead past the
@@ -291,34 +291,34 @@ TokenStreamBase.prototype = {
            if (index > 5){
                throw new Error("Too much lookahead.");
            }
-
+
            //get all those tokens
            while(total){
-                tt = this.get();
-                total--;
+                tt = this.get();
+                total--;
            }
-
+
            //unget all those tokens
            while(total < index){
                this.unget();
                total++;
            }
        } else if (index < 0){
-
+
            if (this._lt[this._ltIndex + index]){
                tt = this._lt[this._ltIndex + index].type;
            } else {
                throw new Error("Too much lookbehind.");
            }
-
+
        } else {
            tt = this._token.type;
        }
-
+
        return tt;
-
+
    },
-
+
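// Usage sketch (illustrative): LA() reports the type N tokens ahead without changing
// the stream position; it works by consuming and ungetting through the 5-item buffer,
// so asking for more than 5 tokens of lookahead throws. The Tokens.* constants are
// hypothetical.
if (tokenStream.LA(1) === Tokens.IDENT && tokenStream.LA(2) === Tokens.COLON) {
    // two-token lookahead, e.g. to detect a declaration before committing to it
}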
    /**
     * Looks ahead a certain number of tokens and returns the token at
     * that position. This will throw an error if you lookahead past the
@@ -328,26 +328,26 @@ TokenStreamBase.prototype = {
     * current token, 1 for the next, -1 for the previous, etc.
     * @return {Object} The token at the given position.
     * @method LT
-     */
+     */
    LT: function(index){
-
+
        //lookahead first to prime the token buffer
        this.LA(index);
-
+
        //now find the token, subtract one because _ltIndex is already at the next index
-        return this._lt[this._ltIndex + index - 1];
+        return this._lt[this._ltIndex + index - 1];
    },
-
+
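// Usage sketch (illustrative): LT() is the object-returning counterpart of LA(), useful
// when an upcoming token's position is needed for an error message.
var next = tokenStream.LT(1);
// next.type, next.startLine, next.startCol — the same fields mustMatch() reads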
    /**
-     * Returns the token type for the next token in the stream without
+     * Returns the token type for the next token in the stream without
     * consuming it.
     * @return {int} The token type of the next token in the stream.
     * @method peek
     */
    peek: function(){
        return this.LA(1);
    },
-
+
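// Usage sketch (illustrative): peek() is shorthand for LA(1), and token() exposes the
// object for whatever was consumed last. The EOF type is 0 because createTokenData()
// puts the EOF entry at index 0.
while (tokenStream.peek() !== 0) {
    tokenStream.get();
    var last = tokenStream.token(); // full token object just consumed
}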
    /**
     * Returns the actual token object for the last consumed token.
     * @return {Token} The token object for the last consumed token.
@@ -356,7 +356,7 @@ TokenStreamBase.prototype = {
    token: function(){
        return this._token;
    },
-
+
    /**
     * Returns the name of the token for the given token type.
     * @param {int} tokenType The type of token to get the name of.
@@ -371,22 +371,22 @@ TokenStreamBase.prototype = {
            return this._tokenData[tokenType].name;
        }
    },
-
+
    /**
     * Returns the token type value for the given token name.
     * @param {String} tokenName The name of the token whose value should be returned.
     * @return {int} The token type value for the given token name or -1
     * for an unknown token.
     * @method tokenType
-     */
+     */
    tokenType: function(tokenName){
        return this._tokenData[tokenName] || -1;
    },
-
+
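// Usage sketch (illustrative): tokenName() and tokenType() translate between numeric
// type values and token names. The round trip below assumes "IDENT" was one of the
// names registered through createTokenData(); unknown names come back as -1.
var tt   = tokenStream.tokenType("IDENT"); // numeric type, or -1 if unknown
var name = tokenStream.tokenName(tt);      // "IDENT" again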
    /**
     * Returns the last consumed token to the token stream.
     * @method unget
-     */
+     */
    unget: function(){
        //if (this._ltIndex > -1){
        if (this._ltIndexCache.length){