1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
|
--[[--------------------------------------------------------------------
llex.lua
Lua lexical analyzer in Lua
This file is part of Yueliang.
Copyright (c) 2005-2006 Kein-Hong Man <khman@users.sf.net>
The COPYRIGHT file describes the conditions
under which this software may be distributed.
See the ChangeLog for more information.
----------------------------------------------------------------------]]
--[[--------------------------------------------------------------------
-- Notes:
-- * intended to 'imitate' llex.c code; performance is not a concern
-- * tokens are strings; code structure largely retained
-- * deleted stuff (compared to llex.c) are noted, comments retained
-- * nextc() returns the currently read character to simplify coding
-- here; next() in llex.c does not return anything
-- * compatibility code is marked with "--#" comments
--
-- Added:
-- * luaX:chunkid (function luaO_chunkid from lobject.c)
-- * luaX:str2d (function luaO_str2d from lobject.c)
-- * luaX.LUA_QS used in luaX:lexerror (from luaconf.h)
-- * luaX.LUA_COMPAT_LSTR in luaX:read_long_string (from luaconf.h)
-- * luaX.MAX_INT used in luaX:inclinenumber (from llimits.h)
--
-- To use the lexer:
-- (1) luaX:init() to initialize the lexer
-- (2) luaX:setinput() to set the input stream to lex
-- (3) call luaX:next() or luaX:lookahead() to get tokens,
-- until "TK_EOS": luaX:next()
-- * since EOZ is returned as a string, be careful when regexp testing
--
-- Not implemented:
-- * luaX_newstring: not required by this Lua implementation
-- * buffer MAX_SIZET size limit (from llimits.h) test not implemented
-- in the interest of performance
-- * locale-aware number handling is largely redundant as Lua's
-- tonumber() function is already capable of this
--
-- Changed in 5.1.x:
-- * TK_NAME token order moved down
-- * string representation for TK_NAME, TK_NUMBER, TK_STRING changed
-- * token struct renamed to lower case (LS -> ls)
-- * LexState struct: removed nestlevel, added decpoint
-- * error message functions have been greatly simplified
-- * token2string renamed to luaX_tokens, exposed in llex.h
-- * lexer now handles all kinds of newlines, including CRLF
-- * shbang first line handling removed from luaX:setinput;
-- it is now done in lauxlib.c (luaL_loadfile)
-- * next(ls) macro renamed to nextc(ls) due to new luaX_next function
-- * EXTRABUFF and MAXNOCHECK removed due to lexer changes
-- * checkbuffer(ls, len) macro deleted
-- * luaX:read_numeral now has 3 support functions: luaX:trydecpoint,
-- luaX:buffreplace and (luaO_str2d from lobject.c) luaX:str2d
-- * luaX:read_numeral is now more promiscuous in slurping characters;
-- hexadecimal numbers were added, locale-aware decimal points too
-- * luaX:skip_sep is new; used by luaX:read_long_string
-- * luaX:read_long_string handles new-style long blocks, with some
-- optional compatibility code
-- * luaX:llex: parts changed to support new-style long blocks
-- * luaX:llex: readname functionality has been folded in
-- * luaX:llex: removed test for control characters
----------------------------------------------------------------------]]
luaX = {}
-- FIRST_RESERVED is not required as tokens are manipulated as strings
-- TOKEN_LEN deleted; maximum length of a reserved word not needed
------------------------------------------------------------------------
-- "ORDER RESERVED" deleted; enumeration in one place: luaX.RESERVED
------------------------------------------------------------------------
-- terminal symbols denoted by reserved words: TK_AND to TK_WHILE
-- other terminal symbols: TK_NAME to TK_EOS
-- NOTE: each line below is "<TK_name> <token string>"; luaX:init parses
-- this listing into the luaX.tokens and luaX.enums lookup tables
luaX.RESERVED = [[
TK_AND and
TK_BREAK break
TK_DO do
TK_ELSE else
TK_ELSEIF elseif
TK_END end
TK_FALSE false
TK_FOR for
TK_FUNCTION function
TK_IF if
TK_IN in
TK_LOCAL local
TK_NIL nil
TK_NOT not
TK_OR or
TK_REPEAT repeat
TK_RETURN return
TK_THEN then
TK_TRUE true
TK_UNTIL until
TK_WHILE while
TK_CONCAT ..
TK_DOTS ...
TK_EQ ==
TK_GE >=
TK_LE <=
TK_NE ~=
TK_NAME <name>
TK_NUMBER <number>
TK_STRING <string>
TK_EOS <eof>]]
-- NUM_RESERVED is not required; number of reserved words
--[[--------------------------------------------------------------------
-- Instead of passing seminfo, the Token struct (e.g. ls.t) is passed
-- so that lexer functions can use its table element, ls.t.seminfo
--
-- SemInfo (struct no longer needed, a mixed-type value is used)
--
-- Token (struct of ls.t and ls.lookahead):
-- token -- token symbol
-- seminfo -- semantics information
--
-- LexState (struct of ls; ls is initialized by luaX:setinput):
-- current -- current character (charint)
-- linenumber -- input line counter
-- lastline -- line of last token 'consumed'
-- t -- current token (table: struct Token)
-- lookahead -- look ahead token (table: struct Token)
-- fs -- 'FuncState' is private to the parser
-- L -- LuaState
-- z -- input stream
-- buff -- buffer for tokens
-- source -- current source name
-- decpoint -- locale decimal point
-- nestlevel -- level of nested non-terminals (5.0 field; removed in 5.1)
----------------------------------------------------------------------]]
-- luaX.tokens (was luaX_tokens) is now a hash; see luaX:init
luaX.MAXSRC = 80 -- maximum length of a chunk id (see luaX:chunkid)
luaX.MAX_INT = 2147483645 -- constants from elsewhere (see above)
luaX.LUA_QS = "'%s'" -- quoting format used by lexerror (from luaconf.h)
luaX.LUA_COMPAT_LSTR = 1 -- 1: error on deprecated [[ nesting; 2: accept nesting
--luaX.MAX_SIZET = 4294967293
------------------------------------------------------------------------
-- initialize lexer
-- * original luaX_init has code to create and register token strings
-- * luaX.tokens: TK_* -> token
-- * luaX.enums: token -> TK_* (used in luaX:llex)
------------------------------------------------------------------------
function luaX:init()
  -- parse the RESERVED listing once; each line is "<TK_name> <string>"
  local tokens, enums = {}, {}
  for line in string.gmatch(self.RESERVED, "[^\n]+") do
    local tok, str = string.match(line, "(%S+)%s+(%S+)")
    tokens[tok] = str  -- TK_* name -> printable token
    enums[str] = tok   -- printable token -> TK_* name
  end
  self.tokens = tokens
  self.enums = enums
end
------------------------------------------------------------------------
-- returns a suitably-formatted chunk name or id
-- * from lobject.c, used in llex.c and ldebug.c
-- * the result, out, is returned (was first argument)
------------------------------------------------------------------------
-- Format 'source' into a display id at most (roughly) bufflen chars:
--   "=name"  -> "name" (literal name, leading '=' stripped)
--   "@file"  -> "file", or "...tail-of-file" when the name is too long
--   other    -> [string "first line..."], truncated at the first newline
--               or at the length limit, whichever comes first
function luaX:chunkid(source, bufflen)
local out
local first = string.sub(source, 1, 1)
if first == "=" then
out = string.sub(source, 2, bufflen) -- remove first char
else -- out = "source", or "...source"
if first == "@" then
source = string.sub(source, 2) -- skip the '@'
-- reserve room for the "..." prefix (the C code uses this literal width)
bufflen = bufflen - #" '...' "
local l = #source
out = ""
if l > bufflen then
source = string.sub(source, 1 + l - bufflen) -- get last part of file name
out = out.."..."
end
out = out..source
else -- out = [string "string"]
local len = string.find(source, "[\n\r]") -- stop at first newline
len = len and (len - 1) or #source
-- reserve room for the [string "..."] decoration
bufflen = bufflen - #(" [string \"...\"] ")
if len > bufflen then len = bufflen end
out = "[string \""
if len < #source then -- must truncate?
out = out..string.sub(source, 1, len).."..."
else
out = out..source
end
out = out.."\"]"
end
end
return out
end
--[[--------------------------------------------------------------------
-- Support functions for lexer
-- * all lexer errors eventually reach lexerror:
syntaxerror -> lexerror
----------------------------------------------------------------------]]
------------------------------------------------------------------------
-- look up token and return keyword if found (also called by parser)
------------------------------------------------------------------------
function luaX:token2str(ls, token)
  -- reserved-word/typed tokens ("TK_*") map through the tokens table
  if string.sub(token, 1, 3) == "TK_" then
    return self.tokens[token]
  end
  -- otherwise the token is a literal character; show control chars numerically
  if string.find(token, "%c") then
    return string.format("char(%d)", string.byte(token))
  end
  return token
end
------------------------------------------------------------------------
-- throws a lexer error
-- * txtToken has been made local to luaX:lexerror
-- * can't communicate LUA_ERRSYNTAX, so it is unimplemented
------------------------------------------------------------------------
function luaX:lexerror(ls, msg, token)
  -- tokens that carry text (names, strings, numbers) are shown via the
  -- raw token buffer; anything else is shown via its symbol/string form
  local function txtToken(ls, token)
    if token == "TK_NAME" or token == "TK_STRING" or token == "TK_NUMBER" then
      return ls.buff
    end
    return self:token2str(ls, token)
  end
  local chunkname = self:chunkid(ls.source, self.MAXSRC)
  local errmsg = string.format("%s:%d: %s", chunkname, ls.linenumber, msg)
  if token then
    errmsg = string.format("%s near "..self.LUA_QS, errmsg, txtToken(ls, token))
  end
  -- luaD_throw(ls->L, LUA_ERRSYNTAX)
  error(errmsg)
end
------------------------------------------------------------------------
-- throws a syntax error (mainly called by parser)
-- * ls.t.token has to be set by the function calling luaX:llex
-- (see luaX:next and luaX:lookahead elsewhere in this file)
------------------------------------------------------------------------
function luaX:syntaxerror(ls, msg)
  -- report against the token currently held by the parser (ls.t.token)
  return self:lexerror(ls, msg, ls.t.token)
end
------------------------------------------------------------------------
-- move on to next line
------------------------------------------------------------------------
function luaX:currIsNewline(ls)
  -- true while the current character is either kind of line break
  local c = ls.current
  return c == "\n" or c == "\r"
end
function luaX:inclinenumber(ls)
  -- consume "\n", "\r", "\n\r" or "\r\n" as a single line break and
  -- bump the line counter, erroring out past MAX_INT lines
  local first = ls.current
  -- lua_assert(currIsNewline(ls))
  self:nextc(ls) -- skip '\n' or '\r'
  if self:currIsNewline(ls) and ls.current ~= first then
    self:nextc(ls) -- two-character sequence: skip the partner character too
  end
  ls.linenumber = ls.linenumber + 1
  if ls.linenumber >= self.MAX_INT then
    self:syntaxerror(ls, "chunk has too many lines")
  end
end
------------------------------------------------------------------------
-- initializes an input stream for lexing
-- * if ls (the lexer state) is passed as a table, then it is filled in,
-- otherwise it has to be retrieved as a return value
-- * LUA_MINBUFFER not used; buffer handling not required any more
------------------------------------------------------------------------
function luaX:setinput(L, ls, z, source)
  -- Initialize lexer state 'ls' for input stream 'z' named 'source'.
  -- If 'ls' is nil a fresh state table is created; it is returned in
  -- either case (the header comment above promises this, but the code
  -- previously returned nothing, losing a freshly created state).
  if not ls then ls = {} end -- create struct
  if not ls.lookahead then ls.lookahead = {} end
  if not ls.t then ls.t = {} end
  ls.decpoint = "." -- default decimal point (no localeconv here)
  ls.L = L
  ls.lookahead.token = "TK_EOS" -- no look-ahead token
  ls.z = z
  ls.fs = nil
  ls.linenumber = 1
  ls.lastline = 1
  ls.source = source
  self:nextc(ls) -- read first char
  return ls -- allow retrieval when the caller did not supply 'ls'
end
--[[--------------------------------------------------------------------
-- LEXICAL ANALYZER
----------------------------------------------------------------------]]
------------------------------------------------------------------------
-- checks if current character read is found in the set 'set'
------------------------------------------------------------------------
function luaX:check_next(ls, set)
  -- plain find (4th argument true): 'set' is a literal character list,
  -- not a pattern, so magic characters like '-' are safe
  if string.find(set, ls.current, 1, 1) then
    self:save_and_next(ls)
    return true
  end
  return false
end
------------------------------------------------------------------------
-- retrieve next token, checking the lookahead buffer if necessary
-- * note that the macro next(ls) in llex.c is now luaX:nextc
-- * used in lparser.c (various places)
------------------------------------------------------------------------
function luaX:next(ls)
  ls.lastline = ls.linenumber
  if ls.lookahead.token == "TK_EOS" then
    -- no buffered look-ahead: lex a fresh token into ls.t
    ls.t.token = self:llex(ls, ls.t)
  else
    -- discharge the buffered look-ahead token (field-by-field copy)
    ls.t.seminfo = ls.lookahead.seminfo
    ls.t.token = ls.lookahead.token
    ls.lookahead.token = "TK_EOS"
  end
end
------------------------------------------------------------------------
-- fill in the lookahead buffer
-- * used in lparser.c:constructor
------------------------------------------------------------------------
function luaX:lookahead(ls)
  -- lua_assert(ls.lookahead.token == "TK_EOS")
  -- lex one token ahead into the lookahead slot without consuming it
  local tok = self:llex(ls, ls.lookahead)
  ls.lookahead.token = tok
end
------------------------------------------------------------------------
-- gets the next character and returns it
-- * this is the next() macro in llex.c; see notes at the beginning
------------------------------------------------------------------------
function luaX:nextc(ls)
  -- advance the stream; remember and hand back the character read
  ls.current = luaZ:zgetc(ls.z)
  return ls.current
end
------------------------------------------------------------------------
-- saves the given character into the token buffer
-- * buffer handling code removed, not used in this implementation
-- * test for maximum token buffer length not used, makes things faster
------------------------------------------------------------------------
function luaX:save(ls, c)
  -- append 'c' to the token buffer; the MAX_SIZET overflow check from
  -- llex.c is intentionally omitted for speed
  -- if you want to use this, please uncomment luaX.MAX_SIZET further up
  --if #ls.buff > self.MAX_SIZET then
  --  self:lexerror(ls, "lexical element too long")
  --end
  ls.buff = ls.buff..c
end
------------------------------------------------------------------------
-- save current character into token buffer, grabs next character
-- * like luaX:nextc, returns the character read for convenience
------------------------------------------------------------------------
function luaX:save_and_next(ls)
  -- buffer the current character, then return the next one read
  local c = ls.current
  self:save(ls, c)
  return self:nextc(ls)
end
------------------------------------------------------------------------
-- LUA_NUMBER
-- * luaX:read_numeral is the main lexer function to read a number
-- * luaX:str2d, luaX:buffreplace, luaX:trydecpoint are support functions
------------------------------------------------------------------------
------------------------------------------------------------------------
-- string to number converter (was luaO_str2d from lobject.c)
-- * returns the number, nil if fails (originally returns a boolean)
-- * conversion function originally lua_str2number(s,p), a macro which
-- maps to the strtod() function by default (from luaconf.h)
------------------------------------------------------------------------
function luaX:str2d(s)
  -- common case: tonumber handles decimal (and standard hex) directly
  local n = tonumber(s)
  if n ~= nil then return n end
  -- retry with an explicit base for "0x"-prefixed constants
  -- (assumes 5.1-era tonumber/strtoul accepts the prefix with base 16)
  if string.lower(string.sub(s, 1, 2)) == "0x" then
    n = tonumber(s, 16)
    if n ~= nil then return n end
    -- the C original then skips trailing spaces and rejects any other
    -- trailing characters; tonumber already performs that validation
  end
  return nil -- conversion failed
end
------------------------------------------------------------------------
-- single-character replacement, for locale-aware decimal points
------------------------------------------------------------------------
function luaX:buffreplace(ls, from, to)
  -- Replace every occurrence of the single character 'from' with the
  -- single character 'to' in the token buffer (used to swap '.' and the
  -- locale decimal point). Characters are compared literally, so
  -- pattern-magic characters need no escaping.
  if from == to then return end -- no-op: result would equal the input
  local buff = ls.buff
  -- build in a table and concat once: the previous version rebuilt the
  -- string with '..' per character, which is O(n^2)
  local pieces = {}
  for p = 1, #buff do
    local c = string.sub(buff, p, p)
    pieces[p] = (c == from) and to or c
  end
  ls.buff = table.concat(pieces)
end
------------------------------------------------------------------------
-- Attempt to convert a number by translating '.' decimal points to
-- the decimal point character used by the current locale. This is not
-- needed in Yueliang as Lua's tonumber() is already locale-aware.
-- Instead, the code is here in case the user implements localeconv().
------------------------------------------------------------------------
-- Called after str2d fails: retry conversion assuming the failure came
-- from a locale-specific decimal separator. As shipped this is a stub:
-- localeconv() is not implemented, so ls.decpoint never changes and the
-- first buffreplace below is a no-op kept for structural fidelity with
-- llex.c; the error path still restores '.' for a readable message.
function luaX:trydecpoint(ls, Token)
-- format error: try to update decimal point separator
local old = ls.decpoint
-- translate the following to Lua if you implement localeconv():
-- struct lconv *cv = localeconv();
-- ls->decpoint = (cv ? cv->decimal_point[0] : '.');
self:buffreplace(ls, old, ls.decpoint) -- try updated decimal separator
local seminfo = self:str2d(ls.buff)
Token.seminfo = seminfo
if not seminfo then
-- format error with correct decimal point: no more options
self:buffreplace(ls, ls.decpoint, ".") -- undo change (for error message)
self:lexerror(ls, "malformed number", "TK_NUMBER")
end
end
------------------------------------------------------------------------
-- main number conversion function
-- * "^%w$" needed in the scan in order to detect "EOZ"
------------------------------------------------------------------------
-- Lex a numeral starting at the current (digit) character into Token.
-- Deliberately promiscuous: slurps digits and '.', an optional exponent
-- with sign, then any trailing alphanumerics/underscores; str2d decides
-- validity afterwards and trydecpoint is the locale fallback.
function luaX:read_numeral(ls, Token)
-- lua_assert(string.find(ls.current, "%d"))
repeat
self:save_and_next(ls)
until string.find(ls.current, "%D") and ls.current ~= "."
if self:check_next(ls, "Ee") then -- 'E'?
self:check_next(ls, "+-") -- optional exponent sign
end
-- anchors in "^%w$" ensure the multi-char "EOZ" marker never matches
while string.find(ls.current, "^%w$") or ls.current == "_" do
self:save_and_next(ls)
end
self:buffreplace(ls, ".", ls.decpoint) -- follow locale for decimal point
local seminfo = self:str2d(ls.buff)
Token.seminfo = seminfo
if not seminfo then -- format error?
self:trydecpoint(ls, Token) -- try to update decimal point separator
end
end
------------------------------------------------------------------------
-- count separators ("=") in a long string delimiter
-- * used by luaX:read_long_string
------------------------------------------------------------------------
function luaX:skip_sep(ls)
  -- Count the '=' characters between two '[' (or two ']') brackets.
  -- Returns n >= 0 when the second bracket matches the first (n '='
  -- seen), or -n - 1 when it does not (-1 means "no '=', no match").
  local bracket = ls.current
  -- lua_assert(bracket == "[" or bracket == "]")
  self:save_and_next(ls)
  local count = 0
  while ls.current == "=" do
    count = count + 1
    self:save_and_next(ls)
  end
  if ls.current == bracket then
    return count
  end
  return -count - 1
end
------------------------------------------------------------------------
-- reads a long string or long comment
------------------------------------------------------------------------
-- Read a long string (Token ~= nil) or long comment (Token == nil).
-- 'sep' is the separator count from skip_sep; the closing delimiter
-- must carry the same count. LUA_COMPAT_LSTR controls legacy nested
-- [[...]] handling: 1 raises a deprecation error, 2 tracks nesting.
-- For comments the buffer is periodically cleared to avoid wasting
-- space, since the text is discarded anyway.
function luaX:read_long_string(ls, Token, sep)
local cont = 0 -- nesting depth, used only by the LUA_COMPAT_LSTR == 2 path
self:save_and_next(ls) -- skip 2nd '['
if self:currIsNewline(ls) then -- string starts with a newline?
self:inclinenumber(ls) -- skip it
end
while true do
local c = ls.current
if c == "EOZ" then
self:lexerror(ls, Token and "unfinished long string" or
"unfinished long comment", "TK_EOS")
elseif c == "[" then
--# compatibility code start
if self.LUA_COMPAT_LSTR then
if self:skip_sep(ls) == sep then
self:save_and_next(ls) -- skip 2nd '['
cont = cont + 1
--# compatibility code start
if self.LUA_COMPAT_LSTR == 1 then
if sep == 0 then
self:lexerror(ls, "nesting of [[...]] is deprecated", "[")
end
end
--# compatibility code end
end
end
--# compatibility code end
elseif c == "]" then
if self:skip_sep(ls) == sep then
self:save_and_next(ls) -- skip 2nd ']'
--# compatibility code start
if self.LUA_COMPAT_LSTR and self.LUA_COMPAT_LSTR == 2 then
cont = cont - 1
if sep == 0 and cont >= 0 then break end
end
--# compatibility code end
break
end
elseif self:currIsNewline(ls) then
self:save(ls, "\n") -- normalize any line-break style to "\n"
self:inclinenumber(ls)
if not Token then ls.buff = "" end -- avoid wasting space
else -- default
if Token then
self:save_and_next(ls)
else
self:nextc(ls) -- comment body: advance without buffering
end
end--if c
end--while
if Token then
-- strip the delimiters: sep '=' chars plus brackets on each side
local p = 3 + sep
Token.seminfo = string.sub(ls.buff, p, -p)
end
end
------------------------------------------------------------------------
-- reads a string
-- * has been restructured significantly compared to the original C code
------------------------------------------------------------------------
-- Read a quoted string delimited by 'del' (either '"' or "'") into
-- Token.seminfo, processing backslash escapes. The opening delimiter
-- is the current character on entry; both delimiters end up in the
-- buffer and are stripped from the final seminfo.
function luaX:read_string(ls, del, Token)
self:save_and_next(ls)
while ls.current ~= del do
local c = ls.current
if c == "EOZ" then
self:lexerror(ls, "unfinished string", "TK_EOS")
elseif self:currIsNewline(ls) then
self:lexerror(ls, "unfinished string", "TK_STRING")
elseif c == "\\" then
c = self:nextc(ls) -- do not save the '\'
if self:currIsNewline(ls) then -- go through
self:save(ls, "\n") -- escaped line break becomes "\n"
self:inclinenumber(ls)
elseif c ~= "EOZ" then -- will raise an error next loop
-- escapes handling greatly simplified here:
local i = string.find("abfnrtv", c, 1, 1)
if i then
-- named escape: map position in "abfnrtv" to the control char
self:save(ls, string.sub("\a\b\f\n\r\t\v", i, i))
self:nextc(ls)
elseif not string.find(c, "%d") then
self:save_and_next(ls) -- handles \\, \", \', and \?
else -- \xxx
-- decimal escape: up to three digits, max value 255
c, i = 0, 0
repeat
c = 10 * c + ls.current
self:nextc(ls)
i = i + 1
until i >= 3 or not string.find(ls.current, "%d")
if c > 255 then -- UCHAR_MAX
self:lexerror(ls, "escape sequence too large", "TK_STRING")
end
self:save(ls, string.char(c))
end
end
else
self:save_and_next(ls)
end--if c
end--while
self:save_and_next(ls) -- skip delimiter
Token.seminfo = string.sub(ls.buff, 2, -2)
end
------------------------------------------------------------------------
-- main lexer function
------------------------------------------------------------------------
-- Main lexer dispatch: scan one token from the stream and return its
-- symbol (a "TK_*" string or a literal character). Semantic values for
-- names, numbers and strings are written into Token.seminfo. The token
-- buffer ls.buff is reset on entry and reused across iterations.
function luaX:llex(ls, Token)
ls.buff = ""
while true do
local c = ls.current
----------------------------------------------------------------
if self:currIsNewline(ls) then
self:inclinenumber(ls)
----------------------------------------------------------------
elseif c == "-" then
-- either the '-' operator, or "--" starting a short/long comment
c = self:nextc(ls)
if c ~= "-" then return "-" end
-- else is a comment
local sep = -1
if self:nextc(ls) == '[' then
sep = self:skip_sep(ls)
ls.buff = "" -- 'skip_sep' may dirty the buffer
end
if sep >= 0 then
self:read_long_string(ls, nil, sep) -- long comment
ls.buff = ""
else -- else short comment
while not self:currIsNewline(ls) and ls.current ~= "EOZ" do
self:nextc(ls)
end
end
----------------------------------------------------------------
elseif c == "[" then
-- either a long string delimiter or the plain '[' token
local sep = self:skip_sep(ls)
if sep >= 0 then
self:read_long_string(ls, Token, sep)
return "TK_STRING"
elseif sep == -1 then
return "["
else
self:lexerror(ls, "invalid long string delimiter", "TK_STRING")
end
----------------------------------------------------------------
elseif c == "=" then
c = self:nextc(ls)
if c ~= "=" then return "="
else self:nextc(ls); return "TK_EQ" end
----------------------------------------------------------------
elseif c == "<" then
c = self:nextc(ls)
if c ~= "=" then return "<"
else self:nextc(ls); return "TK_LE" end
----------------------------------------------------------------
elseif c == ">" then
c = self:nextc(ls)
if c ~= "=" then return ">"
else self:nextc(ls); return "TK_GE" end
----------------------------------------------------------------
elseif c == "~" then
c = self:nextc(ls)
if c ~= "=" then return "~"
else self:nextc(ls); return "TK_NE" end
----------------------------------------------------------------
elseif c == "\"" or c == "'" then
self:read_string(ls, c, Token)
return "TK_STRING"
----------------------------------------------------------------
elseif c == "." then
-- '.', '..', '...' or a number like ".5"
c = self:save_and_next(ls)
if self:check_next(ls, ".") then
if self:check_next(ls, ".") then
return "TK_DOTS" -- ...
else return "TK_CONCAT" -- ..
end
elseif not string.find(c, "%d") then
return "."
else
self:read_numeral(ls, Token)
return "TK_NUMBER"
end
----------------------------------------------------------------
elseif c == "EOZ" then
return "TK_EOS"
----------------------------------------------------------------
else -- default
if string.find(c, "%s") then
-- lua_assert(self:currIsNewline(ls))
self:nextc(ls)
elseif string.find(c, "%d") then
self:read_numeral(ls, Token)
return "TK_NUMBER"
elseif string.find(c, "[_%a]") then
-- identifier or reserved word
repeat
c = self:save_and_next(ls)
until c == "EOZ" or not string.find(c, "[_%w]")
local ts = ls.buff
local tok = self.enums[ts]
if tok then return tok end -- reserved word?
Token.seminfo = ts
return "TK_NAME"
else
self:nextc(ls)
return c -- single-char tokens (+ - / ...)
end
----------------------------------------------------------------
end--if c
end--while
end
|