aboutsummaryrefslogtreecommitdiffstatshomepage
path: root/LuaSL/testLua/yueliang-0.4.1/orig-5.1.3/llex.lua
diff options
context:
space:
mode:
Diffstat (limited to 'LuaSL/testLua/yueliang-0.4.1/orig-5.1.3/llex.lua')
-rw-r--r--LuaSL/testLua/yueliang-0.4.1/orig-5.1.3/llex.lua686
1 files changed, 0 insertions, 686 deletions
diff --git a/LuaSL/testLua/yueliang-0.4.1/orig-5.1.3/llex.lua b/LuaSL/testLua/yueliang-0.4.1/orig-5.1.3/llex.lua
deleted file mode 100644
index 7949326..0000000
--- a/LuaSL/testLua/yueliang-0.4.1/orig-5.1.3/llex.lua
+++ /dev/null
@@ -1,686 +0,0 @@
1--[[--------------------------------------------------------------------
2
3 llex.lua
4 Lua lexical analyzer in Lua
5 This file is part of Yueliang.
6
7 Copyright (c) 2005-2006 Kein-Hong Man <khman@users.sf.net>
8 The COPYRIGHT file describes the conditions
9 under which this software may be distributed.
10
11 See the ChangeLog for more information.
12
13----------------------------------------------------------------------]]
14
15--[[--------------------------------------------------------------------
16-- Notes:
17-- * intended to 'imitate' llex.c code; performance is not a concern
18-- * tokens are strings; code structure largely retained
19-- * deleted stuff (compared to llex.c) are noted, comments retained
20-- * nextc() returns the currently read character to simplify coding
21-- here; next() in llex.c does not return anything
22-- * compatibility code is marked with "--#" comments
23--
24-- Added:
25-- * luaX:chunkid (function luaO_chunkid from lobject.c)
26-- * luaX:str2d (function luaO_str2d from lobject.c)
27-- * luaX.LUA_QS used in luaX:lexerror (from luaconf.h)
28-- * luaX.LUA_COMPAT_LSTR in luaX:read_long_string (from luaconf.h)
29-- * luaX.MAX_INT used in luaX:inclinenumber (from llimits.h)
30--
31-- To use the lexer:
32-- (1) luaX:init() to initialize the lexer
33-- (2) luaX:setinput() to set the input stream to lex
34-- (3) call luaX:next() or luaX:luaX:lookahead() to get tokens,
35-- until "TK_EOS": luaX:next()
36-- * since EOZ is returned as a string, be careful when regexp testing
37--
38-- Not implemented:
39-- * luaX_newstring: not required by this Lua implementation
40-- * buffer MAX_SIZET size limit (from llimits.h) test not implemented
41-- in the interest of performance
42-- * locale-aware number handling is largely redundant as Lua's
43-- tonumber() function is already capable of this
44--
45-- Changed in 5.1.x:
46-- * TK_NAME token order moved down
47-- * string representation for TK_NAME, TK_NUMBER, TK_STRING changed
48-- * token struct renamed to lower case (LS -> ls)
49-- * LexState struct: removed nestlevel, added decpoint
50-- * error message functions have been greatly simplified
51-- * token2string renamed to luaX_tokens, exposed in llex.h
52-- * lexer now handles all kinds of newlines, including CRLF
53-- * shbang first line handling removed from luaX:setinput;
54-- it is now done in lauxlib.c (luaL_loadfile)
55-- * next(ls) macro renamed to nextc(ls) due to new luaX_next function
56-- * EXTRABUFF and MAXNOCHECK removed due to lexer changes
57-- * checkbuffer(ls, len) macro deleted
58-- * luaX:read_numeral now has 3 support functions: luaX:trydecpoint,
59-- luaX:buffreplace and (luaO_str2d from lobject.c) luaX:str2d
60-- * luaX:read_numeral is now more promiscuous in slurping characters;
61-- hexadecimal numbers was added, locale-aware decimal points too
62-- * luaX:skip_sep is new; used by luaX:read_long_string
63-- * luaX:read_long_string handles new-style long blocks, with some
64-- optional compatibility code
65-- * luaX:llex: parts changed to support new-style long blocks
66-- * luaX:llex: readname functionality has been folded in
67-- * luaX:llex: removed test for control characters
68----------------------------------------------------------------------]]
69
-- Module table for the lexer; intentionally global (pre-5.2 module style).
luaX = {}

-- FIRST_RESERVED is not required as tokens are manipulated as strings
-- TOKEN_LEN deleted; maximum length of a reserved word not needed

------------------------------------------------------------------------
-- "ORDER RESERVED" deleted; enumeration in one place: luaX.RESERVED
------------------------------------------------------------------------

-- terminal symbols denoted by reserved words: TK_AND to TK_WHILE
-- other terminal symbols: TK_NAME to TK_EOS
-- One "TK_XXX text" pair per line; parsed by luaX:init into the
-- luaX.tokens and luaX.enums lookup tables.
luaX.RESERVED = [[
TK_AND and
TK_BREAK break
TK_DO do
TK_ELSE else
TK_ELSEIF elseif
TK_END end
TK_FALSE false
TK_FOR for
TK_FUNCTION function
TK_IF if
TK_IN in
TK_LOCAL local
TK_NIL nil
TK_NOT not
TK_OR or
TK_REPEAT repeat
TK_RETURN return
TK_THEN then
TK_TRUE true
TK_UNTIL until
TK_WHILE while
TK_CONCAT ..
TK_DOTS ...
TK_EQ ==
TK_GE >=
TK_LE <=
TK_NE ~=
TK_NAME <name>
TK_NUMBER <number>
TK_STRING <string>
TK_EOS <eof>]]

-- NUM_RESERVED is not required; number of reserved words

--[[--------------------------------------------------------------------
-- Instead of passing seminfo, the Token struct (e.g. ls.t) is passed
-- so that lexer functions can use its table element, ls.t.seminfo
--
-- SemInfo (struct no longer needed, a mixed-type value is used)
--
-- Token (struct of ls.t and ls.lookahead):
--   token -- token symbol
--   seminfo -- semantics information
--
-- LexState (struct of ls; ls is initialized by luaX:setinput):
--   current -- current character (charint)
--   linenumber -- input line counter
--   lastline -- line of last token 'consumed'
--   t -- current token (table: struct Token)
--   lookahead -- look ahead token (table: struct Token)
--   fs -- 'FuncState' is private to the parser
--   L -- LuaState
--   z -- input stream
--   buff -- buffer for tokens
--   source -- current source name
--   decpoint -- locale decimal point
--   nestlevel -- level of nested non-terminals
----------------------------------------------------------------------]]

-- luaX.tokens (was luaX_tokens) is now a hash; see luaX:init

luaX.MAXSRC = 80 -- maximum length of a formatted chunk id (used via luaX:chunkid in luaX:lexerror)
luaX.MAX_INT = 2147483645 -- constants from elsewhere (see above); line-count limit in luaX:inclinenumber
luaX.LUA_QS = "'%s'" -- quoting format used in luaX:lexerror messages
luaX.LUA_COMPAT_LSTR = 1 -- long-string compatibility mode, read by luaX:read_long_string
--luaX.MAX_SIZET = 4294967293
148
149------------------------------------------------------------------------
150-- initialize lexer
151-- * original luaX_init has code to create and register token strings
152-- * luaX.tokens: TK_* -> token
153-- * luaX.enums: token -> TK_* (used in luaX:llex)
154------------------------------------------------------------------------
function luaX:init()
  -- Build the two mirrored lookup tables from luaX.RESERVED:
  -- tok2str maps "TK_*" -> token text, str2tok maps token text -> "TK_*".
  local tok2str, str2tok = {}, {}
  -- each RESERVED entry is a pair of non-space runs: "TK_XXX text"
  for tok, str in string.gmatch(self.RESERVED, "(%S+)%s+(%S+)") do
    tok2str[tok] = str
    str2tok[str] = tok
  end
  self.tokens = tok2str   -- TK_* -> textual form
  self.enums = str2tok    -- textual form -> TK_* (used in luaX:llex)
end
165
166------------------------------------------------------------------------
167-- returns a suitably-formatted chunk name or id
168-- * from lobject.c, used in llex.c and ldebug.c
169-- * the result, out, is returned (was first argument)
170------------------------------------------------------------------------
function luaX:chunkid(source, bufflen)
  -- Returns a suitably-formatted chunk name or id, truncated to
  -- roughly bufflen characters (was luaO_chunkid from lobject.c).
  local first = string.sub(source, 1, 1)
  if first == "=" then
    -- "=name": use the name verbatim, minus the marker
    return string.sub(source, 2, bufflen)
  end
  if first == "@" then
    -- "@filename": keep the tail of the file name, prefix "..." if cut
    source = string.sub(source, 2)          -- skip the '@'
    bufflen = bufflen - #" '...' "
    local prefix = ""
    if #source > bufflen then
      prefix = "..."
      source = string.sub(source, 1 + #source - bufflen)
    end
    return prefix..source
  end
  -- otherwise: a string chunk, shown as [string "..."], first line only
  local len = string.find(source, "[\n\r]") -- stop at first newline
  len = len and (len - 1) or #source
  bufflen = bufflen - #(" [string \"...\"] ")
  if len > bufflen then len = bufflen end
  if len < #source then                     -- must truncate?
    return "[string \""..string.sub(source, 1, len).."...\"]"
  end
  return "[string \""..source.."\"]"
end
203
204--[[--------------------------------------------------------------------
205-- Support functions for lexer
206-- * all lexer errors eventually reaches lexerror:
207 syntaxerror -> lexerror
208----------------------------------------------------------------------]]
209
210------------------------------------------------------------------------
211-- look up token and return keyword if found (also called by parser)
212------------------------------------------------------------------------
function luaX:token2str(ls, token)
  -- Map a token symbol to a printable string (also called by parser).
  -- Reserved tokens are named "TK_*"; anything else is the literal text.
  if string.sub(token, 1, 3) == "TK_" then
    return self.tokens[token]
  end
  -- a raw control character cannot be printed directly
  if string.find(token, "%c") then
    return string.format("char(%d)", string.byte(token))
  end
  return token
end
223
224------------------------------------------------------------------------
225-- throws a lexer error
226-- * txtToken has been made local to luaX:lexerror
227-- * can't communicate LUA_ERRSYNTAX, so it is unimplemented
228------------------------------------------------------------------------
function luaX:lexerror(ls, msg, token)
  -- Throw a lexer error via error(); the position is taken from
  -- ls.source / ls.linenumber, and 'token' (if given) is appended.
  -- tokens whose text lives in the buffer are reported verbatim
  local function txtToken(ls, token)
    if token == "TK_NAME" or token == "TK_STRING" or token == "TK_NUMBER" then
      return ls.buff
    end
    return self:token2str(ls, token)
  end
  local chunkname = self:chunkid(ls.source, self.MAXSRC)
  local full = string.format("%s:%d: %s", chunkname, ls.linenumber, msg)
  if token then
    full = string.format("%s near "..self.LUA_QS, full, txtToken(ls, token))
  end
  -- luaD_throw(ls->L, LUA_ERRSYNTAX) has no equivalent here
  error(full)
end
247
248------------------------------------------------------------------------
249-- throws a syntax error (mainly called by parser)
250-- * ls.t.token has to be set by the function calling luaX:llex
251-- (see luaX:next and luaX:lookahead elsewhere in this file)
252------------------------------------------------------------------------
function luaX:syntaxerror(ls, msg)
  -- Throw a syntax error at the current token's position
  -- (ls.t.token must have been set by a prior luaX:llex call).
  return self:lexerror(ls, msg, ls.t.token)
end
256
257------------------------------------------------------------------------
258-- move on to next line
259------------------------------------------------------------------------
function luaX:currIsNewline(ls)
  -- True when the current character is either newline flavour.
  local c = ls.current
  return c == "\n" or c == "\r"
end
263
function luaX:inclinenumber(ls)
  -- Consume a line break and bump the line counter; a "\r\n" or
  -- "\n\r" pair counts as a single newline.
  local first = ls.current
  -- lua_assert(currIsNewline(ls))
  self:nextc(ls)                                     -- skip '\n' or '\r'
  if self:currIsNewline(ls) and ls.current ~= first then
    self:nextc(ls)                                   -- skip the paired char
  end
  ls.linenumber = ls.linenumber + 1
  if ls.linenumber >= self.MAX_INT then
    self:syntaxerror(ls, "chunk has too many lines")
  end
end
276
277------------------------------------------------------------------------
278-- initializes an input stream for lexing
279-- * if ls (the lexer state) is passed as a table, then it is filled in,
280-- otherwise it has to be retrieved as a return value
281-- * LUA_MINBUFFER not used; buffer handling not required any more
282------------------------------------------------------------------------
function luaX:setinput(L, ls, z, source)
  -- Initializes an input stream for lexing.
  -- * if ls (the lexer state) is passed as a table it is filled in;
  --   otherwise a fresh table is created.
  -- * BUG FIX: the fresh table is now returned — the original header
  --   comment promised it "has to be retrieved as a return value", but
  --   the function returned nothing, so a nil 'ls' produced a state
  --   table the caller could never reach. Returning ls is backward
  --   compatible (existing callers ignore the extra value).
  -- * LUA_MINBUFFER not used; buffer handling not required any more
  if not ls then ls = {} end -- create struct
  if not ls.lookahead then ls.lookahead = {} end
  if not ls.t then ls.t = {} end
  ls.decpoint = "."          -- locale decimal point (see trydecpoint)
  ls.L = L
  ls.lookahead.token = "TK_EOS" -- no look-ahead token
  ls.z = z
  ls.fs = nil
  ls.linenumber = 1
  ls.lastline = 1
  ls.source = source
  self:nextc(ls)             -- read first char
  return ls                  -- allow retrieval when caller passed nil
end
297
298--[[--------------------------------------------------------------------
299-- LEXICAL ANALYZER
300----------------------------------------------------------------------]]
301
302------------------------------------------------------------------------
303-- checks if current character read is found in the set 'set'
304------------------------------------------------------------------------
function luaX:check_next(ls, set)
  -- If the current character occurs in the literal string 'set',
  -- save it and advance, returning true; otherwise leave state alone.
  -- (4th find argument 'true' = plain search, no pattern magic)
  if string.find(set, ls.current, 1, 1) then
    self:save_and_next(ls)
    return true
  end
  return false
end
312
313------------------------------------------------------------------------
314-- retrieve next token, checking the lookahead buffer if necessary
315-- * note that the macro next(ls) in llex.c is now luaX:nextc
316-- * utilized used in lparser.c (various places)
317------------------------------------------------------------------------
function luaX:next(ls)
  -- Retrieve the next token into ls.t, draining the lookahead
  -- buffer first if it holds one (see luaX:lookahead).
  ls.lastline = ls.linenumber
  local ahead = ls.lookahead
  if ahead.token == "TK_EOS" then
    ls.t.token = self:llex(ls, ls.t)  -- nothing buffered: lex a new token
  else
    -- consume the buffered token; fields copied by value
    ls.t.seminfo = ahead.seminfo
    ls.t.token = ahead.token
    ahead.token = "TK_EOS"            -- mark buffer as empty again
  end
end
329
330------------------------------------------------------------------------
331-- fill in the lookahead buffer
332-- * utilized used in lparser.c:constructor
333------------------------------------------------------------------------
function luaX:lookahead(ls)
  -- Fill the one-token lookahead buffer (used by the parser's
  -- constructor handling); must currently be empty.
  -- lua_assert(ls.lookahead.token == "TK_EOS")
  local ahead = ls.lookahead
  ahead.token = self:llex(ls, ahead)
end
338
339------------------------------------------------------------------------
340-- gets the next character and returns it
341-- * this is the next() macro in llex.c; see notes at the beginning
342------------------------------------------------------------------------
function luaX:nextc(ls)
  -- Read the next character from the input stream into ls.current
  -- and return it (this is the next() macro in llex.c).
  ls.current = luaZ:zgetc(ls.z)
  return ls.current
end
348
349------------------------------------------------------------------------
350-- saves the given character into the token buffer
351-- * buffer handling code removed, not used in this implementation
352-- * test for maximum token buffer length not used, makes things faster
353------------------------------------------------------------------------
354
function luaX:save(ls, c)
  -- Append character c to the token buffer. The MAX_SIZET length
  -- check from llex.c is deliberately omitted for speed; re-enable
  -- luaX.MAX_SIZET further up and test #ls.buff here if you need it.
  ls.buff = ls.buff..c
end
363
364------------------------------------------------------------------------
365-- save current character into token buffer, grabs next character
366-- * like luaX:nextc, returns the character read for convenience
367------------------------------------------------------------------------
function luaX:save_and_next(ls)
  -- Save the current character into the token buffer, then advance;
  -- like luaX:nextc, returns the newly-read character for convenience.
  local c = ls.current
  self:save(ls, c)
  return self:nextc(ls)
end
372
373------------------------------------------------------------------------
374-- LUA_NUMBER
375-- * luaX:read_numeral is the main lexer function to read a number
376-- * luaX:str2d, luaX:buffreplace, luaX:trydecpoint are support functions
377------------------------------------------------------------------------
378
379------------------------------------------------------------------------
380-- string to number converter (was luaO_str2d from lobject.c)
381-- * returns the number, nil if fails (originally returns a boolean)
382-- * conversion function originally lua_str2number(s,p), a macro which
383-- maps to the strtod() function by default (from luaconf.h)
384------------------------------------------------------------------------
function luaX:str2d(s)
  -- String-to-number converter (was luaO_str2d from lobject.c).
  -- Returns the number, or nil on failure (original returned a boolean).
  local num = tonumber(s)
  if num then return num end          -- most common case
  -- tonumber(s) failed: "0x..." may still be a hexadecimal constant
  if string.lower(string.sub(s, 1, 2)) == "0x" then
    num = tonumber(s, 16)
    if num then return num end
    -- The C version then skips trailing spaces and accepts the value
    -- if nothing else follows; tonumber makes that dance unnecessary.
  end
  return nil
end
400
401------------------------------------------------------------------------
402-- single-character replacement, for locale-aware decimal points
403------------------------------------------------------------------------
function luaX:buffreplace(ls, from, to)
  -- Single-character replacement over the token buffer, for
  -- locale-aware decimal points ('from' and 'to' are single chars).
  -- Rebuilt with a table buffer + table.concat: the original
  -- concatenated one character at a time (result = result..c), which
  -- is O(n^2) in the buffer length; behaviour is identical.
  local buff = ls.buff
  local out = {}
  for p = 1, #buff do
    local c = string.sub(buff, p, p)
    if c == from then c = to end
    out[p] = c
  end
  ls.buff = table.concat(out)
end
413
414------------------------------------------------------------------------
415-- Attempt to convert a number by translating '.' decimal points to
416-- the decimal point character used by the current locale. This is not
417-- needed in Yueliang as Lua's tonumber() is already locale-aware.
418-- Instead, the code is here in case the user implements localeconv().
419------------------------------------------------------------------------
function luaX:trydecpoint(ls, Token)
  -- Last-chance number conversion: retry after translating '.' to the
  -- locale decimal point. Lua's tonumber() is already locale-aware, so
  -- this only matters if the user implements localeconv():
  --   struct lconv *cv = localeconv();
  --   ls->decpoint = (cv ? cv->decimal_point[0] : '.');
  local old = ls.decpoint
  self:buffreplace(ls, old, ls.decpoint) -- try updated decimal separator
  local seminfo = self:str2d(ls.buff)
  Token.seminfo = seminfo
  if seminfo then return end
  -- still malformed with the correct decimal point: no more options
  self:buffreplace(ls, ls.decpoint, ".") -- undo change (for error message)
  self:lexerror(ls, "malformed number", "TK_NUMBER")
end
435
436------------------------------------------------------------------------
437-- main number conversion function
438-- * "^%w$" needed in the scan in order to detect "EOZ"
439------------------------------------------------------------------------
function luaX:read_numeral(ls, Token)
  -- Main number-scanning function; stores the numeric value in
  -- Token.seminfo. Deliberately slurps too many characters and lets
  -- str2d reject malformed results (mirrors llex.c's behaviour).
  -- lua_assert(string.find(ls.current, "%d"))
  -- consume digits and '.' (at least the one digit we arrived on)
  repeat
    self:save_and_next(ls)
  until string.find(ls.current, "%D") and ls.current ~= "."
  if self:check_next(ls, "Ee") then -- 'E'?
    self:check_next(ls, "+-") -- optional exponent sign
  end
  -- keep consuming alphanumerics/underscores (covers hex digits, and
  -- trailing junk that str2d will flag); "^%w$" anchors are needed so
  -- the multi-char sentinel string "EOZ" does not match
  while string.find(ls.current, "^%w$") or ls.current == "_" do
    self:save_and_next(ls)
  end
  self:buffreplace(ls, ".", ls.decpoint) -- follow locale for decimal point
  local seminfo = self:str2d(ls.buff)
  Token.seminfo = seminfo
  if not seminfo then -- format error?
    self:trydecpoint(ls, Token) -- try to update decimal point separator
  end
end
458
459------------------------------------------------------------------------
460-- count separators ("=") in a long string delimiter
461-- * used by luaX:read_long_string
462------------------------------------------------------------------------
function luaX:skip_sep(ls)
  -- Count '=' separators in a long-string delimiter such as "[==[".
  -- Returns the count when the bracket is properly closed by a second
  -- '[' or ']', and -count-1 otherwise (so -1 means "plain bracket").
  local s = ls.current
  -- lua_assert(s == "[" or s == "]")
  self:save_and_next(ls)
  local count = 0
  while ls.current == "=" do
    count = count + 1
    self:save_and_next(ls)
  end
  if ls.current == s then
    return count
  end
  return -count - 1
end
474
475------------------------------------------------------------------------
476-- reads a long string or long comment
477------------------------------------------------------------------------
function luaX:read_long_string(ls, Token, sep)
  -- Reads a long string or long comment delimited by [=*[ ... ]=*].
  -- * Token == nil means we are skipping a long comment: nothing is
  --   kept, and the buffer is flushed at each newline to save space.
  -- * sep is the separator count from skip_sep (number of '=' signs).
  -- * LUA_COMPAT_LSTR selects 5.0-style nesting compatibility:
  --   1 = reject nested [[...]], 2 = honour nesting via 'cont'.
  local cont = 0  -- nesting depth, only used by the compat code
  self:save_and_next(ls) -- skip 2nd '['
  if self:currIsNewline(ls) then -- string starts with a newline?
    self:inclinenumber(ls) -- skip it
  end
  while true do
    local c = ls.current
    if c == "EOZ" then
      -- input ended inside the construct: fatal lexical error
      self:lexerror(ls, Token and "unfinished long string" or
                    "unfinished long comment", "TK_EOS")
    elseif c == "[" then
      --# compatibility code start
      if self.LUA_COMPAT_LSTR then
        if self:skip_sep(ls) == sep then
          self:save_and_next(ls) -- skip 2nd '['
          cont = cont + 1
          --# compatibility code start
          if self.LUA_COMPAT_LSTR == 1 then
            if sep == 0 then
              self:lexerror(ls, "nesting of [[...]] is deprecated", "[")
            end
          end
          --# compatibility code end
        end
      end
      --# compatibility code end
    elseif c == "]" then
      -- a closing bracket only terminates when its '=' count matches
      if self:skip_sep(ls) == sep then
        self:save_and_next(ls) -- skip 2nd ']'
        --# compatibility code start
        if self.LUA_COMPAT_LSTR and self.LUA_COMPAT_LSTR == 2 then
          cont = cont - 1
          if sep == 0 and cont >= 0 then break end
        end
        --# compatibility code end
        break
      end
    elseif self:currIsNewline(ls) then
      self:save(ls, "\n") -- normalize all newline styles to '\n'
      self:inclinenumber(ls)
      if not Token then ls.buff = "" end -- avoid wasting space
    else -- default
      if Token then
        self:save_and_next(ls)
      else
        self:nextc(ls) -- comment body: discard
      end
    end--if c
  end--while
  if Token then
    -- strip the delimiters: 2 brackets + sep '=' signs on each side
    local p = 3 + sep
    Token.seminfo = string.sub(ls.buff, p, -p)
  end
end
533
534------------------------------------------------------------------------
535-- reads a string
536-- * has been restructured significantly compared to the original C code
537------------------------------------------------------------------------
538
function luaX:read_string(ls, del, Token)
  -- Reads a quoted string terminated by 'del' (either '"' or "'"),
  -- handling escapes, and stores the unquoted text in Token.seminfo.
  -- Restructured significantly compared to the original C code.
  self:save_and_next(ls)
  while ls.current ~= del do
    local c = ls.current
    if c == "EOZ" then
      self:lexerror(ls, "unfinished string", "TK_EOS")
    elseif self:currIsNewline(ls) then
      -- raw newlines are not allowed inside short strings
      self:lexerror(ls, "unfinished string", "TK_STRING")
    elseif c == "\\" then
      c = self:nextc(ls) -- do not save the '\'
      if self:currIsNewline(ls) then -- go through
        -- escaped line break: becomes a literal '\n'
        self:save(ls, "\n")
        self:inclinenumber(ls)
      elseif c ~= "EOZ" then -- will raise an error next loop
        -- escapes handling greatly simplified here:
        local i = string.find("abfnrtv", c, 1, 1)
        if i then
          -- named escape: map position in "abfnrtv" to the control char
          self:save(ls, string.sub("\a\b\f\n\r\t\v", i, i))
          self:nextc(ls)
        elseif not string.find(c, "%d") then
          self:save_and_next(ls) -- handles \\, \", \', and \?
        else -- \xxx: up to three decimal digits
          -- 'c' is reused as a number here; ls.current (a digit string)
          -- is coerced to a number by the arithmetic
          c, i = 0, 0
          repeat
            c = 10 * c + ls.current
            self:nextc(ls)
            i = i + 1
          until i >= 3 or not string.find(ls.current, "%d")
          if c > 255 then -- UCHAR_MAX
            self:lexerror(ls, "escape sequence too large", "TK_STRING")
          end
          self:save(ls, string.char(c))
        end
      end
    else
      self:save_and_next(ls)
    end--if c
  end--while
  self:save_and_next(ls) -- skip delimiter
  -- drop the surrounding quotes (first and last buffer characters)
  Token.seminfo = string.sub(ls.buff, 2, -2)
end
580
581------------------------------------------------------------------------
582-- main lexer function
583------------------------------------------------------------------------
function luaX:llex(ls, Token)
  -- Main lexer function: skips whitespace and comments, then returns
  -- the next token symbol (a "TK_*" string or a literal single-char
  -- token). Semantic values (names, numbers, strings) are written to
  -- Token.seminfo. "EOZ" is the end-of-stream sentinel from luaZ.
  ls.buff = ""
  while true do
    local c = ls.current
    ----------------------------------------------------------------
    if self:currIsNewline(ls) then
      self:inclinenumber(ls)
    ----------------------------------------------------------------
    elseif c == "-" then
      c = self:nextc(ls)
      if c ~= "-" then return "-" end
      -- else is a comment
      local sep = -1
      if self:nextc(ls) == '[' then
        sep = self:skip_sep(ls)
        ls.buff = "" -- 'skip_sep' may dirty the buffer
      end
      if sep >= 0 then
        self:read_long_string(ls, nil, sep) -- long comment
        ls.buff = ""
      else -- else short comment
        -- skip to end of line (or end of input)
        while not self:currIsNewline(ls) and ls.current ~= "EOZ" do
          self:nextc(ls)
        end
      end
    ----------------------------------------------------------------
    elseif c == "[" then
      local sep = self:skip_sep(ls)
      if sep >= 0 then
        self:read_long_string(ls, Token, sep)
        return "TK_STRING"
      elseif sep == -1 then
        return "[" -- plain bracket, not a long-string opener
      else
        -- e.g. "[==" with no second '[': malformed delimiter
        self:lexerror(ls, "invalid long string delimiter", "TK_STRING")
      end
    ----------------------------------------------------------------
    elseif c == "=" then
      c = self:nextc(ls)
      if c ~= "=" then return "="
      else self:nextc(ls); return "TK_EQ" end
    ----------------------------------------------------------------
    elseif c == "<" then
      c = self:nextc(ls)
      if c ~= "=" then return "<"
      else self:nextc(ls); return "TK_LE" end
    ----------------------------------------------------------------
    elseif c == ">" then
      c = self:nextc(ls)
      if c ~= "=" then return ">"
      else self:nextc(ls); return "TK_GE" end
    ----------------------------------------------------------------
    elseif c == "~" then
      c = self:nextc(ls)
      if c ~= "=" then return "~"
      else self:nextc(ls); return "TK_NE" end
    ----------------------------------------------------------------
    elseif c == "\"" or c == "'" then
      self:read_string(ls, c, Token)
      return "TK_STRING"
    ----------------------------------------------------------------
    elseif c == "." then
      -- could be '.', '..', '...', or the start of a number like ".5"
      c = self:save_and_next(ls)
      if self:check_next(ls, ".") then
        if self:check_next(ls, ".") then
          return "TK_DOTS" -- ...
        else return "TK_CONCAT" -- ..
        end
      elseif not string.find(c, "%d") then
        return "."
      else
        self:read_numeral(ls, Token)
        return "TK_NUMBER"
      end
    ----------------------------------------------------------------
    elseif c == "EOZ" then
      return "TK_EOS"
    ----------------------------------------------------------------
    else -- default
      if string.find(c, "%s") then
        -- plain whitespace (newlines were handled at the top of the loop)
        -- lua_assert(self:currIsNewline(ls))
        self:nextc(ls)
      elseif string.find(c, "%d") then
        self:read_numeral(ls, Token)
        return "TK_NUMBER"
      elseif string.find(c, "[_%a]") then
        -- identifier or reserved word (readname folded in here)
        repeat
          c = self:save_and_next(ls)
        until c == "EOZ" or not string.find(c, "[_%w]")
        local ts = ls.buff
        local tok = self.enums[ts]
        if tok then return tok end -- reserved word?
        Token.seminfo = ts
        return "TK_NAME"
      else
        self:nextc(ls)
        return c -- single-char tokens (+ - / ...)
      end
    ----------------------------------------------------------------
    end--if c
  end--while
end