author     Jacek Antonelli   2008-08-15 23:44:46 -0500
committer  Jacek Antonelli   2008-08-15 23:44:46 -0500
commit     38d6d37f2d982fa959e9e8a4a3f7e1ccfad7b5d4 (patch)
tree       adca584755d22ca041a2dbfc35d4eca01f70b32c   /linden/indra/llui/llkeywords.cpp
parent     README.txt (diff)
Second Life viewer sources 1.13.2.12
Diffstat (limited to 'linden/indra/llui/llkeywords.cpp')
-rw-r--r--  linden/indra/llui/llkeywords.cpp  521
1 file changed, 521 insertions, 0 deletions
diff --git a/linden/indra/llui/llkeywords.cpp b/linden/indra/llui/llkeywords.cpp
new file mode 100644
index 0000000..fd224d8
--- /dev/null
+++ b/linden/indra/llui/llkeywords.cpp
@@ -0,0 +1,521 @@
/**
 * @file llkeywords.cpp
 * @brief Keyword list for LSL
 *
 * Copyright (c) 2000-2007, Linden Research, Inc.
 *
 * The source code in this file ("Source Code") is provided by Linden Lab
 * to you under the terms of the GNU General Public License, version 2.0
 * ("GPL"), unless you have obtained a separate licensing agreement
 * ("Other License"), formally executed by you and Linden Lab.  Terms of
 * the GPL can be found in doc/GPL-license.txt in this distribution, or
 * online at http://secondlife.com/developers/opensource/gplv2
 *
 * There are special exceptions to the terms and conditions of the GPL as
 * it is applied to this Source Code. View the full text of the exception
 * in the file doc/FLOSS-exception.txt in this software distribution, or
 * online at http://secondlife.com/developers/opensource/flossexception
 *
 * By copying, modifying or distributing this software, you acknowledge
 * that you have read and understood your obligations described above,
 * and agree to abide by those obligations.
 *
 * ALL LINDEN LAB SOURCE CODE IS PROVIDED "AS IS." LINDEN LAB MAKES NO
 * WARRANTIES, EXPRESS, IMPLIED OR OTHERWISE, REGARDING ITS ACCURACY,
 * COMPLETENESS OR PERFORMANCE.
 */

#include "linden_common.h"

#include <iostream>
#include <fstream>

#include "llkeywords.h"
#include "lltexteditor.h"
#include "llstl.h"
#include <boost/tokenizer.hpp>

const U32 KEYWORD_FILE_CURRENT_VERSION = 2;

inline BOOL LLKeywordToken::isHead(const llwchar* s)
{
    // Compare character by character (strncmp-style); this is much faster
    // than constructing a substring and doing a full string compare.
    BOOL res = TRUE;
    const llwchar* t = mToken.c_str();
    S32 len = mToken.size();
    for (S32 i = 0; i < len; i++)
    {
        if (s[i] != t[i])
        {
            res = FALSE;
            break;
        }
    }
    return res;
}
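
// Note: isHead() is a prefix test -- it reports whether the text at 's' begins
// with mToken.  It does not check for a word boundary after the match; callers
// in findSegments() handle boundary and delimiter logic themselves.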

LLKeywords::LLKeywords() : mLoaded(FALSE)
{
}

LLKeywords::~LLKeywords()
{
    std::for_each(mWordTokenMap.begin(), mWordTokenMap.end(), DeletePairedPointer());
    std::for_each(mLineTokenList.begin(), mLineTokenList.end(), DeletePointer());
    std::for_each(mDelimiterTokenList.begin(), mDelimiterTokenList.end(), DeletePointer());
}

BOOL LLKeywords::loadFromFile( const LLString& filename )
{
    mLoaded = FALSE;

    ////////////////////////////////////////////////////////////
    // File header

    const S32 BUFFER_SIZE = 1024;
    char buffer[BUFFER_SIZE];

    llifstream file;
    file.open(filename.c_str());
    if( file.fail() )
    {
        llinfos << "LLKeywords::loadFromFile() Unable to open file: " << filename << llendl;
        return mLoaded;
    }

    // Identifying string
    file >> buffer;
    if( strcmp( buffer, "llkeywords" ) )
    {
        llinfos << filename << " does not appear to be a keyword file" << llendl;
        return mLoaded;
    }

    // Check file version
    file >> buffer;
    U32 version_num;
    file >> version_num;
    if( strcmp(buffer, "version") || version_num != (U32)KEYWORD_FILE_CURRENT_VERSION )
    {
        llinfos << filename << " does not appear to be a version " << KEYWORD_FILE_CURRENT_VERSION << " keyword file" << llendl;
        return mLoaded;
    }

    // Start-of-line (SOL) markers that introduce each section of the keyword file
    const char SOL_COMMENT[] = "#";
    const char SOL_WORD[] = "[word ";
    const char SOL_LINE[] = "[line ";
    const char SOL_ONE_SIDED_DELIMITER[] = "[one_sided_delimiter ";
    const char SOL_TWO_SIDED_DELIMITER[] = "[two_sided_delimiter ";

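    // Illustrative sketch of the file layout this parser expects (the keyword,
    // tooltip, and color values below are examples, not the shipped file):
    //
    //   llkeywords
    //   version 2
    //   # comment lines start with '#'
    //   [word 0.0, 0.3, 0.5]
    //   default        Tooltip line one:tooltip line two
    //   [line 0.0, 0.5, 0.0]
    //   //
    //   [two_sided_delimiter 0.3, 0.3, 0.3]
    //   "
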
    LLColor3 cur_color( 1, 0, 0 );
    LLKeywordToken::TOKEN_TYPE cur_type = LLKeywordToken::WORD;

    while (!file.eof())
    {
        file.getline( buffer, BUFFER_SIZE );
        if( !strncmp( buffer, SOL_COMMENT, strlen(SOL_COMMENT) ) )
        {
            continue;
        }
        else if( !strncmp( buffer, SOL_WORD, strlen(SOL_WORD) ) )
        {
            cur_color = readColor( buffer + strlen(SOL_WORD) );
            cur_type = LLKeywordToken::WORD;
            continue;
        }
        else if( !strncmp( buffer, SOL_LINE, strlen(SOL_LINE) ) )
        {
            cur_color = readColor( buffer + strlen(SOL_LINE) );
            cur_type = LLKeywordToken::LINE;
            continue;
        }
        else if( !strncmp( buffer, SOL_TWO_SIDED_DELIMITER, strlen(SOL_TWO_SIDED_DELIMITER) ) )
        {
            cur_color = readColor( buffer + strlen(SOL_TWO_SIDED_DELIMITER) );
            cur_type = LLKeywordToken::TWO_SIDED_DELIMITER;
            continue;
        }
        else if( !strncmp( buffer, SOL_ONE_SIDED_DELIMITER, strlen(SOL_ONE_SIDED_DELIMITER) ) )
        {
            cur_color = readColor( buffer + strlen(SOL_ONE_SIDED_DELIMITER) );
            cur_type = LLKeywordToken::ONE_SIDED_DELIMITER;
            continue;
        }

        LLString token_buffer( buffer );
        LLString::trim(token_buffer);

        typedef boost::tokenizer<boost::char_separator<char> > tokenizer;
        boost::char_separator<char> sep_word("", " \t");
        tokenizer word_tokens(token_buffer, sep_word);
        tokenizer::iterator token_word_iter = word_tokens.begin();

        if( !token_buffer.empty() && token_word_iter != word_tokens.end() )
        {
            // first word is keyword
            LLString keyword = (*token_word_iter);
            LLString::trim(keyword);

            // following words are tooltip
            LLString tool_tip;
            while (++token_word_iter != word_tokens.end())
            {
                tool_tip += (*token_word_iter);
            }
            LLString::trim(tool_tip);

            if( !tool_tip.empty() )
            {
                // Replace : with \n for multi-line tool tips.
                LLString::replaceChar( tool_tip, ':', '\n' );
                addToken(cur_type, keyword, cur_color, tool_tip );
            }
            else
            {
                addToken(cur_type, keyword, cur_color, NULL );
            }
        }
    }

    file.close();

    mLoaded = TRUE;
    return mLoaded;
}
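
// Illustrative usage sketch (the file path and variable names here are examples,
// not taken from the viewer code):
//
//   LLKeywords keywords;
//   if( keywords.loadFromFile( "app_settings/keywords.ini" ) )
//   {
//       std::vector<LLTextSegment*> segments;
//       keywords.findSegments( &segments, editor_wtext );  // editor_wtext: the editor's LLWString contents
//   }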

// Add the token as described
void LLKeywords::addToken(LLKeywordToken::TOKEN_TYPE type,
                          const LLString& key_in,
                          const LLColor3& color,
                          const LLString& tool_tip_in )
{
    LLWString key = utf8str_to_wstring(key_in);
    LLWString tool_tip = utf8str_to_wstring(tool_tip_in);
    switch(type)
    {
    case LLKeywordToken::WORD:
        mWordTokenMap[key] = new LLKeywordToken(type, color, key, tool_tip);
        break;

    case LLKeywordToken::LINE:
        mLineTokenList.push_front(new LLKeywordToken(type, color, key, tool_tip));
        break;

    case LLKeywordToken::TWO_SIDED_DELIMITER:
    case LLKeywordToken::ONE_SIDED_DELIMITER:
        mDelimiterTokenList.push_front(new LLKeywordToken(type, color, key, tool_tip));
        break;

    default:
        llassert(0);
    }
}
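
// Storage note: WORD tokens are looked up by exact match through mWordTokenMap,
// while LINE and delimiter tokens are kept in lists and matched by isHead()
// prefix tests as findSegments() scans the text.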

LLColor3 LLKeywords::readColor( const LLString& s )
{
    F32 r, g, b;
    r = g = b = 0.0f;
    S32 read = sscanf(s.c_str(), "%f, %f, %f]", &r, &g, &b );
    if( read != 3 )
    {
        llinfos << "Poorly formed color in keyword file" << llendl;
    }
    return LLColor3( r, g, b );
}
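
// readColor() is handed the text that follows a section marker, so for a line
// such as "[word 0.0, 0.3, 0.5]" it parses the remaining "0.0, 0.3, 0.5]" into
// an LLColor3 (the numbers here are only an example).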

// Walk through a string, applying the rules specified by the keyword token list,
// and create a list of color segments.
void LLKeywords::findSegments(std::vector<LLTextSegment *>* seg_list, const LLWString& wtext)
{
    std::for_each(seg_list->begin(), seg_list->end(), DeletePointer());
    seg_list->clear();

    if( wtext.empty() )
    {
        return;
    }

    S32 text_len = wtext.size();

    // Start with one default-colored segment covering the whole text.
    seg_list->push_back( new LLTextSegment( LLColor3(0,0,0), 0, text_len ) );

    const llwchar* base = wtext.c_str();
    const llwchar* cur = base;
    const llwchar* line = NULL;

    while( *cur )
    {
        if( *cur == '\n' || cur == base )
        {
            if( *cur == '\n' )
            {
                cur++;
                if( !*cur || *cur == '\n' )
                {
                    continue;
                }
            }

            // Start of a new line
            line = cur;

            // Skip white space
            while( *cur && isspace(*cur) && (*cur != '\n') )
            {
                cur++;
            }
            if( !*cur || *cur == '\n' )
            {
                continue;
            }

            // cur is now at the first non-whitespace character of a new line

            // Line start tokens
            {
                BOOL line_done = FALSE;
                for (token_list_t::iterator iter = mLineTokenList.begin();
                     iter != mLineTokenList.end(); ++iter)
                {
                    LLKeywordToken* cur_token = *iter;
                    if( cur_token->isHead( cur ) )
                    {
                        S32 seg_start = cur - base;
                        while( *cur && *cur != '\n' )
                        {
                            // skip the rest of the line
                            cur++;
                        }
                        S32 seg_end = cur - base;

                        //llinfos << "Seg: [" << (char*)LLString( base, seg_start, seg_end-seg_start) << "]" << llendl;
                        LLTextSegment* text_segment = new LLTextSegment( cur_token->getColor(), seg_start, seg_end );
                        text_segment->setToken( cur_token );
                        insertSegment( seg_list, text_segment, text_len);
                        line_done = TRUE; // to break out of second loop.
                        break;
                    }
                }

                if( line_done )
                {
                    continue;
                }
            }
        }
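
        // From this point on in the line, delimiter tokens are tested before word
        // tokens, so a keyword appearing inside a string or comment keeps the
        // delimiter's color rather than the keyword's.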

        // Skip white space
        while( *cur && isspace(*cur) && (*cur != '\n') )
        {
            cur++;
        }

        while( *cur && *cur != '\n' )
        {
            // Check against delimiters
            {
                S32 seg_start = 0;
                LLKeywordToken* cur_delimiter = NULL;
                for (token_list_t::iterator iter = mDelimiterTokenList.begin();
                     iter != mDelimiterTokenList.end(); ++iter)
                {
                    LLKeywordToken* delimiter = *iter;
                    if( delimiter->isHead( cur ) )
                    {
                        cur_delimiter = delimiter;
                        break;
                    }
                }

                if( cur_delimiter )
                {
                    S32 between_delimiters = 0;
                    S32 seg_end = 0;

                    seg_start = cur - base;
                    cur += cur_delimiter->getLength();

                    if( cur_delimiter->getType() == LLKeywordToken::TWO_SIDED_DELIMITER )
                    {
                        while( *cur && !cur_delimiter->isHead(cur))
                        {
                            // Check for an escape sequence.
                            if (*cur == '\\')
                            {
                                // Count the number of backslashes.
                                S32 num_backslashes = 0;
                                while (*cur == '\\')
                                {
                                    num_backslashes++;
                                    between_delimiters++;
                                    cur++;
                                }
                                // Is the next character the end delimiter?
                                if (cur_delimiter->isHead(cur))
                                {
                                    // If there was an odd number of backslashes, the delimiter
                                    // is escaped and does not end the sequence.
                                    if (num_backslashes % 2 == 1)
                                    {
                                        between_delimiters++;
                                        cur++;
                                    }
                                    else
                                    {
                                        // This is an end delimiter.
                                        break;
                                    }
                                }
                            }
                            else
                            {
                                between_delimiters++;
                                cur++;
                            }
                        }

                        if( *cur )
                        {
                            cur += cur_delimiter->getLength();
                            seg_end = seg_start + between_delimiters + 2 * cur_delimiter->getLength();
                        }
                        else
                        {
                            // eof
                            seg_end = seg_start + between_delimiters + cur_delimiter->getLength();
                        }
                    }
                    else
                    {
                        llassert( cur_delimiter->getType() == LLKeywordToken::ONE_SIDED_DELIMITER );
                        // Left side is the delimiter.  Right side is eol or eof.
                        while( *cur && ('\n' != *cur) )
                        {
                            between_delimiters++;
                            cur++;
                        }
                        seg_end = seg_start + between_delimiters + cur_delimiter->getLength();
                    }

                    //llinfos << "Seg: [" << (char*)LLString( base, seg_start, seg_end-seg_start ) << "]" << llendl;
                    LLTextSegment* text_segment = new LLTextSegment( cur_delimiter->getColor(), seg_start, seg_end );
                    text_segment->setToken( cur_delimiter );
                    insertSegment( seg_list, text_segment, text_len);

                    // Note: we don't increment cur, since the end of one delimited seg may be immediately
                    // followed by the start of another one.
                    continue;
                }
            }

            // Check against words
            llwchar prev = cur > base ? *(cur-1) : 0;
            if( !isalnum( prev ) && (prev != '_') )
            {
                const llwchar* p = cur;
                while( isalnum( *p ) || (*p == '_') )
                {
                    p++;
                }
                S32 seg_len = p - cur;
                if( seg_len > 0 )
                {
                    LLWString word( cur, 0, seg_len );
                    word_token_map_t::iterator map_iter = mWordTokenMap.find(word);
                    if( map_iter != mWordTokenMap.end() )
                    {
                        LLKeywordToken* cur_token = map_iter->second;
                        S32 seg_start = cur - base;
                        S32 seg_end = seg_start + seg_len;

                        // llinfos << "Seg: [" << word.c_str() << "]" << llendl;

                        LLTextSegment* text_segment = new LLTextSegment( cur_token->getColor(), seg_start, seg_end );
                        text_segment->setToken( cur_token );
                        insertSegment( seg_list, text_segment, text_len);
                    }
                    cur += seg_len;
                    continue;
                }
            }

            if( *cur && *cur != '\n' )
            {
                cur++;
            }
        }
    }
}

void LLKeywords::insertSegment(std::vector<LLTextSegment*>* seg_list, LLTextSegment* new_segment, S32 text_len )
{
    LLTextSegment* last = seg_list->back();
    S32 new_seg_end = new_segment->getEnd();

    if( new_segment->getStart() == last->getStart() )
    {
        *last = *new_segment;
        delete new_segment;
    }
    else
    {
        last->setEnd( new_segment->getStart() );
        seg_list->push_back( new_segment );
    }

    if( new_seg_end < text_len )
    {
        seg_list->push_back( new LLTextSegment( LLColor3(0,0,0), new_seg_end, text_len ) );
    }
}
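
// Note: insertSegment() assumes the final entry in seg_list is the default-colored
// segment that still covers the tail of the text.  It either replaces that segment
// (when the new one starts at the same position) or truncates it, appends the new
// segment, and then pushes a fresh default-colored segment for any remaining text.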

#ifdef _DEBUG
void LLKeywords::dump()
{
    llinfos << "LLKeywords" << llendl;

    llinfos << "LLKeywords::mWordTokenMap" << llendl;
    word_token_map_t::iterator word_token_iter = mWordTokenMap.begin();
    while( word_token_iter != mWordTokenMap.end() )
    {
        LLKeywordToken* word_token = word_token_iter->second;
        word_token->dump();
        ++word_token_iter;
    }

    llinfos << "LLKeywords::mLineTokenList" << llendl;
    for (token_list_t::iterator iter = mLineTokenList.begin();
         iter != mLineTokenList.end(); ++iter)
    {
        LLKeywordToken* line_token = *iter;
        line_token->dump();
    }

    llinfos << "LLKeywords::mDelimiterTokenList" << llendl;
    for (token_list_t::iterator iter = mDelimiterTokenList.begin();
         iter != mDelimiterTokenList.end(); ++iter)
    {
        LLKeywordToken* delimiter_token = *iter;
        delimiter_token->dump();
    }
}

void LLKeywordToken::dump()
{
    llinfos << "[" <<
        mColor.mV[VX] << ", " <<
        mColor.mV[VY] << ", " <<
        mColor.mV[VZ] << "] [" <<
        mToken.c_str() << "]" <<
        llendl;
}

#endif  // _DEBUG