llkeywords.cpp

#include "linden_common.h"

#include <iostream>
#include <fstream>

#include "llkeywords.h"
#include "lltexteditor.h"
#include "llstl.h"
#include <boost/tokenizer.hpp>

const U32 KEYWORD_FILE_CURRENT_VERSION = 2;

inline BOOL LLKeywordToken::isHead(const llwchar* s) const
{
	// Compare character by character, strncmp-style; avoids building a temporary
	// string just to test whether s begins with this token.
	BOOL res = TRUE;
	const llwchar* t = mToken.c_str();
	S32 len = mToken.size();
	for (S32 i = 0; i < len; i++)
	{
		if (s[i] != t[i])
		{
			res = FALSE;
			break;
		}
	}
	return res;
}

LLKeywords::LLKeywords() : mLoaded(FALSE)
{
}

LLKeywords::~LLKeywords()
{
	std::for_each(mWordTokenMap.begin(), mWordTokenMap.end(), DeletePairedPointer());
	std::for_each(mLineTokenList.begin(), mLineTokenList.end(), DeletePointer());
	std::for_each(mDelimiterTokenList.begin(), mDelimiterTokenList.end(), DeletePointer());
}

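// Keyword files are parsed by loadFromFile() below. An illustrative sketch of the
// expected layout, derived from the parser (the shipped keyword file may differ in detail):
//
//   llkeywords
//   version 2
//   # lines starting with '#' are comments
//   [word 0.0, 0.3, 0.5]
//   integer	Tooltip for 'integer':colons become newlines in the tooltip
//   [line 0.0, 0.5, 0.0]
//   [one_sided_delimiter 0.5, 0.2, 0.2]
//   [two_sided_delimiter 0.3, 0.3, 0.3]
//
// Each "[...]" header sets the color and token type for the entries that follow it.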
BOOL LLKeywords::loadFromFile( const LLString& filename )
{
	mLoaded = FALSE;

	// File header

	const S32 BUFFER_SIZE = 1024;
	char	buffer[BUFFER_SIZE];	/* Flawfinder: ignore */

	llifstream file;
	file.open(filename.c_str());	/* Flawfinder: ignore */
	if( file.fail() )
	{
		llinfos << "LLKeywords::loadFromFile() Unable to open file: " << filename << llendl;
		return mLoaded;
	}

	// Identifying string
	file >> buffer;
	if( strcmp( buffer, "llkeywords" ) )
	{
		llinfos << filename << " does not appear to be a keyword file" << llendl;
		return mLoaded;
	}

	// Check file version
	file >> buffer;
	U32	version_num;
	file >> version_num;
	if( strcmp(buffer, "version") || version_num != (U32)KEYWORD_FILE_CURRENT_VERSION )
	{
		llinfos << filename << " does not appear to be a version " << KEYWORD_FILE_CURRENT_VERSION << " keyword file" << llendl;
		return mLoaded;
	}

	// start of line (SOL)
	const char SOL_COMMENT[] = "#";
	const char SOL_WORD[] = "[word ";
	const char SOL_LINE[] = "[line ";
	const char SOL_ONE_SIDED_DELIMITER[] = "[one_sided_delimiter ";
	const char SOL_TWO_SIDED_DELIMITER[] = "[two_sided_delimiter ";

	LLColor3 cur_color( 1, 0, 0 );
	LLKeywordToken::TOKEN_TYPE cur_type = LLKeywordToken::WORD;

	while (!file.eof())
	{
		file.getline( buffer, BUFFER_SIZE );
		if( !strncmp( buffer, SOL_COMMENT, strlen(SOL_COMMENT) ) )	/* Flawfinder: ignore */
		{
			continue;
		}
		else if( !strncmp( buffer, SOL_WORD, strlen(SOL_WORD) ) )	/* Flawfinder: ignore */
		{
			cur_color = readColor( buffer + strlen(SOL_WORD) );	/* Flawfinder: ignore */
			cur_type = LLKeywordToken::WORD;
			continue;
		}
		else if( !strncmp( buffer, SOL_LINE, strlen(SOL_LINE) ) )	/* Flawfinder: ignore */
		{
			cur_color = readColor( buffer + strlen(SOL_LINE) );	/* Flawfinder: ignore */
			cur_type = LLKeywordToken::LINE;
			continue;
		}
		else if( !strncmp( buffer, SOL_TWO_SIDED_DELIMITER, strlen(SOL_TWO_SIDED_DELIMITER) ) )	/* Flawfinder: ignore */
		{
			cur_color = readColor( buffer + strlen(SOL_TWO_SIDED_DELIMITER) );	/* Flawfinder: ignore */
			cur_type = LLKeywordToken::TWO_SIDED_DELIMITER;
			continue;
		}
		if( !strncmp( buffer, SOL_ONE_SIDED_DELIMITER, strlen(SOL_ONE_SIDED_DELIMITER) ) )	/* Flawfinder: ignore */
		{
			cur_color = readColor( buffer + strlen(SOL_ONE_SIDED_DELIMITER) );	/* Flawfinder: ignore */
			cur_type = LLKeywordToken::ONE_SIDED_DELIMITER;
			continue;
		}

		LLString token_buffer( buffer );
		LLString::trim(token_buffer);

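		// Space and tab are listed as "kept" delimiters, so each one is returned as a
		// token of its own; concatenating the trailing tokens below rebuilds the tooltip
		// with its internal spacing intact.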
		typedef boost::tokenizer<boost::char_separator<char> > tokenizer;
		boost::char_separator<char> sep_word("", " \t");
		tokenizer word_tokens(token_buffer, sep_word);
		tokenizer::iterator token_word_iter = word_tokens.begin();

		if( !token_buffer.empty() && token_word_iter != word_tokens.end() )
		{
			// first word is keyword
			LLString keyword = (*token_word_iter);
			LLString::trim(keyword);

			// following words are tooltip
			LLString tool_tip;
			while (++token_word_iter != word_tokens.end())
			{
				tool_tip += (*token_word_iter);
			}
			LLString::trim(tool_tip);

			if( !tool_tip.empty() )
			{
				// Replace : with \n for multi-line tool tips.
				LLString::replaceChar( tool_tip, ':', '\n' );
				addToken(cur_type, keyword, cur_color, tool_tip );
			}
			else
			{
				// No tooltip: pass the empty string rather than a NULL pointer.
				addToken(cur_type, keyword, cur_color, tool_tip );
			}
		}
	}

	file.close();

	mLoaded = TRUE;
	return mLoaded;
}

// Add the token as described
void LLKeywords::addToken(LLKeywordToken::TOKEN_TYPE type,
						  const LLString& key_in,
						  const LLColor3& color,
						  const LLString& tool_tip_in )
{
	LLWString key = utf8str_to_wstring(key_in);
	LLWString tool_tip = utf8str_to_wstring(tool_tip_in);
	switch(type)
	{
	case LLKeywordToken::WORD:
		mWordTokenMap[key] = new LLKeywordToken(type, color, key, tool_tip);
		break;

	case LLKeywordToken::LINE:
		mLineTokenList.push_front(new LLKeywordToken(type, color, key, tool_tip));
		break;

	case LLKeywordToken::TWO_SIDED_DELIMITER:
	case LLKeywordToken::ONE_SIDED_DELIMITER:
		mDelimiterTokenList.push_front(new LLKeywordToken(type, color, key, tool_tip));
		break;

	default:
		llassert(0);
	}
}

LLColor3 LLKeywords::readColor( const LLString& s )
{
	F32 r, g, b;
	r = g = b = 0.0f;
	S32 read = sscanf(s.c_str(), "%f, %f, %f]", &r, &g, &b );	/* Flawfinder: ignore */
	if( read != 3 )
	{
		llinfos << "Poorly formed color in keyword file" << llendl;
	}
	return LLColor3( r, g, b );
}

// Walk through a string, applying the rules specified by the keyword token list,
// and create a list of color segments.
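// Matching precedence: line-start tokens are tried first (only at the first
// non-whitespace character of a line), then delimiter tokens, then word tokens
// bounded by non-identifier characters.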
void LLKeywords::findSegments(std::vector<LLTextSegment *>* seg_list, const LLWString& wtext, const LLColor4 &defaultColor)
{
	std::for_each(seg_list->begin(), seg_list->end(), DeletePointer());
	seg_list->clear();

	if( wtext.empty() )
	{
		return;
	}

	S32 text_len = wtext.size();

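	// Begin with a single default-colored segment spanning the whole text;
	// insertSegment() splits it apart as tokens are matched below.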
	seg_list->push_back( new LLTextSegment( LLColor3(defaultColor), 0, text_len ) );

	const llwchar* base = wtext.c_str();
	const llwchar* cur = base;
	const llwchar* line = NULL;

	while( *cur )
	{
		if( *cur == '\n' || cur == base )
		{
			if( *cur == '\n' )
			{
				cur++;
				if( !*cur || *cur == '\n' )
				{
					continue;
				}
			}

			// Start of a new line
			line = cur;

			// Skip white space
			while( *cur && isspace(*cur) && (*cur != '\n') )
			{
				cur++;
			}
			if( !*cur || *cur == '\n' )
			{
				continue;
			}

			// cur is now at the first non-whitespace character of a new line

			// Line start tokens
			{
				BOOL line_done = FALSE;
				for (token_list_t::iterator iter = mLineTokenList.begin();
					 iter != mLineTokenList.end(); ++iter)
				{
					LLKeywordToken* cur_token = *iter;
					if( cur_token->isHead( cur ) )
					{
						S32 seg_start = cur - base;
						while( *cur && *cur != '\n' )
						{
							// skip the rest of the line
							cur++;
						}
						S32 seg_end = cur - base;

						//llinfos << "Seg: [" << (char*)LLString( base, seg_start, seg_end-seg_start) << "]" << llendl;
						LLTextSegment* text_segment = new LLTextSegment( cur_token->getColor(), seg_start, seg_end );
						text_segment->setToken( cur_token );
						insertSegment( seg_list, text_segment, text_len, defaultColor);
						line_done = TRUE; // to break out of second loop.
						break;
					}
				}

				if( line_done )
				{
					continue;
				}
			}
		}

		// Skip white space
		while( *cur && isspace(*cur) && (*cur != '\n') )
		{
			cur++;
		}

		while( *cur && *cur != '\n' )
		{
			// Check against delimiters
			{
				S32 seg_start = 0;
				LLKeywordToken* cur_delimiter = NULL;
				for (token_list_t::iterator iter = mDelimiterTokenList.begin();
					 iter != mDelimiterTokenList.end(); ++iter)
				{
					LLKeywordToken* delimiter = *iter;
					if( delimiter->isHead( cur ) )
					{
						cur_delimiter = delimiter;
						break;
					}
				}

				if( cur_delimiter )
				{
					S32 between_delimiters = 0;
					S32 seg_end = 0;

					seg_start = cur - base;
					cur += cur_delimiter->getLength();

					if( cur_delimiter->getType() == LLKeywordToken::TWO_SIDED_DELIMITER )
					{
						while( *cur && !cur_delimiter->isHead(cur))
						{
							// Check for an escape sequence.
							if (*cur == '\\')
							{
								// Count the number of backslashes.
								S32 num_backslashes = 0;
								while (*cur == '\\')
								{
									num_backslashes++;
									between_delimiters++;
									cur++;
								}
								// Is the next character the end delimiter?
								if (cur_delimiter->isHead(cur))
								{
									// If there was an odd number of backslashes, then this delimiter
									// is escaped and does not end the sequence.
									if (num_backslashes % 2 == 1)
									{
										between_delimiters++;
										cur++;
									}
									else
									{
										// This is an end delimiter.
										break;
									}
								}
							}
							else
							{
								between_delimiters++;
								cur++;
							}
						}

						if( *cur )
						{
							cur += cur_delimiter->getLength();
							seg_end = seg_start + between_delimiters + 2 * cur_delimiter->getLength();
						}
						else
						{
							// eof
							seg_end = seg_start + between_delimiters + cur_delimiter->getLength();
						}
					}
					else
					{
						llassert( cur_delimiter->getType() == LLKeywordToken::ONE_SIDED_DELIMITER );
						// Left side is the delimiter.  Right side is eol or eof.
						while( *cur && ('\n' != *cur) )
						{
							between_delimiters++;
							cur++;
						}
						seg_end = seg_start + between_delimiters + cur_delimiter->getLength();
					}

					//llinfos << "Seg: [" << (char*)LLString( base, seg_start, seg_end-seg_start ) << "]" << llendl;
					LLTextSegment* text_segment = new LLTextSegment( cur_delimiter->getColor(), seg_start, seg_end );
					text_segment->setToken( cur_delimiter );
					insertSegment( seg_list, text_segment, text_len, defaultColor);

					// Note: we don't increment cur, since the end of one delimited seg may be immediately
					// followed by the start of another one.
					continue;
				}
			}

			// check against words
			llwchar prev = cur > base ? *(cur-1) : 0;
			if( !isalnum( prev ) && (prev != '_') )
			{
				const llwchar* p = cur;
				while( isalnum( *p ) || (*p == '_') )
				{
					p++;
				}
				S32 seg_len = p - cur;
				if( seg_len > 0 )
				{
					LLWString word( cur, 0, seg_len );
					word_token_map_t::iterator map_iter = mWordTokenMap.find(word);
					if( map_iter != mWordTokenMap.end() )
					{
						LLKeywordToken* cur_token = map_iter->second;
						S32 seg_start = cur - base;
						S32 seg_end = seg_start + seg_len;

						// llinfos << "Seg: [" << word.c_str() << "]" << llendl;

						LLTextSegment* text_segment = new LLTextSegment( cur_token->getColor(), seg_start, seg_end );
						text_segment->setToken( cur_token );
						insertSegment( seg_list, text_segment, text_len, defaultColor);
					}
					cur += seg_len;
					continue;
				}
			}

			if( *cur && *cur != '\n' )
			{
				cur++;
			}
		}
	}
}

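// Splice new_segment into the end of seg_list: the current last segment is truncated
// at new_segment's start (or overwritten if it starts at the same position), and a
// fresh default-colored segment is appended to cover any text remaining after it.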
void LLKeywords::insertSegment(std::vector<LLTextSegment*>* seg_list, LLTextSegment* new_segment, S32 text_len, const LLColor4 &defaultColor )
{
	LLTextSegment* last = seg_list->back();
	S32 new_seg_end = new_segment->getEnd();

	if( new_segment->getStart() == last->getStart() )
	{
		*last = *new_segment;
		delete new_segment;
	}
	else
	{
		last->setEnd( new_segment->getStart() );
		seg_list->push_back( new_segment );
	}

	if( new_seg_end < text_len )
	{
		seg_list->push_back( new LLTextSegment( defaultColor, new_seg_end, text_len ) );
	}
}

#ifdef _DEBUG
void LLKeywords::dump()
{
	llinfos << "LLKeywords" << llendl;

	llinfos << "LLKeywords::mWordTokenMap" << llendl;
	word_token_map_t::iterator word_token_iter = mWordTokenMap.begin();
	while( word_token_iter != mWordTokenMap.end() )
	{
		LLKeywordToken* word_token = word_token_iter->second;
		word_token->dump();
		++word_token_iter;
	}

	llinfos << "LLKeywords::mLineTokenList" << llendl;
	for (token_list_t::iterator iter = mLineTokenList.begin();
		 iter != mLineTokenList.end(); ++iter)
	{
		LLKeywordToken* line_token = *iter;
		line_token->dump();
	}

	llinfos << "LLKeywords::mDelimiterTokenList" << llendl;
	for (token_list_t::iterator iter = mDelimiterTokenList.begin();
		 iter != mDelimiterTokenList.end(); ++iter)
	{
		LLKeywordToken* delimiter_token = *iter;
		delimiter_token->dump();
	}
}

void LLKeywordToken::dump()
{
	llinfos << "[" <<
		mColor.mV[VX] << ", " <<
		mColor.mV[VY] << ", " <<
		mColor.mV[VZ] << "] [" <<
		mToken.c_str() << "]" <<
		llendl;
}

#endif  // _DEBUG
