/*
-----------------------------------------------------------------------------
This source file is part of OGRE
(Object-oriented Graphics Rendering Engine)
For the latest info, see http://www.stevestreeting.com/ogre/

Copyright (c) 2000-2005 The OGRE Team
Also see acknowledgements in Readme.html

This program is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.

This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.

You should have received a copy of the GNU General Public License along with
this program; if not, write to the Free Software Foundation, Inc., 59 Temple
Place - Suite 330, Boston, MA 02111-1307, USA, or go to
http://www.gnu.org/copyleft/gpl.html.
-----------------------------------------------------------------------------
*/


#ifndef __Compiler2Pass_H__
#define __Compiler2Pass_H__

#include <vector>
#include <map>
#include "OgrePrerequisites.h"

namespace Ogre {

    class _OgreExport Compiler2Pass
    {

    protected:

        // BNF operation types
        enum OperationType {otUNKNOWN, otRULE, otAND, otOR, otOPTIONAL,
            otREPEAT, otDATA, otNOT_TEST, otINSERT_TOKEN, otEND};

        // one step in a rule path: the operation to perform and the token it applies to
        struct TokenRule
        {
            OperationType operation;
            size_t tokenID;

            TokenRule(void) : operation(otUNKNOWN), tokenID(0) {}
            TokenRule(const OperationType ot, const size_t token)
                : operation(ot), tokenID(token) {}
        };

        typedef std::vector<TokenRule> TokenRuleContainer;
        typedef TokenRuleContainer::iterator TokenRuleIterator;

        static const size_t SystemTokenBase = 1000;
        enum SystemRuleToken {
            _no_token_ = SystemTokenBase,
            _character_,
            _value_,
            _no_space_skip_
        };

        enum BNF_ID {BNF_UNKOWN = 0,
            BNF_SYNTAX, BNF_RULE, BNF_IDENTIFIER, BNF_IDENTIFIER_RIGHT, BNF_IDENTIFIER_CHARACTERS, BNF_ID_BEGIN, BNF_ID_END,
            BNF_CONSTANT_BEGIN, BNF_SET_RULE, BNF_EXPRESSION,
            BNF_AND_TERM, BNF_OR_TERM, BNF_TERM, BNF_TERM_ID, BNF_CONSTANT, BNF_OR, BNF_TERMINAL_SYMBOL, BNF_TERMINAL_START,
            BNF_REPEAT_EXPRESSION, BNF_REPEAT_BEGIN, BNF_REPEAT_END, BNF_SET, BNF_SET_BEGIN, BNF_SET_END,
            BNF_NOT_TEST, BNF_NOT_TEST_BEGIN, BNF_CONDITIONAL_TOKEN_INSERT, BNF_OPTIONAL_EXPRESSION,
            BNF_NOT_EXPRESSION, BNF_NOT_CHK,
            BNF_OPTIONAL_BEGIN, BNF_OPTIONAL_END, BNF_NO_TOKEN_START, BNF_SINGLEQUOTE, BNF_SINGLE_QUOTE_EXC, BNF_SET_END_EXC,
            BNF_ANY_CHARACTER, BNF_SPECIAL_CHARACTERS1,
            BNF_SPECIAL_CHARACTERS2, BNF_WHITE_SPACE_CHK,

            BNF_LETTER, BNF_LETTER_DIGIT, BNF_DIGIT, BNF_WHITE_SPACE,
            BNF_ALPHA_SET, BNF_NUMBER_SET, BNF_SPECIAL_CHARACTER_SET1,
            BNF_SPECIAL_CHARACTER_SET2, BNF_SPECIAL_CHARACTER_SET3, BNF_NOT_CHARS
        };

        // definition of a lexeme and the token it maps to
        struct LexemeTokenDef
        {
            size_t ID;
            bool hasAction;
            bool isNonTerminal;
            size_t ruleID;
            bool isCaseSensitive;
            String lexeme;

            LexemeTokenDef(void) : ID(0), hasAction(false), isNonTerminal(false), ruleID(0), isCaseSensitive(false) {}
            LexemeTokenDef( const size_t ID, const String& lexeme, const bool hasAction = false, const bool caseSensitive = false )
                : ID(ID)
                , hasAction(hasAction)
                , isNonTerminal(false)
                , ruleID(0)
                , isCaseSensitive(caseSensitive)
                , lexeme(lexeme)
            {
            }

        };

        typedef std::vector<LexemeTokenDef> LexemeTokenDefContainer;
        typedef LexemeTokenDefContainer::iterator LexemeTokenDefIterator;

        typedef std::map<std::string, size_t> LexemeTokenMap;
        typedef LexemeTokenMap::iterator TokenKeyIterator;

        // an instance of a token recognised in the source during pass 1
        struct TokenInst
        {
            size_t NTTRuleID;
            size_t tokenID;
            size_t line;
            size_t pos;
            bool found;
        };

        typedef std::vector<TokenInst> TokenInstContainer;
        typedef TokenInstContainer::iterator TokenInstIterator;

        // token queue, definitions, rules
        struct TokenState
        {
            TokenInstContainer tokenQue;
            LexemeTokenDefContainer lexemeTokenDefinitions;
            TokenRuleContainer rootRulePath;
            LexemeTokenMap lexemeTokenMap;
        };

        TokenState* mClientTokenState;
        // token state currently being used by the parser
        TokenState* mActiveTokenState;
        // current position in the pass 2 token queue
        mutable size_t mPass2TokenQuePosition;
        size_t mPreviousActionQuePosition;
        size_t mNextActionQuePosition;

        // source text being compiled
        const String* mSource;
        String mSourceName;
        size_t mEndOfSource;

        size_t mCurrentLine;
        size_t mCharPos;
        size_t mErrorCharPos;

        // numeric constants and string labels extracted from the source, keyed by token queue position
        std::map<size_t, float> mConstants;
        std::map<size_t, String> mLabels;
        bool mLabelIsActive;
        size_t mActiveLabelKey;
        bool mNoSpaceSkip;
        bool mNoTerminalToken;
        size_t mInsertTokenID;

        // active context flags used during parsing
        uint mActiveContexts;

        // pass 1: scan the source, turn lexemes into tokens and validate them against the rule paths
        bool doPass1();
        // pass 2: walk the token queue and invoke executeTokenAction() for tokens that have actions
        bool doPass2();

        // implemented by the client compiler: perform the semantic action for a token during pass 2
        virtual void executeTokenAction(const size_t tokenID) = 0;
        // implemented by the client compiler: register its lexeme/token definitions
        virtual void setupTokenDefinitions(void) = 0;

        const TokenInst& getNextToken(const size_t expectedTokenID = 0) const
        {
            skipToken();
            return getCurrentToken(expectedTokenID);
        }
        const TokenInst& getCurrentToken(const size_t expectedTokenID = 0) const;
        bool testNextTokenID(const size_t expectedTokenID) const;

        bool testCurrentTokenID(const size_t expectedTokenID) const
        {
            return mActiveTokenState->tokenQue[mPass2TokenQuePosition].tokenID == expectedTokenID;
        }
        void skipToken(void) const;
        void replaceToken(void);

        float getNextTokenValue(void) const
        {
            skipToken();
            return getCurrentTokenValue();
        }
        float getCurrentTokenValue(void) const;

        const String& getNextTokenLabel(void) const
        {
            skipToken();
            return getCurrentTokenLabel();
        }
        const String& getCurrentTokenLabel(void) const;

        size_t getNextTokenID(void) const { return getNextToken().tokenID; }
        size_t getCurrentTokenID(void) const { return getCurrentToken().tokenID; }

        const String& getNextTokenLexeme(void) const
        {
            skipToken();
            return getCurrentTokenLexeme();
        }
        const String& getCurrentTokenLexeme(void) const;

        size_t getPass2TokenQueCount(void) const;
        size_t getRemainingTokensForAction(void) const;
        void setPass2TokenQuePosition(size_t pos, const bool activateAction = false);
        size_t getPass2TokenQuePosition(void) const { return mPass2TokenQuePosition; }
        bool setNextActionQuePosition(size_t pos, const bool search = false);
        void addLexemeToken(const String& lexeme, const size_t token, const bool hasAction = false, const bool caseSensitive = false);

        // build the client token state from the BNF grammar supplied by getClientBNFGrammer()
        void setClientBNFGrammer(void);

        void findEOL();

        bool isFloatValue(float& fvalue, size_t& charsize) const;
        bool isCharacterLabel(const size_t rulepathIDX);
        bool isLexemeMatch(const String& lexeme, const bool caseSensitive) const;
        bool isEndOfSource() const { return mCharPos >= mEndOfSource; }
        bool positionToNextLexeme();
        bool processRulePath( size_t rulepathIDX);

        void setActiveContexts(const uint contexts){ mActiveContexts = contexts; }

        void skipComments();
        void skipEOL();
        void skipWhiteSpace();

        bool ValidateToken(const size_t rulepathIDX, const size_t activeRuleID);

        void verifyTokenRuleLinks(const String& grammerName);
        void checkTokenActionTrigger(void);
        String getBNFGrammerTextFromRulePath(size_t ruleID, const size_t level = 0);


    private:
        // used for interpreting BNF script
        // keep it as static so that only one structure is created
        // no matter how many times this class is instantiated.
        static TokenState mBNFTokenState;
        // maintain a map of BNF grammars
        typedef std::map<String, TokenState> TokenStateContainer;
        static TokenStateContainer mClientTokenStates;

        void activatePreviousTokenAction(void);
        void initBNFCompiler(void);
        void buildClientBNFRulePaths(void);
        void modifyLastRule(const OperationType pendingRuleOp, const size_t tokenID);
        size_t getClientLexemeTokenID(const String& lexeme, const bool isCaseSensitive = false);
        void extractNonTerminal(const OperationType pendingRuleOp);
        void extractTerminal(const OperationType pendingRuleOp, const bool notoken = false);
        void extractSet(const OperationType pendingRuleOp);
        void extractNumericConstant(const OperationType pendingRuleOp);
        void setConditionalTokenInsert(void);
        String getLexemeText(size_t& ruleID, const size_t level = 0);

    public:

        Compiler2Pass();
        virtual ~Compiler2Pass() {}

        // compile the source text in two passes; returns true if compilation succeeded
        bool compile(const String& source, const String& sourceName);
        // implemented by the client compiler: return the BNF grammar text describing its script syntax
        virtual const String& getClientBNFGrammer(void) = 0;
        // implemented by the client compiler: return the name of its grammar
        virtual const String& getClientGrammerName(void) const = 0;

    };

}

#endif
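Compiler2Pass is an abstract base class: a concrete script compiler derives from it, registers its lexemes in setupTokenDefinitions(), supplies a BNF description of its syntax through getClientBNFGrammer() and getClientGrammerName(), and performs semantic work in executeTokenAction() as pass 2 walks the token queue. The sketch below shows only that wiring; the class name, grammar text, lexeme and token ID are invented for illustration, and the BNF dialect shown is not meant to be exact (see the script compilers shipped with OGRE, such as the material script compiler, for real grammars).

// Illustrative only: a minimal, hypothetical client compiler built on Compiler2Pass.
#include "OgreCompiler2Pass.h"

namespace Ogre {

    class ExampleScriptCompiler : public Compiler2Pass
    {
    public:
        ExampleScriptCompiler() {}

        // BNF grammar describing the client script syntax (hypothetical text,
        // not the exact dialect expected by the BNF compiler)
        virtual const String& getClientBNFGrammer(void)
        {
            static String grammar =
                "<Script> ::= {<SetStatement>}\n"
                "<SetStatement> ::= 'set'\n";
            return grammar;
        }

        virtual const String& getClientGrammerName(void) const
        {
            static String name = "ExampleScript";
            return name;
        }

    protected:
        // token IDs used by this client; the value is arbitrary for this sketch
        enum TokenID { ID_SET = 100 };

        // register the lexemes pass 1 should recognise; true = has an action in pass 2
        virtual void setupTokenDefinitions(void)
        {
            addLexemeToken("set", ID_SET, true);
        }

        // called during pass 2 for each recognised token that has an action
        virtual void executeTokenAction(const size_t tokenID)
        {
            switch (tokenID)
            {
            case ID_SET:
                {
                    // read the numeric argument that follows 'set'
                    float value = getNextTokenValue();
                    (void)value; // a real compiler would apply the value here
                }
                break;
            }
        }
    };
}

// Usage (also illustrative):
//   ExampleScriptCompiler compiler;
//   bool ok = compiler.compile(scriptText, "example.script");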