Example #1
bool CapstoneTokenizer::tokenizePrefix()
{
    bool hasPrefix = true;
    QString prefixText;
    //TODO: look at multiple prefixes on one instruction (https://github.com/aquynh/capstone/blob/921904888d7c1547c558db3a24fa64bcf97dede4/arch/X86/X86DisassemblerDecoder.c#L540)
    switch(_cp.x86().prefix[0])
    {
    case X86_PREFIX_LOCK:
        prefixText = "lock";
        break;
    case X86_PREFIX_REP:
        prefixText = "rep";
        break;
    case X86_PREFIX_REPNE:
        prefixText = "repne";
        break;
    default:
        hasPrefix = false;
    }

    if(hasPrefix)
    {
        addToken(TokenType::Prefix, prefixText);
        addToken(TokenType::Space, " ");
    }

    return true;
}
Example #2
GRMSGEXPORT void coGRObjSetConnectionMsg::initClass(const char *connPoint1, const char *connPoint2, int connected, int enabled, const char *obj_name2)
{
    connPoint1_ = new char[strlen(connPoint1) + 1];
    strcpy(connPoint1_, connPoint1);
    addToken(connPoint1);

    connPoint2_ = new char[strlen(connPoint2) + 1];
    strcpy(connPoint2_, connPoint2);
    addToken(connPoint2);

    connected_ = connected;
    char str[1024];
    sprintf(str, "%d", connected_);
    addToken(str);

    enabled_ = enabled;
    sprintf(str, "%d", enabled_);
    addToken(str);

    obj_name2_ = new char[strlen(obj_name2) + 1];
    strcpy(obj_name2_, obj_name2);
    addToken(obj_name2_);

    is_valid_ = 1;
}
Example #3
bool CapstoneTokenizer::Tokenize(duint addr, const unsigned char* data, int datasize, InstructionToken & instruction)
{
    _inst = InstructionToken();

    _success = _cp.Disassemble(addr, data, datasize);
    if(_success)
    {
        if(!tokenizeMnemonic())
            return false;

        for(int i = 0; i < _cp.OpCount(); i++)
        {
            if(i)
            {
                addToken(TokenType::Comma, ",");
                if(_bArgumentSpaces)
                    addToken(TokenType::ArgumentSpace, " ");
            }
            if(!tokenizeOperand(_cp[i]))
                return false;
        }
    }
    else
        addToken(TokenType::Uncategorized, "???");

    instruction = _inst;

    return true;
}
Example #4
/**
 * @brief TokenMenu::on_tokenButton_clicked Displays a context menu which lets the user choose
 * between the creation of a custom TokenItem and the creation of a TokenItem possessing a
 * GameObject.
 */
void TokenMenu::on_tokenButton_clicked()
{
    QMenu menu;

    QAction* createCustomToken = menu.addAction(tr("Créer un jeton personnalisé"));
    QAction* createGameObject = menu.addAction(tr("Créer un élément de jeu"));

    QAction* selectedItem = menu.exec(QCursor::pos());
    if (selectedItem == createCustomToken) {
        QString text = ui->inputSearchField->text();
        addToken(text);
    }
    else if (selectedItem == createGameObject) {
        QString text = ui->inputSearchField->text();
        GameObjectDialog gameObjectDlg;
        gameObjectDlg.setNameToken(text);
        gameObjectDlg.exec();
        GameObject *gameObject = gameObjectDlg.getGameObject();
        int size = gameObjectDlg.getSize();
        QString path = gameObjectDlg.getPath();
        gameObjectDlg.close();

        if (gameObject != NULL) {
            // Push the game object into the database
            RepositoryManager::s_GameObjectRepository.insertGameObject(gameObject);

            addToken(gameObject->getName(), path, size, false, gameObject);
        }
    }
}
Example #5
void WordNgrams::addTokens()
{
	// get token string from input file
	string & inFileName = getInFileName();
	FILE * fp = inFileName.length() > 0 ? fopen( inFileName.c_str(), "r" ) : stdin;
	if ( !fp )
	{
		printf("Can not find file %s, use stdio as input.\n", inFileName.c_str() );
		fp = stdin;
	}

	int count = 0;
	int c; // int (not char) so that EOF can be detected reliably
	bool isSpecialChar = false;
	string token;
	token.reserve(256);
	while ( ( c = fgetc( fp ) ) != EOF )
	{
		if ( isDelimiter( c ) || isStopChar ( c ) )
		{
			if ( !isSpecialChar && token.length() >0 )
			{
				addToken( token );
				token.clear(); // empty() only tests for emptiness; clear() actually resets the token
				++count;
				isSpecialChar = true;
			}
			else
			{
				isSpecialChar = false;
			}

		}
		else
		{
			token += (char) c; // std::string has no append(char) overload
			isSpecialChar = false;
		}
	}
	if ( token.length() > 0 )
	{
		++count;
		addToken( token );
	}
	// Special processing needs to be done if there are fewer than NGRAM_N tokens in the whole input text.
	if ( count < this->getN() )
	{
		preParse( count );
	}

	/*	int padding = ngramN - count % ngramN;

	for ( int i=0; i< padding; i++)
	{
	addToken( "_" );
	}
	*/
	if ( fp != stdin )
		fclose( fp );
}
Example #6
void CapstoneTokenizer::addMemoryOperator(char operatorText)
{
    if(_bMemorySpaces)
        addToken(TokenType::MemoryOperatorSpace, " ");
    QString text;
    text += operatorText;
    addToken(TokenType::MemoryOperator, text);
    if(_bMemorySpaces)
        addToken(TokenType::MemoryOperatorSpace, " ");
}
Example #7
bool CapstoneTokenizer::tokenizeMnemonic()
{
    auto type = TokenType::MnemonicNormal;
    auto id = _cp.GetId();
    if(_cp.InGroup(CS_GRP_CALL))
        type = TokenType::MnemonicCall;
    else if(_cp.InGroup(CS_GRP_RET))
        type = TokenType::MnemonicRet;
    else if(_cp.InGroup(CS_GRP_JUMP) || _cp.IsLoop())
    {
        switch(id)
        {
        case X86_INS_JMP:
        case X86_INS_LOOP:
            type = TokenType::MnemonicUncondJump;
            break;
        default:
            type = TokenType::MnemonicCondJump;
            break;
        }
    }
    else if(_cp.IsNop())
        type = TokenType::MnemonicNop;
    else if(_cp.IsInt3())
        type = TokenType::MnemonicInt3;
    else
    {
        switch(id)
        {
        case X86_INS_PUSH:
        case X86_INS_POP:
            type = TokenType::MnemonicPushPop;
            break;
        default:
            break;
        }
    }
    QString mnemonic = QString(_cp.Mnemonic().c_str());
    addToken(type, mnemonic);
    if(_bTabbedMnemonic)
    {
        int spaceCount = 7 - mnemonic.length();
        if(spaceCount > 0)
        {
            for(int i = 0; i < spaceCount; i++)
                addToken(TokenType::Space, " ");
        }
    }
    addToken(TokenType::Space, " ");
    return true;
}
Example #8
Token *tokenize(Buffer *input)
{
	Lexer lexer;       // allocate the lexer on the stack; the original dereferenced an uninitialized pointer
	Lexer *l = &lexer;
	l->source = input;
	l->start = 0;
	l->pos = 0;
	l->head = NULL;
	l->tail = NULL;

	while (peek(l) != '\0')
	{
		l->start = l->pos;

		char c = next(l);
		switch(c)
		{
			case '(': addToken(l, TOKEN_LEFT_PAREN); break;
			case ')': addToken(l, TOKEN_RIGHT_PAREN); break;
			case '%': addToken(l, TOKEN_MOD); break;
			case '.': addToken(l, TOKEN_DOT); break;
			case '/': addToken(l, TOKEN_SLASH); break;
			case '*': addToken(l, TOKEN_STAR); break;
			case '=': addToken(l, TOKEN_EQ); break;
			case '-': addToken(l, TOKEN_MINUS); break;
			case '+': addToken(l, TOKEN_PLUS); break;
			case ' ': readWhitespace(l); break;
			case '"': readString(l); break;
			case '\n': addToken(l, TOKEN_LINE); break;

			default:
				if (isName(c)) 
				{
					readName(l);
				}
				else if (isDigit(c))
				{
					readNumber(l); // pass the lexer, matching readName/readString above
				}
				else 
				{
					emitToken(l, TOKEN_ERROR);
				}
				break;
		}
	}

	l->start = l->pos;
	emitToken(l, TOKEN_EOF);

	return l->head;
}
Example #9
 static void addToken (Array<SyntaxToken>& dest, const String& text,
                       const int length, const int type)
 {
     if (length > 1000)
     {
         // subdivide very long tokens to avoid unwieldy glyph sequences
         addToken (dest, text.substring (0, length / 2), length / 2, type);
         addToken (dest, text.substring (length / 2), length - length / 2, type);
     }
     else
     {
         dest.add (SyntaxToken (text, length, type));
     }
 }
Example #10
static inline void tokenize(const char *data, int size, Map<Token, int> &tokens)
{
    int tokenEnd = -1;
    for (int i=size - 1; i>=0; --i) {
        if (symbolChar(data[i])) {
            if (tokenEnd == -1)
                tokenEnd = i;
        } else if (tokenEnd != -1) {
            addToken(data, i + 1, tokenEnd - i, tokens);
            tokenEnd = -1;
        }
    }
    if (tokenEnd != -1)
        addToken(data, 0, tokenEnd + 1, tokens);
}
Example #11
void LineParser::createToken(char* pszLast,
                             char* pszCursor)
{
  DEBUG(("LineParser::createToken(%08xh,%08xh)\n",
         pszLast,
         pszCursor));
  
  // determine length of cleaned token
  int iLength = cleanToken(pszLast,
                           pszCursor,
                           NULL);
  
  
  // Note: iLength == 1 means we need space
  // for the terminating zero character only.
  // That's nonsense :)
  if (iLength > 1)
  {
    char* pszToken = new char[ iLength ];
    if (NULL != pszToken)
    {
      cleanToken(pszLast,
                 pszCursor,
                 pszToken);

      // add the token to the list
      addToken( pszToken );
    }
  }
}
Example #12
    static void createTokens (int startPosition, const String& lineText,
                              CodeDocument::Iterator& source,
                              CodeTokeniser& tokeniser,
                              Array <SyntaxToken>& newTokens)
    {
        CodeDocument::Iterator lastIterator (source);
        const int lineLength = lineText.length();

        for (;;)
        {
            int tokenType = tokeniser.readNextToken (source);
            int tokenStart = lastIterator.getPosition();
            int tokenEnd = source.getPosition();

            if (tokenEnd <= tokenStart)
                break;

            tokenEnd -= startPosition;

            if (tokenEnd > 0)
            {
                tokenStart -= startPosition;
                const int start = jmax (0, tokenStart);
                addToken (newTokens, lineText.substring (start, tokenEnd),
                          tokenEnd - start, tokenType);

                if (tokenEnd >= lineLength)
                    break;
            }

            lastIterator = source;
        }

        source = lastIterator;
    }
Example #13
void HTokenizer::Label(QList<Token_t> *Tok,const DISASM *Instr){
    // label is in the current database with key "current address"
    if( TIA::instance()->wLabels->exists(Instr->VirtualAddr)  ){
        addToken(Tok,T_LABEL,TIA::instance()->wLabels->get(Instr->VirtualAddr),Instr->VirtualAddr);
    }

}
Example #14
void HTokenizer::Prefix(QList<Token_t> *Tok,const DISASM *Instr){
    if(Instr->Prefix.Number){
        QStringList Mne = QString("%1").arg(Instr->CompleteInstr).split(" ");
        addToken(Tok,T_PREFIX,Mne[0]);

    }

}
Example #15
void DOMSettableTokenList::addInternal(const AtomicString& token)
{
    m_value = addToken(m_value, token);
    if (m_tokens.isNull())
        m_tokens.set(token, false);
    else
        m_tokens.add(token);
}
Example #16
/**
 * Create a new DGN token if token detection and initialization is successful
 *
 * @param slot      The slot in which a token was detected
 * @param token     Pointer to pointer updated with newly created token structure
 * @return          CKR_OK or any other Cryptoki error code
 */
static int newDGNToken(struct p11Slot_t *slot, struct p11Token_t **token)
{
	static struct p11TokenDriver esign_token;
	struct p11Token_t *ptoken;
	struct p11TokenDriver *drv;
	struct p11Slot_t *vslot;
	int rc;

	FUNC_CALLED();

	esign_token = *getStarcosTokenDriver();
	esign_token.name = "3.5ID ECC C1 DGN";
	esign_token.isCandidate = isCandidate;
	esign_token.newToken = newDGNToken;
	esign_token.C_Sign = esign_C_Sign;

	rc = createStarcosToken(slot, &ptoken, &esign_token, &starcosApplications[1]);
	if (rc != CKR_OK)
		FUNC_FAILS(rc, "Base token creation failed");

	rc = addToken(slot, ptoken);
	if (rc != CKR_OK) {
		FUNC_FAILS(rc, "addToken() failed");
	}

	*token = ptoken;

	if (context->caller == CALLER_FIREFOX) {
		FUNC_RETURNS(CKR_OK);
	}

	rc = getVirtualSlot(slot, 0, &vslot);
	if (rc != CKR_OK)
		FUNC_FAILS(rc, "Virtual slot creation failed");

	drv = getDGNTokenDriver();
	rc = createStarcosToken(vslot, &ptoken, drv, &starcosApplications[0]);
	if (rc != CKR_OK)
		FUNC_FAILS(rc, "Token creation failed");

	rc = addToken(vslot, ptoken);
	if (rc != CKR_OK)
		FUNC_FAILS(rc, "addToken() failed");

	FUNC_RETURNS(CKR_OK);
}
Example #17
CaseModifier::CaseModifier()
    : Modifier(i18n("Change Case"), i18n("change the case of a renaming option"))
{
    setUseTokenMenu(true);

    addToken(QLatin1String("{upper}"),      i18n("Convert to uppercase"),
             i18n("Uppercase"));

    addToken(QLatin1String("{lower}"),      i18n("Convert to lowercase"),
             i18n("Lowercase"));

    addToken(QLatin1String("{firstupper}"), i18n("Convert the first letter of each word to uppercase"),
             i18n("First Letter of Each Word Uppercase"));

    QRegExp reg(QLatin1String("\\{(firstupper|lower|upper)\\}"));
    reg.setMinimal(true);
    setRegExp(reg);
}
Example #18
Tokenizer::Tokenizer(std::istream& istr):
	StreamTokenizer(istr)	
{
	addToken(new OperatorToken);
	addToken(new IdentifierToken);
	addToken(new StringLiteralToken);
	addToken(new CharLiteralToken);
	addToken(new NumberLiteralToken);
	addToken(new CommentToken, false);
	addToken(new PreprocessorToken);
	addToken(new WhitespaceToken);
}
Example #19
RangeModifier::RangeModifier()
    : Modifier(i18n("Range..."), i18n("Add only a specific range of a renaming option"),
               SmallIcon("measure"))
{
    addToken("{range:||from||,||to||}", i18n("Extract a specific range (if '||to||' is omitted, go to the end of string)"));

    QRegExp reg("\\{range(:(\\d+)(,((-1|\\d+))?)?)\\}");
    reg.setMinimal(true);
    setRegExp(reg);
}
Example #20
DatabaseOption::DatabaseOption()
    : Option(i18n("Database..."), i18n("Add information from the database"), SmallIcon("server-database"))
{
    addToken("[db:||key||]", i18n("Add database information"));
    QRegExp reg("\\[db(:(.*))\\]");
    reg.setMinimal(true);
    setRegExp(reg);

    registerKeysCollection();
}
Example #21
static void readName(Lexer *l)
{
	while (isName(peek(l)) || isDigit(peek(l))) next(l);

	TokenType type = TOKEN_NAME;

	if(isKeyword(l, "let")) type = TOKEN_LET;

	addToken(l, type);
}
Example #22
ReplaceModifier::ReplaceModifier()
    : Modifier(i18nc("Replace text", "Replace..."), i18n("Replace text in a renaming option"),
               SmallIcon("document-edit"))
{
    addToken("{replace:\"||old||\", \"||new||\",||options||}",
             i18n("Replace text (||options||: ||r|| = regular expression, ||i|| = ignore case)"));

    QRegExp reg("\\{replace(:\"(.*)\",\"(.*)\"(,(r|ri|ir|i))?)\\}");
    reg.setMinimal(true);
    setRegExp(reg);
}
Example #23
RemoveDoublesModifier::RemoveDoublesModifier()
    : Modifier(i18n("Remove Doubles"),
               i18n("Remove duplicate words"),
               "edit-copy")
{
    addToken("{removedoubles}", description());

    QRegExp reg("\\{removedoubles\\}");
    reg.setMinimal(true);
    setRegExp(reg);
}
Example #24
struct tokenList* tokenizer(char *file)
{
	int i = 0, line = 1;
	struct tokenList *tokens = NULL;
	struct token *token = NULL;

	do
	{
		tokens = (struct tokenList*) calloc((int)strlen(file) + 1, sizeof(struct tokenList));
	}
	while (tokens == NULL);

	while (file[i] > 0 && i < strlen(file))
	{
		token = (struct token*) calloc(1,sizeof(struct token));

		int separator = -1;

		if (file[i] == '\'')
			separator = nextSeparator(file+i,'c');
		else if (file[i] == '\"')
			separator = nextSeparator(file+i,'s');
		else if (isalpha(file[i]))
			separator = nextSeparator(file+i,'i');
		else if (isdigit(file[i]))
			separator = nextSeparator(file+i,'n');
		else
			separator = nextSeparator(file+i,'t');

		do
		{
			token->text = (char*) calloc(separator+1, sizeof(char));
		} while (token->text == NULL);

		strncat(token->text, file+i, separator);
		token->text[separator] = '\0';
		token->line = line;

		//printf("|(%d)%s/\n",j, token->text);

		if (token->text[(int)strlen(token->text) - 1] == '\n')
			line++;

		if (!strcmp(token->text," ") || !strcmp(token->text,"\n") || !strcmp(token->text,"\t"))
//			k++;
			free(token);
		else
			addToken(&tokens,token);

		i += separator;
	}
	return tokens;
}
Example #25
void Scanner::number()
{
	while (isDigit(peek())) advance();

	if (peek() == '.' && isDigit(peekNext()))
	{
		advance();
		while (isDigit(peek())) advance();
	}

	addToken(NUMBER, source_.substr(start_, current_ - start_));
}
Example #26
bool CapstoneTokenizer::tokenizeImmOperand(const cs_x86_op & op)
{
    duint value = duint(op.imm);
    auto valueType = TokenType::Value;
    if(_cp.InGroup(CS_GRP_JUMP) || _cp.InGroup(CS_GRP_CALL) || _cp.IsLoop())
    {
        valueType = TokenType::Address;
    }
    auto tokenValue = TokenValue(op.size, value);
    addToken(valueType, printValue(tokenValue, true, _maxModuleLength), tokenValue);
    return true;
}
Example #27
void Scanner::identifier()
{
	while (isAlphaNumeric(peek()))
	{
		advance();
	}
	auto text = source_.substr(start_, current_- start_);

	auto it = keywords.find(text);
	TokenType type = IDENTIFIER;
	if (it != keywords.end()) type = it->second;
	addToken(type);
}
Example #28
    bool update (CodeDocument& codeDoc, int lineNum,
                 CodeDocument::Iterator& source,
                 CodeTokeniser* tokeniser, const int tabSpaces,
                 const CodeDocument::Position& selStart,
                 const CodeDocument::Position& selEnd)
    {
        Array <SyntaxToken> newTokens;
        newTokens.ensureStorageAllocated (8);

        if (tokeniser == nullptr)
        {
            const String line (codeDoc.getLine (lineNum));
            addToken (newTokens, line, line.length(), -1);
        }
        else if (lineNum < codeDoc.getNumLines())
        {
            const CodeDocument::Position pos (codeDoc, lineNum, 0);
            createTokens (pos.getPosition(), pos.getLineText(),
                          source, *tokeniser, newTokens);
        }

        replaceTabsWithSpaces (newTokens, tabSpaces);

        int newHighlightStart = 0;
        int newHighlightEnd = 0;

        if (selStart.getLineNumber() <= lineNum && selEnd.getLineNumber() >= lineNum)
        {
            const String line (codeDoc.getLine (lineNum));

            CodeDocument::Position lineStart (codeDoc, lineNum, 0), lineEnd (codeDoc, lineNum + 1, 0);
            newHighlightStart = indexToColumn (jmax (0, selStart.getPosition() - lineStart.getPosition()),
                                               line, tabSpaces);
            newHighlightEnd = indexToColumn (jmin (lineEnd.getPosition() - lineStart.getPosition(), selEnd.getPosition() - lineStart.getPosition()),
                                             line, tabSpaces);
        }

        if (newHighlightStart != highlightColumnStart || newHighlightEnd != highlightColumnEnd)
        {
            highlightColumnStart = newHighlightStart;
            highlightColumnEnd = newHighlightEnd;
        }
        else if (tokens == newTokens)
        {
            return false;
        }

        tokens.swapWith (newTokens);
        return true;
    }
Example #29
/**
 * $ANTLR start xpath_expr
 * /home/cross/workspace/djondb/db/grammars/filter_expression.g:137:1: xpath_expr returns [BaseExpression* val] : XPATH ;
 */
static BaseExpression*
xpath_expr(pfilter_expressionParser ctx)
{
    BaseExpression* val;


    pANTLR3_COMMON_TOKEN    XPATH7;

    /* Initialize rule variables
     */

    XPATH7       = NULL;

    {
        // /home/cross/workspace/djondb/db/grammars/filter_expression.g:138:2: ( XPATH )
        // /home/cross/workspace/djondb/db/grammars/filter_expression.g:138:4: XPATH
        {
            XPATH7 = (pANTLR3_COMMON_TOKEN) MATCHT(XPATH, &FOLLOW_XPATH_in_xpath_expr280);
            if  (HASEXCEPTION())
            {
                goto rulexpath_exprEx;
            }


            {

                char* text = (char*)(XPATH7->getText(XPATH7))->chars;
                SimpleExpression* result = new SimpleExpression(text);
                addToken(text);

                val = result;
            }


        }

    }

    // This is where rules clean up and exit
    //
    goto rulexpath_exprEx; /* Prevent compiler warnings */
    rulexpath_exprEx: ;



    return val;
}
Example #30
// Gets the directive's arguments
struct linkedListToken * getDirArguments(FILE * in, struct linkedListToken * end, int line)
{
  struct token * t = malloc(sizeof(struct token));
  char * value = malloc(sizeof(char)*MAX_VALUE_SIZE);
  
  fscanf(in, " %s", value);
   
  if(value[strlen(value)-1] == ',')
    value[strlen(value)-1] = '\0';

  t->type = ADDR;
  t->attributes.string = value;
  end = addToken(end, t, line);

  return end;
}