void IWORKStyleRefContext::endOfElement()
{
  const boost::optional<std::string> dummyIdent;
  const boost::optional<IWORKPropertyMap> dummyProps;

  // TODO: need to get the style
  switch (m_id)
  {
  case IWORKToken::NS_URI_SF | IWORKToken::cell_style_ref :
    getCollector()->collectStyle(IWORKStylePtr_t(), m_anonymous);
    break;
  case IWORKToken::NS_URI_SF | IWORKToken::characterstyle_ref :
  {
    IWORKStylePtr_t style;
    if (getRef())
    {
      const IWORKStyleMap_t::const_iterator it = getDictionary().m_characterStyles.find(get(getRef()));
      if (getDictionary().m_characterStyles.end() != it)
        style = it->second;
    }
    getCollector()->collectStyle(style, m_anonymous);
    break;
  }
  case IWORKToken::NS_URI_SF | IWORKToken::liststyle_ref :
    getCollector()->collectStyle(IWORKStylePtr_t(), m_anonymous);
    break;
  case IWORKToken::NS_URI_SF | IWORKToken::paragraphstyle_ref :
  {
    IWORKStylePtr_t style;
    if (getRef())
    {
      const IWORKStyleMap_t::const_iterator it = getDictionary().m_paragraphStyles.find(get(getRef()));
      if (getDictionary().m_paragraphStyles.end() != it)
        style = it->second;
    }
    getCollector()->collectStyle(style, m_anonymous);
    break;
  }
  case IWORKToken::NS_URI_SF | IWORKToken::vector_style_ref :
    getCollector()->collectStyle(IWORKStylePtr_t(), m_anonymous);
    break;
  default :
    break;
  }
}
already_AddRefd<iface::cellml_services::MaLaESTransform>
CDA_DictionaryGenerator::getMalTransform() 
  throw(std::exception&)
{
  RETURN_INTO_OBJREF(malDict, iface::cellml_services::LanguageDictionary, 
                     getDictionary(L"http://www.cellml.org/CeLEDS/MaLaES/1.0#"));
  if (malDict == NULL)
    return NULL;

  uint32_t i;
  std::wstring MalString(L"");

  RETURN_INTO_OBJREF(entries, iface::dom::NodeList,
                     malDict->getMappings());

  for (i=0; i < entries->length(); i++)
  {
    RETURN_INTO_OBJREF(currentNode, iface::dom::Node, entries->item(i));
    DECLARE_QUERY_INTERFACE_OBJREF(currentEl, currentNode, dom::Element);
    if (currentEl == NULL)
      continue;

    // Get attributes
    RETURN_INTO_WSTRING(keyName,
                       currentEl->getAttribute(L"keyname"));
    RETURN_INTO_WSTRING(precedence,
                       currentEl->getAttribute(L"precedence"));

    // Create Mal string
    MalString.append(keyName);
    MalString.append(L": ");
    if (precedence != L"")
    {
      MalString.append(L"#prec[");
      MalString.append(precedence);
      MalString.append(L"]");
    }

    RETURN_INTO_WSTRING(tc, getTextContents(currentNode));
    RETURN_INTO_WSTRING(ptc, padMalString(tc.c_str()));
    MalString.append(ptc);
    MalString.append(L"\r\n");
  }

  // create transformer
  RETURN_INTO_OBJREF(mb, iface::cellml_services::MaLaESBootstrap, 
                     CreateMaLaESBootstrap());

  try
  {
    return mb->compileTransformer(MalString.c_str());
  }
  catch (...)
  {
    return NULL;
  }
}
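For illustration, a mapping element with keyname "divide", precedence "1000", and padded text content " (#expr1/#expr2)" would contribute the following line to MalString (the values are hypothetical; real entries come from the CeLEDS MaLaES dictionary):

divide: #prec[1000] (#expr1/#expr2)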
void KEY1ParserState::storeCurrentPlaceholder()
{
  if (!m_isBodyContentOpened && !m_isTitleContentOpened)
    return;
  bool isTitle=m_isTitleContentOpened;
  m_isBodyContentOpened = m_isTitleContentOpened = false;
  if (!m_enableCollector)
    return;
  if (m_currentText && !m_currentText->empty())
  {
    if (isTitle && getDictionary().getTitlePlaceholder())
      getDictionary().getTitlePlaceholder()->m_text=m_currentText;
    else if (!isTitle && getDictionary().getBodyPlaceholder())
      getDictionary().getBodyPlaceholder()->m_text=m_currentText;
    else
    {
      ETONYEK_DEBUG_MSG(("KEY1ParserState::storeCurrentPlaceholder: oops can not store the text zone\n"));
    }
  }
  m_currentText.reset();
}
Example #4
int main(int argc, char *argv[])
{
    QApplication a(argc, argv);

    QVector<train_tweet> train_sample=getTrainSample("/Users/v144/Documents/workspace/C++/build-tweets-Desktop_Qt_5_6_0_clang_64bit-Debug/bank_train_2016.xml");

    QMap<QString,dictionaryWord> dict=getDictionary(train_sample);
    QVector<train_tweet> etalon_sample=getEtalonSample("/Users/v144/Documents/workspace/C++/build-tweets-Desktop_Qt_5_6_0_clang_64bit-Debug/banks_test_etalon.xml");
    isValidDictionary(dict,etalon_sample);

    return a.exec();
}
 bool BencodeParser::parse(const QByteArray &content)
 {
     if (content.isEmpty()) {
         errString = QString("No content");
         return false;
     }

     this->content = content;
     index = 0;
     infoStart = 0;
     infoLength = 0;
     return getDictionary(&dictionaryValue);
 }
vector<unsigned char> SuffixModelTrieBinaryFileCreator::binarySuffixModelNode(SuffixModelNode * _node, int _parentId)
{
    vector<unsigned char> _result;
	_result.push_back(getDictionary()->getDictionaryTools()->wcharToChar(_node->getCharacter()));
    vector<unsigned char> binParentId = getDictionary()->getDictionaryTools()->intToCharVector3(_parentId);
	_result.insert(_result.end(), binParentId.begin(), binParentId.end());
	
	if (max_frequency_size < (int) _node->getFeatureFrequencyMap().size())
	{
		max_frequency_size = (int) _node->getFeatureFrequencyMap().size();
		wcout << "update max_frequency_size = " << max_frequency_size << endl;
	}

	map<int, int> _frequencyMap = _node->getFeatureFrequencyMap();
    vector<unsigned char> binSizeOfFrequencyMap = getDictionary()->getDictionaryTools()->intToCharVector2((int) _frequencyMap.size());
	_result.insert(_result.end(), binSizeOfFrequencyMap.begin(), binSizeOfFrequencyMap.end());
	if (!_frequencyMap.size())
	{
		return _result;
	}
	map<int, int>::iterator iter;
	for (iter = _frequencyMap.begin(); iter != _frequencyMap.end(); ++iter)
	{
		int _featureId = iter->first;
        vector<unsigned char> binFeatureId = getDictionary()->getDictionaryTools()->intToCharVector3(_featureId);
		_result.insert(_result.end(), binFeatureId.begin(), binFeatureId.end());
		int _frequency = iter->second;
		
		if (max_frequency < _frequency)
		{
			max_frequency = _frequency;
			wcout << "update max_frequency = " << max_frequency << endl;
		}
		
        vector<unsigned char> binFrequency = getDictionary()->getDictionaryTools()->intToCharVector2(_frequency);
		_result.insert(_result.end(), binFrequency.begin(), binFrequency.end());
	}
	return _result;
}
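The intToCharVector2/intToCharVector3 helpers used above, together with the matching buffer[offset] * 65536 + buffer[offset + 1] * 256 + buffer[offset + 2] reads in the binary file readers further down, amount to fixed-width big-endian packing. A minimal standalone sketch of equivalent helpers (the names are made up, not the project's DictionaryTools API):

#include <vector>

// Pack an int into three big-endian bytes (values above 0xFFFFFF are truncated).
static std::vector<unsigned char> packUInt24(int value)
{
    std::vector<unsigned char> bytes;
    bytes.push_back(static_cast<unsigned char>((value >> 16) & 0xFF));
    bytes.push_back(static_cast<unsigned char>((value >> 8) & 0xFF));
    bytes.push_back(static_cast<unsigned char>(value & 0xFF));
    return bytes;
}

// Decode three big-endian bytes, mirroring the readers' multiply-and-add arithmetic.
static int unpackUInt24(const unsigned char *p)
{
    return (p[0] << 16) | (p[1] << 8) | p[2];
}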
/**
 * @brief Saves the Python dictionary containing the user-language_pair settings to a file
 *
 * pythonInit() must have been called before or an error will occur (the module is not loaded)
 */
void saveDictionary(void){
    PyObject *pFunc, *pArgs;

    if(getDictionary() != Py_None && getDictionary() != NULL){
        if (files_module != NULL) {
            pFunc = PyObject_GetAttrString(files_module, "save");

            if (pFunc) {
                pArgs = PyTuple_New(0);

                PyObject_CallObject(pFunc, pArgs);
            }
            else {
                return;
            }
            Py_XDECREF(pFunc);
        }
        else {
            notify_error("Module: \'apertiumFiles\' is not loaded");
            return;
        }
    }
}
/**
 * @brief Returns the language stored for a user in the preferences file
 *
 * pythonInit() must have been called before or an error will occur (the module is not loaded)
 * @param user Name of the user to look for
 * @param direction Direction to look for the user in ("incoming" or "outgoing")
 * @param key Language to look for ("source" or "target")
 * @return The language if the call was successful, or "None" otherwise
 */
char* dictionaryGetUserLanguage(const char *user, const char* direction, const char* key){
    char* user_lang;
    PyObject *dictionary;

    if((dictionary = getDictionary()) == Py_None){
        return "None";
    }

    user_lang = PyBytes_AsString(PyDict_GetItemString(
                    PyDict_GetItemString(PyDict_GetItemString(dictionary, direction), user),key));

    Py_XDECREF(dictionary);
    return user_lang;
}
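A hypothetical caller, assuming pythonInit() (mentioned in the comment above) has already been run; the user name is a made-up value:

#include <stdio.h>
#include <string.h>

static void printIncomingSourceLanguage(void)
{
    /* dictionaryGetUserLanguage() is the function shown above; "None" is its failure sentinel. */
    char *lang = dictionaryGetUserLanguage("alice@example.org", "incoming", "source");
    if (strcmp(lang, "None") != 0)
        printf("incoming source language: %s\n", lang);
}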
 bool BencodeParser::getDictionary(QMap<QByteArray, QVariant> *dictionary)
 {
     const int contentSize = content.size();
     if (content.at(index) != 'd')
         return false;

     QMap<QByteArray, QVariant> tmp;
     ++index;

     do {
         if (content.at(index) == 'e') {
             ++index;
             break;
         }

         QByteArray key;
         if (!getByteString(&key))
             break;

         if (key == "info")
           infoStart = index;

         qint64 number;
         QByteArray byteString;
         QList<QVariant> tmpList;
         QMap<QByteArray, QVariant> childDictionary;

         if (getInteger(&number))
             tmp.insert(key, number);
         else if (getByteString(&byteString))
             tmp.insert(key, byteString);
         else if (getList(&tmpList))
             tmp.insert(key, tmpList);
         else if (getDictionary(&childDictionary))
             tmp.insert(key, QVariant::fromValue<QMap<QByteArray, QVariant> >(childDictionary));
         else {
             errString = QString("error at index %1").arg(index);
             return false;
         }

         if (key == "info")
           infoLength = index - infoStart;

     } while (index < contentSize);

     if (dictionary)
         *dictionary = tmp;
     return true;
 }
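The infoStart/infoLength bookkeeping records the raw byte span of the top-level "info" dictionary; in BitTorrent, hashing that exact span yields the torrent's info-hash. A minimal sketch of how a caller could use the recorded span (the helper name is an assumption, not part of the class shown):

#include <QByteArray>
#include <QCryptographicHash>

// Hypothetical helper: hashes the raw "info" span recorded while parsing.
QByteArray computeInfoHash(const QByteArray &content, int infoStart, int infoLength)
{
    const QByteArray infoSlice = content.mid(infoStart, infoLength);
    return QCryptographicHash::hash(infoSlice, QCryptographicHash::Sha1);
}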
/**
 * @brief Checks whether the dictionary contains language pair information for a given user
 *
 * pythonInit() must have been called before or an error will occur (the module is not loaded)
 * @param user Name of the user to look for
 * @param direction Direction to look for the user in ("incoming" or "outgoing")
 * @return 1 if there is a language pair for the user, or 0 otherwise
 */
int dictionaryHasUser(const char* user, const char* direction){
    int has_user;
    PyObject *dictionary;

    if((dictionary = getDictionary()) == Py_None){
        return 0;
    }

    has_user = PyDict_Contains(
                PyDict_GetItemString(dictionary, direction),
                PyUnicode_FromString(user));

    Py_XDECREF(dictionary);
    return has_user;
}
Example #11
/*****************************************************************************
 * ~Ndb();
 *
 * Remark:        Disconnect from the database.
 *****************************************************************************/
Ndb::~Ndb()
{
    DBUG_ENTER("Ndb::~Ndb()");
    DBUG_PRINT("enter",("this: 0x%lx", (long) this));

    if (m_sys_tab_0)
        getDictionary()->removeTableGlobal(*m_sys_tab_0, 0);

    assert(theImpl->m_ev_op == 0); // user should return NdbEventOperation's
    for (NdbEventOperationImpl *op= theImpl->m_ev_op; op; op=op->m_next)
    {
        if (op->m_state == NdbEventOperation::EO_EXECUTING && op->stop())
            g_eventLogger.error("stopping NdbEventOperation failed in Ndb destructor");
        op->m_magic_number= 0;
    }
    doDisconnect();

    /* Disconnect from transporter to stop signals from coming in */
    if (theImpl->m_transporter_facade != NULL && theNdbBlockNumber > 0) {
        theImpl->m_transporter_facade->close(theNdbBlockNumber, theFirstTransId);
    }

    delete theEventBuffer;

    releaseTransactionArrays();

    delete []theConnectionArray;
    if(theCommitAckSignal != NULL) {
        delete theCommitAckSignal;
        theCommitAckSignal = NULL;
    }

    delete theImpl;

#ifdef POORMANSPURIFY
#ifdef POORMANSGUI
    ndbout << "cnewSignals=" << cnewSignals << endl;
    ndbout << "cfreeSignals=" << cfreeSignals << endl;
    ndbout << "cgetSignals=" << cgetSignals << endl;
    ndbout << "creleaseSignals=" << creleaseSignals << endl;
#endif
    // Poor mans purifier
    assert(cnewSignals == cfreeSignals);
    assert(cgetSignals == creleaseSignals);
#endif
    DBUG_VOID_RETURN;
}
Example #12
void BasicHDT::fillHeader(string& baseUri) {
	string formatNode = "_:format";
	string dictNode = "_:dictionary";
	string triplesNode = "_:triples";
	string statisticsNode = "_:statistics";
	string publicationInfoNode = "_:publicationInformation";

	uint64_t origSize = header->getPropertyLong(statisticsNode.c_str(), HDTVocabulary::ORIGINAL_SIZE.c_str());

	header->clear();

	// BASE
	header->insert(baseUri, HDTVocabulary::RDF_TYPE, HDTVocabulary::HDT_DATASET);

	// VOID
	header->insert(baseUri, HDTVocabulary::RDF_TYPE, HDTVocabulary::VOID_DATASET);
	header->insert(baseUri, HDTVocabulary::VOID_TRIPLES, triples->getNumberOfElements());
	header->insert(baseUri, HDTVocabulary::VOID_PROPERTIES, dictionary->getNpredicates());
	header->insert(baseUri, HDTVocabulary::VOID_DISTINCT_SUBJECTS, dictionary->getNsubjects());
	header->insert(baseUri, HDTVocabulary::VOID_DISTINCT_OBJECTS, dictionary->getNobjects());
	// TODO: Add more VOID Properties. E.g. void:classes

	// Structure
	header->insert(baseUri, HDTVocabulary::HDT_STATISTICAL_INFORMATION,	statisticsNode);
	header->insert(baseUri, HDTVocabulary::HDT_PUBLICATION_INFORMATION,	publicationInfoNode);
	header->insert(baseUri, HDTVocabulary::HDT_FORMAT_INFORMATION, formatNode);
	header->insert(formatNode, HDTVocabulary::HDT_DICTIONARY, dictNode);
	header->insert(formatNode, HDTVocabulary::HDT_TRIPLES, triplesNode);

	// Dictionary
	dictionary->populateHeader(*header, dictNode);

	// Triples
	triples->populateHeader(*header, triplesNode);

	// Sizes
	header->insert(statisticsNode, HDTVocabulary::ORIGINAL_SIZE, origSize);
	header->insert(statisticsNode, HDTVocabulary::HDT_SIZE, getDictionary()->size() + getTriples()->size());

	// Current time
	time_t now;
	char date[40];
	time(&now);
	struct tm* today = localtime(&now);
	strftime(date, 40, "%Y-%m-%dT%H:%M:%S%z", today);
	header->insert(publicationInfoNode, HDTVocabulary::DUBLIN_CORE_ISSUED, date);
}
/**
 * @brief Removes all the entries from the dictionary related to the given user
 *
 * pythonInit() must have been called before or an error will occur (the module is not loaded)
 * @param user Name of the user whose entries will be removed
 * @return 1 on success, or 0 otherwise
 */
int dictionaryRemoveUserEntries(const char* user){
    PyObject *dictionary;

    if((dictionary = getDictionary()) == Py_None){
        return 0;
    }

    if(PyDict_Contains(PyDict_GetItemString(dictionary, "incoming"), PyUnicode_FromString(user))){
        PyDict_DelItemString(PyDict_GetItemString(dictionary, "incoming"), user);
    }

    if(PyDict_Contains(PyDict_GetItemString(dictionary, "outgoing"), PyUnicode_FromString(user))){
        PyDict_DelItemString(PyDict_GetItemString(dictionary, "outgoing"), user);
    }

    setDictionary(dictionary);
    Py_XDECREF(dictionary);

    return 1;
}
 bool BencodeParser::getList(QList<QVariant> *list)
 {
     const int contentSize = content.size();
     if (content.at(index) != 'l')
         return false;

     QList<QVariant> tmp;
     ++index;

     do {
         if (content.at(index) == 'e') {
             ++index;
             break;
         }

         qint64 number;
         QByteArray byteString;
         QList<QVariant> tmpList;
         QMap<QByteArray, QVariant> dictionary;

         if (getInteger(&number))
             tmp << number;
         else if (getByteString(&byteString))
             tmp << byteString;
         else if (getList(&tmpList))
             tmp << tmpList;
         else if (getDictionary(&dictionary))
             tmp << QVariant::fromValue<QMap<QByteArray, QVariant> >(dictionary);
         else {
             errString = QString("error at index %1").arg(index);
             return false;
         }
     } while (index < contentSize);

     if (list)
         *list = tmp;
     return true;
 }
/*
 * Bind the application dictionary to the cache
 */
static void initializeCacheDictionary()
{
	RsslDataDictionary *dictionary;
	if (cacheInfo.useCache && cacheInfo.cacheHandle)
	{
		dictionary = getDictionary();
		if (dictionary)
		{
			if (rsslPayloadCacheSetDictionary(cacheInfo.cacheHandle, dictionary, cacheInfo.cacheDictionaryKey, &cacheInfo.cacheErrorInfo)
					!= RSSL_RET_SUCCESS)
			{
				printf("Error: Failed to bind RDM Field dictionary to cache.\n\tError (%d): %s\n",
						cacheInfo.cacheErrorInfo.rsslErrorId, cacheInfo.cacheErrorInfo.text);
				cacheInfo.useCache = RSSL_FALSE;
			}
		}
		else
		{
			printf("Error: No RDM Field dictionary for cache.\n");
			cacheInfo.useCache = RSSL_FALSE;
		}
	}
}
Example #16
int Ngn_Hashcat::run ()
{
	Application& app = Application::instance();
	DJob *job = DJob::Instance();
	string cmd;
	string hashcat(dengine->getBinaryPath("hashcat"));
	int eCode(-1);
	
	// clean results
	_results = string("");
	
	// grab settings from job
	setAttackMode(job->getAttackMode());
	setHashType(job->getHashType());
	setMask(job->getMask());
	setRules(job->getRules());
	setDictionary(job->getDictionary());
	setPot("disthc.pot");
	
	// clean pot before working
	File f(getPot());
	if(f.exists()) {
		f.remove();
	}
	
	// setup command prefix (format command takes 7 args max)
	cmd = format("%s -o %s -s %lu -l %u",
		hashcat,
		getPot(),
		job->getChunk(),
		job->getChunkSize()
	);
	
	// Attack modes:
	// 0 = Straight
	// 1 = Combination
	// 2 = Toggle-Case
	// 3 = Brute-force
	// 4 = Permutation
	// 5 = Table-Lookup
	
	// if mask minimum set, apply it
	if(job->getMaskMin())
	{
		cmd = format("%s --pw-min %d",
			cmd,
			job->getMaskMin()
		);
	}
	
	// if mask maximum set, apply it
	if(job->getMaskMax())
	{
		cmd = format("%s --pw-max %d",
			cmd,
			job->getMaskMax()
		);
	}
	
	// discover attack mode and create command to execute
	switch(getAttackMode())
	{
		case 3:
			cmd = format("%s -a3 -m %d %s %s %s",
				cmd,
				getHashType(),
				getFlags(),
				getHashFile(),
				getMask()
			);
			break;
		default:
			// default command uses attack mode 0
			cmd = format("%s -m %d %s %s %s %s",
				cmd,
				getHashType(),
				getFlags(),
				getHashFile(),
				getDictionary(),
				getRules()
			);
	}
	
	if(DEBUG) app.logger().information(format("%%Running command: %s", cmd));

	// check for ghosts, and run as appropriate
	if(isGhost())
	{
		app.logger().information("~~~ A ghost is loose! ~~~");
		app.logger().information("      .-.");
		app.logger().information("     (o o) boo!");
		app.logger().information("    \\| O \\/");
		app.logger().information("      \\   \\ ");
		app.logger().information("       `~~~' ");
	}
	else
	{
		// run hashcat!  :)
		// TODO change this over to use Poco Processes
		eCode = system(cmd.c_str());
		
		// check for results
		if(f.exists()) {
			FileInputStream fis(getPot());
			//std::ifstream in(pot,std::ios::in);
			string line;
			while(fis >> line) {
				_results.append(line + "\n");
			}
		}
		
		// TODO might take this out?
		// see if it's worth it to just display hashcout output during
		// execution
		// if enabled, print pot to screen
//		if(false) {
//			app.logger().information("\n=== Recovered Hashes ===");
//			if(!_results.empty()) app.logger().information(_results);
//			app.logger().information("========================");
//		}
	}
	
	return eCode;
}
void SuffixModelTrieBinaryFileCreator::buildBuffer(void)
{
	wcout << "SuffixModelTrieBinaryFileCreator build buffer ...";
    buffer = new unsigned char[N_GRAM_MAX_BUFFER_SIZE];
	bufferSize = 0;
	wcout << "numberOfNodes = " << numberOfNodes << endl;
    vector<unsigned char> binNumberOfNode = getDictionary()->getDictionaryTools()->intToCharVector3(numberOfNodes);
	writeToBuffer(binNumberOfNode);
	
	queue<SuffixModelNode*> nodeQueue = queue<SuffixModelNode*>();
	nodeQueue.push(root);
	int currentNodeId = -1;
	int _count = 1;
	
	// write root
    vector<unsigned char> binRoot = binarySuffixModelNode(root, currentNodeId);
	writeToBuffer(binRoot);

	while (!nodeQueue.empty())
	{
		SuffixModelNode* currentNode = nodeQueue.front();
		nodeQueue.pop();
		currentNodeId++;

		vector<SuffixModelNode*> childrenNodes = currentNode->getChildrenNode();
		for (int i = 0; i < (int) childrenNodes.size(); ++i)
		{
			SuffixModelNode* childNode = childrenNodes.at(i);
			nodeQueue.push(childNode);
			_count++;
			// write node
            vector<unsigned char> binCurrentNode = binarySuffixModelNode(childNode, currentNodeId);
			writeToBuffer(binCurrentNode);
		}
	}
	wcout << " Node count = " << _count << endl;
	_count = 0;
	// featureIdFrequency
	wcout << "featureIdFrequency.size = " << (int) featureIdFrequency.size() << endl;
    vector<unsigned char> binFeatureIdFrequencySize = getDictionary()->getDictionaryTools()->intToCharVector2((int) featureIdFrequency.size());
	writeToBuffer(binFeatureIdFrequencySize);
	map<int, int>::iterator iter;
	for (iter = featureIdFrequency.begin(); iter != featureIdFrequency.end(); ++iter)
	{
		int _featureId = iter->first;
        vector<unsigned char> binFeatureId = getDictionary()->getDictionaryTools()->intToCharVector3(_featureId);
		writeToBuffer(binFeatureId);
		int _frequency = iter->second;
        vector<unsigned char> binFrequency = getDictionary()->getDictionaryTools()->intToCharVector3(_frequency);
		writeToBuffer(binFrequency);
		_count++;
		if (max_feature_frequency < _frequency)
		{
			max_feature_frequency = _frequency;
			//wcout << "update max_frequency = " << max_frequency << endl;
		}
	}
	wcout << "Done! Count = " << _count << endl;
	wcout << "max_frequency_size = " << max_frequency_size << endl;
	wcout << "max_frequency = " << max_frequency << endl;
	wcout << "max_feature_frequency = " << max_feature_frequency << endl;
}
Example #18
/*****************************************************************************
 * ~Ndb();
 *
 * Remark:        Disconnect from the database.
 *****************************************************************************/
Ndb::~Ndb()
{ 
  DBUG_ENTER("Ndb::~Ndb()");
  DBUG_PRINT("enter",("this: 0x%lx", (long) this));

  if (m_sys_tab_0)
    getDictionary()->removeTableGlobal(*m_sys_tab_0, 0);

  if (theImpl->m_ev_op != 0)
  {
    g_eventLogger->warning("Deleting Ndb-object with NdbEventOperation still"
                           " active");
    printf("this: %p NdbEventOperation(s): ", this);
    for (NdbEventOperationImpl *op= theImpl->m_ev_op; op; op=op->m_next)
    {
      printf("%p ", op);
    }
    printf("\n");
    fflush(stdout);
  }

  assert(theImpl->m_ev_op == 0); // user should return NdbEventOperation's
  for (NdbEventOperationImpl *op= theImpl->m_ev_op; op; op=op->m_next)
  {
    if (op->m_state == NdbEventOperation::EO_EXECUTING && op->stop())
      g_eventLogger->error("stopping NdbEventOperation failed in Ndb destructor");
    op->m_magic_number= 0;
  }
  doDisconnect();

  /* Disconnect from transporter to stop signals from coming in */
  theImpl->close();

  delete theEventBuffer;

  releaseTransactionArrays();

  delete []theConnectionArray;
  delete []theConnectionArrayLast;
  if(theCommitAckSignal != NULL){
    delete theCommitAckSignal; 
    theCommitAckSignal = NULL;
  }

  theImpl->m_ndb_cluster_connection.unlink_ndb_object(this);

  delete theImpl;

#ifdef POORMANSPURIFY
#ifdef POORMANSGUI
  ndbout << "cnewSignals=" << cnewSignals << endl;
  ndbout << "cfreeSignals=" << cfreeSignals << endl;
  ndbout << "cgetSignals=" << cgetSignals << endl;
  ndbout << "creleaseSignals=" << creleaseSignals << endl;
#endif
  // Poor mans purifier
  assert(cnewSignals == cfreeSignals);
  assert(cgetSignals == creleaseSignals);
#endif
  DBUG_VOID_RETURN;
}
/*
 * Publicly visible - used by all non-admin domain handlers to output field lists
 *
 * Decodes the field entry data and prints it out with the help of the
 * dictionary.  Returns success if decoding succeeds or failure if decoding fails.
 * fEntry - The field entry data
 * dIter - The decode iterator
 */
RsslRet decodeFieldEntry(RsslFieldEntry* fEntry, RsslDecodeIterator *dIter)
{
	RsslRet ret = 0;
	RsslDataType dataType = RSSL_DT_UNKNOWN;
	RsslUInt64 fidUIntValue = 0;
	RsslInt64 fidIntValue = 0;
	RsslFloat tempFloat = 0;
	RsslDouble tempDouble = 0;
	RsslReal fidRealValue = RSSL_INIT_REAL;
	RsslEnum fidEnumValue;
	RsslFloat fidFloatValue = 0;
	RsslDouble fidDoubleValue = 0;
	RsslQos fidQosValue = RSSL_INIT_QOS; 
	RsslDateTime fidDateTimeValue;
	RsslState fidStateValue;
	RsslBuffer fidBufferValue;
	RsslBuffer fidDateTimeBuf;
	RsslBuffer fidRealBuf;
	RsslBuffer fidStateBuf;
	RsslBuffer fidQosBuf;
	RsslDataDictionary* dictionary = getDictionary();
	RsslDictionaryEntry* dictionaryEntry = NULL;

	/* get dictionary entry */
	if (!dictionary->entriesArray)
	{
		dumpHexBuffer(&fEntry->encData);
		return RSSL_RET_SUCCESS;
	}
	else
		dictionaryEntry = dictionary->entriesArray[fEntry->fieldId];

	/* return if no entry found */
	if (!dictionaryEntry) 
    {
		printf("\tFid %d not found in dictionary\n", fEntry->fieldId);
		dumpHexBuffer(&fEntry->encData);
		return RSSL_RET_SUCCESS;
    }

	/* print out fid name */
	printf("\t%-20s", dictionaryEntry->acronym.data);
	/* decode and print out fid value */
	dataType = dictionaryEntry->rwfType;
	switch (dataType)
	{
		case RSSL_DT_UINT:
			if ((ret = rsslDecodeUInt(dIter, &fidUIntValue)) == RSSL_RET_SUCCESS)
			{
				printf(""RTR_LLU"\n", fidUIntValue);
			}
			else if (ret != RSSL_RET_BLANK_DATA)
			{
				printf("rsslDecodeUInt() failed with return code: %d\n", ret);
				return ret;
			}
			break;
		case RSSL_DT_INT:
			if ((ret = rsslDecodeInt(dIter, &fidIntValue)) == RSSL_RET_SUCCESS)
			{
				printf(""RTR_LLD"\n", fidIntValue);
			}
			else if (ret != RSSL_RET_BLANK_DATA)
			{
				printf("rsslDecodeInt() failed with return code: %d\n", ret);
				return ret;
			}
			break;
		case RSSL_DT_FLOAT:
			if ((ret = rsslDecodeFloat(dIter, &fidFloatValue)) == RSSL_RET_SUCCESS) 
			{
				printf("%f\n", fidFloatValue);
			}
			else if (ret != RSSL_RET_BLANK_DATA)
			{
				printf("rsslDecodeFloat() failed with return code: %d\n", ret);
				return ret;
			}
			break;
		case RSSL_DT_DOUBLE:
			if ((ret = rsslDecodeDouble(dIter, &fidDoubleValue)) == RSSL_RET_SUCCESS) 
			{
				printf("%f\n", fidDoubleValue);
			}
			else if (ret != RSSL_RET_BLANK_DATA)
			{
				printf("rsslDecodeDouble() failed with return code: %d\n", ret);
				return ret;
			}
			break;
		case RSSL_DT_REAL:
			if ((ret = rsslDecodeReal(dIter, &fidRealValue)) == RSSL_RET_SUCCESS)
			{
				fidRealBuf.data = (char*)alloca(35);
				fidRealBuf.length = 35;
				rsslRealToString(&fidRealBuf, &fidRealValue);
				printf("%s\n", fidRealBuf.data);
			}
			else if (ret != RSSL_RET_BLANK_DATA)
			{
				printf("rsslDecodeReal() failed with return code: %d\n", ret);
				return ret;
			}
			break;
		case RSSL_DT_ENUM:
			if ((ret = rsslDecodeEnum(dIter, &fidEnumValue)) == RSSL_RET_SUCCESS)
			{
				RsslEnumType *pEnumType = getFieldEntryEnumType(dictionaryEntry, fidEnumValue);
				if (pEnumType)
    				printf("%.*s(%d)\n", pEnumType->display.length, pEnumType->display.data, fidEnumValue);
				else
    				printf("%d\n", fidEnumValue);
			}
			else if (ret != RSSL_RET_BLANK_DATA)
			{
				printf("rsslDecodeEnum() failed with return code: %d\n", ret);
				return ret;
			}
			break;
		case RSSL_DT_DATE:
			if ((ret = rsslDecodeDate(dIter, &fidDateTimeValue.date)) == RSSL_RET_SUCCESS)
			{
				fidDateTimeBuf.data = (char*)alloca(30);
				fidDateTimeBuf.length = 30;
				rsslDateTimeToString(&fidDateTimeBuf, RSSL_DT_DATE, &fidDateTimeValue);
				printf("%s\n", fidDateTimeBuf.data);
			}
			else if (ret != RSSL_RET_BLANK_DATA)
			{
				printf("rsslDecodeDate() failed with return code: %d\n", ret);
				return ret;
			}
			break;
		case RSSL_DT_TIME:
			if ((ret = rsslDecodeTime(dIter, &fidDateTimeValue.time)) == RSSL_RET_SUCCESS)
			{
				fidDateTimeBuf.data = (char*)alloca(30);
				fidDateTimeBuf.length = 30;
				rsslDateTimeToString(&fidDateTimeBuf, RSSL_DT_TIME, &fidDateTimeValue);
				printf("%s\n", fidDateTimeBuf.data);
			}
			else if (ret != RSSL_RET_BLANK_DATA)
			{
				printf("rsslDecodeTime() failed with return code: %d\n", ret);
				return ret;
			}
			break;
		case RSSL_DT_DATETIME:
			if ((ret = rsslDecodeDateTime(dIter, &fidDateTimeValue)) == RSSL_RET_SUCCESS)
			{
				fidDateTimeBuf.data = (char*)alloca(50);
				fidDateTimeBuf.length = 50;
				rsslDateTimeToString(&fidDateTimeBuf, RSSL_DT_DATETIME, &fidDateTimeValue);
				printf("%s\n", fidDateTimeBuf.data);
			}
			else if (ret != RSSL_RET_BLANK_DATA)
			{
				printf("rsslDecodeDateTime() failed with return code: %d\n", ret);
				return ret;
			}
			break;
		case RSSL_DT_QOS:
			if((ret = rsslDecodeQos(dIter, &fidQosValue)) == RSSL_RET_SUCCESS) {
				fidQosBuf.data = (char*)alloca(100);
				fidQosBuf.length = 100;
				rsslQosToString(&fidQosBuf, &fidQosValue);
				printf("%s\n", fidQosBuf.data);
			}
			else if (ret != RSSL_RET_BLANK_DATA)
			{
				printf("rsslDecodeQos() failed with return code: %d\n", ret);
				return ret;
			}
			break;
		case RSSL_DT_STATE:
			if((ret = rsslDecodeState(dIter, &fidStateValue)) == RSSL_RET_SUCCESS) {
				int stateBufLen = 80;
				if (fidStateValue.text.data)
					stateBufLen += fidStateValue.text.length;
				fidStateBuf.data = (char*)alloca(stateBufLen);
				fidStateBuf.length = stateBufLen;
				rsslStateToString(&fidStateBuf, &fidStateValue);
				printf("%.*s\n", fidStateBuf.length, fidStateBuf.data);
			}
			else if (ret != RSSL_RET_BLANK_DATA)
			{
				printf("rsslDecodeState() failed with return code: %d\n", ret);
				return ret;
			}
			break;
		
		/*For an example of array decoding, see fieldListEncDec.c*/
		case RSSL_DT_ARRAY:
		break;
		case RSSL_DT_BUFFER:
		case RSSL_DT_ASCII_STRING:
		case RSSL_DT_UTF8_STRING:
		case RSSL_DT_RMTES_STRING:
			if((ret = rsslDecodeBuffer(dIter, &fidBufferValue)) == RSSL_RET_SUCCESS)
			{
				printf("%.*s\n", fidBufferValue.length, fidBufferValue.data);
			}
			else if (ret != RSSL_RET_BLANK_DATA) 
			{
				printf("rsslDecodeBuffer() failed with return code: %d\n", ret);
				return ret;
			}
			break;
		default:
			printf("Unsupported data type (%d) for fid value\n", dataType);
			break;
	}
	if (ret == RSSL_RET_BLANK_DATA)
	{
		printf("<blank data>\n");
	}

	return RSSL_RET_SUCCESS;
}
/**
 * Load dictionary from binary file
 */
void SuffixModelTrieBinaryFileReader::loadFromBinaryFile(string _filePath) 
{
	ifstream f(_filePath.c_str(), ios::in|ios::binary|ios::ate);
	char* buffer;
	if (f.is_open())
	{
		// get size of file
		int size = (int) f.tellg();
		// jump to begin of file
		f.seekg(0, ios::beg);

		//============= Read SuffixModelNode(s) ============================================================================================
		// allocate buffer
		buffer = new char[size];
		// read file
		f.read(buffer, size);
		// close file
		f.close();
		// set offset begin of buffer
		int offset = 0;
		// convert 3 bytes to number of NodeVer3
		numberOfNodes = (unsigned char) buffer[offset] * 65536 + (unsigned char) buffer[offset + 1] * 256 + (unsigned char) buffer[offset + 2];
		offset += 3;
		// read list of SuffixModelNode
		SuffixModelNodeList = vector<SuffixModelNode*>();
		for (int _nodeId = 0; _nodeId < numberOfNodes; ++_nodeId)
		{
			// convert first byte to wchar_t
			wchar_t _character = getDictionary()->getDictionaryTools()->charToWchar((unsigned char) buffer[offset]);
			// convert 3 remaining bytes to _parentId
			int _parentId = (unsigned char) buffer[offset + 1] * 65536 + (unsigned char) buffer[offset + 2] * 256 + (unsigned char) buffer[offset + 3];
			offset += 4;

			// create new SuffixModelNode
			SuffixModelNode* _node = new SuffixModelNode(_character);
			// addChildNode
			if (_parentId >= 0 && _parentId < _nodeId)
			{
				SuffixModelNodeList.at(_parentId)->addChildNode(_node);
			}
			// size of frequency map
			int _sizeOfFrequencyMap = (unsigned char) buffer[offset] * 256 + (unsigned char) buffer[offset + 1];
			offset += 2;
			map<int, int> _featureFrequencyMap = map<int, int>();
			for (int i = 0; i < _sizeOfFrequencyMap; ++i)
			{
				int _featureId = (unsigned char) buffer[offset] * 65536 + (unsigned char) buffer[offset + 1] * 256 + (unsigned char) buffer[offset + 2];
				offset += 3;
				int _frequency = (unsigned char) buffer[offset] * 256 + (unsigned char) buffer[offset + 1];
				offset += 2;
				_featureFrequencyMap.insert(pair<int, int>(_featureId, _frequency));
			}
			_node->setFeatureFrequencyMap(_featureFrequencyMap);
			SuffixModelNodeList.push_back(_node);
		}
		root = SuffixModelNodeList.at(0);
		//============= Read featureIdFrequency ============================================================================================
		// convert 3 bytes to number of SuffixModelNode
		int featureIdFrequencySize = (unsigned char) buffer[offset] * 256 + (unsigned char) buffer[offset + 1];
		offset += 2;
		for (int i = 0; i < featureIdFrequencySize; ++i)
		{
			int _featureId = (unsigned char) buffer[offset] * 65536 + (unsigned char) buffer[offset + 1] * 256 + (unsigned char) buffer[offset + 2];
			offset += 3;
			int _frequency = (unsigned char) buffer[offset] * 65536 + (unsigned char) buffer[offset + 1] * 256 + (unsigned char) buffer[offset + 2];
			offset += 3;
			featureIdFrequency.insert(pair<int, int>(_featureId, _frequency));
		}
		delete[] buffer;
		buffer = NULL;
	}
	else 
	{
		throw FILE_NOT_FOUND_ERROR_CODE;
		//wcout << "### Error ### : loadFromBinaryFile -> Unable to open file";
	}
}
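The repeated multiply-and-add byte arithmetic in these binary readers can be wrapped in a small cursor type; a minimal sketch (not part of the project, the names are made up):

#include <cstddef>

// Minimal big-endian cursor over a byte buffer; readUInt(n) consumes n bytes.
class ByteCursor
{
public:
    ByteCursor(const unsigned char *data, size_t size) : m_data(data), m_size(size), m_pos(0) {}

    // readUInt(3) matches the 3-byte fields above, readUInt(2) the 2-byte ones.
    int readUInt(int numBytes)
    {
        int value = 0;
        for (int i = 0; i < numBytes && m_pos < m_size; ++i)
            value = (value << 8) | m_data[m_pos++];
        return value;
    }

private:
    const unsigned char *m_data;
    size_t m_size;
    size_t m_pos;
};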
/**
 * Load dictionary from binary file
 */
void DictionaryTrieBinaryFileReader::loadFromBinaryFile(string _filePath) 
{
    bool debug = false;
//    wcout << "loadFromBinaryFile" << endl;
	ifstream f(_filePath.c_str(), ios::in|ios::binary|ios::ate);
    unsigned char* buffer;
	if (f.is_open())
	{
		// get size of file
		int size = (int) f.tellg();
		// jump to begin of file
		f.seekg(0, ios::beg);

		//============= Read NodeVer3s ============================================================================================
		// allocate buffer
        buffer = new unsigned char[size];
		// read file
        f.read((char*) buffer, size);
		// close file
		f.close();
		// set offset begin of buffer
		int offset = 0;
		// convert 3 bytes to number of NodeVer3
        numberOfNodes = buffer[offset] * 65536 + buffer[offset + 1] * 256 + buffer[offset + 2];
        if (debug)
        {
            wcout << "numberOfNodes = " << numberOfNodes << endl;
        }
		offset += 3;
		
		// read list of NodeVer3
		DictionaryNodeList = vector<DictionaryNode*>();
		for (int _nodeId = 0; _nodeId < numberOfNodes; ++_nodeId)
		{
			// convert first byte to wchar_t
            wchar_t _character = getDictionary()->getDictionaryTools()->charToWchar(buffer[offset]);
			// convert 3 remaining bytes to _parentId
            int _parentId = buffer[offset + 1] * 65536 + buffer[offset + 2] * 256 + buffer[offset + 3];
			offset += 4;

//            if (_nodeId == 35627)
//            {
//                wcout << "NodeId = " << _nodeId << " Char = " << _character << " ParentId = " << _parentId << endl;
//            }

			// create new NodeVer3
			DictionaryNode* _node = new DictionaryNode();
			_node->setCharacter(_character);
			if (_parentId < 0 || _parentId >= _nodeId)
			{
				//wcout << "### Error ### : loadFromBinaryFile -> id = " << _nodeId << " parentId = " << _parentId << endl;
				_node->setParentNode(NULL); // root
			}
			else
			{
				_node->setParentNode(DictionaryNodeList.at(_parentId));
				DictionaryNodeList.at(_parentId)->addChild(_node);
			}
			DictionaryNodeList.push_back(_node);
		}
		root = DictionaryNodeList.at(0);
        root->setParentNode(NULL);
        if (debug)
        {
            wcout << endl << "OK 1 : numberOfNodes = " << numberOfNodes << endl;
        }
		//================ Read NodeModels =========================================================================================
		
		// read number of NodeModel
		// convert 3 bytes to number of NodeModel
        int numberOfNodeModel = buffer[offset] * 65536 + buffer[offset + 1] * 256 + buffer[offset + 2];
        if (debug)
        {
            wcout << "numberOfNodeModel = " << numberOfNodeModel << endl;
        }
		offset += 3;
		// read list of NodeModel
		nodeModelMap = map<int, DictionaryNodeModel*>();
		for (int i = 0; i < numberOfNodeModel; ++i)
		{
			// convert first 2 bytes to modelId
            int _modelId = buffer[offset] * 256 + buffer[offset + 1];
			// convert the next 3 bytes to _lemmaId
            int _lemmaId = buffer[offset + 2] * 65536 + buffer[offset + 3] * 256 + buffer[offset + 4];
			// convert 3 remaining bytes to _nodeId
            int _nodeVer3Id = buffer[offset + 5] * 65536 + buffer[offset + 6] * 256 + buffer[offset + 7];
			offset += 8;
			// create new NodeModel
			if (nodeModelMap.count(_lemmaId))
			{
				//wcout << "##### TrieVer5::loadFromBinaryFile -> Duplicate : lemmaId = " << _lemmaId << " modelId = " << _modelId << endl;
			}
			else
			{
				// create a NodeModel
				DictionaryNodeModel* _nodeModel = new DictionaryNodeModel(_modelId, _lemmaId); 
				// add NodeModel to NodeVer3
				DictionaryNodeList.at(_nodeVer3Id)->addNodeModel(_nodeModel);
				// map lemmaId to NodeModel
				nodeModelMap.insert(pair<int, DictionaryNodeModel*>(_lemmaId, _nodeModel));

//                if (_modelId == 872)
//                {
//                    wcout << "NodeId == " << _nodeVer3Id << " ModelId = " << _modelId << endl;
//                }
			}
		}

        if (debug)
        {
            wcout << "OK 2 : numberOfNodeModel = " << numberOfNodeModel << endl;
        }
		//================ Read links =========================================================================================

        // read number of links
		// convert 3 bytes to number of links
        int numberOfLinks = buffer[offset] * 65536 + buffer[offset + 1] * 256 + buffer[offset + 2];
		offset += 3;
        if (debug)
        {
            wcout << "numberOfLinks = " << numberOfLinks << endl;
        }
		// read links
		for (int i = 0; i < numberOfLinks; ++i)
		{
			// convert the first 3 bytes to _fromLemmaId
            int _fromLemmaId = buffer[offset] * 65536 + buffer[offset + 1] * 256 + buffer[offset + 2];
			// convert the 3 remaining bytes to _toLemmaId
            int _toLemmaId = buffer[offset + 3] * 65536 + buffer[offset + 4] * 256 + buffer[offset + 5];
			offset += 6;
			addLinkByLemmaIds(_fromLemmaId, _toLemmaId);
		}

        if (debug)
        {
            wcout << "OK 3" << endl;
        }

		//================ Read TrieModels =========================================================================================

		// read number of TrieModel
		// convert 2 bytes to number of TrieModel
        int numberOfModels = buffer[offset] * 256 + buffer[offset + 1];
		offset += 2;
        if (debug)
        {
            wcout << "Number of TrieModels = " << numberOfModels << endl;
        }
		// read TrieModels
		for (int i = 1; i <= numberOfModels; ++i)
		{
			// create a TrieModel from string sufixesAttrs
			DictionaryTrieModel* _trieModel = new DictionaryTrieModel();
			// convert 1 bytes to numberOfTrieModelElement
            int numberOfTrieModelElement = buffer[offset];
			offset += 1;
//            wcout << "TM#" << i << ": elements=" << numberOfTrieModelElement << endl;
            for (int j = 0; j < numberOfTrieModelElement; ++j)
			{
				DictionaryTrieModelElement* _modelElement = new DictionaryTrieModelElement();
				// convert 1 byte to suffix's length
                int suffixLength = buffer[offset];
				offset += 1;
				wstring _suffix = L"";
				// read suffix
				for (int k = 0; k < suffixLength; ++k)
				{
					_suffix.push_back(getDictionary()->getDictionaryTools()->charToWchar(buffer[offset + k]));
				}
				offset += suffixLength;
//                wcout << "_suffix = " << _suffix << " ";
                // set suffix for DictionaryTrieModelElement
				_modelElement->setSuffix(_suffix);
				// convert 1 byte to beginWithPo
                int _beginWithPo = buffer[offset];
				offset += 1;
//                wcout << "Po = " << _beginWithPo << " ";
                // set beginWithPo for DictionaryTrieModelElement
				_modelElement->setBeginWithPo(_beginWithPo == 1);
				// convert 2 byte to featureListId
                int _featureListId = buffer[offset] * 256 + buffer[offset + 1];
				offset += 2;
//                wcout << "_featureListId = " << _featureListId << endl;
                // set featureListId for DictionaryTrieModelElement
				_modelElement->setFeatureListId(_featureListId);
				// add DictionaryTrieModelElement to DictionaryTrieModel
				_trieModel->addDictionaryTrieModelElement(_modelElement);
			}
			// map modelIndex to trieModel
			mapModelIndexToTrieModel(i, _trieModel);
		}

        if (debug)
        {
            wcout << "OK 4" << endl;
        }
		//================ Read featureListMap  =========================================================================================

		// read number of FeatureList
		// convert 2 bytes to number of FeatureList
        int numberOfFeatureList = buffer[offset] * 256 + buffer[offset + 1];
        if (debug)
        {
            wcout << "numberOfFeatureList = " << numberOfFeatureList << endl;
        }
		offset += 2;
		for (int i = 1; i <= numberOfFeatureList; ++i)
		{
			// read number of features in list
            int numberOfFeature = buffer[offset];
//            wcout << "   numberOfFeature = " << numberOfFeature << ": ";
			offset += 1;
			vector<int> featureIdList = vector<int>();
			// read features
			for (int j = 0; j < numberOfFeature; ++j)
			{
				// convert 1 byte to featureId
                int featureId = buffer[offset];
//                wcout << featureId << "; ";
				offset += 1;
				// add featureId to featureIdList
				featureIdList.push_back(featureId);
			}
//            wcout << endl;
			// insert featureIdList to featureListMap
			featureListMap.insert(pair<int, vector<int> >(i, featureIdList));
		}
		
        if (debug)
        {
            wcout << "OK 5" << endl;
        }
		
		//================ Read featureMap  =========================================================================================

		// read number of features
		// convert 1 bytes to number of FeatureList

        featureIdMap.clear();
        idFeatureMap.clear();

        int _numberOfFeature = buffer[offset];
        if (debug)
        {
            wcout << "_numberOfFeature = " << _numberOfFeature << endl;
        }
		offset += 1;
		for (int i = 1; i <= _numberOfFeature; ++i)
		{

            // short feature
			// convert 1 byte to feature's length
            int _short_feature_length = buffer[offset];
			offset += 1;
			// read feature
            wstring _short_feature = L"";
            for (int j = 0; j < _short_feature_length; ++j)
			{
                //wcout << buffer[offset + j] << "; ";
                _short_feature.push_back(getDictionary()->getDictionaryTools()->charToWchar(buffer[offset + j]));
			}
            //wcout << endl;
            offset += _short_feature_length;
			// insert _feature to featureMap
            featureIdMap.insert(pair<wstring, int>(_short_feature, i));
            idFeatureMap.insert(pair<int, wstring>(i, _short_feature));
            if (debug)
            {
                wcout << "Short feature (BIN) #" << i << ": (" << _short_feature_length << ") " << _short_feature << endl;
            }

            // long feature
            // convert 1 byte to feature's length
            int _long_feature_length = buffer[offset];
            offset += 1;
            // read feature
            wstring _long_feature = L"";
            for (int j = 0; j < _long_feature_length; ++j)
            {
                //wcout << buffer[offset + j] << "; ";
                _long_feature.push_back(getDictionary()->getDictionaryTools()->charToWchar(buffer[offset + j]));
            }
            //wcout << endl;
            offset += _long_feature_length;
            // insert _feature to featureMap
            featureMap.insert(pair<int, wstring>(i, _long_feature));
            if (debug)
            {
                wcout << "Long feature (BIN) #" << i << ": (" << _long_feature_length << ") " << _long_feature << endl;
            }
        }
        if (debug)
        {
            wcout << "OK 6" << endl;
        }
		//================ Loading done =========================================================================================
		delete[] buffer;
		buffer = NULL;
        if (debug)
        {
            wcout << "... loadFromBinaryFile done!" << endl;
        }
	}
	else 
	{
		throw FILE_NOT_FOUND_ERROR_CODE;
        // wcout << "### Error ### : loadFromBinaryFile -> Unable to open file" << endl;
	}
}