bool Spread::TopicDiffuse() { map<int,StatusTagLink>::iterator iter; map<long,UserStatus> userSMap; map<long,UserStatus>::iterator userSIter; for(unsigned int i=0;i<keyWordList.size();i++) { //针对每一个单词,循环查找每一个单词,它的概率 float keyWordValue = 0.0; int wid = keyWordList[i]; //获得keyword的id iter = widSTMap->find(wid); if(iter == widSTMap->end()) { LOG4CPLUS_INFO(LogUtil::GetInstance()->GetLogger() ,"continue"); continue; } Queue<long> useridQueue; StatusTagNode *p = iter->second->next; BuildQueue(useridQueue,userSMap,p,wid); ModelSpread(useridQueue,userSMap); for(userSIter = userSMap.begin();userSIter != userSMap.end();userSIter++) { keyWordValue += userSIter->second.tfIdf; } keyWordMap[keyWordList[i]]=keyWordValue; userSMap.clear(); } return true; }
int main(int argc, char const* argv[]) {
    // Small driver exercising the queue API: build, enqueue a batch,
    // dequeue one, enqueue another, display, and tear down.
    Queue queue = BuildQueue(5);

    // Same enqueue sequence as before (1, 2, 4, 5, 6), expressed as a loop.
    const int firstBatch[] = {1, 2, 4, 5, 6};
    for (int value : firstBatch) {
        Enqueue(queue, value);
    }

    Dequeue(queue);          // remove the front element (1)
    Enqueue(queue, 7);       // push one more after the dequeue

    ShowQueue(queue);
    DestroyQueue(queue);
}
bool Sampling::InitWordTopicValueMap(map<long,WordTfIdfLink> &uidWordTopicMap) { LOG4CPLUS_INFO(LogUtil::GetInstance()->GetLogger() ,"InitWordTopicValueMap start"); map<int,StatusTagLink>::iterator iter; map<long,WordTfIdfLink>::iterator uidWordTopicIter; map<long int,UserLink>::iterator userGraphIter; for(userGraphIter = userGraphMap->begin();userGraphIter != userGraphMap->end();userGraphIter++) { WordTfIdfLink head = (struct WordTfIdfNode *)malloc(sizeof(struct WordTfIdfNode)); head->next = NULL; uidWordTopicMap[userGraphIter->first] = head; } float total = 0.0; int record = 0; for(unsigned int i=0;i<keyWordList.size();i++) { //针对每一个单词,循环查找每一个单词,它的概率 map<long,UserStatus> userSMap; map<long,UserStatus>::iterator userSMapIter; int wid = keyWordList[i]; //获得keyword的id iter = widSTMap->find(wid); if(iter == widSTMap->end()) continue; Queue<long> useridQueue; StatusTagNode *p = iter->second->next; BuildQueue(useridQueue,userSMap,p,wid); ModelSpread(useridQueue,userSMap); for(userSMapIter=userSMap.begin();userSMapIter!=userSMap.end();userSMapIter++) { WordTfIdfNode *uidWordTopicNode = (struct WordTfIdfNode *)malloc(sizeof(struct WordTfIdfNode)); uidWordTopicNode->wid = keyWordList[i]; uidWordTopicNode->tfIdf = userSMapIter->second.tfIdf; uidWordTopicIter = uidWordTopicMap.find(userSMapIter->first); if(uidWordTopicIter != uidWordTopicMap.end()) { WordTfIdfLink head = uidWordTopicIter->second; uidWordTopicNode->next = head->next; head->next = uidWordTopicNode; } } userSMap.clear(); } LOG4CPLUS_INFO(LogUtil::GetInstance()->GetLogger() ,"InitWordTopicValueMap End"); return true; }
bool Sampling::InitUserTopicValueMap(map<int,UserTopicValueLink> &widUserTopicMap) {
    // For every keyword already registered in widUserTopicMap, diffuse it
    // through the user graph and append a (uid, tf-idf) node per reached
    // user onto that keyword's dummy-headed list.
    // Returns true unconditionally (unregistered/tagless keywords are skipped).
    LOG4CPLUS_INFO(LogUtil::GetInstance()->GetLogger() ,"InitUserTopicValueMap Start");
    map<int,StatusTagLink>::iterator iter;
    map<int,UserTopicValueLink>::iterator widUserTopicIter;
    // NOTE: the original declared unused locals `total` and `record`; removed.

    for (unsigned int i = 0; i < keyWordList.size(); i++) {
        // Only keywords that already have a list head get populated.
        widUserTopicIter = widUserTopicMap.find(keyWordList[i]);
        if (widUserTopicIter == widUserTopicMap.end())
            continue;

        // For each keyword, compute its per-user probability via diffusion.
        map<long,UserStatus> userSMap;
        map<long,UserStatus>::iterator userSMapIter;
        int wid = keyWordList[i];                 // id of the current keyword
        iter = widSTMap->find(wid);
        if (iter == widSTMap->end())
            continue;                             // no status tags for this word

        Queue<long> useridQueue;
        StatusTagNode *p = iter->second->next;
        BuildQueue(useridQueue, userSMap, p, wid);
        ModelSpread(useridQueue, userSMap);

        for (userSMapIter = userSMap.begin(); userSMapIter != userSMap.end(); ++userSMapIter) {
            UserTopicValue *userTopicValueNode = (struct UserTopicValue *)malloc(sizeof(struct UserTopicValue));
            userTopicValueNode->uid = userSMapIter->first;
            userTopicValueNode->value = userSMapIter->second.tfIdf;

            // Insert right after the dummy head (O(1) prepend).
            UserTopicValueLink head = widUserTopicIter->second;
            userTopicValueNode->next = head->next;
            head->next = userTopicValueNode;
        }
        userSMap.clear();
    }
    LOG4CPLUS_INFO(LogUtil::GetInstance()->GetLogger() ,"InitUserTopicValueMap End");
    return true;
}