struct svalue *
get_map_lvalue(struct mapping *m, struct svalue *k, int c)
{
    short h, h16;
    struct apair *p;

    h16 = hashsvalue(k);
    h = h16 & (m->size - 1);

    for (p = m->pairs[h]; p; p = p->next) {
        if (p->hashval == h16 && equal_svalue(k, &p->arg))
            break;
    }
    if (!p) {
        if (c) {
            m->pairs[h] = p = newpair(m->pairs[h], k, &const0, h16);
            if (++m->card > m->mcard) {
                /* We need to extend the hash table */
                rehash_map(m);
            }
        } else {
            /* Return address of a dummy location, with 0. */
            return &const0;
        }
    }
    return &p->val;
}
struct svalue *
get_map_lvalue(struct mapping *m, struct svalue *k, int c)
{
    unsigned int hash, h;
    struct apair *p;

    hash = hashsvalue(k);
    h = hash % m->size;

    for (p = m->pairs[h]; p; p = p->next) {
        if (p->hashval == hash && equal_svalue(k, &p->arg))
            break;
    }
    if (!p) {
        if (c) {
            if (m->card >= MAX_MAPPING_SIZE) {
                error("Too large mapping.\n");
                return &const0;
            }
            m->pairs[h] = p = newpair(m->pairs[h], k, &const0, hash);
            if (++m->card > m->mcard) {
                /* We need to extend the hash table */
                rehash_map(m);
            }
        } else {
            /* Return address of a dummy location, with 0. */
            return &const0;
        }
    }
    return &p->val;
}
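For reference, here is a minimal, self-contained sketch of the same create-on-miss lvalue pattern the two variants above implement (chained buckets, lookup by hash plus key comparison, optional insertion in front of the bucket). The names Pair, Table, and get_lvalue are hypothetical, std::hash stands in for hashsvalue(), and rehashing and node cleanup are omitted; this is not the actual mapping implementation.

// Minimal create-on-miss lvalue lookup sketch (hypothetical types and names).
#include <cstddef>
#include <functional>
#include <iostream>
#include <string>
#include <vector>

struct Pair {
    std::string key;
    int         val;
    std::size_t hash;
    Pair       *next;
};

struct Table {
    // Power-of-two bucket count so masking works like `h16 & (m->size - 1)`.
    std::vector<Pair *> buckets = std::vector<Pair *>(16, nullptr);
    int dummy = 0;   // stands in for const0
};

// Return a writable slot for 'key'. On a miss, either chain a new pair in
// front of its bucket (create == true) or hand back the dummy slot.
int *get_lvalue(Table &t, const std::string &key, bool create) {
    std::size_t hv = std::hash<std::string>{}(key);
    std::size_t h = hv & (t.buckets.size() - 1);

    Pair *p = t.buckets[h];
    for (; p; p = p->next)
        if (p->hash == hv && p->key == key)
            break;

    if (!p) {
        if (!create)
            return &t.dummy;
        p = new Pair{key, 0, hv, t.buckets[h]};   // (cleanup omitted for brevity)
        t.buckets[h] = p;
    }
    return &p->val;
}

int main() {
    Table t;
    *get_lvalue(t, "answer", true) = 42;                    // creates the slot, then writes
    std::cout << *get_lvalue(t, "answer", false) << "\n";   // 42
    std::cout << *get_lvalue(t, "missing", false) << "\n";  // 0 (dummy slot)
}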
void F4::updatePairs(vector<Polynomial>& polys, bool initial)
{
    // Set up a timer to measure how long the 'update' step takes.
    double timer = F4Logger::seconds();

    // The index of the current new element in the groebner basis.
    size_t t = groebnerBasis.size();

    // Since the groebner basis grows, we store its current size in a variable.
    size_t is = groebnerBasis.size();

    // Iterate over all new polynomials found in the last run of reduce(...).
    for(size_t n = 0; n < polys.size(); n++) {
        Polynomial& h = polys[n];

        // True if the current polynomial h will be inserted into the groebner basis.
        bool insertIntoG = true;

        // Check if h should be inserted: h will not be inserted into G if there is
        // already a (new!) element in the groebner basis whose leading term divides
        // the leading term of h.
        // Note: an old element of the groebner basis was already known before the
        // last call of reduce, so h cannot have a leading term divisible by the
        // leading term of the old element; if that were the case, there would have
        // been a reduction polynomial reducing this leading term.
        if(!initial) {
            for(size_t i = is; insertIntoG && i < groebnerBasis.size(); i++) {
                if(inGroebnerBasis[i] && h.LT().isDivisibleBy(groebnerBasis[i].LT())) {
                    insertIntoG = false;
                }
            }
        }

        // Check the criteria only if h will be inserted into the groebner basis.
        // NOTE: guarding this block with insertIntoG does not work in practice,
        // so the criteria are applied unconditionally.
        if(true) { // insertIntoG
            // First criterion:
            // Cancel in P all pairs (i,j) which satisfy T(i,j) = T(i,j,t),
            // T(i,t) != T(i,j) != T(j,t)  [ B_t(i,j) ]
            F4PairSet P1(pairs.key_comp());
            for(set<F4Pair>::iterator it = pairs.begin(); it != pairs.end(); it++) {
                if( !it->LCM.isDivisibleBy(h.LT())
                    || h.lcmLT(groebnerBasis[it->i]) == it->LCM
                    || h.lcmLT(groebnerBasis[it->j]) == it->LCM ) {
                    P1.insert( *it );
                }
            }
            swap(pairs, P1);

            // Second criterion:
            // Cancel in D1 each (i,t) for which a (j,t) exists such that T(i,t)
            // is a proper multiple of T(j,t)  [ M(i,t) ]
            vector<bool> D1(inGroebnerBasis.begin(), inGroebnerBasis.end());
            for(size_t i = 0; i < D1.size(); i++) {
                for(size_t j = i+1; D1[i] && j < D1.size(); j++) {
                    if(D1[j]) {
                        Term a = h.lcmLT(groebnerBasis[i]);
                        Term b = h.lcmLT(groebnerBasis[j]);
                        if(a != b) {
                            if(a.isDivisibleBy(b)) {
                                D1[i] = false;
                            }
                            if(b.isDivisibleBy(a)) {
                                D1[j] = false;
                            }
                        }
                    }
                }
            }

            // Third criterion:
            // In each nonvoid subset { (j,t) | T(j,t) = tau } ...
            // Attention: P2 is not a multiset, so each element is unique.
            set<F4Pair, F4Pair::comparator> P2(pairs.key_comp());
            for(size_t i = 0; i < D1.size(); i++) {
                if(D1[i]) {
                    Term LCM = groebnerBasis[i].lcmLT(h);
                    // Create a new pair: LCM, i, t, marked, sugar degree
                    F4Pair newpair(
                        LCM, i, t,
                        LCM == groebnerBasis[i].LT().mul(h.LT()),
                        max(groebnerBasis[i].sugar() - groebnerBasis[i].LT().deg(),
                            h.sugar() - h.LT().deg()) + LCM.deg()
                    );
                    pair<F4PairSet::iterator,bool> ret;
                    ret = P2.insert( newpair );
                    // If a pair with this LCM is already stored and the new pair
                    // is marked, replace the stored one, since all pairs with
                    // this LCM will be deleted below.
                    if(newpair.marked && !ret.second) {
                        P2.erase(ret.first);
                        P2.insert(newpair);
                    }
                }
            }

            // Finally delete all (i,t) with T(i)T(t) = T(i,t).
            for(set<F4Pair, F4Pair::comparator>::iterator it = P2.begin(); it != P2.end(); it++) {
                if(!it->marked) {
                    pairs.insert(*it);
                }
            }

            // Check all old elements of the groebner basis: if the leading term of
            // the current element divides their leading term, the old element is
            // reducible and can be removed from the result set.
            for(size_t j = 0; j < groebnerBasis.size(); j++) {
                if(inGroebnerBasis[j] && groebnerBasis[j].LT().isDivisibleBy(h.LT())) {
                    inGroebnerBasis[j] = false;
                }
            }

            // Insert h into the groebner basis.
            groebnerBasis.push_back( h );
            inGroebnerBasis.push_back( insertIntoG );
            t++;
        }
    }
    log->updateTime += F4Logger::seconds() - timer;
}
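The pair-replacement step above relies on the return value of std::set::insert: the bool member is false when an equivalent element (here, a pair ordered equal under the comparator) is already stored, and the erase/insert then swaps it for the marked pair. A minimal, self-contained illustration of that idiom, with a hypothetical Entry type and key standing in for F4Pair and its ordering:

// std::set "replace an equivalent element" idiom (hypothetical Entry type).
#include <iostream>
#include <set>
#include <string>

struct Entry {
    std::string key;   // plays the role of the pair's LCM
    bool        marked;
};

struct ByKey {
    bool operator()(const Entry &a, const Entry &b) const { return a.key < b.key; }
};

int main() {
    std::set<Entry, ByKey> s;
    s.insert({"x*y", false});                    // an unmarked entry is stored first

    Entry candidate{"x*y", true};                // equivalent key, but marked
    auto ret = s.insert(candidate);              // fails: an equivalent element exists
    if (candidate.marked && !ret.second) {
        s.erase(ret.first);                      // drop the stored (unmarked) entry
        s.insert(candidate);                     // keep the marked one instead
    }

    std::cout << std::boolalpha << s.begin()->marked << "\n";  // prints: true
}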
void mainloop()
{
    rio_t rp_client, rp_server;
    int hostport;
    char hostname[MAXLINE], pathname[MAXLINE];
    char buf[MAXLINE], method[MAXLINE], uri[MAXLINE], version[MAXLINE];

    while (1) {
        char buffer[MAX_UINT];

        while (queue->isEmpty()) {
            sleep(1);
            continue;
        }
        clientsock = queue->Pop();
        if (clientsock == -1) {
            //printf("Fake Client sockID:%d \n", clientsock);
            continue;
        }
        //printf("Get Client sockID:%d \n", clientsock);

        // Init read structure for the client-proxy connection
        Rio_readinitb(&rp_client, clientsock);

        // Parse the HTTP request
        read_requesthdrs(&rp_client, method, uri, version);

        // Parse the URI of the request
        parse_uri(uri, hostname, pathname, &hostport);
        //printf("%s %s %d\n", hostname, pathname, hostport);

        /***** Only the GET method is supported *****/
        if (strcmp(method, "GET") != 0) {
            char buf2[] = "500 'Internal Error' \r\n";
            Rio_writenb_w(clientsock, buf2, strlen(buf2));
            //printf("It is not GET method:%s socket:%d \n", method, clientsock);
            Rio_close(clientsock);
            continue;
        }

#ifdef WITH_CACHE
        // Look up the URL in the cache and replay the stored response [FIXME]
        if (pmap->find(uri) != pmap->end()) {
            // cache hit
            proxy_cache *pcache = (*pmap)[uri];
            for (int i = 0; i < pcache->size; i++) {
                Rio_writen_w(clientsock, pcache->buf[i], strlen(pcache->buf[i]));
            }
            Rio_close(clientsock);
            continue;
        }

        // Make a cache entry [FIXME]
        proxy_cache *pcache = new proxy_cache();
        std::string urlstr(uri);
        std::pair<std::string, proxy_cache *> newpair(urlstr, pcache);
        pmap->insert(newpair);
#endif

        // Open a connection to the requested web server
        char strport[10] = {0};
        sprintf(strport, "%d", hostport);
        proxysock = open_targetfd(hostname, strport);
        if (proxysock < 0) {
            char buf2[] = "500 'Internal Error' \r\n";
            Rio_writenb_w(clientsock, buf2, strlen(buf2));
            Rio_close(clientsock);
            continue;
        }

        // Init read struct for the proxy-webserver connection
        Rio_readinitb(&rp_server, proxysock);

        sprintf(buf, "%s %s %s\r\n", method, pathname, "HTTP/1.0");
        Rio_writenb_w(proxysock, buf, strlen(buf));
        //printf("%s", buf);
        sprintf(buf, "Host: %s\r\n", hostname);
        Rio_writenb_w(proxysock, buf, strlen(buf));
        //printf("%s", buf);

        // Read the remaining client request headers and forward them to the web server
        while (strcmp(buf, "\r\n")) {
            Rio_readlineb_w(&rp_client, buf, MAXLINE);
            if (!strcmp(buf, "\r\n")) {
                char buf2[] = "Connection: close\r\n";
                Rio_writenb_w(proxysock, buf2, strlen(buf2));
                //printf("%s", buf2);
            }
            if (!strncmp(buf, "Connection: keep-alive", 22) || !strncmp(buf, "Host:", 5)) {
                //printf("%s", buf);
                continue;
            }
            Rio_writenb_w(proxysock, buf, strlen(buf));
            //printf("%s", buf);
        }

        // Read the response from the web server and
        // forward it to the requesting client
        ssize_t n = 0;
        while ((n = Rio_readnb_w(proxysock, buffer, MAX_UINT)) > 0) {
#ifdef WITH_CACHE
            char *cache = (char *)malloc(sizeof(char) * (n + 1));
            if (cache == NULL) {
                perror("Alloc memory failed, cache omitted");
            } else {
                memset(cache, 0, n + 1);
                strncpy(cache, buffer, n);
            }
#endif
            Rio_writenb_w(clientsock, buffer, n);
        }
        Rio_close(clientsock);
        Rio_close(proxysock);
    }
}
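For comparison, here is a minimal, self-contained sketch of the header-rewriting policy the forwarding loop above applies: downgrade the request line to HTTP/1.0, send our own Host: line, drop the client's Host: and Connection: keep-alive lines, and terminate with Connection: close. The function name rewrite_request is hypothetical, and it builds a string instead of streaming through the Rio wrappers.

// Header-rewriting policy in isolation (hypothetical rewrite_request helper).
#include <iostream>
#include <sstream>
#include <string>

std::string rewrite_request(const std::string &method, const std::string &path,
                            const std::string &host, std::istream &client_headers) {
    std::ostringstream out;
    out << method << " " << path << " HTTP/1.0\r\n";
    out << "Host: " << host << "\r\n";

    std::string line;
    while (std::getline(client_headers, line)) {
        if (!line.empty() && line.back() == '\r') line.pop_back();
        if (line.empty()) break;                                      // end of headers
        if (line.rfind("Host:", 0) == 0) continue;                    // already sent ours
        if (line.rfind("Connection: keep-alive", 0) == 0) continue;   // we close instead
        out << line << "\r\n";
    }
    out << "Connection: close\r\n\r\n";
    return out.str();
}

int main() {
    std::istringstream hdrs("Host: example.com\r\n"
                            "Connection: keep-alive\r\n"
                            "Accept: */*\r\n"
                            "\r\n");
    std::cout << rewrite_request("GET", "/index.html", "example.com", hdrs);
}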
void Grid::identifyPoints()
{
    // clear lists:
    filterMap_i2g.clear();
    filterMap_g2i.clear();

    // prepare:
    MultiIndexed::IndexSetSet identical;
    vector<COORD_CART> checked;
    vector<int> checked_i;
    vector<COORD_CART> doubles;
    int duplicates = 0;
    int actLabel = 0;

    // loop:
    for (unsigned int i = 0; i < indexSize(); i++) {
        // current index set:
        MultiIndexed::IndexSet is = i2x(i);
        // get Cartesian point:
        COORD_CART cp = getPoint(is);

        // check if this has been found before:
        vector<COORD_CART>::iterator bef = find(checked.begin(), checked.end(), cp);

        // yes, this is a duplicate:
        if (bef != checked.end()) {
            // the old label:
            int i_old = 0;
            // search the original:
            vector<COORD_CART>::iterator found = find(doubles.begin(), doubles.end(), cp);
            // yes, found it:
            if (found != doubles.end()) {
                // get position:
                int pos = found - doubles.begin();
                i_old = (identical[pos])[0];
                // write:
                identical[pos].push_back(i);
                // tell:
                cout << gridName << ": Found duplicate. Identify (" << String(is)
                     << ") with (" << String(i2x(i_old)) << ")." << endl;
                // this reduces the grid size, so count:
                duplicates++;
            }
            // no, it's new:
            else {
                // add to lists:
                doubles.push_back(cp);
                i_old = bef - checked.begin();
                MultiIndexed::IndexSet newpair(2);
                newpair[0] = i_old;
                newpair[1] = i;
                identical.push_back(newpair);
                // tell:
                cout << gridName << ": Found duplicate. Identify (" << String(is)
                     << ") with (" << String(i2x((identical.back())[0])) << ")" << endl;
                // this reduces the grid size, so count:
                duplicates++;
            }
            // write original label to filter:
            filterMap_i2g.push_back(i_old);
        }
        // no, this is not a duplicate:
        else {
            // give label:
            filterMap_i2g.push_back(actLabel);
            filterMap_g2i.push_back(i);
            actLabel++;
            // mark as checked:
            checked.push_back(cp);
            checked_i.push_back(i);
        }
    }

    // reduce grid size:
    gSize -= duplicates;

    cout << "i2gMap: " << endl;
    cout << String(filterMap_i2g) << endl;
    cout << "g2iMap: " << endl;
    cout << String(filterMap_g2i) << endl;
    cout << gridName << ": Index size = " << indexSize()
         << ", grid size = " << gridSize() << endl;
}
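A minimal, self-contained sketch of the same duplicate-identification idea: assign each distinct Cartesian point one compact label and record index-to-grid and grid-to-index maps. Point, i2g, and g2i are hypothetical stand-ins for COORD_CART, filterMap_i2g, and filterMap_g2i; a std::map replaces the linear find over the checked vector, and duplicates are mapped to the compact label of their original rather than to its raw index.

// Duplicate-point labeling sketch (hypothetical Point type, exact comparison).
#include <array>
#include <iostream>
#include <map>
#include <vector>

using Point = std::array<double, 3>;

int main() {
    // Toy input: indices 2 and 3 repeat points already seen.
    std::vector<Point> points = {
        {0, 0, 0}, {1, 0, 0}, {0, 0, 0}, {1, 0, 0}, {0, 1, 0}
    };

    std::map<Point, int> label;        // point -> compact label
    std::vector<int> i2g;              // index -> label
    std::vector<std::size_t> g2i;      // label -> first index with that point

    for (std::size_t i = 0; i < points.size(); ++i) {
        auto it = label.find(points[i]);
        if (it == label.end()) {
            int newLabel = static_cast<int>(g2i.size());
            label.emplace(points[i], newLabel);
            i2g.push_back(newLabel);
            g2i.push_back(i);
        } else {
            i2g.push_back(it->second);     // duplicate: reuse the original label
        }
    }

    std::cout << "grid size: " << g2i.size() << "\n";   // 3 distinct points
    for (int l : i2g) std::cout << l << " ";            // 0 1 0 1 2
    std::cout << "\n";
}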