/* Issue the next HTTP range request for this webseed: figure out how much
 * of the current piece is still missing, map that piece offset to a file
 * and offset within the torrent, and ask the web server for that byte range. */
static void
requestNextChunk( tr_webseed * w )
{
    tr_torrent * tor = tr_torrentFindFromHash( w->session, w->hash );

    if( tor == NULL )
        return;

    {
        const tr_info * info = tr_torrentInfo( tor );
        const uint32_t  bytesGot = EVBUFFER_LENGTH( w->content );
        const uint32_t  bytesLeft = w->byteCount - bytesGot;
        const uint32_t  offsetInPiece = w->pieceOffset + bytesGot;
        tr_file_index_t fileNum;
        uint64_t        offsetInFile;
        uint32_t        requestLen;
        char          * url;
        char          * range;

        /* translate (piece, offset) into (file, offset-in-file) */
        tr_ioFindFileLocation( tor, w->pieceIndex, offsetInPiece,
                               &fileNum, &offsetInFile );

        /* ask for whatever is smaller: what we still need,
         * or what remains of this file */
        requestLen = MIN( bytesLeft, info->files[fileNum].length - offsetInFile );

        url = makeURL( w, &info->files[fileNum] );

        /* inclusive byte range, hence the -1 */
        range = tr_strdup_printf( "%"PRIu64"-%"PRIu64,
                                  offsetInFile, offsetInFile + requestLen - 1 );

        tr_webRun( w->session, url, range, webResponseFunc, w );

        tr_free( range );
        tr_free( url );
    }
}
// Build the CDDB "query" command from the disc id and track offset list,
// convert it into a request URL, and fetch it from the server.
Result HTTPLookup::sendQuery()
{
  const QString command =
      QString::fromLatin1( "cddb query %1 %2" )
          .arg( trackOffsetListToId(), trackOffsetListToString() );

  makeURL( command );

  return fetchURL();
}
// Remember which category/discid this read is for (later response parsing
// presumably needs the context), then issue the CDDB "read" command.
Result HTTPLookup::sendRead( const CDDBMatch & match )
{
  category_ = match.first;
  discid_   = match.second;

  const QString command =
      QString::fromLatin1( "cddb read %1 %2" ).arg( category_, discid_ );

  makeURL( command );

  return fetchURL();
}
urlinfo *getparts(char *string) { printf("Analyzing %s\n----------------------------\n", string); // free memory from previous urls if (prev) freeURL(prev); // set prev to current, making room for the new url to be stored in current prev = current; // make the url (if prev != NULL, relative addresses may be passed in string current = makeURL(string, prev); // print selected substrings printf("Domain: %s\n", current->host); printf("Path: %s\n", current->path); printf("File: %s\n\n", current->filename); return current; }
url_llist *getcache(char *folder, char *searchstring) { char *modifiedstring = tounderline(searchstring); char *path = getpath(folder, modifiedstring); FILE *file = fopen(path, "r"); // return null if unable to open for read (indicating file doesn't exist if (!file) return NULL; // get number of urls int numlinks; fscanf(file, "%d\n", &numlinks); // creat array of urls, and a corresponding array holding indexes of urls each url points to urlinfo *urls[numlinks]; llist outlink_indexes[numlinks]; // initialize output list url_llist *output = malloc(sizeof(url_llist)); url_llist_init(output); // read each url (each is on a separate line) // push to linked list char urlstring[MAXLENGTH]; int numoutlinks; urlinfo *url; // pass 1: index urls unsigned long *outlink_index; unsigned long i, j; while(fscanf(file, "%s %d\n", urlstring, &numoutlinks) != EOF) { // construct url (without outlinks) url = makeURL(urlstring, NULL); // push url and a linked list for its outlinks url_llist_push_back(output, url); urls[i] = url; llist_init(&(outlink_indexes[i]), (void *)comparelong); // read and push each outlink for (j = 0; j < numoutlinks; j++) { outlink_index = malloc(sizeof(unsigned long)); fscanf(file, "%lu ", outlink_index);//outlink_index)) llist_push_back(&(outlink_indexes[i]), outlink_index); } // read in '\n' fscanf(file, "\n"); i++; } lnode *current_node, *prev_node; // pass 2: get outlinks by their indexes for (i = 0; i < numlinks; i++) { url = urls[i]; current_node = outlink_indexes[i].front; while (current_node) { outlink_index = current_node->data; prev_node = current_node; llist_push_back(&urls[i]->outlinks, urls[(long)*outlink_index]); current_node = current_node->next; free(outlink_index); free(prev_node); } } free(path); free(modifiedstring); close(file); return output; }