Code Example #1
File: criticalpacket.cpp  Project: GDXN/Par-N-Rar
void CriticalPacket::FinishPacket(const MD5Hash &setid)
{
  assert(packetdata != 0 && packetlength >= sizeof(PACKET_HEADER));

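  // Record the set id in the packet header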
  PACKET_HEADER *header = (PACKET_HEADER*)packetdata;
  header->setid = setid;

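  // The packet hash is the MD5 of everything in the packet from the setid field onwards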
  MD5Context packetcontext;
  packetcontext.Update(&header->setid, packetlength - offsetof(PACKET_HEADER, setid));
  packetcontext.Final(header->hash);
}
Code Example #2
void DescriptionPacket::ComputeFileId(void)
{
  FILEDESCRIPTIONPACKET *packet = ((FILEDESCRIPTIONPACKET *)packetdata);

  // Compute the fileid from the hash, length, and name fields in the packet.

  MD5Context context;
  context.Update(&packet->hash16k, 
                 sizeof(FILEDESCRIPTIONPACKET)-offsetof(FILEDESCRIPTIONPACKET,hash16k)
                 +strlen((const char*)packet->name));
  context.Final(packet->fileid);
}
Code Example #3
void Par2CreatorSourceFile::UpdateHashes(u32 blocknumber, const void *buffer, size_t length)
{
    // Compute the crc and hash of the data
    u32 blockcrc = ~0 ^ CRCUpdateBlock(~0, length, buffer);
    MD5Context blockcontext;
    blockcontext.Update(buffer, length);
    MD5Hash blockhash;
    blockcontext.Final(blockhash);

    // Store the results in the verification packet
    verificationpacket->SetBlockHashAndCRC(blocknumber, blockhash, blockcrc);


    // Update the full file hash, but don't go beyond the end of the file
    if ((u64)length > filesize - blocknumber * (u64)length)
    {
        length = (size_t)(filesize - blocknumber * (u64)length);
    }

    assert(contextfull != 0);

    contextfull->Update(buffer, length);
}
Code Example #4
File: creatorpacket.cpp  Project: 0BruceWayne0/nzbget
bool CreatorPacket::Create(const MD5Hash &setid)
{
  string creator = "Created by " PACKAGE " version " VERSION ".";

  // Allocate a packet just large enough for creator name
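  // (PAR2 packets must be a multiple of 4 bytes long, so the creator string length is rounded up to a multiple of 4)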
  CREATORPACKET *packet = (CREATORPACKET *)AllocatePacket(sizeof(*packet) + (~3 & (3+(u32)creator.size())));

  // Fill in the details that we know
  packet->header.magic = packet_magic;
  packet->header.length = packetlength;
  //packet->header.hash;  // Compute shortly
  packet->header.setid = setid;
  packet->header.type = creatorpacket_type;

  // Copy the creator description into the packet
  memcpy(packet->client, creator.c_str(), creator.size());

  // Compute the packet hash
  MD5Context packetcontext;
  packetcontext.Update(&packet->header.setid, packetlength - offsetof(PACKET_HEADER, setid));
  packetcontext.Final(packet->header.hash);

  return true;
}
Code Example #5
File: mainpacket.cpp  Project: Bootz/nzbm
bool MainPacket::Create(vector<Par2CreatorSourceFile*> &sourcefiles, u64 _blocksize)
{
  recoverablefilecount = totalfilecount = (u32)sourcefiles.size();
  blocksize = _blocksize;

  // Allocate memory for the main packet with enough fileid entries
  MAINPACKET *packet = (MAINPACKET *)AllocatePacket(sizeof(MAINPACKET) + totalfilecount * sizeof(MD5Hash));

  // Record the details we already know in the packet
  packet->header.magic         = packet_magic;
  packet->header.length        = packetlength;
  //packet->header.hash;         // Compute shortly
  //packet->header.setid;        // Compute shortly
  packet->header.type          = mainpacket_type;

  packet->blocksize            = _blocksize;
  packet->recoverablefilecount = totalfilecount;
  //packet->fileid;              // Compute shortly

  // Sort the source files according to their fileid values
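  // (the PAR2 specification requires the fileid entries in the main packet to be in sorted order)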
  if (totalfilecount > 1)
  {
    sort(sourcefiles.begin(), sourcefiles.end(), Par2CreatorSourceFile::CompareLess);
  }

  // Store the fileid values in the main packet
  vector<Par2CreatorSourceFile*>::const_iterator sourcefile;
  MD5Hash *hash;
  for ((sourcefile=sourcefiles.begin()),(hash=packet->fileid);
       sourcefile!=sourcefiles.end();
       ++sourcefile, ++hash)
  {
    *hash = (*sourcefile)->FileId();
  }

  // Compute the set_id_hash
  MD5Context setidcontext;
  setidcontext.Update(&packet->blocksize, packetlength - offsetof(MAINPACKET, blocksize));
  setidcontext.Final(packet->header.setid);

  // Compute the packet_hash
  MD5Context packetcontext;
  packetcontext.Update(&packet->header.setid, packetlength - offsetof(MAINPACKET, header.setid));
  packetcontext.Final(packet->header.hash);

  return true;
}
Code Example #6
bool Par1Repairer::VerifyDataFile(DiskFile *diskfile, Par1RepairerSourceFile *sourcefile) {
	Par1RepairerSourceFile *match = 0;

	string path;
	string name;
	DiskFile::SplitFilename(diskfile->FileName(), path, name);

	// How big is the file we are checking
	u64 filesize = diskfile->FileSize();

	if (filesize == 0) {
		if (noiselevel > CommandLine::nlSilent) {
			cout << "Target: \"" << name << "\" - empty." << endl;
		}
		return true;
	}

	// Search for the first file that is the correct size
	vector<Par1RepairerSourceFile*>::iterator sourceiterator = sourcefiles.begin();
	while (sourceiterator != sourcefiles.end() &&
				 filesize != (*sourceiterator)->FileSize()) {
		++sourceiterator;
	}

	// Are there any files that are the correct size?
	if (sourceiterator != sourcefiles.end()) {
		// Allocate a buffer to compute the file hash
		size_t buffersize = (size_t)min((u64)1048576, filesize);
		char *buffer = new char[buffersize];

		// Read the first 16k of the file
		size_t want = (size_t)min((u64)16384, filesize);
		if (!diskfile->Read(0, buffer, want)) {
			delete [] buffer;
			return false;
		}

		// Compute the MD5 hash of the first 16k
		MD5Context contextfull;
		contextfull.Update(buffer, want);
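		// Take a copy of the context here so the full-file hash can continue from the same point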
		MD5Context context16k = contextfull;
		MD5Hash hash16k;
		context16k.Final(hash16k);

		if (!ignore16kfilehash) {
			// Search for the first file that has the correct 16k hash
			while (sourceiterator != sourcefiles.end() &&
						(filesize != (*sourceiterator)->FileSize() ||
							hash16k != (*sourceiterator)->Hash16k())) {
				++sourceiterator;
			}
		}

		// Are there any files with the correct 16k hash?
		if (sourceiterator != sourcefiles.end()) {
			// Compute the MD5 hash of the whole file
			if (filesize > 16384) {
				u64 progress = 0;
				u64 offset = 16384;
				while (offset < filesize) {
					if (noiselevel > CommandLine::nlQuiet) {
						// Update a progress indicator
						u32 oldfraction = (u32)(1000 * (progress) / filesize);
						u32 newfraction = (u32)(1000 * (progress=offset) / filesize);
						if (oldfraction != newfraction)
						{
							cout << "Scanning: \"" << name << "\": " << newfraction/10 << '.' << newfraction%10 << "%\r" << flush;
						}
					}

					want = (size_t)min((u64)buffersize, filesize-offset);

					if (!diskfile->Read(offset, buffer, want)) {
						delete [] buffer;
						return false;
					}

					contextfull.Update(buffer, want);

					offset += want;
				}
			}

			MD5Hash hashfull;
			contextfull.Final(hashfull);

			// Search for the first file that has the correct full hash
			while (sourceiterator != sourcefiles.end() &&
						(filesize != (*sourceiterator)->FileSize() ||
							(!ignore16kfilehash && hash16k != (*sourceiterator)->Hash16k()) ||
							hashfull != (*sourceiterator)->HashFull())) {
				++sourceiterator;
			}

			// Are there any files with the correct full hash?
			if (sourceiterator != sourcefiles.end()) {
				// If a source file was originally specified, check to see if it is a match
				if (sourcefile != 0 &&
						sourcefile->FileSize() == filesize &&
						(ignore16kfilehash || sourcefile->Hash16k() == hash16k) &&
						sourcefile->HashFull() == hashfull) {
					match = sourcefile;
				} else {
					// Search for a file which matches and has not already been matched
					while (sourceiterator != sourcefiles.end() &&
								(filesize != (*sourceiterator)->FileSize() ||
									(!ignore16kfilehash && hash16k != (*sourceiterator)->Hash16k()) ||
									hashfull != (*sourceiterator)->HashFull() ||
									(*sourceiterator)->GetCompleteFile() != 0)) {
						++sourceiterator;
					}

					// Did we find a match
					if (sourceiterator != sourcefiles.end()) {
						match = *sourceiterator;
					}
				}
			}
		}

		delete [] buffer;
	}

	// Did we find a match
	if (match != 0) {
		match->SetCompleteFile(diskfile);

		if (noiselevel > CommandLine::nlSilent) {
			// Was the match the file we were originally looking for
			if (match == sourcefile) {
				cout << "Target: \"" << name << "\" - found." << endl;
			}
			// Were we looking for a specific file
			else if (sourcefile != 0) {
				string targetname;
				DiskFile::SplitFilename(sourcefile->FileName(), path, targetname);

				cout << "Target: \""
							<< name
							<< "\" - is a match for \""
							<< targetname
							<< "\"."
							<< endl;
			}
			// We found a match while scanning an extra file rather than a specific target
			else {
				string targetname;
				DiskFile::SplitFilename(match->FileName(), path, targetname);

				cout << "File: \""
							<< name
							<< "\" - is a match for \""
							<< targetname
							<< "\"."
							<< endl;
			}
		}
	} else {
		if (noiselevel > CommandLine::nlSilent)
			cout << "File: \""
						<< name
						<< "\" - no data found."
						<< endl;
	}

	return true;
}
Code Example #7
bool Par1Repairer::LoadRecoveryFile(string filename) {
	// Skip the file if it has already been processed
	if (diskfilemap.Find(filename) != 0) {
		return true;
	}

	DiskFile *diskfile = new DiskFile;

	// Open the file
	if (!diskfile->Open(filename)) {
		// If we could not open the file, ignore the error and
		// proceed to the next file
		delete diskfile;
		return true;
	}

	if (noiselevel > CommandLine::nlSilent) {
		string path;
		string name;
		DiskFile::SplitFilename(filename, path, name);
		cout << "Loading \"" << name << "\"." << endl;
	}

	parlist.push_back(filename);

	bool havevolume = false;
	u32 volumenumber = 0;

	// How big is the file
	u64 filesize = diskfile->FileSize();
	if (filesize >= sizeof(PAR1FILEHEADER)) {
		// Allocate a buffer to read data into
		size_t buffersize = (size_t)min((u64)1048576, filesize);
		u8 *buffer = new u8[buffersize];

		do {
			PAR1FILEHEADER fileheader;
			if (!diskfile->Read(0, &fileheader, sizeof(fileheader)))
				break;

			// Is this really a PAR file?
			if (fileheader.magic != par1_magic)
				break;

			// Is the version number correct?
			if (fileheader.fileversion != 0x00010000)
				break;

			ignore16kfilehash = (fileheader.programversion == smartpar11);

			// Prepare to carry out MD5 Hash check of the Control Hash
			MD5Context context;
			u64 offset = offsetof(PAR1FILEHEADER, sethash);
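			// (the control hash covers everything from the set hash field to the end of the file)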

			// Process until the end of the file is reached
			while (offset < filesize) {
				// How much data should we read?
				size_t want = (size_t)min((u64)buffersize, filesize-offset);
				if (!diskfile->Read(offset, buffer, want))
					break;

				context.Update(buffer, want);

				offset += want;
			}

			// Did we read the whole file
			if (offset < filesize)
				break;

			// Compute the hash value
			MD5Hash hash;
			context.Final(hash);

			// Is it correct?
			if (hash != fileheader.controlhash)
				break;

			// Check that the volume number is ok
			if (fileheader.volumenumber >= 256)
				break;

			// Are there any files?
			if (fileheader.numberoffiles == 0 ||
					fileheader.filelistoffset < sizeof(PAR1FILEHEADER) ||
					fileheader.filelistsize == 0)
				break;

			// Verify that the file list and data offsets are ok
			if ((fileheader.filelistoffset + fileheader.filelistsize > filesize)
					||
					(fileheader.datasize && (fileheader.dataoffset < sizeof(fileheader) || fileheader.dataoffset + fileheader.datasize > filesize))
					||
					(fileheader.datasize && ((fileheader.filelistoffset <= fileheader.dataoffset && fileheader.dataoffset < fileheader.filelistoffset+fileheader.filelistsize) || (fileheader.dataoffset <= fileheader.filelistoffset && fileheader.filelistoffset < fileheader.dataoffset + fileheader.datasize))))
				break;

			// Check the size of the file list
			if (fileheader.filelistsize > 200000)
				break;

			// If we already have a copy of the file list, make sure this one has the same size
			if (filelist != 0 && filelistsize != fileheader.filelistsize)
				break;

			// Allocate a buffer to hold a copy of the file list
			unsigned char *temp = new unsigned char[(size_t)fileheader.filelistsize];

			// Read the file list into the buffer
			if (!diskfile->Read(fileheader.filelistoffset, temp, (size_t)fileheader.filelistsize)) {
				delete [] temp;
				break;
			}

			// If we already have a copy of the file list, make sure this copy is identical
			if (filelist != 0) {
				bool match = (0 == memcmp(filelist, temp, filelistsize));
				delete [] temp;

				if (!match)
					break;
			} else {
				// Prepare to scan the file list
				unsigned char *current = temp;
				size_t remaining = (size_t)fileheader.filelistsize;
				unsigned int fileindex = 0;

				// Allocate a buffer to copy each file entry into so that
				// all fields will be correctly aligned in memory.
				PAR1FILEENTRY *fileentry = (PAR1FILEENTRY*)new u64[(remaining + sizeof(u64)-1)/sizeof(u64)];

				// Process until we run out of files or data
				while (remaining > 0 && fileindex < fileheader.numberoffiles) {
					// Copy fixed portion of file entry
					memcpy((void*)fileentry, (void*)current, sizeof(PAR1FILEENTRY));

					// Is there enough data remaining
					if (remaining < sizeof(fileentry->entrysize) ||
							remaining < fileentry->entrysize)
						break;

					// Check the length of the filename
					if (fileentry->entrysize <= sizeof(PAR1FILEENTRY))
						break;

					// Check the file size
					if (blocksize < fileentry->filesize)
						blocksize = fileentry->filesize;

					// Copy whole of file entry
					memcpy((void*)fileentry, (void*)current, (size_t)(u64)fileentry->entrysize);

					// Create source file and add it to the appropriate list
					Par1RepairerSourceFile *sourcefile = new Par1RepairerSourceFile(fileentry, searchpath);
					if (fileentry->status & INPARITYVOLUME) {
						sourcefiles.push_back(sourcefile);
					} else {
						extrafiles.push_back(sourcefile);
					}

					remaining -= (size_t)fileentry->entrysize;
					current += (size_t)fileentry->entrysize;

					fileindex++;
				}

				delete [] (u64*)fileentry;

				// Did we find the correct number of files
				if (fileindex < fileheader.numberoffiles) {
					vector<Par1RepairerSourceFile*>::iterator i = sourcefiles.begin();
					while (i != sourcefiles.end()) {
						Par1RepairerSourceFile *sourcefile = *i;
						delete sourcefile;
						++i;
					}
					sourcefiles.clear();

					i = extrafiles.begin();
					while (i != extrafiles.end()) {
						Par1RepairerSourceFile *sourcefile = *i;
						delete sourcefile;
						++i;
					}
					extrafiles.clear();

					delete [] temp;
					break;
				}

				filelist = temp;
				filelistsize = (u32)fileheader.filelistsize;
			}

			// Is this a recovery volume?
			if (fileheader.volumenumber > 0) {
				// Make sure there is data and that it is the correct size
				if (fileheader.dataoffset == 0 || fileheader.datasize != blocksize)
					break;

				// What volume number is this?
				volumenumber = (u32)(fileheader.volumenumber - 1);

				// Do we already have this volume?
				if (recoveryblocks.find(volumenumber) == recoveryblocks.end()) {
					// Create a data block
					DataBlock *datablock = new DataBlock;
					datablock->SetLength(blocksize);
					datablock->SetLocation(diskfile, fileheader.dataoffset);

					// Store it in the map
					recoveryblocks.insert(pair<u32, DataBlock*>(volumenumber, datablock));

					havevolume = true;
				}
			}
		} while (false);

		delete [] buffer;
	}

	// We have finished with the file for now
	diskfile->Close();

	if (noiselevel > CommandLine::nlQuiet) {
		if (havevolume) {
			cout << "Loaded recovery volume " << volumenumber << endl;
		} else {
			cout << "No new recovery volumes found" << endl;
		}
	}

	// Remember that the file was processed
	bool success = diskfilemap.Insert(diskfile);
	assert(success);

	return true;
}
Code Example #8
bool Par2CreatorSourceFile::Open(CommandLine::NoiseLevel noiselevel, const CommandLine::ExtraFile &extrafile, u64 blocksize, bool deferhashcomputation, string basepath)
{
    // Get the filename and filesize
    diskfilename = extrafile.FileName();
    filesize = extrafile.FileSize();

    // Work out how many blocks the file will be sliced into
    blockcount = (u32)((filesize + blocksize-1) / blocksize);

    // Determine what filename to record in the PAR2 files
    parfilename = diskfilename;
    parfilename.erase(0, basepath.length());

    // Create the Description and Verification packets
    descriptionpacket = new DescriptionPacket;
    descriptionpacket->Create(parfilename, filesize);

    verificationpacket = new VerificationPacket;
    verificationpacket->Create(blockcount);

    // Create the diskfile object
    diskfile  = new DiskFile;

    // Open the source file
    if (!diskfile->Open(diskfilename, filesize))
        return false;

    // Do we want to defer the computation of the full file hash, and
    // the block crc and hashes. This is only permitted if there
    // is sufficient memory available to create all recovery blocks
    // in one pass of the source files (i.e. chunksize == blocksize)
    if (deferhashcomputation)
    {
        // Initialise a buffer to read the first 16k of the source file
        size_t buffersize = 16 * 1024;
        if (buffersize > filesize)
            buffersize = (size_t)filesize;
        char *buffer = new char[buffersize];

        // Read the data from the file
        if (!diskfile->Read(0, buffer, buffersize))
        {
            diskfile->Close();
            delete [] buffer;
            return false;
        }

        // Compute the hash of the data read from the file
        MD5Context context;
        context.Update(buffer, buffersize);
        delete [] buffer;
        MD5Hash hash;
        context.Final(hash);

        // Store the hash in the descriptionpacket and compute the file id
        descriptionpacket->Hash16k(hash);

        // Compute the fileid and store it in the verification packet.
        descriptionpacket->ComputeFileId();
        verificationpacket->FileId(descriptionpacket->FileId());

        // Allocate an MD5 context for computing the file hash
        // during the recovery data generation phase
        contextfull = new MD5Context;
    }
    else
    {
        // Initialise a buffer to read the source file
        size_t buffersize = 1024*1024;
        if (buffersize > min(blocksize,filesize))
            buffersize = (size_t)min(blocksize,filesize);
        char *buffer = new char[buffersize];

        // Get ready to start reading source file to compute the hashes and crcs
        u64 offset = 0;
        u32 blocknumber = 0;
        u64 need = blocksize;

        MD5Context filecontext;
        MD5Context blockcontext;
        u32        blockcrc = 0;

        // Whilst we have not reached the end of the file
        while (offset < filesize)
        {
            // Work out how much we can read
            size_t want = (size_t)min(filesize-offset, (u64)buffersize);

            // Read some data from the file into the buffer
            if (!diskfile->Read(offset, buffer, want))
            {
                diskfile->Close();
                delete [] buffer;
                return false;
            }

            // If the new data passes the 16k boundary, compute the 16k hash for the file
            if (offset < 16384 && offset + want >= 16384)
            {
                filecontext.Update(buffer, (size_t)(16384-offset));

                MD5Context temp = filecontext;
                MD5Hash hash;
                temp.Final(hash);

                // Store the 16k hash in the file description packet
                descriptionpacket->Hash16k(hash);

                if (offset + want > 16384)
                {
                    filecontext.Update(&buffer[16384-offset], (size_t)(offset+want)-16384);
                }
            }
            else
            {
                filecontext.Update(buffer, want);
            }

            // Get ready to update block hashes and crcs
            u32 used = 0;

            // Whilst we have not used all of the data we just read
            while (used < want)
            {
                // How much of it can we use for the current block
                u32 use = (u32)min(need, (u64)(want-used));

                blockcrc = ~0 ^ CRCUpdateBlock(~0 ^ blockcrc, use, &buffer[used]);
                blockcontext.Update(&buffer[used], use);

                used += use;
                need -= use;

                // Have we finished the current block
                if (need == 0)
                {
                    MD5Hash blockhash;
                    blockcontext.Final(blockhash);

                    // Store the block hash and block crc in the file verification packet.
                    verificationpacket->SetBlockHashAndCRC(blocknumber, blockhash, blockcrc);

                    blocknumber++;

                    // More blocks
                    if (blocknumber < blockcount)
                    {
                        need = blocksize;

                        blockcontext.Reset();
                        blockcrc = 0;
                    }
                }
            }

            if (noiselevel > CommandLine::nlQuiet)
            {
                // Display progress
                u32 oldfraction = (u32)(1000 * offset / filesize);
                u32 newfraction = (u32)(1000 * (offset + want) / filesize);
                if (oldfraction != newfraction)
                {
                    cout << newfraction/10 << '.' << newfraction%10 << "%\r" << flush;
                }
            }

            offset += want;
        }

        // Did we finish the last block
        if (need > 0)
        {
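            // The final partial block is padded with zero bytes to the full block size for its crc and hash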
            blockcrc = ~0 ^ CRCUpdateBlock(~0 ^ blockcrc, (size_t)need);
            blockcontext.Update((size_t)need);

            MD5Hash blockhash;
            blockcontext.Final(blockhash);

            // Store the block hash and block crc in the file verification packet.
            verificationpacket->SetBlockHashAndCRC(blocknumber, blockhash, blockcrc);

            blocknumber++;

            need = 0;
        }

        // Finish computing the file hash.
        MD5Hash filehash;
        filecontext.Final(filehash);

        // Store the file hash in the file description packet.
        descriptionpacket->HashFull(filehash);

        // Did we compute the 16k hash.
        if (offset < 16384)
        {
            // Store the 16k hash in the file description packet.
            descriptionpacket->Hash16k(filehash);
        }

        delete [] buffer;

        // Compute the fileid and store it in the verification packet.
        descriptionpacket->ComputeFileId();
        verificationpacket->FileId(descriptionpacket->FileId());
    }

    return true;
}