// Writes `size` bytes, splitting the data across output volumes.
// A new volume stream is requested from VolumeCallback on demand; at most the
// volume's "pure" payload capacity is written to each volume, after which the
// current volume is flushed. A running CRC of the payload is maintained.
//
// Fix vs. original: added the missing ';' after the RINOK(...) write call
// (the original relied on the macro's block form and ran the next statement
// on the same line).
STDMETHODIMP COutVolumeStream::Write(const void *data, UInt32 size, UInt32 *processedSize)
{
  if(processedSize != NULL)
    *processedSize = 0;
  while(size > 0)
  {
    if (!_volumeStream)
    {
      // No open volume: ask the callback for the next volume's size and
      // stream, then start a fresh archive on it.
      RINOK(VolumeCallback->GetVolumeSize(_volIndex, &_volSize));
      RINOK(VolumeCallback->GetVolumeStream(_volIndex, &_volumeStream));
      _volIndex++;
      _curPos = 0;
      RINOK(_archive.Create(_volumeStream, true));
      RINOK(_archive.SkeepPrefixArchiveHeader());
      _crc.Init();
      continue; // re-enter the loop with the volume now available
    }
    // Capacity available for payload in this volume (excludes header space
    // reserved for the file name).
    UInt64 pureSize = COutArchive::GetVolPureSize(_volSize, _file.Name.Length());
    UInt32 curSize = (UInt32)MyMin(UInt64(size), pureSize - _curPos);
    _crc.Update(data, curSize);
    UInt32 realProcessed;
    RINOK(_volumeStream->Write(data, curSize, &realProcessed));
    data = (void *)((Byte *)data + realProcessed);
    size -= realProcessed;
    if(processedSize != NULL)
      *processedSize += realProcessed;
    _curPos += realProcessed;
    // A short write that delivered zero bytes means the target stream failed.
    if (realProcessed != curSize && realProcessed == 0)
      return E_FAIL;
    if (_curPos == pureSize)
    {
      // Volume payload complete — finalize it (Flush presumably releases
      // _volumeStream so the next iteration opens a new volume).
      RINOK(Flush());
    }
  }
  return S_OK;
}
// Updates the single item a gzip archive can hold.
// Exactly one item is supported (numItems must be 1). Depending on the
// callback's answer, either re-compresses new data, rewrites only the header
// with new properties, or copies the archive through unchanged.
// NOTE: lazily resolves 0xFFFFFFFF "unset" fields in the member m_Method from
// the compression level — this mutates handler state on first use.
STDMETHODIMP CHandler::UpdateItems(ISequentialOutStream *outStream, UInt32 numItems,
    IArchiveUpdateCallback *updateCallback)
{
  if (numItems != 1)
    return E_INVALIDARG;
  UInt64 size;
  Int32 newData;
  Int32 newProperties;
  UInt32 indexInArchive;
  UInt32 itemIndex = 0;
  if (!updateCallback)
    return E_FAIL;
  RINOK(updateCallback->GetUpdateItemInfo(0, &newData, &newProperties, &indexInArchive));
  // Start from the currently loaded item; flags are reset for the new header.
  CItem newItem = m_Item;
  newItem.ExtraFlags = 0;
  newItem.Flags = 0;
  if (IntToBool(newProperties))
  {
    FILETIME utcTime;
    UString name;
    {
      // Modification time is mandatory for new properties.
      NCOM::CPropVariant prop;
      RINOK(updateCallback->GetProperty(itemIndex, kpidMTime, &prop));
      if (prop.vt != VT_FILETIME)
        return E_INVALIDARG;
      utcTime = prop.filetime;
    }
    {
      // Path is optional (VT_EMPTY means "no name").
      NCOM::CPropVariant prop;
      RINOK(updateCallback->GetProperty(itemIndex, kpidPath, &prop));
      if (prop.vt == VT_EMPTY)
        name.Empty();
      else if (prop.vt != VT_BSTR)
        return E_INVALIDARG;
      else
        name = prop.bstrVal;
    }
    {
      // Directories cannot be stored in gzip: reject IsDir == true.
      NCOM::CPropVariant prop;
      RINOK(updateCallback->GetProperty(itemIndex, kpidIsDir, &prop));
      if (prop.vt == VT_BOOL)
      {
        if (prop.boolVal != VARIANT_FALSE)
          return E_INVALIDARG;
      }
      else if (prop.vt != VT_EMPTY)
        return E_INVALIDARG;
    }
    if(!FileTimeToUnixTime(utcTime, newItem.Time))
      return E_INVALIDARG;
    // The header stores a bare file name: strip any directory components.
    newItem.Name = UnicodeStringToMultiByte(name, CP_ACP);
    int dirDelimiterPos = newItem.Name.ReverseFind(CHAR_PATH_SEPARATOR);
    if (dirDelimiterPos >= 0)
      newItem.Name = newItem.Name.Mid(dirDelimiterPos + 1);
    newItem.SetNameIsPresentFlag(!newItem.Name.IsEmpty());
  }
  if (IntToBool(newData))
  {
    {
      NCOM::CPropVariant prop;
      RINOK(updateCallback->GetProperty(itemIndex, kpidSize, &prop));
      if (prop.vt != VT_UI8)
        return E_INVALIDARG;
      size = prop.uhVal.QuadPart;
    }
    // gzip records the uncompressed size modulo 2^32.
    newItem.UnPackSize32 = (UInt32)size;
    // Resolve compression parameters left at the 0xFFFFFFFF "default" marker
    // from the effective level (level itself defaults to 5).
    UInt32 level = m_Level;
    if (level == 0xFFFFFFFF)
      level = 5;
    if (m_Method.NumPasses == 0xFFFFFFFF)
      m_Method.NumPasses = (level >= 9 ? kNumPassesX9 :
          (level >= 7 ? kNumPassesX7 : kNumPassesX1));
    if (m_Method.NumFastBytes == 0xFFFFFFFF)
      m_Method.NumFastBytes = (level >= 9 ? kNumFastBytesX9 :
          (level >= 7 ? kNumFastBytesX7 : kNumFastBytesX1));
    if (m_Method.Algo == 0xFFFFFFFF)
      m_Method.Algo = (level >= 5 ? kAlgoX5 : kAlgoX1);
    // Re-compress from the callback's stream into the output archive.
    return UpdateArchive(
        EXTERNAL_CODECS_VARS
        m_Stream, size, outStream, newItem, m_Method, itemIndex, updateCallback);
  }
  if (indexInArchive != 0)
    return E_INVALIDARG;
  if (IntToBool(newProperties))
  {
    // New header, old compressed data: write the header, then position the
    // input after its original header so only the data is copied below.
    COutArchive outArchive;
    outArchive.Create(outStream);
    outArchive.WriteHeader(newItem);
    RINOK(m_Stream->Seek(m_StreamStartPosition + m_DataOffset, STREAM_SEEK_SET, NULL));
  }
  else
  {
    // Nothing changed: copy the whole archive from its start.
    RINOK(m_Stream->Seek(m_StreamStartPosition, STREAM_SEEK_SET, NULL));
  }
  return CopyStreams(m_Stream, outStream);
}
// Core 7z update routine: writes a new archive consisting of
//   1) pack streams copied verbatim from folders whose files are unchanged,
//   2) newly compressed "solid" folders for items with new data,
//   3) empty files/dirs/anti items,
// then the archive database (header). Folders that mix changed and unchanged
// files would need repacking, which is not implemented (E_NOTIMPL).
// `database` may be NULL when creating a brand-new archive.
static HRESULT Update2(
    DECL_EXTERNAL_CODECS_LOC_VARS
    IInStream *inStream,
    const CArchiveDatabaseEx *database,
    const CObjectVector<CUpdateItem> &updateItems,
    ISequentialOutStream *seqOutStream,
    IArchiveUpdateCallback *updateCallback,
    const CUpdateOptions &options)
{
  UInt64 numSolidFiles = options.NumSolidFiles;
  if (numSolidFiles == 0)
    numSolidFiles = 1;
  /*
  CMyComPtr<IOutStream> outStream;
  RINOK(seqOutStream->QueryInterface(IID_IOutStream, (void **)&outStream));
  if (!outStream)
    return E_NOTIMPL;
  */
  // Preserve a leading SFX stub unless the caller asked to drop it.
  UInt64 startBlockSize = database != 0 ? database->ArchiveInfo.StartPosition: 0;
  if (startBlockSize > 0 && !options.RemoveSfxBlock)
  {
    RINOK(WriteRange(inStream, seqOutStream, 0, startBlockSize, NULL));
  }
  // fileIndexToUpdateIndexMap[fileIndex] = index into updateItems, or -1.
  CRecordVector<int> fileIndexToUpdateIndexMap;
  if (database != 0)
  {
    fileIndexToUpdateIndexMap.Reserve(database->Files.Size());
    for (int i = 0; i < database->Files.Size(); i++)
      fileIndexToUpdateIndexMap.Add(-1);
  }
  int i;
  for(i = 0; i < updateItems.Size(); i++)
  {
    int index = updateItems[i].IndexInArchive;
    if (index != -1)
      fileIndexToUpdateIndexMap[index] = i;
  }
  // Collect folders that can be copied as-is: every streamed file in the
  // folder must be kept unchanged (numCopyItems == numUnPackStreams).
  CRecordVector<int> folderRefs;
  if (database != 0)
  {
    for(i = 0; i < database->Folders.Size(); i++)
    {
      CNum indexInFolder = 0;
      CNum numCopyItems = 0;
      CNum numUnPackStreams = database->NumUnPackStreamsVector[i];
      for (CNum fileIndex = database->FolderStartFileIndex[i];
          indexInFolder < numUnPackStreams; fileIndex++)
      {
        if (database->Files[fileIndex].HasStream)
        {
          indexInFolder++;
          int updateIndex = fileIndexToUpdateIndexMap[fileIndex];
          if (updateIndex >= 0)
            if (!updateItems[updateIndex].NewData)
              numCopyItems++;
        }
      }
      if (numCopyItems != numUnPackStreams && numCopyItems != 0)
        return E_NOTIMPL; // It needs repacking !!!
      if (numCopyItems > 0)
        folderRefs.Add(i);
    }
    folderRefs.Sort(CompareFolderRefs, (void *)database);
  }
  CArchiveDatabase newDatabase;
  ////////////////////////////
  COutArchive archive;
  RINOK(archive.Create(seqOutStream, false));
  RINOK(archive.SkeepPrefixArchiveHeader());
  // Total progress = bytes to copy + bytes to compress.
  UInt64 complexity = 0;
  for(i = 0; i < folderRefs.Size(); i++)
    complexity += database->GetFolderFullPackSize(folderRefs[i]);
  // inSizeForReduce sizes the encoder dictionary: largest single file in
  // non-solid mode, sum of all new data in solid mode.
  UInt64 inSizeForReduce = 0;
  for(i = 0; i < updateItems.Size(); i++)
  {
    const CUpdateItem &updateItem = updateItems[i];
    if (updateItem.NewData)
    {
      complexity += updateItem.Size;
      if (numSolidFiles == 1)
      {
        if (updateItem.Size > inSizeForReduce)
          inSizeForReduce = updateItem.Size;
      }
      else
        inSizeForReduce += updateItem.Size;
    }
  }
  RINOK(updateCallback->SetTotal(complexity));
  complexity = 0;
  RINOK(updateCallback->SetCompleted(&complexity));
  CLocalProgress *lps = new CLocalProgress;
  CMyComPtr<ICompressProgressInfo> progress = lps;
  lps->Init(updateCallback, true);

  /////////////////////////////////////////
  // Write Copy Items
  for(i = 0; i < folderRefs.Size(); i++)
  {
    int folderIndex = folderRefs[i];
    lps->ProgressOffset = complexity;
    // Copy the folder's packed bytes verbatim from the old archive.
    UInt64 packSize = database->GetFolderFullPackSize(folderIndex);
    RINOK(WriteRange(inStream, archive.SeqStream,
        database->GetFolderStreamPos(folderIndex, 0), packSize, progress));
    complexity += packSize;
    const CFolder &folder = database->Folders[folderIndex];
    CNum startIndex = database->FolderStartPackStreamIndex[folderIndex];
    for (int j = 0; j < folder.PackStreams.Size(); j++)
    {
      newDatabase.PackSizes.Add(database->PackSizes[startIndex + j]);
      // newDatabase.PackCRCsDefined.Add(database.PackCRCsDefined[startIndex + j]);
      // newDatabase.PackCRCs.Add(database.PackCRCs[startIndex + j]);
    }
    newDatabase.Folders.Add(folder);
    CNum numUnPackStreams = database->NumUnPackStreamsVector[folderIndex];
    newDatabase.NumUnPackStreamsVector.Add(numUnPackStreams);
    // Re-register the folder's files, substituting new properties (name,
    // attributes) while keeping size/CRC/stream info from the old entry.
    CNum indexInFolder = 0;
    for (CNum fi = database->FolderStartFileIndex[folderIndex];
        indexInFolder < numUnPackStreams; fi++)
    {
      CFileItem file = database->Files[fi];
      if (file.HasStream)
      {
        indexInFolder++;
        int updateIndex = fileIndexToUpdateIndexMap[fi];
        if (updateIndex >= 0)
        {
          const CUpdateItem &updateItem = updateItems[updateIndex];
          if (updateItem.NewProperties)
          {
            CFileItem file2;
            FromUpdateItemToFileItem(updateItem, file2);
            file2.UnPackSize = file.UnPackSize;
            file2.FileCRC = file.FileCRC;
            file2.IsFileCRCDefined = file.IsFileCRCDefined;
            file2.HasStream = file.HasStream;
            file = file2;
          }
        }
        newDatabase.Files.Add(file);
      }
    }
  }

  /////////////////////////////////////////
  // Compress New Files
  CObjectVector<CSolidGroup> groups;
  SplitFilesToGroups(*options.Method, options.UseFilters, options.MaxFilter,
      updateItems, groups);
  const UInt32 kMinReduceSize = (1 << 16);
  if (inSizeForReduce < kMinReduceSize)
    inSizeForReduce = kMinReduceSize;
  for (int groupIndex = 0; groupIndex < groups.Size(); groupIndex++)
  {
    const CSolidGroup &group = groups[groupIndex];
    int numFiles = group.Indices.Size();
    if (numFiles == 0)
      continue;
    // Order the group's files (by type when solid) before forming folders.
    CRecordVector<CRefItem> refItems;
    refItems.Reserve(numFiles);
    bool sortByType = (numSolidFiles > 1);
    for (i = 0; i < numFiles; i++)
      refItems.Add(CRefItem(group.Indices[i], updateItems[group.Indices[i]], sortByType));
    refItems.Sort(CompareUpdateItems, (void *)&sortByType);
    CRecordVector<UInt32> indices;
    indices.Reserve(numFiles);
    for (i = 0; i < numFiles; i++)
    {
      UInt32 index = refItems[i].Index;
      indices.Add(index);
      /*
      const CUpdateItem &updateItem = updateItems[index];
      CFileItem file;
      if (updateItem.NewProperties)
        FromUpdateItemToFileItem(updateItem, file);
      else
        file = database.Files[updateItem.IndexInArchive];
      if (file.IsAnti || file.IsDirectory)
        return E_FAIL;
      newDatabase.Files.Add(file);
      */
    }
    CEncoder encoder(group.Method);
    // Chunk the sorted files into solid folders, bounded by numSolidFiles,
    // NumSolidBytes, and (optionally) a common file extension.
    for (i = 0; i < numFiles;)
    {
      UInt64 totalSize = 0;
      int numSubFiles;
      UString prevExtension;
      for (numSubFiles = 0; i + numSubFiles < numFiles &&
          numSubFiles < numSolidFiles; numSubFiles++)
      {
        const CUpdateItem &updateItem = updateItems[indices[i + numSubFiles]];
        totalSize += updateItem.Size;
        if (totalSize > options.NumSolidBytes)
          break;
        if (options.SolidExtension)
        {
          UString ext = updateItem.GetExtension();
          if (numSubFiles == 0)
            prevExtension = ext;
          else if (ext.CompareNoCase(prevExtension) != 0)
            break;
        }
      }
      if (numSubFiles < 1)
        numSubFiles = 1;
      CFolderInStream *inStreamSpec = new CFolderInStream;
      CMyComPtr<ISequentialInStream> solidInStream(inStreamSpec);
      inStreamSpec->Init(updateCallback, &indices[i], numSubFiles);
      CFolder folderItem;
      int startPackIndex = newDatabase.PackSizes.Size();
      RINOK(encoder.Encode(
          EXTERNAL_CODECS_LOC_VARS
          solidInStream, NULL, &inSizeForReduce, folderItem,
          archive.SeqStream, newDatabase.PackSizes, progress));
      for (; startPackIndex < newDatabase.PackSizes.Size(); startPackIndex++)
        lps->OutSize += newDatabase.PackSizes[startPackIndex];
      lps->InSize += folderItem.GetUnPackSize();
      // for ()
      // newDatabase.PackCRCsDefined.Add(false);
      // newDatabase.PackCRCs.Add(0);
      newDatabase.Folders.Add(folderItem);
      CNum numUnPackStreams = 0;
      for (int subIndex = 0; subIndex < numSubFiles; subIndex++)
      {
        const CUpdateItem &updateItem = updateItems[indices[i + subIndex]];
        CFileItem file;
        if (updateItem.NewProperties)
          FromUpdateItemToFileItem(updateItem, file);
        else
          file = database->Files[updateItem.IndexInArchive];
        if (file.IsAnti || file.IsDirectory)
          return E_FAIL;
        /*
        CFileItem &file = newDatabase.Files[
            startFileIndexInDatabase + i + subIndex];
        */
        // Files the callback could not open are silently skipped here.
        if (!inStreamSpec->Processed[subIndex])
        {
          continue;
          // file.Name += L".locked";
        }
        file.FileCRC = inStreamSpec->CRCs[subIndex];
        file.UnPackSize = inStreamSpec->Sizes[subIndex];
        if (file.UnPackSize != 0)
        {
          file.IsFileCRCDefined = true;
          file.HasStream = true;
          numUnPackStreams++;
        }
        else
        {
          file.IsFileCRCDefined = false;
          file.HasStream = false;
        }
        newDatabase.Files.Add(file);
      }
      // numUnPackStreams = 0 is very bad case for locked files
      // v3.13 doesn't understand it.
      newDatabase.NumUnPackStreamsVector.Add(numUnPackStreams);
      i += numSubFiles;
    }
  }

  {
    /////////////////////////////////////////
    // Write Empty Files & Folders
    // Items without a data stream (dirs, empty files, anti items) carry no
    // pack data and are appended to the database only.
    CRecordVector<int> emptyRefs;
    for(i = 0; i < updateItems.Size(); i++)
    {
      const CUpdateItem &updateItem = updateItems[i];
      if (updateItem.NewData)
      {
        if (updateItem.HasStream())
          continue;
      }
      else if (updateItem.IndexInArchive != -1)
        if (database->Files[updateItem.IndexInArchive].HasStream)
          continue;
      emptyRefs.Add(i);
    }
    emptyRefs.Sort(CompareEmptyItems, (void *)&updateItems);
    for(i = 0; i < emptyRefs.Size(); i++)
    {
      const CUpdateItem &updateItem = updateItems[emptyRefs[i]];
      CFileItem file;
      if (updateItem.NewProperties)
        FromUpdateItemToFileItem(updateItem, file);
      else
        file = database->Files[updateItem.IndexInArchive];
      newDatabase.Files.Add(file);
    }
  }
  /*
  if (newDatabase.Files.Size() != updateItems.Size())
    return E_FAIL;
  */
  return archive.WriteDatabase(EXTERNAL_CODECS_LOC_VARS
      newDatabase, options.HeaderMethod, options.HeaderOptions);
}
// Writes a single file's data as a multi-volume 7z archive (one item only).
// For each volume: obtain its size and stream from the callback, copy up to
// the volume's "pure" payload size from the file (with CRC), then write the
// per-volume header. Stops when the file stream is exhausted.
HRESULT UpdateVolume(
    IInStream *inStream,
    const CArchiveDatabaseEx *database,
    CObjectVector<CUpdateItem> &updateItems,
    ISequentialOutStream *seqOutStream,
    IArchiveUpdateCallback *updateCallback,
    const CUpdateOptions &options)
{
  // Multi-volume update handles exactly one item.
  if (updateItems.Size() != 1)
    return E_NOTIMPL;
  // The extended callback supplies per-volume sizes and streams.
  CMyComPtr<IArchiveUpdateCallback2> volumeCallback;
  RINOK(updateCallback->QueryInterface(IID_IArchiveUpdateCallback2, (void **)&volumeCallback));
  if (!volumeCallback)
    return E_NOTIMPL;
  CMyComPtr<ISequentialInStream> fileStream;
  HRESULT result = updateCallback->GetStream(0, &fileStream);
  if (result != S_OK && result != S_FALSE)
    return result;
  if (result == S_FALSE)
    return E_FAIL;
  CFileItem file;
  const CUpdateItem &updateItem = updateItems[0];
  if (updateItem.NewProperties)
    FromUpdateItemToFileItem(updateItem, file);
  else
    file = database->Files[updateItem.IndexInArchive];
  if (file.IsAnti || file.IsDirectory)
    return E_FAIL;
  UInt64 complexity = 0;
  // StartPos tracks this volume's offset within the whole file.
  file.IsStartPosDefined = true;
  file.StartPos = 0;
  for (UInt64 volumeIndex = 0; true; volumeIndex++)
  {
    UInt64 volSize;
    RINOK(volumeCallback->GetVolumeSize(volumeIndex, &volSize));
    // Payload capacity of this volume after header overhead.
    UInt64 pureSize = COutArchive::GetVolPureSize(volSize, file.Name.Length(), true);
    CMyComPtr<ISequentialOutStream> volumeStream;
    RINOK(volumeCallback->GetVolumeStream(volumeIndex, &volumeStream));
    COutArchive archive;
    RINOK(archive.Create(volumeStream, true));
    RINOK(archive.SkeepPrefixArchiveHeader());
    // Wrap the input so size and CRC of the copied slice are captured.
    CSequentialInStreamWithCRC *inCrcStreamSpec = new CSequentialInStreamWithCRC;
    CMyComPtr<ISequentialInStream> inCrcStream = inCrcStreamSpec;
    inCrcStreamSpec->Init(fileStream);
    RINOK(WriteRange(inCrcStream, volumeStream, pureSize, updateCallback, complexity));
    file.UnPackSize = inCrcStreamSpec->GetSize();
    // Zero bytes copied: the file ended exactly at the previous volume.
    if (file.UnPackSize == 0)
      break;
    file.FileCRC = inCrcStreamSpec->GetCRC();
    RINOK(WriteVolumeHeader(archive, file, options));
    file.StartPos += file.UnPackSize;
    // A partially filled volume means the file is fully written.
    if (file.UnPackSize < pureSize)
      break;
  }
  return S_OK;
}
// Rewrites a tar archive. For each update item either:
//  - NewData: stream fresh content from the callback (handling symlinks,
//    hard links, and a size mismatch between the promised and the actual
//    stream length by seeking back and rewriting the header), or
//  - NewProps only: emit a refreshed header and copy the old data, or
//  - unchanged: copy header + data verbatim from inStream.
// Finishes with the tar end-of-archive blocks.
//
// Fix vs. original: removed a stray double semicolon after the
// FileTimeToUnixTime64 assignment. No other token changed.
HRESULT UpdateArchive(IInStream *inStream, ISequentialOutStream *outStream,
    const CObjectVector<NArchive::NTar::CItemEx> &inputItems,
    const CObjectVector<CUpdateItem> &updateItems,
    UINT codePage,
    IArchiveUpdateCallback *updateCallback)
{
  COutArchive outArchive;
  outArchive.Create(outStream);
  outArchive.Pos = 0;
  // Seekable output is optional; it is only required when a written stream's
  // actual size differs from the size announced in the header.
  CMyComPtr<IOutStream> outSeekStream;
  outStream->QueryInterface(IID_IOutStream, (void **)&outSeekStream);
  // First pass: total byte count for progress reporting.
  UInt64 complexity = 0;
  unsigned i;
  for (i = 0; i < updateItems.Size(); i++)
  {
    const CUpdateItem &ui = updateItems[i];
    if (ui.NewData)
      complexity += ui.Size;
    else
      complexity += inputItems[ui.IndexInArchive].GetFullSize();
  }
  RINOK(updateCallback->SetTotal(complexity));
  NCompress::CCopyCoder *copyCoderSpec = new NCompress::CCopyCoder;
  CMyComPtr<ICompressCoder> copyCoder = copyCoderSpec;
  CLocalProgress *lps = new CLocalProgress;
  CMyComPtr<ICompressProgressInfo> progress = lps;
  lps->Init(updateCallback, true);
  CLimitedSequentialInStream *streamSpec = new CLimitedSequentialInStream;
  CMyComPtr<CLimitedSequentialInStream> inStreamLimited(streamSpec);
  streamSpec->SetStream(inStream);
  complexity = 0;
  for (i = 0; i < updateItems.Size(); i++)
  {
    lps->InSize = lps->OutSize = complexity;
    RINOK(lps->SetCur());
    const CUpdateItem &ui = updateItems[i];
    CItem item;
    if (ui.NewProps)
    {
      // Build a header from the update request.
      item.Mode = ui.Mode;
      item.Name = ui.Name;
      item.User = ui.User;
      item.Group = ui.Group;
      if (ui.IsDir)
      {
        item.LinkFlag = NFileHeader::NLinkFlag::kDirectory;
        item.PackSize = 0;
      }
      else
      {
        item.LinkFlag = NFileHeader::NLinkFlag::kNormal;
        item.PackSize = ui.Size;
      }
      item.MTime = ui.MTime;
      item.DeviceMajorDefined = false;
      item.DeviceMinorDefined = false;
      item.UID = 0;
      item.GID = 0;
      memcpy(item.Magic, NFileHeader::NMagic::kUsTar_00, 8);
    }
    else
      item = inputItems[ui.IndexInArchive];
    // A non-empty symlink target turns the entry into a symlink record.
    AString symLink;
    if (ui.NewData || ui.NewProps)
    {
      RINOK(GetPropString(updateCallback, ui.IndexInClient, kpidSymLink, symLink, codePage, true));
      if (!symLink.IsEmpty())
      {
        item.LinkFlag = NFileHeader::NLinkFlag::kSymLink;
        item.LinkName = symLink;
      }
    }
    if (ui.NewData)
    {
      item.SparseBlocks.Clear();
      item.PackSize = ui.Size;
      item.Size = ui.Size;
      if (ui.Size == (UInt64)(Int64)-1)
        return E_INVALIDARG;
      CMyComPtr<ISequentialInStream> fileInStream;
      bool needWrite = true;
      if (!symLink.IsEmpty())
      {
        // Symlinks store their target in the header; no data blocks.
        item.PackSize = 0;
        item.Size = 0;
      }
      else
      {
        HRESULT res = updateCallback->GetStream(ui.IndexInClient, &fileInStream);
        if (res == S_FALSE)
          needWrite = false; // callback declined: skip this entry entirely
        else
        {
          RINOK(res);
          if (fileInStream)
          {
            // Refresh size and mtime from the live stream when available.
            CMyComPtr<IStreamGetProps> getProps;
            fileInStream->QueryInterface(IID_IStreamGetProps, (void **)&getProps);
            if (getProps)
            {
              FILETIME mTime;
              UInt64 size2;
              if (getProps->GetProps(&size2, NULL, NULL, &mTime, NULL) == S_OK)
              {
                item.PackSize = size2;
                item.MTime = NWindows::NTime::FileTimeToUnixTime64(mTime);
              }
            }
          }
          {
            // Hard links also carry no data; drop the stream if one applies.
            AString hardLink;
            RINOK(GetPropString(updateCallback, ui.IndexInClient, kpidHardLink, hardLink, codePage, true));
            if (!hardLink.IsEmpty())
            {
              item.LinkFlag = NFileHeader::NLinkFlag::kHardLink;
              item.LinkName = hardLink;
              item.PackSize = 0;
              item.Size = 0;
              fileInStream.Release();
            }
          }
        }
      }
      if (needWrite)
      {
        UInt64 fileHeaderStartPos = outArchive.Pos;
        RINOK(outArchive.WriteHeader(item));
        if (fileInStream)
        {
          RINOK(copyCoder->Code(fileInStream, outStream, NULL, NULL, progress));
          outArchive.Pos += copyCoderSpec->TotalSize;
          if (copyCoderSpec->TotalSize != item.PackSize)
          {
            // The stream delivered a different byte count than the header
            // promised: seek back, rewrite the header with the true size,
            // and skip forward over the already-written data.
            if (!outSeekStream)
              return E_FAIL;
            UInt64 backOffset = outArchive.Pos - fileHeaderStartPos;
            RINOK(outSeekStream->Seek(-(Int64)backOffset, STREAM_SEEK_CUR, NULL));
            outArchive.Pos = fileHeaderStartPos;
            item.PackSize = copyCoderSpec->TotalSize;
            RINOK(outArchive.WriteHeader(item));
            RINOK(outSeekStream->Seek(item.PackSize, STREAM_SEEK_CUR, NULL));
            outArchive.Pos += item.PackSize;
          }
          // Pad the data area to the tar block boundary.
          RINOK(outArchive.FillDataResidual(item.PackSize));
        }
      }
      complexity += item.PackSize;
      RINOK(updateCallback->SetOperationResult(NArchive::NUpdate::NOperationResult::kOK));
    }
    else
    {
      const CItemEx &existItem = inputItems[ui.IndexInArchive];
      UInt64 size;
      if (ui.NewProps)
      {
        //
        memcpy(item.Magic, NFileHeader::NMagic::kEmpty, 8);
        if (!symLink.IsEmpty())
        {
          item.PackSize = 0;
          item.Size = 0;
        }
        else
        {
          // Keep the original link flag unless the dir-ness changed.
          if (ui.IsDir == existItem.IsDir())
            item.LinkFlag = existItem.LinkFlag;
          item.SparseBlocks = existItem.SparseBlocks;
          item.Size = existItem.Size;
          item.PackSize = existItem.PackSize;
        }
        item.DeviceMajorDefined = existItem.DeviceMajorDefined;
        item.DeviceMinorDefined = existItem.DeviceMinorDefined;
        item.DeviceMajor = existItem.DeviceMajor;
        item.DeviceMinor = existItem.DeviceMinor;
        item.UID = existItem.UID;
        item.GID = existItem.GID;
        // New header + old data.
        RINOK(outArchive.WriteHeader(item));
        RINOK(inStream->Seek(existItem.GetDataPosition(), STREAM_SEEK_SET, NULL));
        size = existItem.PackSize;
      }
      else
      {
        // Unchanged: copy header and data together.
        RINOK(inStream->Seek(existItem.HeaderPos, STREAM_SEEK_SET, NULL));
        size = existItem.GetFullSize();
      }
      streamSpec->Init(size);
      RINOK(copyCoder->Code(inStreamLimited, outStream, NULL, NULL, progress));
      if (copyCoderSpec->TotalSize != size)
        return E_FAIL;
      outArchive.Pos += size;
      RINOK(outArchive.FillDataResidual(existItem.PackSize));
      complexity += size;
    }
  }
  lps->InSize = lps->OutSize = complexity;
  RINOK(lps->SetCur());
  return outArchive.WriteFinishHeader();
}
// Create a new 7z archive for each folder contained in the archive to be
// exploded. Opens each input path as a 7z archive, asks the handler to
// "explode" it into per-folder databases, and writes each folder out as a
// standalone .7z file under outputPath. Errors on one archive are reported
// and the loop continues with the next (E_ABORT aborts everything).
HRESULT ExplodeArchives(CCodecs *codecs, const CIntVector &formatIndices,
    bool stdInMode,
    UStringVector &arcPaths, UStringVector &arcPathsFull,
    UString& outputPath, UInt64 maxDepth, UInt64 &numErrors)
{
  int numArcs = arcPaths.Size();
  for (int i = 0; i < numArcs; i++)
  {
    UString archivePath = arcPaths[i];
    /*UString outputPath = arcPaths[i];
    outputPath.Replace(L'\\', L'/'); // linux and windows consistent
    const UString archiveName = StripFile(outputPath);
    outputPath.Empty();*/
    archivePath.Replace(L'\\', L'/'); // linux, windows and archive consistent
    outputPath.Replace(L'\\', L'/');
    FixPathFormat(outputPath);
    const UString archiveName = GetFileFromPath(archivePath);
    g_StdOut << "Outputting into : " << outputPath << endl;
    // NOTE(review): arcPackSize is captured but not used below — confirm
    // whether SHOW_ERROR or later code was meant to consume it.
    UInt64 arcPackSize = 0;
    if (!stdInMode)
    {
      NFile::NFind::CFileInfoW fi;
      if (!fi.Find(archivePath) || fi.IsDir())
      {
        SHOW_ERROR("is not a file.");
        continue;
      }
      arcPackSize = fi.Size;
    }
    g_StdOut << endl << "Exploding : " << archivePath << endl << endl;
    CArchiveLink archiveLink;
    COpenCallbackConsole openCallback;
    openCallback.OutStream = &g_StdOut;
    #ifndef _NO_CRYPTO
    openCallback.PasswordIsDefined = false;
    #endif
    HRESULT result = archiveLink.Open2(codecs, formatIndices, stdInMode,
        NULL, archivePath, &openCallback);
    if (result != S_OK)
    {
      if (result == E_ABORT)
        return result;
      g_StdOut << endl << "Error: " << archivePath << ": ";
      if (result == S_FALSE)
        g_StdOut << "Can not open file as archive";
      else if (result == E_OUTOFMEMORY)
        g_StdOut << "Can't allocate required memory";
      else
        g_StdOut << NError::MyFormatMessage(result);
      g_StdOut << endl;
      numErrors++;
      continue;
    }
    // remove other files names if multi-volume
    if (!stdInMode)
    {
      for (int v = 0; v < archiveLink.VolumePaths.Size(); v++)
      {
        int index = arcPathsFull.FindInSorted(archiveLink.VolumePaths[v]);
        // Only drop entries after the current index so iteration stays valid.
        if (index >= 0 && index > i)
        {
          arcPaths.Delete(index);
          arcPathsFull.Delete(index);
          numArcs = arcPaths.Size();
        }
      }
    }
    // don't support multi volume because i have to reopen the stream
    if (archiveLink.VolumePaths.Size() != 1)
    {
      SHOW_ERROR("Exploding multi-volume archives isn't supported.");
      continue;
    }
    // Every arc in the chain must be the 7z format.
    bool szArchive = true;
    for (int x = 0; x < archiveLink.Arcs.Size(); x++)
    {
      const UString szName = L"7z";
      const CArc &arc = archiveLink.Arcs[x];
      if (codecs->Formats[arc.FormatIndex].Name != szName)
      {
        szArchive = false;
        break;
      }
    }
    if (!szArchive)
    {
      SHOW_ERROR("Only 7z archives can be exploded.");
      continue;
    }
    // Only 7z is supported, and it's been checked
    using namespace NArchive::N7z;
    IInArchive* inArc = archiveLink.GetArchive();
    // NOTE(review): unchecked C-style downcast — safe only because the
    // format check above guarantees the 7z handler; verify if formats grow.
    CHandler* szHandler = (CHandler*)inArc;
    // not a fan of having to reopen the file..
    CInFileStream* _inStream = new CInFileStream;
    CMyComPtr<CInFileStream> inStream(_inStream);
    if (!inStream->Open(archivePath))
    {
      SHOW_ERROR("Cannot be opened for reading.");
      continue;
    }
    // Explode the archive into each folder
    CObjectVector<szExplodeData> exploded;
    szHandler->Explode(exploded, maxDepth);
    if (exploded.Size() == 0)
    {
      SHOW_ERROR("Empty archive!");
      continue;
    }
    // should make a struct that gets passed to Explode..
    // something like
    /*
     * struct a {
     *   CArchiveDatabase newDatabase;
     *   CRecordVector<UInt64> folderSizes, folderPositions
     * };
     * CObjectVector<a> exploded;
     * szHandler->Explode(exploded);
     */
    // Save each folder as a new 7z archive
    for (int x = 0; x < exploded.Size(); x++)
    {
      UString relativeFilePath; // relative to archive
      UString fileName;
      CArchiveDatabase& newDatabase = exploded[x].newDatabase;
      szExplodeData& explodeData = exploded[x];
      // each exploded archive will only have a single folder.
      // no longer true. need to make sure the selected file
      // is the highest in the dir tree. could make 7zhandler
      // give us this info i guess.
      if (newDatabase.Files.Size() > 0)
      {
        relativeFilePath = newDatabase.Files[0].Name;
        if (!newDatabase.Files[0].IsDir)
        {
          fileName = GetFileFromPath(relativeFilePath);
          StripFile(relativeFilePath);
        }
      }
      //g_StdOut << "Relative path " << relativeFilePath << endl;
      //g_StdOut << "Archive " << archivePath << endl;
      UString folderOutPath = outputPath + relativeFilePath;
      if (relativeFilePath.Length() != 0)
      {
        bool b = NWindows::NFile::NDirectory::CreateComplexDirectory(folderOutPath);
        if (!b)
          g_StdOut << "Couldn't create directory " << folderOutPath << endl;
        //relativeFilePath.Insert(folderOutPath.Length(), L'/');
      }
      // Output name: the file's own name for single-file folders, otherwise
      // "<archive>_folder_<index>".
      std::wstringstream sstream;
      sstream << folderOutPath.GetBuffer();
      if (newDatabase.Files.Size() == 1) // can use file names
        sstream << fileName.GetBuffer();
      else // use folder as name
        sstream << archiveName.GetBuffer() << L"_folder_" << x;
      sstream << ".7z";
      g_StdOut << "Saving as '" << sstream.str().c_str() << "'" << endl;
      COutFileStream* _outstream = new COutFileStream;
      CMyComPtr<COutFileStream> outstream(_outstream);
      // NOTE(review): Create() and WriteDatabase() results are ignored —
      // failures here are silent; consider checking them.
      outstream->Create(sstream.str().c_str(), true);
      COutArchive out;
      out.Create(outstream, false);
      out.SkipPrefixArchiveHeader();
      for (int folderIndex = 0; folderIndex < newDatabase.Folders.Size(); folderIndex++)
      {
        UInt64 folderLen = explodeData.folderSizes[folderIndex];
        UInt64 folderStartPackPos = explodeData.folderPositions[folderIndex];
        // write actual data
        RINOK(WriteRange(inStream, out.SeqStream, folderStartPackPos, folderLen, NULL));
      }
      CCompressionMethodMode method, headerMethod;
      szHandler->SetCompressionMethod(method, headerMethod);
      CHeaderOptions headerOptions;
      headerOptions.CompressMainHeader = true;
      out.WriteDatabase(newDatabase, &headerMethod, headerOptions);
      out.Close();
      /*#ifdef ENV_UNIX
      // Create a symlink for each file in the folder.
      // This makes it seem as though each file is individually accessible.
      for (int fileIndex = 0; fileIndex < newDatabase.Files.Size(); fileIndex++)
      {
        AString oldfile, newfile;
        UString woldfile = sstream.str().c_str();
        UString wnewfile = outputPath + relativeFilePath + newDatabase.Files[fileIndex].Name + L".7z";
        ConvertUnicodeToUTF8(woldfile, oldfile);
        ConvertUnicodeToUTF8(wnewfile, newfile);
        const char* link_to = oldfile.GetBuffer();
        const char* link_name = newfile.GetBuffer();
        unlink(link_name);// should ask user
        //g_StdOut << "Creating symlink to '" << link_to << "' called '" << link_name << "'" << endl;
        int status = symlink(link_to, link_name);
        if (status == -1)
        {
          AString error = "Couldn't create symlink for '";
          error += newfile;
          error += "'";
          SHOW_ERROR(error);
          g_StdOut << "Error: " << errno << endl;
        }
      }
      #endif*/
    }
    archiveLink.Close(); // not needed but oh well
  }
  return S_OK;
}
// Writes one gzip member: header, deflate-compressed payload, then the
// post-header carrying the payload's CRC and 32-bit uncompressed size.
// The input bytes are read through a CRC/size-counting wrapper so the
// trailer fields can be filled in after encoding.
HRESULT UpdateArchive(
    DECL_EXTERNAL_CODECS_LOC_VARS
    IInStream * /* inStream */, UInt64 unpackSize,
    ISequentialOutStream *outStream,
    const CItem &newItem,
    const CCompressionMethodMode &compressionMethod,
    int indexInClient,
    IArchiveUpdateCallback *updateCallback)
{
  // Announce the expected work, then reset the progress counter.
  UInt64 complexity = unpackSize;
  RINOK(updateCallback->SetTotal(complexity));
  CMyComPtr<ICompressCoder> deflateEncoder;
  complexity = 0;
  RINOK(updateCallback->SetCompleted(&complexity));

  // Fetch the source stream and wrap it so CRC and byte count accumulate
  // transparently while the encoder reads.
  CMyComPtr<ISequentialInStream> fileInStream;
  RINOK(updateCallback->GetStream(indexInClient, &fileInStream));
  CSequentialInStreamWithCRC *checksumStreamSpec = new CSequentialInStreamWithCRC;
  CMyComPtr<ISequentialInStream> checksumStream(checksumStreamSpec);
  checksumStreamSpec->SetStream(fileInStream);
  checksumStreamSpec->Init();

  CLocalProgress *progressSpec = new CLocalProgress;
  CMyComPtr<ICompressProgressInfo> progress = progressSpec;
  progressSpec->Init(updateCallback, true);

  COutArchive outArchive;
  outArchive.Create(outStream);

  // The stored item always uses deflate with default extra flags.
  CItem item = newItem;
  item.CompressionMethod = NFileHeader::NCompressionMethod::kDeflate;
  item.ExtraFlags = 0;
  item.HostOS = kHostOS;
  RINOK(outArchive.WriteHeader(item));

  {
    // Build the deflate encoder and push the method settings onto it.
    RINOK(CreateCoder(
        EXTERNAL_CODECS_LOC_VARS
        kMethodId_Deflate, deflateEncoder, true));
    if (!deflateEncoder)
      return E_NOTIMPL;
    NWindows::NCOM::CPropVariant properties[] =
    {
      compressionMethod.Algo,
      compressionMethod.NumPasses,
      compressionMethod.NumFastBytes,
      compressionMethod.NumMatchFinderCycles
    };
    PROPID propIDs[] =
    {
      NCoderPropID::kAlgorithm,
      NCoderPropID::kNumPasses,
      NCoderPropID::kNumFastBytes,
      NCoderPropID::kMatchFinderCycles
    };
    int numProps = sizeof(propIDs) / sizeof(propIDs[0]);
    // The cycles property sits last so it can simply be dropped when unset.
    if (!compressionMethod.NumMatchFinderCyclesDefined)
      numProps--;
    CMyComPtr<ICompressSetCoderProperties> setCoderProperties;
    RINOK(deflateEncoder.QueryInterface(IID_ICompressSetCoderProperties, &setCoderProperties));
    RINOK(setCoderProperties->SetCoderProperties(propIDs, properties, numProps));
  }

  // Encode; afterwards the wrapper has observed every input byte.
  RINOK(deflateEncoder->Code(checksumStream, outStream, NULL, NULL, progress));
  item.FileCRC = checksumStreamSpec->GetCRC();
  item.UnPackSize32 = (UInt32)checksumStreamSpec->GetSize();
  RINOK(outArchive.WritePostHeader(item));
  return updateCallback->SetOperationResult(NArchive::NUpdate::NOperationResult::kOK);
}
// Rewrites a tar archive (older variant using CopyBlock and per-item
// progress objects). Each item is either streamed fresh from the callback
// (NewData), written with a refreshed header over copied data (NewProperties
// only), or copied through header + data unchanged.
HRESULT UpdateArchive(IInStream *inStream, ISequentialOutStream *outStream,
    const CObjectVector<NArchive::NTar::CItemEx> &inputItems,
    const CObjectVector<CUpdateItemInfo> &updateItems,
    IArchiveUpdateCallback *updateCallback)
{
  COutArchive outArchive;
  outArchive.Create(outStream);
  // First pass: total work estimate (data bytes plus a fixed per-item cost).
  UInt64 complexity = 0;
  int i;
  for(i = 0; i < updateItems.Size(); i++)
  {
    const CUpdateItemInfo &updateItem = updateItems[i];
    if (updateItem.NewData)
      complexity += updateItem.Size;
    else
      complexity += inputItems[updateItem.IndexInArchive].GetFullSize();
    complexity += kOneItemComplexity;
  }
  RINOK(updateCallback->SetTotal(complexity));
  complexity = 0;
  for(i = 0; i < updateItems.Size(); i++)
  {
    RINOK(updateCallback->SetCompleted(&complexity));
    // Fresh progress wrappers per item, offset by the running complexity.
    CLocalProgress *localProgressSpec = new CLocalProgress;
    CMyComPtr<ICompressProgressInfo> localProgress = localProgressSpec;
    localProgressSpec->Init(updateCallback, true);
    CLocalCompressProgressInfo *localCompressProgressSpec = new CLocalCompressProgressInfo;
    CMyComPtr<ICompressProgressInfo> compressProgress = localCompressProgressSpec;
    localCompressProgressSpec->Init(localProgress, &complexity, NULL);
    const CUpdateItemInfo &updateItem = updateItems[i];
    CItem item;
    if (updateItem.NewProperties)
    {
      // Build a header from the update request; mode is fixed at 0777 here.
      item.Mode = 0777;
      item.Name = (updateItem.Name);
      if (updateItem.IsDirectory)
      {
        item.LinkFlag = NFileHeader::NLinkFlag::kDirectory;
        item.Size = 0;
      }
      else
      {
        item.LinkFlag = NFileHeader::NLinkFlag::kNormal;
        item.Size = updateItem.Size;
      }
      item.ModificationTime = updateItem.Time;
      item.DeviceMajorDefined = false;
      item.DeviceMinorDefined = false;
      item.UID = 0;
      item.GID = 0;
      memmove(item.Magic, NFileHeader::NMagic::kEmpty, 8);
    }
    else
    {
      const CItemEx &existItemInfo = inputItems[updateItem.IndexInArchive];
      item = existItemInfo;
    }
    if (updateItem.NewData)
    {
      item.Size = updateItem.Size;
      // (UInt64)-1 marks "size unknown", which tar headers cannot express.
      if (item.Size == UInt64(Int64(-1)))
        return E_INVALIDARG;
    }
    else
    {
      const CItemEx &existItemInfo = inputItems[updateItem.IndexInArchive];
      item.Size = existItemInfo.Size;
    }
    if (updateItem.NewData)
    {
      CMyComPtr<ISequentialInStream> fileInStream;
      HRESULT res = updateCallback->GetStream(updateItem.IndexInClient, &fileInStream);
      // S_FALSE from GetStream means "skip this item"; only data counted.
      if (res != S_FALSE)
      {
        RINOK(res);
        RINOK(outArchive.WriteHeader(item));
        if (!updateItem.IsDirectory)
        {
          UInt64 totalSize;
          RINOK(CopyBlock(fileInStream, outStream, compressProgress, &totalSize));
          // The stream must deliver exactly the announced size.
          if (totalSize != item.Size)
            return E_FAIL;
          RINOK(outArchive.FillDataResidual(item.Size));
        }
      }
      complexity += updateItem.Size;
      RINOK(updateCallback->SetOperationResult(
          NArchive::NUpdate::NOperationResult::kOK));
    }
    else
    {
      CLimitedSequentialInStream *streamSpec = new CLimitedSequentialInStream;
      CMyComPtr<CLimitedSequentialInStream> inStreamLimited(streamSpec);
      const CItemEx &existItemInfo = inputItems[updateItem.IndexInArchive];
      if (updateItem.NewProperties)
      {
        // New header, old data: seek to the data area and copy only that.
        RINOK(outArchive.WriteHeader(item));
        RINOK(inStream->Seek(existItemInfo.GetDataPosition(), STREAM_SEEK_SET, NULL));
        streamSpec->Init(inStream, existItemInfo.Size);
      }
      else
      {
        // Unchanged: copy header and data together from the header position.
        RINOK(inStream->Seek(existItemInfo.HeaderPosition, STREAM_SEEK_SET, NULL));
        streamSpec->Init(inStream, existItemInfo.GetFullSize());
      }
      RINOK(CopyBlock(inStreamLimited, outStream, compressProgress));
      RINOK(outArchive.FillDataResidual(existItemInfo.Size));
      complexity += existItemInfo.GetFullSize();
    }
    complexity += kOneItemComplexity;
  }
  return outArchive.WriteFinishHeader();
}
// Rewrites a tar archive: streams new file data supplied by the callback and
// copies unchanged entries (optionally with a refreshed header) from inStream,
// then writes the tar finish blocks. Behavior matches the original
// implementation; only naming and layout differ.
HRESULT UpdateArchive(IInStream *inStream, ISequentialOutStream *outStream,
    const CObjectVector<NArchive::NTar::CItemEx> &inputItems,
    const CObjectVector<CUpdateItem> &updateItems,
    IArchiveUpdateCallback *updateCallback)
{
  COutArchive outArchive;
  outArchive.Create(outStream);

  // First pass: total byte count for progress reporting.
  UInt64 complexity = 0;
  int idx;
  for (idx = 0; idx < updateItems.Size(); idx++)
  {
    const CUpdateItem &upd = updateItems[idx];
    complexity += upd.NewData ? upd.Size : inputItems[upd.IndexInArchive].GetFullSize();
  }
  RINOK(updateCallback->SetTotal(complexity));

  NCompress::CCopyCoder *copyCoderObj = new NCompress::CCopyCoder;
  CMyComPtr<ICompressCoder> copyCoder = copyCoderObj;

  CLocalProgress *progressSpec = new CLocalProgress;
  CMyComPtr<ICompressProgressInfo> progress = progressSpec;
  progressSpec->Init(updateCallback, true);

  CLimitedSequentialInStream *limitedStreamSpec = new CLimitedSequentialInStream;
  CMyComPtr<CLimitedSequentialInStream> limitedStream(limitedStreamSpec);
  limitedStreamSpec->SetStream(inStream);

  // Second pass: emit every entry.
  complexity = 0;
  for (idx = 0; idx < updateItems.Size(); idx++)
  {
    progressSpec->InSize = progressSpec->OutSize = complexity;
    RINOK(progressSpec->SetCur());
    const CUpdateItem &upd = updateItems[idx];

    // Header comes either from the update request or the existing entry.
    CItem item;
    if (upd.NewProps)
    {
      item.Mode = upd.Mode;
      item.Name = upd.Name;
      item.User = upd.User;
      item.Group = upd.Group;
      if (upd.IsDir)
      {
        item.LinkFlag = NFileHeader::NLinkFlag::kDirectory;
        item.PackSize = 0;
      }
      else
      {
        item.LinkFlag = NFileHeader::NLinkFlag::kNormal;
        item.PackSize = upd.Size;
      }
      item.MTime = upd.MTime;
      item.DeviceMajorDefined = false;
      item.DeviceMinorDefined = false;
      item.UID = 0;
      item.GID = 0;
      memmove(item.Magic, NFileHeader::NMagic::kEmpty, 8);
    }
    else
      item = inputItems[upd.IndexInArchive];

    if (upd.NewData)
    {
      item.PackSize = upd.Size;
      // (UInt64)-1 marks "size unknown", which the header cannot express.
      if (upd.Size == (UInt64)(Int64)-1)
        return E_INVALIDARG;

      // New content comes from the callback; S_FALSE means "skip this item".
      CMyComPtr<ISequentialInStream> fileInStream;
      HRESULT res = updateCallback->GetStream(upd.IndexInClient, &fileInStream);
      if (res != S_FALSE)
      {
        RINOK(res);
        RINOK(outArchive.WriteHeader(item));
        if (!upd.IsDir)
        {
          RINOK(copyCoder->Code(fileInStream, outStream, NULL, NULL, progress));
          // The stream must deliver exactly the announced byte count.
          if (copyCoderObj->TotalSize != item.PackSize)
            return E_FAIL;
          RINOK(outArchive.FillDataResidual(item.PackSize));
        }
      }
      complexity += upd.Size;
      RINOK(updateCallback->SetOperationResult(NArchive::NUpdate::NOperationResult::kOK));
    }
    else
    {
      // Copy an existing entry. With NewProps a fresh header is written and
      // only the data is copied; otherwise header + data come over verbatim.
      const CItemEx &existItem = inputItems[upd.IndexInArchive];
      item.PackSize = existItem.PackSize;
      UInt64 copySize;
      if (upd.NewProps)
      {
        RINOK(outArchive.WriteHeader(item));
        RINOK(inStream->Seek(existItem.GetDataPosition(), STREAM_SEEK_SET, NULL));
        copySize = existItem.PackSize;
      }
      else
      {
        RINOK(inStream->Seek(existItem.HeaderPos, STREAM_SEEK_SET, NULL));
        copySize = existItem.GetFullSize();
      }
      limitedStreamSpec->Init(copySize);
      RINOK(copyCoder->Code(limitedStream, outStream, NULL, NULL, progress));
      if (copyCoderObj->TotalSize != copySize)
        return E_FAIL;
      RINOK(outArchive.FillDataResidual(existItem.PackSize));
      complexity += copySize;
    }
  }
  progressSpec->InSize = progressSpec->OutSize = complexity;
  RINOK(progressSpec->SetCur());
  return outArchive.WriteFinishHeader();
}