7zupdate.cpp
来自「由7-zip提供的压缩、解压缩程序」· C++ 代码 · 共 1,034 行 · 第 1/2 页
CPP
1,034 行
// NOTE(review): this excerpt begins INSIDE Update2(); the function signature and
// the declarations of i, fileIndexToUpdateIndexMap, numSolidFiles, options, etc.
// are above the visible region. Only line structure and comments were restored;
// every code token is unchanged.
      fileIndexToUpdateIndexMap[index] = i;
  }
  // Decide, per existing folder, whether its packed data can be copied verbatim:
  // a folder is copyable only if ALL of its streams are kept (no NewData) — a
  // partially-kept folder would require repacking, which is not implemented.
  CRecordVector<int> folderRefs;
  if (database != 0)
  {
    for(i = 0; i < database->Folders.Size(); i++)
    {
      CNum indexInFolder = 0;
      CNum numCopyItems = 0;
      CNum numUnPackStreams = database->NumUnPackStreamsVector[i];
      // Walk the files of folder i; only files with a stream count toward
      // the folder's unpack-stream total.
      for (CNum fileIndex = database->FolderStartFileIndex[i]; indexInFolder < numUnPackStreams; fileIndex++)
      {
        if (database->Files[fileIndex].HasStream)
        {
          indexInFolder++;
          int updateIndex = fileIndexToUpdateIndexMap[fileIndex];
          if (updateIndex >= 0)
            if (!updateItems[updateIndex].NewData)
              numCopyItems++;
        }
      }
      if (numCopyItems != numUnPackStreams && numCopyItems != 0)
        return E_NOTIMPL; // It needs repacking !!!
      if (numCopyItems > 0)
        folderRefs.Add(i);
    }
    folderRefs.Sort(CompareFolderRefs, (void *)database);
  }
  CArchiveDatabase newDatabase;

  /////////////////////////////////////////
  // Write Empty Files & Folders
  // Collect update items that carry no stream (empty files, directories, anti
  // items); they are recorded in the database only, with no packed data.
  CRecordVector<int> emptyRefs;
  for(i = 0; i < updateItems.Size(); i++)
  {
    const CUpdateItem &updateItem = updateItems[i];
    if (updateItem.NewData)
    {
      if (updateItem.HasStream())
        continue;
    }
    else if (updateItem.IndexInArchive != -1)
      if (database->Files[updateItem.IndexInArchive].HasStream)
        continue;
    emptyRefs.Add(i);
  }
  emptyRefs.Sort(CompareEmptyItems, (void *)&updateItems);
  for(i = 0; i < emptyRefs.Size(); i++)
  {
    const CUpdateItem &updateItem = updateItems[emptyRefs[i]];
    CFileItem file;
    if (updateItem.NewProperties)
      FromUpdateItemToFileItem(updateItem, file);
    else
      file = database->Files[updateItem.IndexInArchive];
    newDatabase.Files.Add(file);
  }

  ////////////////////////////
  COutArchive archive;
  RINOK(archive.Create(seqOutStream, false));
  RINOK(archive.SkeepPrefixArchiveHeader());
  // "complexity" doubles as the progress total: copied pack bytes plus the
  // sizes of all newly compressed items.
  UInt64 complexity = 0;
  for(i = 0; i < folderRefs.Size(); i++)
    complexity += database->GetFolderFullPackSize(folderRefs[i]);
  // inSizeForReduce accumulates the new-data size (max single size in
  // non-solid mode); it is later passed to the encoder — presumably as a
  // dictionary-size hint. TODO(review): confirm against CEncoder::Encode.
  UInt64 inSizeForReduce = 0;
  for(i = 0; i < updateItems.Size(); i++)
  {
    const CUpdateItem &updateItem = updateItems[i];
    if (updateItem.NewData)
    {
      complexity += updateItem.Size;
      if (numSolidFiles == 1)
      {
        if (updateItem.Size > inSizeForReduce)
          inSizeForReduce = updateItem.Size;
      }
      else
        inSizeForReduce += updateItem.Size;
    }
  }
  RINOK(updateCallback->SetTotal(complexity));
  complexity = 0;
  RINOK(updateCallback->SetCompleted(&complexity));

  /////////////////////////////////////////
  // Write Copy Items
  // For each fully-kept folder: raw-copy its packed byte range into the new
  // archive and clone its database records (pack sizes, folder, stream counts,
  // file entries — with properties refreshed from the update item if requested).
  for(i = 0; i < folderRefs.Size(); i++)
  {
    int folderIndex = folderRefs[i];
    RINOK(WriteRange(inStream, archive.SeqStream,
        database->GetFolderStreamPos(folderIndex, 0),
        database->GetFolderFullPackSize(folderIndex),
        updateCallback, complexity));
    const CFolder &folder = database->Folders[folderIndex];
    CNum startIndex = database->FolderStartPackStreamIndex[folderIndex];
    for (int j = 0; j < folder.PackStreams.Size(); j++)
    {
      newDatabase.PackSizes.Add(database->PackSizes[startIndex + j]);
      // newDatabase.PackCRCsDefined.Add(database.PackCRCsDefined[startIndex + j]);
      // newDatabase.PackCRCs.Add(database.PackCRCs[startIndex + j]);
    }
    newDatabase.Folders.Add(folder);
    CNum numUnPackStreams = database->NumUnPackStreamsVector[folderIndex];
    newDatabase.NumUnPackStreamsVector.Add(numUnPackStreams);
    CNum indexInFolder = 0;
    for (CNum fi = database->FolderStartFileIndex[folderIndex]; indexInFolder < numUnPackStreams; fi++)
    {
      CFileItem file = database->Files[fi];
      if (file.HasStream)
      {
        indexInFolder++;
        int updateIndex = fileIndexToUpdateIndexMap[fi];
        if (updateIndex >= 0)
        {
          const CUpdateItem &updateItem = updateItems[updateIndex];
          if (updateItem.NewProperties)
          {
            // New properties, old data: take the update item's metadata but
            // keep the stream-related fields of the existing file record.
            CFileItem file2;
            FromUpdateItemToFileItem(updateItem, file2);
            file2.UnPackSize = file.UnPackSize;
            file2.FileCRC = file.FileCRC;
            file2.IsFileCRCDefined = file.IsFileCRCDefined;
            file2.HasStream = file.HasStream;
            file = file2;
          }
        }
        newDatabase.Files.Add(file);
      }
    }
  }

  /////////////////////////////////////////
  // Compress New Files
  CObjectVector<CSolidGroup> groups;
  SplitFilesToGroups(*options.Method, options.UseFilters, options.MaxFilter,
      updateItems, groups);
  // Floor for the size hint handed to the encoder.
  const UInt32 kMinReduceSize = (1 << 16);
  if (inSizeForReduce < kMinReduceSize)
    inSizeForReduce = kMinReduceSize;
  // Each group is compressed with its own method; inside a group, files are
  // bundled into solid blocks bounded by numSolidFiles / options.NumSolidBytes
  // (and, optionally, by matching file extension).
  for (int groupIndex = 0; groupIndex < groups.Size(); groupIndex++)
  {
    const CSolidGroup &group = groups[groupIndex];
    int numFiles = group.Indices.Size();
    if (numFiles == 0)
      continue;
    CRecordVector<CRefItem> refItems;
    refItems.Reserve(numFiles);
    for (i = 0; i < numFiles; i++)
      refItems.Add(CRefItem(group.Indices[i], updateItems[group.Indices[i]], numSolidFiles > 1));
    refItems.Sort(CompareUpdateItems, 0);
    CRecordVector<UInt32> indices;
    indices.Reserve(numFiles);
    for (i = 0; i < numFiles; i++)
    {
      UInt32 index = refItems[i].Index;
      indices.Add(index);
      /*
      const CUpdateItem &updateItem = updateItems[index];
      CFileItem file;
      if (updateItem.NewProperties)
        FromUpdateItemToFileItem(updateItem, file);
      else
        file = database.Files[updateItem.IndexInArchive];
      if (file.IsAnti || file.IsDirectory)
        return E_FAIL;
      newDatabase.Files.Add(file);
      */
    }
    CEncoder encoder(group.Method);
    for (i = 0; i < numFiles;)
    {
      UInt64 totalSize = 0;
      int numSubFiles;
      UString prevExtension;
      // Grow the solid block until the file-count or byte limit is hit, or
      // (in SolidExtension mode) the extension changes.
      for (numSubFiles = 0; i + numSubFiles < numFiles && numSubFiles < numSolidFiles; numSubFiles++)
      {
        const CUpdateItem &updateItem = updateItems[indices[i + numSubFiles]];
        totalSize += updateItem.Size;
        if (totalSize > options.NumSolidBytes)
          break;
        if (options.SolidExtension)
        {
          UString ext = updateItem.GetExtension();
          if (numSubFiles == 0)
            prevExtension = ext;
          else if (ext.CompareNoCase(prevExtension) != 0)
            break;
        }
      }
      // Always make progress, even if the first file alone exceeds the limits.
      if (numSubFiles < 1)
        numSubFiles = 1;
      CFolderInStream *inStreamSpec = new CFolderInStream;
      CMyComPtr<ISequentialInStream> solidInStream(inStreamSpec);
      inStreamSpec->Init(updateCallback, &indices[i], numSubFiles);
      CFolder folderItem;
      CLocalProgress *localProgressSpec = new CLocalProgress;
      CMyComPtr<ICompressProgressInfo> localProgress = localProgressSpec;
      localProgressSpec->Init(updateCallback, true);
      CLocalCompressProgressInfo *localCompressProgressSpec = new CLocalCompressProgressInfo;
      CMyComPtr<ICompressProgressInfo> compressProgress = localCompressProgressSpec;
      localCompressProgressSpec->Init(localProgress, &complexity, NULL);
      RINOK(encoder.Encode(solidInStream, NULL, &inSizeForReduce, folderItem,
          archive.SeqStream, newDatabase.PackSizes, compressProgress));
      // for()
      // newDatabase.PackCRCsDefined.Add(false);
      // newDatabase.PackCRCs.Add(0);
      newDatabase.Folders.Add(folderItem);
      CNum numUnPackStreams = 0;
      for (int subIndex = 0; subIndex < numSubFiles; subIndex++)
      {
        const CUpdateItem &updateItem = updateItems[indices[i + subIndex]];
        CFileItem file;
        if (updateItem.NewProperties)
          FromUpdateItemToFileItem(updateItem, file);
        else
          file = database->Files[updateItem.IndexInArchive];
        if (file.IsAnti || file.IsDirectory)
          return E_FAIL;
        /*
        CFileItem &file = newDatabase.Files[
            startFileIndexInDatabase + i + subIndex];
        */
        // Files the stream provider could not read (e.g. locked) are simply
        // dropped from the new database.
        if (!inStreamSpec->Processed[subIndex])
        {
          continue;
          // file.Name += L".locked";
        }
        file.FileCRC = inStreamSpec->CRCs[subIndex];
        file.UnPackSize = inStreamSpec->Sizes[subIndex];
        if (file.UnPackSize != 0)
        {
          file.IsFileCRCDefined = true;
          file.HasStream = true;
          numUnPackStreams++;
          complexity += file.UnPackSize;
        }
        else
        {
          file.IsFileCRCDefined = false;
          file.HasStream = false;
        }
        newDatabase.Files.Add(file);
      }
      // numUnPackStreams = 0 is very bad case for locked files
      // v3.13 doesn't understand it.
// NOTE(review): the first statements below are still inside Update2()'s solid
// compression loop (the function begins above the visible region). Only line
// structure and comments were restored; every code token is unchanged.
      newDatabase.NumUnPackStreamsVector.Add(numUnPackStreams);
      i += numSubFiles;
    }
  }
  /*
  if (newDatabase.Files.Size() != updateItems.Size())
    return E_FAIL;
  */
  // Finally serialize the rebuilt database (headers) after all stream data.
  return archive.WriteDatabase(newDatabase, options.HeaderMethod,
      options.UseAdditionalHeaderStreams, options.CompressMainHeader);
}

// Writes a minimal archive database for one volume: a single file stored in a
// single folder whose only coder has method ID 0 (store/copy — pack size equals
// unpack size, as both are set from file.UnPackSize below).
static HRESULT WriteVolumeHeader(COutArchive &archive, CFileItem &file,
    const CUpdateOptions &options)
{
  CAltCoderInfo altCoder;
  altCoder.MethodID.IDSize = 1;
  altCoder.MethodID.ID[0] = 0;
  CCoderInfo coder;
  coder.NumInStreams = coder.NumOutStreams = 1;
  coder.AltCoders.Add(altCoder);
  CFolder folder;
  folder.Coders.Add(coder);
  folder.PackStreams.Add(0);
  CNum numUnPackStreams = 0;
  if (file.UnPackSize != 0)
  {
    file.IsFileCRCDefined = true;
    file.HasStream = true;
    numUnPackStreams++;
  }
  else
  {
    // NOTE(review): the statements after "throw 1;" are unreachable; callers
    // never pass UnPackSize == 0 here — confirm before cleaning up.
    throw 1;
    file.IsFileCRCDefined = false;
    file.HasStream = false;
  }
  folder.UnPackSizes.Add(file.UnPackSize);
  CArchiveDatabase newDatabase;
  newDatabase.Files.Add(file);
  newDatabase.Folders.Add(folder);
  newDatabase.NumUnPackStreamsVector.Add(numUnPackStreams);
  newDatabase.PackSizes.Add(file.UnPackSize);
  newDatabase.PackCRCsDefined.Add(false);
  newDatabase.PackCRCs.Add(file.FileCRC);
  return archive.WriteDatabase(newDatabase, options.HeaderMethod, false, false);
}

#ifdef _7Z_VOL
// Multi-volume update for exactly one item (E_NOTIMPL otherwise): the item's
// stream is copied volume by volume, each volume getting its own copy-mode
// header via WriteVolumeHeader; file.StartPos tracks the running offset of the
// data stored in earlier volumes.
HRESULT UpdateVolume(
    IInStream *inStream,
    const CArchiveDatabaseEx *database,
    CObjectVector<CUpdateItem> &updateItems,
    ISequentialOutStream *seqOutStream,
    IArchiveUpdateCallback *updateCallback,
    const CUpdateOptions &options)
{
  if (updateItems.Size() != 1)
    return E_NOTIMPL;
  CMyComPtr<IArchiveUpdateCallback2> volumeCallback;
  RINOK(updateCallback->QueryInterface(IID_IArchiveUpdateCallback2,
      (void **)&volumeCallback));
  if (!volumeCallback)
    return E_NOTIMPL;
  CMyComPtr<ISequentialInStream> fileStream;
  HRESULT result = updateCallback->GetStream(0, &fileStream);
  if (result != S_OK && result != S_FALSE)
    return result;
  if (result == S_FALSE)
    return E_FAIL;
  CFileItem file;
  const CUpdateItem &updateItem = updateItems[0];
  if (updateItem.NewProperties)
    FromUpdateItemToFileItem(updateItem, file);
  else
    file = database->Files[updateItem.IndexInArchive];
  if (file.IsAnti || file.IsDirectory)
    return E_FAIL;
  UInt64 complexity = 0;
  file.IsStartPosDefined = true;
  file.StartPos = 0;
  for (UInt64 volumeIndex = 0; true; volumeIndex++)
  {
    UInt64 volSize;
    RINOK(volumeCallback->GetVolumeSize(volumeIndex, &volSize));
    // pureSize = volume capacity left for data after archive framing/header.
    UInt64 pureSize = COutArchive::GetVolPureSize(volSize, file.Name.Length(), true);
    CMyComPtr<ISequentialOutStream> volumeStream;
    RINOK(volumeCallback->GetVolumeStream(volumeIndex, &volumeStream));
    COutArchive archive;
    RINOK(archive.Create(volumeStream, true));
    RINOK(archive.SkeepPrefixArchiveHeader());
    // Wrap the source so size and CRC of the copied chunk are captured.
    CSequentialInStreamWithCRC *inCrcStreamSpec = new CSequentialInStreamWithCRC;
    CMyComPtr<ISequentialInStream> inCrcStream = inCrcStreamSpec;
    inCrcStreamSpec->Init(fileStream);
    RINOK(WriteRange(inCrcStream, volumeStream, pureSize, updateCallback, complexity));
    file.UnPackSize = inCrcStreamSpec->GetSize();
    if (file.UnPackSize == 0)
      break;
    file.FileCRC = inCrcStreamSpec->GetCRC();
    RINOK(WriteVolumeHeader(archive, file, options));
    file.StartPos += file.UnPackSize;
    // A short chunk means the source is exhausted — this was the last volume.
    if (file.UnPackSize < pureSize)
      break;
  }
  return S_OK;
}

// Sequential output stream that transparently spans volumes: each volume is
// opened lazily from VolumeCallback, filled up to its pure size, then closed
// with a per-volume header (see Flush).
class COutVolumeStream:
  public ISequentialOutStream,
  public CMyUnknownImp
{
  int _volIndex;                              // index of the next volume to open
  UInt64 _volSize;                            // capacity of the current volume
  UInt64 _curPos;                             // bytes written to the current volume
  CMyComPtr<ISequentialOutStream> _volumeStream;
  COutArchive _archive;
  CCRC _crc;                                  // CRC of the current volume's data
public:
  MY_UNKNOWN_IMP
  CFileItem _file;
  CUpdateOptions _options;
  CMyComPtr<IArchiveUpdateCallback2> VolumeCallback;
  // Resets state for a new logical file named `name`, starting at volume 0.
  void Init(IArchiveUpdateCallback2 *volumeCallback, const UString &name)
  {
    _file.Name = name;
    _file.IsStartPosDefined = true;
    _file.StartPos = 0;
    VolumeCallback = volumeCallback;
    _volIndex = 0;
    _volSize = 0;
  }
  HRESULT Flush();
  STDMETHOD(Write)(const void *data, UInt32 size, UInt32 *processedSize);
};

// Finalizes the current volume, if any: records size/CRC, writes the volume
// header, closes the archive, and advances _file.StartPos past the data.
HRESULT COutVolumeStream::Flush()
{
  if (_volumeStream)
  {
    _file.UnPackSize = _curPos;
    _file.FileCRC = _crc.GetDigest();
    RINOK(WriteVolumeHeader(_archive, _file, _options));
    _archive.Close();
    _volumeStream.Release();
    _file.StartPos += _file.UnPackSize;
  }
  return S_OK;
}

// ISequentialOutStream::Write — routes `data` into successive volumes, opening
// the next one on demand and flushing when a volume reaches its pure size.
STDMETHODIMP COutVolumeStream::Write(const void *data, UInt32 size, UInt32 *processedSize)
{
  if(processedSize != NULL)
    *processedSize = 0;
  while(size > 0)
  {
    if (!_volumeStream)
    {
      // Open the next volume and restart the per-volume counters/CRC.
      RINOK(VolumeCallback->GetVolumeSize(_volIndex, &_volSize));
      RINOK(VolumeCallback->GetVolumeStream(_volIndex, &_volumeStream));
      _volIndex++;
      _curPos = 0;
      RINOK(_archive.Create(_volumeStream, true));
      RINOK(_archive.SkeepPrefixArchiveHeader());
      _crc.Init();
      continue;
    }
    UInt64 pureSize = COutArchive::GetVolPureSize(_volSize, _file.Name.Length());
    // Never write past the current volume's data capacity.
    UInt32 curSize = (UInt32)MyMin(UInt64(size), pureSize - _curPos);
    _crc.Update(data, curSize);
    UInt32 realProcessed;
    // NOTE(review): no trailing ';' — relies on the RINOK macro expanding to a
    // complete statement.
    RINOK(_volumeStream->Write(data, curSize, &realProcessed))
    data = (void *)((Byte *)data + realProcessed);
    size -= realProcessed;
    if(processedSize != NULL)
      *processedSize += realProcessed;
    _curPos += realProcessed;
    // A zero-byte partial write means the target stream made no progress.
    if (realProcessed != curSize && realProcessed == 0)
      return E_FAIL;
    if (_curPos == pureSize)
    {
      RINOK(Flush());
    }
  }
  return S_OK;
}
#endif

// Entry point: dispatches to the plain single-archive path (Update2) or, when
// built with _7Z_VOL and seqOutStream is null, to one of the volume paths —
// UpdateVolume for VolumeMode, otherwise Update2 writing through a
// COutVolumeStream (flushed at the end to finish the last volume).
HRESULT Update(
    IInStream *inStream,
    const CArchiveDatabaseEx *database,
    const CObjectVector<CUpdateItem> &updateItems,
    ISequentialOutStream *seqOutStream,
    IArchiveUpdateCallback *updateCallback,
    const CUpdateOptions &options)
{
  #ifdef _7Z_VOL
  if (seqOutStream)
  #endif
  return Update2(inStream, database, updateItems, seqOutStream,
      updateCallback, options);
  #ifdef _7Z_VOL
  if (options.VolumeMode)
    return UpdateVolume(inStream, database, updateItems, seqOutStream,
        updateCallback, options);
  COutVolumeStream *volStreamSpec = new COutVolumeStream;
  CMyComPtr<ISequentialOutStream> volStream = volStreamSpec;
  CMyComPtr<IArchiveUpdateCallback2> volumeCallback;
  RINOK(updateCallback->QueryInterface(IID_IArchiveUpdateCallback2,
      (void **)&volumeCallback));
  if (!volumeCallback)
    return E_NOTIMPL;
  volStreamSpec->Init(volumeCallback, L"a.7z");
  volStreamSpec->_options = options;
  RINOK(Update2(inStream, database, updateItems, volStream,
      updateCallback, options));
  return volStreamSpec->Flush();
  #endif
}

// Closing braces of the enclosing namespaces (opened above the visible region).
}}
⌨️ 快捷键说明
复制代码Ctrl + C
搜索代码Ctrl + F
全屏模式F11
增大字号Ctrl + =
减小字号Ctrl + -
显示快捷键?