//! Read a "Chunk" from a non-MXF file
DataChunkPtr mxflib::FileReadChunk(FileHandle InFile, size_t Size)
{
	DataChunkPtr Ret = new DataChunk;
	Ret->Resize(Size);

	// Read the data (and shrink chunk to fit)
	size_t Bytes = FileRead(InFile, Ret->Data, Size);
	if(Bytes == static_cast<size_t>(-1)) Bytes = 0;
	Ret->Resize(Bytes);

	return Ret;
}
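/* Hypothetical usage sketch (not part of the original source): read the first few
 * bytes of an arbitrary file with FileReadChunk() and report how much data actually
 * arrived. Assumes the usual mxflib raw-file helpers FileOpenRead(), FileValid() and
 * FileClose() are available; the function and file names here are illustrative only.
 */
void DumpFileHeader(const char *Filename)
{
	FileHandle InFile = FileOpenRead(Filename);
	if(!FileValid(InFile)) return;

	// FileReadChunk() shrinks the returned chunk to the number of bytes actually read,
	// so a short (or empty) file simply yields a smaller chunk rather than an error
	DataChunkPtr Header = mxflib::FileReadChunk(InFile, 12);

	printf("Read %d header bytes\n", static_cast<int>(Header->Size));

	FileClose(InFile);
}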
//! Get the finished hash value
DataChunkPtr HashHMACSHA1::GetHash(void)
{
	//FileClose(OutFile);

	// Build a data chunk for the output
	DataChunkPtr Ret = new DataChunk;
	Ret->Resize(20);

	SHA1_Final(Ret->Data, &Context);

	// Hash the inner hash with the outer key
	SHA1_Init(&Context);
	SHA1_Update(&Context, KeyBuffer_o, 64);
	SHA1_Update(&Context, Ret->Data, static_cast<unsigned long>(Ret->Size));
	SHA1_Final(Ret->Data, &Context);

/*	printf("Hash is:");
	for(int i=0; i<20; i++)
	{
		printf(" %02x", Ret->Data[i]);
	}
	printf("\n");
*/

	return Ret;
}
//! Set a data chunk from a hex string
DataChunkPtr mxflib::Hex2DataChunk(std::string Hex)
{
	// Build the result chunk
	DataChunkPtr Ret = new DataChunk();

	// Use a granularity of 16 as most hex strings are likely to be 16 or 32 bytes
	// DRAGONS: We may want to revise this later
	Ret->SetGranularity(16);

	// Index the hex string
	char const *p = Hex.c_str();

	int Size = 0;
	int Value = -1;

	// During this loop Value == -1 whenever no digits of a number are mid-process.
	// This stops a double space being regarded as a small zero between two spaces.
	// It also stops a trailing zero being appended to the data if the last character
	// before the terminating byte is not a hex digit.
	do
	{
		int digit;
		if(*p >= '0' && *p <= '9') digit = (*p) - '0';
		else if(*p >= 'a' && *p <= 'f') digit = (*p) - 'a' + 10;
		else if(*p >= 'A' && *p <= 'F') digit = (*p) - 'A' + 10;
		else if(Value == -1)
		{
			// Skip second or subsequent non-digit
			continue;
		}
		else
		{
			Size++;
			Ret->Resize(Size);
			Ret->Data[Size-1] = Value;

			Value = -1;
			continue;
		}

		if(Value == -1) Value = 0; else Value <<= 4;
		Value += digit;

	// Note that the loop test is done in this way to force
	// a final cycle of the loop with *p == 0 to allow the last
	// number to be processed
	} while(*(p++));

	return Ret;
}
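/* Hypothetical usage sketch (not part of the original source): turn a spaced hex
 * string into raw bytes with Hex2DataChunk(). Each run of hex digits between non-hex
 * separators becomes one output byte (only the low 8 bits are kept), so the string
 * below produces the eight bytes 06 0e 2b 34 01 01 01 01.
 */
void Hex2DataChunkExample(void)
{
	DataChunkPtr Label = mxflib::Hex2DataChunk("06 0e 2b 34 01 01 01 01");

	// Print the resulting bytes
	size_t i;
	for(i = 0; i < Label->Size; i++) printf(" %02x", Label->Data[i]);
	printf("\n");
}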
/* The hashing key is:
 * - trunc( HMAC-SHA-1( CipherKey, 0x00112233445566778899aabbccddeeff ) )
 * Where trunc(x) is the first 128 bits of x
 */
DataChunkPtr BuildHashKey(size_t Size, const UInt8 *CryptoKey)
{
	//! Constant value to be hashed with the cipher key to produce the hashing key
	const UInt8 KeyConst[] =
	{
		0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77,
		0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff
	};

	HashPtr Hasher = new HashHMACSHA1();

	// Hash the constant data with the crypto key
	Hasher->SetKey(Size, CryptoKey);
	Hasher->HashData(16, KeyConst);
	DataChunkPtr Ret = Hasher->GetHash();

	// Truncate the hashed key to 128 bits (16 bytes)
	Ret->Resize(16);

	return Ret;
}
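/* Hypothetical usage sketch (not part of the original source): derive the 128-bit
 * hashing key from a 16-byte cryptographic key using BuildHashKey(). The key bytes
 * used here are illustrative only.
 */
void BuildHashKeyExample(void)
{
	const UInt8 ContentKey[16] =
	{
		0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
		0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10
	};

	// The HMAC-SHA-1 output is 20 bytes, but BuildHashKey() truncates it to 16
	DataChunkPtr HashKey = BuildHashKey(16, ContentKey);

	printf("Hashing key is %d bytes\n", static_cast<int>(HashKey->Size));
}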
//! Read raw index table data from this partition's source file
DataChunkPtr mxflib::Partition::ReadIndexChunk(void)
{
	DataChunkPtr Ret;

	// Locate the index table data
	if(!SeekIndex()) return Ret;

	Int64 IndexSize = GetInt64(IndexByteCount_UL);
	if(IndexSize == 0) return Ret;

	// Read the specified number of bytes
	Ret = Object->GetParentFile()->Read(static_cast<size_t>(IndexSize));

	/* Remove any trailing filler */

	// Scan backwards from the end of the index data
	if(Ret->Size >= 16)
	{
		size_t Count = Ret->Size - 15;
		UInt8 *p = &Ret->Data[Count - 1];

		// Do the scan (slightly optimized)
		while(Count--)
		{
			if(*p == 0x06)
			{
				// Do a versionless compare
				if(memcmp(p, KLVFill_UL.GetValue(), 7) == 0)
				{
					if(memcmp(&p[8], &(KLVFill_UL.GetValue()[8]), 8) == 0)
					{
						Ret->Resize(p - Ret->Data);
						break;
					}
				}
			}

			p--;
		}
	}

	return Ret;
}
/*! If frame or line mapping is used the parameter Count is used to
 *  determine how many items are read. In frame wrapping it is in
 *  units of EditRate, as specified in the call to Use(), which may
 *  not be the frame rate of this essence
 *  \note This is going to take a lot of memory in clip wrapping!
 */
DataChunkPtr mxflib::WAVE_PCM_EssenceSubParser::Read(FileHandle InFile, UInt32 Stream, UInt64 Count /*=1*/ /*, IndexTablePtr Index */ /*=NULL*/)
{
	// Move to the current position
	if(BytePosition == 0) BytePosition = DataStart;
	FileSeek(InFile, BytePosition);

	// Either use the cached value, or scan the stream and find out how many bytes to read
	if((CachedDataSize == static_cast<size_t>(-1)) || (CachedCount != Count)) ReadInternal(InFile, Stream, Count);

	// Record, then clear, the data size
	size_t Bytes = CachedDataSize;
	CachedDataSize = static_cast<size_t>(-1);

	// Make a datachunk with enough space
	DataChunkPtr Ret = new DataChunk(Bytes);

	// Read the data
	size_t BytesRead = static_cast<size_t>(FileRead(InFile, Ret->Data, Bytes));

	// Update the file pointer
	BytePosition = FileTell(InFile);

	// Move the edit unit pointer forward by the number of edit units read
	CurrentPosition += Count;

	// Cope with an early end-of-file
	if(BytesRead < Bytes)
	{
		DataSize = BytePosition - DataStart;
		Ret->Resize(BytesRead);

		// We need to work out where we actually ended
		CurrentPosition = CalcCurrentPosition();
	}

	return Ret;
}
/*! \return Pointer to a data chunk holding the next data or a NULL pointer when no more remains
 *  \note If there is more data to come but it is not currently available the return value will be a pointer to an empty data chunk
 *  \note If Size = 0 the object will decide the size of the chunk to return
 *  \note On no account will the returned chunk be larger than MaxSize (if MaxSize > 0)
 */
DataChunkPtr WAVE_PCM_EssenceSubParser::ESP_EssenceSource::GetEssenceData(size_t Size /*=0*/, size_t MaxSize /*=0*/)
{
	WAVE_PCM_EssenceSubParser *pCaller = SmartPtr_Cast(Caller, WAVE_PCM_EssenceSubParser);

	// Allow us to differentiate the first call
	if(!Started)
	{
		Started = true;

		// Move to the selected position
		if(pCaller->BytePosition == 0) pCaller->BytePosition = pCaller->DataStart;
	}

	if(!BytesRemaining)
	{
		// Either use the cached value, or scan the stream and find out how many bytes to read
		if((pCaller->CachedDataSize == static_cast<size_t>(-1)) || (pCaller->CachedCount != RequestedCount)) pCaller->ReadInternal(File, Stream, RequestedCount);

		// Record, then clear, the data size
		BytesRemaining = pCaller->CachedDataSize;
		pCaller->CachedDataSize = static_cast<size_t>(-1);

		// Flag all done when no more to read
		if(BytesRemaining == 0)
		{
			// Undo the size removal made by the call to SamplesThisEditUnit so that the padding sequence stays correct
			if(PaddingEnabled) pCaller->PushBackSize();

			AtEndOfData = true;
			return NULL;
		}
	}

	// Decide how many bytes to read this time - start by trying to read them all
	size_t Bytes = BytesRemaining;

	// Hard limit to MaxSize
	if((MaxSize != 0) && (Bytes > MaxSize))
	{
		Bytes = MaxSize;
	}

	// Also limit to Size
	if((Size != 0) && (Bytes > Size))
	{
		Bytes = Size;
	}

	// Remove this number of bytes from the remaining count
	BytesRemaining -= Bytes;

	// Seek to the current position
	FileSeek(File, pCaller->BytePosition);

	// Read the data
	DataChunkPtr Ret = FileReadChunk(File, Bytes);

	// Update the file pointer
	pCaller->BytePosition = FileTell(File);

	// Move the edit unit pointer forward by the number of edit units read (if this is the last part of a read)
	if(!BytesRemaining)
	{
		// Only do a simple add if not reading the whole clip, and if the read succeeded
		if((pCaller->SelectedWrapping->ThisWrapType == WrappingOption::Frame) && (Ret->Size == Bytes)) pCaller->CurrentPosition += RequestedCount;
		// ... otherwise calculate the new position
		else pCaller->CurrentPosition = pCaller->CalcCurrentPosition();
	}

	// If we get too few bytes, and padding has been selected, pad this wrapping unit
	if((Ret->Size < Bytes) && PaddingEnabled)
	{
		size_t OldSize = Ret->Size;
		Ret->Resize(Bytes);
		memset(&Ret->Data[OldSize], 0, Bytes - OldSize);
	}

	return Ret;
}
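/* Hypothetical usage sketch (not part of the original source): drain an essence source
 * in chunks of at most 64K and write each chunk to an MXF file. Assumes Source is an
 * EssenceSourcePtr already set up by the parser (such as the ESP_EssenceSource above)
 * and that Out is an open MXFFilePtr. For simplicity this sketch treats an empty chunk
 * as end-of-data, which is reasonable for file-based sources; a real-time source may
 * instead return an empty chunk to mean "try again later".
 */
void DrainEssence(EssenceSourcePtr Source, MXFFilePtr Out)
{
	for(;;)
	{
		// Let the source pick the chunk size, but never accept more than 65536 bytes
		DataChunkPtr Chunk = Source->GetEssenceData(0, 65536);

		// A NULL pointer means no more data remains
		if(!Chunk || (Chunk->Size == 0)) break;

		Out->Write(Chunk);
	}
}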
/*! \return false on error, else true
 *
 *  DRAGONS: Ensure that the data written matches the size given by CalcFooterLength()
 */
bool KLVEObject::WriteFooter(void)
{
	// Don't write anything if we don't need to
	if((!TrackFileID) && (!HasSequenceNumber) && (!WriteHasher)) return true;

	// Make a reasonable sized buffer
	DataChunkPtr Buffer = new DataChunk(64);

	// Make a pointer to walk through the write buffer
	unsigned char *p = Buffer->Data;

	if(!TrackFileID)
	{
		// Write a "missing" TrackFile ID
		p += MakeBER(p, 4, 0);
	}
	else
	{
		// Write the TrackFile ID
		p += MakeBER(p, 4, 16);
		memcpy(p, TrackFileID->GetValue(), 16);
		p += 16;
	}

	if(!HasSequenceNumber)
	{
		// Write a "missing" sequence number
		p += MakeBER(p, 4, 0);
	}
	else
	{
		// Write the sequence number
		p += MakeBER(p, 4, 8);
		PutU64(SequenceNumber, p);
		p += 8;
	}

	// DRAGONS: We don't bother to write a "missing" MIC as this is the last item so can just be omitted
	if(WriteHasher)
	{
		// Write the BER length for the hash - the general consensus seems to be that this is included IN the hash!
		p += MakeBER(p, 4, 20);

		// Finish calculating the MIC
		WriteHasher->HashData((int)(p - Buffer->Data), Buffer->Data);

		// Get the hash
		DataChunkPtr Hash = WriteHasher->GetHash();

		// Write the hash
		if(Hash->Size == 20) memcpy(p, Hash->Data, 20);
		else error("Hash for this KLVEObject is not 20 bytes\n");

		p += 20;
	}

	// Resize the buffer to exactly the amount of data we built
	mxflib_assert((UInt32)(p - Buffer->Data) == FooterLength);
	Buffer->Resize((int)(p - Buffer->Data));

	// Write the footer
	Dest.File->Write(Buffer);

	// Return "All OK"
	return true;
}
void JPEGLSCodec::compress(const FrameBuffer &frame)
{
	const Box2i dataW = dataWindow();

	const int width = (dataW.max.x - dataW.min.x + 1);
	const int height = (dataW.max.y - dataW.min.y + 1);

	const PixelType pixType = (_depth == JPEGLS_8 ? UINT8 :
								_depth == JPEGLS_10 ? UINT10 :
								_depth == JPEGLS_12 ? UINT12 :
								_depth == JPEGLS_16 ? UINT16 :
								UINT8);

	const size_t pixSize = PixelSize(pixType);
	const unsigned int bitDepth = PixelBits(pixType);

	const int numChannels = (_channels == JPEGLS_RGBA ? 4 : 3);

	const size_t tempPixelSize = (numChannels * pixSize);
	const size_t tempRowbytes = (tempPixelSize * width);
	const size_t tempBufSize = (tempRowbytes * height);

	DataChunk dataChunk(tempBufSize);

	char *tempBuffer = (char *)dataChunk.Data;

	assert(dataW.min.x == 0 && dataW.min.y == 0);

	FrameBuffer tempFrameBuffer(dataW);

	tempFrameBuffer.insert("R", Slice(pixType, &tempBuffer[0 * pixSize], tempPixelSize, tempRowbytes));
	tempFrameBuffer.insert("G", Slice(pixType, &tempBuffer[1 * pixSize], tempPixelSize, tempRowbytes));
	tempFrameBuffer.insert("B", Slice(pixType, &tempBuffer[2 * pixSize], tempPixelSize, tempRowbytes));

	if(_channels == JPEGLS_RGBA)
		tempFrameBuffer.insert("A", Slice(pixType, &tempBuffer[3 * pixSize], tempPixelSize, tempRowbytes));

	tempFrameBuffer.copyFromFrame(frame);

	JlsParameters params = JlsParameters();

	params.width = width;
	params.height = height;
	params.bitspersample = bitDepth;
	//params.bytesperline = (tempRowbytes / 3);
	params.components = numChannels;
	params.allowedlossyerror = 0; // always lossless
	params.ilv = ILV_SAMPLE;
	params.colorTransform = COLORXFORM_NONE;
	//params.outputBgr = 0;

/*	params.custom.MAXVAL = 255;
	params.custom.T1 = 0;
	params.custom.T2 = 0;
	params.custom.T3 = 0;
	params.custom.RESET = 1;

	params.jfif.Ver = 123;
	params.jfif.units = 0;
	params.jfif.XDensity = 72;
	params.jfif.YDensity = 72;
	params.jfif.Xthumb = 0;
	params.jfif.Ythumb = 0;
	params.jfif.pdataThumbnail = NULL;
*/

	ByteStreamInfo inStream = FromByteArray(dataChunk.Data, dataChunk.Size);

	DataChunkPtr outDataChunk = new DataChunk(tempBufSize);

	size_t bytesWritten = 0;

	JLS_ERROR err = OK;

	do
	{
		ByteStreamInfo outStream = FromByteArray(outDataChunk->Data, outDataChunk->Size);

		err = JpegLsEncodeStream(outStream, &bytesWritten, inStream, &params);

		if(err == CompressedBufferTooSmall)
		{
			outDataChunk->Resize(2 * outDataChunk->Size, false);
		}

	} while(err == CompressedBufferTooSmall);

	assert(err != TooMuchCompressedData);

	if(err == OK)
	{
		assert(bytesWritten > 0);

		outDataChunk->Resize(bytesWritten);

		storeData(outDataChunk);
	}
	else
		throw MoxMxf::ArgExc("JPEG-LS compression error");
}
//! Build the data for this frame in SMPTE-436M format
DataChunkPtr ANCVBISource::BuildChunk(void)
{
	/* Fill lines from line sources */

	ANCVBILineSourceList::iterator LS_it = Sources.begin();
	while(LS_it != Sources.end())
	{
		int LineNumber = (*LS_it)->GetLineNumber();
		if((*LS_it)->GetField() == 2) LineNumber += Field2Offset();

		Lines.insert(ANCLineMap::value_type(LineNumber,
			new ANCLine(LineNumber, (*LS_it)->GetWrappingType(), (*LS_it)->GetSampleCoding(), (*LS_it)->GetLineData(), (*LS_it)->GetDID(), (*LS_it)->GetSDID())));

		LS_it++;
	}

	/* Now build the chunk from line data */

	/* First we handle the special case of no lines this frame (should be quite common) */
	if(Lines.empty())
	{
		// Simply return "Number of Lines = 0"
		DataChunkPtr Ret = new DataChunk(2);
		PutU16(0, Ret->Data);
		return Ret;
	}

	VBILineMap::iterator it = Lines.begin();

	// Guess the buffer size by assuming that all the lines are the same size, then add 2 bytes for the line count
	// DRAGONS: If the line sizes do vary this is a bottle-neck
	// DRAGONS: We will use this as a remaining-bytes counter while writing the data
	size_t BufferSize = ((*it).second->GetFullDataSize() * Lines.size()) + 2;

	// Get a buffer of this size
	DataChunkPtr Ret = new DataChunk(BufferSize);

	// Index the start of the buffer
	UInt8 *pBuffer = Ret->Data;

	// Write in the number of lines
	PutU16(static_cast<UInt16>(Lines.size()), Ret->Data);
	pBuffer += 2;
	BufferSize -= 2;

	while(it != Lines.end())
	{
		// Get the number of bytes required to add this line to the buffer
		size_t RequiredBytes = (*it).second->GetFullDataSize();

		// If we don't have enough space we must increase the buffer size - can only happen if lines differ in size
		if(RequiredBytes > BufferSize)
		{
			// Work out how far through the buffer we currently are
			size_t CurrentPos = pBuffer - Ret->Data;

			// Make the buffer big enough for this line
			Ret->Resize(static_cast<UInt32>(CurrentPos + RequiredBytes));

			// Flag that we now have just enough bytes left
			BufferSize = RequiredBytes;

			// Set the buffer pointer in the (possibly re-allocated) buffer
			pBuffer = &Ret->Data[CurrentPos];
		}

		// Write the data into the buffer (formatted by the VBILine itself)
		(*it).second->WriteData(pBuffer);

		// Update the buffer pointer and bytes-remaining count
		pBuffer += RequiredBytes;
		BufferSize -= RequiredBytes;

		it++;
	}

	// Resize the buffer to the actual number of bytes that we wrote
	Ret->Resize(static_cast<UInt32>(pBuffer - Ret->Data));

	// Clear the list of pending lines
	Lines.clear();

	// Return the finished data
	return Ret;
}
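/* Hypothetical sketch (not part of the original source): recover the line count from
 * a chunk produced by BuildChunk() above. The first two bytes of the SMPTE-436M data
 * are the number of lines as written by PutU16(), so an "empty" frame is exactly two
 * zero bytes; GetU16() is assumed to be the matching read helper.
 */
UInt16 CountLinesInChunk(DataChunkPtr Chunk)
{
	// A valid chunk always carries at least the two-byte line count
	if(Chunk->Size < 2) return 0;

	return GetU16(Chunk->Data);
}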