Example #1
void COutArchive::CreateStreamForCompressing(IOutStream **outStream)
{
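  // COffsetOutStream forwards writes to m_Stream shifted by the given offset, so the
  // compressed data starts right after this entry's local file header.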
  COffsetOutStream *streamSpec = new COffsetOutStream;
  CMyComPtr<IOutStream> tempStream(streamSpec);
  streamSpec->Init(m_Stream, m_BasePosition + m_LocalFileHeaderSize);
  *outStream = tempStream.Detach();
}
Example #2
bool
ProtoChannel::writeMessage(const ProtoMessage& message, DebugMsgCallback debugCallback)
{
    bool success = true;
    if (outputStreamError()) {
        success = false;
        debugLogMsg(debugCallback, "outputStreamError");
    } else if (!message.IsInitialized()) {
        success = false;
        debugLogMsg(debugCallback, "message is not fully initialized!");
    } else {
        uint32_t bytes = (uint32_t)message.ByteSize();
        ZeroCopyOutputStream* out = outputStream();
        {
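            // Scoped so the CodedOutputStream is destroyed (returning any unused buffer
            // space to 'out') before the message body is serialized to the same stream.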
            google::protobuf::io::CodedOutputStream tempStream(out);
            tempStream.WriteVarint32(bytes);
            success &= !tempStream.HadError();
        }
        if (!success) {
            debugLogMsg(debugCallback, "WriteVarint32 failed!");
        } else {
            success &= message.SerializeToZeroCopyStream(out);
            // If serialization failed, assume it was an I/O error
            // because we already checked IsInitialized.
            // TODO: could double-check this.
            if (!success) {
                debugLogMsg(debugCallback, "Failed in SerializeToZeroCopyStream!");
            }
        }
    }
    return success;
}
Example #3
void ModelExporter::ExportToPath(const model::IModelExporterPtr& exporter,
	const std::string& outputPath, const std::string& filename)
{
	fs::path targetPath = outputPath;

	// Open a temporary file (leading underscore)
	fs::path tempFile = targetPath / ("_" + filename);

	std::ofstream::openmode mode = std::ofstream::out;

	if (exporter->getFileFormat() == model::IModelExporter::Format::Binary)
	{
		mode |= std::ios::binary;
	}

	std::ofstream tempStream(tempFile.string().c_str(), mode);

	if (!tempStream.is_open())
	{
		throw std::runtime_error(
			fmt::format(_("Cannot open file for writing: {0}"), tempFile.string()));
	}

	exporter->exportToStream(tempStream);

	tempStream.close();

	// The full OS path to the output file
	targetPath /= filename;

	if (fs::exists(targetPath))
	{
		try
		{
			fs::rename(targetPath, targetPath.string() + ".bak");
		}
		catch (fs::filesystem_error& e)
		{
			rError() << "Could not rename the existing file to .bak: " << targetPath.string() << std::endl
				<< e.what() << std::endl;

			throw std::runtime_error(
				fmt::format(_("Could not rename the existing file to .bak: {0}"), tempFile.string()));
		}
	}

	try
	{
		fs::rename(tempFile, targetPath);
	}
	catch (fs::filesystem_error& e)
	{
		rError() << "Could not rename the temporary file " << tempFile.string() << std::endl
			<< e.what() << std::endl;

		throw std::runtime_error(
			fmt::format(_("Could not rename the temporary file: {0}"), tempFile.string()));
	}
}
Example #4
void writePly(Interface& interface) {

    QFile file(saveToFileName);
    if (!file.open(QIODevice::WriteOnly | QIODevice::Text)) {
        qWarning() << "could not open" << saveToFileName << "for writing";
        return;
    }
    QTextStream out(&file);

    QVector<Face *> faces = interface.getFaces();

    out << "ply" << "\n";
    out << "format ascii 1.0" << "\n";

    int vertex_count = interface.getVertices().size();
    int faces_count = faces.size();

    out << "element vertex " << vertex_count << "\n";
    out << "property float x" << "\n";
    out << "property float y" << "\n";

    out << "element face " << faces_count << "\n";
    out << "property list uchar int vertex_index" << "\n";

    out << "end_header" << "\n";

    QList<Vertex*> points = interface.getVertices().values();
    for (int i = 0; i < points.size(); i++) {
        QPointF p = points[i]->getPoint();
        qDebug() << "adicionando ponto: (" << p.x() << "," << p.y() << ")\n";
        //out << i << " ";
        out << p.x() << " " << p.y() << "\n";
    }

    for (int i = 0; i < faces.size(); i++) {
        HalfEdge* start = faces.at(i)->getOuterComp();
        HalfEdge* aux = start;

        //out << i << " ";

        qDebug() << "### Face: " << i << "\n";

        int vertex_per_face_counter = 0;
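        // Collect this face's vertex indices into tempStr; the vertex count is
        // written in front of them once the half-edge loop completes.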
        QString tempStr;
        QTextStream tempStream(&tempStr);
        do {
            Vertex* vertex = aux->getOrigem();
            int index = points.indexOf(vertex);

            tempStream << index << " ";
            aux = aux->getProx();

            vertex_per_face_counter++;
        } while (aux != start);

        out << vertex_per_face_counter << " " << tempStr << "\n";
    }

}
Example #5
VecStr tokenizeString(const std::string& str, const std::string& delim,
                      bool addEmptyToEnd = false) {
  VecStr output;
  if("whitespace" == delim){
    std::stringstream tempStream(str);
    std::string tempName;
    while (tempStream >> tempName) {
      output.emplace_back(tempName);
    }
  }else{
Example #6
int main(int argc, char* argv[]){
    if (argc == 2){
        WheneverCode code(argv[1]);
        code.run();
        return 0;
    }
    else if (argc == 3){
        std::string tempStr(argv[2]);
        std::stringstream tempStream(tempStr);
        int traceLevel;
        tempStream >> traceLevel;
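        // The stream is still in a good state only if argv[2] parsed as an integer.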
        if (tempStream){
            WheneverCode code(argv[1], traceLevel);
            code.run();
            return 0;
        }
    }
Example #7
size_t BlockReadStream::ReadDataTo(MemoryByteData& outData, DataReadingMode mode/*=DataReadingMode::AlwaysCopy*/)const
{
	RETURN_ZERO_IF_FALSE(CanRead());

	size_t outPos = 0;
	size_t outSize = outData.Size();

	//read left buffer data
	size_t bufferLeftLength = mBufferLength - mBuffer.Position();
	if (bufferLeftLength != 0)
	{
		size_t readSize = Math::Min(bufferLeftLength, outSize);
		MemoryByteData tempData = MemoryByteData::FromStatic(outData.MutableData(), readSize);
		readSize = mBuffer.ReadDataTo(tempData, DataReadingMode::AlwaysCopy);
		outPos += readSize;
		outSize -= readSize;
	}

	if (outSize > 0)
	{
		mBuffer.Rewind();
		mBufferLength = 0;
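		//internal buffer is drained; the rest is streamed block-by-block straight into outData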

		//directly read to out data block per block
		size_t blockSize = mBuffer.Length();
		size_t blockCount = outSize / blockSize;
		FOR_EACH_SIZE(i, blockCount)
		{
			MemoryByteData tempData = MemoryByteData::FromStatic(outData.MutableData() + outPos, blockSize);
			MemoryStream tempStream(tempData);
			++mBlockIndex;
			size_t readSize = LoadBlockTo(mBlockIndex, tempStream);
			outPos += readSize;
			outSize -= readSize;
			if (readSize != blockSize)	//reach file end
			{
				return outPos;
			}
		}
Example #8
int main(int argc, char* argv[]) {
	
	try
	{
		gphoto2pp::CameraWrapper cameraWrapper;
		
		std::cout << "#############################" << std::endl;
		std::cout << "# Capture Preview           #" << std::endl;
		std::cout << "#############################" << std::endl;
		
		std::cout << "How Many preview pictures would you like to take [1..100]? ";
		std::string input = "";
		int pictureQty = 0;
		std::getline(std::cin, input);
		
		std::stringstream tempStream(input);
		
		if(tempStream >> pictureQty)
		{
			if(pictureQty < 1 || pictureQty > 100)
			{
				std::cout << "That number is not between 1 and 100 (inclusive)" << std::endl;
				return 0;
			}
			else
			{
				for (int i = 0; i < pictureQty; i++)
				{
					gphoto2pp::helper::capturePreview(cameraWrapper,"example7_preview"+std::to_string(i)+".jpg");
				}
				std::cout << "Done! check the directory which example7 executed in, the pictures were saved there" << std::endl;
			}
		}
		else
		{
			std::cout << "That was not a number" << std::endl;
			return 0;
		}
	}
Example #9
bool
ProtoChannel::extractMessage(ProtoMessage& message)
{
    bool success = true;
    if (inputStreamError()) {
        success = false;
    } else {
        uint32_t bytes;
        ZeroCopyInputStream* in = inputStream();
        {
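            // Scoped so the CodedInputStream is destroyed (returning any read-ahead
            // bytes to 'in') before the message body is parsed from the same stream.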
            google::protobuf::io::CodedInputStream tempStream(in);
            success &= tempStream.ReadVarint32(&bytes);
        }
        if (success) {
            int64_t bytesReadPre = in->ByteCount();
            // TODO: fix this in ParseFromBoundedZeroCopyStream: even if you ask
            //   for 0 bytes, it will still try to read from the socket... which
            //   causes it to wait forever if there is nothing to read.
            if (bytes > 0) {
                success &= message.ParseFromBoundedZeroCopyStream(in, (int)bytes);
                if ((!success) && (!inputStreamError())) {
                    // Encountered malformed message, but the stream is ok.
                    // Make sure it is positioned for the next message.
                    int64_t bytesReadPost = in->ByteCount();
                    uint32_t bytesRead = (uint32_t)(bytesReadPost - bytesReadPre);
                    if (bytesRead < bytes) {
                        // Don't need to check return value.  If this fails,
                        // there will be an error on the stream, so it does not
                        // matter that the positioning failed.
                        in->Skip((int)(bytes - bytesRead));
                    }
                }
            }
        }
    }
    return success;
}
Example #10
int main(int argc, char* argv[])
{
    // Parse command-line arguments
    Parameters parameters;
    bool bParsed = parseCommandLine(argc, argv, parameters);

    if(!bParsed || parameters.bShowHelp || argc == 1)
    {
        help();
        return 0;
    }
    else if(parameters.bShowVersion)
    {
        std::cout << "Naive Bayes Classify v1.0.5 by Donovan Parks, Norm MacDonald, and Rob Beiko." << std::endl;
        return 0;
    }
    else if(parameters.bShowContactInfo)
    {
        std::cout << "Comments, suggestions, and bug reports can be sent to Donovan Parks ([email protected])." << std::endl;
        return 0;
    }
    else if(parameters.queryFile.empty() || parameters.modelFile.empty() || parameters.resultsFile.empty())
    {
        std::cout << "Must specify query (-q), model (-m), and result (-r) file." << std::endl << std::endl;
        help();
        return 0;
    }

    bool bRecordAllModels = false;
    if(parameters.topModels <= 0)
    {
        bRecordAllModels = true;
        parameters.topModels = 0;
    }

    // Get model k-mer length
    if(parameters.verbose >= 1)
        std::cout << "Determining n-mer length..." << std::endl;

    std::ifstream tempStream(parameters.modelFile.c_str(), std::ios::in);
    if(tempStream.fail())
    {
        std::cout << "Failed to open model file: " << parameters.modelFile << std::endl << std::endl;
        return -1;
    }
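    // Reading the first model record is enough here; its n-mer length is assumed to apply to every model in the file.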
    std::string line;
    std::getline(tempStream, line);
    KmerModel tempModel(line);
    uint kmerLength = tempModel.kmerLength();
    if(parameters.verbose >= 1)
        std::cout << "  n-mer length: " << kmerLength << std::endl << std::endl;

    // Read query fragments

    if(parameters.verbose >= 1)
        std::cout << "Reading query fragments..." << std::endl;

    char* buffer = NULL;
    std::vector<SeqInfo> querySeqs;
    FastaIO fastaIO;
    bool bSuccess = fastaIO.readSeqs(parameters.queryFile, querySeqs, buffer, parameters.verbose);
    if(!bSuccess)
    {
        std::cout << "Failed to open query fragment file: " << parameters.queryFile << std::endl;
        return -1;
    }
    if(parameters.verbose >= 1)
        std::cout << "  Number of query fragments: " << querySeqs.size() << std::endl << std::endl;

    // Classify query fragments in batches in order to keep memory requirements within reason (~ 1GB)
    if(parameters.verbose >= 1)
        std::cout << "Processing query fragments in batches of " << parameters.batchSize << "." << std::endl << std::endl;

    KmerCalculator kmerCalculator(kmerLength);
    for(uint batchNum = 0; batchNum < ceil(double(querySeqs.size()) / parameters.batchSize); ++batchNum)
    {
        if(parameters.verbose >= 1)
            std::cout << "Batch #" << (batchNum+1) << std::endl;

        // get k-mers for each query fragment
        if(parameters.verbose >= 1)
            std::cout << "  Calculating n-mers in query fragment: " << std::endl;

        std::vector< std::vector<uint> > queryKmerProfiles;
        queryKmerProfiles.reserve(parameters.batchSize);
        for(uint seqIndex = batchNum*parameters.batchSize;
                seqIndex < std::min(ulong(querySeqs.size()), ulong(batchNum+1)*parameters.batchSize);
                ++seqIndex)
        {
            if(parameters.verbose >= 3)
                std::cout << querySeqs.at(seqIndex).seqId << std::endl;
            else if (seqIndex % 5000 == 0 && parameters.verbose >= 1)
                std::cout << "." << std::flush;

            std::vector<uint> profile;
            kmerCalculator.extractForwardKmers(querySeqs.at(seqIndex), profile);
            queryKmerProfiles.push_back(profile);
        }
        if(parameters.verbose >= 1)
            std::cout << std::endl;

        // apply each model to each query sequence
        if(parameters.verbose >= 1)
            std::cout << "  Applying models to query sequences: " << std::endl;

        std::ifstream modelStream(parameters.modelFile.c_str(), std::ios::in);

        uint modelNum = 0;

        std::vector<std::string> modelNames;
        std::vector< std::list<TopModel> > topModelsPerFragment(queryKmerProfiles.size());
        std::vector< std::vector<float> > modelLogLikelihoods;
        while(!modelStream.eof())
        {
            std::string line;
            std::getline(modelStream, line);

            if(line.empty())
                break;

            if(modelNum % 200 == 0 && parameters.verbose >= 1)
                std::cout << " " << modelNum << std::flush;

            KmerModel kmerModel(line);
            modelNames.push_back(kmerModel.name());
            if(parameters.verbose >= 2)
            {
                kmerModel.printModelInfo(std::cout);
                std::cout << std::endl;
            }

            ulong size = 0;
            if(bRecordAllModels)
                size = queryKmerProfiles.size();
            std::vector<float> logLikelihoods(size);
            for(uint seqIndex = 0; seqIndex < queryKmerProfiles.size(); ++seqIndex)
            {
                SeqInfo querySeqInfo = querySeqs[seqIndex + batchNum*parameters.batchSize];
                float logLikelihood = kmerModel.classify(querySeqInfo, queryKmerProfiles[seqIndex]);

                // record models with highest log likelihood
                if(bRecordAllModels)
                {
                    logLikelihoods[seqIndex] = logLikelihood;
                }
                else
                {
                    std::list<TopModel> topModels = topModelsPerFragment.at(seqIndex);

                    if(topModels.size() == 0)
                        topModels.push_front(TopModel(modelNum, logLikelihood));

                    std::list<TopModel>::iterator it;
                    bool bInserted = false;
                    for(it = topModels.begin(); it != topModels.end(); it++)
                    {
                        if(logLikelihood > it->logLikelihood)
                        {
                            topModels.insert(it, TopModel(modelNum, logLikelihood));
                            bInserted = true;
                            break;
                        }
                    }

                    if((int)topModels.size() < parameters.topModels && !bInserted)
                        topModels.push_back(TopModel(modelNum, logLikelihood));
                    else if((int)topModels.size() > parameters.topModels)
                        topModels.pop_back();

                    topModelsPerFragment.at(seqIndex) = topModels;
                }
            }

            if(bRecordAllModels)
                modelLogLikelihoods.push_back(logLikelihoods);

            modelNum++;
        }
        if(parameters.verbose >= 1)
            std::cout << std::endl;

        // write out classification
        if(parameters.verbose >= 1)
            std::cout << "  Writing out classification results." << std::endl << std::endl;

        std::stringstream outputTempResults;
        outputTempResults << "./batch_" << batchNum << "." << parameters.tempExtension;
        std::ofstream fout(outputTempResults.str().c_str(), std::ios::out);
        if(fout.fail())
        {
            std::cout << "Failed to write temporary results file: " << outputTempResults.str() << std::endl;
            return -1;
        }

        // check if all model results are to be written out
        if(bRecordAllModels)
        {
            if(batchNum == 0)
            {
                fout << "Fragment Id" << "\t" << "Length" << "\t" << "Valid n-mers";
                for(uint modelIndex = 0; modelIndex < modelNames.size(); ++modelIndex)
                    fout << "\t" << modelNames[modelIndex];
                fout << std::endl;
            }

            for(uint seqIndex = 0; seqIndex < queryKmerProfiles.size(); ++seqIndex)
            {
                SeqInfo querySeqInfo = querySeqs.at(seqIndex + batchNum*parameters.batchSize);

                fout << querySeqInfo.seqId << "\t" << querySeqInfo.length << "\t" << querySeqInfo.validKmers;

                for(uint modelIndex = 0; modelIndex < modelNames.size(); ++modelIndex)
                    fout << "\t" << modelLogLikelihoods[modelIndex][seqIndex];
                fout << std::endl;
            }
        }
        else
        {
            for(uint seqIndex = 0; seqIndex < queryKmerProfiles.size(); ++seqIndex)
            {
                SeqInfo querySeqInfo = querySeqs.at(seqIndex + batchNum*parameters.batchSize);

                fout << querySeqInfo.seqId << "\t" << querySeqInfo.length << "\t" << querySeqInfo.validKmers;

                std::list<TopModel>::iterator it;
                for(it = topModelsPerFragment.at(seqIndex).begin(); it != topModelsPerFragment.at(seqIndex).end(); it++)
                    fout << "\t" << modelNames[it->modelNum] << "\t" << it->logLikelihood;

                fout << std::endl;
            }
        }

        fout.close();
    }

    // free memory allocated to hold query fragment data
    delete[] buffer;

    // Concatenate result files
    if(parameters.verbose >= 1)
        std::cout << "Building results file: ";

    std::ofstream resultsStream(parameters.resultsFile.c_str(), std::ios::out | std::ios::binary);
    for(uint batchNum = 0; batchNum < ceil(double(querySeqs.size()) / parameters.batchSize); ++batchNum)
    {
        if(parameters.verbose >= 1)
            std::cout << "." << std::flush;

        std::stringstream tempResultFile;
        tempResultFile << "./batch_" << batchNum  << "." << parameters.tempExtension;
        std::ifstream tempStream(tempResultFile.str().c_str(), std::ios::binary);
        if(tempStream.fail() || tempStream.bad())
        {
            std::cout << "Failed to open file: " << tempResultFile.str() << std::endl;
            return -1;
        }

        // calculate size of file
        tempStream.seekg(0, std::ios::end);
        ulong fileSize = tempStream.tellg();
        tempStream.seekg(0, std::ios::beg);

        // write out data in reasonable sized chunks
        ulong chunkSize = 64*1024*1024;

        // allocate memory for reading file
        char* tempBuffer = new char[chunkSize];
        if(tempBuffer == NULL)
        {
            std::cout << std::endl << "Failed to allocate memory required by file: " << tempResultFile.str() << std::endl;
            return -1;
        }

        for(uint chunk = 0; chunk < ceil(float(fileSize) / chunkSize); ++chunk)
        {
            ulong currentChunkSize = std::min(chunkSize, fileSize - chunk*chunkSize);

            // read file into buffer
            tempStream.read(tempBuffer, currentChunkSize);
            if(tempStream.fail() || tempStream.bad())
            {
                std::cout << std::endl << "Failed to read data from " << tempResultFile.str() << std::endl;
                return -1;
            }

            resultsStream.write(tempBuffer, currentChunkSize);
            resultsStream.flush();
        }

        tempStream.close();
        delete[] tempBuffer;
    }
    resultsStream.close();

    if(parameters.verbose >= 1)
    {
        std::cout << std::endl;
        std::cout << "Done." << std::endl;
    }

    for(uint batchNum = 0; batchNum < ceil(double(querySeqs.size()) / parameters.batchSize); ++batchNum)
    {
        std::stringstream filename;
        filename << "./batch_" << batchNum  << "." << parameters.tempExtension;
        std::remove(filename.str().c_str());
    }

    return 0;
}
Example #11
void dng_info::ParseDNGPrivateData (dng_host &host,
									dng_stream &stream)
	{
	
	if (fShared->fDNGPrivateDataCount < 2)
		{
		return;
		}
	
	// DNG private data should always start with a null-terminated 
	// company name, to define the format of the private data.
			
	dng_string privateName;
			
		{
			
		char buffer [64];
		
		stream.SetReadPosition (fShared->fDNGPrivateDataOffset);
	
		uint32 readLength = Min_uint32 (fShared->fDNGPrivateDataCount,
										sizeof (buffer) - 1);
		
		stream.Get (buffer, readLength);
		
		buffer [readLength] = 0;
		
		privateName.Set (buffer);
		
		}
		
	// Pentax is storing their MakerNote in the DNGPrivateData data.
	
	if (privateName.StartsWith ("PENTAX" ) ||
		privateName.StartsWith ("SAMSUNG"))
		{
		
		#if qDNGValidate
		
		if (gVerbose)
			{
			printf ("Parsing Pentax/Samsung DNGPrivateData\n\n");
			}
			
		#endif

		stream.SetReadPosition (fShared->fDNGPrivateDataOffset + 8);
		
		bool bigEndian = stream.BigEndian ();
		
		uint16 endianMark = stream.Get_uint16 ();
		
		if (endianMark == byteOrderMM)
			{
			bigEndian = true;
			}
			
		else if (endianMark == byteOrderII)
			{
			bigEndian = false;
			}
			
		TempBigEndian temp_endian (stream, bigEndian);
	
		ParseMakerNoteIFD (host,
						   stream,
						   fShared->fDNGPrivateDataCount - 10,
						   fShared->fDNGPrivateDataOffset + 10,
						   fShared->fDNGPrivateDataOffset,
						   fShared->fDNGPrivateDataOffset,
						   fShared->fDNGPrivateDataOffset + fShared->fDNGPrivateDataCount,
						   tcPentaxMakerNote);
						   
		return;
		
		}
				
	// Stop parsing if this is not an Adobe format block.
	
	if (!privateName.Matches ("Adobe"))
		{
		return;
		}
	
	TempBigEndian temp_order (stream);
	
	uint32 section_offset = 6;
	
	while (section_offset + 8 < fShared->fDNGPrivateDataCount)
		{
		
		stream.SetReadPosition (fShared->fDNGPrivateDataOffset + section_offset);
		
		uint32 section_key   = stream.Get_uint32 ();
		uint32 section_count = stream.Get_uint32 ();
		
		if (section_key == DNG_CHAR4 ('M','a','k','N') && section_count > 6)
			{
			
			#if qDNGValidate
			
			if (gVerbose)
				{
				printf ("Found MakerNote inside DNGPrivateData\n\n");
				}
				
			#endif
				
			uint16 order_mark = stream.Get_uint16 ();
			uint64 old_offset = stream.Get_uint32 ();

			uint32 tempSize = section_count - 6;
			
			AutoPtr<dng_memory_block> tempBlock (host.Allocate (tempSize));
			
			uint64 positionInOriginalFile = stream.PositionInOriginalFile();
			
			stream.Get (tempBlock->Buffer (), tempSize);
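
			// Wrap the copied MakerNote bytes in an in-memory stream that remembers where
			// they sat in the original file, so absolute offsets inside the MakerNote still resolve.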
			
			dng_stream tempStream (tempBlock->Buffer (),
								   tempSize,
								   positionInOriginalFile);
								   
			tempStream.SetBigEndian (order_mark == byteOrderMM);
			
			ParseMakerNote (host,
							tempStream,
							tempSize,
							0,
							0 - old_offset,
							0,
							tempSize);
	
			}
			
		else if (section_key == DNG_CHAR4 ('S','R','2',' ') && section_count > 6)
			{
			
			#if qDNGValidate
			
			if (gVerbose)
				{
				printf ("Found Sony private data inside DNGPrivateData\n\n");
				}
				
			#endif
			
			uint16 order_mark = stream.Get_uint16 ();
			uint64 old_offset = stream.Get_uint32 ();

			uint64 new_offset = fShared->fDNGPrivateDataOffset + section_offset + 14;
			
			TempBigEndian sr2_order (stream, order_mark == byteOrderMM);
			
			ParseSonyPrivateData (host,
							  	  stream,
								  section_count - 6,
								  old_offset,
								  new_offset);
				
			}

		else if (section_key == DNG_CHAR4 ('R','A','F',' ') && section_count > 4)
			{
			
			#if qDNGValidate
			
			if (gVerbose)
				{
				printf ("Found Fuji RAF tags inside DNGPrivateData\n\n");
				}
				
			#endif
			
			uint16 order_mark = stream.Get_uint16 ();
			
			uint32 tagCount = stream.Get_uint32 ();
			
			uint64 tagOffset = stream.Position ();
				
			if (tagCount)
				{
				
				TempBigEndian raf_order (stream, order_mark == byteOrderMM);
				
				ParseTag (host,
						  stream,
						  fExif.Get (),
						  fShared.Get (),
						  NULL,
						  tcFujiRAF,
						  tcFujiHeader,
						  ttUndefined,
						  tagCount,
						  tagOffset,
						  0);
						  
				stream.SetReadPosition (tagOffset + tagCount);
				
				}
			
			tagCount = stream.Get_uint32 ();
			
			tagOffset = stream.Position ();
				
			if (tagCount)
				{
				
				TempBigEndian raf_order (stream, order_mark == byteOrderMM);
				
				ParseTag (host,
						  stream,
						  fExif.Get (),
						  fShared.Get (),
						  NULL,
						  tcFujiRAF,
						  tcFujiRawInfo1,
						  ttUndefined,
						  tagCount,
						  tagOffset,
						  0);
						  
				stream.SetReadPosition (tagOffset + tagCount);
				
				}
			
			tagCount = stream.Get_uint32 ();
			
			tagOffset = stream.Position ();
				
			if (tagCount)
				{
				
				TempBigEndian raf_order (stream, order_mark == byteOrderMM);
				
				ParseTag (host,
						  stream,
						  fExif.Get (),
						  fShared.Get (),
						  NULL,
						  tcFujiRAF,
						  tcFujiRawInfo2,
						  ttUndefined,
						  tagCount,
						  tagOffset,
						  0);
						  
				stream.SetReadPosition (tagOffset + tagCount);
				
				}
			
			}

		else if (section_key == DNG_CHAR4 ('C','n','t','x') && section_count > 4)
			{
			
			#if qDNGValidate
			
			if (gVerbose)
				{
				printf ("Found Contax Raw header inside DNGPrivateData\n\n");
				}
				
			#endif
			
			uint16 order_mark = stream.Get_uint16 ();
			
			uint32 tagCount  = stream.Get_uint32 ();
			
			uint64 tagOffset = stream.Position ();
				
			if (tagCount)
				{
				
				TempBigEndian contax_order (stream, order_mark == byteOrderMM);
				
				ParseTag (host,
						  stream,
						  fExif.Get (),
						  fShared.Get (),
						  NULL,
						  tcContaxRAW,
						  tcContaxHeader,
						  ttUndefined,
						  tagCount,
						  tagOffset,
						  0);
						  
				}
			
			}
			
		else if (section_key == DNG_CHAR4 ('C','R','W',' ') && section_count > 4)
			{
			
			#if qDNGValidate
			
			if (gVerbose)
				{
				printf ("Found Canon CRW tags inside DNGPrivateData\n\n");
				}
				
			#endif
				
			uint16 order_mark = stream.Get_uint16 ();
			uint32 entries    = stream.Get_uint16 ();
			
			uint64 crwTagStart = stream.Position ();
			
			for (uint32 parsePass = 1; parsePass <= 2; parsePass++)
				{
				
				stream.SetReadPosition (crwTagStart);
			
				for (uint32 index = 0; index < entries; index++)
					{
					
					uint32 tagCode = stream.Get_uint16 ();
											 
					uint32 tagCount = stream.Get_uint32 ();
					
					uint64 tagOffset = stream.Position ();
					
					// We need to grab the model id tag first, and then all the
					// other tags.
					
					if ((parsePass == 1) == (tagCode == 0x5834))
						{
				
						TempBigEndian tag_order (stream, order_mark == byteOrderMM);
					
						ParseTag (host,
								  stream,
								  fExif.Get (),
								  fShared.Get (),
								  NULL,
								  tcCanonCRW,
								  tagCode,
								  ttUndefined,
								  tagCount,
								  tagOffset,
								  0);
								  
						}
					
					stream.SetReadPosition (tagOffset + tagCount);
					
					}
					
				}
			
			}

		else if (section_count > 4)
			{
			
			uint32 parentCode = 0;
			
			bool code32  = false;
			bool hasType = true;
			
			switch (section_key)
				{
				
				case DNG_CHAR4 ('M','R','W',' '):
					{
					parentCode = tcMinoltaMRW;
					code32     = true;
					hasType    = false;
					break;
					}
				
				case DNG_CHAR4 ('P','a','n','o'):
					{
					parentCode = tcPanasonicRAW;
					break;
					}
					
				case DNG_CHAR4 ('L','e','a','f'):
					{
					parentCode = tcLeafMOS;
					break;
					}
					
				case DNG_CHAR4 ('K','o','d','a'):
					{
					parentCode = tcKodakDCRPrivateIFD;
					break;
					}
					
				case DNG_CHAR4 ('K','D','C',' '):
					{
					parentCode = tcKodakKDCPrivateIFD;
					break;
					}
					
				default:
					break;
					
				}

			if (parentCode)
				{
			
				#if qDNGValidate
				
				if (gVerbose)
					{
					printf ("Found %s tags inside DNGPrivateData\n\n",
							LookupParentCode (parentCode));
					}
					
				#endif
				
				uint16 order_mark = stream.Get_uint16 ();
				uint32 entries    = stream.Get_uint16 ();
				
				for (uint32 index = 0; index < entries; index++)
					{
					
					uint32 tagCode = code32 ? stream.Get_uint32 ()
											: stream.Get_uint16 ();
											 
					uint32 tagType  = hasType ? stream.Get_uint16 () 
											  : ttUndefined;
					
					uint32 tagCount = stream.Get_uint32 ();
					
					uint32 tagSize = tagCount * TagTypeSize (tagType);
					
					uint64 tagOffset = stream.Position ();
					
					TempBigEndian tag_order (stream, order_mark == byteOrderMM);
				
					ParseTag (host,
							  stream,
							  fExif.Get (),
							  fShared.Get (),
							  NULL,
							  parentCode,
							  tagCode,
							  tagType,
							  tagCount,
							  tagOffset,
							  0);
					
					stream.SetReadPosition (tagOffset + tagSize);
					
					}
					
				}
			
			}
		
		section_offset += 8 + section_count;
		
		if (section_offset & 1)
			{
			section_offset++;
			}
		
		}
		
	}
Example #12
void COutArchive::CreateStreamForCopying(ISequentialOutStream **outStream)
{
  CMyComPtr<ISequentialOutStream> tempStream(m_Stream);
  *outStream = tempStream.Detach();
}
Example #13
void ParticlesManager::saveParticleDef(const std::string& particleName)
{
	ParticleDefMap::const_iterator found = _particleDefs.find(particleName);

	if (found == _particleDefs.end())
	{
		throw std::runtime_error(_("Cannot save particle, it has not been registered yet."));
	}

	ParticleDefPtr particle = found->second;

	std::string relativePath = PARTICLES_DIR + particle->getFilename();

	fs::path particlesModPath = GlobalGameManager().getModPath();
	particlesModPath /= PARTICLES_DIR;

	// Ensure the particles folder exists
	fs::create_directories(particlesModPath);

	fs::path targetFile = particlesModPath / particle->getFilename();

	// If the file doesn't exist yet, let's check if we need to inherit stuff first from the VFS
	if (!fs::exists(targetFile))
	{
		ArchiveTextFilePtr inheritFile = GlobalFileSystem().openTextFile(relativePath);

		if (inheritFile != NULL)
		{
			// There is a file with that name already in the VFS, copy it to the target file
			TextInputStream& inheritStream = inheritFile->getInputStream();

			std::ofstream outFile(targetFile.string().c_str());

			if (!outFile.is_open())
			{
				throw std::runtime_error(
					(boost::format(_("Cannot open file for writing: %s")) % targetFile.string()).str());
			}

			char buf[16384];
			std::size_t bytesRead = inheritStream.read(buf, sizeof(buf));

			while (bytesRead > 0)
			{
				outFile.write(buf, bytesRead);

				bytesRead = inheritStream.read(buf, sizeof(buf));
			}

			outFile.close();
		}
	}

	// Open a temporary file
	fs::path tempFile = targetFile;

	tempFile.remove_filename();
	tempFile /= "_" + os::filename_from_path(targetFile);

	std::ofstream tempStream(tempFile.string().c_str());

	if (!tempStream.is_open())
	{
		throw std::runtime_error(
				(boost::format(_("Cannot open file for writing: %s")) % tempFile.string()).str());
	}

	std::string tempString;

	// If a previous file exists, open it for reading and filter out the particle def we'll be writing
	if (fs::exists(targetFile))
	{
		std::ifstream inheritStream(targetFile.string().c_str());

		if (!inheritStream.is_open())
		{
			throw std::runtime_error(
					(boost::format(_("Cannot open file for reading: %s")) % targetFile.string()).str());
		}

		// Write the file to the output stream, up to the point the particle def should be written to
		stripParticleDefFromStream(inheritStream, tempStream, particleName);

		if (inheritStream.eof())
		{
			// Particle def was not found in the inherited stream, write our comment
			tempStream << std::endl << std::endl;

			writeParticleCommentHeader(tempStream);
		}

		// We're at the insertion point (which might as well be EOF of the inheritStream)

		// Write the particle declaration
		tempStream << *particle << std::endl;

		tempStream << inheritStream.rdbuf();

		inheritStream.close();
	}
	else
	{
		// Just put the particle def into the file and that's it, leave a comment at the head of the decl
		writeParticleCommentHeader(tempStream);

		// Write the particle declaration
		tempStream << *particle << std::endl;
	}

	tempStream.close();

	// Move the temporary stream over the actual file, removing the target first
	if (fs::exists(targetFile))
	{
		try
		{
			fs::remove(targetFile);
		}
		catch (fs::filesystem_error& e)
		{
			rError() << "Could not remove the file " << targetFile.string() << std::endl
				<< e.what() << std::endl;

			throw std::runtime_error(
				(boost::format(_("Could not remove the file %s")) % targetFile.string()).str());
		}
	}

	try
	{
		fs::rename(tempFile, targetFile);
	}
	catch (fs::filesystem_error& e)
	{
		rError() << "Could not rename the temporary file " << tempFile.string() << std::endl
			<< e.what() << std::endl;

		throw std::runtime_error(
			(boost::format(_("Could not rename the temporary file %s")) % tempFile.string()).str());
	}
}