bool CanvasExporterPS::write() { QString format("PostScript (%1)"); int vtkPSFormat = -1; if (outputFormat() == format.arg("PS")) vtkPSFormat = vtkGL2PSExporter::PS_FILE; else if (outputFormat() == format.arg("EPS")) vtkPSFormat = vtkGL2PSExporter::EPS_FILE; else if (outputFormat() == format.arg("PDF")) vtkPSFormat = vtkGL2PSExporter::PDF_FILE; else if (outputFormat() == format.arg("TEX")) vtkPSFormat = vtkGL2PSExporter::TEX_FILE; else if (outputFormat() == format.arg("SVG")) vtkPSFormat = vtkGL2PSExporter::SVG_FILE; else return false; // vtkGL2PSExporter always appends a file extensions, so we should cut it if it already exists QString fileName = outputFileName(); QString ext = fileExtension(); QFileInfo info(fileName); if (info.suffix().toLower() == ext) { fileName.truncate(fileName.length() - ext.length() - 1); // cut ".EXT" } m_exporter->SetFilePrefix(fileName.toUtf8().data()); m_exporter->SetFileFormat(vtkPSFormat); m_exporter->SetRenderWindow(renderWindow()); m_exporter->Write(); return true; }
/*! \brief
 * Writes a single option item (name, arguments, description) in the
 * currently active help output format.
 *
 * \param[in] name        Name of the option.
 * \param[in] args        Argument description shown next to the name.
 * \param[in] description Full description; markup substitution and wrapping
 *     happen through writeTextBlock().
 *
 * \throws NotImplementedError for console output and for any format without
 *     a dedicated branch below.
 */
void HelpWriterContext::writeOptionItem(const std::string &name,
                                        const std::string &args,
                                        const std::string &description) const
{
    File &file = outputFile();
    switch (outputFormat())
    {
        case eHelpOutputFormat_Console:
            // TODO: Generalize this when there is need for it; the current,
            // special implementation is in CommandLineHelpWriter.
            GMX_THROW(NotImplementedError("Option item formatting for console output not implemented"));
            break;
        case eHelpOutputFormat_Man:
            // Man page: bold option name, italic arguments (.BI macro),
            // then the wrapped description in the same paragraph.
            file.writeLine(formatString(".BI \"\\%s\" \" %s\"",
                                        name.c_str(), args.c_str()));
            file.writeString(" ");
            writeTextBlock(description);
            file.writeLine();
            break;
        case eHelpOutputFormat_Html:
        {
            // Substitute markup in the args before embedding them in HTML;
            // default wrapper settings mean no fixed line length here.
            std::string substArgs
                = substituteMarkupAndWrapToString(TextLineWrapperSettings(), args);
            file.writeLine(formatString("<dt><b><tt>%s</tt></b> %s</dt>",
                                        name.c_str(), substArgs.c_str()));
            file.writeLine("<dd>");
            writeTextBlock(description);
            file.writeLine("</dd>");
            break;
        }
        default:
            GMX_THROW(NotImplementedError(
                              "This output format is not implemented"));
    }
}
nsresult
nsTextEditRules::WillOutputText(nsISelection *aSelection,
                                const nsAString *aOutputFormat,
                                nsAString *aOutString,
                                PRBool *aCancel,
                                PRBool *aHandled)
{
  // A null selection is acceptable; every other parameter is required.
  if (!aOutString || !aOutputFormat || !aCancel || !aHandled) {
    return NS_ERROR_NULL_POINTER;
  }

  // Initialize the out parameters.
  *aCancel = PR_FALSE;
  *aHandled = PR_FALSE;

  nsAutoString format(*aOutputFormat);
  ToLowerCase(format);

  // These rules only apply to plain text output.
  if (!format.EqualsLiteral("text/plain")) {
    return NS_OK;
  }

  if (mFlags & nsIPlaintextEditor::eEditorPasswordMask) {
    // Password fields serialize the stored cleartext, not the masked DOM.
    *aOutString = mPasswordText;
    *aHandled = PR_TRUE;
  } else if (mBogusNode) {
    // A bogus node means there is no real content; output the empty string.
    aOutString->Truncate();
    *aHandled = PR_TRUE;
  }

  return NS_OK;
}
nsresult
TextEditRules::WillOutputText(Selection* aSelection,
                              const nsAString* aOutputFormat,
                              nsAString* aOutString,
                              bool* aCancel,
                              bool* aHandled)
{
  // Null selection is fine; the remaining pointers are mandatory.
  if (!aOutString || !aOutputFormat || !aCancel || !aHandled) {
    return NS_ERROR_NULL_POINTER;
  }

  // Initialize the out parameters.
  *aCancel = false;
  *aHandled = false;

  nsAutoString format(*aOutputFormat);
  ToLowerCase(format);

  // Only plain text serialization gets special treatment here.
  if (!format.EqualsLiteral("text/plain")) {
    return NS_OK;
  }

  if (IsPasswordEditor()) {
    // Emit the real (unmasked) text stored for the password field.
    *aOutString = mPasswordText;
    *aHandled = true;
  } else if (mBogusNode) {
    // No actual content; produce the empty string.
    aOutString->Truncate();
    *aHandled = true;
  }

  return NS_OK;
}
void HelpWriterContext::writeOptionListEnd() const
{
    // Only the HTML backend needs an explicit list terminator; all other
    // formats end their option lists implicitly.
    if (outputFormat() != eHelpOutputFormat_Html)
    {
        return;
    }
    outputFile().writeLine("</dl>");
}
/*! \brief
 * Writes a single option item in the active help output format.
 *
 * \param[in] name         Option name.
 * \param[in] value        Value/argument description shown next to the name.
 * \param[in] defaultValue Default value text; omitted when empty.
 * \param[in] info         Extra info text; omitted when empty.
 * \param[in] description  Full description; markup is substituted and the
 *     text wrapped for the target format.
 *
 * \throws NotImplementedError for formats without a branch below.
 */
void HelpWriterContext::writeOptionItem(const std::string &name,
                                        const std::string &value,
                                        const std::string &defaultValue,
                                        const std::string &info,
                                        const std::string &description) const
{
    TextWriter &file = outputFile();
    switch (outputFormat())
    {
        case eHelpOutputFormat_Console:
        {
            // Table columns: 0 = name, 1 = value, 2 = default, 3 = info.
            TextTableFormatter &formatter(impl_->state_->consoleOptionsFormatter());
            formatter.clear();
            formatter.addColumnLine(0, name);
            formatter.addColumnLine(1, value);
            if (!defaultValue.empty())
            {
                formatter.addColumnLine(2, "(" + defaultValue + ")");
            }
            if (!info.empty())
            {
                formatter.addColumnLine(3, "(" + info + ")");
            }
            // The description is wrapped separately below the table row,
            // indented 11 columns and limited to 78 columns.
            TextLineWrapperSettings settings;
            settings.setIndent(11);
            settings.setLineLength(78);
            std::string formattedDescription
                = substituteMarkupAndWrapToString(settings, description);
            file.writeLine(formatter.formatRow());
            file.writeLine(formattedDescription);
            break;
        }
        case eHelpOutputFormat_Rst:
        {
            // Build "value (default) (info)" after the literal option name.
            std::string args(value);
            if (!defaultValue.empty())
            {
                args.append(" (");
                args.append(defaultValue);
                args.append(")");
            }
            if (!info.empty())
            {
                args.append(" (");
                args.append(info);
                args.append(")");
            }
            file.writeLine(formatString("``%s`` %s", name.c_str(), args.c_str()));
            // reST definition body is indented four spaces.
            TextLineWrapperSettings settings;
            settings.setIndent(4);
            file.writeLine(substituteMarkupAndWrapToString(settings, description));
            break;
        }
        default:
            GMX_THROW(NotImplementedError(
                              "This output format is not implemented"));
    }
}
void HelpWriterContext::writeTextBlock(const std::string &text) const { TextLineWrapperSettings settings; if (outputFormat() == eHelpOutputFormat_Console) { settings.setLineLength(78); } outputFile().writeLine(substituteMarkupAndWrapToString(settings, text)); }
QString CanvasExporterPS::fileExtension() const
{
    // Format names look like "PostScript (EXT)"; extract EXT between the
    // parentheses and return it lower-cased.
    const QString format = outputFormat();
    const int prefixLength = QString("PostScript (").length();
    const int extLength = format.length() - prefixLength - 1; // drop ")"
    return format.mid(prefixLength, extLength).toLower();
}
void QgsRasterTerrainAnalysisDialog::on_mExportToCsvButton_clicked() { QString file = QFileDialog::getSaveFileName( 0, tr( "Export Frequency distribution as csv" ), QDir::homePath() ); if ( file.isEmpty() ) { return; } QgsRelief relief( inputFile(), outputFile(), outputFormat() ); relief.exportFrequencyDistributionToCsv( file ); }
bool ChilitagsSurface::start(const QVideoSurfaceFormat &format) { QVideoSurfaceFormat outputFormat(format.frameSize(), QVideoFrame::Format_ARGB32); //if (m_videoSurface) { // qDebug() << m_videoSurface->supportedPixelFormats(); // qDebug() << m_videoSurface->isFormatSupported(outputFormat); // qDebug() << outputFormat; //} if (m_videoSurface) return m_videoSurface->start(outputFormat) && QAbstractVideoSurface::start(format); return QAbstractVideoSurface::start(format); }
void QgsRasterTerrainAnalysisDialog::on_mAutomaticColorButton_clicked() { QgsRelief relief( inputFile(), outputFile(), outputFormat() ); QList< QgsRelief::ReliefColor > reliefColorList = relief.calculateOptimizedReliefClasses(); QList< QgsRelief::ReliefColor >::iterator it = reliefColorList.begin(); mReliefClassTreeWidget->clear(); for ( ; it != reliefColorList.end(); ++it ) { QTreeWidgetItem* item = new QTreeWidgetItem(); item->setText( 0, QString::number( it->minElevation ) ); item->setText( 1, QString::number( it->maxElevation ) ); item->setBackground( 2, QBrush( it->color ) ); mReliefClassTreeWidget->addTopLevelItem( item ); } }
/**
 * Switches the printer to the given custom engine type (Sato/Zebra) or back
 * to the standard Qt engines.
 *
 * Custom engines only apply when printing to a real (native) printer; for
 * any other output format the request is forced back to Standard.  The
 * printer name is saved and restored around the engine switch because
 * swapping engines resets printer properties.
 */
void ReportPrinter::setPrinterType(type type)
{
  if(outputFormat() != QPrinter::NativeFormat)
  {
    // Custom label engines only make sense on a native printer.
    type = Standard;
  }
  if(type == m_printerType)
  {
    return;
  }
  // NB engines switch resets printer properties, we have to save them
  // (and also note that the print dialog can't support any custom printers,
  // so it has to be called before switching to a custom type)
  QString savePrinterName = printerName();
  releaseEngines();
  switch (type)
  {
  case Sato:
    m_paintEngine = new SatoPaintEngine(this);
    if (m_printToBuffer)
      m_paintEngine->setPrintToBuffer();
    m_printEngine = new SatoPrintEngine(m_paintEngine, this);
    break;
  case Zebra:
    m_paintEngine = new ZebraPaintEngine(this);
    if (m_printToBuffer)
      m_paintEngine->setPrintToBuffer();
    m_printEngine = new ZebraPrintEngine(m_paintEngine, this);
    break;
  default:
    // Standard: keep the default Qt engines (nothing to create here).
    break;
  }
  if(m_paintEngine && m_printEngine)
  {
    setEngines(m_printEngine, m_paintEngine);
  }
  m_printerType = type;
  // Restore the name that the engine switch reset.
  setPrinterName(savePrinterName);
}
void HelpWriterContext::writeTitle(const std::string &title) const { File &file = outputFile(); switch (outputFormat()) { case eHelpOutputFormat_Console: file.writeLine(toUpperCase(title)); file.writeLine(); break; case eHelpOutputFormat_Man: file.writeLine(formatString(".SH %s", toUpperCase(title).c_str())); break; case eHelpOutputFormat_Html: file.writeLine(formatString("<H3>%s</H3>", title.c_str())); break; default: GMX_THROW(NotImplementedError( "This output format is not implemented")); } }
/*! \brief
 * Writes a section title in the active help output format.
 *
 * Empty titles are silently ignored.  The title is surrounded by empty
 * lines (ensureEmptyLine() avoids duplicating blank lines).
 *
 * \throws NotImplementedError for formats without a title implementation.
 */
void HelpWriterContext::writeTitle(const std::string &title) const
{
    if (title.empty())
    {
        return;
    }
    TextWriter &file = outputFile();
    file.ensureEmptyLine();
    switch (outputFormat())
    {
        case eHelpOutputFormat_Console:
            // Consoles have no markup; upper-case stands in for a heading.
            file.writeLine(toUpperCase(title));
            break;
        case eHelpOutputFormat_Rst:
            // reST underlines the title; the underline character is chosen
            // by the current section nesting depth.
            file.writeLine(title);
            file.writeLine(std::string(title.length(),
                                       g_titleChars[impl_->sectionDepth_]));
            break;
        default:
            GMX_THROW(NotImplementedError(
                              "This output format is not implemented"));
    }
    file.ensureEmptyLine();
}
/**
 * Worker-thread loop of the decoder.
 *
 * Repeatedly feeds queued input samples into the MediaCodec and drains its
 * output buffers until WaitForInput() returns false.  On exit the state is
 * set to kShutdown and the monitor is notified.
 *
 * NOTE(review): the inner `nsresult res` declarations shadow the outer one;
 * BREAK_ON_DECODER_ERROR presumably inspects the innermost `res` in scope —
 * confirm against the macro definition.
 */
void
MediaCodecDataDecoder::DecoderLoop()
{
  bool isOutputDone = false;
  AutoLocalJNIFrame frame(jni::GetEnvForThread(), 1);
  MediaFormat::LocalRef outputFormat(frame.GetEnv());
  nsresult res = NS_OK;

  while (WaitForInput()) {
    RefPtr<MediaRawData> sample = PeekNextSample();

    {
      MonitorAutoLock lock(mMonitor);
      if (State() == kDrainDecoder) {
        MOZ_ASSERT(!sample, "Shouldn't have a sample when pushing EOF frame");
        res = QueueEOS();
        BREAK_ON_DECODER_ERROR();
      }
    }

    if (sample) {
      res = QueueSample(sample);
      if (NS_SUCCEEDED(res)) {
        // We've fed this into the decoder, so remove it from the queue.
        MonitorAutoLock lock(mMonitor);
        MOZ_RELEASE_ASSERT(mQueue.size(), "Queue may not be empty");
        mQueue.pop_front();
        isOutputDone = false;
      }
    }

    // Nothing more to drain until new input arrives.
    if (isOutputDone) {
      continue;
    }

    BufferInfo::LocalRef bufferInfo;
    nsresult res = BufferInfo::New(&bufferInfo);
    BREAK_ON_DECODER_ERROR();

    int32_t outputStatus = -1;
    res = mDecoder->DequeueOutputBuffer(bufferInfo, kDecoderTimeout,
                                        &outputStatus);
    BREAK_ON_DECODER_ERROR();

    if (outputStatus == MediaCodec::INFO_TRY_AGAIN_LATER) {
      // We might want to call mCallback->InputExhausted() here, but there seems
      // to be some possible bad interactions here with the threading.
    } else if (outputStatus == MediaCodec::INFO_OUTPUT_BUFFERS_CHANGED) {
      res = ResetOutputBuffers();
      BREAK_ON_DECODER_ERROR();
    } else if (outputStatus == MediaCodec::INFO_OUTPUT_FORMAT_CHANGED) {
      // Remember the new output format for subsequent ProcessOutput() calls.
      res = mDecoder->GetOutputFormat(ReturnTo(&outputFormat));
      BREAK_ON_DECODER_ERROR();
    } else if (outputStatus < 0) {
      NS_WARNING("Unknown error from decoder!");
      INVOKE_CALLBACK(Error);
      // Don't break here just in case it's recoverable. If it's not, other
      // stuff will fail later and we'll bail out.
    } else {
      // We have a valid buffer index >= 0 here.
      int32_t flags;
      nsresult res = bufferInfo->Flags(&flags);
      BREAK_ON_DECODER_ERROR();

      if (flags & MediaCodec::BUFFER_FLAG_END_OF_STREAM) {
        HandleEOS(outputStatus);
        isOutputDone = true;
        // We only queue empty EOF frames, so we're done for now.
        continue;
      }

      res = ProcessOutput(bufferInfo, outputFormat, outputStatus);
      BREAK_ON_DECODER_ERROR();
    }
  }

  Cleanup();

  // We're done.
  MonitorAutoLock lock(mMonitor);
  State(kShutdown);
  mMonitor.Notify();
}
QString Arguments::destination(const QString &source, int num, int palette) const
{
    // Delegate to destinationPath() with the configured output format.
    const QString format = outputFormat();
    return destinationPath(source, format, num, palette);
}
/**
 * Worker-thread loop of the decoder.
 *
 * Feeds samples from mQueue into the MediaCodec, drains its output buffers,
 * and handles flush/drain/stop requests signalled through the monitor-guarded
 * flags (mFlushing / mDraining / mStopping).  The loop exits only when
 * mStopping is set; on exit it clears mStopping and notifies the monitor.
 *
 * NOTE(review): "ENVOKE_CALLBACK" is the project's macro name (sic); it is
 * kept as-is here.
 */
void MediaCodecDataDecoder::DecoderLoop()
{
  bool outputDone = false;    // True after an EOF output buffer was seen.
  bool draining = false;      // True while we are pushing the EOF input frame.
  bool waitingEOF = false;    // True after the EOF frame was queued, until seen.

  AutoLocalJNIFrame frame(GetJNIForThread(), 1);
  nsRefPtr<MediaRawData> sample;

  MediaFormat::LocalRef outputFormat(frame.GetEnv());
  nsresult res;

  for (;;) {
    {
      // Wait for input, or for a state change (stop/drain/flush).
      MonitorAutoLock lock(mMonitor);
      while (!mStopping && !mDraining && !mFlushing && mQueue.empty()) {
        if (mQueue.empty()) {
          // We could be waiting here forever if we don't signal that we need more input
          ENVOKE_CALLBACK(InputExhausted);
        }
        lock.Wait();
      }

      if (mStopping) {
        // Get out of the loop. This is the only exit point.
        break;
      }

      if (mFlushing) {
        mDecoder->Flush();
        ClearQueue();
        mFlushing = false;
        lock.Notify();
        continue;
      }

      if (mDraining && !sample && !waitingEOF) {
        draining = true;
      }

      // We're not stopping or draining, so try to get a sample
      if (!mQueue.empty()) {
        sample = mQueue.front();
      }
    }

    if (draining && !waitingEOF) {
      // Push an empty input buffer flagged EOF to start the drain.
      MOZ_ASSERT(!sample, "Shouldn't have a sample when pushing EOF frame");

      int32_t inputIndex;
      res = mDecoder->DequeueInputBuffer(DECODER_TIMEOUT, &inputIndex);
      HANDLE_DECODER_ERROR();

      if (inputIndex >= 0) {
        res = mDecoder->QueueInputBuffer(inputIndex, 0, 0, 0,
                                         MediaCodec::BUFFER_FLAG_END_OF_STREAM);
        HANDLE_DECODER_ERROR();
        waitingEOF = true;
      }
    }

    if (sample) {
      // We have a sample, try to feed it to the decoder
      int inputIndex;
      res = mDecoder->DequeueInputBuffer(DECODER_TIMEOUT, &inputIndex);
      HANDLE_DECODER_ERROR();

      if (inputIndex >= 0) {
        jni::Object::LocalRef buffer(frame.GetEnv());
        res = GetInputBuffer(frame.GetEnv(), inputIndex, &buffer);
        HANDLE_DECODER_ERROR();

        void* directBuffer = frame.GetEnv()->GetDirectBufferAddress(buffer.Get());

        MOZ_ASSERT(frame.GetEnv()->GetDirectBufferCapacity(buffer.Get()) >= sample->Size(),
          "Decoder buffer is not large enough for sample");

        {
          // We're feeding this to the decoder, so remove it from the queue
          MonitorAutoLock lock(mMonitor);
          mQueue.pop();
        }

        PodCopy((uint8_t*)directBuffer, sample->Data(), sample->Size());

        res = mDecoder->QueueInputBuffer(inputIndex, 0, sample->Size(),
                                         sample->mTime, 0);
        HANDLE_DECODER_ERROR();

        // Remember the duration so it can be paired with the decoded output.
        mDurations.push(media::TimeUnit::FromMicroseconds(sample->mDuration));
        sample = nullptr;
        outputDone = false;
      }
    }

    if (!outputDone) {
      BufferInfo::LocalRef bufferInfo;
      res = BufferInfo::New(&bufferInfo);
      HANDLE_DECODER_ERROR();

      int32_t outputStatus;
      res = mDecoder->DequeueOutputBuffer(bufferInfo, DECODER_TIMEOUT, &outputStatus);
      HANDLE_DECODER_ERROR();

      if (outputStatus == MediaCodec::INFO_TRY_AGAIN_LATER) {
        // We might want to call mCallback->InputExhausted() here, but there seems to be
        // some possible bad interactions here with the threading
      } else if (outputStatus == MediaCodec::INFO_OUTPUT_BUFFERS_CHANGED) {
        res = ResetOutputBuffers();
        HANDLE_DECODER_ERROR();
      } else if (outputStatus == MediaCodec::INFO_OUTPUT_FORMAT_CHANGED) {
        res = mDecoder->GetOutputFormat(ReturnTo(&outputFormat));
        HANDLE_DECODER_ERROR();
      } else if (outputStatus < 0) {
        NS_WARNING("unknown error from decoder!");
        ENVOKE_CALLBACK(Error);
        // Don't break here just in case it's recoverable. If it's not, others stuff will fail later and
        // we'll bail out.
      } else {
        int32_t flags;
        res = bufferInfo->Flags(&flags);
        HANDLE_DECODER_ERROR();

        // We have a valid buffer index >= 0 here
        if (flags & MediaCodec::BUFFER_FLAG_END_OF_STREAM) {
          if (draining) {
            // The EOF frame we queued has come back out: drain complete.
            draining = false;
            waitingEOF = false;

            mMonitor.Lock();
            mDraining = false;
            mMonitor.Notify();
            mMonitor.Unlock();

            ENVOKE_CALLBACK(DrainComplete);
          }

          mDecoder->ReleaseOutputBuffer(outputStatus, false);
          outputDone = true;

          // We only queue empty EOF frames, so we're done for now
          continue;
        }

        MOZ_ASSERT(!mDurations.empty(), "Should have had a duration queued");

        media::TimeUnit duration;
        if (!mDurations.empty()) {
          duration = mDurations.front();
          mDurations.pop();
        }

        auto buffer = jni::Object::LocalRef::Adopt(
            frame.GetEnv()->GetObjectArrayElement(mOutputBuffers.Get(), outputStatus));

        if (buffer) {
          // The buffer will be null on Android L if we are decoding to a Surface
          void* directBuffer = frame.GetEnv()->GetDirectBufferAddress(buffer.Get());
          Output(bufferInfo, directBuffer, outputFormat, duration);
        }

        // The Surface will be updated at this point (for video)
        mDecoder->ReleaseOutputBuffer(outputStatus, true);

        PostOutput(bufferInfo, outputFormat, duration);
      }
    }
  }

  Cleanup();

  // We're done
  MonitorAutoLock lock(mMonitor);
  mStopping = false;
  mMonitor.Notify();
}
QString Arguments::destinationPalette(const QString &source, int num) const
{
    // Palette files use a "palette." prefix in front of the normal
    // output-format extension.
    const QString paletteFormat = "palette." + outputFormat();
    return destinationPath(source, paletteFormat, num);
}
/**
 * REST callback: decodes a DICOM instance with GDCM and answers with a PNG.
 *
 * URL groups: groups[0] = instance identifier, groups[1] = requested output
 * format ("preview", "image-uint8", "image-uint16" or "image-int16").
 * Unsupported configurations are reported via AnswerUnsupportedImage().
 * Always returns 0 (the REST layer's success code).
 */
ORTHANC_PLUGINS_API int32_t DecodeImage(OrthancPluginRestOutput* output,
                                        const char* url,
                                        const OrthancPluginHttpRequest* request)
{
  std::string instance(request->groups[0]);
  std::string outputFormat(request->groups[1]);
  OrthancContext::GetInstance().LogWarning("Using GDCM to decode instance " + instance);

  // Download the request DICOM instance from Orthanc into a memory buffer
  std::string dicom;
  OrthancContext::GetInstance().GetDicomForInstance(dicom, instance);

  // Prepare a memory stream over the DICOM instance
  std::stringstream stream(dicom);

  // Parse the DICOM instance using GDCM
  gdcm::ImageReader imageReader;
  imageReader.SetStream(stream);
  if (!imageReader.Read())
  {
    OrthancContext::GetInstance().LogError("GDCM cannot extract an image from this DICOM instance");
    AnswerUnsupportedImage(output);
    return 0;
  }

  gdcm::Image& image = imageReader.GetImage();

  // Log information about the decoded image
  // NOTE(review): the "%dx%d" slots are filled with GetRows() then
  // GetColumns() — confirm that rows-first is the intended log wording.
  char tmp[1024];
  sprintf(tmp, "Image format: %dx%d %s with %d color channel(s)",
          image.GetRows(), image.GetColumns(),
          image.GetPixelFormat().GetScalarTypeAsString(),
          image.GetPixelFormat().GetSamplesPerPixel());
  OrthancContext::GetInstance().LogWarning(tmp);

  // Convert planar configuration
  gdcm::ImageChangePlanarConfiguration planar;
  if (image.GetPlanarConfiguration() != 0 &&
      image.GetPixelFormat().GetSamplesPerPixel() != 1)
  {
    OrthancContext::GetInstance().LogWarning("Converting planar configuration to interleaved");
    planar.SetInput(imageReader.GetImage());
    planar.Change();
    // `image` is a reference into the reader, so this copies the converted
    // pixels back over the reader's image.
    image = planar.GetOutput();
  }

  // Create a read-only accessor to the bitmap decoded by GDCM
  Orthanc::PixelFormat format;
  if (!GetOrthancPixelFormat(format, image))
  {
    OrthancContext::GetInstance().LogError("This sample plugin does not support this image format");
    AnswerUnsupportedImage(output);
    return 0;
  }

  Orthanc::ImageAccessor decodedImage;
  std::vector<char> decodedBuffer(image.GetBufferLength());

  if (decodedBuffer.size())
  {
    image.GetBuffer(&decodedBuffer[0]);
    unsigned int pitch = image.GetColumns() * ::Orthanc::GetBytesPerPixel(format);
    decodedImage.AssignWritable(format, image.GetColumns(),
                                image.GetRows(), pitch, &decodedBuffer[0]);
  }
  else
  {
    // Empty image
    decodedImage.AssignWritable(format, 0, 0, 0, NULL);
  }

  // Convert the pixel format from GDCM to the format requested by the REST query
  Orthanc::ImageBuffer converted;
  converted.SetWidth(decodedImage.GetWidth());
  converted.SetHeight(decodedImage.GetHeight());

  if (outputFormat == "preview")
  {
    if (format == Orthanc::PixelFormat_RGB24 ||
        format == Orthanc::PixelFormat_RGBA32)
    {
      // Do not rescale color image
      converted.SetFormat(Orthanc::PixelFormat_RGB24);
    }
    else
    {
      converted.SetFormat(Orthanc::PixelFormat_Grayscale8);

      // Rescale the image to the [0,255] range
      // NOTE(review): assumes b != a (non-constant image); a constant image
      // would divide by zero here — confirm upstream guarantees.
      int64_t a, b;
      Orthanc::ImageProcessing::GetMinMaxValue(a, b, decodedImage);
      float offset = -a;
      float scaling = 255.0f / static_cast<float>(b - a);
      Orthanc::ImageProcessing::ShiftScale(decodedImage, offset, scaling);
    }
  }
  else
  {
    if (format == Orthanc::PixelFormat_RGB24 ||
        format == Orthanc::PixelFormat_RGBA32)
    {
      // Do not convert color images to grayscale values (this is Orthanc convention)
      AnswerUnsupportedImage(output);
      return 0;
    }

    if (outputFormat == "image-uint8")
    {
      converted.SetFormat(Orthanc::PixelFormat_Grayscale8);
    }
    else if (outputFormat == "image-uint16")
    {
      converted.SetFormat(Orthanc::PixelFormat_Grayscale16);
    }
    else if (outputFormat == "image-int16")
    {
      converted.SetFormat(Orthanc::PixelFormat_SignedGrayscale16);
    }
    else
    {
      OrthancContext::GetInstance().LogError("Unknown output format: " + outputFormat);
      AnswerUnsupportedImage(output);
      return 0;
    }
  }

  Orthanc::ImageAccessor convertedAccessor(converted.GetAccessor());
  Orthanc::ImageProcessing::Convert(convertedAccessor, decodedImage);

  // Compress the converted image as a PNG file
  OrthancContext::GetInstance().CompressAndAnswerPngImage(output, convertedAccessor);
  return 0;  // Success
}
QString ProjectResolution::fillFrameFileName() const
{
    // File name is "<name>.<format>" with the extension lower-cased.
    const QString extension = outputFormat().toLower();
    return name() + "." + extension;
}