TEST(CompressionTestNeedsUncompressedLength, Simple) {
  EXPECT_FALSE(getCodec(CodecType::NO_COMPRESSION)->needsUncompressedLength());
  EXPECT_TRUE(getCodec(CodecType::LZ4)->needsUncompressedLength());
  EXPECT_FALSE(getCodec(CodecType::SNAPPY)->needsUncompressedLength());
  EXPECT_FALSE(getCodec(CodecType::ZLIB)->needsUncompressedLength());
  EXPECT_FALSE(getCodec(CodecType::LZ4_VARINT_SIZE)->needsUncompressedLength());
  EXPECT_TRUE(getCodec(CodecType::LZMA2)->needsUncompressedLength());
  EXPECT_FALSE(getCodec(CodecType::LZMA2_VARINT_SIZE)->needsUncompressedLength());
  EXPECT_TRUE(getCodec(CodecType::ZSTD)->needsUncompressedLength());
  EXPECT_FALSE(getCodec(CodecType::GZIP)->needsUncompressedLength());
}
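// Standalone sketch (assumption: FakeCodec/StoredBlob are simplified stand-ins,
// not the Codec/getCodec API exercised by the test above). It illustrates why a
// caller must keep the original size around for codecs that report
// needsUncompressedLength(), e.g. the LZ4/LZMA2/ZSTD cases in that test.
#include <cstdint>
#include <string>

struct FakeCodec {
    bool needsLength;
    bool needsUncompressedLength() const { return needsLength; }
};

struct StoredBlob {
    std::string bytes;      // compressed payload (left as-is in this sketch)
    uint64_t originalSize;  // recorded only when the codec requires it
};

StoredBlob store(const FakeCodec& codec, const std::string& data) {
    StoredBlob blob{data, 0};
    if (codec.needsUncompressedLength()) {
        blob.originalSize = data.size(); // must be supplied again at decompression time
    }
    return blob;
}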
void matlabOpenVideo(char *filename) {
    pFormatCtx = openVideo(filename);
    videoStream = firstVideoStream(pFormatCtx);
    pCodecCtx = getCodec(pFormatCtx, videoStream);

    pFrame = avcodec_alloc_frame();
    pFrameRGB24 = avcodec_alloc_frame();
    if (!pFrameRGB24 || !pFrame) {
        mexErrMsgTxt("error: Can't allocate frame!");
    }

    buffer = (uint8_t *)av_malloc(avpicture_get_size(PIX_FMT_RGB24,
                                                     pCodecCtx->width,
                                                     pCodecCtx->height) * sizeof(uint8_t));
    avpicture_fill((AVPicture *)pFrameRGB24, buffer, PIX_FMT_RGB24,
                   pCodecCtx->width, pCodecCtx->height);

    img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
                                     pCodecCtx->width, pCodecCtx->height, PIX_FMT_RGB24,
                                     SWS_BICUBIC, NULL, NULL, NULL);

    videoFinished = 0;
    frame = 0;
    av_init_packet(&packet);
    matlabNextFrame();
}
AmAudioSimpleFormat::AmAudioSimpleFormat(int codec_id)
  : AmAudioFormat(), codec_id(codec_id)
{
  codec = getCodec();
  rate = 8000;
  channels = 1;
}
void AmAudioFileFormat::setSubtypeId(int subtype_id) {
  if (subtype != subtype_id) {
    DBG("changing file subtype to ID %d\n", subtype_id);
    destroyCodec();
    subtype = subtype_id;
    p_subtype = 0;
    codec = getCodec();
  }
}
AmAudioFormat::AmAudioFormat(int codec_id, unsigned int rate)
  : channels(1), codec_id(codec_id), rate(rate), codec(NULL),
    sdp_format_parameters_out(NULL)
{
  codec = getCodec();
}
AmAudioFileFormat::AmAudioFileFormat(const string& name, int subtype)
  : name(name), subtype(subtype), p_subtype(0)
{
  getSubtype();
  codec = getCodec();

  if (p_subtype && codec) {
    rate = p_subtype->sample_rate;
    channels = p_subtype->channels;
    subtype = p_subtype->type;
  }
}
AmAudioFileFormat::AmAudioFileFormat(const string& name, int subtype,
                                     amci_subtype_t* p_subtype)
  : name(name), subtype(subtype), p_subtype(p_subtype)
{
  codec = getCodec();

  if (p_subtype && codec) {
    rate = p_subtype->sample_rate;
    channels = p_subtype->channels;
  }

  DBG("created AmAudioFileFormat of subtype %i, with rate %u, channels %u\n",
      subtype, rate, channels);
}
/*
 * loadScript method
 * @param string    script path (supports local paths as well as remote fetches over http and https)
 * @param function  callback function
 * @return
 *   If the second argument is absent and the load fails, false is returned.
 *   If the second argument is absent and the load succeeds, true is returned.
 *   If the second argument is present, the internal function is invoked immediately
 *   and its result is returned.
 *   The loaded content is passed to the callback as arguments:
 *   @param bool err            whether the content was read successfully
 *   @param function callback   a reference to a load function
 */
QScriptValue ScriptBinding::loadScript(QScriptContext *context, QScriptEngine *interpreter) {
    QScriptValue path = context->argument(0);
    QScriptValue scriptFunc = context->argument(1);

    if (context->argumentCount() == 0)
        return QScriptValue(false);
    if (!path.isString())
        return QScriptValue(false);

    QString pathStr = path.toString().toLower().trimmed();
    QString content = "";
    bool err = false;

    // If the path is HTTP or HTTPS, try to fetch the source from the remote host
    if (pathStr.indexOf("http://") == 0 || pathStr.indexOf("https://") == 0) {
        QNetworkReply* reply;
        QNetworkAccessManager* manager = new QNetworkAccessManager();
        reply = manager->get(QNetworkRequest(QUrl(pathStr)));

        QEventLoop eventLoop;
        connect(manager, SIGNAL(finished(QNetworkReply*)), &eventLoop, SLOT(quit()));
        eventLoop.exec();

        QByteArray responseData;
        responseData = reply->readAll();

        // Sniff the byte-stream encoding from the Content-Type header;
        // default to utf-8
        QString charset = QString(reply->rawHeader("Content-Type")).toLower();
        QRegExp charsetRegExp("charset=([\\w-]+)\\b");
        int pos = charset.indexOf(charsetRegExp);
        if (pos > 0) {
            if (charsetRegExp.cap().size() < 2) {
                charset = "utf-8";
            } else {
                charset = charsetRegExp.cap(1);
            }
        } else {
            charset = "utf-8";
        }

        QTextStream stream(responseData);
        stream.setCodec(getCodec(charset));
        content = QString(stream.readAll());
    } else {
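// Usage sketch (not part of the binding above, which is truncated here): this
// assumes loadScript has been registered with the QScriptEngine under the global
// name "loadScript"; the registration name and the demo URL are illustrative
// assumptions only. It exercises the calling convention described in the comment
// block: a path plus a callback receiving (err, load-function reference).
#include <QScriptEngine>
#include <QDebug>

void demoLoadScript(QScriptEngine &engine) {
    QScriptValue result = engine.evaluate(
        "loadScript('https://example.com/util.js', function (err, load) {"
        "    return err ? 'load failed' : 'load ok';"
        "});");
    qDebug() << result.toString();
}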
void AviCodecRestrictions::openConfiguration(const std::wstring &codecName, void *winId) {
  if (codecName == L"Uncompressed")
    return;

  // find the codec
  int bpp;
  HIC hic = getCodec(codecName, bpp);
  if (!hic)
    return;

  ICConfigure(hic, winId);
  ICClose(hic);
}
bool VideoProcessor::setOutput(const std::string &filename, int codec = 0,
                               double framerate = 0.0, bool isColor = true) {
    outputFile = filename;
    // extension.clear();

    if (framerate == 0.0)
        framerate = getFrameRate(); // same as input

    char c[4];
    if (codec == 0) {
        codec = getCodec(c); // use same codec as input
    }

    // Open output video using the requested filename
    return writer.open(outputFile, codec, framerate, getFrameSize(), isColor);
}
AmAudioRtpFormat::AmAudioRtpFormat(int payload, string format_parameters)
  : AmAudioFormat(), payload(payload), amci_pl(0)
{
  sdp_format_parameters = format_parameters;
  codec = getCodec();

  amci_payload_t* pl = getPayloadP();
  if (pl && codec) {
    channels = pl->channels;
    rate = pl->sample_rate;
  } else {
    ERROR("Could not find payload <%i>\n", payload);
  }
}
bool AviCodecRestrictions::canBeConfigured(const std::wstring &codecName) {
  if (codecName == L"Uncompressed")
    return false;

  // find the codec
  int bpp;
  HIC hic = getCodec(codecName, bpp);
  if (!hic)
    return false;

  bool test = ICQueryConfigure(hic);
  ICClose(hic);
  return test;
}
bool AviCodecRestrictions::canWriteMovie(const std::wstring &codecName,
                                         const TDimension &resolution) {
  if (codecName == L"Uncompressed") {
    return true;
  }

  // find the codec
  int bpp;
  HIC hic = getCodec(codecName, bpp);
  if (!hic)
    return false;

  bool test = canWork(hic, resolution, bpp);
  ICClose(hic);
  return test;
}
bool NTNDArray::isValid() {
  int64 valueSize = getValueSize();
  int64 compressedSize = getCompressedDataSize()->get();
  if (valueSize != compressedSize)
    return false;

  long expectedUncompressed = getExpectedUncompressedSize();
  long uncompressedSize = getUncompressedDataSize()->get();
  if (uncompressedSize != expectedUncompressed)
    return false;

  std::string codecName = getCodec()->getSubField<PVString>("name")->get();
  if (codecName == "" && valueSize < uncompressedSize)
    return false;

  return true;
}
//-----------------------------------------------------------------------------------------
void GenericTextEditorDocument::closeEvent(QCloseEvent* event) {
  if (mCloseEvtAlreadyProcessed == true)
    return;

  if (isTextModified()) {
    int result = QMessageBox::information(QApplication::activeWindow(), "qtOgitor",
        "Document has been modified. Should the changes be saved?",
        QMessageBox::Yes | QMessageBox::No | QMessageBox::Cancel);

    switch (result) {
    case QMessageBox::Yes:
      save();
      break;
    case QMessageBox::No:
      break;
    case QMessageBox::Cancel:
      event->ignore();
      return;
    }
  }

  event->accept();
  getCodec()->onClose();
  mCloseEvtAlreadyProcessed = true;
}
/**
 * setOutput - set the output video file
 *
 * By default the same parameters as the input video will be used.
 *
 * @param filename  - filename prefix
 * @param codec     - the codec
 * @param framerate - frame rate
 * @param isColor   - is the video colorful
 *
 * @return True if successful. False otherwise
 */
bool VideoProcessor::setOutput(const std::string &filename, int codec,
                               double framerate, bool isColor) {
    outputFile = filename;
    extension.clear();

    if (framerate == 0.0)
        framerate = getFrameRate(); // same as input

    char c[4];
    // use same codec as input
    if (codec == 0) {
        codec = getCodec(c);
    }

    // Open output video
    return writer.open(outputFile,      // filename
                       codec,           // codec to be used
                       framerate,       // frame rate of the video
                       getFrameSize(),  // frame size
                       isColor);        // color video?
}
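// Minimal calling sketch for setOutput above; the setInput call, the run()
// processing loop and the "input.avi"/"output.avi" names are illustrative
// assumptions, not part of the original sources. Passing codec 0 and
// framerate 0.0 keeps the input video's codec and frame rate, as documented.
int main() {
    VideoProcessor processor;
    processor.setInput("input.avi");                      // assumed input API
    if (!processor.setOutput("output.avi", 0, 0.0, true)) {
        return 1;                                         // writer could not be opened
    }
    processor.run();                                      // assumed processing loop
    return 0;
}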
Codec* Codec::getCodec(char *magicNumberPtr, size_t maxbytes) {
    for (CodecList::const_iterator i = msMapCodecs.begin();
         i != msMapCodecs.end(); ++i) {
        String ext = i->second->magicNumberToFileExt(magicNumberPtr, maxbytes);
        if (!ext.empty()) {
            // check codec type matches
            // if we have a single codec class that can handle many types,
            // and register many instances of it against different types, we
            // can end up matching the wrong one here, so grab the right one
            if (ext == i->second->getType())
                return i->second;
            else
                return getCodec(ext);
        }
    }

    return 0;
}
void fmt::toPrettyString(std::ostream &o, size_t indent) const {
  o << std::string(indent, ' ') << "[" << getType() << "] ("
    << (getPayloadSize() + 8) << "b):" << std::endl;
  indent += 1;
  o << std::string(indent, ' ') << "Codec: " << getCodec() << " (" << getFormat() << ")" << std::endl;
  o << std::string(indent, ' ') << "Channels: " << getChannels() << std::endl;
  o << std::string(indent, ' ') << "Sample rate: " << getHz() << "Hz" << std::endl;
  o << std::string(indent, ' ') << "Bytes/s: " << getBPS() << std::endl;
  o << std::string(indent, ' ') << "Block size: " << getBlockSize() << " bytes" << std::endl;
  o << std::string(indent, ' ') << "Sample size: " << getSize() << " bits" << std::endl;
  if (getExtLen()) {
    o << std::string(indent, ' ') << "-- extended " << getExtLen() << "bytes --" << std::endl;
    if (getExtLen() >= 2) {
      o << std::string(indent, ' ') << "Valid bits: " << getValidBits() << std::endl;
    }
    if (getExtLen() >= 6) {
      o << std::string(indent, ' ') << "Channel mask: " << getChannelMask() << std::endl;
    }
    if (getExtLen() >= 22) {
      o << std::string(indent, ' ') << "GUID: " << getGUID() << std::endl;
    }
  }
}
long AmAudioFormat::getHCodec() {
  if (!codec)
    getCodec();
  return h_codec;
}
void AmAudioFormat::resetCodec() {
  codec = NULL;
  getCodec();
}
/******************************************************************************
 * parseArgs
 ******************************************************************************/
void parseArgs(Int argc, Char *argv[], Args *argsp)
{
    // const Char shortOptions[] = "a:s:v:y:O:kt:lfoh";
    const Char shortOptions[] = "v:y:O:kt:lfoh";
    const struct option longOptions[] = {
        {"videofile",        required_argument, NULL, 'v'},
        {"display_standard", required_argument, NULL, 'y'},
        {"display_output",   required_argument, NULL, 'O'},
        {"keyboard",         no_argument,       NULL, 'k'},
        {"time",             required_argument, NULL, 't'},
        {"loop",             no_argument,       NULL, 'l'},
        {"osd",              no_argument,       NULL, 'o'},
        {"help",             no_argument,       NULL, 'h'},
        {"exit",             no_argument,       NULL, 'e'},
        {0, 0, 0, 0}
    };

    Int   index;
    Int   c;
    Char *extension;

    for (;;) {
        c = getopt_long(argc, argv, shortOptions, longOptions, &index);

        if (c == -1) {
            break;
        }

        switch (c) {
            case 0:
                break;

            case 'v':
                extension = rindex(optarg, '.');
                if (extension == NULL) {
                    fprintf(stderr, "Video file without extension: %s\n", optarg);
                    exit(EXIT_FAILURE);
                }
                argsp->videoDecoder = getCodec(extension, engine->videoDecoders);
                if (!argsp->videoDecoder) {
                    fprintf(stderr, "Unknown video file extension: %s\n", extension);
                    exit(EXIT_FAILURE);
                }
                argsp->videoFile = optarg;
                break;

            case 'y':
                switch (atoi(optarg)) {
                    case 1:
                        argsp->videoStd = VideoStd_D1_NTSC;
                        argsp->videoStdString = "D1 NTSC";
                        break;
                    case 2:
                        argsp->videoStd = VideoStd_D1_PAL;
                        argsp->videoStdString = "D1 PAL";
                        break;
                    case 3:
                        argsp->videoStd = VideoStd_720P_60;
                        argsp->videoStdString = "720P 60Hz";
                        break;
                    case 5:
                        argsp->videoStd = VideoStd_1080I_30;
                        argsp->videoStdString = "1080I 30Hz";
                        break;
                    default:
                        fprintf(stderr, "Unknown display resolution\n");
                        usage();
                        exit(EXIT_FAILURE);
                }
                break;

            case 'O':
                if (strcmp(optarg, "component") == 0) {
                    argsp->displayOutput = Display_Output_COMPONENT;
                }
                else if (strcmp(optarg, "composite") == 0) {
                    argsp->displayOutput = Display_Output_COMPOSITE;
                }
                else {
                    fprintf(stderr, "Unknown video output: %s\n", optarg);
                    usage();
                    exit(EXIT_FAILURE);
                }
                break;

            case 'k':
                argsp->keyboard = TRUE;
                break;

            case 't':
                argsp->time = atoi(optarg);
                break;

            case 'l':
                argsp->loop = TRUE;
                break;

            case 'o':
                argsp->osd = TRUE;
                break;

            case 'h':
                usage();
                exit(EXIT_SUCCESS);

            default:
                usage();
                exit(EXIT_FAILURE);
        }
    }

    if (argsp->displayOutput == Display_Output_COUNT) {
        if ((argsp->videoStd == VideoStd_D1_NTSC) ||
            (argsp->videoStd == VideoStd_D1_PAL)) {
            argsp->displayOutput = Display_Output_COMPOSITE;
        }
        else {
            argsp->displayOutput = Display_Output_COMPONENT;
        }
    }
}
void SetUp() { codec_ = getCodec(GetParam()); }
}

/*
 * Class:     com_phono_audio_codec_g722_NativeG722Codec
 * Method:    initCodec
 * Signature: ()[B
 */
JNIEXPORT jbyteArray JNICALL Java_com_phono_audio_codec_g722_NativeG722Codec_initCodec
  (JNIEnv *env, jobject this)
{
    jbyteArray jcodec;
    struct codec *co;

    jcodec = (*env)->NewByteArray(env, sizeof(struct codec));
    co = getCodec(env, jcodec);
    g722_encode_init(&(co->encoder_st), 64000, 0);
    g722_decode_init(&(co->decoder_st), 64000, 0);
    releaseCodec(env, jcodec, co);
    return jcodec;
}

/*
 * Class:     com_phono_audio_codec_g722_NativeG722Codec
 * Method:    g722Encode
 * Signature: ([B[S[B)V
 */
JNIEXPORT void JNICALL Java_com_phono_audio_codec_g722_NativeG722Codec_g722Encode
  (JNIEnv *env, jobject this, jbyteArray jcodec, jshortArray jaudio, jbyteArray jwire)
{
/******************************************************************************
 * parseArgs
 ******************************************************************************/
static Void parseArgs(Int argc, Char *argv[], Args *argsp)
{
    const Char shortOptions[] = "s:O:v:y:r:b:xlkt:oih";
    const struct option longOptions[] = {
        {"speechfile",       required_argument, NULL, 's'},
        {"display_output",   required_argument, NULL, 'O'},
        {"videofile",        required_argument, NULL, 'v'},
        {"display_standard", required_argument, NULL, 'y'},
        {"resolution",       required_argument, NULL, 'r'},
        {"videobitrate",     required_argument, NULL, 'b'},
        {"svideo",           no_argument,       NULL, 'x'},
        {"linein",           no_argument,       NULL, 'l'},
        {"keyboard",         no_argument,       NULL, 'k'},
        {"time",             required_argument, NULL, 't'},
        {"osd",              no_argument,       NULL, 'o'},
        {"interface",        no_argument,       NULL, 'i'},
        {"help",             no_argument,       NULL, 'h'},
        {0, 0, 0, 0}
    };

    Int   index;
    Int   c;
    Char *extension;

    for (;;) {
        c = getopt_long(argc, argv, shortOptions, longOptions, &index);

        if (c == -1) {
            break;
        }

        switch (c) {
            case 0:
                break;

            case 's':
                extension = rindex(optarg, '.');
                if (extension == NULL) {
                    fprintf(stderr, "Speech file without extension: %s\n", optarg);
                    exit(EXIT_FAILURE);
                }
                argsp->speechEncoder = getCodec(extension, engine->speechEncoders);
                if (!argsp->speechEncoder) {
                    fprintf(stderr, "Unknown speech file extension: %s\n", extension);
                    exit(EXIT_FAILURE);
                }
                argsp->speechFile = optarg;
                break;

            case 'v':
                extension = rindex(optarg, '.');
                if (extension == NULL) {
                    fprintf(stderr, "Video file without extension: %s\n", optarg);
                    exit(EXIT_FAILURE);
                }
                argsp->videoEncoder = getCodec(extension, engine->videoEncoders);
                if (!argsp->videoEncoder) {
                    fprintf(stderr, "Unknown video file extension: %s\n", extension);
                    exit(EXIT_FAILURE);
                }
                argsp->videoFile = optarg;
                break;

            case 'y':
                switch (atoi(optarg)) {
                    case 1:
                        argsp->videoStd = VideoStd_D1_NTSC;
                        argsp->videoStdString = "D1 NTSC";
                        break;
                    case 2:
                        argsp->videoStd = VideoStd_D1_PAL;
                        argsp->videoStdString = "D1 PAL";
                        break;
                    case 3:
                        argsp->videoStd = VideoStd_720P_60;
                        argsp->videoStdString = "720P 60Hz";
                        break;
                    case 4:
                        argsp->videoStd = VideoStd_720P_50;
                        argsp->videoStdString = "720P 50Hz";
                        break;
                    case 5:
                        argsp->videoStd = VideoStd_1080I_30;
                        argsp->videoStdString = "1080I 30Hz";
                        break;
                    case 6:
                        argsp->videoStd = VideoStd_1080I_25;
                        argsp->videoStdString = "1080I 25Hz";
                        break;
                    default:
                        fprintf(stderr, "Unknown display resolution\n");
                        usage();
                        exit(EXIT_FAILURE);
                }
                break;

            case 'r':
            {
                Int32 imageWidth, imageHeight;

                if (sscanf(optarg, "%ldx%ld", &imageWidth, &imageHeight) != 2) {
                    fprintf(stderr, "Invalid resolution supplied (%s)\n", optarg);
                    usage();
                    exit(EXIT_FAILURE);
                }

                /* Sanity check resolution */
                if (imageWidth < 2UL || imageHeight < 2UL ||
                    imageWidth > VideoStd_1080I_WIDTH ||
                    imageHeight > VideoStd_1080I_HEIGHT) {
                    fprintf(stderr, "Video resolution must be between %dx%d "
                            "and %dx%d\n", 2, 2, VideoStd_1080I_WIDTH,
                            VideoStd_1080I_HEIGHT);
                    exit(EXIT_FAILURE);
                }

                /* Width and height must be multiple of 16 */
                argsp->imageWidth  = imageWidth & ~0xf;
                argsp->imageHeight = imageHeight & ~0xf;
                break;
            }

            case 'b':
                argsp->videoBitRate = atoi(optarg);
                break;

            case 'x':
                argsp->videoInput = Capture_Input_SVIDEO;
                break;

            case 'l':
                argsp->soundInput = Sound_Input_LINE;
                break;

            case 'k':
                argsp->keyboard = TRUE;
                break;

            case 't':
                argsp->time = atoi(optarg);
                break;

            case 'o':
                argsp->osd = TRUE;
                break;

            case 'i':
                argsp->interface = TRUE;
                break;

            case 'h':
                usage();
                exit(EXIT_SUCCESS);

            case 'O':
                if (strcmp(optarg, "component") == 0) {
                    argsp->displayOutput = Display_Output_COMPONENT;
                }
                else if (strcmp(optarg, "composite") == 0) {
                    argsp->displayOutput = Display_Output_COMPOSITE;
                }
                else if (strcmp(optarg, "lcd") == 0) {
                    argsp->displayOutput = Display_Output_LCD;
                }
                else {
                    fprintf(stderr, "Unknown video output: %s\n", optarg);
                    usage();
                    exit(EXIT_FAILURE);
                }
                break;

            default:
                usage();
                exit(EXIT_FAILURE);
        }
    }

    if (argsp->videoInput != Capture_Input_SVIDEO) {
        if (argsp->videoStd == VideoStd_D1_NTSC ||
            argsp->videoStd == VideoStd_D1_PAL) {
            argsp->videoInput = Capture_Input_COMPOSITE;
        }
        else {
            argsp->videoInput = Capture_Input_COMPONENT;
        }
    }

    /* Need at least one file to decode and only one sound file */
    if (!argsp->videoFile && !argsp->speechFile) {
        usage();
        exit(EXIT_FAILURE);
    }
}
int AmAudioRtpFormat::setCurrentPayload(int payload)
{
  if (m_currentPayload != payload) {
    std::map<int, SdpPayload *>::iterator p = m_sdpPayloadByPayload.find(payload);
    if (p == m_sdpPayloadByPayload.end()) {
      ERROR("Could not find payload <%i>\n", payload);
      return -1;
    }

    std::map<int, amci_payload_t *>::iterator pp = m_payloadPByPayload.find(payload);
    if (pp == m_payloadPByPayload.end()) {
      m_currentPayloadP = AmPlugIn::instance()->payload(p->second->int_pt);
      if (m_currentPayloadP == NULL) {
        ERROR("Could not find payload <%i>\n", payload);
        return -1;
      }
      m_payloadPByPayload[payload] = m_currentPayloadP;
    }
    else
      m_currentPayloadP = pp->second;

    m_currentPayload = payload;
    sdp_format_parameters = p->second->sdp_format_parameters;

    std::map<int, CodecContainer *>::iterator c = m_codecContainerByPayload.find(payload);
    if (c == m_codecContainerByPayload.end()) {
      codec = NULL;
      codec_name = m_currentPayloadP->codec;
      getCodec();
      if (codec) {
        CodecContainer *cc = new CodecContainer();
        cc->codec = codec;
        cc->frame_size = frame_size;
        cc->frame_length = frame_length;
        cc->frame_encoded_size = frame_encoded_size;
        cc->h_codec = h_codec;
        m_codecContainerByPayload[payload] = cc;
      }
    }
    else {
      codec = c->second->codec;
      frame_size = c->second->frame_size;
      frame_length = c->second->frame_length;
      frame_encoded_size = c->second->frame_encoded_size;
      h_codec = c->second->h_codec;
    }

    if (m_currentPayloadP && codec) {
      channels = m_currentPayloadP->channels;
      rate = m_currentPayloadP->sample_rate;
    }
    else {
      ERROR("Could not find payload <%i>\n", payload);
      return -1;
    }
  }
  return 0;
}
void SetUp() {
  auto tup = GetParam();
  uncompressedLength_ = uint64_t(1) << std::tr1::get<0>(tup);
  codec_ = getCodec(std::tr1::get<1>(tup));
}
void AviCodecRestrictions::getRestrictions(const std::wstring &codecName, QString &restrictions)
{
  restrictions.clear();
  if (codecName == L"Uncompressed") {
    restrictions = QObject::tr("No restrictions for uncompressed avi video");
    return;
  }

  // find the codec
  int bpp;
  HIC hic = getCodec(codecName, bpp);
  if (!hic) {
    restrictions = QObject::tr("It is not possible to communicate with the codec.\n Probably the codec cannot work correctly.");
    return;
  }

  BITMAPINFO bi;
  bi.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
  bi.bmiHeader.biPlanes = 1;
  bi.bmiHeader.biCompression = BI_RGB;
  bi.bmiHeader.biXPelsPerMeter = 80;
  bi.bmiHeader.biYPelsPerMeter = 72;
  bi.bmiHeader.biClrUsed = 0;
  bi.bmiHeader.biClrImportant = 0;

  int lx = 640, ly = 480;
  bi.bmiHeader.biWidth = lx;
  bi.bmiHeader.biHeight = ly;

  // Loop until we can find a width, height, and depth that works!
  int i;

  // check the x length (width)
  bi.bmiHeader.biBitCount = bpp;
  for (i = 3; i >= 0; i--) {
    bi.bmiHeader.biWidth = lx + (1 << i);
    bi.bmiHeader.biSizeImage = ((bi.bmiHeader.biWidth * bi.bmiHeader.biBitCount + 31) / 32) * 4 * ly;
    if (ICERR_OK != ICCompressQuery(hic, &bi.bmiHeader, NULL))
      break;
  }
  if (i >= 0)
    restrictions = QObject::tr("video width must be a multiple of %1").arg(QString::number(1 << (i + 1)));

  // check the y length (height)
  bi.bmiHeader.biWidth = 640;
  for (i = 3; i >= 0; i--) {
    bi.bmiHeader.biHeight = ly + (1 << i);
    bi.bmiHeader.biSizeImage = ((lx * bi.bmiHeader.biBitCount + 31) / 32) * 4 * bi.bmiHeader.biHeight;
    if (ICERR_OK != ICCompressQuery(hic, &bi.bmiHeader, NULL))
      break;
  }
  if (i >= 0)
    restrictions = restrictions + "\n" + QObject::tr("video height must be a multiple of %1").arg(QString::number(1 << (i + 1)));

  ICClose(hic);

  if (restrictions.isEmpty())
    restrictions = QObject::tr("No restrictions for this codec");
  else
    restrictions.prepend(QObject::tr("Resolution restrictions:") + "\n");
}
void SetUp() override { codec_ = getCodec(GetParam()); }
int convert(const std::string &file_in, const std::string &codec_in,
            const std::string &file_out, const std::string &codec_out)
{
    std::cout << "Convert " << codec_in << " to " << codec_out << std::endl;

    std::fstream in, out;

    in.open(file_in.c_str(), std::ios::in);
    if (!in.is_open()) {
        std::cerr << "Unable to open " << file_in << " for input"
                  << std::endl << std::flush;
        return 1;
    }

    out.open(file_out.c_str(), std::ios::out);
    if (!out.is_open()) {
        std::cerr << "Unable to open " << file_out << " for output"
                  << std::endl << std::flush;
        return 1;
    }

    std::cout << "Reading... ";

    Atlas::Message::QueuedDecoder decoder;
    Atlas::Codec *inCodec = getCodec(codec_in, in, decoder);

    while (!in.eof()) {
        inCodec->poll(true);
    }

    std::cout << "done." << std::endl;

    std::cout << "Writing... ";

    Atlas::Codec *outCodec = getCodec(codec_out, out, decoder);

    Atlas::Bridge *bridge;
    if (option_format) {
        Atlas::Formatter *format;
        bridge = format = new Atlas::Formatter(out, *outCodec);
        if (option_spacing != -1) {
            format->setSpacing(option_spacing);
        }
    } else {
        bridge = outCodec;
    }

    Atlas::Message::Encoder encoder(*bridge);

    encoder.streamBegin();
    while (decoder.queueSize() > 0) {
        Atlas::Message::MapType msg(decoder.popMessage());
        encoder.streamMessageElement(msg);
    }
    encoder.streamEnd();

    std::cout << "done." << std::endl;

    out.close();
    in.close();

    return 0;
}