Example no. 1
int main( int argc, char* argv[] ){
	QCoreApplication app( argc, argv );
	auto args = app.arguments();
	
	//Get the image path to benchmark from the command line
	if( args.size() != 2 )
		return printError( "LoadSpeedTest IMAGE_PATH" );
	
	timeLoading( args[1] );
	
	QFile data( args[1] );
	if( !data.open(QIODevice::ReadOnly) )
		return printError( "Could not find file" );
	
	QElapsedTimer t;
	t.start();
	auto buffer = data.readAll();
	qDebug() << "Time for file loading:" << t.elapsed() << "ms";
	qDebug() << "File in bytes:" << buffer.size();
	
	auto ext = getFormat(args[1]);
	const int trials = 1;
	double total = 0;
	for( int i=0; i<trials; i++ ){
		t.start();
		auto img = QImage::fromData( buffer, ext.c_str() );
		if( img.isNull() )
			return printError("Could not decode image");
		auto time = t.elapsed();
		total += time;
		qDebug() << "Trial" << i << " took " << time << "ms";
		img.save("test.png");
	}
	
	qDebug() << "Average: " << (total / trials) << "ms";
	
	return 0;
}
Example no. 2
/*!
Sets the path and the name of the file(s) to be read.

If you want to read a video file, \f$ filename \f$ corresponds to the path to the file (for example /local/video.mpeg).

If you want to read a sequence of images, \f$ filename \f$ corresponds to the path followed by the image name template. For example, to read images named image0001.jpg, image0002.jpg, ... located in the folder /local/image, \f$ filename \f$ will be "/local/image/image%04d.jpg".

\param filename : Path to a video file or file name template of an image sequence.
*/
void vpVideoReader::setFileName(const char *filename)
{
	if (!filename || *filename == '\0')
	{
		vpERROR_TRACE("filename empty ") ;
		throw (vpImageException(vpImageException::noFileNameError,"filename empty ")) ;
	}

	if (strlen( filename ) >= FILENAME_MAX) {
		throw(vpException(vpException::memoryAllocationError,
			"Not enough memory to initialize the file name"));
	}

	strcpy(this->fileName,filename);

	formatType = getFormat(fileName);

	if (formatType == FORMAT_UNKNOWN) {
		throw(vpException(vpException::badValue, "Filename extension not supported"));
	}

	initFileName = true;
}
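
A minimal usage sketch of what the comment above describes (a hypothetical snippet, not taken from the ViSP sources; it assumes the usual vpVideoReader::open()/acquire()/end() calls and uses placeholder paths):

	// Plain video file: pass the path directly.
	vpVideoReader reader;
	vpImage<unsigned char> frame;
	reader.setFileName("/local/video.mpeg");
	reader.open(frame);              // opens the stream and grabs the first frame

	// Image sequence image0001.jpg, image0002.jpg, ...: pass a printf-style name template.
	vpVideoReader seqReader;
	seqReader.setFileName("/local/image/image%04d.jpg");
	seqReader.open(frame);
	while (!seqReader.end())
		seqReader.acquire(frame);    // reads the next image of the sequence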
Example no. 3
bool VideoDecoderD3DPrivate::open()
{
    if (!prepare())
        return false;
    if (codec_ctx->codec_id == QTAV_CODEC_ID(HEVC)) {
        // runtime hevc check
        if (!isHEVCSupported()) {
            qWarning("HEVC DXVA2/D3D11VA is not supported by current FFmpeg runtime.");
            return false;
        }
    }
    if (!createDevice())
        return false;
    format_fcc = 0;
    QVector<GUID> codecs = getSupportedCodecs();
    const d3d_format_t *fmt = getFormat(codec_ctx, codecs, &codec_guid);
    if (!fmt)
        return false;
    format_fcc = fmt->fourcc;
    if (!setupSurfaceInterop())
        return false;
    return true;
}
Example no. 4
bool gkWaveform::load(const char* fname)
{
	m_reader = new utFileStream();
	static_cast<utFileStream*>(m_reader)->open(fname, utStream::SM_READ);


	if (!m_reader->isOpen())
	{
		gkPrintf("Waveform: File %s loading failed.\n", fname);
		return false;
	}

	if (!loadStreamImpl())
	{
		gkPrintf("Waveform: File %s loading failed.\n", fname);
		return false;
	}

	// make sure a valid format & block size is present
	if (getFormat() <= 0)
	{
		gkPrintf("Waveform: File %s loading failed. (invalid format read)\n", fname);
		return false;
	}


	if (getBitsPerSecond() <= 0)
	{
		gkPrintf("Waveform: File %s loading failed. (invalid block size (%i))\n", fname, getBitsPerSecond());
		return false;
	}



	m_data = new char[getBitsPerSecond() + 1];
	return true;
}
Example no. 5
 void fmt::toPrettyString(std::ostream &o, size_t indent) const{
   o << std::string(indent, ' ') << "[" << getType() << "] (" << (getPayloadSize() + 8)
     << "b):" << std::endl;
   indent += 1;
   o << std::string(indent, ' ') << "Codec: " << getCodec() << " (" << getFormat() << ")"
     << std::endl;
   o << std::string(indent, ' ') << "Channels: " << getChannels() << std::endl;
   o << std::string(indent, ' ') << "Sample rate: " << getHz() << "Hz" << std::endl;
   o << std::string(indent, ' ') << "Bytes/s: " << getBPS() << std::endl;
   o << std::string(indent, ' ') << "Block size: " << getBlockSize() << " bytes" << std::endl;
   o << std::string(indent, ' ') << "Sample size: " << getSize() << " bits" << std::endl;
   if (getExtLen()){
     o << std::string(indent, ' ') << "-- extended " << getExtLen() << "bytes --" << std::endl;
     if (getExtLen() >= 2){
       o << std::string(indent, ' ') << "Valid bits: " << getValidBits() << std::endl;
     }
     if (getExtLen() >= 6){
       o << std::string(indent, ' ') << "Channel mask: " << getChannelMask() << std::endl;
     }
     if (getExtLen() >= 22){
       o << std::string(indent, ' ') << "GUID: " << getGUID() << std::endl;
     }
   }
 }
Example no. 6
//print method
void AudioStream::print (void) {
	if (streamIsSelected())
		std::cout << "  + ";
	else
		std::cout << "  - ";

	std::cout << "Audio Stream: " << getId();
	std::cout << ", Language: " << getLangCode();
	std::cout << "-" << getLanguage();
	std::cout << ", Format: " << getFormat();
	std::cout << ", Quantization: " << getQuantization();
	std::cout << ", Frequency: " << getFrequency();
	std::cout << ", Channels: " << getChannels();
	std::cout << ", Type: " << getLangExtension();
	
	if (getSize() == 0)
	{
		std::cout << ", Size: na" << std::endl;
	}
	else
	{
		std::cout << ", Size: " << getSize() << " MB" << std::endl;
	}
}
Example no. 7
void Map::importSelected(std::istream& in)
{
    BasicContainerPtr root(new BasicContainer);

    // Instantiate the default import filter
    class MapImportFilter :
        public IMapImportFilter
    {
    private:
        scene::INodePtr _root;
    public:
        MapImportFilter(const scene::INodePtr& root) :
            _root(root)
        {}

        bool addEntity(const scene::INodePtr& entityNode)
        {
            _root->addChildNode(entityNode);
            return true;
        }

        bool addPrimitiveToEntity(const scene::INodePtr& primitive, const scene::INodePtr& entity)
        {
            if (Node_getEntity(entity)->isContainer())
            {
                entity->addChildNode(primitive);
                return true;
            }
            else
            {
                return false;
            }
        }
    } importFilter(root);

    MapFormatPtr format = getFormat();

    IMapReaderPtr reader = format->getMapReader(importFilter);

    try
    {
        // Start parsing
        reader->readFromStream(in);

        // Prepare child primitives
        addOriginToChildPrimitives(root);

        // Adjust all new names to fit into the existing map namespace,
        // this routine will be changing a lot of names in the importNamespace
        INamespacePtr nspace = getRoot()->getNamespace();
        if (nspace)
        {
            // Prepare all names, but do not import them into the namespace. This
            // will happen during the MergeMap call.
            nspace->ensureNoConflicts(root);
        }

        MergeMap(root);
    }
    catch (IMapReader::FailureException& e)
    {
        gtkutil::MessageBox::ShowError(
            (boost::format(_("Failure reading map from clipboard:\n%s")) % e.what()).str(),
            GlobalMainFrame().getTopLevelWindow());

        // Clear out the root node, otherwise we end up with half a map
        scene::NodeRemover remover;
        root->traverse(remover);
    }
}
Example no. 8
bool Texture2D::isDepth(GLenum target, GLint level) const
{
	return IsDepthTexture(getFormat(target, level));
}
Example no. 9
GLenum FramebufferAttachment::getComponentType() const
{
    return getFormat().info->componentType;
}
Example no. 10
GLuint FramebufferAttachment::getDepthSize() const
{
    return getSize().empty() ? 0 : getFormat().info->depthBits;
}
Example no. 11
void NuPlayer::HTTPLiveSource::onSessionNotify(const sp<AMessage> &msg) {
    int32_t what;
    CHECK(msg->findInt32("what", &what));

    switch (what) {
        case LiveSession::kWhatPrepared:
        {
            // notify the current size here if we have it, otherwise report an initial size of (0,0)
            sp<AMessage> format = getFormat(false /* audio */);
            int32_t width;
            int32_t height;
            if (format != NULL &&
                    format->findInt32("width", &width) && format->findInt32("height", &height)) {
                notifyVideoSizeChanged(format);
            } else {
                notifyVideoSizeChanged();
            }

#ifdef MTK_AOSP_ENHANCEMENT
            uint32_t flags = 0;
#else            
            uint32_t flags = FLAG_CAN_PAUSE;
#endif
            if (mLiveSession->isSeekable()) {
#ifdef MTK_AOSP_ENHANCEMENT
                flags |= FLAG_CAN_PAUSE;
#endif                
                flags |= FLAG_CAN_SEEK;
                flags |= FLAG_CAN_SEEK_BACKWARD;
                flags |= FLAG_CAN_SEEK_FORWARD;
            }

            if (mLiveSession->hasDynamicDuration()) {
                flags |= FLAG_DYNAMIC_DURATION;
            }

            notifyFlagsChanged(flags);

            notifyPrepared();
            break;
        }

        case LiveSession::kWhatPreparationFailed:
        {
            status_t err;
            CHECK(msg->findInt32("err", &err));

            notifyPrepared(err);
            break;
        }

        case LiveSession::kWhatStreamsChanged:
        {
            uint32_t changedMask;
            CHECK(msg->findInt32(
                        "changedMask", (int32_t *)&changedMask));

            bool audio = changedMask & LiveSession::STREAMTYPE_AUDIO;
            bool video = changedMask & LiveSession::STREAMTYPE_VIDEO;
#ifdef MTK_AOSP_ENHANCEMENT
            ALOGI("receive LiveSession::kWhatStreamsChanged,queue Decoder Shutdown for %s,%s",
                  audio ? "audio" : "", video ? "video" : "");
#endif
            sp<AMessage> reply;
            CHECK(msg->findMessage("reply", &reply));

            sp<AMessage> notify = dupNotify();
            notify->setInt32("what", kWhatQueueDecoderShutdown);
            notify->setInt32("audio", audio);
            notify->setInt32("video", video);
            notify->setMessage("reply", reply);
            notify->post();
            break;
        }

        case LiveSession::kWhatError:
        {
            break;
        }
#ifdef MTK_AOSP_ENHANCEMENT
        case LiveSession::kWhatPicture:
        case LiveSession::kWhatBufferingStart:
        case LiveSession::kWhatBufferingEnd:
        {
            onSessionNotify_l(msg);
            break;
        }
#endif
        default:
            TRESPASS();
    }
}
Example no. 12
void AVInputFile::configure(void *ffrapper_) {
	trace("begin configure(void*)");
	/*if (configured) {
		return;
	}*/
	configured = true;


	if (container != AVContainer::NONE) {
		string format = getFormat();
		if (container == AVContainer::MKV) {
			format = "matroska\0";
		} else if (container == AVContainer::MPEG) {
			format = "mpeg\0";
		} else if (container == AVContainer::MOV) {
			format = "mov\0";
		} else if (container == AVContainer::OGA) {
			format = "ogg\0";
		} else if (container == AVContainer::MKA) {
			format = "matroska\0";
		} else if (container == AVContainer::FLA) {
			format = "flac\0";
		}

		if (FFMpeg_setFormat( (char*) format.c_str()) != FFMpeg_SUCCESS) {
			error("Error trying to set the format.");
			throw IllegalParameterException(
					FFMpeg_getErrorStr(),
					"br::ufscar::lince::avencoding::AVInputFile",
					"configure(void*)");
		}
	} else {
		info("Trying to get the format by the file name");
	}

	if (FFMpeg_setInputFile( (char*) getFilename().c_str())
			!= FFMpeg_SUCCESS) {

		error("Error trying to set the input file name.");
		throw IllegalParameterException(
				FFMpeg_getErrorStr(),
				"br::ufscar::lince::avencoding::AVInputFile",
				"configure(void*)");
	}

	if (startTime != -1) {
		info("Setting StartTime");
		if (FFMpeg_setStartTime1((char*) Functions::numberToString(startTime).c_str()) != FFMpeg_SUCCESS) {

			error("Error trying to set the input file start point.");
			throw IllegalParameterException(
					FFMpeg_getErrorStr(),
					"br::ufscar::lince::avencoding::AVInputFile",
					"configure(void*)");

		}
	}

	if (durationTime != -1) {
		info("Setting DurationTime");
		if (FFMpeg_setRecordingTime1((char*) Functions::numberToString(durationTime).c_str()) != FFMpeg_SUCCESS) {
			error("Error trying to set the input file duration.");
			throw IllegalParameterException(
					FFMpeg_getErrorStr(),
					"br::ufscar::lince::avencoding::AVInputFile",
					"configure(void*)");
		}
	}
}
Example no. 13
 void Image::setColourAt(ColourValue const &cv, size_t x, size_t y, size_t z)
 {
     unsigned char pixelSize = PixelUtil::getNumElemBytes(getFormat());
     PixelUtil::packColour(cv, getFormat(), &((unsigned char *)getData())[pixelSize * (z * getWidth() * getHeight() + y * getWidth() + x)]);
 }
Example no. 14
bool Image::isRGB() const noexcept                      { return getFormat() == RGB; }
void
GeoMultiProperty::getGenericValue(MaxTypeT &eval, const SizeT index ) const
{
          UInt16  dim    = getDimension();
          bool    norm   = getNormalize();
          UInt32  stride = getStride() ? getStride() : getFormatSize() * dim;
    const UInt8  *data   = getData() + stride * index;

#define getValNormCase(vectype)                                 \
{                                                               \
vectype ival(vectype::Null);                                    \
for (UInt16 i = 0; i < dim; ++i)                                \
    ival[i] =                                                   \
        reinterpret_cast<const vectype::ValueType*>(data)[i];   \
                                                                \
if(norm)                                                        \
{                                                               \
    GeoConvertNormalize::convertOut(eval, ival,              \
        TypeTraits<vectype::ValueType>::getMax(), 0);           \
}                                                               \
else                                                            \
{                                                               \
    GeoConvert::convertOut(eval, ival);                      \
}                                                               \
}

#define getValCase(vectype)                                     \
{                                                               \
vectype ival(vectype::Null);                                    \
for (UInt16 i = 0; i < dim; ++i)                                \
    ival[i] =                                                   \
        reinterpret_cast<const vectype::ValueType*>(data)[i];   \
                                                                \
GeoConvert::convertOut(eval, ival);                          \
}
    switch(getFormat())
    {
        case GL_BYTE:                   getValNormCase(Vec4b );
            break;
        case GL_UNSIGNED_BYTE:          getValNormCase(Vec4ub);
            break;
        case GL_SHORT:                  getValNormCase(Vec4s );
            break;
        case GL_UNSIGNED_SHORT:         getValNormCase(Vec4us);
            break;
/*    case GL_INT:                    getValNormCase(Vec4i );
                                    break;
    case GL_UNSIGNED_INT:           getValNormCase(Vec4ui);
                                    break;
 */    
        case GL_FLOAT:                  getValCase    (Vec4f );
            break;
#ifndef OSG_OGL_NO_DOUBLE
        case GL_DOUBLE:                 getValCase    (Vec4d );
            break;
#endif
    }

#undef getValNormCase
#undef getValCase
}
void GeoMultiProperty::activate(DrawEnv *pEnv, 
                                UInt32   slot )
{
    Window *win = pEnv->getWindow();
    bool isGeneric = (slot >= 16);  // !!!HACK. needs to be replaced for 2.0
    slot &= 15;
    
    if(!win->hasExtOrVersion(_extVertexBufferObject, 0x0105, 0x0200))
    {
        FWARNING(("GeoMultiProperty::activate: Window %p doesn't "
                  "support VBOs!\n", win));
        return;
    }
    
    win->validateGLObject(getContainer()->getGLId(), pEnv);

     // get "glBindBufferARB" function pointer

    OSGGETGLFUNCBYID_GL3_ES( glBindBuffer, 
                             osgGlBindBuffer,
                            _funcBindBuffer, 
                             win);
   
    osgGlBindBuffer(GL_ARRAY_BUFFER_ARB, 
                    win->getGLObjectId(getContainer()->getGLId()));

#define BUFFER_OFFSET(i)     (static_cast<char *>(NULL) + (i))

    if(isGeneric)
    {
        OSGGETGLFUNCBYID_GL3_ES( glVertexAttribPointer, 
                                 osgGlVertexAttribPointer,
                                _funcglVertexAttribPointerARB,
                                 win);

        osgGlVertexAttribPointer(slot, 
                                 getDimension(), 
                                 getFormat(), 
                                 getNormalize(),
                                 getStride(), 
                                 BUFFER_OFFSET(getOffset()));

        OSGGETGLFUNCBYID_GL3_ES( glEnableVertexAttribArray,
                                 osgGlEnableVertexAttribArray,
                                _funcglEnableVertexAttribArrayARB,
                                 win);
 
        osgGlEnableVertexAttribArray(slot);
    }
    else
    {
#if !defined(OSG_OGL_COREONLY) || defined(OSG_CHECK_COREONLY)
        switch(slot)
        {
            case 0:     
                glVertexPointer(getDimension(), getFormat(),
                                getStride(), BUFFER_OFFSET(getOffset()));
                glEnableClientState(GL_VERTEX_ARRAY);
                break;
            case 2:     
                glNormalPointer(getFormat(),
                                getStride(), BUFFER_OFFSET(getOffset()));
                glEnableClientState(GL_NORMAL_ARRAY);
                break;
            case 3:     
                glColorPointer(getDimension(), getFormat(),
                               getStride(), BUFFER_OFFSET(getOffset()));
                glEnableClientState(GL_COLOR_ARRAY);
                break;
            case 4:     
                if (win->hasExtOrVersion(_extSecondaryColor, 0x0104))
                {
                    OSGGETGLFUNCBYID_EXT( glSecondaryColorPointer,
                                          osgGlSecondaryColorPointer,
                                         _funcglSecondaryColorPointer,
                                          win);

                    osgGlSecondaryColorPointer(getDimension(),
                                               getFormat(),
                                               getStride(), 
                                               BUFFER_OFFSET(getOffset()));

                    glEnableClientState(GL_SECONDARY_COLOR_ARRAY_EXT);
                }
                else
                {
                    FWARNING(("GeoVectorProperty::activate: Window "
                              "has no Secondary Color extension\n"));
                }
                break;
            case 8:  case 9: 
            case 10: case 11: 
            case 12: case 13: 
            case 14: case 15: 
            {
                OSGGETGLFUNCBYID_GL3_ES( glClientActiveTexture,
                                         osgGlClientActiveTexture,
                                        _funcglClientActiveTextureARB,
                                         win);

                osgGlClientActiveTexture(GL_TEXTURE0_ARB + slot - 8);

                glTexCoordPointer(getDimension(), 
                                  getFormat(),
                                  getStride(), 
                                  BUFFER_OFFSET(getOffset()));

                glEnableClientState(GL_TEXTURE_COORD_ARRAY);
            }
            break;
            default:    FWARNING(("GeoVectorProperty::activate: Non-Generic"
                                  " attribute nr. %d unknown!\n", slot));
                break;
        }     
#endif
    } // isGeneric

    osgGlBindBuffer(GL_ARRAY_BUFFER_ARB, 0);
}
 double FFStream::getPositionImpl() const throw()
 {
     return double(ffData->sampleBufferStart) / getFormat().sampleFrequency;
 }
 double FFStream::getLengthImpl() const throw(Exception)
 {
     return double(ffData->streamSize) / getFormat().sampleFrequency;
 }
Example no. 19
GLenum FramebufferAttachment::getColorEncoding() const
{
    return getFormat().info->colorEncoding;
}
Example no. 20
Image::Format Image::format() const {	
	return getFormat(array().components());
}
INT_PTR CALLBACK ColumnEditorDlg::run_dlgProc(UINT message, WPARAM wParam, LPARAM)
{
	switch (message) 
	{
		case WM_INITDIALOG :
		{
			switchTo(activeText);
			::SendDlgItemMessage(_hSelf, IDC_COL_DEC_RADIO, BM_SETCHECK, TRUE, 0);
			goToCenter();

			NppParameters *pNppParam = NppParameters::getInstance();
			ETDTProc enableDlgTheme = (ETDTProc)pNppParam->getEnableThemeDlgTexture();
			if (enableDlgTheme)
			{
				enableDlgTheme(_hSelf, ETDT_ENABLETAB);
				redraw();
			}
			return TRUE;
		}
		case WM_COMMAND : 
		{
			switch (wParam)
			{
				case IDCANCEL : // Close
					display(false);
					return TRUE;

				case IDOK :
                {
					(*_ppEditView)->execute(SCI_BEGINUNDOACTION);
					
					const int stringSize = 1024;
					TCHAR str[stringSize];
					
					bool isTextMode = (BST_CHECKED == ::SendDlgItemMessage(_hSelf, IDC_COL_TEXT_RADIO, BM_GETCHECK, 0, 0));
					
					if (isTextMode)
					{
						::SendDlgItemMessage(_hSelf, IDC_COL_TEXT_EDIT, WM_GETTEXT, stringSize, (LPARAM)str);

						display(false);
						
						if ((*_ppEditView)->execute(SCI_SELECTIONISRECTANGLE) || (*_ppEditView)->execute(SCI_GETSELECTIONS) > 1)
						{
							ColumnModeInfos colInfos = (*_ppEditView)->getColumnModeSelectInfo();
							std::sort(colInfos.begin(), colInfos.end(), SortInPositionOrder());
							(*_ppEditView)->columnReplace(colInfos, str);
							std::sort(colInfos.begin(), colInfos.end(), SortInSelectOrder());
							(*_ppEditView)->setMultiSelections(colInfos);
						}
						else
						{
							int cursorPos = (*_ppEditView)->execute(SCI_GETCURRENTPOS);
							int cursorCol = (*_ppEditView)->execute(SCI_GETCOLUMN, cursorPos);
							int cursorLine = (*_ppEditView)->execute(SCI_LINEFROMPOSITION, cursorPos);
							int endPos = (*_ppEditView)->execute(SCI_GETLENGTH);
							int endLine = (*_ppEditView)->execute(SCI_LINEFROMPOSITION, endPos);

							int lineAllocatedLen = 1024;
							TCHAR *line = new TCHAR[lineAllocatedLen];

							for (int i = cursorLine ; i <= endLine ; ++i)
							{
								int lineBegin = (*_ppEditView)->execute(SCI_POSITIONFROMLINE, i);
								int lineEnd = (*_ppEditView)->execute(SCI_GETLINEENDPOSITION, i);

								int lineEndCol = (*_ppEditView)->execute(SCI_GETCOLUMN, lineEnd);
								int lineLen = lineEnd - lineBegin + 1;

								if (lineLen > lineAllocatedLen)
								{
									delete [] line;
									line = new TCHAR[lineLen];
								}
								(*_ppEditView)->getGenericText(line, lineLen, lineBegin, lineEnd);
								generic_string s2r(line);

								if (lineEndCol < cursorCol)
								{
									generic_string s_space(cursorCol - lineEndCol, ' ');
									s2r.append(s_space);
									s2r.append(str);
								}
								else
								{
									int posAbs2Start = (*_ppEditView)->execute(SCI_FINDCOLUMN, i, cursorCol);
									int posRelative2Start = posAbs2Start - lineBegin;
									s2r.insert(posRelative2Start, str);
								}
								(*_ppEditView)->replaceTarget(s2r.c_str(), lineBegin, lineEnd);
							}
							delete [] line;
						}
					}
					else
					{
						int initialNumber = ::GetDlgItemInt(_hSelf, IDC_COL_INITNUM_EDIT, NULL, TRUE);
						int increaseNumber = ::GetDlgItemInt(_hSelf, IDC_COL_INCREASENUM_EDIT, NULL, TRUE);
						int repeat = ::GetDlgItemInt(_hSelf, IDC_COL_REPEATNUM_EDIT, NULL, TRUE);
						if (repeat == 0)
						{
							repeat = 1; // Without this we might get an infinite loop while calculating the set "numbers" below.
						}
						UCHAR format = getFormat();
						display(false);
						
						if ((*_ppEditView)->execute(SCI_SELECTIONISRECTANGLE) || (*_ppEditView)->execute(SCI_GETSELECTIONS) > 1)
						{
							ColumnModeInfos colInfos = (*_ppEditView)->getColumnModeSelectInfo();
							std::sort(colInfos.begin(), colInfos.end(), SortInPositionOrder());
							(*_ppEditView)->columnReplace(colInfos, initialNumber, increaseNumber, repeat, format);
							std::sort(colInfos.begin(), colInfos.end(), SortInSelectOrder());
							(*_ppEditView)->setMultiSelections(colInfos);
						}
						else
						{
							int cursorPos = (*_ppEditView)->execute(SCI_GETCURRENTPOS);
							int cursorCol = (*_ppEditView)->execute(SCI_GETCOLUMN, cursorPos);
							int cursorLine = (*_ppEditView)->execute(SCI_LINEFROMPOSITION, cursorPos);
							int endPos = (*_ppEditView)->execute(SCI_GETLENGTH);
							int endLine = (*_ppEditView)->execute(SCI_LINEFROMPOSITION, endPos);

							// Compute the numbers to be placed at each column.
							std::vector<int> numbers;
							{
								int curNumber = initialNumber;
								const unsigned int kiMaxSize = 1 + (unsigned int)endLine - (unsigned int)cursorLine;
								while (numbers.size() < kiMaxSize)
								{
									for (int i = 0; i < repeat; i++)
									{
										numbers.push_back(curNumber);
										if (numbers.size() >= kiMaxSize)
										{
											break;
										}
									}
									curNumber += increaseNumber;
								}
							}
							assert(numbers.size() > 0);

							int lineAllocatedLen = 1024;
							TCHAR *line = new TCHAR[lineAllocatedLen];


							UCHAR f = format & MASK_FORMAT;
							bool isZeroLeading = (MASK_ZERO_LEADING & format) != 0;
							
							int base = 10;
							if (f == BASE_16)
								base = 16;
							else if (f == BASE_08)
								base = 8;
							else if (f == BASE_02)
								base = 2;

							int endNumber = *numbers.rbegin();
							int nbEnd = getNbDigits(endNumber, base);
							int nbInit = getNbDigits(initialNumber, base);
							int nb = max(nbInit, nbEnd);


							for (int i = cursorLine ; i <= endLine ; ++i)
							{
								int lineBegin = (*_ppEditView)->execute(SCI_POSITIONFROMLINE, i);
								int lineEnd = (*_ppEditView)->execute(SCI_GETLINEENDPOSITION, i);

								int lineEndCol = (*_ppEditView)->execute(SCI_GETCOLUMN, lineEnd);
								int lineLen = lineEnd - lineBegin + 1;

								if (lineLen > lineAllocatedLen)
								{
									delete [] line;
									line = new TCHAR[lineLen];
								}
								(*_ppEditView)->getGenericText(line, lineLen, lineBegin, lineEnd);
								generic_string s2r(line);

								//
								// Compute the generic_string to insert
								//
								int2str(str, stringSize, numbers.at(i - cursorLine), base, nb, isZeroLeading);

								if (lineEndCol < cursorCol)
								{
									generic_string s_space(cursorCol - lineEndCol, ' ');
									s2r.append(s_space);
									s2r.append(str);
								}
								else
								{
									int posAbs2Start = (*_ppEditView)->execute(SCI_FINDCOLUMN, i, cursorCol);
									int posRelative2Start = posAbs2Start - lineBegin;
									s2r.insert(posRelative2Start, str);
								}

								(*_ppEditView)->replaceTarget(s2r.c_str(), lineBegin, lineEnd);
							}
							delete [] line;
						}
					}
					(*_ppEditView)->execute(SCI_ENDUNDOACTION);
                    (*_ppEditView)->getFocus();
                    return TRUE;
                }
				case IDC_COL_TEXT_RADIO :
				case IDC_COL_NUM_RADIO :
				{
					switchTo((wParam == IDC_COL_TEXT_RADIO)? activeText : activeNumeric);
					return TRUE;
				}

				default :
				{
					switch (HIWORD(wParam))
					{
						case EN_SETFOCUS :
						case BN_SETFOCUS :
							//updateLinesNumbers();
							return TRUE;
						default :
							return TRUE;
					}
					break;
				}
			}
		}

		default :
			return FALSE;
	}
	//return FALSE;
}
Example no. 22
bool Image::isSingleChannel() const noexcept            { return getFormat() == SingleChannel; }
void GL3PlusFrameBufferObject::initialise()
{
    // Release depth and stencil, if they were bound
    mManager->releaseRenderBuffer(mDepth);
    mManager->releaseRenderBuffer(mStencil);
    mManager->releaseRenderBuffer(mMultisampleColourBuffer);
    // First buffer must be bound
    if(!mColour[0].buffer)
    {
        OGRE_EXCEPT(Exception::ERR_INVALIDPARAMS,
                    "Attachment 0 must have surface attached",
                    "GL3PlusFrameBufferObject::initialise");
    }

    // If we're doing multisampling, then we need another FBO which contains a
    // renderbuffer which is set up to multisample, and we'll blit it to the final
    // FBO afterwards to perform the multisample resolve. In that case, the
    // mMultisampleFB is bound during rendering and is the one with a depth/stencil

    // Store basic stats
    size_t width = mColour[0].buffer->getWidth();
    size_t height = mColour[0].buffer->getHeight();
    GLuint format = mColour[0].buffer->getGLFormat();
    ushort maxSupportedMRTs = Root::getSingleton().getRenderSystem()->getCapabilities()->getNumMultiRenderTargets();

    // Bind simple buffer to add colour attachments
    OGRE_CHECK_GL_ERROR(glBindFramebuffer(GL_FRAMEBUFFER, mFB));

    // Bind all attachment points to frame buffer
    for(size_t x=0; x<maxSupportedMRTs; ++x)
    {
        if(mColour[x].buffer)
        {
            if(mColour[x].buffer->getWidth() != width || mColour[x].buffer->getHeight() != height)
            {
                StringStream ss;
                ss << "Attachment " << x << " has incompatible size ";
                ss << mColour[x].buffer->getWidth() << "x" << mColour[x].buffer->getHeight();
                ss << ". It must be of the same as the size of surface 0, ";
                ss << width << "x" << height;
                ss << ".";
                OGRE_EXCEPT(Exception::ERR_INVALIDPARAMS, ss.str(), "GL3PlusFrameBufferObject::initialise");
            }
            if(mColour[x].buffer->getGLFormat() != format)
            {
                StringStream ss;
                ss << "Attachment " << x << " has incompatible format.";
                OGRE_EXCEPT(Exception::ERR_INVALIDPARAMS, ss.str(), "GL3PlusFrameBufferObject::initialise");
            }
            if(getFormat() == PF_DEPTH)
                mColour[x].buffer->bindToFramebuffer(GL_DEPTH_ATTACHMENT, mColour[x].zoffset);
            else
                mColour[x].buffer->bindToFramebuffer(GL_COLOR_ATTACHMENT0+x, mColour[x].zoffset);
        }
        else
        {
            // Detach
            if(getFormat() == PF_DEPTH)
            {
                OGRE_CHECK_GL_ERROR(glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, 0));
            }
            else
            {
                OGRE_CHECK_GL_ERROR(glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0+x, GL_RENDERBUFFER, 0));
            }
        }
    }

    // Now deal with depth / stencil
    if (mMultisampleFB)
    {
        // Bind multisample buffer
        OGRE_CHECK_GL_ERROR(glBindFramebuffer(GL_FRAMEBUFFER, mMultisampleFB));

        // Create AA render buffer (colour)
        // note, this can be shared too because we blit it to the final FBO
        // right after the render is finished
        mMultisampleColourBuffer = mManager->requestRenderBuffer(format, width, height, mNumSamples);

        // Attach it, because we won't be attaching below and non-multisample has
        // actually been attached to other FBO
        mMultisampleColourBuffer.buffer->bindToFramebuffer(GL_COLOR_ATTACHMENT0,
                mMultisampleColourBuffer.zoffset);

        // depth & stencil will be dealt with below
    }

    // Depth buffer is not handled here anymore.
    // See GL3PlusFrameBufferObject::attachDepthBuffer() & RenderSystem::setDepthBufferFor()

    // Do glDrawBuffer calls
    GLenum bufs[OGRE_MAX_MULTIPLE_RENDER_TARGETS];
    GLsizei n=0;
    for(size_t x=0; x<maxSupportedMRTs; ++x)
    {
        // Fill attached colour buffers
        if(mColour[x].buffer)
        {
            if(getFormat() == PF_DEPTH)
                bufs[x] = GL_DEPTH_ATTACHMENT;
            else
                bufs[x] = GL_COLOR_ATTACHMENT0 + x;
            // Keep highest used buffer + 1
            n = x+1;
        }
        else
        {
            bufs[x] = GL_NONE;
        }
    }

    // Drawbuffer extension supported, use it
    OGRE_CHECK_GL_ERROR(glDrawBuffers(n, bufs));

    if (mMultisampleFB)
    {
        // we need a read buffer because we'll be blitting to mFB
        OGRE_CHECK_GL_ERROR(glReadBuffer(bufs[0]));
    }
    else
    {
        // No read buffer, by default, if we want to read anyway we must not forget to set this.
        OGRE_CHECK_GL_ERROR(glReadBuffer(GL_NONE));
    }

    // Check status
    GLuint status;
    OGRE_CHECK_GL_ERROR(status = glCheckFramebufferStatus(GL_FRAMEBUFFER));

    // Bind main buffer
    OGRE_CHECK_GL_ERROR(glBindFramebuffer(GL_FRAMEBUFFER, 0));

    switch(status)
    {
    case GL_FRAMEBUFFER_COMPLETE:
        // All is good
        break;
    case GL_FRAMEBUFFER_UNSUPPORTED:
        OGRE_EXCEPT(Exception::ERR_INVALIDPARAMS,
                    "All framebuffer formats with this texture internal format unsupported",
                    "GL3PlusFrameBufferObject::initialise");
    default:
        OGRE_EXCEPT(Exception::ERR_INVALIDPARAMS,
                    "Framebuffer incomplete or other FBO status error",
                    "GL3PlusFrameBufferObject::initialise");
    }

}
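
The block comment near the top of initialise() mentions blitting the multisample FBO to the final FBO to perform the resolve; that step happens elsewhere, but as a generic OpenGL sketch (not OGRE code; FBO ids and sizes are placeholders) it amounts to:

    // Resolve the multisampled framebuffer into the single-sample one.
    glBindFramebuffer(GL_READ_FRAMEBUFFER, multisampleFB);  // source: FBO with the MSAA renderbuffer
    glBindFramebuffer(GL_DRAW_FRAMEBUFFER, finalFB);        // destination: FBO with the texture attachment
    glBlitFramebuffer(0, 0, width, height,                  // source rectangle
                      0, 0, width, height,                  // destination rectangle
                      GL_COLOR_BUFFER_BIT, GL_NEAREST);     // colour only; 1:1 blit needs no filtering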
Example no. 24
bool Image::hasAlphaChannel() const noexcept            { return getFormat() != RGB; }
PixelBufferARGBRef ArchivItem_BitmapBase::getBufferARGB() const
{
    if(getFormat() != FORMAT_BGRA)
        throw std::logic_error("Image not BGRA");
    return PixelBufferARGBRef(reinterpret_cast<uint32_t*>(const_cast<uint8_t*>(pxlData_.data())), width_, height_);
}
void ApexVertexBuffer::applyPermutation(const Array<uint32_t>& permutation)
{
	const uint32_t numVertices = mParams->vertexCount;
	PX_ASSERT(numVertices == permutation.size());
	for (uint32_t i = 0; i < (uint32_t)mParams->buffers.arraySizes[0]; i++)
	{
		NvParameterized::Interface* bufferInterface = mParams->buffers.buf[i];
		RenderDataFormat::Enum format = getFormat().getBufferFormat(i);
		switch(format)
		{
			// all 1 byte
		case RenderDataFormat::UBYTE1:
		case RenderDataFormat::BYTE_UNORM1:
		case RenderDataFormat::BYTE_SNORM1:
			{
				BufferU8x1* byte1 = static_cast<BufferU8x1*>(bufferInterface);
				PX_ASSERT(numVertices == (uint32_t)byte1->data.arraySizes[0]);
				ApexPermute(byte1->data.buf, permutation.begin(), numVertices);
			}
			break;

			// all 2 byte
		case RenderDataFormat::UBYTE2:
		case RenderDataFormat::USHORT1:
		case RenderDataFormat::SHORT1:
		case RenderDataFormat::BYTE_UNORM2:
		case RenderDataFormat::SHORT_UNORM1:
		case RenderDataFormat::BYTE_SNORM2:
		case RenderDataFormat::SHORT_SNORM1:
		case RenderDataFormat::HALF1:
			{
				BufferU16x1* short1 = static_cast<BufferU16x1*>(bufferInterface);
				PX_ASSERT(numVertices == (uint32_t)short1->data.arraySizes[0]);
				ApexPermute(short1->data.buf, permutation.begin(), numVertices);
			}
			break;

			// all 3 byte
		case RenderDataFormat::UBYTE3:
		case RenderDataFormat::BYTE_UNORM3:
		case RenderDataFormat::BYTE_SNORM3:
			{
				BufferU8x3* byte3 = static_cast<BufferU8x3*>(bufferInterface);
				PX_ASSERT(numVertices == (uint32_t)byte3->data.arraySizes[0]);
				ApexPermute(byte3->data.buf, permutation.begin(), numVertices);
			}
			break;

			// all 4 byte
		case RenderDataFormat::UBYTE4:
		case RenderDataFormat::USHORT2:
		case RenderDataFormat::SHORT2:
		case RenderDataFormat::UINT1:
		case RenderDataFormat::R8G8B8A8:
		case RenderDataFormat::B8G8R8A8:
		case RenderDataFormat::BYTE_UNORM4:
		case RenderDataFormat::SHORT_UNORM2:
		case RenderDataFormat::BYTE_SNORM4:
		case RenderDataFormat::SHORT_SNORM2:
		case RenderDataFormat::HALF2:
		case RenderDataFormat::FLOAT1:
		case RenderDataFormat::BYTE_SNORM4_QUATXYZW:
		case RenderDataFormat::SHORT_SNORM4_QUATXYZW:
			{
				BufferU32x1* int1 = static_cast<BufferU32x1*>(bufferInterface);
				PX_ASSERT(numVertices == (uint32_t)int1->data.arraySizes[0]);
				ApexPermute(int1->data.buf, permutation.begin(), numVertices);
			}
			break;

			// all 6 byte
		case RenderDataFormat::USHORT3:
		case RenderDataFormat::SHORT3:
		case RenderDataFormat::SHORT_UNORM3:
		case RenderDataFormat::SHORT_SNORM3:
		case RenderDataFormat::HALF3:
			{
				BufferU16x3* short3 = static_cast<BufferU16x3*>(bufferInterface);
				PX_ASSERT(numVertices == (uint32_t)short3->data.arraySizes[0]);
				ApexPermute(short3->data.buf, permutation.begin(), numVertices);
			}
			break;

			// all 8 byte
		case RenderDataFormat::USHORT4:
		case RenderDataFormat::SHORT4:
		case RenderDataFormat::SHORT_UNORM4:
		case RenderDataFormat::SHORT_SNORM4:
		case RenderDataFormat::UINT2:
		case RenderDataFormat::HALF4:
		case RenderDataFormat::FLOAT2:
			{
				BufferU32x2* int2 = static_cast<BufferU32x2*>(bufferInterface);
				PX_ASSERT(numVertices == (uint32_t)int2->data.arraySizes[0]);
				ApexPermute(int2->data.buf, permutation.begin(), numVertices);
			}
			break;

			// all 12 byte
		case RenderDataFormat::UINT3:
		case RenderDataFormat::FLOAT3:
			{
				BufferU32x3* int3 = static_cast<BufferU32x3*>(bufferInterface);
				PX_ASSERT(numVertices == (uint32_t)int3->data.arraySizes[0]);
				ApexPermute(int3->data.buf, permutation.begin(), numVertices);
			}
			break;

			// all 16 byte
		case RenderDataFormat::UINT4:
		case RenderDataFormat::R32G32B32A32_FLOAT:
		case RenderDataFormat::B32G32R32A32_FLOAT:
		case RenderDataFormat::FLOAT4:
		case RenderDataFormat::FLOAT4_QUAT:
			{
				BufferU32x4* int4 = static_cast<BufferU32x4*>(bufferInterface);
				PX_ASSERT(numVertices == (uint32_t)int4->data.arraySizes[0]);
				ApexPermute(int4->data.buf, permutation.begin(), numVertices);
			}
			break;

			// all 36 byte
		case RenderDataFormat::FLOAT3x3:
			{
				BufferF32x1* float1 = static_cast<BufferF32x1*>(bufferInterface);
				PX_ASSERT(numVertices == (uint32_t)float1->data.arraySizes[0]);
				ApexPermute((PxMat33*)float1->data.buf, permutation.begin(), numVertices);
			}
			break;

			// all 48 byte
		case RenderDataFormat::FLOAT3x4:
			{
				BufferF32x1* float1 = static_cast<BufferF32x1*>(bufferInterface);
				PX_ASSERT(numVertices == (uint32_t)float1->data.arraySizes[0]);
				ApexPermute((PxMat34Legacy*)float1->data.buf, permutation.begin(), numVertices);
				
			}
			break;

			// all 64 byte
		case RenderDataFormat::FLOAT4x4:
			{
				BufferF32x1* float1 = static_cast<BufferF32x1*>(bufferInterface);
				PX_ASSERT(numVertices == (uint32_t)float1->data.arraySizes[0]);
				ApexPermute((PxMat44*)float1->data.buf, permutation.begin(), numVertices);
			}
			break;

		// fix gcc warnings
		case RenderDataFormat::UNSPECIFIED:
		case RenderDataFormat::NUM_FORMATS:
			break;
		}
	}
}
void GeoVectorBufferRefProperty::activate(DrawEnv *pEnv, UInt32 slot)
{
#ifndef OSG_EMBEDDED
    Window *win = pEnv->getWindow();

    bool isGeneric = (slot >= 16);  // !!!HACK. needs to be replaced for 2.0
    slot &= 15;

    bool hasVBO = win->hasExtension(_extVertexBufferObject);

    if(hasVBO && isGeneric == true)
    {
        OSGGETGLFUNCBYID( OSGglVertexAttribPointerARB, 
                          osgGlVertexAttribPointerARB,
                         _funcglVertexAttribPointerARB,
                          win);

        if(getGLId() != 0 && getUseVBO()) // Do we have a VBO?
        {
            OSGGETGLFUNCBYID( OSGglBindBufferARB, 
                              osgGlBindBufferARB,
                             _funcBindBuffer, 
                              win);
            
            osgGlBindBufferARB(GL_ARRAY_BUFFER_ARB,
                               getGLId());
            
            osgGlVertexAttribPointerARB(slot, 
                                        getDimension(),
                                        getFormat   (),
                                        getNormalize(),
                                        getStride   (), 
                                        0);
            
            osgGlBindBufferARB(GL_ARRAY_BUFFER_ARB, 0);
        }
        else
        {
            osgGlVertexAttribPointerARB(slot, 
                                        getDimension(),
                                        getFormat   (), 
                                        getNormalize(),
                                        getStride   (), 
                                        getData     ());
        }
        
        OSGGETGLFUNCBYID( OSGglEnableVertexAttribArrayARB,
                          osgGlEnableVertexAttribArrayARB,
                         _funcglEnableVertexAttribArrayARB,
                          win);
        
        osgGlEnableVertexAttribArrayARB(slot);
    }
    else 
    {        
        const void *pData = NULL;

        OSGGETGLFUNCBYID( OSGglBindBufferARB, 
                          osgGlBindBufferARB,
                         _funcBindBuffer, 
                          win);

        hasVBO &= getUseVBO() && (getGLId() != 0);

        if(hasVBO == true) // Do we have a VBO?
        {
            osgGlBindBufferARB(GL_ARRAY_BUFFER_ARB,
                               getGLId());
        }
        else
        {
            pData = getData();
        }
        
        switch(slot)
        {
            case 0:     
                glVertexPointer(getDimension(), 
                                getFormat   (),
                                getStride   (),
                                pData         );

                glEnableClientState(GL_VERTEX_ARRAY);
                break;

            case 2:     
                glNormalPointer(getFormat(),
                                getStride(),
                                pData      );

                glEnableClientState(GL_NORMAL_ARRAY);
                break;

            case 3:   
                glColorPointer(getDimension(), 
                               getFormat   (),
                               getStride   (), 
                               pData         );

                glEnableClientState(GL_COLOR_ARRAY);
                break;

            case 4:   
                if (win->hasExtension(_extSecondaryColor))
                {
                    OSGGETGLFUNCBYID( OSGglSecondaryColorPointerEXT,
                                      osgGlSecondaryColorPointerEXT,
                                     _funcglSecondaryColorPointer,
                                      win);

                    osgGlSecondaryColorPointerEXT(getDimension(),
                                                  getFormat   (),
                                                  getStride   (), 
                                                  pData         );

                    glEnableClientState(GL_SECONDARY_COLOR_ARRAY_EXT);
                }
                else
                {
                    FWARNING(("GeoVectorProperty::activate: Window "
                              "has no Secondary Color extension\n"));
                }
                break;

            case 8:  
            case 9:
            case 10: 
            case 11:
            case 12: 
            case 13:
            case 14: 
            case 15:
            {
                OSGGETGLFUNCBYID( OSGglClientActiveTextureARB,
                                  osgGlClientActiveTextureARB,
                                 _funcglClientActiveTextureARB,
                                  win);

                osgGlClientActiveTextureARB(GL_TEXTURE0_ARB + slot - 8);

                glTexCoordPointer(getDimension(), 
                                  getFormat   (),
                                  getStride   (),
                                  pData         );

                glEnableClientState(GL_TEXTURE_COORD_ARRAY);
            }
            break;

            default:    FWARNING(("GeoVectorProperty::activate: Non-Generic"
                                  " attribute nr. %d unknown!\n", slot));
                break;

        }
        if(hasVBO == true) // Do we have a VBO?
        {
            osgGlBindBufferARB(GL_ARRAY_BUFFER_ARB, 0);
        }
    }
#endif
}
Example no. 28
bool Texture2D::isCompressed(GLenum target, GLint level) const
{
	return IsCompressed(getFormat(target, level));
}
void ApexVertexBuffer::applyTransformation(const PxMat44& transformation)
{
	RenderDataFormat::Enum format;
	void* buf;
	uint32_t index;

	// Positions
	index = (uint32_t)getFormat().getBufferIndexFromID(getFormat().getSemanticID(RenderVertexSemantic::POSITION));
	buf = getBuffer(index);
	if (buf)
	{
		format = getFormat().getBufferFormat(index);
		transformRenderBuffer(buf, buf, format, getVertexCount(), transformation);
	}

	// Normals
	index = (uint32_t)getFormat().getBufferIndexFromID(getFormat().getSemanticID(RenderVertexSemantic::NORMAL));
	buf = getBuffer(index);
	if (buf)
	{
		// PH: the Cofactor matrix now also handles negative determinants, so it does the same as multiplying with the inverse transpose of transformation.M.
		const Cof44 cof(transformation);
		format = getFormat().getBufferFormat(index);
		transformRenderBuffer(buf, buf, format, getVertexCount(), cof.getBlock33());
	}

	// Tangents
	index = (uint32_t)getFormat().getBufferIndexFromID(getFormat().getSemanticID(RenderVertexSemantic::TANGENT));
	buf = getBuffer(index);
	if (buf)
	{
		format = getFormat().getBufferFormat(index);
		const PxMat33 tm(transformation.column0.getXYZ(),
								transformation.column1.getXYZ(),
								transformation.column2.getXYZ());
		transformRenderBuffer(buf, buf, format, getVertexCount(), tm);
	}

	// Binormals
	index = (uint32_t)getFormat().getBufferIndexFromID(getFormat().getSemanticID(RenderVertexSemantic::BINORMAL));
	buf = getBuffer(index);
	if (buf)
	{
		format = getFormat().getBufferFormat(index);
		const PxMat33 tm(transformation.column0.getXYZ(),
								transformation.column1.getXYZ(),
								transformation.column2.getXYZ());
		transformRenderBuffer(buf, buf, format, getVertexCount(), tm);
	}
}
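
A side note on the identity the normals branch above relies on (standard linear algebra, not taken from the APEX sources): for a point transform p' = M * p, a normal must transform as n' proportional to (M^-1)^T * n to stay perpendicular to transformed tangents, and since cof(M) = det(M) * (M^-1)^T, multiplying by the cofactor matrix gives the same direction while also flipping normals consistently when det(M) < 0 (mirroring), which is what the comment means by handling negative determinants.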
Example no. 30
GLuint FramebufferAttachment::getStencilSize() const
{
    return getSize().empty() ? 0 : getFormat().info->stencilBits;
}