const vector<short> & ofxDepthCompressedFrame::compressedData(){
	// Lazily (re)builds and returns the serialized form of this frame:
	// 5 header shorts followed by the compressed payload.
	if(compressedDirty){
		// Worst-case buffer: room for every pixel plus the 5-short header.
		compressed.resize(getPixels().size()*2+5);
		if(!isKeyFrame()){
			// Delta frame: collect only non-zero pixels as (offset, value)
			// pairs, offset being the distance from the previously emitted
			// pixel. A zero pixel is still emitted when the offset would no
			// longer fit in an unsigned short.
			uncompressedDiff.clear();
			int lastPos = 0;
			for(int i=0;i<pixels.size();i++){
				int pos = i-lastPos;
				if(pixels[i]==0 && pos < std::numeric_limits<unsigned short>::max()) continue;
                DiffPixel diffPixel={(unsigned short)pos,pixels[i]};
				uncompressedDiff.push_back(diffPixel);
				lastPos = i;
			}
			if(uncompressedDiff.empty()){
				compressedBytes = 0;
			}else{
				// Compress the diff list into the payload area after the header.
				compressedBytes = ofx_compress((unsigned char*)&uncompressedDiff[0],uncompressedDiff.size()*sizeof(DiffPixel),(unsigned char*)&compressed[5]);
			}
		}else{
			// Key frame: compress the whole raw pixel buffer as-is.
			compressedBytes = ofx_compress((unsigned char*)pixels.getPixels(),pixels.size()*sizeof(short),(unsigned char*)&compressed[5]);
		}
		compressedDirty = false;
		// Shrink to the bytes actually produced.
		// NOTE(review): compressedBytes/2 truncates when ofx_compress returns
		// an odd byte count, which would drop the final payload byte —
		// confirm the payload size is always even, or round up here.
		compressed.resize(compressedBytes/2+5);
	}
	return compressed;
}
Beispiel #2
0
void TimeLine::onDragDrop (ATOM_WidgetDragDropEvent *event)
{
	// Handles a key-frame marker being dropped onto the timeline: moves
	// (or, with Shift held, copies) the dragged key frame to the target slice.
	// Accept only drags that originate from a key-frame marker ("KM").
	if (event->dragSource->getProperty("Type") == "KM")
	{
		// Reconstruct the slice layout used by onPaint: numSlices cells of
		// sliceWidth pixels separated by 1px, centered with `space` margin.
		unsigned numSlices = _duration / _timeSlice;
		unsigned width = _timeLine->getClientRect().size.w;
		unsigned sliceWidth = (width - numSlices - 1) / numSlices;
		unsigned space = (width - numSlices * (sliceWidth + 1) - 1) / 2;
		int x = event->x - space;
		int n = x / (1 + sliceWidth);
		// x >= 0 rejects drops left of the first cell explicitly — the old
		// code only survived that case via unsigned wraparound in the
		// division above; the cast avoids a signed/unsigned comparison.
		if (x >= 0 && n < (int)numSlices && !isKeyFrame (n))
		{
			int source = event->dragSource->getProperty("SourceFrame").getI();

			// Sample the key-frame value at the source slice's time.
			ATOM_AUTOPTR(ATOM_CompositionKeyFrameValue) keyFrameValue = _actor->createKeyFrameValue ();
			_actor->evalKeyFrameValue (sliceIndexToTime (source), keyFrameValue.get());

			// Shift-drag copies the key frame; a plain drag moves it.
			if ((event->keymod & KEYMOD_SHIFT) == 0)
			{
				removeKeyFrame (source);
			}
			addKeyFrame (n, keyFrameValue.get());
			setCurrentSlice (n, 0);
		}
	}
}
void ofxDepthCompressedFrame::fromCompressedData(const char* data, size_t len){
	// Rebuilds this frame from a serialized buffer: a 5-short (10 byte)
	// header followed by the compressed payload produced by compressedData().
	const short * shortdata = (const short*)data;
	compressedDirty = true;
	compressed.resize(5);
	compressed[0] = shortdata[0];
	compressed[1] = shortdata[1];
	compressed[2] = shortdata[2];
	compressed[3] = shortdata[3];
	compressed[4] = shortdata[4];


	//FIXME: check that size is correct
	// NOTE(review): the width/height read from the header are overwritten
	// with hard-coded 160x120 here — presumably a temporary workaround;
	// confirm against the writer before removing.
	compressed[1] = 160;
	compressed[2] = 120;

	pixels.allocate(compressed[1],compressed[2],1);
	if(isKeyFrame()){
		// Key frame: payload is the full pixel buffer; decompress in place.
		ofx_uncompress((unsigned char*)data+10,len-10,(unsigned char*)pixels.getPixels(),pixels.size()*sizeof(short));
	}else{
		// Delta frame: start from all-zero pixels and apply (offset, value)
		// pairs, each offset relative to the previously written pixel.
		pixels.set(0);
		if(len>10){
			ofx_uncompress((unsigned char*)data+10,len-10,uncompressedDiff);
			int lastPos = 0;
			for(size_t i=0; i<uncompressedDiff.size(); i++){
				int nextPos = lastPos+uncompressedDiff[i].pos;
				if(nextPos>=pixels.size()) break;
				pixels[nextPos] = uncompressedDiff[i].value;
				lastPos = nextPos;
			}
		}
	}
}
Beispiel #4
0
void TimeLine::onLButtonDown (ATOM_WidgetLButtonDownEvent *event)
{
	// Maps the click position to a slice index. Ctrl-click toggles a key
	// frame on that slice; a plain click selects it as the current slice.
	unsigned numSlices = _duration / _timeSlice;
	unsigned width = _timeLine->getClientRect().size.w;
	unsigned sliceWidth = (width - numSlices - 1) / numSlices;
	unsigned space = (width - numSlices * (sliceWidth + 1) - 1) / 2;
	int x = event->x - space;
	int n = x / (1 + sliceWidth);
	// x >= 0 rejects clicks left of the first cell explicitly — the old code
	// only survived that case via unsigned wraparound in the division above;
	// the cast avoids a signed/unsigned comparison.
	if ( x >= 0 && n < (int)numSlices)
	{
		if (event->shiftState & KEYMOD_CTRL)
		{
			// Ctrl-click: toggle the key frame at slice n.
			if (isKeyFrame (n))
			{
				removeKeyFrame (n);
			}
			else
			{
				addKeyFrame (n);
			}
		}
		else
		{
			setCurrentSlice (n, 0);
		}
	}
}
Beispiel #5
0
void TimeLine::setActor (ATOM_CompositionActor *actor)
{
	// Binds a new actor to the timeline, rebuilding the cached key-frame
	// slice list and the working key-frame value. No-op if unchanged.
	if (actor == _actor.get())
	{
		return;
	}

	_actor = actor;
	_keyFrames.clear ();

	if (!actor)
	{
		_keyFrameValue = 0;
		return;
	}

	// Cache every key time as a slice index for fast key-frame lookups.
	unsigned numKeys = actor->getNumKeys();
	for (unsigned k = 0; k < numKeys; ++k)
	{
		_keyFrames.push_back (timeToSliceIndex(actor->getKeyTime (k)));
	}
	_keyFrameValue = actor->createKeyFrameValue ();

	// Seed the working value from the current slice: evaluate it when the
	// slice is a key frame, otherwise capture the actor's live state.
	if (isKeyFrame (_currentSlice))
	{
		actor->evalKeyFrameValue (sliceIndexToTime (_currentSlice), _keyFrameValue.get());
	}
	else
	{
		actor->captureKeyFrameValue (_keyFrameValue.get());
	}
}
Beispiel #6
0
void TimeLine::updateKeyFrame (int transformMode)
{
	// Re-captures the actor's current transform into the working key-frame
	// value and stores it at the current slice — but only when the current
	// slice already is a key frame.
	if (_actor && isKeyFrame (_currentSlice))
	{
		// Remember the previous value's components so the ones NOT being
		// edited (per transformMode) can be restored after the capture.
		ATOM_Vector3f t = _keyFrameValue->getTranslation();
		ATOM_Vector3f r = _keyFrameValue->getRotation();
		ATOM_Vector3f s = _keyFrameValue->getScale();
		_actor->captureKeyFrameValue (_keyFrameValue.get());

		switch (transformMode)
		{
		case TRANSFORMMODE_SCALE:
			{
				// Keep only the captured scale; restore old translation/rotation.
#if 1
				_keyFrameValue->setTranslation (t);
				_keyFrameValue->setRotation (r);
#else
				s = _actor->getNode()->getScale();
#endif
				break;
			}
		case TRANSFORMMODE_TRANSLATE:
			{
				// Keep only the captured translation; restore old scale/rotation.
#if 1
				_keyFrameValue->setScale (s);
				_keyFrameValue->setRotation (r);
#else
				t = _actor->getNode()->getTranslation();
#endif
				break;
			}
		case TRANSFORMMODE_ROTATION:
			{
				// Keep only the captured rotation; restore old translation/scale.
#if 1
				_keyFrameValue->setTranslation (t);
				_keyFrameValue->setScale (s);
#else
				ATOM_Vector4f r = _actor->getNode()->getRotation ();
				ATOM_Quaternion q(r.x, r.y, r.z, r.w);
				r = q.toEulerXYZ();
#endif
				break;
			}
		default:
			{
				// Unknown mode: restore everything (i.e. discard the capture).
				_keyFrameValue->setTranslation (t);
				_keyFrameValue->setScale (s);
				_keyFrameValue->setRotation (r);
				break;
			}
		}
		//_keyFrameValue->setTranslation (t);
		//_keyFrameValue->setScale (s);
		//_keyFrameValue->setRotation (r);
		_actor->addKeyFrame (sliceIndexToTime (_currentSlice), _keyFrameValue.get());
	}
}
Beispiel #7
0
bool Animation::isKeyFrame(const QString& jointName)
{
  // Returns whether the current frame is a key frame: for the whole
  // animation when jointName is empty, otherwise for the named joint.
  if(jointName.isEmpty())
     return isKeyFrame();

  BVHNode* node=bvh->bvhFindNode(frames,jointName);
  if(!node)
  {
    // Previously this qDebug sat after both returns and was unreachable,
    // and a null node would have been dereferenced; report and treat a
    // missing joint as "not a key frame".
    qDebug("Animation::isKeyFrame('%s'): no node found.",jointName.toLatin1().constData());
    return false;
  }
  return node->isKeyframe(frame);
}
Beispiel #8
0
void Animation::deleteKeyFrame(int jointNumber,int frameNum)
{
  // Removes the key frame at frameNum for one joint (jointNumber != 0)
  // or, for jointNumber == 0, for all joints at once.
  // frames should always be current frame, but better play safe for future enhancements
  setFrame(frameNum);

  if(!jointNumber)
  {
    if(isKeyFrame()) deleteKeyFrameAllJoints();
  }
  else
  {
    BVHNode* joint=getNode(jointNumber);
    if(joint->isKeyframe(frameNum)) deleteKeyFrame(joint,frameNum);
  }

  // tell main class that the keyframe has changed
  emit currentFrame(frameNum);
}
Beispiel #9
0
// returns TRUE if frame is now a keyframe for entire animation, FALSE if not
bool Animation::toggleKeyFrameAllJoints()
{
  // Frame 0 is pinned as a key frame and can never be toggled off.
  if(frame==0)
    return true;

  const bool wasKey=isKeyFrame();
  if(wasKey)
    deleteKeyFrameAllJoints();
  else
    addKeyFrameAllJoints();

  // The new state is the opposite of the old one.
  return !wasKey;
}
Beispiel #10
0
void TimeLine::onPaint (ATOM_WidgetDrawClientEvent *event)
{
	// Redraws the timeline: updates the current-time edit box, then draws
	// one cell per time slice — gray for inactive/no actor, red for key
	// frames, white otherwise — with a green outline on the current slice.
	char buffer[256];
	// snprintf instead of sprintf: bounded write into the stack buffer.
	snprintf (buffer, sizeof(buffer), "%d", _currentSlice * _timeSlice);
	_editCurrent->setString (buffer);

	const ATOM_Size2Di &canvasSize = event->canvas->getSize();
	event->canvas->fillRect (ATOM_Rect2Di(ATOM_Point2Di(0, 0), canvasSize), ATOM_ColorARGB(0.3f, 0.3f, 0.3f, 1.f));
	unsigned numSlices = _duration / _timeSlice;
	if (numSlices == 0) numSlices = 1;	// avoid division by zero below
	unsigned sliceWidth = (canvasSize.w - numSlices - 1) / numSlices;
	unsigned space = (canvasSize.w - numSlices * (sliceWidth + 1) - 1) / 2;
	for (unsigned i = 0; i < numSlices; ++i)
	{
		// Cell geometry: 1px gap between cells, 1px top/bottom margin.
		int x = space + i * (sliceWidth + 1);
		int y = 1;
		int w = sliceWidth;
		int h = event->canvas->getSize().h - 2;

		ATOM_ColorARGB c;
		if (!getActor() || i * _timeSlice >= _duration)
		{
			c.setFloats (0.5f, 0.5f, 0.5f, 1.f);	// gray: inactive
		}
		else if (isKeyFrame (i))
		{
			c.setFloats (1.f, 0.f, 0.f, 1.f);	// red: key frame
		}
		else
		{
			c.setFloats (1.f, 1.f, 1.f, 1.f);	// white: normal slice
		}
		event->canvas->fillRect (ATOM_Rect2Di(x, y, w, h), c);

		if (i == _currentSlice)
		{
			// Outline the current slice in green.
			c.setFloats (0.f, 0.5f, 0.f, 1.f);
			event->canvas->drawLine (ATOM_Point2Di(x, y), ATOM_Point2Di(x+w-1, y), c);
			event->canvas->drawLine (ATOM_Point2Di(x+w-1, y), ATOM_Point2Di(x+w-1, y+h-1), c);
			event->canvas->drawLine (ATOM_Point2Di(x+w-1, y+h-1), ATOM_Point2Di(x, y+h-1), c);
			event->canvas->drawLine (ATOM_Point2Di(x, y+h-1), ATOM_Point2Di(x, y), c);
		}
	}

	setAutoCallHost (false);
}
Beispiel #11
0
void TimeLine::onDragStart (ATOM_WidgetDragStartEvent *event)
{
	// Allows a drag only when it starts on an existing key-frame cell; tags
	// the drag source so onDragDrop can recognize and handle it.
	unsigned numSlices = _duration / _timeSlice;
	unsigned width = _timeLine->getClientRect().size.w;
	unsigned sliceWidth = (width - numSlices - 1) / numSlices;
	unsigned space = (width - numSlices * (sliceWidth + 1) - 1) / 2;
	int x = event->x - space;
	int n = x / (1 + sliceWidth);
	// x >= 0 rejects drags starting left of the first cell explicitly (the
	// old code only survived that case via unsigned wraparound); the cast
	// avoids a signed/unsigned comparison.
	if ( x >= 0 && n < (int)numSlices && isKeyFrame (n))
	{
		// The previous code recomputed sliceWidth into a shadowing local
		// with the same formula and value; reuse the one computed above.
		int indicatorW = (int)sliceWidth;
		int indicatorH = _timeLine->getClientRect().size.h;
		event->dragSource->setIndicatorRect(ATOM_Rect2Di(-indicatorW/2, -indicatorH/2, indicatorW, indicatorH));
		event->dragSource->setIndicatorImageId(dragImageId);
		event->dragSource->addProperty ("Type", "KM");
		event->dragSource->addProperty ("SourceFrame", n);
	}
	else
	{
		event->allow = false;
	}
}
Beispiel #12
0
void TimeLine::setCurrentSlice (unsigned index, ATOM_CompositionNode *node)
{
	// Switches the timeline to a new slice, refreshes the working key-frame
	// value from the actor, and notifies the host panel of the time change.
	if (index == _currentSlice)
	{
		return;
	}

	_currentSlice = index;

	if (_actor && _keyFrameValue)
	{
		// Key-frame slices get the stored value; others capture live state.
		if (isKeyFrame (_currentSlice))
		{
			_actor->evalKeyFrameValue (sliceIndexToTime (_currentSlice), _keyFrameValue.get());
		}
		else
		{
			_actor->captureKeyFrameValue (_keyFrameValue.get());
		}
	}

	// Post a TimeChangeEvent so the owner can react asynchronously.
	TimeChangeEvent *event = ATOM_NEW(TimeChangeEvent);
	event->time = sliceIndexToTime(index);
	_panel->getParent()->queueEvent (event, ATOM_APP);
}
Beispiel #13
0
// Encodes one frame: per-macroblock DCT + quantization, entropy-coded with
// an arithmetic coder. Non-key frames encode the difference against the
// locally reconstructed reference frame; key frames encode absolute data.
static int encode_frame(AVCodecContext *avctx, AVPacket *avpkt,	const AVFrame *frame, int *got_packet_ptr) {
	MscEncoderContext * mscEncoderContext;
	MscCodecContext * mscContext;
	uint32_t arithBytesEncoded;
	PutBitContext pb;
	int mb_y, mb_x, value, lastNonZero, max, arithCoderIndex = -1, keyFrame;

	// initialize arithmetic encoder registers
	initialize_arithmetic_encoder();

	mscEncoderContext = avctx->priv_data;
	mscContext = &mscEncoderContext->mscContext;

	init_put_bits(&pb, mscEncoderContext->arithBuff, mscEncoderContext->arithBuffSize);

	// Key-frame decision depends only on the frame number.
	keyFrame = isKeyFrame(avctx->frame_number);

	if (avctx->frame_number == 0) {
		// Lazily allocate the reference frame on the very first frame.
		// NOTE(review): not freed here — presumably released in the codec's
		// close function; confirm.
		av_image_alloc(mscContext->referenceFrame->data, mscContext->referenceFrame->linesize, frame->width, frame->height, frame->format, 128);
	}

	// NOTE(review): every coded frame is marked as a key frame here (and
	// AV_PKT_FLAG_KEY is set below) even when keyFrame is 0 — confirm this
	// is intentional.
	avctx->coded_frame->reference = 0;
	avctx->coded_frame->key_frame = 1;
	avctx->coded_frame->pict_type = AV_PICTURE_TYPE_I;

	// Intra matrix for key frames, inter matrix for delta frames.
	int * qmatrix = keyFrame ? mscContext->q_intra_matrix : mscContext->q_non_intra_matrix;

	for (mb_x = 0; mb_x < mscContext->mb_width; mb_x++) {
		for (mb_y = 0; mb_y < mscContext->mb_height; mb_y++) {
			get_blocks(mscEncoderContext, frame, mb_x, mb_y, mscContext->block);

			if (!keyFrame) {
				// Delta frame: encode block minus the co-located block of
				// the reconstructed reference frame.
				get_blocks(mscEncoderContext, mscContext->referenceFrame, mb_x, mb_y, mscContext->tmpBlock);

				diff_blocks(mscContext->block, mscContext->tmpBlock);
			}

			// 6 blocks per macroblock (4 luma + 2 chroma).
			for (int n = 0; n < 6; ++n) {

//				if (avctx->frame_number == 1 && mb_x == 0 && mb_y == 0) {
//					av_log(avctx, AV_LOG_INFO, "Block x=%d, y=%d, n=%d\n", mb_x, mb_y, n);
//					print_debug_block(avctx, mscContext->block[n]);
//				}

				mscContext->dsp.fdct(mscContext->block[n]);

//				if (avctx->frame_number == 0 && mb_x == 0 && mb_y == 0) {
//					av_log(avctx, AV_LOG_INFO, "DCT block x=%d, y=%d, n=%d\n", mb_x, mb_y, n);
//					print_debug_block(avctx, mscContext->block[n]);
//				}

				lastNonZero = quantize(mscContext->block[n], qmatrix, &max);

				av_assert1(lastNonZero < 64);

//				if (overflow) {
//					clip_coeffs(m, m->block[n], m->block_last_index[n]);
//					av_log(avctx, AV_LOG_WARNING, "Overflow detected, frame: %d, mb_x: %d, mb_y: %d, n: %d\n",
//							avctx->frame_number, mb_x, mb_y, n);
//				}

//				if (avctx->frame_number == 0 && mb_x == 3 && mb_y == 0) {
//					av_log(avctx, AV_LOG_INFO, "DCT quantized block x=%d, y=%d, n=%d\n", mb_x, mb_y, n);
//					print_debug_block(avctx, mscContext->block[n]);
//				}

				// Emit the index of the last non-zero coefficient first so
				// the decoder knows how many symbols follow.
				encode_arith_symbol(&mscContext->lastZeroCodingModel, &pb, lastNonZero);

				if (lastNonZero > 0) {
					// Choose a coefficient model sized to the largest value.
					arithCoderIndex = get_arith_model_index(max);

					encode_arith_symbol(&mscContext->arithModelIndexCodingModel, &pb, arithCoderIndex);
				}

				for (int i = 0; i <= lastNonZero; ++i) {
					// DC coefficient (i == 0) always uses the widest model.
					int arithCoderBits = i == 0 ? ARITH_CODER_BITS : arithCoderIndex;

					// Bias the value to make it non-negative for the coder.
					value = mscContext->block[n][scantab[i]] + mscContext->arithModelAddValue[arithCoderBits];

			        encode_arith_symbol(&mscContext->arithModels[arithCoderBits], &pb, value);
				}

				dequantize(mscContext->block[n], mscContext, keyFrame);
			}

			// Reconstruct into the reference frame exactly as the decoder
			// will, so encoder and decoder references stay in sync.
			if (keyFrame) {
				idct_put_block(mscContext, mscContext->referenceFrame, mb_x, mb_y);
			}
			else {
				idct_add_block(mscContext, mscContext->referenceFrame, mb_x, mb_y);
			}
		}
	}

	emms_c();

	// flush arithmetic encoder
	flush_arithmetic_encoder(&pb);
	flush_put_bits(&pb);

	arithBytesEncoded = pb.buf_ptr - pb.buf;

	// alocate packet
	if ((value = ff_alloc_packet(avpkt, arithBytesEncoded)) < 0) {
		return value;
	}

	avpkt->flags |= AV_PKT_FLAG_KEY;

	// store encoded data
	memcpy(avpkt->data, mscEncoderContext->arithBuff, arithBytesEncoded);
	*got_packet_ptr = 1;

	return 0;
}
Beispiel #14
0
// Decodes one packet into a frame: the inverse of encode_frame. Reads
// arithmetic-coded quantized DCT coefficients per block, dequantizes and
// IDCTs them — adding onto the reference frame for non-key frames — and
// keeps a reference copy for the next delta frame.
static int decode(AVCodecContext * avctx, void *outdata, int *outdata_size, AVPacket *avpkt) {
	AVFrame *frame = avctx->coded_frame;
	MscDecoderContext * mscDecoderContext;
	MscCodecContext * mscContext;
	GetBitContext gb;
	int lastNonZero, value, arithCoderIndex = -1, keyFrame;

	mscDecoderContext = avctx->priv_data;
	mscContext = &mscDecoderContext->mscContext;

	// Release the previous output buffer before acquiring a new one.
	if (frame->data[0]) {
		avctx->release_buffer(avctx, frame);
	}

	frame->reference = 0;
	if (avctx->get_buffer(avctx, frame) < 0) {
		av_log(avctx, AV_LOG_ERROR, "Could not allocate buffer.\n");
		return AVERROR(ENOMEM);
	}

	// Key-frame decision depends only on the frame number (mirrors the encoder).
	keyFrame = isKeyFrame(avctx->frame_number);

	if (avctx->frame_number == 0) {
		// Lazily allocate the reference frame on the very first frame.
		av_image_alloc(mscContext->referenceFrame->data, mscContext->referenceFrame->linesize, frame->width, frame->height, PIX_FMT_YUV420P, 128);
	}

	if (!keyFrame) {
		// Delta frame: start from the previously reconstructed reference.
		av_image_copy(frame->data, frame->linesize, mscContext->referenceFrame->data,
				mscContext->referenceFrame->linesize, PIX_FMT_YUV420P, frame->width, frame->height);
	}

	frame->key_frame = 1;
	frame->pict_type = AV_PICTURE_TYPE_I;

	// init encoded data bit buffer
	init_get_bits(&gb, avpkt->data, avpkt->size * 8);

	initialize_arithmetic_decoder(&gb);

	for (int mb_x = 0; mb_x < mscContext->mb_width; mb_x++) {
		for (int mb_y = 0; mb_y < mscContext->mb_height; mb_y++) {

			// 6 blocks per macroblock (4 luma + 2 chroma).
			for (int n = 0; n < 6; ++n) {

				mscContext->dsp.clear_block(mscContext->block[n]);

				// Index of the last non-zero coefficient, then (if any) the
				// coefficient model index, then the coefficients themselves.
				lastNonZero = decode_arith_symbol(&mscContext->lastZeroCodingModel, &gb);

				if (lastNonZero > 0) {
					arithCoderIndex = decode_arith_symbol(&mscContext->arithModelIndexCodingModel, &gb);
				}

				for (int i = 0; i <= lastNonZero; ++i) {
					// DC coefficient (i == 0) always uses the widest model.
					int arithCoderBits = i == 0 ? ARITH_CODER_BITS : arithCoderIndex;

					value = decode_arith_symbol(&mscContext->arithModels[arithCoderBits], &gb);

					// Undo the non-negative bias applied by the encoder.
					mscContext->block[n][scantab[i]] = value - mscContext->arithModelAddValue[arithCoderBits];
				}

//				if (avctx->frame_number == 0 && mb_x == 3 && mb_y == 0) {
//					av_log(avctx, AV_LOG_INFO, "Quantized x=%d, y=%d, n=%d\n", mb_x, mb_y, n);
//					print_debug_block(avctx, mscContext->block[n]);
//				}

				dequantize(mscContext->block[n], mscContext, keyFrame);

//				if (avctx->frame_number == 0 && mb_x == 0 && mb_y == 0) {
//					av_log(avctx, AV_LOG_INFO, "Dequantized x=%d, y=%d, n=%d\n", mb_x, mb_y, n);
//					print_debug_block(avctx, mscContext->block[n]);
//				}
			}

//			if (avctx->frame_number == 0 && mb_x == 0 && mb_y == 0) {
//				av_log(avctx, AV_LOG_INFO, "IDCT x=%d, y=%d, n=%d\n", mb_x, mb_y, 0);
//				print_debug_block(avctx, mscContext->block[0]);
//			}
			// Key frames overwrite the output; delta frames add onto the
			// copied reference content.
			if (keyFrame) {
				idct_put_block(mscContext, frame, mb_x, mb_y);
			}
			else {
				idct_add_block(mscContext, frame, mb_x, mb_y);
			}

			// Keep the reference frame in sync for the next delta frame.
			copy_macroblock(frame, mscContext->referenceFrame, mb_x, mb_y);
		}
	}

	emms_c();

	// Return a shallow copy of the decoded frame to the caller.
	*outdata_size = sizeof(AVFrame);
	*(AVFrame *) outdata = *frame;

	return avpkt->size;
}