void alexLissamojiWhitney::drawLissaous() { double y,x; float v,iconHeight,iconWidth; v = TWO_PI / pDensity; ofFill(); float emojiDiff = (float)getFrameNum() / 10; float frac = fmod((float)emojiDiff, 1); // int frameMod = getFrameNum() % 40; for (int i = 0; i < pDensity ; i++) { x = pAmp * cos( plissajouRatioX * v * ((float)i - frac) + pPhase); y = pAmp * sin( plissajouRatioY * v * ((float)i - frac) + pPhase); iconWidth = eyesImg.getWidth() * pImgScale; iconHeight = eyesImg.getHeight() * pImgScale; int emoji = (i + (int)emojiDiff) % 870; // TBD: use emoji or circles? emojiSheet.drawSubsection(x-16, y-16, 32, 32, 32*(emoji/29+1), 32*(emoji%29+1)); //eyesImg.draw(x-iconWidth/2, y-iconHeight/2, iconWidth, iconHeight); // Alternative 2: use circles //ofSetColor(i/pDensity * 255); //ofDrawCircle(x, y, iconWidth / 4.0); } }
// Returns the maximum vertex position over the model's frames.
// The per-frame scan is not implemented yet, so this currently returns the
// origin for any model.
Vec3 getMaxVertexPos() {
	Vec3 max(0, 0, 0);
	for (int32 index : Rep(getFrameNum())) {
		// TODO: fold MV1GetFrameMaxVertexLocalPosition(handle, index) into max.
		(void)index; // silence unused-variable warning until implemented
	}
	// BUG FIX: the original had no return statement -- flowing off the end of
	// a non-void function is undefined behavior. Return the accumulator.
	return max;
}
// Debug dump: prints one line per table entry with the present, modified,
// referenced and pageout flags followed by the frame number.
void VTable::testShow() {
	const int kEntryCount = 64;
	for (int entry = 0; entry < kEntryCount; ++entry) {
		cout << ifPresent(entry) << " "
		     << ifModified(entry) << " "
		     << ifReferenced(entry) << " "
		     << ifPageout(entry) << " "
		     << getFrameNum(entry) << endl;
	}
}
// Filter input callback: drains the input queue, reassembles RFC 3984
// packetized H.264 NAL units into access units, decodes them with the
// OpenH264 decoder, and pushes the resulting YUV pictures to the output.
// Also handles output-size changes, error-rate notification, FPS accounting
// and the first-image-decoded event.
void MSOpenH264Decoder::feed() {
	if (!isInitialized()) {
		ms_error("MSOpenH264Decoder::feed(): not initialized");
		ms_queue_flush(mFilter->inputs[0]);
		return;
	}
	MSQueue nalus;
	ms_queue_init(&nalus);
	mblk_t *im;
	while ((im = ms_queue_get(mFilter->inputs[0])) != NULL) {
		if ((getIDRPicId() == 0) && (mSPS != 0) && (mPPS != 0)) {
			// Push the sps/pps given in sprop-parameter-sets if any,
			// stamped with the current packet's timestamp, before any
			// other data reaches the decoder.
			mblk_set_timestamp_info(mSPS, mblk_get_timestamp_info(im));
			mblk_set_timestamp_info(mPPS, mblk_get_timestamp_info(im));
			rfc3984_unpack(mUnpacker, mSPS, &nalus);
			rfc3984_unpack(mUnpacker, mPPS, &nalus);
			mSPS = 0; // consumed by rfc3984_unpack -- do not reuse
			mPPS = 0;
		}
		rfc3984_unpack(mUnpacker, im, &nalus);
		if (!ms_queue_empty(&nalus)) {
			void * pData[3] = { 0 };
			SBufferInfo sDstBufInfo = { 0 };
			// Flatten the NALUs into mBitstream (presumably Annex-B framing;
			// see nalusToFrame) and decode.
			int len = nalusToFrame(&nalus);
			DECODING_STATE state = mDecoder->DecodeFrame2(mBitstream, len, (uint8_t**)pData, &sDstBufInfo);
			if (state != dsErrorFree) {
				ms_error("OpenH264 decoder: DecodeFrame2 failed: 0x%x", state);
				// Rate-limit the decoding-error notification to once per 5000
				// ticker time units (also fire on the very first error).
				if (((mFilter->ticker->time - mLastErrorReportTime) > 5000) || (mLastErrorReportTime == 0)) {
					mLastErrorReportTime = mFilter->ticker->time;
					ms_filter_notify_no_arg(mFilter, MS_VIDEO_DECODER_DECODING_ERRORS);
				}
			}
			if (sDstBufInfo.iBufferStatus == 1) { // a decoded picture is available
				uint8_t * pDst[3] = { 0 };
				pDst[0] = (uint8_t *)pData[0];
				pDst[1] = (uint8_t *)pData[1];
				pDst[2] = (uint8_t *)pData[2];
				// Update video size and (re)allocate YUV buffer if needed
				if ((mWidth != sDstBufInfo.UsrData.sSystemBuffer.iWidth) || (mHeight != sDstBufInfo.UsrData.sSystemBuffer.iHeight)) {
					if (mYUVMsg) {
						freemsg(mYUVMsg);
					}
					mWidth = sDstBufInfo.UsrData.sSystemBuffer.iWidth;
					mHeight = sDstBufInfo.UsrData.sSystemBuffer.iHeight;
					mYUVMsg = ms_yuv_buf_alloc(&mOutbuf, mWidth, mHeight);
					ms_filter_notify_no_arg(mFilter,MS_FILTER_OUTPUT_FMT_CHANGED);
				}
				// Scale/copy frame to destination mblk_t. Plane 0 is copied
				// at full height; planes 1 and 2 at half height (see the
				// shift below), each row clipped to the destination stride.
				for (int i = 0; i < 3; i++) {
					uint8_t *dst = mOutbuf.planes[i];
					uint8_t *src = pDst[i];
					int h = mHeight >> (( i > 0) ? 1 : 0);
					for(int j = 0; j < h; j++) {
						memcpy(dst, src, mOutbuf.strides[i]);
						dst += mOutbuf.strides[i];
						// source strides: index 0 for luma, 1 for both chroma planes
						src += sDstBufInfo.UsrData.sSystemBuffer.iStride[(i == 0) ? 0 : 1];
					}
				}
				ms_queue_put(mFilter->outputs[0], dupmsg(mYUVMsg));
				// Update average FPS
				if (ms_average_fps_update(&mFPS, mFilter->ticker->time)) {
					ms_message("OpenH264 decoder: Frame size: %dx%d", mWidth, mHeight);
				}
				// Notify first decoded image
				if (!mFirstImageDecoded) {
					mFirstImageDecoded = true;
					ms_filter_notify_no_arg(mFilter, MS_VIDEO_DECODER_FIRST_IMAGE_DECODED);
				}
#if MSOPENH264_DEBUG
				ms_message("OpenH264 decoder: IDR pic id: %d, Frame num: %d, Temporal id: %d, VCL NAL: %d", getIDRPicId(), getFrameNum(), getTemporalId(), getVCLNal());
#endif
			}
		}
// NOTE(review): the closing braces for the while loop and for the function
// body appear to be truncated from this chunk of the file -- confirm against
// the full source.
// Set the current frame, clamping the requested value to the animation's
// frame range. Note: bounds are derived from getFrameNum() - 1, so when the
// animation reports zero frames the lower bound becomes -1.
void Animation::setF(int16 value) {
	const int16 lowBound = MIN(0, getFrameNum() - 1);
	const int16 highBound = MAX(0, getFrameNum() - 1);
	if (value < lowBound)
		_frame = lowBound;
	else if (value > highBound)
		_frame = highBound;
	else
		_frame = value;
}