// Set the current frame index, clamped to the valid range, and mark the
// widget for redraw.
//
// Fix: when getFrames() returns 0, the original `getFrames() - 1` wrapped
// around (uint16_t) to 65535 and stored a wildly out-of-range index.
void gciWidget::setValue(uint16_t x)
{
    uint16_t frameCount = getFrames();
    if (frameCount == 0) {
        x = 0;                    // no frames: only index 0 is sane
    } else if (x >= frameCount) {
        x = frameCount - 1;       // clamp to last valid frame
    }
    _value = x;
    _redraw = true;
}
// Retrieve `count` frames of interleaved audio into `frames`, resampling
// from the source rate (m_sampleRate) to the requested retrieval rate
// (m_retrievalRate) when they differ. Returns the number of frames
// actually delivered (may be short at end of stream).
size_t AudioReadStream::getInterleavedFrames(size_t count, float *frames)
{
    // Fast path: no resampling required (or nothing meaningful to do).
    if (m_retrievalRate == 0 ||
        m_retrievalRate == m_sampleRate ||
        m_channelCount == 0) {
        return getFrames(count, frames);
    }

    size_t samples = count * m_channelCount; // interleaved sample count requested

    // Lazily create the resampler and a ring buffer holding resampled
    // interleaved samples awaiting delivery.
    if (!m_resampler) {
        m_resampler = new Resampler(Resampler::Best, m_channelCount);
        m_resampleBuffer = new RingBuffer<float>(samples * 2);
    }

    bool finished = false;

    // Pull and resample source frames until enough output is buffered,
    // or the source runs dry.
    while (m_resampleBuffer->getReadSpace() < samples && !finished) {

        float ratio = float(m_retrievalRate) / float(m_sampleRate);
        size_t req = size_t(ceil(count / ratio));      // source frames to request
        size_t outSz = size_t(ceil(req * ratio));      // expected output frames

        float *in = new float[req * m_channelCount];
        float *out = new float[(outSz + 1) * m_channelCount]; // take one extra space to be sure

        size_t got = getFrames(req, in);

        if (got < req) {
            finished = true; // short read: source exhausted
        }

        if (got > 0) {
            // Final-block flag is `got < req` so the resampler can flush.
            int resampled = m_resampler->resampleInterleaved
                (in, out, got, ratio, got < req);

            // Grow the ring buffer when the resampled data will not fit.
            // NOTE(review): assumes resized() transfers/releases the old
            // buffer rather than leaking it — confirm RingBuffer contract.
            if (m_resampleBuffer->getWriteSpace() < resampled * m_channelCount) {
                m_resampleBuffer = m_resampleBuffer->resized
                    (m_resampleBuffer->getReadSpace() + resampled * m_channelCount);
            }

            m_resampleBuffer->write(out, resampled * m_channelCount);
        }

        delete[] in;
        delete[] out;
    }

    // Deliver up to `samples` interleaved samples; convert back to frames.
    return m_resampleBuffer->read(frames, samples) / m_channelCount;
}
// Lazily build the per-CSS-property keyframe groups from the normalized
// keyframe list. No-op when the cache (m_keyframeGroups) is already
// populated; const because it only fills cached state.
void KeyframeEffectModelBase::ensureKeyframeGroups() const
{
    if (m_keyframeGroups)
        return; // already built

    m_keyframeGroups = adoptPtrWillBeNoop(new KeyframeGroupMap);
    const KeyframeVector keyframes = normalizedKeyframes(getFrames());

    // Bucket every keyframe's property-specific values by CSS property.
    for (KeyframeVector::const_iterator keyframeIter = keyframes.begin(); keyframeIter != keyframes.end(); ++keyframeIter) {
        const Keyframe* keyframe = keyframeIter->get();
        PropertySet keyframeProperties = keyframe->properties();
        for (PropertySet::const_iterator propertyIter = keyframeProperties.begin(); propertyIter != keyframeProperties.end(); ++propertyIter) {
            CSSPropertyID property = *propertyIter;
            // Shorthands must have been expanded during normalization.
            ASSERT_WITH_MESSAGE(!isExpandedShorthand(property), "Web Animations: Encountered shorthand CSS property (%d) in normalized keyframes.", property);
            KeyframeGroupMap::iterator groupIter = m_keyframeGroups->find(property);
            PropertySpecificKeyframeGroup* group;
            if (groupIter == m_keyframeGroups->end())
                group = m_keyframeGroups->add(property, adoptPtrWillBeNoop(new PropertySpecificKeyframeGroup)).storedValue->value.get();
            else
                group = groupIter->value.get();
            group->appendKeyframe(keyframe->createPropertySpecificKeyframe(property));
        }
    }

    // Add synthetic keyframes.
    for (KeyframeGroupMap::iterator iter = m_keyframeGroups->begin(); iter != m_keyframeGroups->end(); ++iter) {
        iter->value->addSyntheticKeyframeIfRequired(this);
        iter->value->removeRedundantKeyframes();
    }
}
// Lazily build the per-property keyframe groups from the normalized
// keyframes. Also records the easing of the zero-offset keyframe (falling
// back to the model default) for use when synthesizing keyframes, and
// tracks whether any synthetic keyframes were added.
void KeyframeEffectModelBase::ensureKeyframeGroups() const
{
    if (m_keyframeGroups)
        return; // cache already populated

    m_keyframeGroups = adoptPtr(new KeyframeGroupMap);
    RefPtr<TimingFunction> zeroOffsetEasing = m_defaultKeyframeEasing;
    for (const auto& keyframe : normalizedKeyframes(getFrames())) {
        // Remember the easing attached at offset 0 for synthetic keyframes.
        if (keyframe->offset() == 0)
            zeroOffsetEasing = &keyframe->easing();
        for (const PropertyHandle& property : keyframe->properties()) {
            KeyframeGroupMap::iterator groupIter = m_keyframeGroups->find(property);
            PropertySpecificKeyframeGroup* group;
            if (groupIter == m_keyframeGroups->end())
                group = m_keyframeGroups->add(property, adoptPtr(new PropertySpecificKeyframeGroup)).storedValue->value.get();
            else
                group = groupIter->value.get();
            group->appendKeyframe(keyframe->createPropertySpecificKeyframe(property));
        }
    }

    // Add synthetic keyframes.
    m_hasSyntheticKeyframes = false;
    for (const auto& entry : *m_keyframeGroups) {
        if (entry.value->addSyntheticKeyframeIfRequired(zeroOffsetEasing))
            m_hasSyntheticKeyframes = true;
        entry.value->removeRedundantKeyframes();
    }
}
/*!
 * When the program is closed this will save all
 * the changes made in the parameters in the configurations.ini file
 */
Cparameter::~Cparameter(void)
{
    /* WORK SEQUENCE
     * opens the ini file
     * Gets the current values if changed in the process by some other functions
     * simply overwrites all the values when the destructor is called
     */
    // QSettings flushes to disk when it goes out of scope at the end of
    // this destructor; beginGroup() scopes all keys under [Parameters].
    QSettings get(QDir::currentPath()+ "/" + "configurations.ini", QSettings::IniFormat);
    get.beginGroup("Parameters");
    QString name = getLogFileName();
    //qDebug() << name ;
    get.setValue("LogFileName", name);
    QString path = getLogFileDirectory();
    get.setValue("LogFileDirectory", path);
    QString ip = getIpAddress();
    get.setValue("IpAddress", ip);
    int port = getPort();
    get.setValue("Port", port);
    QString resolution = getResolution();
    get.setValue("Resolution", resolution);
    int frames = getFrames();
    get.setValue("Frames", frames);
    // NOTE(review): HorizontalFcl is read from `settings` but never used
    // and never written back via get.setValue(), unlike every other
    // parameter above — looks like an unfinished save of
    // "HorizontalFocalLength"; confirm intent.
    float HorizontalFcl = settings->value("HorizontalFocalLength").toFloat();
}
void AnimationCom::playAnimation(const std::string& name,cocos2d::Node* parent,std::function<void()> callback,const cocos2d::Point& point) { auto iter=animations.find(name); if (iter!=animations.end()) { auto cache = cocos2d::AnimationCache::getInstance(); auto animation = cache->getAnimation(name); auto action2 = cocos2d::Animate::create(animation); auto animationFrame=animation->getFrames().at(0); cocos2d::Sprite* sprite=cocos2d::Sprite::createWithSpriteFrame(animationFrame->getSpriteFrame()); sprite->setAnchorPoint(iter->second.anchorPoint); if (point.equals(cocos2d::Point::ZERO)) { sprite->setPosition(iter->second.position); } else{ sprite->setPosition(point); } sprite->setScale(iter->second.scale.x, iter->second.scale.y); sprite->setLocalZOrder(iter->second.localZOrder); parent->addChild(sprite); //animation->setDelayPerUnit(iter->second.delayPerUnit); //animation->setLoops(iter->second.loops); sprite->runAction(cocos2d::Sequence::create(action2, cocos2d::RemoveSelf::create(), NULL)); } }
// Average frames-per-second since the clock started (integer division).
// Throws a std::string when more than a second of ticks has elapsed but
// the frame counter was never incremented; returns 0 while warming up.
int Clock::getFps() const
{
    const auto elapsed = getSeconds();
    if (elapsed > 0)
        return frames / elapsed;

    if (getTicks() > 1000 && getFrames() == 0)
        throw std::string("Can't getFps if you don't increment the frames");

    return 0;
}
/**
 * \brief Get a single frame.
 *
 * Convenience wrapper around getFrames() that requests exactly one frame.
 *
 * \param ifno interface number
 * \throws std::length_error when getFrames() returns no frames
 */
FramePtr UVCCamera::getFrame(uint8_t ifno)
{
    std::vector<FramePtr> captured = getFrames(ifno, 1);
    if (captured.empty()) {
        throw std::length_error("no frames returned by getFrames");
    }
    return captured.front();
}
// Rolling-average frames-per-second over the recent-frame window.
// Only reports once at least one second has passed and more than half the
// averaging window is filled; throws a std::string when ticks accumulate
// with no frames ever counted; otherwise returns 0.
int Clock::getAvgFps() const
{
    const bool windowReady =
        getSeconds() > 0 && recentFrames.size() > maxFramesToAvg / 2;
    if (windowReady) {
        // frames in window divided by window duration in seconds
        return recentFrames.size() / (tickSum / 1000.0);
    }

    if (getTicks() > 1000 && getFrames() == 0) {
        throw std::string("Can't getFps if you don't increment the frames");
    }

    return 0;
}
// Render the current frames-per-second counter as on-screen text.
//
// Fix: use snprintf instead of sprintf so the 30-byte buffer can never be
// overrun, even for extreme float values.
void Frames :: render()
{
    char frames[30];
    snprintf(frames, sizeof(frames), "FPS:%.3f", getFrames());
    string fram(frames);
    GLFont::initText();
    GLFont::print(20, 20, fram);
    GLFont::endText();
}
int main(int argc, char *argv[]) { int *fd = (int*)malloc(sizeof(int)), qual = 0, fps = 0, bufSize = 0; char *camera; if(argc != 5) // Check correct number of args { char errorMsg[256]; sprintf(errorMsg, "usage: %s cameraDevice JpegQuality fps bufferSize", argv[0]); exitWithError(errorMsg); } else { camera = argv[1]; // camera location // Image quality if(atoi(argv[2]) > 0 && atoi(argv[2]) <= 100) qual = atoi(argv[2]); else exitWithError("Set JpegQuality between 1 and 100 inclusive."); // frame delay if(atoi(argv[3]) > 0) fps = atoi(argv[3]); // list size if(atoi(argv[4]) > 0) bufSize = atoi(argv[4]); } printf("Camera Interface Copyright (C) 2012 Jacob Appleton\n\n"); printf("This program comes with ABSOLUTELY NO WARRANTY;\n"); printf("This is free software, and you are welcome to redistribute it \n"); printf("under certain conditions.\n"); printf("Visit http://www.gnu.org/licenses/gpl.html for more details.\n\n"); *fd = -1; openDevice(camera, fd); getCapabilities(fd); int* imageCaptureType = (int*)malloc(sizeof(int)); *imageCaptureType = V4L2_BUF_TYPE_VIDEO_CAPTURE; getFrames(1, 1, fd, imageCaptureType, qual, fps, bufSize); // Turn the stream off - this will turn off the camera's LED light ioctl(*fd, VIDIOC_STREAMOFF, imageCaptureType); closeDevice(fd); free(fd); free(imageCaptureType); pthread_exit(NULL); return 0; }
void SpriteAnimation::setup(int playSide, int entityType, cocos2d::Node * parent) { if (!mSetup) { mSetup = true; mPlaySide = playSide; mEntityType = entityType; auto animation = game::GameAnimations::getInstance()->getAnimation(getAnimationName(game::AnimationName::IDLE_UP)); mSprite = cocos2d::Sprite::createWithSpriteFrame(animation->getFrames().at(0)->getSpriteFrame()); mSprite->setAnchorPoint(cocos2d::Vec2(.5f, 0.1f)); mSprite->setScale(.8f); mCurrentAnimation = game::AnimationName::NONE; this->animate(game::AnimationName::IDLE_UP); parent->addChild(mSprite); CC_SAFE_RETAIN(mSprite); } }
/* output the sequence for one gene for every species to
 * the file stream */
void outGene(FILE *f, char *geneName, char *dbName, char *mafTable, char *frameTable, char *org, struct slName *speciesNameList)
{
    struct mafFrames *frames = getFrames(geneName, frameTable, org);
    struct mafFrames *frame, *nextFrame;
    struct exonInfo *giList = NULL;  /* accumulated per-exon info (built head-first) */
    struct exonInfo *gi = NULL;      /* exon currently being extended */
    int start = 0;                   /* running offset of the exon within the gene */

    /* Walk the frame records, starting a new exonInfo at each exon edge
     * (or for every frame with the old table format) and otherwise
     * extending the current one. Each frame is detached from the list
     * (frame->next = NULL) before being attached to an exonInfo. */
    for (frame = frames; frame; frame = nextFrame)
    {
        nextFrame = frame->next;
        frame->next = NULL;
        /* on the '-' strand exons are traversed end-first */
        boolean exonEdge = (frame->strand[0] == '-') ? frame->isExonEnd : frame->isExonStart;
        if (!newTableType || exonEdge)
        {
            /* begin a new exon */
            AllocVar(gi);
            gi->frame = frame;
            gi->name = frame->name;
            gi->ali = getAliForFrame(mafTable, frame);
            gi->chromStart = frame->chromStart;
            gi->chromEnd = frame->chromEnd;
            gi->exonStart = start;
            gi->exonSize = frame->chromEnd - frame->chromStart;
            start += gi->exonSize;
            slAddHead(&giList, gi);
        }
        else
        {
            /* continuation of the current exon: widen it and append
             * this frame's alignment and frame record */
            struct mafAli *newAli;
            assert(gi != NULL);
            int frameWidth = frame->chromEnd - frame->chromStart;
            gi->exonSize += frameWidth;
            start += frameWidth;
            gi->chromEnd = frame->chromEnd;
            newAli = getAliForFrame(mafTable, frame);
            gi->ali = slCat(gi->ali, newAli);
            slAddTail(&gi->frame, frame);
        }
    }
    /* list was built head-first; restore original order */
    slReverse(&giList);

    /* collect per-species sequence info and emit it */
    struct hash *speciesInfoHash = newHash(5);
    struct speciesInfo *speciesList = getSpeciesInfo(giList, speciesNameList, speciesInfoHash);
    copyMafs(speciesInfoHash, &giList);

    /* reset each species' position-string cursor before writing */
    struct speciesInfo *si = speciesList;
    for (; si; si = si->next)
        si->curPosString = si->posStrings;

    writeOutSpecies(f, dbName, speciesList, giList);

    freeSpeciesInfo(speciesList);
    freeGIList(giList);
}
// Retrieve `count` frames of interleaved audio into `frames`, resampling
// from the source rate (m_sampleRate) to the requested retrieval rate
// (m_retrievalRate) when they differ. Tracks totals so the final short
// read reports exactly the amount of audio actually available.
size_t AudioReadStream::getInterleavedFrames(size_t count, float *frames)
{
    // Fast path: no resampling required (or nothing meaningful to do).
    if (m_retrievalRate == 0 ||
        m_retrievalRate == m_sampleRate ||
        m_channelCount == 0) {
        return getFrames(count, frames);
    }

    int samples = count * m_channelCount; // interleaved samples requested

    // Lazily create the resampler and the ring buffer of pending output.
    if (!m_resampler) {
        m_resampler = new Resampler(Resampler::Best, m_channelCount);
        m_resampleBuffer = new RingBuffer<float>(samples * 2);
    }

    float ratio = float(m_retrievalRate) / float(m_sampleRate);
    int fileFrames = int(ceil(count / ratio)); // source frames per request
    bool finished = false;

    float *in = allocate<float>(fileFrames * m_channelCount);
    float *out = allocate<float>((count + 1) * m_channelCount);

    // Fill the ring buffer until we can satisfy the request. After the
    // source is exhausted we feed zeros so the resampler's tail flushes.
    while (m_resampleBuffer->getReadSpace() < samples) {

        int fileFramesRemaining =
            int(ceil((samples - m_resampleBuffer->getReadSpace()) /
                     (m_channelCount * ratio)));

        int got = 0;

        if (!finished) {
            got = getFrames(fileFramesRemaining, in);
            m_totalFileFrames += got;
            if (got < fileFramesRemaining) {
                finished = true; // short read: end of source
            }
        } else {
            // Source done: pad with silence to flush resampler latency.
            v_zero(in, fileFramesRemaining * m_channelCount);
            got = fileFramesRemaining;
        }

        if (got > 0) {
            int resampled = m_resampler->resampleInterleaved
                (out, count + 1, in, got, ratio, finished);

            // Grow the ring buffer when the resampled data will not fit.
            // NOTE(review): assumes resized() replaces/frees the old
            // buffer — confirm RingBuffer contract.
            if (m_resampleBuffer->getWriteSpace() < resampled * m_channelCount) {
                m_resampleBuffer = m_resampleBuffer->resized
                    (m_resampleBuffer->getReadSpace() + resampled * m_channelCount);
            }

            m_resampleBuffer->write(out, resampled * m_channelCount);
        }
    }

    deallocate(in);
    deallocate(out);

    // Cap the return at the true amount of resampled audio available, so
    // padding zeros beyond the stream's end are not reported as content.
    int toReturn = samples;
    int available = (int(m_totalFileFrames * ratio) - m_totalRetrievedFrames)
        * m_channelCount;
    if (toReturn > available) toReturn = available;

    m_totalRetrievedFrames += toReturn;

    return m_resampleBuffer->read(frames, toReturn) / m_channelCount;
}
//Testing function "Train"
// Trains a ColorHistDetector on a test project, then independently
// recomputes each body part's color histogram (respecting polygon
// occlusion order) and compares it, bin by bin, against the detector's
// internal model within a small tolerance. Also dumps both histograms and
// the occlusion lists to TrainUnitTest_Output.txt and the pixels used to
// UsedPixels.png.
TEST(colorHistDetectorTest, Train)
{
    //Load the input data
    vector<Frame*> frames = LoadTestProject("speltests_TestData/CHDTrainTestData/", "trijumpSD_50x41.xml");

    //Setting parameters
    auto seq = new Sequence();
    map <string, float> params = SetParams(frames, &seq);
    // the Sequence owns its own copies; drop the originals
    for (auto f : frames)
        delete f;
    frames.clear();
    frames = seq->getFrames();

    //Counting a keyframes
    int FirstKeyframe = FirstKeyFrameNum(frames);
    int KeyframesCount = keyFramesCount(frames);

    //Copy image and skeleton from keyframe
    Mat image = frames[FirstKeyframe]->getImage();
    Mat image1; // working copy used to mark which pixels were counted
    image.copyTo(image1);
    Frame *frame = frames[FirstKeyframe];
    Skeleton skeleton = frame->getSkeleton();
    tree<BodyPart> PartTree = skeleton.getPartTree();

    //Build the rectangles for all of bodyparts
    map<int, POSERECT<Point2f>> Rects = SkeletonRects(skeleton);

    //Run "Train()"
    ColorHistDetector detector;
    detector.train(frames, params);

    //Calculate the polygons occlusion
    //Polygons layers:
    map<int, int> depth = { { 0, 2 }, { 1, 1 }, { 2, 3 }, { 3, 2 }, { 4, 4 }, { 5, 4 }, { 6, 1 }, { 7, 3 }, { 8, 2 }, { 9, 0 }, { 10, 4 }, { 11, 1 }, { 12, 3 }, { 13, 0 }, { 14, 4 }, { 15, 1 }, { 16, 3 } };
    //Polygons occlusion:
    vector<vector<pair<int, int>>> Crossings = CrossingsList(Rects, depth);

    //Calculate the parts histograms
    // Reference computation: for every part rectangle, count each pixel
    // that lies inside it and is not covered by an occluding polygon.
    map <int32_t, ColorHistDetector::PartModel> partModels;
    for (int i = 0; i < Rects.size(); i++)
    {
        ColorHistDetector::PartModel Model(8);
        Model.sizeFG = 0;
        float xmin, ymin, xmax, ymax;
        Rects[i].GetMinMaxXY <float>(xmin, ymin, xmax, ymax);
        for (int x = xmin; x < xmax; x++)
        {
            for (int y = ymin; y < ymax; y++)
            {
                bool b = true; // pixel is unoccluded so far
                if (Rects[i].containsPoint(Point2f(x, y)) > 0)
                {
                    // check every polygon that may occlude part i
                    int k = 0;
                    while ((k < Crossings[i].size()) && b)
                    {
                        if (Rects[Crossings[i][k].first].containsPoint(Point2f(x, y)) > 0)
                            b = false;
                        k++;
                    }
                    if (b)
                    {
                        // mark the pixel as used and accumulate its color bin
                        int c = 50 + i * 10;
                        image1.at<Vec3b>(y, x) = Vec3b(c, c, c);
                        Vec3b color = image.at<Vec3b>(y, x);
                        Model.partHistogram[color[0] / Factor][color[1] / Factor][color[2] / Factor]++;
                        Model.sizeFG++;
                    }
                }
            }
        }
        partModels.emplace(pair<int32_t, ColorHistDetector::PartModel>(i, Model));
    }

    //Put results
    // Compare the reference histograms against the trained detector's,
    // allowing a small per-bin difference.
    int nBins = detector.nBins;
    bool AllValuesEqual = true;
    int delta = 2; // tolerable linear error
    ofstream fout("TrainUnitTest_Output.txt");
    fout << "\n--------------------------Don't equal----------------------\n";
    cout << "\nTolerable error: " << delta << endl;
    fout << "Tolerable error: " << delta << endl;
    for (int i = 0; i < partModels.size(); i++)
    {
        for (int r = 0; r < nBins; r++)
            for (int g = 0; g < nBins; g++)
                for (int b = 0; b < nBins; b++)
                {
                    // NOTE(review): reference indexes [b][g][r] while the
                    // detector's is read as [r][g][b] — presumably the two
                    // models store channels in opposite order; confirm.
                    int expected = int(partModels[i].partHistogram[b][g][r]);
                    int actual = int(detector.partModels[i].partHistogram[r][g][b] * detector.partModels[i].sizeFG / KeyframesCount);
                    if (abs(expected - actual) > delta)
                    {
                        cout << "Part[" << i << "]." << "Histogram[" << r << ", " << g << ", " << b << "]: Expected = " << expected << ", Actual = " << actual << endl;
                        fout << "Part[" << i << "]." << "Histogram[" << r << ", " << g << ", " << b << "]: Expected = " << expected << ", Actual = " << actual << endl;
                        // mismatches on a zero-index boundary bin are tolerated
                        if (!(r*g*b == 0)) AllValuesEqual = false;
                    }
                }
    }
    if (AllValuesEqual) fout << "none";
    cout << "Output files: TrainUnitTest_Output.txt, UsedPixels.png\n\n";
    EXPECT_TRUE(AllValuesEqual);

    // Dump both histograms for manual inspection.
    fout << "\n-----------Expected histogram-----------\n";
    fout << "In format:\nHistogramm[r, g, b] = pixelsCount\n";
    for (int i = 0; i < partModels.size(); i++)
    {
        fout << endl << "Rect[" << i << "]:" << endl;
        PutHistogram(fout, partModels[i].partHistogram, 1);
    }
    fout << "\n-----------Actual histogram-----------\n";
    fout << "In format:\nHistogramm[b, g, r] = Histogram[b, g, r]*Part.SizeFG/KeyframesCout\n";
    for (int i = 0; i < detector.partModels.size(); i++)
    {
        fout << endl << "Rect[" << i << "]:" << endl;
        PutHistogram(fout, detector.partModels[i].partHistogram, detector.partModels[i].sizeFG / KeyframesCount);
    }

    // Dump the occlusion lists.
    fout << "\n------------Occluded polygons-----------\nSorted by layer\n";
    for (int i = 0; i < Crossings.size(); i++)
    {
        fout << "\nPolygon[" << i << "] crossed by polygons: ";
        for (int k = 0; k < Crossings[i].size(); k++)
            fout << Crossings[i][k].first << "; ";
        Crossings[i].clear();
    }

    // Cleanup.
    imwrite("UsedPixels.png", image1);
    fout.close();
    frames.clear();
    params.clear();
    Crossings.clear();
    partModels.clear();
    image.release();
    image1.release();
    delete seq;
}
bool MessageTransfer::requiresAccept() const { const framing::MessageTransferBody* b = getFrames().as<framing::MessageTransferBody>(); return b && b->getAcceptMode() == 0/*EXPLICIT == 0*/; }
// Stamp the message's delivery properties with the current wall-clock
// time (the get<>(true) call creates the properties if absent).
void MessageTransfer::setTimestamp()
{
    const time_t now = ::time(0);
    DeliveryProperties* props =
        getFrames().getHeaders()->get<DeliveryProperties>(true);
    props->setTimestamp(now);
}
std::string MessageTransfer::getExchangeName() const { return getFrames().as<framing::MessageTransferBody>()->getDestination(); }