/**
 * Takes ownership of the transport object and wires up lazily-computed
 * views over it: the full-message blob, the payload (meta) blob, and the
 * signature set.  Each lambda is evaluated on first access only.
 */
BlocksQuery::BlocksQuery(BlocksQueryType &&query)
    : CopyableProto(std::forward<BlocksQueryType>(query)),
      // Serialized form of the whole protobuf message.
      blob_{[this] { return makeBlob(*proto_); }},
      // Serialized form of just the meta part (the signable payload).
      payload_{[this] { return makeBlob(proto_->meta()); }},
      // The transport carries at most one signature; expose it as a set.
      signatures_{[this] {
        SignatureSetType<proto::Signature> signatures;
        if (proto_->has_signature()) {
          signatures.emplace(proto_->signature());
        }
        return signatures;
      }} {}
Query::Query(QueryType &&query) : CopyableProto(std::forward<QueryType>(query)), variant_{[this] { auto &&ar = *proto_; int which = ar.payload() .GetDescriptor() ->FindFieldByNumber(ar.payload().query_case()) ->index_in_oneof(); return shared_model::detail:: variant_impl<Query::ProtoQueryListType>::template load< Query::ProtoQueryVariantType>( std::forward<decltype(ar)>(ar), which); }}, ivariant_{detail::makeLazyInitializer( [this] { return QueryVariantType(*Query::variant_); })}, blob_{[this] { return makeBlob(*proto_); }}, payload_{[this] { return makeBlob(proto_->payload()); }}, signatures_{[this] { SignatureSetType<proto::Signature> set; if (proto_->has_signature()) { set.emplace(proto_->signature()); } return set; }} {}
// Returns a vector of rectangles that pass the requirements for faces. vector<ofxCvBlob> JCHaarFinder::getRectsFromImage(ofImage* inputImage) { curImage = inputImage; float scale = 1; float scaleMultiplier = 1.25; blobs.clear(); generateIIArray(); /* * Strategy: Loop through the image such that a window moves over the * picture, checking stages. Once the window has moved through the image * scale it a bit up, and re-run the algorithm. * * The values to be for'ed over are nested as follows: * * the scale of the cascade window * window's position on y axis * window's position on x axis * stages of the cascade * features of the stage * rectangles of the feature * * Since we don't support larger trees than the ones which only have a * root node, we can ignore traversing trees and just consider a list * of rectangles for any one feature. */ // This implementation keeps enlarging the window until it is bigger than the picture. for (scale = 1; scale*casc.height <= curImage->getHeight() || scale*casc.width <= curImage->getWidth(); scale *= scaleMultiplier) { int window_area = scale*casc.height * scale*casc.width; // Keep moving the window down as long as its offset and its height are within the image. Likewise to the left // As an experiment, we're shifting the window in chunks equal to about a tenth of the current window height and width, respectively. 
for (int offsetY = 0; (offsetY+scale*casc.height) < curImage->getHeight(); offsetY += (int) ((scale*casc.height)/10) ) { for (int offsetX = 0; (offsetX + scale*casc.width) < curImage->getWidth(); offsetX += (int) ((scale*casc.width)/10)) { bool passed = true; // *** LOOPING OVER STAGES for (int curStageIdx = 0; curStageIdx < casc.stages.size(); curStageIdx++) { if (passed == false) break; stage* s = &casc.stages.at(curStageIdx); float stage_sum = 0.0; // *** LOOPING OVER FEATURES for (int featureIdx = 0; featureIdx < s->features.size(); featureIdx++) { feature* f = & (s->features.at(featureIdx)); float feature_sum = 0.0; // These coordinates are for the sliding window int px1 = offsetX, py1 = offsetY, px2 = offsetX + scale*casc.width, py2 = offsetY + scale*casc.height; float mean = ((float) (ii(px2, py2) + ii(px1, py1) - ii(px1, py2) - ii(px2, py1))) / window_area; float stddev = sqrt((ii2(px2, py2) + ii2(px1, py1) - ii2(px1, py2) - ii2(px2, py1)) / window_area - mean*mean); // *** LOOPING OVER RECTANGLES for (int rectangleIdx = 0; rectangleIdx < f->rectangles.size(); rectangleIdx++) { featureRect* r = & ( f->rectangles.at(rectangleIdx)); // These coordinates are for the rectangles inside the sliding window. The coordinates // are absolute; i.e. they share the same origin as the coordinates of the window. int x1 = offsetX + (r->rectangle.x * scale), y1 = offsetY + (r->rectangle.y * scale), x2 = x1 + r->rectangle.width * scale, y2 = y1 + r->rectangle.height * scale; int thisRect = (ii(x2, y2) + ii(x1, y1) - ii(x1, y2) - ii(x2, y1)) * r->weight; feature_sum += thisRect; } // Determine in which direction the cascade should "fall". If the feature sum is less than // its threshold, fall left, otherwise right. 
// http://stackoverflow.com/questions/978742/what-do-the-left-and-right-values-mean-in-the-haar-cascade-xml-files if (feature_sum/window_area < f->threshold*stddev) { stage_sum += f->leftVal; } else { stage_sum += f->rightVal; } } // The stage is passed if its sum is above its threshold. passed = (stage_sum > s->threshold); } // passed will be true iff all stages passed; if so, we detected a face. if (passed) { ofLog(OF_LOG_NOTICE, "detected face (maybe)!"); blobs.push_back(makeBlob(offsetX, offsetY, scale*casc.width, scale*casc.height)); } } } } return blobs; }