// VCCS (voltage-controlled current source) startup: registers the device
// parameters and terminals, then wires up the terminal cross-references
// that the matrix solver walks.
//
// def_RI: default internal resistance used when the netlist does not
//         override the "RI" parameter.
void NETLIB_NAME(VCCS)::start_internal(const nl_double def_RI)
{
    // Transconductance G and internal resistance RI.
    register_param("G", m_G, 1.0);
    register_param("RI", m_RI, def_RI);

    // Input pair (IP/IN), output pair (OP/ON), plus two internal helper
    // terminals (_OP1/_ON1) that carry the dependence on the negative input.
    enregister("IP", m_IP);
    enregister("IN", m_IN);
    enregister("OP", m_OP);
    enregister("ON", m_ON);
    enregister("_OP1", m_OP1);
    enregister("_ON1", m_ON1);

    // Each terminal's m_otherterm names the terminal it is coupled to in
    // the solver matrix.
    m_IP.m_otherterm = &m_IN; // <= this should be NULL and terminal be filtered out prior to solving...
    m_IN.m_otherterm = &m_IP; // <= this should be NULL and terminal be filtered out prior to solving...

    // Outputs couple to the positive input; the helper terminals couple to
    // the negative input.
    m_OP.m_otherterm = &m_IP;
    m_OP1.m_otherterm = &m_IN;

    m_ON.m_otherterm = &m_IP;
    m_ON1.m_otherterm = &m_IN;

    // Electrically tie each primary output to its helper terminal.
    connect_late(m_OP, m_OP1);
    connect_late(m_ON, m_ON1);
}
// Initializes the layer: reads configuration options, sizes the per-example
// variational parameter matrices, and registers each matrix together with
// its score function.
//
// deserialize: forwarded to register_param(); presumably tells it to restore
//              previously saved parameter values instead of re-initializing
//              them — TODO confirm against register_param's implementation.
void init(bool deserialize)
{
    LOG(debug) << "global.min_gamma_shape=" << options.get<double>("global.min_gamma_shape");
    layer_size = options.get<int>("layer.size");
    lf = get_link_function(options.get<string>("lf"));
    min_gamma_sample = options.get<double>("global.min_gamma_sample");

    // One (shape, scale) entry per layer unit (row) and example (column).
    wshape.set_size(layer_size, n_examples);
    wscale.set_size(layer_size, n_examples);

    // Score function for the shape parameter at (i,j): lf->f maps the stored
    // unconstrained value to the positive Gamma parameter; the bracketed term
    // matches d/d(shape) of the Gamma log-density (gsl_sf_psi is digamma),
    // and lf->g(shape0) is presumably the chain-rule factor d f/d shape0 —
    // TODO confirm lf->g semantics. Captured by value ([=]), which copies
    // `this` — the lambda reads members when invoked later.
    ScoreFunction score_shape = [=](double z, arma::uword i, arma::uword j) {
        auto shape0 = wshape(i,j);
        auto shape = lf->f(shape0);
        auto scale = lf->f(wscale(i,j));
        return lf->g(shape0) * (- gsl_sf_psi(shape) - log(scale) + log(z));
    };
    register_param(&wshape, score_shape, deserialize);

    // Score function for the scale parameter, chained the same way:
    // d/d(scale) of the Gamma log-density times the link derivative.
    ScoreFunction score_scale = [=](double z, arma::uword i, arma::uword j) {
        auto shape = lf->f(wshape(i,j));
        auto scale0 = wscale(i,j);
        auto scale = lf->f(scale0);
        return lf->g(scale0) * (- shape/scale + z/scale/scale);
    };
    register_param(&wscale, score_scale, deserialize);
}
// Timeout indicator: registers the frei0r parameters and derives the
// indicator's size and position from the frame dimensions.
Timeout(unsigned int width, unsigned int height)
{
    register_param(m_time, "time", "Current time");
    register_param(m_color, "color", "Indicator colour");
    register_param(m_transparency, "transparency", "Indicator transparency");

    // The indicator is a square whose side is 1/20 of the smaller frame
    // dimension, anchored towards the bottom-right corner of the frame.
    const unsigned int side = std::min(width, height) / 20;
    W = side;
    H = side;
    x0 = width - 2 * side;
    y0 = height - side;
}
// FaceBl0r frei0r filter: obscures a detected face. The constructor only
// zeroes the OpenCV detection state and registers the frei0r parameters;
// the cascade/storage are loaded lazily elsewhere.
//
// NOTE(review): all numeric parameters are stored scaled into frei0r's
// [0,1] range and divided back out where used — see each description.
// The order of register_param calls defines the frei0r parameter indices.
FaceBl0r::FaceBl0r(int wdt, int hgt)
{
    // Detection state, created on first use.
    face_rect = 0;
    image = 0;
    tracked_obj = 0;
    face_found = 0;
    cascade = 0;
    storage = 0;

    classifier = "/usr/share/opencv/haarcascades/haarcascade_frontalface_default.xml";
    register_param(classifier, "Classifier", "Full path to the XML pattern model for recognition; look in /usr/share/opencv/haarcascades");
    ellipse = false;
    register_param(ellipse, "Ellipse", "Draw a red ellipse around the object");
    recheck = 0.025;
    // Countdown (in frames) until the next full detection pass.
    face_notfound = cvRound(recheck * 1000);
    register_param(recheck, "Recheck", "How often to detect an object in number of frames, divided by 1000");
    threads = 0.01; //number of CPUs
    register_param(threads, "Threads", "How many threads to use divided by 100; 0 uses CPU count");
    search_scale = 0.12; // increase size of search window by 20% on each pass
    register_param(search_scale, "Search scale", "The search window scale factor, divided by 10");
    neighbors = 0.02; // require 2 neighbors
    register_param(neighbors, "Neighbors", "Minimum number of rectangles that makes up an object, divided by 100");
    smallest = 0.0; // smallest window size is trained default
    register_param(smallest, "Smallest", "Minimum window size in pixels, divided by 1000");
    largest = 0.0500; // largest object size shown is 500 px
    register_param(largest, "Largest", "Maximum object size in pixels, divided by 10000");
}
Vignette(unsigned int width, unsigned int height) : m_width(width), m_height(height) { register_param(m_aspect, "aspect", "Aspect ratio"); register_param(m_cc, "clearCenter", "Size of the unaffected center"); register_param(m_soft, "soft", "Softness"); // Suggested default values m_aspect = .5; m_cc = 0; m_soft = .6; m_initialized = width*height > 0; if (m_initialized) { m_vignette = new float[width*height]; updateVignette(); } }
SOPSat(unsigned int, unsigned int) { register_param(rSlope, "rSlope", "Slope of the red color component"); register_param(gSlope, "gSlope", "Slope of the green color component"); register_param(bSlope, "bSlope", "Slope of the blue color component"); register_param(aSlope, "aSlope", "Slope of the alpha component"); register_param(rOffset, "rOffset", "Offset of the red color component"); register_param(gOffset, "gOffset", "Offset of the green color component"); register_param(bOffset, "bOffset", "Offset of the blue color component"); register_param(aOffset, "aOffset", "Offset of the alpha component"); register_param(rPower, "rPower", "Power (Gamma) of the red color component"); register_param(gPower, "gPower", "Power (Gamma) of the green color component"); register_param(bPower, "bPower", "Power (Gamma) of the blue color component"); register_param(aPower, "aPower", "Power (Gamma) of the alpha component"); register_param(saturation, "saturation", "Overall saturation"); rSlope = 1; gSlope = 1; bSlope = 1; aSlope = 1; rOffset = 0; gOffset = 0; bOffset = 0; aOffset = 0; rPower = 1; gPower = 1; bPower = 1; aPower = 1; saturation = 200; // Pre-build the lookup table. // For 1080p, rendering a 5-second video took // * 37 s without the LUT // * 7 s with the LUT // * 5 s without any effect applied (plain rendering). // So the LUT brings about 15x speedup. m_lutR = (unsigned char *) malloc(256*sizeof(char)); m_lutG = (unsigned char *) malloc(256*sizeof(char)); m_lutB = (unsigned char *) malloc(256*sizeof(char)); m_lutA = (unsigned char *) malloc(256*sizeof(char)); updateLUT(); }
// xfade0r frei0r mixer: cross-fades between two inputs according to the
// "fader" position. The frame dimensions are unused here.
xfade0r(unsigned int width, unsigned int height)
{
    // Default fader position.
    fader = 0.0;
    register_param(fader,"fader","the fader position");
}
// delay0r frei0r filter: delays the video stream by "DelayTime". The frame
// dimensions are unused here.
delay0r(unsigned int width, unsigned int height)
{
    // Default: no delay.
    delay = 0.0;
    register_param(delay,"DelayTime","the delay time");
}
FaceDetect(int width, int height) : image(0) , count(0) , objects(0) , storage(0) , cascade(0) { roi.width = roi.height = 0; register_param("/usr/share/opencv/haarcascades/haarcascade_frontalface_default.xml", "Classifier", "Full path to the XML pattern model for recognition; look in /usr/share/opencv/haarcascades"); threads = 0.01; //number of CPUs register_param(threads, "Threads", "How many threads to use divided by 100; 0 uses CPU count"); shape = 0.0; register_param(shape, "Shape", "The shape to draw: 0=circle, 0.1=ellipse, 0.2=rectangle, 1=random"); recheck = 0.025; register_param(recheck, "Recheck", "How often to detect an object in number of frames, divided by 1000"); search_scale = 0.12; // increase size of search window by 20% on each pass register_param(search_scale, "Search scale", "The search window scale factor, divided by 10"); neighbors = 0.02; // require 2 neighbors register_param(neighbors, "Neighbors", "Minimum number of rectangles that makes up an object, divided by 100"); smallest = 0.0; // smallest window size is trained default register_param(smallest, "Smallest", "Minimum window size in pixels, divided by 1000"); scale = 1.0 / 1.5; register_param(scale, "Scale", "Down scale the image prior detection"); stroke = CV_FILLED; register_param(stroke, "Stroke", "Line width, divided by 100, or fill if 0"); antialias = false; register_param(antialias, "Antialias", "Draw with antialiasing"); alpha = 1.0; register_param(alpha, "Alpha", "The alpha channel value for the shapes"); f0r_param_color color0 = {1.0, 1.0, 1.0}; color[0] = color0; register_param(color[0], "Color 1", "The color of the first object"); f0r_param_color color1 = {0.0, 0.5, 1.0}; color[1] = color1; register_param(color[0], "Color 2", "The color of the second object"); f0r_param_color color2 = {0.0, 1.0, 1.0}; color[2] = color2; register_param(color[0], "Color 3", "The color of the third object"); f0r_param_color color3 = {0.0, 1.0, 0.0}; color[3] = color3; register_param(color[0], 
"Color 4", "The color of the fourth object"); f0r_param_color color4 = {1.0, 0.5, 0.0}; color[4] = color4; register_param(color[0], "Color 5", "The color of the fifth object"); srand(::time(NULL)); }
// onecol0r frei0r source: fills the frame with a single color. The frame
// dimensions are unused here.
onecol0r(unsigned int width, unsigned int height)
{
    register_param(color,"Color","the color of the image");
}