int Overlay::load_configuration() { KeyFrame *prev_keyframe; prev_keyframe = get_prev_keyframe(get_source_position()); read_data(prev_keyframe); return 0; }
// Render one frame: take the OpenGL path when available, otherwise run the
// threaded software threshold engine.  Always returns 0.
int ThresholdMain::process_buffer(VFrame *frame,
	int64_t start_position,
	double frame_rate)
{
	load_configuration();

// OpenGL is only usable when the histogram plot isn't being drawn in the GUI.
	int opengl_path = get_use_opengl() && (!config.plot || !gui_open());
	read_frame(frame, 0, get_source_position(), get_framerate(), opengl_path);

	if(opengl_path)
		return run_opengl();

// Software path: feed the GUI, then threshold with the worker engine.
	send_render_gui(frame);
	if(!threshold_engine)
		threshold_engine = new ThresholdEngine(this);
	threshold_engine->process_packages(frame);
	return 0;
}
int RGB601Main::load_configuration() { KeyFrame *prev_keyframe; prev_keyframe = get_prev_keyframe(get_source_position()); // Must also switch between interpolation between keyframes and using first keyframe read_data(prev_keyframe); return 1; }
int FreezeFrameMain::load_configuration() { KeyFrame *prev_keyframe = get_prev_keyframe(get_source_position()); int64_t prev_position = edl_to_local(prev_keyframe->position); if(prev_position < get_source_start()) prev_position = get_source_start(); read_data(prev_keyframe); // Invalidate stored frame if(config.enabled) first_frame_position = prev_position; return 0; }
// Read the current frame and flip it horizontally and/or vertically per config.
// Uses the GPU shader when OpenGL compositing is active and at least one axis
// is flipped; otherwise flips in software via FLIP_MACRO.  Always returns 0
// on the software path.
int FlipMain::process_buffer(VFrame *frame, int64_t start_position, double frame_rate)
{
// Loop indices and dimensions used by FLIP_MACRO below — the macro presumably
// references i, j, k, l, w, h by name (TODO confirm against its definition);
// do not rename these locals.
	int i, j, k, l;
	int w = frame->get_w();
	int h = frame->get_h();
	int colormodel = frame->get_color_model();
	load_configuration();
	read_frame(frame, 0, get_source_position(), get_framerate(), get_use_opengl());
	if(get_use_opengl())
	{
// Only run the shader when a flip is actually requested; otherwise the
// frame read above is already the output.
		if(config.flip_vertical || config.flip_horizontal)
			return run_opengl();
		else
			return 0;
	}
// Software path: dispatch on pixel storage (component type x components per pixel).
	switch(colormodel)
	{
		case BC_RGB888:
		case BC_YUV888:
			FLIP_MACRO(unsigned char, 3);
			break;
		case BC_RGB_FLOAT:
			FLIP_MACRO(float, 3);
			break;
		case BC_RGB161616:
		case BC_YUV161616:
			FLIP_MACRO(uint16_t, 3);
			break;
		case BC_RGBA8888:
		case BC_YUVA8888:
			FLIP_MACRO(unsigned char, 4);
			break;
		case BC_RGBA_FLOAT:
			FLIP_MACRO(float, 4);
			break;
		case BC_RGBA16161616:
		case BC_YUVA16161616:
			FLIP_MACRO(uint16_t, 4);
			break;
	}
	return 0;
}
// Read the current frame and sharpen it in place with the threaded
// unsharp-mask engine.  Always returns 0.
int UnsharpMain::process_buffer(VFrame *frame,
	int64_t start_position,
	double frame_rate)
{
	load_configuration();

// Lazily build the engine, sized to the number of processors.
	if(!engine)
	{
		int cpus = get_project_smp() + 1;
		engine = new UnsharpEngine(this, cpus, cpus);
	}

	read_frame(frame, 0, get_source_position(), get_framerate());
	engine->do_unsharp(frame);
	return 0;
}
int DelayAudio::load_configuration() { KeyFrame *prev_keyframe; prev_keyframe = get_prev_keyframe(get_source_position()); DelayAudioConfig old_config; old_config.copy_from(config); read_data(prev_keyframe); if(!old_config.equivalent(config)) { // Reconfigure need_reconfigure = 1; return 1; } return 0; }
// Render one frame through the 3-way color corrector.  Always returns 0.
int Color3WayMain::process_buffer(VFrame *frame,
	int64_t start_position,
	double frame_rate)
{
	need_reconfigure |= load_configuration();

// Lazily create the threaded engine, one worker per processor.
	if(!engine)
		engine = new Color3WayEngine(this,
			PluginClient::smp + 1);

//printf("Color3WayMain::process_realtime 1 %d\n", need_reconfigure);
	if(need_reconfigure)
	{
		reconfigure();
		need_reconfigure = 0;
	}

	read_frame(frame, 0, get_source_position(), get_framerate(), get_use_opengl());

// Query downstream aggregation flags (values currently unused here).
	int interpolate_flag = 0;
	int gamma_flag = 0;
	get_aggregation(&interpolate_flag, &gamma_flag);

	engine->process_packages();
	return 0;
}
int Synth::process_realtime(int64_t size, Samples *input_ptr, Samples *output_ptr) { // sample relative to start of plugin waveform_sample = get_source_position(); need_reconfigure |= load_configuration(); if(need_reconfigure) reconfigure(); double wetness = DB::fromdb(config.wetness); if(EQUIV(config.wetness, INFINITYGAIN)) wetness = 0; // Apply wetness double *output_samples = output_ptr->get_data(); double *input_samples = input_ptr->get_data(); for(int j = 0; j < size; j++) output_samples[j] = input_samples[j] * wetness; // Overlay each frequency for(int j = 0; j < MAX_FREQS; j++) { if(!EQUIV(config.base_freq[j], 0)) { // Compute fragment overlay_synth( config.base_freq[j], size, input_ptr->get_data(), output_ptr->get_data()); //printf("Synth::process_realtime 2\n"); } } // waveform_sample += size; return 0; }
// blobtrack_sample.cpp
// Feed the scene layer into the legacy OpenCV auto blob tracker.  Lazily
// builds the tracker pipeline on first call, converts the scene rectangle to
// an 8-bit RGB IplImage, and runs one tracking step.
// NOTE(review): the visible text ends inside the `#if 0` block below — the
// matching `#endif` and the function's closing brace appear to have been lost
// in extraction; confirm against the original file.
void FindObjectMain::process_blob()
{
	if(!blob_initialized)
	{
		blob_initialized = 1;
// Number of frames used to train the foreground model before tracking starts.
		blob_param.FGTrainFrames = 5;

/* Create FG Detection module: */
		blob_param.pFG = cvCreateFGDetectorBase(CV_BG_MODEL_FGD, NULL);

/* Create Blob Entrance Detection module: */
		blob_param.pBD = cvCreateBlobDetectorCC();

/* Create blob tracker module: */
		blob_param.pBT = cvCreateBlobTrackerCCMSPF();

/* Create whole pipline: */
		blob_pTracker = cvCreateBlobTrackerAuto1(&blob_param);
	}

/* Process: */
	IplImage* pMask = NULL;

// Create aligned, RGB images
	if(!scene_image)
	{
		scene_image = cvCreateImage(
			cvSize(scene_image_w, scene_image_h),
			8,
			3);
	}

// Temporary row pointers
// 3 bytes per pixel; assumes imageData rows are tightly packed — TODO confirm
// against the IplImage widthStep.
	unsigned char **scene_rows = new unsigned char*[scene_image_h];
	for(int i = 0; i < scene_image_h; i++)
	{
		scene_rows[i] = (unsigned char*)(scene_image->imageData + i * scene_image_w * 3);
	}

// Crop/convert the scene rectangle from the input frame into BC_RGB888.
	BC_CModels::transfer(scene_rows,
		get_input(scene_layer)->get_rows(),
		0, 0, 0, 0, 0, 0,
		scene_x1, scene_y1, scene_w, scene_h,
		0, 0, scene_w, scene_h,
		get_input(scene_layer)->get_color_model(),
		BC_RGB888,
		0, 0, 0);
	delete [] scene_rows;

// One tracking step (no external mask supplied).
	blob_pTracker->Process(scene_image, pMask);

printf("FindObjectMain::process_blob %d %ld %d\n", __LINE__, get_source_position(), blob_pTracker->GetBlobNum());

// Disabled: paint the tracker's foreground mask back onto the input frame.
#if 0
	if(blob_pTracker->GetFGMask())
	{
		IplImage* pFG = blob_pTracker->GetFGMask();
printf("FindObjectMain::process_blob %d %ld\n", __LINE__, get_source_position());

// Temporary row pointers
		unsigned char **mask_rows = new unsigned char*[scene_image_h];
		for(int i = 0; i < scene_image_h; i++)
		{
			mask_rows[i] = (unsigned char*)(pFG->imageData + i * scene_image_w);
		}

// Apply the mask per scanline, dispatching on the input color model
// (APPLY_MASK args: component type, max value, components, is_yuv).
		for(int i = 0; i < scene_image_h; i++)
		{
			switch(get_input(scene_layer)->get_color_model())
			{
				case BC_RGB888:
					APPLY_MASK(unsigned char, 0xff, 3, 0)
					break;
				case BC_RGB_FLOAT:
					APPLY_MASK(float, 1.0, 3, 0)
					break;
				case BC_YUV888:
					APPLY_MASK(unsigned char, 0xff, 3, 1)
					break;
				case BC_RGBA8888:
					APPLY_MASK(unsigned char, 0xff, 4, 0)
					break;
				case BC_RGBA_FLOAT:
					APPLY_MASK(float, 1.0, 4, 0)
					break;
				case BC_YUVA8888:
					APPLY_MASK(unsigned char, 0xff, 4, 1)
					break;
			}
		}
		delete [] mask_rows;
	}
int DeInterlaceMain::process_buffer(VFrame *frame, int64_t start_position, double frame_rate) { changed_rows = frame->get_h(); load_configuration(); read_frame(frame, 0, start_position, frame_rate); // Temp was used for adaptive deinterlacing where it took deinterlacing // an entire frame to decide if the deinterlaced output should be used. temp = frame; // if(!temp) // temp = new VFrame( // frame->get_w(), // frame->get_h(), // frame->get_color_model()); if(!temp_prevframe) temp_prevframe = new VFrame( frame->get_w(), frame->get_h(), frame->get_color_model()); switch(config.mode) { case DEINTERLACE_NONE: // output->copy_from(input); break; case DEINTERLACE_KEEP: deinterlace_top(frame, frame, config.dominance); break; case DEINTERLACE_AVG: deinterlace_avg(frame, frame); break; case DEINTERLACE_AVG_1F: deinterlace_avg_top(frame, frame, config.dominance); break; case DEINTERLACE_SWAP: deinterlace_swap(frame, frame, config.dominance); break; case DEINTERLACE_BOBWEAVE: if (get_source_position()==0) read_frame(temp_prevframe,0, get_source_position(), get_framerate()); else read_frame(temp_prevframe,0, get_source_position()-1, get_framerate()); deinterlace_bobweave(frame, temp_prevframe, frame, config.dominance); break; case DEINTERLACE_TEMPORALSWAP: if (get_source_position()==0) read_frame(temp_prevframe,0, get_source_position(), get_framerate()); else read_frame(temp_prevframe,0, get_source_position()-1, get_framerate()); deinterlace_temporalswap(frame, temp_prevframe, frame, config.dominance); break; } send_render_gui(&changed_rows); return 0; }
int ShapeWipeMain::load_configuration() { read_data(get_prev_keyframe(get_source_position())); return 1; }
// Load settings from the previous keyframe and compute the averaging window
// placement (restartoffset) by scanning for "restart" keyframes within
// config.frames of the current position in both directions.  Returns nonzero
// when the loaded settings differ from the previous ones.
int SelTempAvgMain::load_configuration()
{
	KeyFrame *prev_keyframe;
	KeyFrame *temp_keyframe;

// Snapshot for change detection at the end.
	SelTempAvgConfig old_config;
	old_config.copy_from(&config);

	int64_t curpos = get_source_position();
	prev_keyframe = get_prev_keyframe(curpos);
	read_data(prev_keyframe);

// Flag whether we are sitting exactly on a keyframe.
	if (curpos == prev_keyframe->position)
		onakeyframe = 1;
	else
		onakeyframe = 0;

// Defaults: assume no restart keyframe within one window in either direction.
	int64_t next_restart_keyframe = curpos + config.frames;
	int64_t prev_restart_keyframe = curpos - config.frames;

// Scan forward up to one window for the nearest restart keyframe.
// NOTE(review): the loop index is `int` but positions are int64_t — a
// narrowing that could overflow on very long timelines; confirm acceptable.
// Setting i past the bound is how the loop terminates early; skipping i
// ahead to temp_keyframe->position jumps over keyframe-free stretches.
	for (int i = curpos; i < curpos + config.frames; i++)
	{
		temp_keyframe = get_next_keyframe(i);
		if ( (temp_keyframe->position < curpos + config.frames/2)
			&& (temp_keyframe->position > curpos)
			&& nextkeyframeisoffsetrestart(temp_keyframe) )
		{
			next_restart_keyframe = temp_keyframe->position;
			i = curpos + config.frames;
		}
		else if (temp_keyframe->position > i)
			i = temp_keyframe->position;
	}

// Same scan, backward.
	for (int i = curpos; i > curpos - config.frames; i--)
	{
		temp_keyframe = get_prev_keyframe(i);
		if ( (temp_keyframe->position > curpos - config.frames/2)
			&& (temp_keyframe->position < curpos)
			&& nextkeyframeisoffsetrestart(temp_keyframe) )
		{
			prev_restart_keyframe = temp_keyframe->position;
			i = curpos - config.frames;
		}
		else if (temp_keyframe->position < i)
			i = temp_keyframe->position;
	}

// Default: center the averaging window on the current position.
	restartoffset = -config.frames/2;

	if (onakeyframe && config.offset_restartmarker_keyframe)
		restartoffset = 0;
	else if ((curpos - prev_restart_keyframe) < config.frames/2)
		restartoffset = prev_restart_keyframe - curpos;
	else if ((next_restart_keyframe - curpos) < config.frames/2)
	{
		restartoffset = (next_restart_keyframe - curpos) - config.frames;
		// Probably should put another if in here, (when two "restart" keyframes are close together
	}

	return !old_config.equivalent(&config);
}
// Load settings from the surrounding keyframes and derive the frame range
// [range_start, range_end] to interpolate over, plus the effective input
// frame rate (active_input_rate).  In keyframe mode the range is bounded by
// the neighboring keyframes (or the effect edges when both map to 0); in
// frame-rate mode it is one input-rate frame around the current position.
// Returns nonzero when the loaded settings differ from the previous ones.
int InterpolateVideo::load_configuration()
{
	KeyFrame *prev_keyframe, *next_keyframe;
	InterpolateVideoConfig old_config;
	old_config.copy_from(&config);

	next_keyframe = get_next_keyframe(get_source_position());
	prev_keyframe = get_prev_keyframe(get_source_position());
// Previous keyframe stays in config object.
	read_data(prev_keyframe);

	int64_t prev_position = edl_to_local(prev_keyframe->position);
	int64_t next_position = edl_to_local(next_keyframe->position);
// Both at 0 means no keyframes exist; fall back to the effect start.
	if(prev_position == 0 && next_position == 0)
	{
		next_position = prev_position = get_source_start();
	}

// printf("InterpolateVideo::load_configuration 1 %lld %lld %lld %lld\n",
// prev_keyframe->position,
// next_keyframe->position,
// prev_position,
// next_position);

// Get range to average in requested rate
	range_start = prev_position;
	range_end = next_position;

// Use keyframes to determine range
	if(config.use_keyframes)
	{
		active_input_rate = get_framerate();
// Between keyframe and edge of range or no keyframes
		if(range_start == range_end)
		{
// Between first keyframe and start of effect
			if(get_source_position() >= get_source_start() &&
				get_source_position() < range_start)
			{
				range_start = get_source_start();
			}
			else
// Between last keyframe and end of effect
			if(get_source_position() >= range_start &&
				get_source_position() < get_source_start() + get_total_len())
			{
// Last frame should be inclusive of current effect
				range_end = get_source_start() + get_total_len() - 1;
			}
			else
			{
// Should never get here
				;
			}
		}

// Make requested rate equal to input rate for this mode.
// Convert requested rate to input rate
// printf("InterpolateVideo::load_configuration 2 %lld %lld %f %f\n",
// range_start,
// range_end,
// get_framerate(),
// config.input_rate);
// range_start = (int64_t)((double)range_start / get_framerate() * active_input_rate + 0.5);
// range_end = (int64_t)((double)range_end / get_framerate() * active_input_rate + 0.5);
	}
	else
// Use frame rate
	{
		active_input_rate = config.input_rate;
// Convert to input frame rate
		range_start = (int64_t)(get_source_position() / get_framerate() * active_input_rate);
		range_end = (int64_t)(get_source_position() / get_framerate() * active_input_rate) + 1;
	}

// printf("InterpolateVideo::load_configuration 1 %lld %lld %lld %lld %lld %lld\n",
// prev_keyframe->position,
// next_keyframe->position,
// prev_position,
// next_position,
// range_start,
// range_end);

	return !config.equivalent(&old_config);
}
int ColorBalanceMain::process_buffer(VFrame *frame, int64_t start_position, double frame_rate) { need_reconfigure |= load_configuration(); //printf("ColorBalanceMain::process_realtime 1 %d\n", need_reconfigure); if(need_reconfigure) { if(!engine) { total_engines = PluginClient::smp + 1; engine = new ColorBalanceEngine*[total_engines]; for(int i = 0; i < total_engines; i++) { engine[i] = new ColorBalanceEngine(this); engine[i]->start(); } } reconfigure(); need_reconfigure = 0; } frame->get_params()->update("COLORBALANCE_PRESERVE", config.preserve); frame->get_params()->update("COLORBALANCE_CYAN", calculate_transfer(config.cyan)); frame->get_params()->update("COLORBALANCE_MAGENTA", calculate_transfer(config.magenta)); frame->get_params()->update("COLORBALANCE_YELLOW", calculate_transfer(config.yellow)); read_frame(frame, 0, get_source_position(), get_framerate(), get_use_opengl()); int aggregate_interpolate = 0; int aggregate_gamma = 0; get_aggregation(&aggregate_interpolate, &aggregate_gamma); if(!EQUIV(config.cyan, 0) || !EQUIV(config.magenta, 0) || !EQUIV(config.yellow, 0) || (get_use_opengl() && (aggregate_interpolate || aggregate_gamma))) { if(get_use_opengl()) { //get_output()->dump_stacks(); // Aggregate if(next_effect_is("Histogram")) return 0; return run_opengl(); } for(int i = 0; i < total_engines; i++) { engine[i]->start_process_frame(frame, frame, frame->get_h() * i / total_engines, frame->get_h() * (i + 1) / total_engines); } for(int i = 0; i < total_engines; i++) { engine[i]->wait_process_frame(); } } return 0; }