/* rgb_pixels_to_CVC
 * Convert a block of scaled-integer RGB pixels into component-video form:
 * a per-pixel luminance (Y) array plus the block's average Pb and Pr.
 *
 * block       : UArray_T of Pnm_rgb pixels
 * denominator : maxval used to normalize each channel into [0, 1]
 *
 * Returns a heap-allocated CVC the caller owns; aborts via assert on
 * allocation failure.
 */
CVC *rgb_pixels_to_CVC(UArray_T block, int denominator)
{
    int count = UArray_length(block);

    /* Allocate the result and its luminance array up front. */
    CVC *result = malloc(sizeof(struct CVC));
    assert(result);
    result->Y = malloc(sizeof(Lum_vals) * count);
    assert(result->Y);
    result->num_vals = count;

    float pb_sum = 0.0;
    float pr_sum = 0.0;

    for (int idx = 0; idx < count; idx++) {
        Pnm_rgb raw = (Pnm_rgb)UArray_at(block, idx);
        Pnm_rgb_float scaled = normalize_pixel(raw, denominator);

        result->Y[idx] = get_Y(scaled);
        pb_sum += get_Pb(scaled);
        pr_sum += get_Pr(scaled);

        /* normalize_pixel returns a heap temporary we must release */
        free(scaled);
    }

    result->avg_Pb = pb_sum / (float)count;
    result->avg_Pr = pr_sum / (float)count;
    return result;
}
/*! Print to stdout the values of the current visual feature \f$ s \f$. \param select : Selection of a subset of the possible 3D point feature coordinates. - To print all the three coordinates used as features use vpBasicFeature::FEATURE_ALL. - To print only one of the coordinate feature \f$(X,Y,Z)\f$ use one of the corresponding function selectX(), selectX() or selectZ(). \code vpPoint point; // Creation of the current feature s vpFeaturePoint3D s; s.buildFrom(point); s.print(); // print all the 3 components of the translation feature s.print(vpBasicFeature::FEATURE_ALL); // same behavior then previous line s.print(vpFeaturePoint3D::selectZ()); // print only the Z component \endcode */ void vpFeaturePoint3D::print(const unsigned int select ) const { std::cout <<"Point3D: " ; if (vpFeaturePoint3D::selectX() & select ) std::cout << " X=" << get_X() ; if (vpFeaturePoint3D::selectY() & select ) std::cout << " Y=" << get_Y() ; if (vpFeaturePoint3D::selectZ() & select ) std::cout << " Z=" << get_Z() ; std::cout <<std::endl ; }
/* filter_get_image
 * "Charcoal" effect on a yuv422 frame: runs a Sobel edge detector over the
 * luma plane (sampled at a configurable x/y scatter), maps the gradient
 * magnitude into the broadcast-safe luma range [16, 235] (inverted for the
 * classic charcoal look unless "invert" is set), and blends chroma toward
 * neutral (128) by "mix".
 *
 * Returns the error code from mlt_frame_get_image; on success *image points
 * at a newly pool-allocated frame owned by `frame`.
 */
static int filter_get_image( mlt_frame frame, uint8_t **image, mlt_image_format *format, int *width, int *height, int writable )
{
	// Get the filter
	mlt_filter filter = mlt_frame_pop_service( frame );
	mlt_properties properties = MLT_FILTER_PROPERTIES( filter );
	mlt_position position = mlt_filter_get_position( filter, frame );
	mlt_position length = mlt_filter_get_length2( filter, frame );

	// Get the image in packed yuv422 (2 bytes per pixel: Y then U/V)
	*format = mlt_image_yuv422;
	int error = mlt_frame_get_image( frame, image, format, width, height, 1 );

	// Only process if we have no error and a valid colour space
	if ( error == 0 )
	{
		// Get the animated filter parameters for this position
		int x_scatter = mlt_properties_anim_get_double( properties, "x_scatter", position, length );
		int y_scatter = mlt_properties_anim_get_double( properties, "y_scatter", position, length );
		float scale = mlt_properties_anim_get_double( properties, "scale", position, length );
		float mix = mlt_properties_anim_get_double( properties, "mix", position, length );
		int invert = mlt_properties_anim_get_int( properties, "invert", position, length );

		// We'll process pixel by pixel
		int x = 0;
		int y = 0;

		// We need to create a new frame as this effect modifies the input
		uint8_t *temp = mlt_pool_alloc( *width * *height * 2 );
		uint8_t *p = temp;
		uint8_t *q = *image;

		// Calculations are carried out on a 3x3 matrix (centre sample unused)
		int matrix[ 3 ][ 3 ];

		// Used to carry out the matrix calculations
		int sum1;
		int sum2;
		float sum;
		int val;

		// Loop for each row
		for ( y = 0; y < *height; y ++ )
		{
			// Loop for each pixel
			for ( x = 0; x < *width; x ++ )
			{
				// Populate the Sobel neighbourhood, scattered by x/y_scatter
				matrix[ 0 ][ 0 ] = get_Y( *image, *width, *height, x - x_scatter, y - y_scatter );
				matrix[ 0 ][ 1 ] = get_Y( *image, *width, *height, x, y - y_scatter );
				matrix[ 0 ][ 2 ] = get_Y( *image, *width, *height, x + x_scatter, y - y_scatter );
				matrix[ 1 ][ 0 ] = get_Y( *image, *width, *height, x - x_scatter, y );
				matrix[ 1 ][ 2 ] = get_Y( *image, *width, *height, x + x_scatter, y );
				matrix[ 2 ][ 0 ] = get_Y( *image, *width, *height, x - x_scatter, y + y_scatter );
				matrix[ 2 ][ 1 ] = get_Y( *image, *width, *height, x, y + y_scatter );
				matrix[ 2 ][ 2 ] = get_Y( *image, *width, *height, x + x_scatter, y + y_scatter );

				// Sobel gradients.
				// FIX: the last term of the vertical gradient was
				// (matrix[2][2] - matrix[2][0]) — a copy-paste of sum2's
				// horizontal term. Sobel Gy is (bottom row - top row), so the
				// right-column contribution is (matrix[2][2] - matrix[0][2]).
				sum1 = ( matrix[ 2 ][ 0 ] - matrix[ 0 ][ 0 ] ) + ( ( matrix[ 2 ][ 1 ] - matrix[ 0 ][ 1 ] ) << 1 ) + ( matrix[ 2 ][ 2 ] - matrix[ 0 ][ 2 ] );
				sum2 = ( matrix[ 0 ][ 2 ] - matrix[ 0 ][ 0 ] ) + ( ( matrix[ 1 ][ 2 ] - matrix[ 1 ][ 0 ] ) << 1 ) + ( matrix[ 2 ][ 2 ] - matrix[ 2 ][ 0 ] );
				sum = scale * sqrti( sum1 * sum1 + sum2 * sum2 );

				// Luma: invert edge magnitude inside [16,235] for the charcoal
				// look (251 - sum == 235 - (sum - 16)); clamp otherwise.
				*p ++ = !invert ? ( sum >= 16 && sum <= 235 ? 251 - sum : sum < 16 ? 235 : 16 ) : ( sum >= 16 && sum <= 235 ? sum : sum < 16 ? 16 : 235 );
				q ++;

				// Chroma: fade toward neutral grey by "mix", clamp to [16,240]
				val = 128 + mix * ( *q ++ - 128 );
				val = val < 16 ? 16 : val > 240 ? 240 : val;
				*p ++ = val;
			}
		}

		// Return the created image
		*image = temp;

		// Store new and destroy old
		mlt_frame_set_image( frame, *image, *width * *height * 2, mlt_pool_release );
	}

	return error;
}
/*!
  Compute and return the interaction matrix \f$ L \f$ associated to a subset
  of the possible 3D point features \f$(X,Y,Z)\f$ expressed in the camera
  frame:

  \f[
  L = \left[
  \begin{array}{rrrrrr}
  -1 &  0 &  0 &  0 & -Z &  Y \\
   0 & -1 &  0 &  Z &  0 & -X \\
   0 &  0 & -1 & -Y &  X &  0 \\
  \end{array}
  \right]
  \f]

  \param select : Selection of a subset of the possible 3D point coordinate
  features.
  - vpBasicFeature::FEATURE_ALL selects all three rows, giving a
    \f$ [3 \times 6] \f$ matrix.
  - selectX(), selectY() or selectZ() (possibly or-ed together) select the
    corresponding \f$ [1 \times 6] \f$ row(s), stacked in X, Y, Z order.

  \return The interaction matrix computed from the selected 3D point
  coordinate features.

  \code
  vpPoint point;
  vpFeaturePoint3D s;
  s.buildFrom(point);
  vpMatrix L_X   = s.interaction( vpFeaturePoint3D::selectX() );
  vpMatrix L_XY  = s.interaction( vpFeaturePoint3D::selectX() | vpFeaturePoint3D::selectY() );
  vpMatrix L_XYZ = s.interaction( vpBasicFeature::FEATURE_ALL );
  \endcode
*/
vpMatrix vpFeaturePoint3D::interaction(const unsigned int select)
{
  vpMatrix L;
  L.resize(0, 6);

  // When the feature memory is user-managed, warn about any coordinate
  // that is used before having been set.
  if (deallocate == vpBasicFeature::user) {
    for (unsigned int i = 0; i < nbParameters; i++) {
      if (flags[i] == false) {
        switch (i) {
        case 0:
          vpTRACE("Warning !!! The interaction matrix is computed but X was not set yet");
          break;
        case 1:
          vpTRACE("Warning !!! The interaction matrix is computed but Y was not set yet");
          break;
        case 2:
          vpTRACE("Warning !!! The interaction matrix is computed but Z was not set yet");
          break;
        default:
          vpTRACE("Problem during the reading of the variable flags");
        }
      }
    }
    resetFlags();
  }

  const double X = get_X();
  const double Y = get_Y();
  const double Z = get_Z();

  // Row for s = X : [-1 0 0 0 -Z Y]
  if (vpFeaturePoint3D::selectX() & select) {
    vpMatrix row(1, 6);
    row = 0;
    row[0][0] = -1;
    row[0][1] = 0;
    row[0][2] = 0;
    row[0][3] = 0;
    row[0][4] = -Z;
    row[0][5] = Y;
    L = vpMatrix::stackMatrices(L, row);
  }

  // Row for s = Y : [0 -1 0 Z 0 -X]
  if (vpFeaturePoint3D::selectY() & select) {
    vpMatrix row(1, 6);
    row = 0;
    row[0][0] = 0;
    row[0][1] = -1;
    row[0][2] = 0;
    row[0][3] = Z;
    row[0][4] = 0;
    row[0][5] = -X;
    L = vpMatrix::stackMatrices(L, row);
  }

  // Row for s = Z : [0 0 -1 -Y X 0]
  if (vpFeaturePoint3D::selectZ() & select) {
    vpMatrix row(1, 6);
    row = 0;
    row[0][0] = 0;
    row[0][1] = 0;
    row[0][2] = -1;
    row[0][3] = -Y;
    row[0][4] = X;
    row[0][5] = 0;
    L = vpMatrix::stackMatrices(L, row);
  }

  return L;
}
void main() { unsigned int x1, x2; unsigned int y1, y2; unsigned int color = White; TFT_Initial(); CLR_Screen(Black); for(x1 = 0; x1 < 40; x1 ++) for(y1 = 0; y1 < 40; y1 ++) Put_pixel(x1, y1, Blue); for(x1 = 0; x1 < 40; x1 ++) for(y1 = 40; y1 < 80; y1 ++) Put_pixel(x1, y1, White); for(x1 = 0; x1 < 40; x1 ++) for(y1 = 80; y1 < 120; y1 ++) Put_pixel(x1, y1, Red); for(x1 = 0; x1 < 40; x1 ++) for(y1 = 120; y1 < 160; y1 ++) Put_pixel(x1, y1, Magenta); for(x1 = 0; x1 < 40; x1 ++) for(y1 = 160; y1 < 200; y1 ++) Put_pixel(x1, y1, Green); for(x1 = 0; x1 < 40; x1 ++) for(y1 = 200; y1 < 240; y1 ++) Put_pixel(x1, y1, Cyan); for(x1 = 0; x1 < 40; x1 ++) for(y1 = 240; y1 < 280; y1 ++) Put_pixel(x1, y1, Yellow); for(y1 = 0; y1<320; y1 ++) { Put_pixel(42, y1, 0x0FF2); Put_pixel(43, y1, 0xD621); } while(1) { if(!Penirq) { x1 = get_X(); x2 = get_X(); y1 = get_Y(); y2 = get_Y(); if(abs(x1-x2)<2 && abs(y1-y2)<2) { x1 = (x1+x2)/2; y1 = (y1+y2)/2; y1 = 320 - y1; if(x1 < 41) { if(y1<41) color = Blue; else if(y1<81) color = White; else if(y1<121) color = Red; else if(y1<161) color = Magenta; else if(y1<201) color = Green; else if(y1<241) color = Cyan; else if(y1<281) color = Yellow; else { LCD_SetPos(44, 240, 0, 320); for (y1 = 0; y1 < 320; y1 ++) { for (x1 = 44; x1 < 240; x1 ++) Write_Data_U16(Black); } } } else { Put_pixel(x1, y1, color); } } } } }
// TODO wirite shot boundary info to files // 1. deal m_curr_frame_ts // 2. timediff of two video file is large int video_split_processor::video_split(FILE *fp, off_t pos, video_file_info *p_info, \ live_timeval &task_begin_time, live_timeval &task_end_time) { off_t len = p_info->width * p_info->height * 2; size_t mapped_size = 0; unsigned char *mapped_buffer = map_file(fp, len * (int)p_info->frame_count, \ mapped_size); int fts = 1000 / p_info->frame_rate; write_yuv_file(mapped_buffer, mapped_size, p_info); if (timevaldiff(m_prev_end_time, p_info->begin_time) < fts) { unsigned char frame_buf[len]; memset(frame_buf, 0, len); int y_size = p_info->width * p_info->height; unsigned char *ybuffer = (unsigned char*)malloc(y_size); memset(ybuffer, 0, y_size); for (int i = 0; i < p_info->frame_count; ++i) { memcpy(frame_buf, mapped_buffer + i * len, len); //posix_memalign((void **)ybuffer, 32, y_size); get_Y(p_info->color_type, frame_buf, ybuffer, y_size); // first time detect invoke restart after init if (!mb_initialized) { // 1. init video_split_processor // 2. 
create a new yuv file memcpy(&m_shot_begin_time, &p_info->begin_time, sizeof(live_timeval)); mp_sd = new shot_detector((uint32_t)p_info->width, \ (uint32_t)p_info->height, 4, 4, m_cfg); memset(m_tmp_video, 0, FILE_NAME_LEN); // shot info file char shot_file[FILE_NAME_LEN]; memset(shot_file, 0, FILE_NAME_LEN); sprintf(shot_file, "%s/%lld%03lld.%lld%03lld.xml", m_shot_path, \ task_begin_time.tv_sec, task_begin_time.tv_usec /1000, task_end_time.tv_sec, task_end_time.tv_usec / 1000); m_fshot = fopen(shot_file, "w"); if (!m_fshot) { // open failed cout << " open " << shot_file << "failed" << endl; return -1; } string s = "</shots>\n"; cout << "write:"<< shot_file << endl; size_t l= fwrite(s.c_str(), s.size(), 1, m_fshot); cout << "tettttttt " << l << endl; mp_sd->restart(ybuffer, fts); //size_t s = fwrite(frame_buf, len, 1, m_fp); m_curr_frame_ts = fts; memcpy(&m_shot_begin_time, &p_info->begin_time, sizeof(live_timeval)); mb_initialized = true; #if DEBUG #endif } else { // TODO // 1. detect shot // 2. write frame to yuv file // 3. 
frame offset m_curr_frame_ts += fts; m_curr_shot = mp_sd->detect(ybuffer, m_curr_frame_ts); //fwrite(frame_buf, len, 1, m_fp); if (m_curr_shot.start > 0 and m_curr_shot.end > 0) { write_shot_info(); cout << "shot info: " << m_curr_shot.start << ", " << m_curr_shot.end << ", " << m_curr_shot.start_frame_type << "," << m_curr_shot.end_frame_type << endl; memcpy(&m_shot_end_time, &p_info->begin_time, sizeof(live_timeval)); // current shot end time timeval tv; unsigned int t_seconds = (p_info->begin_time.tv_usec + i * fts*1000) / 1000000; tv.tv_usec = (p_info->begin_time.tv_usec + i * fts*1000) % 1000000; tv.tv_sec = p_info->begin_time.tv_sec + t_seconds; m_shot_end_time.tv_sec = tv.tv_sec; m_shot_end_time.tv_usec = tv.tv_usec; // next shot begin time tv.tv_usec = (tv.tv_usec + fts * 1000) % 1000000; t_seconds = (tv.tv_usec + fts * 1000) / 1000000; tv.tv_sec = tv.tv_sec + t_seconds; m_shot_begin_time.tv_sec = tv.tv_sec; m_shot_begin_time.tv_usec = tv.tv_usec; memset(m_tmp_video, 0, FILE_NAME_LEN); sprintf(m_tmp_video, "%s/tmp_%lld_%03lld.yuv", m_video_path, \ p_info->begin_time.tv_sec, p_info->begin_time.tv_usec / 1000 ); //m_fp = fopen(m_tmp_video, "w"); } } } free(ybuffer); } else { // lost frames } memcpy(&m_prev_end_time, &(p_info->end_time), sizeof(live_timeval)); // if the last video_file to deal memcpy(&m_shot_end_time, &(p_info->end_time), sizeof(live_timeval)); return 0; }
/* Odometry calibration routine.
 * Waits for the start jack, zeroes the robot pose, sits for 10 s (so the
 * robot can be pushed/driven by hand during testing), then reports the
 * final pose (X, Y, heading) over the XBee UART with one decimal digit,
 * and halts. The commented-out sections are alternative calibration runs
 * (spins, squares, back-and-forth legs) kept for reference. */
void reglage_odometrie()
{
    delay_ms(2000);
    while(!SYS_JACK);                       /* block until the start jack is pulled */
    COULEUR = couleur_depart();             /* latch the starting-side colour */
    EVITEMENT_ADV_ARRIERE = OFF;            /* disable rear opponent avoidance */
    EVITEMENT_ADV_AVANT = OFF;              /* disable front opponent avoidance */
    init_position_robot(0, 0, 0);           /* pose origin: x=0, y=0, heading=0 */
//    faire_des_tours(64);
//    faire_des_tours(-32);
//    rejoindre(2000, 0, MARCHE_AVANT, 50);
//    delay_ms(30000);
//    carre(MARCHE_AVANT);
    delay_ms(10000);                        /* measurement window before reporting */

    /* Report the pose over the XBee UART. Each value is printed as
     * "<integer part>.<one decimal digit>"; the digit is derived from
     * (value*10 - trunc(value)*10), cast through uint8_t, then offset by
     * 48 ('0') to get ASCII.
     * NOTE(review): the uint8_t cast applies to the first operand only and
     * negative values would garble the digit — confirm values are small and
     * non-negative here. */
    PutsUART(UART_XBEE, "\n\n\n\r X : ");
    PutLongUART((int32_t) get_X());
    PutcUART(UART_XBEE, '.');
    PutcUART(UART_XBEE, ((uint8_t) ((int32_t) ((double) get_X() * 10)) - (((int32_t) get_X()) * 10)) + 48);
    PutsUART(UART_XBEE, " Y : ");
    PutLongUART((int32_t) get_Y());
    PutcUART(UART_XBEE, '.');
    PutcUART(UART_XBEE, ((uint8_t) ((int32_t) ((double) (get_Y() * 10))) - (((int32_t) get_Y()) * 10)) + 48);
    PutsUART(UART_XBEE, " Teta : ");
    PutLongUART((int32_t) get_orientation());
    PutcUART(UART_XBEE, '.');
    PutcUART(UART_XBEE, ((uint8_t) ((int32_t) ((double) (get_orientation() * 10))) - (((int32_t) get_orientation()) * 10)) + 48);

    /* rejoindre(0, 0, MARCHE_AVANT, 100);
    trapeze(MARCHE_AVANT);
    trapeze(MARCHE_AVANT);
    trapeze(MARCHE_AVANT);
    trapeze(MARCHE_AVANT);
    trapeze(MARCHE_AVANT);
     * */

    while(1);                               /* halt: calibration run is done */

//    TIMER_DEBUG = ACTIVE;
//    init_position_robot(0, 0, 0);
//    // Clockwise
//    rejoindre(2000, 0, MARCHE_AVANT, 50);
//    orienter(90, 50);
//    rejoindre(300, 0, MARCHE_AVANT, 50);
//    orienter(-90, 50);
//    rejoindre(2000, 0, MARCHE_AVANT, 50);
//    orienter(90, 50);
//    rejoindre(300, 0, MARCHE_AVANT, 50);
//    orienter (-90, 50);
//    rejoindre(2000, 0, MARCHE_AVANT, 50);
//    orienter(90, 50);
//    rejoindre(300, 0, MARCHE_AVANT, 50);
//    orienter (-90, 50);
//    rejoindre(2000, 0, MARCHE_AVANT, 50);
//    orienter(90, 50);
//    rejoindre(300, 0, MARCHE_AVANT, 50);
//    orienter (-90, 50);
//    rejoindre(2000, 0, MARCHE_AVANT, 50);
//    orienter(90, 50);
//    rejoindre(300, 0, MARCHE_AVANT, 50);
//    orienter (-90, 50);
//    rejoindre(2000, 0, MARCHE_AVANT, 50);
//    orienter(90, 50);
//    rejoindre(300, 0, MARCHE_AVANT, 50);
//    orienter(-90, 50);

//    // Counter-clockwise
//    rejoindre(2000, 0, MARCHE_AVANT, 50);
//    orienter(-90, 50);
//    rejoindre(300, 0, MARCHE_AVANT, 50);
//    orienter(90, 50);
//    rejoindre(2000, 0, MARCHE_AVANT, 50);
//    orienter(-90, 50);
//    rejoindre(300, 0, MARCHE_AVANT, 50);
//    orienter (90, 50);
//    rejoindre(2000, 0, MARCHE_AVANT, 50);
//    orienter(-90, 50);
//    rejoindre(300, 0, MARCHE_AVANT, 50);
//    orienter (90, 50);
//    rejoindre(2000, 0, MARCHE_AVANT, 50);
//    orienter(-90, 50);
//    rejoindre(300, 0, MARCHE_AVANT, 50);
//    orienter (90, 50);
//    rejoindre(2000, 0, MARCHE_AVANT, 50);
//    orienter(-90, 50);
//    rejoindre(300, 0, MARCHE_AVANT, 50);
//    orienter (90, 50);
//    rejoindre(2000, 0, MARCHE_AVANT, 50);
//    orienter(-90, 50);
//    rejoindre(300, 0, MARCHE_AVANT, 50);
//    orienter(90, 50);

//    rejoindre(500, 0, MARCHE_AVANT, 100);
//    rejoindre(2000, 0, MARCHE_AVANT, 100);
//    rejoindre(300, 0, MARCHE_AVANT, 100);
//    rejoindre(2000, 0, MARCHE_AVANT, 100);
//    rejoindre(300, 0, MARCHE_AVANT, 100);
//    rejoindre(2000, 0, MARCHE_AVANT, 100);
//    rejoindre(300, 0, MARCHE_AVANT, 100);
//    rejoindre(2000, 0, MARCHE_AVANT, 100);
//    rejoindre(300, 0, MARCHE_AVANT, 100);
//    rejoindre(2000, 0, MARCHE_AVANT, 100);
//    rejoindre(300, 0, MARCHE_AVANT, 100);
//    rejoindre(2000, 0, MARCHE_AVANT, 100);
//    rejoindre(300, 0, MARCHE_AVANT, 100);
//    rejoindre(2000, 0, MARCHE_AVANT, 100);
//    rejoindre(300, 0, MARCHE_AVANT, 100);
//    rejoindre(500, 0, MARCHE_AVANT, 100);
//    TIMER_DEBUG = DESACTIVE;
}