route_t *file_create_route(FILE *fp) { route_t *route = (route_t*)vp_os_malloc(sizeof(route_t)); INIT_ROUTE(route); distance_t x, y, z; NODE_T *p = NULL; while (1) { if(-1 == fscanf(fp, "%f %f %f", &x, &y, &z)) break; if (route->head == NULL) { route->head = (NODE_T*)vp_os_malloc(sizeof(NODE_T)); p = route->head; INIT_NODE(p, x, y, z); printw("x %f, y %f, z %f\n", x, y, z); } else { p->next = (NODE_T*)vp_os_malloc(sizeof(NODE_T)); p = p->next; INIT_NODE(p, x, y, z); printw("x %f, y %f, z %f\n", x, y, z); } } printw("\n"); if (route->head == NULL) { vp_os_free(route); return NULL; } return route; }
/*
 * Convert a route (waypoint list) into a list of control orders.
 * Each control holds the horizontal/vertical angle and the distance needed
 * to reach the next waypoint, starting from the current position (NOW).
 *
 * route: route to convert; may be NULL or empty (returns NULL).
 * Returns the head of a newly allocated control list (caller owns it),
 * or NULL on empty input / allocation failure.
 */
control_t *route_control_create(route_t* route)
{
    if (route == NULL)
        return NULL;
    NODE_T *target = route->head;
    /* Check for an empty route BEFORE allocating the first control node:
     * the original allocated `control` first and leaked it on this path. */
    if (target == NULL)
        return NULL;
    control_t *control = (control_t*)vp_os_malloc(sizeof(control_t));
    if (control == NULL)
        return NULL;
    control_t *control_head = control;
    angle_t hori_angle;
    angle_t vert_angle;
    distance_t distance;
    /* Snapshot the current position under the NOW mutex. */
    vp_os_mutex_lock(&NOW_mutex);
    distance_t last_x = NOW.x;
    distance_t last_y = NOW.y;
    distance_t last_z = NOW.z;
    vp_os_mutex_unlock(&NOW_mutex);
    while (target != NULL)
    {
        /* Vector from the previous position to the current target. */
        distance_t vect_x = target->x - last_x;
        distance_t vect_y = target->y - last_y;
        distance_t vect_z = target->z - last_z;
        printf("last %lf %lf %lf\n", last_x, last_y, last_z);
        printf("vect %lf %lf %lf\n", vect_x, vect_y, vect_z);
        last_x = target->x;
        last_y = target->y;
        last_z = target->z;
        /* Derive heading angles and travel distance from the vector. */
        MAKE_HORI_ANGLE(hori_angle, vect_x, vect_y);
        MAKE_VERT_ANGLE(vert_angle, vect_x, vect_y, vect_z);
        distance = sqrt(POW2(vect_x) + POW2(vect_y) + POW2(vect_z)) * route->unit_distance;
        INIT_CONTROL(control, hori_angle, vert_angle, distance);
        printf("control dis%lf angle%lf\n", control->distance, control->hori_angle);
        if (target->next != NULL)
        {
            control->next = (control_t*)vp_os_malloc(sizeof(control_t));
            if (control->next == NULL)
                return control_head; /* out of memory: return partial list */
            control = control->next;
        }
        target = target->next;
    }
    return control_head;
}
TClientList *clientList_create(int32_t maxClients) { TClientList *cl = vp_os_malloc(sizeof(TClientList)); cl->maxClients = maxClients; cl->numClients = 0; cl->clients = vp_os_malloc(maxClients * sizeof(TClient)); clientList_removeAllClients(cl); vp_os_mutex_init(&cl->mutex); return cl; }
/*
 * Build the GTK user interface: a top-level window holding a vertical box
 * with the camera image widget and Start/Stop buttons.
 * Must be called once before gtk_main(); fills the file-scope `gui`.
 * NOTE(review): the vp_os_malloc result is used unchecked — on OOM the
 * g_signal_connect below would dereference NULL; confirm policy.
 */
void init_gui(int argc, char **argv)
{
  gui = vp_os_malloc(sizeof (gui_t));
  /* GLib/GDK thread support must be set up before gtk_init. */
  g_thread_init(NULL);
  gdk_threads_init();
  gtk_init(&argc, &argv);
  gui->window = gtk_window_new(GTK_WINDOW_TOPLEVEL);
  g_signal_connect(G_OBJECT(gui->window), "destroy", G_CALLBACK(on_destroy), NULL);
  /* Vertical layout: camera view on top, buttons below. */
  gui->box = gtk_vbox_new(FALSE, 10);
  gtk_container_add(GTK_CONTAINER(gui->window), gui->box);
  gui->cam = gtk_image_new();
  gtk_box_pack_start(GTK_BOX(gui->box), gui->cam, FALSE, TRUE, 0);
  gui->start = gtk_button_new_with_label("Start");
  g_signal_connect (gui->start, "clicked", G_CALLBACK (buttons_callback), NULL);
  gui->stop = gtk_button_new_with_label("Stop");
  g_signal_connect (gui->stop, "clicked", G_CALLBACK (buttons_callback), NULL);
  /* Initial state: only Start is clickable until recording begins. */
  gtk_widget_set_sensitive(gui->start, TRUE);
  gtk_widget_set_sensitive(gui->stop, FALSE);
  gtk_box_pack_start(GTK_BOX(gui->box), gui->start, TRUE, TRUE, 0);
  gtk_box_pack_start(GTK_BOX(gui->box), gui->stop, TRUE, TRUE, 0);
  gtk_widget_show_all(gui->window);
}
/*
 * Probe /proc/cpuinfo for the "neon" CPU feature flag and record the
 * outcome in the global neonStatus.
 * If the file cannot be opened or the line buffer cannot be allocated,
 * neonStatus is left untouched.
 */
void checkNeonSupport ()
{
    neon_status_t result = NEON_SUPPORT_FAIL;
    FILE *cpuInfoFile = fopen ("/proc/cpuinfo", "r");
    if (NULL == cpuInfoFile)
    {
        return;
    }
    char *lineBuffer = vp_os_malloc (NEONCHECK_BUFFER_STRING_SIZE * sizeof (char));
    if (NULL == lineBuffer)
    {
        fclose (cpuInfoFile);
        return;
    }
    /* Scan line by line; stop at the first line mentioning "neon". */
    while (NULL != fgets (lineBuffer, NEONCHECK_BUFFER_STRING_SIZE, cpuInfoFile))
    {
        if (NULL != strstr (lineBuffer, "neon"))
        {
            result = NEON_SUPPORT_OK;
            break;
        }
    }
    vp_os_free (lineBuffer);
    fclose (cpuInfoFile);
    neonStatus = result;
}
/*
 * Pipeline stage: copy the acquired YUV 4:2:0 picture (cfg->picture) into
 * the stage's single output buffer as packed planar Y / Cb / Cr data.
 * The output buffer is allocated once, on the first (INIT) call.
 * Always returns SUCCESS.
 */
C_RESULT picture_to_buffer_transform(buffer_to_picture_config_t *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
{
  vp_os_mutex_lock(&out->lock);
  if(out->status == VP_API_STATUS_INIT)
  {
    out->numBuffers = 1;
    /* YUV420: one full-resolution luma plane + two quarter-size chroma planes. */
    out->size = (ACQ_WIDTH*ACQ_HEIGHT*3)/2;
    /* Single allocation: the one-entry pointer table is followed
     * immediately by the pixel storage it points at. */
    out->buffers = (int8_t **) vp_os_malloc(out->size*sizeof(int8_t) + sizeof(int8_t*));
    out->indexBuffer = 0;
    out->status = VP_API_STATUS_PROCESSING;
    out->buffers[0] = (int8_t *)(out->buffers+1);
  }
  if(out->status == VP_API_STATUS_PROCESSING)
  {
    if( in->size == 1 )
    { // got a picture
      /* Planar copy: Y, then Cb, then Cr. */
      vp_os_memcpy( out->buffers[0], cfg->picture->y_buf, ACQ_WIDTH*ACQ_HEIGHT );
      vp_os_memcpy( out->buffers[0] + ACQ_WIDTH*ACQ_HEIGHT, cfg->picture->cb_buf, ACQ_WIDTH*ACQ_HEIGHT/4);
      vp_os_memcpy( out->buffers[0] + ACQ_WIDTH*ACQ_HEIGHT + ACQ_WIDTH*ACQ_HEIGHT/4, cfg->picture->cr_buf, ACQ_WIDTH*ACQ_HEIGHT/4);
    }
  }
  // out->status = in->status;
  vp_os_mutex_unlock(&out->lock);
  return (SUCCESS);
}
/*
 * Link `prog` with all currently attached shaders.
 * Returns the GL_LINK_STATUS value (GL_FALSE on failure, in which case a
 * message is printed). With DEBUG_SHADER defined, the program info log is
 * also dumped to stdout.
 */
GLint opengl_shader_link(GLuint prog)
{
#if defined(DEBUG_SHADER)
    printf("%s : %d\n", __FUNCTION__, __LINE__);
#endif
    glLinkProgram(prog);
#if defined(DEBUG_SHADER)
    GLint infoLength;
    glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &infoLength);
    if (infoLength > 0)
    {
        /* Fetch and display the driver's link log. */
        GLchar *infoLog = (GLchar *)vp_os_malloc(infoLength);
        glGetProgramInfoLog(prog, infoLength, &infoLength, infoLog);
        printf("Program link log:\n%s\n", infoLog);
        vp_os_free(infoLog);
    }
#endif
    GLint linkStatus;
    glGetProgramiv(prog, GL_LINK_STATUS, &linkStatus);
    if (linkStatus == GL_FALSE)
        printf("Failed to link program %d\n", prog);
    return linkStatus;
}
// align_size has to be a power of two !!! // // The basic working of this algorithm is to allocate a bigger chunk of data than requested. // This chunk of data must be big enough to contain an address aligned on requested boundary // We also alloc 2 more words to keep base ptr (bptr) & requested size (size) of allocation // bptr is the base pointer of this allocation // _____ ______ ______ __________ // ... | bptr | size | .... | // _____|______|______|__________| // void* vp_os_aligned_malloc(size_t size, size_t align_size) { char *ptr, *aligned_ptr; int* ptr2; int allocation_size; size_t align_mask = align_size - 1; // Check if align_size is a power of two // If the result of this test is non zero then align_size is not a power of two if( align_size & align_mask ) return NULL; // Allocation size is : // - Requested user size // - a size (align_size) to make sure we can align on the requested boundary // - 8 more bytes to register base adress & allocation size allocation_size = size + align_size + 2*sizeof(int); ptr = (char*) vp_os_malloc(allocation_size); if(ptr == NULL) return NULL; ptr2 = (int*)(ptr + 2*sizeof(int)); aligned_ptr = ptr + 2*sizeof(int) + (align_size - ((size_t) ptr2 & align_mask)); ptr2 = (int*)(aligned_ptr - 2*sizeof(int)); *ptr2++ = (int) (aligned_ptr - ptr); *ptr2 = size; return aligned_ptr; }
ardrone_users_t *ardrone_get_user_list(void) { if (0 == droneSupportsMulticonfig) { return NULL; } ardrone_users_t *retVal = vp_os_malloc (sizeof (ardrone_users_t)); if (NULL == retVal) return NULL; // Assume that userlist is up to date int validUserCount = 0; // User whose descriptions start with a dot ('.') are hidden users that may not be shown to the application user (e.g. default user for each iPhone, or user specific to a control mode int configIndex; for (configIndex = 0; configIndex < available_configurations[CAT_USER].nb_configurations; configIndex++) // Check all existing user_ids { if ('.' != available_configurations[CAT_USER].list[configIndex].description[0]) // Not an hidden user { validUserCount++; retVal->userList = vp_os_realloc (retVal->userList, validUserCount * sizeof (ardrone_user_t)); if (NULL == retVal->userList) { vp_os_free (retVal); return NULL; } strncpy (retVal->userList[validUserCount-1].ident, available_configurations[CAT_USER].list[configIndex].id, MULTICONFIG_ID_SIZE); strncpy (retVal->userList[validUserCount-1].description, available_configurations[CAT_USER].list[configIndex].description, USER_NAME_SIZE); } } retVal->userCount = validUserCount; return retVal; }
/*
 * Input pipeline stage: read one chunk from the configured com socket into
 * the stage's single output buffer.
 * First call allocates the buffer; subsequent calls fill it via cfg->read,
 * flipping the stage to VP_API_STATUS_ERROR on a failed read.
 * Always returns VP_SUCCESS.
 */
C_RESULT vp_stages_input_com_stage_transform(vp_stages_input_com_config_t *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
{
  vp_os_mutex_lock(&out->lock);
  if (VP_API_STATUS_INIT == out->status)
  {
    /* One allocation: pointer table immediately followed by the data area. */
    out->numBuffers = 1;
    out->size = cfg->buffer_size;
    out->buffers = (int8_t **) vp_os_malloc (sizeof(int8_t *) + out->size * sizeof(int8_t));
    out->buffers[0] = (int8_t *)(out->buffers + 1);
    out->indexBuffer = 0;
    // out->lineSize not used
    out->status = VP_API_STATUS_PROCESSING;
  }
  if (VP_API_STATUS_PROCESSING == out->status && NULL != cfg->read)
  {
    out->size = cfg->buffer_size;
    C_RESULT readResult = cfg->read(&cfg->socket_client, out->buffers[0], &out->size);
    if (VP_FAILED(readResult))
    {
      out->status = VP_API_STATUS_ERROR;
    }
  }
  vp_os_mutex_unlock(&out->lock);
  return (VP_SUCCESS);
}
/*
 * Output pipeline stage: convert the decoded RGB picture to BGR24 and hand
 * it to the video server, stamped with the frame reception time.
 * Does nothing when the video server is not running.
 * Always returns SUCCESS.
 */
C_RESULT output_gtk_stage_transform( void *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
{
  struct timeval receptionTime;
  uint64_t frameTimeCode;
  gettimeofday(&receptionTime, NULL);
  if (!videoServer_isStarted())
    return SUCCESS;
  videoServer_lockFrameBuffer();
  if (out->status == VP_API_STATUS_INIT)
  {
    /* One allocation: pointer table followed by the pixel storage. */
    out->numBuffers = 1;
    out->size = QVGA_WIDTH * QVGA_HEIGHT * 3;
    out->buffers = (int8_t **) vp_os_malloc (sizeof(int8_t *) + out->size * sizeof(int8_t));
    out->buffers[0] = (int8_t *)(out->buffers + 1);
    outputImageBuffer = out->buffers[0];
    cvSetData(debugImage, outputImageBuffer, QVGA_WIDTH * 3);
    out->indexBuffer = 0;
    // out->lineSize not used
    out->status = VP_API_STATUS_PROCESSING;
  }
  if (out->status == VP_API_STATUS_PROCESSING && outputImageBuffer != NULL)
  {
    /* Reference to the last decoded picture from the previous stage. */
    pixbuf_data = (uint8_t*)in->buffers[0];
    {
      int px;
      int8_t *dst = outputImageBuffer;
      /* RGB -> BGR channel swap, three bytes per pixel. */
      for (px = 0; px < QVGA_WIDTH * QVGA_HEIGHT * 3; px += 3)
      {
        dst[px]     = pixbuf_data[px + 2];
        dst[px + 1] = pixbuf_data[px + 1];
        dst[px + 2] = pixbuf_data[px];
      }
    }
    frameTimeCode = (uint64_t)receptionTime.tv_sec * 1e6 + (uint64_t)receptionTime.tv_usec;
    videoServer_feedFrame_BGR24(frameTimeCode, outputImageBuffer);
#ifdef IMAGE_DEBUG
    cvShowImage("DroneCamera", debugImage);
#endif
  }
  videoServer_unlockFrameBuffer();
  return (SUCCESS);
}
/*
 * Initialize the video stage: set up its mutex and allocate a zeroed
 * 512x512 RGBA frame buffer in video_stage_config.data.
 * Returns C_OK on success, C_FAIL when the buffer cannot be allocated.
 */
C_RESULT video_stage_open(vlib_stage_decoding_config_t *cfg)
{
  vp_os_mutex_init( &video_stage_config.mutex );
  vp_os_mutex_lock( &video_stage_config.mutex );
  video_stage_config.data = vp_os_malloc(512 * 512 * 4);
  if (NULL == video_stage_config.data)
  {
    /* Original memset'd an unchecked allocation — would crash on OOM. */
    vp_os_mutex_unlock( &video_stage_config.mutex );
    return C_FAIL;
  }
  vp_os_memset(video_stage_config.data, 0x0, 512 * 512 * 4);
  vp_os_mutex_unlock( &video_stage_config.mutex );
  return C_OK;
}
/*
 * Attach a fresh copy of the P264 codec template to the controller.
 * On allocation failure the controller's codec is set to NULL instead of
 * memcpy'ing into a NULL pointer (the original behavior on OOM).
 */
void p264_codec_alloc( video_controller_t* controller )
{
  video_codec_t* video_codec;
  video_codec = (video_codec_t*) vp_os_malloc( sizeof(p264_codec) );
  if (NULL == video_codec)
  {
    controller->video_codec = NULL;
    return;
  }
  /* Start from the template codec's function table / defaults. */
  vp_os_memcpy(video_codec, &p264_codec, sizeof(p264_codec));
  controller->video_codec = video_codec;
}
/*
 * Initialize an ATcodec buffer with room for nbElementsStart elements of
 * elementSize bytes each. The buffer starts empty: nbElementsStart only
 * sizes the initial allocation, nbElements counts stored elements.
 * Asserts (like the other copy of this initializer in the SDK) that both
 * the requested size and the allocation are non-zero.
 */
void ATcodec_Buffer_init (ATcodec_Buffer_t *s, size_t elementSize, int nbElementsStart)
{
  s->totalSize = elementSize*nbElementsStart;
  VP_OS_ASSERT(s->totalSize);
  s->data = vp_os_malloc(s->totalSize);
  /* Consistency fix: the sibling definition of this function also asserts
   * the allocation succeeded; without it a NULL s->data is used silently. */
  VP_OS_ASSERT(s->data);
  s->topElement = NULL;
  s->nbElements = 0;
  s->elementSize = elementSize;
}
/*
 * Video com pipeline stage: read one chunk of video data from the socket.
 * On an empty UDP read, re-sends the connection byte ("1" unicast,
 * "2" multicast) to (re)register with the drone's video sender.
 * num_retries counts consecutive empty reads. Always returns C_OK; a
 * failed read flips the stage to VP_API_STATUS_ERROR instead.
 */
C_RESULT video_com_stage_transform(video_com_config_t *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
{
  C_RESULT res;
  vp_os_mutex_lock(&out->lock);
  if(out->status == VP_API_STATUS_INIT)
  {
    /* Single allocation: pointer table followed by the data area. */
    out->numBuffers = 1;
    out->size = cfg->buffer_size;
    out->buffers = (int8_t **) vp_os_malloc (sizeof(int8_t *)+out->size*sizeof(int8_t));
    out->buffers[0] = (int8_t *)(out->buffers+1);
    out->indexBuffer = 0;
    // out->lineSize not used
    out->status = VP_API_STATUS_PROCESSING;
  }
  if(out->status == VP_API_STATUS_PROCESSING && cfg->read != NULL)
  {
    out->size = cfg->buffer_size;
    res = cfg->read(&cfg->socket, out->buffers[0], &out->size);
    if( cfg->protocol == VP_COM_UDP )
    {
      if( out->size == 0 )
      {
        // Send "1" for Unicast
        // Send "2" to enable Multicast
        int32_t flag = 1, len = sizeof(flag);
        if ( cfg->socket.is_multicast == 1 )
          flag = 2;
        cfg->write(&cfg->socket, (int8_t*) &flag, &len);
      }
    }
    if( VP_FAILED(res) )
    {
      out->status = VP_API_STATUS_ERROR;
      out->size = 0;
    }
    /* Consecutive empty reads drive the retry counter (watchdog input). */
    if( out->size == 0) cfg->num_retries++;
    else cfg->num_retries = 0;
  }
  vp_os_mutex_unlock(&out->lock);
  return C_OK;
}
/*
 * Insert a copy of `element` (list->size bytes) into the sorted list,
 * keeping ascending sortValue order; equal keys are inserted AFTER
 * existing ones (stable for duplicates).
 * The node header and the element payload live in one allocation; the
 * `-sizeof(void *)` accounts for the header's placeholder element field.
 */
void ATcodec_Sorted_List_insertElement (ATcodec_Sorted_List_t *list, const void *element, int sortValue)
{
  ATcodec_Sorted_List_header_t *current = (ATcodec_Sorted_List_header_t *)list->head;
  ATcodec_Sorted_List_header_t *previous = NULL;
  ATcodec_Sorted_List_header_t *ptr = (ATcodec_Sorted_List_header_t *)vp_os_malloc(list->size+sizeof(ATcodec_Sorted_List_header_t)-sizeof(void *));
  ptr->magic = ATCODEC_MAGIC_NUMBER;  /* corruption canary */
  ptr->sortValue = sortValue;
  memcpy(&ptr->element, element, list->size);
  /* Walk past all nodes whose key is <= the new one. */
  while(current && sortValue >= current->sortValue)
  {
    previous = current;
    current = current->next;
  }
  if(current && sortValue < current->sortValue)
  {
    /* Insert before `current` (possibly becoming the new head). */
    if(current->previous)
    {
      current->previous->next = ptr;
    }
    else
    {
      list->head = ptr;
    }
    ptr->previous = current->previous;
    ptr->next = current;
    current->previous = ptr;
  }
  else
  {
    /* Reached the end: append after `previous`, or start an empty list. */
    if(previous)
    {
      previous->next = ptr;
      ptr->previous = previous;
      ptr->next = NULL;
    }
    else
    {
      list->head = ptr;
      ptr->previous = NULL;
      ptr->next = NULL;
    }
  }
  list->nb++;
}
/*
 * Initialize an ATcodec buffer: reserve room for nbElementsStart elements
 * of elementSize bytes each. The buffer itself starts out empty —
 * nbElementsStart only sizes the allocation, nbElements counts contents.
 * Asserts that both the computed size and the allocation are non-zero.
 */
void ATcodec_Buffer_init (ATcodec_Buffer_t *s, size_t elementSize, int nbElementsStart)
{
  s->elementSize = elementSize;
  s->totalSize = elementSize * nbElementsStart;
  VP_OS_ASSERT(s->totalSize);
  s->data = vp_os_malloc(s->totalSize);
  VP_OS_ASSERT(s->data);
  s->topElement = NULL;
  s->nbElements = 0;
}
/*
 * Start the ARDrone engine: initialize the video stage and launch the
 * mobile_main thread with the given callback / application / user names.
 * The param struct is handed off to (and presumably freed by) the thread.
 * NOTE(review): strcpy into param->appName / param->usrName is unbounded —
 * the field sizes are not visible here; confirm callers guarantee the
 * strings fit, or switch to a bounded copy.
 */
void ardroneEngineStart ( ardroneEngineCallback callback, const char *appName, const char *usrName )
{
#ifdef DEBUG_THREAD
  PRINT( "%s\n", __FUNCTION__ );
#endif
  video_stage_init();
  mobile_main_param_t *param = vp_os_malloc (sizeof (mobile_main_param_t));
  if (NULL != param)
  {
    param->callback = callback;
    strcpy(param->appName, appName);
    strcpy(param->usrName, usrName);
    START_THREAD( mobile_main, param);
  }
}
void get_iphone_mac_address(const char *itfName) { int mib[6]; size_t len; char *buf; unsigned char *ptr; struct if_msghdr *ifm; struct sockaddr_dl *sdl; mib[0] = CTL_NET; mib[1] = AF_ROUTE; mib[2] = 0; mib[3] = AF_LINK; mib[4] = NET_RT_IFLIST; if ((mib[5] = if_nametoindex(itfName)) == 0) { printf("Error: if_nametoindex error\n"); return; } if (sysctl(mib, 6, NULL, &len, NULL, 0) < 0) { printf("Error: sysctl, take 1\n"); return; } if ((buf = vp_os_malloc(len)) == NULL) { printf("Could not allocate memory. error!\n"); return; } if (sysctl(mib, 6, buf, &len, NULL, 0) < 0) { printf("Error: sysctl, take 2"); return; } ifm = (struct if_msghdr *)buf; sdl = (struct sockaddr_dl *)(ifm + 1); ptr = (unsigned char *)LLADDR(sdl); sprintf(iphone_mac_address, "%02X:%02X:%02X:%02X:%02X:%02X",*ptr, *(ptr+1), *(ptr+2), *(ptr+3), *(ptr+4), *(ptr+5)); if(buf != NULL) vp_os_free(buf); }
/*
 * Set up OpenGL ES state for video rendering: allocate the 512x512
 * RGB565 texture backing store (pixbuf), create the texture object,
 * upload an initial image, and configure filtering / cropping plus
 * a minimal fixed-function state for flat, depth-less blitting.
 */
void video_init(void)
{
  int tex_width, tex_height;
  /* Crop rect with negative width: draws the texture mirrored so the
   * VIDEO_WIDTH x VIDEO_HEIGHT region appears the right way round. */
  GLint crop[4] = { VIDEO_WIDTH, 0, -VIDEO_WIDTH, VIDEO_HEIGHT };
  INFO("initializing OpenGL video rendering...\n");
  // FIXME: this should be computed (smallest power of 2 > dimension)
  tex_width = 512;
  tex_height = 512;
  otick = 0;
  if (!pixbuf)
  {
    /* 2 bytes per pixel: GL_UNSIGNED_SHORT_5_6_5. */
    pixbuf = vp_os_malloc(tex_width * tex_height * 2);
    assert(pixbuf);
    //memcpy(pixbuf, default_image, VIDEO_WIDTH*VIDEO_HEIGHT*2);
  }
  glEnable(GL_TEXTURE_2D);
  glTexEnvx(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
  /* Recreate the texture object from scratch. */
  glDeleteTextures(1, &texture);
  glGenTextures(1, &texture);
  glBindTexture(GL_TEXTURE_2D, texture);
  // Call glTexImage2D only once, and use glTexSubImage2D later
  glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, tex_width, tex_height, 0, GL_RGB, GL_UNSIGNED_SHORT_5_6_5, pixbuf);
  glTexParameteriv(GL_TEXTURE_2D, GL_TEXTURE_CROP_RECT_OES, crop);
  /* Seed the visible region with the built-in default image. */
  glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, VIDEO_WIDTH, VIDEO_HEIGHT, GL_RGB, GL_UNSIGNED_SHORT_5_6_5, default_image);
  glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
  glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
  glTexEnvx(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
  /* Plain 2D blit: no blending, dithering, depth or culling needed. */
  glDisable(GL_BLEND);
  glDisable(GL_DITHER);
  glDisable(GL_DEPTH_TEST);
  glDepthMask(GL_FALSE);
  glDisable(GL_CULL_FACE);
  glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
  glShadeModel(GL_FLAT);
  glClear(GL_COLOR_BUFFER_BIT);
}
/*
 * Pipeline stage: MJPEG-encode the current picture into the stage's
 * output buffer; out->size is the encoded byte count for this call.
 * Always returns C_OK.
 * NOTE(review): mjpeg_encode's result is captured but deliberately not
 * propagated as the stage return value (matches original behavior).
 */
C_RESULT mjpeg_stage_encoding_transform(mjpeg_stage_encoding_config_t *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
{
  C_RESULT res;
  bool_t got_image;
  res = C_OK;
  vp_os_mutex_lock(&out->lock);
  if( out->status == VP_API_STATUS_INIT )
  {
    /* Single allocation: pointer table immediately followed by the
     * encoder's output area. */
    out->numBuffers = 1;
    out->buffers = (int8_t**) vp_os_malloc( sizeof(int8_t*) + cfg->out_buffer_size*sizeof(int8_t) );
    out->buffers[0] = (int8_t*) ( out->buffers + 1 );
    out->indexBuffer = 0;
    out->status = VP_API_STATUS_PROCESSING;
  }
  if( out->status == VP_API_STATUS_PROCESSING )
  {
    /* Point the bit stream at the output buffer and encode one picture.
     * (An unused `num_frames` snapshot was removed here.) */
    stream_config( &cfg->stream, cfg->out_buffer_size, out->buffers[0] );
    res = mjpeg_encode( &cfg->mjpeg, cfg->picture, &cfg->stream, &got_image );
    if( got_image )
    {
      PRINT("Frame complete. Size = %d bytes\n", cfg->stream.index);
    }
    out->size = cfg->stream.index;
  }
  if( out->status == VP_API_STATUS_ENDED )
  {
    PRINT("End of data\n");
  }
  vp_os_mutex_unlock( &out->lock );
  return C_OK;
}
/*
 * JNI entry: initialize the video stages, fill a mobile_main_param_t with
 * the app/user/directory settings, and launch the app_main thread.
 * The param struct (and the global ref on `obj`) is handed to the thread.
 */
void parrot_ardrone_notify_start(JNIEnv* env, jobject obj, ardroneEngineCallback callback, const char *appName, const char *userName, const char* rootdir, const char* flightdir, int flight_storing_size, academy_download_new_media academy_download_callback_func, VIDEO_RECORDING_CAPABILITY recordingCapability)
{
  video_stage_init();
  video_recorder_init();
  mobile_main_param_t *param = vp_os_malloc(sizeof(mobile_main_param_t));
  if (NULL == param)
  {
    LOGII("paramisnull");
    return;
  }
  param->obj = (*env)->NewGlobalRef(env, obj);
  param->callback = callback;
  vp_os_memset(&param->app_name, 0, STRING_BUFFER_LENGTH);
  vp_os_memset(&param->user_name, 0, STRING_BUFFER_LENGTH);
  vp_os_memset(&param->root_dir, 0, STRING_BUFFER_LENGTH);
  vp_os_memset(&param->flight_dir, 0, STRING_BUFFER_LENGTH);
  /* Copy at most LENGTH-1 bytes: with the full LENGTH the original could
   * leave the strings unterminated when the source filled the buffer
   * (strncpy does not NUL-terminate on truncation). The memset above
   * guarantees the last byte stays '\0'. */
  strncpy(param->app_name, appName, STRING_BUFFER_LENGTH - 1);
  strncpy(param->user_name, userName, STRING_BUFFER_LENGTH - 1);
  strncpy(param->root_dir, rootdir, STRING_BUFFER_LENGTH - 1);
  strncpy(param->flight_dir, flightdir, STRING_BUFFER_LENGTH - 1);
  param->flight_storing_size = flight_storing_size;
  param->academy_download_callback_func = academy_download_callback_func;
  ctrldata.recordingCapability = recordingCapability;
  LOGII("startsuccess");
  START_THREAD(app_main, param);
}
/*
 * Open the vlib decoding stage: initialize the decoder with NULL_CODEC
 * and set up the input bit stream for the selected mode.
 * Block mode: the controller's own stream buffer is not needed, free it.
 * Frame mode: allocate a dedicated frame-sized stream buffer.
 * Always returns C_OK.
 * NOTE(review): the frame-mode branch writes the file-scope `stream`, not
 * cfg->controller.in_stream — confirm this aliasing is intentional. The
 * freed in_stream.bytes pointer is also not reset to NULL.
 */
C_RESULT vlib_stage_decoding_open(vlib_stage_decoding_config_t *cfg)
{
  // init video decoder with NULL_CODEC
  video_codec_open( &cfg->controller, NULL_CODEC );
  if(cfg->block_mode_enable)
  {
    vp_os_free( cfg->controller.in_stream.bytes );
  }
  else
  {
    stream.bytes = (uint32_t*)vp_os_malloc(FRAME_MODE_BUFFER_SIZE*sizeof(uint32_t));
    stream.index = 0;
    stream.used = 0;
    stream.size = FRAME_MODE_BUFFER_SIZE*sizeof(uint32_t);
  }
  cfg->num_picture_decoded = 0;
  return C_OK;
}
/*
 * Create and compile a shader of the given `type` from the provided
 * source string. On success *shader holds the new shader object.
 * Returns the GL_COMPILE_STATUS value, or 0 when no source was supplied.
 * NOTE(review): the `count` parameter is currently unused — exactly one
 * source string is passed to glShaderSource.
 */
GLint opengl_shader_compile(GLuint *shader, GLenum type, GLsizei count, const char* content_file)
{
#if defined(DEBUG_SHADER)
  printf("%s : %d\n", __FUNCTION__, __LINE__);
#endif
  GLint status;
  const GLchar *sources = (const GLchar *)content_file;
  // get source code
  if (!sources)
  {
    /* Fixed message: this path is taken for any shader type, not just
     * vertex shaders (the original always said "vertex shader"). */
    printf("Failed to load shader source\n");
    return 0;
  }
  *shader = glCreateShader(type);               // create shader
  glShaderSource(*shader, 1, &sources, NULL);   // set source code in the shader
  glCompileShader(*shader);                     // compile shader
#if defined(DEBUG_SHADER)
  GLint logLength;
  glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength);
  if (logLength > 0)
  {
    GLchar *log = (GLchar *)vp_os_malloc(logLength);
    glGetShaderInfoLog(*shader, logLength, &logLength, log);
    printf("Shader compile log:\n%s\n", log);
    vp_os_free(log);
  }
#endif
  glGetShaderiv(*shader, GL_COMPILE_STATUS, &status);
  if (status == GL_FALSE)
  {
    printf("Failed to compile shader:\n");
    printf("%s\n", sources);
  }
  return status;
}
/*
 * Register a configuration parameter in the global table.
 *
 * id:          slot index in configManager_params.
 * name:        display name, truncated to MAX_PARAM_NAME_LENGTH chars.
 * valueType:   one of the VALUETYPE_* constants; scalar types have a
 *              fixed size and ignore the caller-supplied valueLength.
 * valueLength: payload size, meaningful for VALUETYPE_ARRAY only.
 */
void configManager_registerParamArray(uint32_t id, const char *name, uint8_t valueType, uint32_t valueLength)
{
  configManager_params[id].id = id;
  if (strlen(name) > MAX_PARAM_NAME_LENGTH)
  {
    /* Truncate over-long names and terminate explicitly. */
    vp_os_memcpy(configManager_params[id].name, name, MAX_PARAM_NAME_LENGTH);
    configManager_params[id].name[MAX_PARAM_NAME_LENGTH] = '\0';
  }
  else
    strcpy(configManager_params[id].name, name);
  configManager_params[id].valueType = valueType;
  switch(valueType)
  {
  case VALUETYPE_ARRAY:
    configManager_params[id].valueLength = valueLength;
    break;
  case VALUETYPE_INT32:
    configManager_params[id].valueLength = 4;
    break;
  case VALUETYPE_FLOAT:
    configManager_params[id].valueLength = 4;
    break;
  case VALUETYPE_BOOL:
    configManager_params[id].valueLength = 1;
    break;
  default:
    /* Unknown type: fall back to the caller-supplied length. */
    configManager_params[id].valueLength = valueLength;
    break;
  }
  /* Allocate the COMPUTED length, not the raw argument: the original
   * used `valueLength` here, so registering e.g. an INT32 with
   * valueLength == 0 produced a zero-byte value buffer. */
  configManager_params[id].value = vp_os_malloc(configManager_params[id].valueLength);
}
/*
 * Open the slice-merging stage: allocate the two (ping/pong) output
 * buffer-pointer holders and reset their accumulation state.
 * Returns C_OK on success, C_FAIL on allocation failure (releasing any
 * buffer holder already allocated — the original leaked the first one
 * when the second allocation failed).
 */
C_RESULT video_stage_merge_slices_open(video_stage_merge_slices_config_t *cfg)
{
  int idx;
  for (idx=0;idx<2;idx++)
  {
    cfg->bufs[idx].bufferPointer = vp_os_malloc (sizeof (int8_t *));
    if (NULL == cfg->bufs[idx].bufferPointer)
    {
      printf ("Unable to allocate output buffer pointer\n");
      /* Roll back holders allocated in earlier iterations. */
      while (--idx >= 0)
      {
        vp_os_free (cfg->bufs[idx].bufferPointer);
        cfg->bufs[idx].bufferPointer = NULL;
      }
      return C_FAIL;
    }
    cfg->bufs[idx].accumulated_size =0;
    cfg->bufs[idx].buffer = NULL;
    cfg->bufs[idx].buffer_size = 0;
  }
  cfg->mergingBuffer = 0;
  cfg->readyBuffer = 0;
  return C_OK;
}
/*
 * Multi-socket video com stage: select() over all connected video sockets,
 * read from whichever has data (preferring the last active one), drain the
 * socket non-blocking, and maintain reconnect / retry / bitrate-debug state.
 * Always returns C_OK; read failures mark the socket for reconnection and
 * leave out->size == 0. In forced non-blocking mode a select timeout sets
 * out->size = -1 so the pipeline is not restarted.
 */
C_RESULT video_com_multisocket_stage_transform(video_com_multisocket_config_t *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
{
  C_RESULT res;
  fd_set rfds;
  int retval;
  int fs,maxfs;
  struct timeval tv;
  int i;
  bool_t selectTimeout;
  vp_os_mutex_lock(&out->lock);
  out->size = 0;
  /* Reconnect any socket flagged by a previous failed read. */
  for (i=0;i<cfg->nb_sockets;i++)
  {
    if (1 == cfg->configs[i]->mustReconnect)
    {
      PDBG ("Will call connect");
      PRINT ("Reconnecting ... ");
      res = C_FAIL;
      res = video_com_stage_connect (cfg->configs[i]);
      PRINT ("%s\n", (VP_FAILED (res) ? "FAIL" : "OK"));
      cfg->configs[i]->mustReconnect = 0;
    }
  }
  if(out->status == VP_API_STATUS_INIT)
  {
    /* Single allocation: pointer table followed by the shared data area. */
    out->numBuffers = 1;
    out->size = 0;
    out->buffers = (uint8_t **) vp_os_malloc (sizeof(uint8_t *)+cfg->buffer_size*sizeof(uint8_t));
    out->buffers[0] = (uint8_t *)(out->buffers+1);
    out->indexBuffer = 0;
    // out->lineSize not used
    out->status = VP_API_STATUS_PROCESSING;
  }
  if(out->status == VP_API_STATUS_PROCESSING)
  {
    /* Check which socket has data to read: 1 s timeout normally,
     * zero timeout when the caller forces non-blocking operation. */
    tv.tv_sec = 1;
    tv.tv_usec = 0;
    if(cfg->forceNonBlocking && *(cfg->forceNonBlocking)==TRUE)
    {
      tv.tv_sec = 0;
    }
    FD_ZERO(&rfds);
    maxfs=0;
    for (i=0;i<cfg->nb_sockets;i++)
    {
      if(cfg->configs[i]->connected)
      {
        fs = (int)cfg->configs[i]->socket.priv;
        if (fs>maxfs) maxfs=fs;
        FD_SET(fs,&rfds);
      }
    }
    retval = select(maxfs+1, &rfds, NULL, NULL, &tv);
    /* Read the socket which has available data */
    selectTimeout = FALSE;
    i = -1;
    if (retval>0)
    {
      /* Fast path: try the socket that delivered data last time. */
      if (cfg->last_active_socket!=-1)
      {
        i=cfg->last_active_socket;
        fs = (int)cfg->configs[i]->socket.priv;
        if (cfg->configs[i]->read && FD_ISSET(fs, &rfds))
        {
          out->size = cfg->configs[i]->buffer_size;
        }
        else
        {
          i = -1;
        }
      }
      /* Slow path: scan all sockets for the first readable one. */
      if (-1 == i)
      {
        for (i=0;i<cfg->nb_sockets;i++)
        {
          fs = (int)cfg->configs[i]->socket.priv;
          if (cfg->configs[i]->read && FD_ISSET(fs, &rfds))
          {
            DEBUG_PRINT_SDK("Video multisocket : found data on port %s %d\n", (cfg->configs[i]->socket.protocol==VP_COM_TCP)?"TCP":"UDP",cfg->configs[i]->socket.port);
            cfg->last_active_socket = i;
            if (VP_COM_TCP == cfg->configs[i]->protocol)
            {
              DEBUG_isTcp = 1;
            }
            else
            {
              DEBUG_isTcp = 0;
            }
            out->size = cfg->configs[i]->buffer_size;
            break;
          }
        }
        /* select() said data was ready but no socket matched. */
        if(i == cfg->nb_sockets)
        {
          PRINT("%s:%d BUG !!!!!", __FUNCTION__, __LINE__);
          selectTimeout = TRUE;
        }
      }
    }
    else
    {
      DEBUG_PRINT_SDK("%s\n",(retval==0)?"timeout":"error");
      selectTimeout = TRUE;
    }
    if (FALSE == selectTimeout)
    {
      // DEBUG_PRINT_SDK ("Will read on socket %d\n", i);
      // Actual first time read
      res = cfg->configs[i]->read(&cfg->configs[i]->socket, out->buffers[0], &out->size);
      if (VP_FAILED (res))
      {
        PDBG ("%s [%d] : status set to error !\n", __FUNCTION__, __LINE__);
        perror ("Video_com_stage");
        cfg->configs[i]->mustReconnect = 1;
        out->size = 0;
        vp_os_mutex_unlock (&out->lock);
        return C_OK;
      }
      // Loop read to empty the socket buffers if needed
      cfg->configs[i]->socket.block = VP_COM_DONTWAIT;
      bool_t bContinue = TRUE;
      int32_t readSize = cfg->configs[i]->buffer_size - out->size;
      while (TRUE == bContinue)
      {
        // DEBUG_PRINT_SDK ("Will read %d octets from socket %d\n", readSize, i);
        res = cfg->configs[i]->read(&cfg->configs[i]->socket, &(out->buffers[0][out->size]), &readSize);
        if (VP_FAILED (res))
        {
          PDBG ("%s [%d] : status set to error !\n", __FUNCTION__, __LINE__);
          perror ("Video_com_stage");
          cfg->configs[i]->mustReconnect = 1;
          out->size = 0;
          vp_os_mutex_unlock (&out->lock);
          return C_OK;
        }
        if (0 == readSize)
        {
          bContinue = FALSE;
        }
        out->size += readSize;
        readSize = cfg->configs[i]->buffer_size - out->size;
      }
      cfg->configs[i]->socket.block = VP_COM_DEFAULT;
    }
    /* Resend a connection packet on UDP sockets */
    if(!(cfg->forceNonBlocking && *(cfg->forceNonBlocking)==TRUE))
    {
      if( /* select timed out */ retval==0 )
      {
        for (i=0;i<cfg->nb_sockets;i++)
        {
          if( cfg->configs[i]->protocol == VP_COM_UDP )
          {
            // Send "1" for Unicast
            // Send "2" to enable Multicast
            char flag = 1;
            int len = sizeof(flag);
            if ( cfg->configs[i]->socket.is_multicast == 1 )
              flag = 2;
            DEBUG_PRINT_SDK("Video multisocket : sending connection byte on port %s %d\n", (cfg->configs[i]->socket.protocol==VP_COM_TCP)?"TCP":"UDP",cfg->configs[i]->socket.port);
            cfg->configs[i]->write(&cfg->configs[i]->socket, (uint8_t*) &flag, &len);
          }
        }
      }
    }
    /* Retry accounting: only counts in blocking mode. */
    if( (TRUE == selectTimeout || out->size == 0) && (!cfg->forceNonBlocking || (*(cfg->forceNonBlocking) == FALSE)) )
    {
      cfg->num_retries++;
    }
    else
      cfg->num_retries = 0;
    if((selectTimeout == TRUE) && cfg->forceNonBlocking && *(cfg->forceNonBlocking)==TRUE)
    {
      //PRINT("Debug %s:%d\n",__FUNCTION__,__LINE__);
      /* No data are available here, but some are available in the next stage */
      /* out->size=0 would restart the pipeline */
      out->size=-1;
    }
  }
  vp_os_mutex_unlock(&out->lock);
  /* Debug-only rolling bitrate estimate over the configured window. */
  if (out->size > 0)
  {
    DEBUG_totalBytes += out->size;
    static struct timeval DEBUG_now = {0, 0}, DEBUG_prev = {0, 0};
    gettimeofday (&DEBUG_now, NULL);
    float DEBUG_timeDiff = ((DEBUG_now.tv_sec - DEBUG_prev.tv_sec) * 1000.0) + ((DEBUG_now.tv_usec - DEBUG_prev.tv_usec) / 1000.0);
    if (DEBUG_TIME_BITRATE_CALCULATION_MSEC <= DEBUG_timeDiff)
    {
      DEBUG_prev.tv_sec = DEBUG_now.tv_sec;
      DEBUG_prev.tv_usec = DEBUG_now.tv_usec;
      float DEBUG_instantBitrate = 8.0 * (float)(DEBUG_totalBytes) / DEBUG_TIME_BITRATE_CALCULATION_MSEC;
      DEBUG_totalBytes = 0;
      DEBUG_bitrate = 0.8 * DEBUG_bitrate + 0.2 * DEBUG_instantBitrate;
    }
  }
  return C_OK;
}
DEFINE_THREAD_ROUTINE(video_update_thread, data) { C_RESULT res; vp_api_io_pipeline_t pipeline; vp_api_io_data_t out; vp_api_io_stage_t stages[NB_STAGES]; vp_api_picture_t picture; video_com_config_t icc; vlib_stage_decoding_config_t vec; vp_stages_yuv2rgb_config_t yuv2rgbconf; /// Picture configuration picture.format = PIX_FMT_YUV420P; picture.width = STREAM_WIDTH; picture.height = STREAM_HEIGHT; picture.framerate = 30; picture.y_buf = (uint8_t *)vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT ); picture.cr_buf = (uint8_t *)vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT / 4 ); picture.cb_buf = (uint8_t *)vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT / 4 ); picture.y_line_size = STREAM_WIDTH; picture.cb_line_size = STREAM_WIDTH / 2; picture.cr_line_size = STREAM_WIDTH / 2; vp_os_memset(&icc, 0, sizeof( icc )); vp_os_memset(&vec, 0, sizeof( vec )); vp_os_memset(&yuv2rgbconf, 0, sizeof( yuv2rgbconf )); icc.com = COM_VIDEO(); icc.buffer_size = 100000; icc.protocol = VP_COM_UDP; COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip); vec.width = STREAM_WIDTH; vec.height = STREAM_HEIGHT; vec.picture = &picture; #ifdef USE_VIDEO_YUV vec.luma_only = FALSE; #else vec.luma_only = TRUE; #endif // USE_VIDEO_YUV vec.block_mode_enable = TRUE; vec.luma_only = FALSE; yuv2rgbconf.rgb_format = VP_STAGES_RGB_FORMAT_RGB24; if( CAMIF_H_CAMERA_USED == CAMIF_CAMERA_OVTRULY_UPSIDE_DOWN_ONE_BLOCKLINE_LESS ) yuv2rgbconf.mode = VP_STAGES_YUV2RGB_MODE_UPSIDE_DOWN; pipeline.nb_stages = 0; stages[pipeline.nb_stages].type = VP_API_INPUT_SOCKET; stages[pipeline.nb_stages].cfg = (void *)&icc; stages[pipeline.nb_stages].funcs = video_com_funcs; pipeline.nb_stages++; stages[pipeline.nb_stages].type = VP_API_FILTER_DECODER; stages[pipeline.nb_stages].cfg = (void*)&vec; stages[pipeline.nb_stages].funcs = vlib_decoding_funcs; pipeline.nb_stages++; stages[pipeline.nb_stages].type = VP_API_FILTER_YUV2RGB; stages[pipeline.nb_stages].cfg = (void*)&yuv2rgbconf; 
stages[pipeline.nb_stages].funcs = vp_stages_yuv2rgb_funcs; pipeline.nb_stages++; stages[pipeline.nb_stages].type = VP_API_OUTPUT_SDL; stages[pipeline.nb_stages].cfg = (void *)&vec; stages[pipeline.nb_stages].funcs = vp_stages_export_funcs; pipeline.nb_stages++; pipeline.stages = &stages[0]; PIPELINE_HANDLE pipeline_handle; res = vp_api_open(&pipeline, &pipeline_handle); if( SUCCEED(res) ) { while( SUCCEED(vp_api_run(&pipeline, &out)) ) { } vp_api_close(&pipeline, &pipeline_handle); } return (THREAD_RET)0; }
/*
 * Single-socket video com stage with reconnect and drain support.
 * Performs one (optionally non-blocking) read, re-sends the UDP
 * connection byte when an empty blocking read occurs, then drains any
 * remaining data non-blocking into the same buffer. A failed read marks
 * the socket for reconnection on the next call. When num_retries reaches
 * timeoutFuncAfterSec, the configured timeout callback fires.
 * Always returns C_OK. In forced non-blocking mode an empty read sets
 * out->size = -1 to signal "no new data" to the next stage.
 */
C_RESULT video_com_stage_transform(video_com_config_t *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
{
  C_RESULT res;
  vp_os_mutex_lock(&out->lock);
  /* Reconnect if the previous call flagged a read failure. */
  if (1 == cfg->mustReconnect)
  {
    PDBG ("Will call connect");
    PRINT ("Reconnecting ... ");
    res = video_com_stage_connect (cfg);
    PRINT ("%s\n", (VP_FAILED (res) ? "FAIL" : "OK"));
    cfg->mustReconnect = 0;
  }
  if(out->status == VP_API_STATUS_INIT)
  {
    /* Single allocation: pointer table followed by the data area. */
    out->numBuffers = 1;
    out->size = 0;
    out->buffers = (uint8_t **) vp_os_malloc (sizeof(uint8_t *)+cfg->buffer_size*sizeof(uint8_t));
    out->buffers[0] = (uint8_t *)(out->buffers+1);
    out->indexBuffer = 0;
    // out->lineSize not used
    out->status = VP_API_STATUS_PROCESSING;
  }
  if(out->status == VP_API_STATUS_PROCESSING && cfg->read != NULL)
  {
    bool_t nonBlock = (cfg->forceNonBlocking && *(cfg->forceNonBlocking)==TRUE) ? TRUE : FALSE;
    out->size = cfg->buffer_size;
    if (nonBlock)
    {
      cfg->socket.block = VP_COM_DONTWAIT;
    }
    /* First read: may block unless nonBlock is requested. */
    res = cfg->read(&cfg->socket, out->buffers[0], &out->size);
    if (! nonBlock && cfg->protocol == VP_COM_UDP && out->size == 0)
    {
      // Send "1" for Unicast
      // Send "2" to enable Multicast
      char flag = 1;
      int32_t len = sizeof(flag);
      if ( cfg->socket.is_multicast == 1 )
      {
        flag = 2;
      }
      cfg->write(&cfg->socket, (uint8_t*) &flag, &len);
    }
    bool_t bContinue = TRUE;
    if( VP_FAILED(res) )
    {
      PDBG ("%s [%d] : status set to error !\n", __FUNCTION__, __LINE__);
      perror ("Video_com_stage");
      cfg->mustReconnect = 1;
      out->size = 0;
      vp_os_mutex_unlock (&out->lock);
      return C_OK;
    }
    if( out->size == 0)
    {
      if (nonBlock)
      {
        out->size = -1; // Signal next stage that we don't have data waiting
      }
      else
      {
        cfg->num_retries++;
      }
      bContinue = FALSE;
    }
    else
    {
      cfg->num_retries = 0;
    }
    /* Drain loop: keep reading non-blocking until the socket is empty
     * or the buffer is full. */
    cfg->socket.block = VP_COM_DONTWAIT;
    int32_t readSize = cfg->buffer_size - out->size;
    while (TRUE == bContinue)
    {
      res = cfg->read(&cfg->socket, &(out->buffers[0][out->size]), &readSize);
      if( VP_FAILED(res) )
      {
        PDBG ("%s [%d] : status set to error !\n", __FUNCTION__, __LINE__);
        perror ("Video_com_stage");
        cfg->mustReconnect = 1;
        out->size = 0;
        vp_os_mutex_unlock (&out->lock);
        return C_OK;
      }
      if (0 == readSize)
      {
        bContinue = FALSE;
      }
      out->size += readSize;
      readSize = cfg->buffer_size - out->size;
    }
    cfg->socket.block = VP_COM_DEFAULT;
  }
  /* Fire the user timeout callback after enough consecutive empty reads. */
  if (NULL != cfg->timeoutFunc && 0 != cfg->timeoutFuncAfterSec)
  {
    if (cfg->num_retries >= cfg->timeoutFuncAfterSec)
    {
      cfg->timeoutFunc ();
    }
  }
  vp_os_mutex_unlock(&out->lock);
  return C_OK;
}
/*
 * Pass-through recording stage: forwards the incoming picture unchanged to
 * the next stage while optionally writing it to an MP4 file via FFMPEG.
 * Recording state machine (cfg->startRec):
 *   VIDEO_RECORD_HOLD  -> open a timestamped file, switch to START;
 *   VIDEO_RECORD_START -> encode the PREVIOUS frame, duplicated as needed
 *                         to approximate 60 FPS wall-clock pacing, then
 *                         cache the current frame for the next call;
 *   VIDEO_RECORD_STOP  -> close the file if open.
 * Always returns C_OK.
 * NOTE(review): `picture` is obtained by casting in->buffers itself (not
 * in->buffers[0]) to vp_api_picture_t* — this relies on how the previous
 * stage publishes its picture; confirm before touching.
 */
C_RESULT video_stage_ffmpeg_recorder_transform(video_stage_ffmpeg_recorder_config_t *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
{
  time_t temptime;
  struct timeval tv;
  struct tm *atm;
  long long int current_timestamp_us;
  static long long int first_frame_timestamp_us=0;
  static int frame_counter=0;
  int i;
  int frame_size;
  static int flag_video_file_open=0;
  vp_os_mutex_lock( &out->lock );
  vp_api_picture_t* picture = (vp_api_picture_t *) in->buffers;
  gettimeofday(&tv,NULL);
  temptime = (time_t)tv.tv_sec;
  atm = localtime(&temptime);
  //atm = localtime(&tv.tv_sec);
  current_timestamp_us = tv.tv_sec *1000000 + tv.tv_usec;
  if( out->status == VP_API_STATUS_INIT )
  {
    out->numBuffers = 1;
    out->indexBuffer = 0;
    out->lineSize = NULL;
    //out->buffers = (int8_t **) vp_os_malloc( sizeof(int8_t *) );
  }
  /* Pass the input through untouched. */
  out->size = in->size;
  out->status = in->status;
  out->buffers = in->buffers;
  if( in->status == VP_API_STATUS_ENDED )
  {
    out->status = in->status;
  }
  else if(in->status == VP_API_STATUS_STILL_RUNNING)
  {
    out->status = VP_API_STATUS_PROCESSING;
  }
  else
  {
    out->status = in->status;
  }
  if(cfg->startRec==VIDEO_RECORD_HOLD)
  {
    /* Create a new video file named from the current date/time and the
     * picture dimensions, then arm recording. */
    sprintf(video_filename_ffmpeg, "%s/video_%04d%02d%02d_%02d%02d%02d_w%i_h%i.mp4", VIDEO_FILE_DEFAULT_PATH, atm->tm_year+1900, atm->tm_mon+1, atm->tm_mday, atm->tm_hour, atm->tm_min, atm->tm_sec, picture->width, picture->height);
    create_video_file(video_filename_ffmpeg, picture->width,picture->height);
    flag_video_file_open=1;
    cfg->startRec=VIDEO_RECORD_START;
    first_frame_timestamp_us = current_timestamp_us;
    frame_counter=1;
  }
  if( out->size > 0 && out->status == VP_API_STATUS_PROCESSING && cfg->startRec==VIDEO_RECORD_START)
  {
    frame_size = ( previous_frame.width * previous_frame.height )*3/2;
    /* Send the previous frame to FFMPEG */
    if (previous_frame.buffer!=NULL)
    {
      /* Compute the number of frames to store to achieve 60 FPS
       * (16666 us per frame). This is computed from the timestamp of the
       * first frame to avoid error accumulation. */
      int current_frame_number = (current_timestamp_us - first_frame_timestamp_us) / 16666;
      int nb_frames_to_write = current_frame_number - previous_frame.frame_number;
      if (picture_to_encode!=NULL){
        /* Point the encoder picture at the current YUV420 planes. */
        picture_to_encode->data[0] = picture_to_encode->base[0] = picture->y_buf;
        picture_to_encode->data[1] = picture_to_encode->base[1] = picture->cb_buf;
        picture_to_encode->data[2] = picture_to_encode->base[2] = picture->cr_buf;
        picture_to_encode->linesize[0] = picture->width;
        picture_to_encode->linesize[1] = picture->width/2;
        picture_to_encode->linesize[2] = picture->width/2;
      }
      /* Duplicate the frame to keep the file's timeline in real time. */
      for (i=0;i<nb_frames_to_write;i++)
      {
        //printf("Storing %i frames\n",nb_frames_to_write);
        write_video_frame(oc, video_st);
      }
      /* Pass infos to next iteration */
      previous_frame.frame_number = current_frame_number;
    }
    /* Create a buffer to hold the current frame */
    //if (0)
    {
      /* Drop the cached frame when the picture size changed. */
      if (previous_frame.buffer!=NULL && (previous_frame.width!=picture->width || previous_frame.height!=picture->height))
      {
        vp_os_free(previous_frame.buffer);
        previous_frame.buffer=NULL;
      }
      if (previous_frame.buffer==NULL)
      {
        previous_frame.width = picture->width;
        previous_frame.height = picture->height;
        frame_size = ( previous_frame.width * previous_frame.height )*3/2;
        printf("Allocating previous frame.\n");
        previous_frame.buffer=vp_os_malloc( frame_size );
      }
      /* Copy the current frame in a buffer so it can be encoded at next stage call */
      if (previous_frame.buffer!=NULL)
      {
        char * dest = previous_frame.buffer;
        int size = picture->width*picture->height;
        vp_os_memcpy(dest,picture->y_buf,size);
        dest+=size;
        size /= 4;
        vp_os_memcpy(dest,picture->cb_buf,size);
        dest+=size;
        vp_os_memcpy(dest,picture->cr_buf,size);
      }
    }
  }
  else
  {
    /* Not recording: close the file once when STOP is requested. */
    if(cfg->startRec==VIDEO_RECORD_STOP && flag_video_file_open)
    {
      close_video_file();
      flag_video_file_open=0;
    }
  }
  vp_os_mutex_unlock( &out->lock );
  return C_OK;
}