/* Destroy SVP instance and release everything it owns. svp may be NULL, in
 * which case this is a no-op.
 *
 * Teardown order matters:
 *   1. svp_stop()        - stop the worker thread / timers first so nothing
 *                          is still feeding buffers into the pipeline,
 *   2. disable components,
 *   3. destroy the connection, then the components,
 *   4. free pools/queues and the vcos primitives.
 *
 * The SVP_CREATED_* bits in svp->created guard the vcos primitives so that a
 * partially constructed instance (create failed part-way) can be destroyed
 * safely. NOTE(review): mmal_component_disable/destroy are assumed to accept
 * the NULL entries that components[] may contain when not all components were
 * created -- confirm against the MMAL API contract. */
void svp_destroy(SVP_T *svp)
{
   if (svp)
   {
      /* All components in one array so the disable/destroy loops below can
       * treat them uniformly. */
      MMAL_COMPONENT_T *components[] = { svp->reader, svp->video_decode, svp->camera };
      MMAL_COMPONENT_T **comp;

      /* Stop thread, disable connection and components */
      svp_stop(svp);

      for (comp = components; comp < components + vcos_countof(components); comp++)
      {
         mmal_component_disable(*comp);
      }

      /* Destroy connection + components */
      if (svp->connection)
      {
         mmal_connection_destroy(svp->connection);
      }

      for (comp = components; comp < components + vcos_countof(components); comp++)
      {
         mmal_component_destroy(*comp);
      }

      /* Free remaining resources */
      if (svp->out_pool)
      {
         mmal_pool_destroy(svp->out_pool);
      }

      if (svp->queue)
      {
         mmal_queue_destroy(svp->queue);
      }

      /* vcos primitives are only deleted if their creation flag was set, so a
       * half-initialised instance tears down cleanly. */
      if (svp->created & SVP_CREATED_WD_TIMER)
      {
         vcos_timer_delete(&svp->wd_timer);
      }

      if (svp->created & SVP_CREATED_TIMER)
      {
         vcos_timer_delete(&svp->timer);
      }

      if (svp->created & SVP_CREATED_MUTEX)
      {
         vcos_mutex_delete(&svp->mutex);
      }

      if (svp->created & SVP_CREATED_SEM)
      {
         vcos_semaphore_delete(&svp->sema);
      }

      vcos_free(svp);
   }
}
/* Return the string representation of 3D support bit mask */ static const char* threed_str(uint32_t struct_3d_mask, int json_output) { #define THREE_D_FORMAT_NAME_MAX_LEN 10 //Including the separator static const char* three_d_format_names[] = { //See HDMI_3D_STRUCT_T bit fields "FP", "F-Alt", "L-Alt", "SbS-Full", "Ldep", "Ldep+Gfx", "TopBot", "SbS-HH", "SbS-OLOR", "SbS-OLER", "SbS-ELOR", "SbS-ELER" }; //Longest possible string is the concatenation of all of the above static char struct_desc[vcos_countof(three_d_format_names)*THREE_D_FORMAT_NAME_MAX_LEN+4] = {0}; const size_t struct_desc_length = sizeof(struct_desc); size_t j, added = 0, offset = 0; int tmp = 0; if(!json_output) tmp = status_sprintf(struct_desc, struct_desc_length, &offset, "3D:"); if(struct_3d_mask) { for(j = 0; !tmp && j < vcos_countof(three_d_format_names); j++) { if(struct_3d_mask & (1 << j)) { tmp = status_sprintf(struct_desc, struct_desc_length, &offset, json_output ? "\"%s\"," : "%s|", three_d_format_names[j]); added++; } } if(!tmp && added > 0) struct_desc[strlen(struct_desc)-1] = '\0'; //Get rid of final "|" } else if(!tmp && !added && !json_output) { status_sprintf(struct_desc, struct_desc_length, &offset, "none"); } return struct_desc; }
/* Return the string representation of 3D support bit mask */ static const char* threed_str(uint32_t struct_3d_mask) { #define THREE_D_FORMAT_NAME_MAX_LEN 10 //Including the separator static const char* three_d_format_names[] = { //See HDMI_3D_STRUCT_T bit fields "FP", "F-Alt", "L-Alt", "SbS-Full", "Ldep", "Ldep+Gfx", "TopBot", "SbS-HH", "SbS-OLOR", "SbS-OLER", "SbS-ELOR", "SbS-ELER" }; //Longest possible string is the concatenation of all of the above static char struct_desc[vcos_countof(three_d_format_names)*THREE_D_FORMAT_NAME_MAX_LEN+4] = {0}; char *p = &struct_desc[0]; char * const p_end = p + sizeof struct_desc; size_t j, added = 0; p += snprintf(p, p_end - p, "3D:"); if(struct_3d_mask) { for(j = 0; j < vcos_countof(three_d_format_names); j++) { if(struct_3d_mask & (1 << j)) { p += snprintf(p, p_end - p, "%s|", three_d_format_names[j]); added++; } } if(added > 0) *(--p) = '\0'; //Get rid of final "|" } if(!added) p += snprintf(p, p_end - p, "none"); return struct_desc; }
/* Translate a well-known video size name ("1080p", "vga", ...) into a
 * width/height pair. The comparison is case-insensitive.
 * @param w   receives the width in pixels on success.
 * @param h   receives the height in pixels on success.
 * @param str size name to look up.
 * @return MMAL_SUCCESS if the name was recognised, MMAL_EINVAL otherwise
 *         (in which case *w and *h are left untouched). */
MMAL_STATUS_T mmal_parse_video_size(uint32_t *w, uint32_t *h, const char *str)
{
   static struct
   {
      const char *name;
      uint32_t width;
      uint32_t height;
   } sizes[] = {
      { "1080p", 1920, 1080 },
      { "720p",  1280,  720 },
      { "vga",    640,  480 },
      { "wvga",   800,  480 },
      { "cif",    352,  288 },
      { "qcif", 352/2, 288/2 },
   };
   size_t idx;

   for (idx = 0; idx < vcos_countof(sizes); idx++)
   {
      if (vcos_strcasecmp(str, sizes[idx].name) != 0)
         continue;
      *w = sizes[idx].width;
      *h = sizes[idx].height;
      return MMAL_SUCCESS;
   }

   return MMAL_EINVAL;
}
/* Redraw callback: draw a rotating textured square, sourcing the camera
 * preview via an external OES texture.
 * @param state raspitex state holding the preview texture handle.
 * @return 0 on success; GL errors inside GLCHK-wrapped calls are handled by
 *         that macro.
 * NOTE(review): relies on file-scope 'angle', 'vertices' and 'tex_coords'
 * defined elsewhere in this file -- 'angle' is presumably advanced by another
 * callback; confirm where it is updated. */
static int square_redraw(RASPITEX_STATE *state)
{
   /* Bind the OES texture which is used to render the camera preview */
   GLCHK(glBindTexture(GL_TEXTURE_EXTERNAL_OES, state->texture));

   glLoadIdentity();
   glRotatef(angle, 0.0, 0.0, 1.0);

   glEnableClientState(GL_VERTEX_ARRAY);
   glVertexPointer(3, GL_FLOAT, 0, vertices);
   glDisableClientState(GL_COLOR_ARRAY);
   glEnableClientState(GL_TEXTURE_COORD_ARRAY);
   glTexCoordPointer(2, GL_FLOAT, 0, tex_coords);
   /* Two texture coords per vertex, so vertex count = array length / 2. */
   GLCHK(glDrawArrays(GL_TRIANGLES, 0, vcos_countof(tex_coords) / 2));
   return 0;
}
/** Destroy all EGL images held in the slot cache.
 * Clears every slot's frame handle and, where an EGL image was created for
 * it, destroys that image and resets the slot so it can be reused.
 * @param vt vidtex instance whose slots are torn down. */
static void vidtex_destroy_images(VIDTEX_T *vt)
{
   size_t i;

   for (i = 0; i < vcos_countof(vt->slots); i++)
   {
      VIDTEX_IMAGE_SLOT_T *entry = &vt->slots[i];

      entry->video_frame = NULL;
      if (entry->image)
      {
         vcos_log_trace("Destroying EGL image %p", entry->image);
         eglDestroyImageKHR(vt->display, entry->image);
         entry->image = NULL;
      }
   }
}
MMAL_STATUS_T mmal_parse_video_codec(uint32_t *dest, const char *str) { static string_pair_t video_codec_enums[] = { { "h264", MMAL_ENCODING_H264 }, { "h263", MMAL_ENCODING_H263 }, { "mpeg4", MMAL_ENCODING_MP4V }, { "mpeg2", MMAL_ENCODING_MP2V }, { "vp8", MMAL_ENCODING_VP8 }, { "vp7", MMAL_ENCODING_VP7 }, { "vp6", MMAL_ENCODING_VP6 }, }; int i = 0; MMAL_STATUS_T ret; ret = parse_enum(&i, video_codec_enums, vcos_countof(video_codec_enums), str); *dest = i; return ret; }
/* Query and print all HDMI modes supported by the display for one group.
 * @param group HDMI_RES_GROUP_CEA or HDMI_RES_GROUP_DMT (asserted).
 * @return 0 on success, -1 if the mode query failed.
 *
 * NOTE(review): supported_modes is static (too large for the stack) and is
 * only zeroed by its initializer on first use; entries from a previous call
 * may persist, but only the first num_modes entries reported by the query
 * are read, so this is assumed safe -- confirm vc_tv_hdmi_get_supported_modes
 * fills every entry it counts. */
static int get_modes( HDMI_RES_GROUP_T group)
{
   static TV_SUPPORTED_MODE_T supported_modes[MAX_MODE_ID] = {{0}};
   HDMI_RES_GROUP_T preferred_group;
   uint32_t preferred_mode;
   int num_modes;
   int i;

   vcos_assert(( group == HDMI_RES_GROUP_CEA ) ||
               ( group == HDMI_RES_GROUP_DMT ));

   num_modes = vc_tv_hdmi_get_supported_modes( group, supported_modes,
                                               vcos_countof(supported_modes),
                                               &preferred_group,
                                               &preferred_mode );
   if ( num_modes < 0 )
   {
      LOG_ERR( "Failed to get modes" );
      return -1;
   }

   /* num_modes is a signed int, so print it with %d (the original %u was a
    * signed/unsigned format mismatch). */
   LOG_STD( "Group %s has %d modes:",
            HDMI_RES_GROUP_NAME(group), num_modes );

   for ( i = 0; i < num_modes; i++ )
   {
      /* Pixel-repetition annotation, e.g. "x2 "; blank when not repeated. */
      char p[8] = {0};
      if( supported_modes[i].pixel_rep )
         snprintf(p, sizeof(p)-1, "x%d ", supported_modes[i].pixel_rep);
      else
         snprintf(p, sizeof(p)-1, " ");

      LOG_STD( "%s mode %u: %ux%u @ %uHz %s, clock:%luMHz %s%s %s",
               supported_modes[i].native ? " (native)" : "         ",
               supported_modes[i].code, supported_modes[i].width,
               supported_modes[i].height, supported_modes[i].frame_rate,
               aspect_ratio_str(supported_modes[i].aspect_ratio),
               supported_modes[i].pixel_freq / 1000000UL, p,
               supported_modes[i].scan_mode ? "interlaced" : "progressive",
               supported_modes[i].struct_3d_mask ? threed_str(supported_modes[i].struct_3d_mask) : "");
   }

   return 0;
}
static int do_stats(int argc, const char **argv) { MMAL_VC_STATS_T stats; int reset_stats = strcasecmp(argv[1], "reset") == 0; MMAL_STATUS_T st = mmal_vc_get_stats(&stats, reset_stats); int ret; (void)argc; (void)argv; if (st != MMAL_SUCCESS) { fprintf(stderr, "error getting status (%i,%s)\n", st, mmal_status_to_string(st)); ret = -1; } else { unsigned i; uint32_t *ptr = (uint32_t*)&stats; for (i=0; i<vcos_countof(stats_fields); i++) { printf("%-32s: %u\n", stats_fields[i].name, ptr[stats_fields[i].offset/sizeof(uint32_t)]); } ret = 0; } return ret; }
/* Query and print all HDMI modes supported by the display for one group,
 * either as human-readable lines or as a JSON array.
 * @param group       HDMI_RES_GROUP_T group, CEA or DMT (asserted).
 * @param json_output non-zero to emit a JSON array of mode objects; zero for
 *                    the plain text listing.
 * @return 0 on success, -1 if the mode query failed. */
static int get_modes( HDMI_RES_GROUP_T group, int json_output)
{
   /* Static because the table is large; cleared below before each query so no
    * stale entries from a previous call survive. */
   static TV_SUPPORTED_MODE_T supported_modes[MAX_MODE_ID];
   HDMI_RES_GROUP_T preferred_group;
   uint32_t preferred_mode;
   int num_modes;
   int i;

   vcos_assert(( group == HDMI_RES_GROUP_CEA ) ||
               ( group == HDMI_RES_GROUP_DMT ));

   memset(supported_modes, 0, sizeof(supported_modes));
   num_modes = vc_tv_hdmi_get_supported_modes( group, supported_modes,
                                               vcos_countof(supported_modes),
                                               &preferred_group,
                                               &preferred_mode );
   if ( num_modes < 0 )
   {
      LOG_ERR( "Failed to get modes" );
      return -1;
   }

   if (json_output)
   {
      LOG_STD( "[" );
   }
   else
   {
      LOG_STD( "Group %s has %u modes:",
               HDMI_RES_GROUP_NAME(group), num_modes );
   }

   for ( i = 0; i < num_modes; i++ )
   {
      /* Pixel-repetition annotation for the text listing, e.g. "x2 ";
       * left empty when the mode is not pixel-repeated. */
      char p[8] = {0};
      if( supported_modes[i].pixel_rep )
         vcos_safe_sprintf(p, sizeof(p)-1, 0, "x%d ", supported_modes[i].pixel_rep);

      if (json_output)
      {
         /* Trailing comma on every element except the last keeps the output
          * valid JSON. */
         LOG_STD( "{ \"code\":%u, \"width\":%u, \"height\":%u, \"rate\":%u, \"aspect_ratio\":\"%s\", \"scan\":\"%s\", \"3d_modes\":[%s] }%s",
                  supported_modes[i].code, supported_modes[i].width,
                  supported_modes[i].height, supported_modes[i].frame_rate,
                  aspect_ratio_str(supported_modes[i].aspect_ratio),
                  supported_modes[i].scan_mode ? "i" : "p",
                  supported_modes[i].struct_3d_mask ? threed_str(supported_modes[i].struct_3d_mask, 1) : "",
                  (i+1 < num_modes) ? "," : "");
      }
      else
      {
         /* The preferred mode is the one the display itself advertises via
          * preferred_group/preferred_mode. */
         int preferred = supported_modes[i].group == preferred_group &&
                         supported_modes[i].code == preferred_mode;
         LOG_STD( "%s mode %u: %ux%u @ %uHz %s, clock:%uMHz %s%s %s",
                  preferred ? " (prefer)" : supported_modes[i].native ? " (native)" : "         ",
                  supported_modes[i].code, supported_modes[i].width,
                  supported_modes[i].height, supported_modes[i].frame_rate,
                  aspect_ratio_str(supported_modes[i].aspect_ratio),
                  supported_modes[i].pixel_freq / 1000000U, p,
                  supported_modes[i].scan_mode ? "interlaced" : "progressive",
                  supported_modes[i].struct_3d_mask ? threed_str(supported_modes[i].struct_3d_mask, 0) : "");
      }
   }

   if (json_output)
   {
      LOG_STD( "]" );
   }

   return 0;
}
/* Draw one video frame onto EGL surface.
 * Looks up (or lazily creates) an EGL image wrapping the decoded frame,
 * binds it to the external OES texture, and draws a slowly rotating textured
 * quad, swapping buffers at the end.
 * @param vt vidtex instance.
 * @param video_frame MMAL opaque buffer handle for decoded video frame. Can't be NULL.
 */
static void vidtex_draw(VIDTEX_T *vt, void *video_frame)
{
   EGLImageKHR image;
   VIDTEX_IMAGE_SLOT_T *slot;
   /* Frame counter driving the rotation angle; persists across calls. */
   static uint32_t frame_num = 0;

   vcos_assert(video_frame);

   glClearColor(0, 0, 0, 0);
   glClearDepthf(1);
   glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
   glLoadIdentity();
   glBindTexture(GL_TEXTURE_EXTERNAL_OES, vt->texture);
   VIDTEX_CHECK_GL(vt);

   /* Lookup or create EGL image corresponding to supplied buffer handle.
    * N.B. Slot array is filled in sequentially, with the images all destroyed together on
    * vidtex termination; it never has holes. */
   image = EGL_NO_IMAGE_KHR;
   for (slot = vt->slots; slot < vt->slots + vcos_countof(vt->slots); slot++)
   {
      if (slot->video_frame == video_frame)
      {
         /* Cache hit: this frame handle already has an EGL image. */
         vcos_assert(slot->image);
         image = slot->image;
         break;
      }

      if (slot->video_frame == NULL)
      {
         /* First free slot: create a new EGL image for this frame. The
          * target selects which plane of the frame is wrapped (single
          * luma/chroma plane, or the whole multimedia buffer by default). */
         EGLenum target;
         vcos_assert(slot->image == NULL);

         if (vt->opts & VIDTEX_OPT_Y_TEXTURE)
            target = EGL_IMAGE_BRCM_MULTIMEDIA_Y;
         else if (vt->opts & VIDTEX_OPT_U_TEXTURE)
            target = EGL_IMAGE_BRCM_MULTIMEDIA_U;
         else if (vt->opts & VIDTEX_OPT_V_TEXTURE)
            target = EGL_IMAGE_BRCM_MULTIMEDIA_V;
         else
            target = EGL_IMAGE_BRCM_MULTIMEDIA;

         image = eglCreateImageKHR(vt->display, EGL_NO_CONTEXT, target, (EGLClientBuffer)video_frame, NULL);
         if (image == EGL_NO_IMAGE_KHR)
         {
            vcos_log_error("EGL image conversion error");
         }
         else
         {
            vcos_log_trace("Created EGL image %p for buf %p", image, video_frame);
            slot->video_frame = video_frame;
            slot->image = image;
         }
         VIDTEX_CHECK_GL(vt);
         break;
      }
   }

   if (slot == vt->slots + vcos_countof(vt->slots))
   {
      /* Loop ran off the end: every slot is occupied by a different frame. */
      vcos_log_error("Exceeded configured max number of EGL images");
   }

   /* Draw the EGL image */
   if (image != EGL_NO_IMAGE_KHR)
   {
      /* Assume 30fps */
      int frames_per_rev = 30 * 15;
      GLfloat angle = (frame_num * 360) / (GLfloat) frames_per_rev;
      frame_num = (frame_num + 1) % frames_per_rev;

      glEGLImageTargetTexture2DOES(GL_TEXTURE_EXTERNAL_OES, image);
      VIDTEX_CHECK_GL(vt);

      glRotatef(angle, 0.0, 0.0, 1.0);
      glEnableClientState(GL_VERTEX_ARRAY);
      glVertexPointer(3, GL_FLOAT, 0, vt_vertices);
      glDisableClientState(GL_COLOR_ARRAY);
      glEnableClientState(GL_TEXTURE_COORD_ARRAY);
      glTexCoordPointer(2, GL_FLOAT, 0, vt_tex_coords);
      /* Two texture coords per vertex, so vertex count = array length / 2. */
      glDrawArrays(GL_TRIANGLES, 0, vcos_countof(vt_tex_coords) / 2);

      eglSwapBuffers(vt->display, vt->surface);

      /* Optionally throw the image cache away every frame (exercises the
       * create/destroy path). */
      if (vt->opts & VIDTEX_OPT_IMG_PER_FRAME)
      {
         vidtex_destroy_images(vt);
      }

      vt->num_swaps++;
   }
   VIDTEX_CHECK_GL(vt);
}