/*
 * Push a complete message (header + payload) into the capture pipeline.
 *
 * The message goes to the unscaled buffer when one is configured,
 * otherwise directly to the main buffer.
 *
 * Returns 0 on success, EAGAIN when the library is not running, or the
 * first error reported by the packetstream API.
 *
 * Fix vs. previous version: a ps_packet that was successfully initialized
 * is now always destroyed, and a packet left open by a failed write is
 * cancelled — earlier error paths leaked the packet.
 */
int opengl_push_message(glc_message_header_t *hdr, void *message, size_t message_size)
{
	ps_packet_t packet;
	int ret = 0;
	int destroy_ret;
	ps_buffer_t *to;

	if (!lib.running)
		return EAGAIN;

	/* prefer the unscaled buffer when present */
	if (opengl.unscaled)
		to = opengl.unscaled;
	else
		to = opengl.buffer;

	if ((ret = ps_packet_init(&packet, to)))
		goto finish;

	if ((ret = ps_packet_open(&packet, PS_PACKET_WRITE)))
		goto destroy;

	if ((ret = ps_packet_write(&packet, hdr, sizeof(glc_message_header_t))))
		goto cancel;
	if ((ret = ps_packet_write(&packet, message, message_size)))
		goto cancel;

	ret = ps_packet_close(&packet);
	goto destroy;

cancel:
	/* abort the partially written packet so the buffer stays consistent */
	ps_packet_cancel(&packet);

destroy:
	/* always release the packet; keep the first error if one occurred */
	destroy_ret = ps_packet_destroy(&packet);
	if (ret == 0)
		ret = destroy_ret;

finish:
	return ret;
}
/*
 * The purpose of this thread is to make this module async signal safe.
 * ie: it couldn't be called safely from a sighandler if host process
 * use ALSA async mode and write to ALSA API from a sighandler.
 *
 * Protocol with the capture side (as visible here):
 *  - the producer posts capture_full after filling capture_data/
 *    capture_size/capture_time; this thread consumes it and writes one
 *    GLC_MESSAGE_AUDIO_DATA packet (message header + audio data header +
 *    payload) to stream->packet.
 *  - capture_ready is set to 1 whenever this thread is ready for a new
 *    chunk, and cleared while it is working.
 *  - a NEGATIVE capture_size is a buffer-grow request: its magnitude is
 *    the needed size, handled by alsa_hook_realloc_capture_buf.
 *  - in non-async mode the producer blocks on capture_empty, which is
 *    posted here after the packet is written.  NOTE(review): when a
 *    ps_packet_* call fails we break out WITHOUT posting capture_empty
 *    or cancelling the packet — presumably the whole capture is torn
 *    down at that point; confirm against the shutdown path.
 */
void *alsa_hook_thread(void *argptr)
{
	struct alsa_hook_stream_s *stream = (struct alsa_hook_stream_s *) argptr;
	glc_audio_data_header_t hdr;
	glc_message_header_t msg_hdr;
	int ret = 0;

	/* message type and stream id never change for this stream */
	msg_hdr.type = GLC_MESSAGE_AUDIO_DATA;
	hdr.id = stream->id;

	stream->capture_ready = 1;
	while (1) {
		/* wait until the producer has filled the capture buffer */
		sem_wait(&stream->capture_full);
		stream->capture_ready = 0;

		/* thread.running cleared by the shutdown path; exit loop */
		if (unlikely(!stream->thread.running))
			break;

		/* negative size = request to grow the capture buffer */
		if (unlikely(stream->capture_size < 0)) {
			alsa_hook_realloc_capture_buf(stream,-stream->capture_size);
			goto capture_ready;
		}

		hdr.time = stream->capture_time;
		hdr.size = stream->capture_size;

		/* reserve exact packet size up front, then write the three parts */
		if (unlikely((ret = ps_packet_open(&stream->packet, PS_PACKET_WRITE))))
			break;
		if (unlikely((ret = ps_packet_setsize(&stream->packet, hdr.size
						+ sizeof(glc_message_header_t)
						+ sizeof(glc_audio_data_header_t)))))
			break;
		if (unlikely((ret = ps_packet_write(&stream->packet, &msg_hdr,
						sizeof(glc_message_header_t)))))
			break;
		if (unlikely((ret = ps_packet_write(&stream->packet, &hdr,
						sizeof(glc_audio_data_header_t)))))
			break;
		if (unlikely((ret = ps_packet_write(&stream->packet,
						stream->capture_data, hdr.size))))
			break;
		if (unlikely((ret = ps_packet_close(&stream->packet))))
			break;

		/* synchronous mode: unblock the producer waiting for the buffer */
		if (!(stream->mode & SND_PCM_ASYNC))
			sem_post(&stream->capture_empty);

capture_ready:
		stream->capture_ready = 1;
	}

	if (ret != 0)
		glc_log(stream->alsa_hook->glc, GLC_ERROR, "alsa_hook",
			"thread failed: %s (%d)", strerror(ret), ret);

	return NULL;
}
/*
 * Write one chunk of captured audio to the target buffer as a
 * GLC_MESSAGE_AUDIO_DATA packet (message header, audio data header,
 * then the raw payload).
 *
 * A pending configuration change is flushed first.  When
 * AUDIO_CAPTURE_IGNORE_TIME is set, timestamps are synthesized from the
 * data rate instead of the state clock.
 *
 * Returns 0 on success (or when capture is inactive); on failure the
 * target buffer and the whole capture session are cancelled and an
 * errno-style code is returned.
 */
int audio_capture_data(audio_capture_t audio_capture, void *data, size_t size)
{
	glc_message_header_t msg_hdr;
	glc_audio_data_header_t audio_hdr;
	int ret;

	/* silently ignore data while capture is inactive */
	if (!(audio_capture->flags & AUDIO_CAPTURE_CAPTURING))
		return 0;

	/* flush a pending stream configuration change before any data */
	if (audio_capture->flags & AUDIO_CAPTURE_CFG_CHANGED) {
		ret = audio_capture_write_cfg(audio_capture);
		if (ret)
			return ret;
		audio_capture->flags &= ~AUDIO_CAPTURE_CFG_CHANGED;
	}

	if (!(audio_capture->flags & AUDIO_CAPTURE_IGNORE_TIME))
		audio_capture->time = glc_state_time(audio_capture->glc);

	msg_hdr.type = GLC_MESSAGE_AUDIO_DATA;
	audio_hdr.id = audio_capture->id; /* should be set to valid one */
	audio_hdr.size = size;
	audio_hdr.time = audio_capture->time;

	/* advance the synthetic clock by the duration of this chunk */
	if (audio_capture->flags & AUDIO_CAPTURE_IGNORE_TIME)
		audio_capture->time += ((glc_utime_t) size * (glc_utime_t) 1000000)
			/ (glc_utime_t) (audio_capture_frames_to_bytes(audio_capture, 1)
					 * audio_capture->rate);

	ret = ps_packet_open(&audio_capture->packet, PS_PACKET_WRITE);
	if (ret)
		goto err;
	ret = ps_packet_write(&audio_capture->packet, &msg_hdr,
			      sizeof(glc_message_header_t));
	if (ret)
		goto err;
	ret = ps_packet_write(&audio_capture->packet, &audio_hdr,
			      sizeof(glc_audio_data_header_t));
	if (ret)
		goto err;
	ret = ps_packet_write(&audio_capture->packet, data, size);
	if (ret)
		goto err;
	ret = ps_packet_close(&audio_capture->packet);
	if (ret)
		goto err;

	return 0;

err:
	/* unrecoverable: cancel the buffer and the whole capture session */
	ps_buffer_cancel(audio_capture->target);
	glc_state_set(audio_capture->glc, GLC_STATE_CANCEL);
	glc_log(audio_capture->glc, GLC_ERROR, "audio_capture",
		"can't write audio data to buffer");
	glc_log(audio_capture->glc, GLC_ERROR, "audio_capture",
		"%s (%d)", strerror(ret), ret);
	return ret;
}
/*
 * (Re)initialize an ALSA hook stream: allocate a stream id if needed,
 * (re)create its packet, announce the audio format to the pipeline and
 * start the writer thread.
 *
 * Returns 0 on success, EINVAL when the stream has no format yet, or the
 * first packetstream/thread error.
 *
 * Fix vs. previous version: ps_packet_init/open/write/close return values
 * were silently ignored (every sibling function in this file checks them);
 * also initialized is cleared after destroying the old packet so a failed
 * re-init can no longer lead to a double ps_packet_destroy.
 */
int alsa_hook_stream_init(alsa_hook_t alsa_hook, struct alsa_hook_stream_s *stream)
{
	int ret;
	glc_message_header_t msg_hdr;
	glc_audio_format_message_t fmt_msg;

	if (unlikely(!stream->fmt))
		return EINVAL;

	/* we need proper id for the stream */
	if (stream->id < 1)
		glc_state_audio_new(alsa_hook->glc, &stream->id,
				    &stream->state_audio);

	glc_log(alsa_hook->glc, GLC_INFO, "alsa_hook",
		"%p: initializing stream %d", stream->pcm, stream->id);

	/* (re)init packet; drop the old one first on re-initialization */
	if (stream->initialized) {
		ps_packet_destroy(&stream->packet);
		stream->initialized = 0;
	}
	if (unlikely((ret = ps_packet_init(&stream->packet, alsa_hook->to))))
		goto err;

	/* prepare audio format message */
	msg_hdr.type = GLC_MESSAGE_AUDIO_FORMAT;
	fmt_msg.id = stream->id;
	fmt_msg.flags = stream->flags;
	fmt_msg.rate = stream->rate;
	fmt_msg.channels = stream->channels;
	fmt_msg.format = stream->format;

	if (unlikely((ret = ps_packet_open(&stream->packet, PS_PACKET_WRITE))))
		goto err;
	if (unlikely((ret = ps_packet_write(&stream->packet, &msg_hdr,
					    sizeof(glc_message_header_t)))))
		goto err_cancel;
	if (unlikely((ret = ps_packet_write(&stream->packet, &fmt_msg,
					    sizeof(glc_audio_format_message_t)))))
		goto err_cancel;
	if (unlikely((ret = ps_packet_close(&stream->packet))))
		goto err;

	alsa_hook_stream_wait(stream);

	ret = glc_simple_thread_create(alsa_hook->glc, &stream->thread,
				       alsa_hook_thread, stream);

	stream->initialized = 1;
	return ret;

err_cancel:
	/* abort the partially written format message */
	ps_packet_cancel(&stream->packet);
err:
	glc_log(alsa_hook->glc, GLC_ERROR, "alsa_hook",
		"can't write audio format message: %s (%d)", strerror(ret), ret);
	return ret;
}
/*
 * Create or update the stream configuration for a video stream: recompute
 * the capture geometry, write a GLC_MESSAGE_VIDEO_FORMAT message to the
 * stream's packet, refresh color correction and (re)create the PBO when
 * PBO transfers are in use.
 *
 * Returns 0 on success or the first packetstream error.
 *
 * Fix vs. previous version: ps_packet_open/write/close results were
 * ignored and the function always returned 0 even when the format message
 * was never delivered; errors are now handled the same way as in
 * gl_capture_update_color.
 */
int gl_capture_write_video_format_message(gl_capture_t gl_capture,
					  struct gl_capture_video_stream_s *video,
					  unsigned int w, unsigned int h)
{
	glc_message_header_t msg;
	glc_video_format_message_t format_msg;
	int ret = 0;

	gl_capture_calc_geometry(gl_capture, video, w, h);

	glc_log(gl_capture->glc, GLC_INFO, "gl_capture",
		"creating/updating configuration for video %d", video->id);

	msg.type = GLC_MESSAGE_VIDEO_FORMAT;
	format_msg.flags = video->flags;
	format_msg.format = video->format;
	format_msg.id = video->id;
	format_msg.width = video->cw;
	format_msg.height = video->ch;

	if (unlikely((ret = ps_packet_open(&video->packet, PS_PACKET_WRITE))))
		goto err;
	if (unlikely((ret = ps_packet_write(&video->packet, &msg,
					    sizeof(glc_message_header_t)))))
		goto cancel;
	if (unlikely((ret = ps_packet_write(&video->packet, &format_msg,
					    sizeof(glc_video_format_message_t)))))
		goto cancel;
	if (unlikely((ret = ps_packet_close(&video->packet))))
		goto err;

	glc_log(gl_capture->glc, GLC_DEBUG, "gl_capture",
		"video %d: %ux%u (%ux%u), 0x%02x flags", video->id,
		video->cw, video->ch, video->w, video->h, video->flags);

	/* how about color correction? */
	gl_capture_update_color(gl_capture, video);

	if (gl_capture->flags & GL_CAPTURE_USE_PBO) {
		if (video->pbo)
			gl_capture_destroy_pbo(gl_capture, video);
		if (gl_capture_create_pbo(gl_capture, video)) {
			/* PBO unavailable; fall back to plain readback */
			gl_capture->flags &= ~(GL_CAPTURE_TRY_PBO | GL_CAPTURE_USE_PBO);
			/** \todo destroy pbo stuff? */
			/** \todo race condition? */
		}
	}

	return 0;

cancel:
	/* abort the partially written format message */
	ps_packet_cancel(&video->packet);
err:
	glc_log(gl_capture->glc, GLC_ERROR, "gl_capture",
		"can't write video format message: %s (%d)", strerror(ret), ret);
	return ret;
}
/*
 * Write a GLC_MESSAGE_CLOSE marker into the given buffer to signal end of
 * stream to downstream consumers.
 *
 * Returns 0 on success or the first packetstream error.
 *
 * Fix vs. previous version: a ps_packet that was successfully initialized
 * is now always destroyed — previously any open/write/close failure jumped
 * straight to finish and leaked the packet; a packet left open by a failed
 * write is also cancelled.
 */
int glc_util_write_end_of_stream(glc_t *glc, ps_buffer_t *to)
{
	int ret = 0;
	int destroy_ret;
	ps_packet_t packet;
	glc_message_header_t header;
	header.type = GLC_MESSAGE_CLOSE;

	if (unlikely((ret = ps_packet_init(&packet, to))))
		goto finish;

	if (unlikely((ret = ps_packet_open(&packet, PS_PACKET_WRITE))))
		goto destroy;

	if (unlikely((ret = ps_packet_write(&packet, &header,
					    sizeof(glc_message_header_t))))) {
		/* abort the partially written packet */
		ps_packet_cancel(&packet);
		goto destroy;
	}

	ret = ps_packet_close(&packet);

destroy:
	/* always release the packet; keep the first error if one occurred */
	destroy_ret = ps_packet_destroy(&packet);
	if (ret == 0)
		ret = destroy_ret;

finish:
	return ret;
}
/*
 * Announce (or re-announce) the audio stream configuration by writing a
 * GLC_MESSAGE_AUDIO_FORMAT message to the capture packet.
 *
 * A stream id is allocated lazily on first use.
 *
 * Returns 0 on success; on failure the capture session is cancelled, the
 * target buffer is cancelled and an errno-style code is returned.
 */
int audio_capture_write_cfg(audio_capture_t audio_capture)
{
	glc_message_header_t hdr;
	glc_audio_format_message_t fmt_msg;
	int ret = 0;

	/* allocate a stream id on first use */
	if (!audio_capture->id)
		glc_state_audio_new(audio_capture->glc, &audio_capture->id,
				    &audio_capture->state_audio);

	hdr.type = GLC_MESSAGE_AUDIO_FORMAT;
	fmt_msg.id = audio_capture->id;
	fmt_msg.flags = audio_capture->format_flags;
	fmt_msg.rate = audio_capture->rate;
	fmt_msg.channels = audio_capture->channels;
	fmt_msg.format = audio_capture->format;

	/* single-exit write sequence; break out on the first failure */
	do {
		if ((ret = ps_packet_open(&audio_capture->packet, PS_PACKET_WRITE)))
			break;
		if ((ret = ps_packet_write(&audio_capture->packet, &hdr,
					   sizeof(glc_message_header_t))))
			break;
		if ((ret = ps_packet_write(&audio_capture->packet, &fmt_msg,
					   sizeof(glc_audio_format_message_t))))
			break;
		if ((ret = ps_packet_close(&audio_capture->packet)))
			break;
		return 0;
	} while (0);

	/* unrecoverable: cancel the whole capture session and the buffer */
	glc_state_set(audio_capture->glc, GLC_STATE_CANCEL);
	ps_buffer_cancel(audio_capture->target);
	glc_log(audio_capture->glc, GLC_ERROR, "audio_capture",
		"can't write audio stream configuration to buffer");
	glc_log(audio_capture->glc, GLC_ERROR, "audio_capture",
		"%s (%d)", strerror(ret), ret);
	return ret;
}
/*
 * multithreading notes:
 *
 * This function could be accessed concurrently for different video streams,
 * with the pair dpy,drawable identifying each stream.
 *
 * Capture one frame for the stream identified by (dpy, drawable) and write
 * it to the stream's packet as GLC_MESSAGE_VIDEO_FRAME (message header +
 * frame header + pixel data).  Frame pacing: a frame is skipped unless
 * gl_capture->fps (a period, in nanoseconds per the /1000000000 below) has
 * elapsed since video->last, except in LOCK_FPS / IGNORE_TIME modes.
 * With PBO enabled the transfer is double-buffered: each call writes the
 * PREVIOUS frame's pixels (read back from the PBO) and starts a new
 * asynchronous transfer for the current one.
 */
int gl_capture_frame(gl_capture_t gl_capture, Display *dpy, GLXDrawable drawable)
{
	struct gl_capture_video_stream_s *video;
	glc_message_header_t msg;
	glc_video_frame_header_t pic;
	glc_utime_t now;
	glc_utime_t before_capture,after_capture;
	char *dma;
	int ret = 0;

	if (!(gl_capture->flags & GL_CAPTURE_CAPTURING))
		return 0; /* capturing not active */

	gl_capture_get_video_stream(gl_capture, &video, dpy, drawable);

	/* get current time */
	if (unlikely(gl_capture->flags & GL_CAPTURE_IGNORE_TIME))
		now = video->last + gl_capture->fps; /* synthetic clock: exactly one period later */
	else
		now = glc_state_time(gl_capture->glc);

	/* has gl_capture->fps nanoseconds elapsed since last capture */
	if ((now - video->last < gl_capture->fps) &&
	    !(gl_capture->flags & GL_CAPTURE_LOCK_FPS) &&
	    !(gl_capture->flags & GL_CAPTURE_IGNORE_TIME))
		goto finish;

	/* not really needed until now */
	gl_capture_update_video_stream(gl_capture, video);

	/* if PBO is not active, just start transfer and finish;
	   the CAS flips pbo_active 0->1 exactly once per stream */
	if (unlikely((gl_capture->flags & GL_CAPTURE_USE_PBO) &&
		     __sync_bool_compare_and_swap(&video->pbo_active,0,1))) {
		ret = gl_capture_start_pbo(gl_capture, video);
		video->pbo_time = now;
		goto finish;
	}

	/* PS_PACKET_TRY = non-blocking open: drop the frame if the buffer is
	   busy, unless we are locked to the fps or ignoring time.
	   NOTE(review): the open result is deliberately not stored in ret, so
	   a failed open leaves ret == 0 and skips the frame silently — confirm
	   this is intended for the non-TRY path too. */
	if (unlikely(ps_packet_open(&video->packet,
				    ((gl_capture->flags & GL_CAPTURE_LOCK_FPS) ||
				     (gl_capture->flags & GL_CAPTURE_IGNORE_TIME)) ?
				    (PS_PACKET_WRITE) :
				    (PS_PACKET_WRITE | PS_PACKET_TRY))))
		goto finish;
	if (unlikely((ret = ps_packet_setsize(&video->packet, video->row * video->ch
					      + sizeof(glc_message_header_t)
					      + sizeof(glc_video_frame_header_t)))))
		goto cancel;

	msg.type = GLC_MESSAGE_VIDEO_FRAME;
	if (unlikely((ret = ps_packet_write(&video->packet,
					    &msg, sizeof(glc_message_header_t)))))
		goto cancel;

	/*
	 * if we are using PBO we will actually write previous picture to buffer.
	 * Also, make sure that pbo_time is not in the future. This could happen if
	 * the state time is reset by reloading the capture between a pbo start
	 * and a pbo read.
	 */
	pic.time = (gl_capture->flags & GL_CAPTURE_USE_PBO && video->pbo_time < now)?video->pbo_time:now;
	pic.id = video->id;
	if (unlikely((ret = ps_packet_write(&video->packet,
					    &pic, sizeof(glc_video_frame_header_t)))))
		goto cancel;

	/* before_capture is only read below under the same gather_stats guard */
	if (video->gather_stats)
		before_capture = glc_state_time(gl_capture->glc);
	if (gl_capture->flags & GL_CAPTURE_USE_PBO) {
		/* read back the previous frame, then start the next transfer */
		if (unlikely((ret = gl_capture_read_pbo(gl_capture, video))))
			goto cancel;
		ret = gl_capture_start_pbo(gl_capture, video);
		video->pbo_time = now;
	} else {
		/* write pixels straight into the packet's DMA area */
		if (unlikely((ret = ps_packet_dma(&video->packet, (void *) &dma,
						  video->row * video->ch,
						  PS_ACCEPT_FAKE_DMA))))
			goto cancel;
		ret = gl_capture_get_pixels(gl_capture, video, dma);
	}
	if (video->gather_stats) {
		after_capture = glc_state_time(gl_capture->glc);
		video->capture_time_ns += after_capture - before_capture;
	}

	ps_packet_close(&video->packet);
	video->num_frames++;

	/* LOCK_FPS: sleep out the remainder of the frame period */
	now = glc_state_time(gl_capture->glc);
	if (unlikely((gl_capture->flags & GL_CAPTURE_LOCK_FPS) &&
		     !(gl_capture->flags & GL_CAPTURE_IGNORE_TIME))) {
		if (now - video->last < gl_capture->fps) {
			struct timespec ts = {
				.tv_sec = (gl_capture->fps + video->last - now)/1000000000,
				.tv_nsec = (gl_capture->fps + video->last - now)%1000000000 };
			nanosleep(&ts,NULL);
		}
	}

	/* increment by 1/fps seconds */
	video->last += gl_capture->fps;

finish:
	if (unlikely(ret != 0))
		gl_capture_error(gl_capture, ret);

	if (gl_capture->flags & GL_CAPTURE_DRAW_INDICATOR)
		glCallList(video->indicator_list);

	return ret;

cancel:
	/* EBUSY from a TRY-mode buffer just means "drop this frame" */
	if (ret == EBUSY) {
		ret = 0;
		glc_log(gl_capture->glc, GLC_INFO, "gl_capture",
			"dropped frame, buffer not ready");
	}
	ps_packet_cancel(&video->packet);
	goto finish;
}

/*
 * Re-send color correction for every known video stream.  Takes the video
 * list read lock, so streams may be added concurrently but not while we
 * iterate.
 */
int gl_capture_refresh_color_correction(gl_capture_t gl_capture)
{
	struct gl_capture_video_stream_s *video;

	if (unlikely(!(gl_capture->flags & GL_CAPTURE_CAPTURING)))
		return 0; /* capturing not active */

	glc_log(gl_capture->glc, GLC_INFO,
		"gl_capture", "refreshing color correction");

	pthread_rwlock_rdlock(&gl_capture->videolist_lock);
	video = gl_capture->video;
	while (video != NULL) {
		gl_capture_update_color(gl_capture, video);
		video = video->next;
	}
	pthread_rwlock_unlock(&gl_capture->videolist_lock);

	return 0;
}

/** \todo support GammaRamp */
/*
 * Query the current XF86VidMode gamma for the stream's screen and, if it
 * differs from the cached values, write a GLC_MESSAGE_COLOR message with
 * the new red/green/blue gamma.  Brightness and contrast are always sent
 * as 0 (see todo below).
 *
 * NOTE(review): the cached video->gamma_* fields are compared but not
 * visibly updated here — presumably done elsewhere; also the err path
 * calls ps_packet_cancel even when ps_packet_open itself failed (packet
 * never opened) — confirm ps_packet_cancel tolerates that.
 */
int gl_capture_update_color(gl_capture_t gl_capture,
			    struct gl_capture_video_stream_s *video)
{
	glc_message_header_t msg_hdr;
	glc_color_message_t msg;
	XF86VidModeGamma gamma;
	int ret = 0;

	XF86VidModeGetGamma(video->dpy, video->screen, &gamma);

	if ((gamma.red == video->gamma_red) &&
	    (gamma.green == video->gamma_green) &&
	    (gamma.blue == video->gamma_blue))
		return 0; /* nothing to update */

	msg_hdr.type = GLC_MESSAGE_COLOR;
	msg.id = video->id;
	msg.red = gamma.red;
	msg.green = gamma.green;
	msg.blue = gamma.blue;

	/** \todo figure out brightness and contrast */
	msg.brightness = msg.contrast = 0;

	glc_log(gl_capture->glc, GLC_INFO, "gl_capture",
		"color correction: brightness=%f, contrast=%f, red=%f, green=%f, blue=%f",
		msg.brightness, msg.contrast, msg.red, msg.green, msg.blue);

	if (unlikely((ret = ps_packet_open(&video->packet, PS_PACKET_WRITE))))
		goto err;
	if (unlikely((ret = ps_packet_write(&video->packet,
					    &msg_hdr, sizeof(glc_message_header_t)))))
		goto err;
	if (unlikely((ret = ps_packet_write(&video->packet,
					    &msg, sizeof(glc_color_message_t)))))
		goto err;
	if (unlikely((ret = ps_packet_close(&video->packet))))
		goto err;

	return 0;

err:
	ps_packet_cancel(&video->packet);
	glc_log(gl_capture->glc, GLC_ERROR, "gl_capture",
		"can't write gamma correction information to buffer: %s (%d)",
		strerror(ret), ret);
	return ret;
}