/**
 * JNI entry point: feed up to @bufsize bytes from a direct ByteBuffer into
 * the native FLAC stream encoder attached to this Java object.
 *
 * Returns the encoder's write result (bytes consumed), or 0 after raising
 * an IllegalArgumentException on the Java side.
 */
jint
Java_com_example_jni_FLACStreamEncoder_write(JNIEnv * env, jobject obj,
    jobject buffer, jint bufsize)
{
  FLACStreamEncoder * encoder = get_encoder(env, obj);

  if (NULL == encoder) {
    aj::throwByName(env, IllegalArgumentException_classname,
        "Called without a valid encoder instance!");
    return 0;
  }

  // BUG FIX: the original threw here but then fell through and called
  // encoder->write() anyway, reading past the buffer's end. An oversized
  // request must not reach the encoder.
  if (bufsize > env->GetDirectBufferCapacity(buffer)) {
    aj::throwByName(env, IllegalArgumentException_classname,
        "Asked to read more from a buffer than the buffer's capacity!");
    return 0;
  }

  char * buf = static_cast<char *>(env->GetDirectBufferAddress(buffer));
  // GetDirectBufferAddress returns NULL for non-direct buffers (or when the
  // JVM does not support direct-buffer access); don't hand NULL to write().
  if (NULL == buf) {
    aj::throwByName(env, IllegalArgumentException_classname,
        "Buffer is not a direct buffer!");
    return 0;
  }

  return encoder->write(buf, bufsize);
}
/**
 * httpstreaming_dispatcher:
 * @data: RequestData type pointer
 * @user_data: HTTPStreaming type pointer
 *
 * Process http request.
 *
 * Returns: the absolute clock time at which the request should be serviced
 * again (e.g. live streaming), GST_CLOCK_TIME_NONE if sending failed, or
 * 0 if the processing has completed.
 */
static GstClockTime
httpstreaming_dispatcher (gpointer data, gpointer user_data)
{
  RequestData *request_data = data;
  HTTPStreaming *httpstreaming = (HTTPStreaming *) user_data;
  gchar *buf;
  Encoder *encoder;
  EncoderOutput *encoder_output;
  Channel *channel;
  RequestDataUserData *request_user_data;
  GstClockTime ret_clock_time;

  channel = get_channel (httpstreaming, request_data);
  switch (request_data->status) {
    case HTTP_REQUEST:
      GST_DEBUG ("new request arrived, socket is %d, uri is %s",
          request_data->sock, request_data->uri);
      encoder_output = get_encoder_output (httpstreaming, request_data);
      if (encoder_output == NULL) {
        /* No encoder output matches this uri: 404. */
        buf = g_strdup_printf (http_404, PACKAGE_NAME, PACKAGE_VERSION);
        httpserver_write (request_data->sock, buf, strlen (buf));
        g_free (buf);
        return 0;
      } else if ((request_data->parameters[0] == '\0') ||
          (request_data->parameters[0] == 'b')) {
        /* default operator is play, ?bitrate= */
        /* Was GST_ERROR in the original; a play request is a normal event. */
        GST_INFO ("Play %s.", request_data->uri);
        if (*(encoder_output->head_addr) == *(encoder_output->tail_addr)) {
          /* Output ring empty: encoder has produced nothing yet. */
          GST_DEBUG ("%s unready.", request_data->uri);
          buf = g_strdup_printf (http_404, PACKAGE_NAME, PACKAGE_VERSION);
          httpserver_write (request_data->sock, buf, strlen (buf));
          g_free (buf);
          return 0;
        }
        /* Allocate per-request state only after the readiness check: the
         * original allocated first and leaked it on the "unready" path.
         * Use g_try_malloc (g_malloc aborts on OOM) so the NULL check and
         * the 500 response below are actually reachable. */
        request_user_data =
            (RequestDataUserData *) g_try_malloc (sizeof (RequestDataUserData));
        if (request_user_data == NULL) {
          GST_ERROR ("Internal Server Error, g_malloc for request_user_data failure.");
          buf = g_strdup_printf (http_500, PACKAGE_NAME, PACKAGE_VERSION);
          httpserver_write (request_data->sock, buf, strlen (buf));
          g_free (buf);
          return 0;
        }
        /* let send_chunk send new chunk. */
        encoder = get_encoder (request_data->uri,
            httpstreaming->itvencoder->channel_array);
        request_user_data->encoder = encoder;
        request_user_data->chunk_size = 0;
        request_user_data->send_count = 2;
        request_user_data->chunk_size_str = g_strdup ("");
        request_user_data->chunk_size_str_len = 0;
        request_user_data->encoder_output = encoder_output;
        /* Start streaming from the most recent random access point;
         * +12 skips the RAP header in front of the payload. */
        request_user_data->current_rap_addr = *(encoder_output->last_rap_addr);
        request_user_data->current_send_position =
            *(encoder_output->last_rap_addr) + 12;
        request_user_data->channel_age = channel->age;
        request_data->user_data = request_user_data;
        request_data->bytes_send = 0;
        buf = g_strdup_printf (http_chunked, PACKAGE_NAME, PACKAGE_VERSION);
        httpserver_write (request_data->sock, buf, strlen (buf));
        g_free (buf);
        return gst_clock_get_time (httpstreaming->httpserver->system_clock) +
            GST_MSECOND;
      } else {
        /* Unsupported request operator: 404. */
        buf = g_strdup_printf (http_404, PACKAGE_NAME, PACKAGE_VERSION);
        httpserver_write (request_data->sock, buf, strlen (buf));
        g_free (buf);
        return 0;
      }
      break;
    case HTTP_CONTINUE:
      request_user_data = request_data->user_data;
      if ((request_user_data->channel_age != channel->age) ||
          (*(channel->output->state) != GST_STATE_PLAYING)) {
        /* Channel restarted or no longer playing: drop this request.
         * NOTE(review): request_user_data->chunk_size_str looks leaked here
         * and in HTTP_FINISH -- confirm its ownership before freeing it. */
        g_free (request_data->user_data);
        request_data->user_data = NULL;
        return 0;
      }
      encoder_output = request_user_data->encoder_output;
      if (request_user_data->current_send_position ==
          *(encoder_output->tail_addr)) {
        /* No more stream data yet: retry after ~500ms plus random jitter.
         * BUG FIX: the original passed the tail_addr *pointer* to the
         * 64-bit format directive (missing dereference). */
        GST_DEBUG ("current:%" G_GUINT64_FORMAT " == tail:%" G_GUINT64_FORMAT,
            request_user_data->current_send_position,
            *(encoder_output->tail_addr));
        return gst_clock_get_time (httpstreaming->httpserver->system_clock) +
            500 * GST_MSECOND + g_random_int_range (1, 1000000);
      }
      ret_clock_time = send_chunk (encoder_output, request_data);
      if (ret_clock_time != GST_CLOCK_TIME_NONE) {
        return ret_clock_time +
            gst_clock_get_time (httpstreaming->httpserver->system_clock);
      } else {
        return GST_CLOCK_TIME_NONE;
      }
    case HTTP_FINISH:
      g_free (request_data->user_data);
      request_data->user_data = NULL;
      return 0;
    default:
      GST_ERROR ("Unknown status %d", request_data->status);
      buf = g_strdup_printf (http_400, PACKAGE_NAME, PACKAGE_VERSION);
      httpserver_write (request_data->sock, buf, strlen (buf));
      g_free (buf);
      return 0;
  }
}
/* Build the internal decode -> re-encode chain lazily.
 * Idempotent: returns TRUE immediately once the encoder already exists. */
static gboolean
setup_recoder_pipeline (GstSmartEncoder * smart_encoder)
{
  GstCaps *current_caps;
  GstPad *pad;

  /* Fast path: already set up. */
  if (G_UNLIKELY (smart_encoder->encoder))
    return TRUE;

  GST_DEBUG ("Creating internal decoder and encoder");

  /* Pick a decoder for the caps currently flowing on our sinkpad... */
  current_caps = gst_pad_get_current_caps (smart_encoder->sinkpad);
  smart_encoder->decoder = get_decoder (current_caps);
  if (G_UNLIKELY (!smart_encoder->decoder))
    goto no_decoder;
  gst_caps_unref (current_caps);
  gst_element_set_bus (smart_encoder->decoder, GST_ELEMENT_BUS (smart_encoder));

  /* ...and an encoder producing those same caps again. */
  current_caps = gst_pad_get_current_caps (smart_encoder->sinkpad);
  smart_encoder->encoder = get_encoder (current_caps);
  if (G_UNLIKELY (!smart_encoder->encoder))
    goto no_encoder;
  gst_caps_unref (current_caps);
  gst_element_set_bus (smart_encoder->encoder, GST_ELEMENT_BUS (smart_encoder));

  GST_DEBUG ("Creating internal pads");

  /* Internal source pad through which buffers are fed to the decoder. */
  smart_encoder->internal_srcpad = gst_pad_new ("internal_src", GST_PAD_SRC);
  g_object_set_qdata ((GObject *) smart_encoder->internal_srcpad,
      INTERNAL_ELEMENT, smart_encoder);
  gst_pad_set_active (smart_encoder->internal_srcpad, TRUE);

  /* Internal sink pad receiving the re-encoded buffers. No event function
   * is installed since all events are discarded. */
  smart_encoder->internal_sinkpad = gst_pad_new ("internal_sink", GST_PAD_SINK);
  g_object_set_qdata ((GObject *) smart_encoder->internal_sinkpad,
      INTERNAL_ELEMENT, smart_encoder);
  gst_pad_set_chain_function (smart_encoder->internal_sinkpad, internal_chain);
  gst_pad_set_active (smart_encoder->internal_sinkpad, TRUE);

  GST_DEBUG ("Linking pads to elements");

  /* encoder:src -> internal sinkpad */
  pad = gst_element_get_static_pad (smart_encoder->encoder, "src");
  if (GST_PAD_LINK_FAILED (gst_pad_link (pad, smart_encoder->internal_sinkpad)))
    goto sinkpad_link_fail;
  gst_object_unref (pad);

  /* decoder -> encoder */
  if (!gst_element_link (smart_encoder->decoder, smart_encoder->encoder))
    goto encoder_decoder_link_fail;

  /* internal srcpad -> decoder:sink */
  pad = gst_element_get_static_pad (smart_encoder->decoder, "sink");
  if (GST_PAD_LINK_FAILED (gst_pad_link (smart_encoder->internal_srcpad, pad)))
    goto srcpad_link_fail;
  gst_object_unref (pad);

  GST_DEBUG ("Done creating internal elements/pads");

  return TRUE;

no_decoder:
  {
    GST_WARNING ("Couldn't find a decoder for %" GST_PTR_FORMAT, current_caps);
    gst_caps_unref (current_caps);
    return FALSE;
  }

no_encoder:
  {
    GST_WARNING ("Couldn't find an encoder for %" GST_PTR_FORMAT, current_caps);
    gst_caps_unref (current_caps);
    return FALSE;
  }

srcpad_link_fail:
  {
    gst_object_unref (pad);
    GST_WARNING ("Couldn't link internal srcpad to decoder");
    return FALSE;
  }

sinkpad_link_fail:
  {
    gst_object_unref (pad);
    GST_WARNING ("Couldn't link encoder to internal sinkpad");
    return FALSE;
  }

encoder_decoder_link_fail:
  {
    GST_WARNING ("Couldn't link decoder to encoder");
    return FALSE;
  }
}
static GstElement * build_convert_frame_pipeline (GstElement ** src_element, GstElement ** sink_element, const GstCaps * from_caps, const GstCaps * to_caps, GError ** err) { GstElement *src = NULL, *csp = NULL, *vscale = NULL; GstElement *sink = NULL, *encoder = NULL, *pipeline; GError *error = NULL; /* videoscale is here to correct for the pixel-aspect-ratio for us */ GST_DEBUG ("creating elements"); if (!create_element ("appsrc", &src, &error) || !create_element ("ffmpegcolorspace", &csp, &error) || !create_element ("videoscale", &vscale, &error) || !create_element ("appsink", &sink, &error)) goto no_elements; pipeline = gst_pipeline_new ("videoconvert-pipeline"); if (pipeline == NULL) goto no_pipeline; /* Add black borders if necessary to keep the DAR */ g_object_set (vscale, "add-borders", TRUE, NULL); GST_DEBUG ("adding elements"); gst_bin_add_many (GST_BIN (pipeline), src, csp, vscale, sink, NULL); /* set caps */ g_object_set (src, "caps", from_caps, NULL); g_object_set (sink, "caps", to_caps, NULL); /* FIXME: linking is still way too expensive, profile this properly */ GST_DEBUG ("linking src->csp"); if (!gst_element_link_pads (src, "src", csp, "sink")) goto link_failed; GST_DEBUG ("linking csp->vscale"); if (!gst_element_link_pads (csp, "src", vscale, "sink")) goto link_failed; if (caps_are_raw (to_caps)) { GST_DEBUG ("linking vscale->sink"); if (!gst_element_link_pads (vscale, "src", sink, "sink")) goto link_failed; } else { encoder = get_encoder (to_caps, &error); if (!encoder) goto no_encoder; gst_bin_add (GST_BIN (pipeline), encoder); GST_DEBUG ("linking vscale->encoder"); if (!gst_element_link (vscale, encoder)) goto link_failed; GST_DEBUG ("linking encoder->sink"); if (!gst_element_link_pads (encoder, "src", sink, "sink")) goto link_failed; } g_object_set (src, "emit-signals", TRUE, NULL); g_object_set (sink, "emit-signals", TRUE, NULL); *src_element = src; *sink_element = sink; return pipeline; /* ERRORS */ no_encoder: { gst_object_unref (pipeline); 
GST_ERROR ("could not find an encoder for provided caps"); if (err) *err = error; else g_error_free (error); return NULL; } no_elements: { if (src) gst_object_unref (src); if (csp) gst_object_unref (csp); if (vscale) gst_object_unref (vscale); if (sink) gst_object_unref (sink); GST_ERROR ("Could not convert video frame: %s", error->message); if (err) *err = error; else g_error_free (error); return NULL; } no_pipeline: { gst_object_unref (src); gst_object_unref (csp); gst_object_unref (vscale); gst_object_unref (sink); GST_ERROR ("Could not convert video frame: no pipeline (unknown error)"); if (err) *err = g_error_new (GST_CORE_ERROR, GST_CORE_ERROR_FAILED, "Could not convert video frame: no pipeline (unknown error)"); return NULL; } link_failed: { gst_object_unref (pipeline); GST_ERROR ("Could not convert video frame: failed to link elements"); if (err) *err = g_error_new (GST_CORE_ERROR, GST_CORE_ERROR_NEGOTIATION, "Could not convert video frame: failed to link elements"); return NULL; } }
/* Build an appsrc ! [videoconvert ! videocrop] ! videoconvert ! videoscale !
 * [encoder] ! appsink pipeline converting one frame from @from_caps to
 * @to_caps. When @cmeta is provided and the videocrop element is available,
 * the frame is cropped to the crop-meta rectangle first; the encoder stage
 * is added only when @to_caps is not raw video.
 *
 * Returns the pipeline with appsrc/appsink stored in @src_element /
 * @sink_element, or NULL on failure (with *err set when @err is non-NULL). */
static GstElement *
build_convert_frame_pipeline (GstElement ** src_element,
    GstElement ** sink_element, const GstCaps * from_caps,
    GstVideoCropMeta * cmeta, const GstCaps * to_caps, GError ** err)
{
  GstElement *vcrop = NULL, *csp = NULL, *csp2 = NULL, *vscale = NULL;
  GstElement *src = NULL, *sink = NULL, *encoder = NULL, *pipeline;
  GstVideoInfo info;
  GError *error = NULL;

  if (cmeta) {
    /* Missing videocrop is non-fatal: warn and continue without cropping.
     * NOTE(review): error is freed here but not reset to NULL before being
     * reused by the create_element calls below -- confirm create_element
     * always overwrites it. */
    if (!create_element ("videocrop", &vcrop, &error)) {
      g_error_free (error);
      g_warning
          ("build_convert_frame_pipeline: Buffer has crop metadata but videocrop element is not found. Cropping will be disabled");
    } else {
      /* Extra convert in front of videocrop, which may not accept
       * the input format directly. */
      if (!create_element ("videoconvert", &csp2, &error))
        goto no_elements;
    }
  }

  /* videoscale is here to correct for the pixel-aspect-ratio for us */
  GST_DEBUG ("creating elements");
  if (!create_element ("appsrc", &src, &error) ||
      !create_element ("videoconvert", &csp, &error) ||
      !create_element ("videoscale", &vscale, &error) ||
      !create_element ("appsink", &sink, &error))
    goto no_elements;

  pipeline = gst_pipeline_new ("videoconvert-pipeline");
  if (pipeline == NULL)
    goto no_pipeline;

  /* Add black borders if necessary to keep the DAR */
  g_object_set (vscale, "add-borders", TRUE, NULL);

  GST_DEBUG ("adding elements");
  gst_bin_add_many (GST_BIN (pipeline), src, csp, vscale, sink, NULL);
  if (vcrop)
    gst_bin_add_many (GST_BIN (pipeline), vcrop, csp2, NULL);

  /* set caps */
  g_object_set (src, "caps", from_caps, NULL);
  if (vcrop) {
    /* Translate the crop-meta rectangle into videocrop's per-edge margins.
     * NOTE(review): the return of gst_video_info_from_caps is ignored; if it
     * fails, info is uninitialized and the crop margins are garbage --
     * consider checking it. */
    gst_video_info_from_caps (&info, from_caps);
    g_object_set (vcrop, "left", cmeta->x, NULL);
    g_object_set (vcrop, "top", cmeta->y, NULL);
    g_object_set (vcrop, "right", GST_VIDEO_INFO_WIDTH (&info) - cmeta->width,
        NULL);
    g_object_set (vcrop, "bottom",
        GST_VIDEO_INFO_HEIGHT (&info) - cmeta->height, NULL);
    GST_DEBUG ("crop meta [x,y,width,height]: %d %d %d %d", cmeta->x, cmeta->y,
        cmeta->width, cmeta->height);
  }
  g_object_set (sink, "caps", to_caps, NULL);

  /* FIXME: linking is still way too expensive, profile this properly */
  if (vcrop) {
    GST_DEBUG ("linking src->csp2");
    if (!gst_element_link_pads (src, "src", csp2, "sink"))
      goto link_failed;
    GST_DEBUG ("linking csp2->vcrop");
    if (!gst_element_link_pads (csp2, "src", vcrop, "sink"))
      goto link_failed;
    GST_DEBUG ("linking vcrop->csp");
    if (!gst_element_link_pads (vcrop, "src", csp, "sink"))
      goto link_failed;
  } else {
    GST_DEBUG ("linking src->csp");
    if (!gst_element_link_pads (src, "src", csp, "sink"))
      goto link_failed;
  }
  GST_DEBUG ("linking csp->vscale");
  if (!gst_element_link_pads_full (csp, "src", vscale, "sink",
          GST_PAD_LINK_CHECK_NOTHING))
    goto link_failed;

  if (caps_are_raw (to_caps)) {
    GST_DEBUG ("linking vscale->sink");
    if (!gst_element_link_pads_full (vscale, "src", sink, "sink",
            GST_PAD_LINK_CHECK_NOTHING))
      goto link_failed;
  } else {
    /* Compressed output: insert an encoder between vscale and sink. */
    encoder = get_encoder (to_caps, &error);
    if (!encoder)
      goto no_encoder;
    gst_bin_add (GST_BIN (pipeline), encoder);
    GST_DEBUG ("linking vscale->encoder");
    if (!gst_element_link (vscale, encoder))
      goto link_failed;
    GST_DEBUG ("linking encoder->sink");
    if (!gst_element_link_pads (encoder, "src", sink, "sink"))
      goto link_failed;
  }

  g_object_set (src, "emit-signals", TRUE, NULL);
  g_object_set (sink, "emit-signals", TRUE, NULL);

  *src_element = src;
  *sink_element = sink;

  return pipeline;

  /* ERRORS */
no_encoder:
  {
    /* Elements are owned by the pipeline at this point. */
    gst_object_unref (pipeline);
    GST_ERROR ("could not find an encoder for provided caps");
    if (err)
      *err = error;
    else
      g_error_free (error);
    return NULL;
  }
no_elements:
  {
    /* Nothing was added to a bin yet: unref each created element. */
    if (src)
      gst_object_unref (src);
    if (vcrop)
      gst_object_unref (vcrop);
    if (csp)
      gst_object_unref (csp);
    if (csp2)
      gst_object_unref (csp2);
    if (vscale)
      gst_object_unref (vscale);
    if (sink)
      gst_object_unref (sink);
    GST_ERROR ("Could not convert video frame: %s", error->message);
    if (err)
      *err = error;
    else
      g_error_free (error);
    return NULL;
  }
no_pipeline:
  {
    gst_object_unref (src);
    if (vcrop)
      gst_object_unref (vcrop);
    gst_object_unref (csp);
    if (csp2)
      gst_object_unref (csp2);
    gst_object_unref (vscale);
    gst_object_unref (sink);
    GST_ERROR ("Could not convert video frame: no pipeline (unknown error)");
    if (err)
      *err = g_error_new (GST_CORE_ERROR, GST_CORE_ERROR_FAILED,
          "Could not convert video frame: no pipeline (unknown error)");
    return NULL;
  }
link_failed:
  {
    gst_object_unref (pipeline);
    GST_ERROR ("Could not convert video frame: failed to link elements");
    if (err)
      *err = g_error_new (GST_CORE_ERROR, GST_CORE_ERROR_NEGOTIATION,
          "Could not convert video frame: failed to link elements");
    return NULL;
  }
}