/* Deactivate both the render callback and the render-notify hook on the
 * AudioUnit, then mark the I/O proc as inactive. Failures are logged but
 * not propagated. */
void
gst_core_audio_remove_render_callback (GstCoreAudio * core_audio)
{
  AURenderCallbackStruct callback_info;
  OSStatus status;

  /* Setting a NULL inputProc deactivates the render callback. */
  callback_info.inputProc = NULL;
  callback_info.inputProcRefCon = NULL;
  status = AudioUnitSetProperty (core_audio->audiounit,
      kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Global,
      0,                        /* N/A for global scope */
      &callback_info, sizeof (callback_info));
  if (status) {
    GST_WARNING_OBJECT (core_audio->osxbuf,
        "Failed to remove render callback %" GST_FOURCC_FORMAT,
        GST_FOURCC_ARGS (status));
  }

  /* Drop the RenderNotify hook as well. */
  status = AudioUnitRemoveRenderNotify (core_audio->audiounit,
      (AURenderCallback) gst_core_audio_render_notify, core_audio);
  if (status) {
    GST_WARNING_OBJECT (core_audio->osxbuf,
        "Failed to remove render notify callback %" GST_FOURCC_FORMAT,
        GST_FOURCC_ARGS (status));
  }

  /* We are fully deactivated now. */
  core_audio->io_proc_needs_deactivation = FALSE;
  core_audio->io_proc_active = FALSE;
}
/* Install the I/O proc on the AudioUnit (input callback for capture,
 * render callback for playback), attach the render-notify hook, and
 * start the unit. Returns FALSE on any CoreAudio error. */
gboolean
gst_core_audio_io_proc_start (GstCoreAudio * core_audio)
{
  OSStatus status;
  AURenderCallbackStruct cb;
  AudioUnitPropertyID cb_property;

  GST_DEBUG_OBJECT (core_audio->osxbuf,
      "osx ring buffer start ioproc: %p device_id %lu",
      core_audio->element->io_proc, (gulong) core_audio->device_id);

  if (!core_audio->io_proc_active) {
    /* Sources use the output unit's input callback, sinks the render
     * callback. */
    if (core_audio->is_src)
      cb_property = kAudioOutputUnitProperty_SetInputCallback;
    else
      cb_property = kAudioUnitProperty_SetRenderCallback;

    cb.inputProc = (AURenderCallback) core_audio->element->io_proc;
    cb.inputProcRefCon = core_audio->osxbuf;
    status = AudioUnitSetProperty (core_audio->audiounit, cb_property,
        kAudioUnitScope_Global, 0,      /* N/A for global scope */
        &cb, sizeof (cb));
    if (status) {
      GST_ERROR_OBJECT (core_audio->osxbuf,
          "AudioUnitSetProperty failed: %" GST_FOURCC_FORMAT,
          GST_FOURCC_ARGS (status));
      return FALSE;
    }

    /* ### does it make sense to do this notify stuff for input mode? */
    status = AudioUnitAddRenderNotify (core_audio->audiounit,
        (AURenderCallback) gst_core_audio_render_notify, core_audio);
    if (status) {
      GST_ERROR_OBJECT (core_audio->osxbuf,
          "AudioUnitAddRenderNotify failed %" GST_FOURCC_FORMAT,
          GST_FOURCC_ARGS (status));
      return FALSE;
    }

    core_audio->io_proc_active = TRUE;
  }

  core_audio->io_proc_needs_deactivation = FALSE;

  status = AudioOutputUnitStart (core_audio->audiounit);
  if (status) {
    GST_ERROR_OBJECT (core_audio->osxbuf,
        "AudioOutputUnitStart failed: %" GST_FOURCC_FORMAT,
        GST_FOURCC_ARGS (status));
    return FALSE;
  }

  return TRUE;
}
/**
 * gst_vaapi_image_new:
 * @display: a #GstVaapiDisplay
 * @format: a #GstVaapiImageFormat
 * @width: the requested image width
 * @height: the requested image height
 *
 * Creates a new #GstVaapiImage with the specified format and
 * dimensions.
 *
 * Return value: the newly allocated #GstVaapiImage object, or %NULL
 *   if allocation or construction failed
 */
GstVaapiImage *
gst_vaapi_image_new(
    GstVaapiDisplay *display,
    GstVaapiImageFormat format,
    guint width,
    guint height
)
{
    GstVaapiImage *image;

    g_return_val_if_fail(GST_VAAPI_IS_DISPLAY(display), NULL);
    g_return_val_if_fail(width > 0, NULL);
    g_return_val_if_fail(height > 0, NULL);

    GST_DEBUG("format %" GST_FOURCC_FORMAT ", size %ux%u",
              GST_FOURCC_ARGS(format), width, height);

    image = g_object_new(
        GST_VAAPI_TYPE_IMAGE,
        "display", display,
        "id",      GST_VAAPI_ID(VA_INVALID_ID),
        "format",  format,
        "width",   width,
        "height",  height,
        NULL
    );

    /* Construction happens through GObject properties; drop the object
     * if the underlying VA image could not be created. */
    if (image && !image->priv->is_constructed) {
        g_object_unref(image);
        image = NULL;
    }
    return image;
}
/* Set the stream format on the AudioUnit.
 * For sources the description is applied to the output scope of element 1
 * (the input bus); for sinks to the input scope of element 0 (the output
 * bus). Returns FALSE if the AudioUnit rejects the description.
 * Fixed: stray double semicolon after the error return. */
gboolean
gst_core_audio_set_format (GstCoreAudio * core_audio,
    AudioStreamBasicDescription format)
{
  /* Configure the output stream and allocate ringbuffer memory */
  OSStatus status;
  UInt32 propertySize;
  int element;
  AudioUnitScope scope;

  GST_DEBUG_OBJECT (core_audio->osxbuf, "Setting format for AudioUnit");

  scope = core_audio->is_src ? kAudioUnitScope_Output : kAudioUnitScope_Input;
  element = core_audio->is_src ? 1 : 0;

  propertySize = sizeof (AudioStreamBasicDescription);
  status = AudioUnitSetProperty (core_audio->audiounit,
      kAudioUnitProperty_StreamFormat, scope, element, &format, propertySize);

  if (status) {
    GST_WARNING_OBJECT (core_audio->osxbuf,
        "Failed to set audio description: %" GST_FOURCC_FORMAT,
        GST_FOURCC_ARGS (status));
    return FALSE;
  }

  return TRUE;
}
/* Build an ImageDescription from codec_data. If the buffer already
 * carries a full stsd atom (size matches and the 'stsd' fourcc is at
 * offset 4), use it directly; otherwise synthesize one for the known
 * codec types. Returns NULL for unhandled formats. */
ImageDescription *
image_description_from_codec_data (GstBuffer * buf, guint32 codectype)
{
  ImageDescription *ret = NULL;

  GST_LOG ("codectype:%" GST_FOURCC_FORMAT " buf:%p",
      GST_FOURCC_ARGS (codectype), buf);

  if ((GST_BUFFER_SIZE (buf) == GST_READ_UINT32_BE (GST_BUFFER_DATA (buf)))
      && (QT_MAKE_FOURCC_LE ('s', 't', 's', 'd') ==
          GST_READ_UINT32_BE (GST_BUFFER_DATA (buf) + 4))) {
    /* We have the full stsd (ImageDescription) in our codec_data */
    ret = image_description_from_stsd_buffer (buf);
  } else if (codectype == QT_MAKE_FOURCC_LE ('m', 'p', '4', 'v')) {
    ret = image_description_for_mp4v (buf);
  } else if (codectype == QT_MAKE_FOURCC_LE ('a', 'v', 'c', '1')) {
    ret = image_description_for_avc1 (buf);
  } else {
    GST_WARNING ("Format not handled !");
  }

  return ret;
}
/* Bind the AudioUnit to the configured device. On failure the capture
 * buffer list (if any) is released before returning FALSE. */
gboolean
gst_core_audio_bind_device (GstCoreAudio * core_audio)
{
  OSStatus status;

  /* Specify which device we're using. */
  GST_DEBUG_OBJECT (core_audio->osxbuf, "Bind AudioUnit to device %d",
      (int) core_audio->device_id);

  status = AudioUnitSetProperty (core_audio->audiounit,
      kAudioOutputUnitProperty_CurrentDevice, kAudioUnitScope_Global,
      0, &core_audio->device_id, sizeof (AudioDeviceID));
  if (status) {
    GST_ERROR_OBJECT (core_audio->osxbuf,
        "Failed binding to device: %" GST_FOURCC_FORMAT,
        GST_FOURCC_ARGS (status));
    /* Release the record buffer list allocated earlier, if present. */
    if (core_audio->recBufferList) {
      buffer_list_free (core_audio->recBufferList);
      core_audio->recBufferList = NULL;
    }
    return FALSE;
  }

  return TRUE;
}
/**
 * gst_vaapi_image_new_with_image:
 * @display: a #GstVaapiDisplay
 * @va_image: a VA image
 *
 * Creates a new #GstVaapiImage from a foreign VA image. The image
 * format and dimensions will be extracted from @va_image. This
 * function is mainly used by gst_vaapi_surface_derive_image() to bind
 * a VA image to a #GstVaapiImage object.
 *
 * Return value: the newly allocated #GstVaapiImage object, or %NULL
 *   if allocation or construction failed
 */
GstVaapiImage *
gst_vaapi_image_new_with_image(GstVaapiDisplay *display, VAImage *va_image)
{
    GstVaapiImage *image;

    g_return_val_if_fail(GST_VAAPI_IS_DISPLAY(display), NULL);
    g_return_val_if_fail(va_image, NULL);
    g_return_val_if_fail(va_image->image_id != VA_INVALID_ID, NULL);
    g_return_val_if_fail(va_image->buf != VA_INVALID_ID, NULL);

    GST_DEBUG("VA image 0x%08x, format %" GST_FOURCC_FORMAT ", size %ux%u",
              va_image->image_id, GST_FOURCC_ARGS(va_image->format.fourcc),
              va_image->width, va_image->height);

    image = g_object_new(
        GST_VAAPI_TYPE_IMAGE,
        "display", display,
        "id",      GST_VAAPI_ID(va_image->image_id),
        "image",   va_image,
        NULL
    );

    /* Discard the wrapper if construction from the VA image failed. */
    if (image && !image->priv->is_constructed) {
        g_object_unref(image);
        image = NULL;
    }
    return image;
}
/* Dump geometry, pixel format and memory layout of @pixbuf to the debug
 * log. The base address is locked for the duration of the dump and
 * unlocked before returning. Fixed typo in the lock-failure warning
 * ("adress" -> "address"). */
void
dump_cvpixel_buffer (CVPixelBufferRef pixbuf)
{
  gsize left, right, top, bottom;

  GST_LOG ("buffer %p", pixbuf);
  if (CVPixelBufferLockBaseAddress (pixbuf, 0)) {
    GST_WARNING ("Couldn't lock base address on pixel buffer !");
    return;
  }
  GST_LOG ("Width:%" G_GSIZE_FORMAT " , Height:%" G_GSIZE_FORMAT,
      CVPixelBufferGetWidth (pixbuf), CVPixelBufferGetHeight (pixbuf));
  GST_LOG ("Format:%" GST_FOURCC_FORMAT,
      GST_FOURCC_ARGS (CVPixelBufferGetPixelFormatType (pixbuf)));
  GST_LOG ("base address:%p", CVPixelBufferGetBaseAddress (pixbuf));
  GST_LOG ("Bytes per row:%" G_GSIZE_FORMAT,
      CVPixelBufferGetBytesPerRow (pixbuf));
  GST_LOG ("Data Size:%" G_GSIZE_FORMAT, CVPixelBufferGetDataSize (pixbuf));
  GST_LOG ("Plane count:%" G_GSIZE_FORMAT, CVPixelBufferGetPlaneCount (pixbuf));
  CVPixelBufferGetExtendedPixels (pixbuf, &left, &right, &top, &bottom);
  GST_LOG ("Extended pixels. left/right/top/bottom : %" G_GSIZE_FORMAT
      "/%" G_GSIZE_FORMAT "/%" G_GSIZE_FORMAT "/%" G_GSIZE_FORMAT,
      left, right, top, bottom);
  /* Balance the lock taken above. */
  CVPixelBufferUnlockBaseAddress (pixbuf, 0);
}
/* gst_riff_read_chunk:
 * @element: caller element, used for debug output only.
 * @pad: pad to pull data from (pull mode).
 * @_offset: in/out byte offset; advanced past the chunk (header + padded
 *   payload) on success, and past any skipped JUNK chunks.
 * @tag: out: the chunk fourcc.
 * @_chunk_data: out: buffer holding the chunk payload; caller owns it.
 *
 * Pulls one RIFF chunk (8-byte header, then payload) at *_offset,
 * transparently skipping JUNK/JUNQ chunks. Returns GST_FLOW_UNEXPECTED
 * on a short read (treated as EOS), or the pull-range error otherwise.
 */
GstFlowReturn
gst_riff_read_chunk (GstElement * element, GstPad * pad, guint64 * _offset,
    guint32 * tag, GstBuffer ** _chunk_data)
{
  GstBuffer *buf;
  GstFlowReturn res;
  guint size;
  guint64 offset = *_offset;

  g_return_val_if_fail (element != NULL, GST_FLOW_ERROR);
  g_return_val_if_fail (pad != NULL, GST_FLOW_ERROR);
  g_return_val_if_fail (_offset != NULL, GST_FLOW_ERROR);
  g_return_val_if_fail (tag != NULL, GST_FLOW_ERROR);
  g_return_val_if_fail (_chunk_data != NULL, GST_FLOW_ERROR);

skip_junk:
  /* Pull the 8-byte chunk header: fourcc + little-endian payload size. */
  size = 8;
  if ((res = gst_pad_pull_range (pad, offset, size, &buf)) != GST_FLOW_OK)
    return res;
  else if (GST_BUFFER_SIZE (buf) < size)
    goto too_small;

  *tag = GST_READ_UINT32_LE (GST_BUFFER_DATA (buf));
  size = GST_READ_UINT32_LE (GST_BUFFER_DATA (buf) + 4);

  gst_buffer_unref (buf);

  GST_DEBUG_OBJECT (element, "fourcc=%" GST_FOURCC_FORMAT ", size=%u",
      GST_FOURCC_ARGS (*tag), size);

  /* skip 'JUNK' chunks */
  if (*tag == GST_RIFF_TAG_JUNK || *tag == GST_RIFF_TAG_JUNQ) {
    /* Chunks are padded to even sizes; skip header + padded payload. */
    size = GST_ROUND_UP_2 (size);
    *_offset += 8 + size;
    offset += 8 + size;
    GST_DEBUG_OBJECT (element, "skipping JUNK chunk");
    goto skip_junk;
  }

  /* Pull the payload itself, right after the 8-byte header. */
  if ((res = gst_pad_pull_range (pad, offset + 8, size, &buf)) != GST_FLOW_OK)
    return res;
  else if (GST_BUFFER_SIZE (buf) < size)
    goto too_small;

  *_chunk_data = buf;
  *_offset += 8 + GST_ROUND_UP_2 (size);

  return GST_FLOW_OK;

  /* ERRORS */
too_small:
  {
    /* short read, we return UNEXPECTED to mark the EOS case */
    GST_DEBUG_OBJECT (element, "not enough data (available=%u, needed=%u)",
        GST_BUFFER_SIZE (buf), size);
    gst_buffer_unref (buf);
    return GST_FLOW_UNEXPECTED;
  }
}
/* Serialize the element's metadata properties into a RIFF 'LIST INFO'
 * chunk and push it downstream. Nothing is pushed when no property maps
 * to a known INFO tag id. */
static void
write_metadata (GstWavEnc * wavenc)
{
  GString *info_str;
  GList *props;
  int total = 4;
  gboolean need_to_write = FALSE;

  /* "LIST" + 1-byte placeholder at index 4 + "INFO".
   * NOTE(review): a RIFF LIST header needs a 4-byte little-endian size
   * field between "LIST" and "INFO"; only one placeholder byte is
   * reserved here — verify against a working .wav writer. */
  info_str = g_string_new ("LIST INFO");

  for (props = wavenc->metadata->properties->properties; props;
      props = props->next) {
    GstPropsEntry *entry = props->data;
    const char *name;
    guint32 id;

    name = gst_props_entry_get_name (entry);
    id = get_id_from_name (name);
    if (id != 0) {
      const char *text;
      char *tmp;
      int len, req, i;

      need_to_write = TRUE;     /* We've got at least one entry */
      gst_props_entry_get_string (entry, &text);
      len = strlen (text) + 1;  /* The length in the file includes the \0 */
      /* NOTE(review): "%d" renders the sub-chunk length as decimal ASCII;
       * RIFF expects a 4-byte binary little-endian length here — confirm
       * intent before relying on this output. */
      tmp = g_strdup_printf ("%" GST_FOURCC_FORMAT "%d%s",
          GST_FOURCC_ARGS (id), GUINT32_TO_LE (len), text);
      g_string_append (info_str, tmp);
      g_free (tmp);

      /* Check that we end on an even boundary */
      req = ((len + 8) + 1) & ~1;
      for (i = 0; i < req - len; i++) {
        g_string_append_printf (info_str, "%c", 0);
      }
      total += req;
    }
  }

  if (need_to_write) {
    GstBuffer *buf;

    /* Now we've got all the strings together, we can write our length in */
    /* NOTE(review): this stores only the low byte of `total` into a single
     * char; a 4-byte LE write is presumably intended — verify. */
    info_str->str[4] = GUINT32_TO_LE (total);

    buf = gst_buffer_new ();
    /* Buffer takes over info_str's character data; hence FALSE below. */
    gst_buffer_set_data (buf, info_str->str, info_str->len);

    gst_pad_push (wavenc->srcpad, GST_DATA (buf));
    g_string_free (info_str, FALSE);
  }
}
/*
 * _gst_vaapi_image_set_image:
 * @image: a #GstVaapiImage
 * @va_image: a VA image
 *
 * Initializes #GstVaapiImage with a foreign VA image. This function
 * will try to "linearize" the VA image. i.e. making sure that the VA
 * image offsets into the data buffer are in increasing order with the
 * number of planes available in the image.
 *
 * The original image is always kept in priv->internal_image /
 * priv->internal_format; priv->image / priv->format may hold the
 * linearized (U/V-swapped) variant instead.
 *
 * This is an internal function used by gst_vaapi_image_new_with_image().
 *
 * Return value: %TRUE on success
 */
gboolean
_gst_vaapi_image_set_image(GstVaapiImage *image, const VAImage *va_image)
{
    GstVaapiImagePrivate * const priv = image->priv;
    GstVaapiImageFormat format;
    VAImage alt_va_image;
    const VAImageFormat *alt_va_format;

    if (!va_image)
        return FALSE;

    /* Reject VA formats we cannot map to a GstVaapiImageFormat. */
    format = gst_vaapi_image_format(&va_image->format);
    if (!format)
        return FALSE;

    /* The image already exists; do not create one on construction. */
    priv->create_image = FALSE;
    priv->internal_image = *va_image;
    priv->internal_format = format;
    priv->is_linear = vaapi_image_is_linear(va_image);
    priv->image = *va_image;
    priv->format = format;
    priv->width = va_image->width;
    priv->height = va_image->height;

    /* Try to linearize image */
    if (!priv->is_linear) {
        /* I420 and YV12 differ only by swapped U/V planes, so a
         * non-linear layout in one may be linear in the other. */
        switch (format) {
        case GST_VAAPI_IMAGE_I420:
            format = GST_VAAPI_IMAGE_YV12;
            break;
        case GST_VAAPI_IMAGE_YV12:
            format = GST_VAAPI_IMAGE_I420;
            break;
        default:
            format = 0;
            break;
        }
        if (format &&
            (alt_va_format = gst_vaapi_image_format_get_va_format(format))) {
            alt_va_image = *va_image;
            alt_va_image.format = *alt_va_format;
            /* Swap the chroma plane offsets/pitches to match the sibling
             * format's plane order. */
            SWAP_UINT(alt_va_image.offsets[1], alt_va_image.offsets[2]);
            SWAP_UINT(alt_va_image.pitches[1], alt_va_image.pitches[2]);
            if (vaapi_image_is_linear(&alt_va_image)) {
                priv->image = alt_va_image;
                priv->format = format;
                priv->is_linear = TRUE;
                GST_DEBUG("linearized image to %" GST_FOURCC_FORMAT " format",
                          GST_FOURCC_ARGS(format));
            }
        }
    }
    return TRUE;
}
/* Look up the QtNodeType entry for @fourcc. Unknown fourccs are logged
 * and the table's final catch-all entry is returned, so the result is
 * never NULL. */
const QtNodeType *
qtdemux_type_get (guint32 fourcc)
{
  const QtNodeType *node = qt_node_types;
  const QtNodeType *end = qt_node_types + n_qt_node_types;

  for (; node < end; node++) {
    if (G_UNLIKELY (node->fourcc == fourcc))
      return node;
  }

  GST_WARNING ("unknown QuickTime node type %" GST_FOURCC_FORMAT,
      GST_FOURCC_ARGS (fourcc));

  /* Fall back to the last (sentinel) entry. */
  return &qt_node_types[n_qt_node_types - 1];
}
/*
 * gst_aiffparse_peek_chunk_info:
 * @aiff AIFFparse object
 * @tag holder for tag
 * @size holder for tag size
 *
 * Peek next chunk info (tag and size) without consuming adapter data.
 *
 * Returns: %TRUE when the chunk info (header) is available
 */
static gboolean
gst_aiffparse_peek_chunk_info (AIFFParse * aiff, guint32 * tag, guint32 * size)
{
  const guint8 *data = NULL;

  /* A chunk header is 8 bytes: 4-byte tag plus 4-byte size. */
  if (gst_adapter_available (aiff->adapter) < 8)
    return FALSE;

  data = gst_adapter_peek (aiff->adapter, 8);
  /* The tag is read little-endian so it compares against
   * GST_MAKE_FOURCC values; the size field itself is big-endian in
   * AIFF files. */
  *tag = GST_READ_UINT32_LE (data);
  *size = GST_READ_UINT32_BE (data + 4);

  /* %u: *size is an unsigned guint32 (was logged with %d). */
  GST_DEBUG ("Next chunk size is %u bytes, type %" GST_FOURCC_FORMAT, *size,
      GST_FOURCC_ARGS (*tag));

  return TRUE;
}
/* Parse the COMM chunk of an AIFF/AIFC file into the parser state:
 * channel count, frame count, sample depth/width, rate and endianness.
 * For AIFC only the uncompressed 'NONE' (big-endian) and 'sowt'
 * (little-endian) variants are accepted. */
static gboolean
gst_aiff_parse_parse_comm (GstAiffParse * aiff, GstBuffer * buf)
{
  guint8 *data;
  int needed;

  /* AIFC carries an extra 4-byte compression fourcc. */
  needed = aiff->is_aifc ? 22 : 18;

  if (GST_BUFFER_SIZE (buf) < needed) {
    GST_WARNING_OBJECT (aiff, "COMM chunk too short, cannot parse header");
    return FALSE;
  }

  data = GST_BUFFER_DATA (buf);

  aiff->channels = GST_READ_UINT16_BE (data);
  aiff->total_frames = GST_READ_UINT32_BE (data + 2);
  aiff->depth = GST_READ_UINT16_BE (data + 6);
  aiff->width = GST_ROUND_UP_8 (aiff->depth);
  /* Sample rate is stored as an 80-bit IEEE extended float. */
  aiff->rate = (int) gst_aiff_parse_read_IEEE80 (data + 8);

  if (!aiff->is_aifc) {
    aiff->endianness = G_BIG_ENDIAN;
  } else {
    guint32 comp = GST_READ_UINT32_LE (data + 18);

    /* We only support the 'trivial' uncompressed AIFC, but it can be
     * either big or little endian */
    if (comp == GST_MAKE_FOURCC ('N', 'O', 'N', 'E')) {
      aiff->endianness = G_BIG_ENDIAN;
    } else if (comp == GST_MAKE_FOURCC ('s', 'o', 'w', 't')) {
      aiff->endianness = G_LITTLE_ENDIAN;
    } else {
      GST_WARNING_OBJECT (aiff, "Unsupported compression in AIFC "
          "file: %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS (comp));
      return FALSE;
    }
  }

  return TRUE;
}
/**
 * gst_riff_parse_file_header:
 * @element: caller element (used for debugging/error).
 * @buf: input buffer from which the file header will be parsed,
 *       should be at least 12 bytes long.
 * @doctype: a fourcc (returned by this function) to indicate the
 *           type of document (according to the header).
 *
 * Reads the first few bytes from the provided buffer, checks
 * if this stream is a RIFF stream, and determines document type.
 * This function takes ownership of @buf so it should not be used anymore
 * after calling this function.
 *
 * Returns: FALSE if this is not a RIFF stream (in which case the
 * caller should error out; we already throw an error), or TRUE
 * if it is.
 */
gboolean
gst_riff_parse_file_header (GstElement * element,
    GstBuffer * buf, guint32 * doctype)
{
  guint8 *data;
  guint32 tag;

  g_return_val_if_fail (buf != NULL, FALSE);
  g_return_val_if_fail (doctype != NULL, FALSE);

  if (GST_BUFFER_SIZE (buf) < 12) {
    GST_ELEMENT_ERROR (element, STREAM, WRONG_TYPE, (NULL),
        ("Not enough data to parse RIFF header (%d available, %d needed)",
            GST_BUFFER_SIZE (buf), 12));
    gst_buffer_unref (buf);
    return FALSE;
  }

  data = GST_BUFFER_DATA (buf);

  /* Accept plain RIFF and the AVF0 variant. */
  tag = GST_READ_UINT32_LE (data);
  if (tag != GST_RIFF_TAG_RIFF && tag != GST_RIFF_TAG_AVF0) {
    GST_ELEMENT_ERROR (element, STREAM, WRONG_TYPE, (NULL),
        ("Stream is no RIFF stream: %" GST_FOURCC_FORMAT,
            GST_FOURCC_ARGS (tag)));
    gst_buffer_unref (buf);
    return FALSE;
  }

  /* The document type fourcc follows "RIFF" + 32-bit size. */
  *doctype = GST_READ_UINT32_LE (data + 8);

  gst_buffer_unref (buf);

  return TRUE;
}
static void gst_aiff_parse_ignore_chunk (GstAiffParse * aiff, GstBuffer * buf, guint32 tag, guint32 size) { guint flush; if (aiff->streaming) { if (!gst_aiff_parse_peek_chunk (aiff, &tag, &size)) return; } GST_DEBUG_OBJECT (aiff, "Ignoring tag %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS (tag)); flush = 8 + ((size + 1) & ~1); aiff->offset += flush; if (aiff->streaming) { gst_adapter_flush (aiff->adapter, flush); } else { gst_buffer_unref (buf); } }
/* Dump every field of a QuickTime ImageDescription to the debug log,
 * including any trailing extension atoms (e.g. 'avcC'). Log-only; no
 * side effects on @desc. */
void
dump_image_description (ImageDescription * desc)
{
  GST_LOG ("Description %p , size:%" G_GSIZE_FORMAT, desc, desc->idSize);

#if DEBUG_DUMP
  gst_util_dump_mem ((const guchar *) desc, desc->idSize);
#endif

  GST_LOG ("cType : %" GST_FOURCC_FORMAT, QT_FOURCC_ARGS (desc->cType));
  GST_LOG ("version:%d", desc->version);
  GST_LOG ("revisionLevel:%d", desc->revisionLevel);
  GST_LOG ("vendor:%" GST_FOURCC_FORMAT, QT_FOURCC_ARGS (desc->vendor));
  GST_LOG ("temporalQuality:%lu", desc->temporalQuality);
  GST_LOG ("spatialQuality:%lu", desc->spatialQuality);
  GST_LOG ("width:%u", desc->width);
  GST_LOG ("height:%u", desc->height);
  /* hRes/vRes are 16.16 fixed point, hence the 65536.0 divisor. */
  GST_LOG ("hres:%f", desc->hRes / 65536.0);
  GST_LOG ("vres:%f", desc->vRes / 65536.0);
  GST_LOG ("dataSize:%" G_GSIZE_FORMAT, desc->dataSize);
  GST_LOG ("frameCount:%d", desc->frameCount);
  /* name is a Pascal string: length byte first, then the characters. */
  GST_LOG ("name:%.*s", desc->name[0], desc->name + 1);
  GST_LOG ("depth:%d", desc->depth);
  GST_LOG ("clutID:%d", desc->clutID);
  /* Anything beyond sizeof(ImageDescription) is extension atom data. */
  if (desc->idSize > sizeof (ImageDescription)) {
    guint8 *extradata = (guint8 *) desc + sizeof (ImageDescription);
    /* Atom layout: 32-bit length, then 32-bit type fourcc. */
    guint32 type = QT_READ_UINT32 (extradata + 4);

    GST_LOG ("Extra Data size:%lu",
        (gulong) desc->idSize - (gulong) sizeof (ImageDescription));
#if DEBUG_DUMP
    gst_util_dump_mem ((gpointer) (gulong) desc +
        (gulong) sizeof (ImageDescription),
        (gulong) desc->idSize - (gulong) sizeof (ImageDescription));
#endif
    GST_LOG ("Extra Data Type : %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS (type));
    if (type == QT_MAKE_FOURCC ('a', 'v', 'c', 'C'))
      dump_avcc_atom (extradata);
  }
}
/* Lazily build self->allowed_caps from the DRM plane's supported pixel
 * formats, constrained to the resolution ranges the device reports.
 * Formats DRM knows but GStreamer does not are skipped with a log line.
 * Returns FALSE only if the initial empty caps allocation fails. */
static gboolean
ensure_allowed_caps (GstKMSSink * self, drmModePlane * plane, drmModeRes * res)
{
  GstCaps *merged, *fmt_caps;
  GstVideoFormat vfmt;
  const gchar *fmt_str;
  int idx;

  /* Already computed on a previous call. */
  if (self->allowed_caps)
    return TRUE;

  merged = gst_caps_new_empty ();
  if (!merged)
    return FALSE;

  for (idx = 0; idx < plane->count_formats; idx++) {
    vfmt = gst_video_format_from_drm (plane->formats[idx]);
    if (vfmt == GST_VIDEO_FORMAT_UNKNOWN) {
      GST_INFO_OBJECT (self, "ignoring format %" GST_FOURCC_FORMAT,
          GST_FOURCC_ARGS (plane->formats[idx]));
      continue;
    }

    fmt_str = gst_video_format_to_string (vfmt);
    fmt_caps = gst_caps_new_simple ("video/x-raw",
        "format", G_TYPE_STRING, fmt_str,
        "width", GST_TYPE_INT_RANGE, res->min_width, res->max_width,
        "height", GST_TYPE_INT_RANGE, res->min_height, res->max_height,
        "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
    if (!fmt_caps)
      continue;
    /* gst_caps_merge() takes ownership of both arguments. */
    merged = gst_caps_merge (merged, fmt_caps);
  }

  self->allowed_caps = gst_caps_simplify (merged);

  GST_DEBUG_OBJECT (self, "allowed caps = %" GST_PTR_FORMAT,
      self->allowed_caps);

  return TRUE;
}
/* Stop the AudioUnit and, if the I/O proc is still installed, tear down
 * its render callback. A stop failure is logged but not treated as
 * fatal; the function always reports success. */
gboolean
gst_core_audio_io_proc_stop (GstCoreAudio * core_audio)
{
  OSErr status;

  GST_DEBUG_OBJECT (core_audio->osxbuf,
      "osx ring buffer stop ioproc: %p device_id %lu",
      core_audio->element->io_proc, (gulong) core_audio->device_id);

  status = AudioOutputUnitStop (core_audio->audiounit);
  if (status) {
    GST_WARNING_OBJECT (core_audio->osxbuf,
        "AudioOutputUnitStop failed: %" GST_FOURCC_FORMAT,
        GST_FOURCC_ARGS (status));
  }

  /* ###: why is it okay to directly remove from here but not from pause() ? */
  if (core_audio->io_proc_active)
    gst_core_audio_remove_render_callback (core_audio);

  return TRUE;
}
/* Validate the 12-byte FORM header of an AIFF/AIFC file and record
 * which variant this is in aiff->is_aifc. Consumes @buf in all paths.
 * On any mismatch an element error is posted and FALSE returned. */
static gboolean
gst_aiff_parse_parse_file_header (GstAiffParse * aiff, GstBuffer * buf)
{
  guint8 *data;
  guint32 header, type = 0;

  if (GST_BUFFER_SIZE (buf) < 12) {
    GST_WARNING_OBJECT (aiff, "Buffer too short");
    goto not_aiff;
  }

  data = GST_BUFFER_DATA (buf);
  header = GST_READ_UINT32_LE (data);
  type = GST_READ_UINT32_LE (data + 8);

  if (header != GST_MAKE_FOURCC ('F', 'O', 'R', 'M'))
    goto not_aiff;

  switch (type) {
    case GST_MAKE_FOURCC ('A', 'I', 'F', 'F'):
      aiff->is_aifc = FALSE;
      break;
    case GST_MAKE_FOURCC ('A', 'I', 'F', 'C'):
      aiff->is_aifc = TRUE;
      break;
    default:
      goto not_aiff;
  }

  gst_buffer_unref (buf);
  return TRUE;

  /* ERRORS */
not_aiff:
  {
    GST_ELEMENT_ERROR (aiff, STREAM, WRONG_TYPE, (NULL),
        ("File is not an AIFF file: %" GST_FOURCC_FORMAT,
            GST_FOURCC_ARGS (type)));
    gst_buffer_unref (buf);
    return FALSE;
  }
}
/* Accept new caps: extract the mandatory width/height, then emit the
 * have-size signal. Caps without both dimensions are now rejected
 * (previously the unchecked gst_structure_get_int() calls silently kept
 * stale values); the stray g_print() debug output to stdout was removed. */
static gboolean
gst_aasink_setcaps (GstBaseSink * basesink, GstCaps * caps)
{
  GstAASink *aasink;
  GstStructure *structure;

  aasink = GST_AASINK (basesink);

  structure = gst_caps_get_structure (caps, 0);
  if (!gst_structure_get_int (structure, "width", &aasink->width) ||
      !gst_structure_get_int (structure, "height", &aasink->height)) {
    GST_WARNING_OBJECT (aasink, "caps without width and/or height, rejecting");
    return FALSE;
  }

  /* FIXME aasink->format is never set */
  GST_DEBUG ("aasink: setting %08lx (%" GST_FOURCC_FORMAT ")",
      aasink->format, GST_FOURCC_ARGS (aasink->format));

  g_signal_emit (G_OBJECT (aasink), gst_aasink_signals[SIGNAL_HAVE_SIZE], 0,
      aasink->width, aasink->height);
  return TRUE;
}
/* Chain function: accumulate incoming buffers in the adapter and decode
 * every complete Mimic packet found. Packet layout (24-byte header):
 *   [0]   header size (must be 24)     [1]    paused-frame flag
 *   [8]   payload size (LE)            [12]   fourcc, must be 'ML20'
 *   [20]  timestamp in ms (LE)
 * The first decoded packet initializes the decoder (keyframe required)
 * and negotiates RGB caps; a TIME segment is pushed before the first
 * output buffer. */
static GstFlowReturn
gst_mim_dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstMimDec *mimdec = GST_MIM_DEC (parent);
  GstBuffer *out_buf;
  const guchar *header, *frame_body;
  guint32 fourcc;
  guint16 header_size;
  gint width, height;
  GstCaps *caps;
  GstFlowReturn res = GST_FLOW_OK;
  GstClockTime in_time = GST_BUFFER_TIMESTAMP (buf);
  GstEvent *event = NULL;
  gboolean result = TRUE;
  guint32 payload_size;
  guint32 current_ts;
  GstMapInfo map;

  gst_adapter_push (mimdec->adapter, buf);

  /* do we have enough bytes to read a header */
  while (gst_adapter_available (mimdec->adapter) >= 24) {
    header = gst_adapter_map (mimdec->adapter, 24);
    header_size = header[0];
    if (header_size != 24) {
      gst_adapter_unmap (mimdec->adapter);
      gst_adapter_flush (mimdec->adapter, 24);
      GST_ELEMENT_ERROR (mimdec, STREAM, DECODE, (NULL),
          ("invalid frame: header size %d incorrect", header_size));
      return GST_FLOW_ERROR;
    }

    if (header[1] == 1) {
      /* This is a a paused frame, skip it */
      gst_adapter_unmap (mimdec->adapter);
      gst_adapter_flush (mimdec->adapter, 24);
      continue;
    }

    fourcc = GUINT32_FROM_LE (*((guint32 *) (header + 12)));
    if (GST_MAKE_FOURCC ('M', 'L', '2', '0') != fourcc) {
      gst_adapter_unmap (mimdec->adapter);
      gst_adapter_flush (mimdec->adapter, 24);
      GST_ELEMENT_ERROR (mimdec, STREAM, WRONG_TYPE, (NULL),
          ("invalid frame: unknown FOURCC code %X (%" GST_FOURCC_FORMAT ")",
              fourcc, GST_FOURCC_ARGS (fourcc)));
      return GST_FLOW_ERROR;
    }

    payload_size = GUINT32_FROM_LE (*((guint32 *) (header + 8)));
    current_ts = GUINT32_FROM_LE (*((guint32 *) (header + 20)));
    gst_adapter_unmap (mimdec->adapter);

    GST_LOG_OBJECT (mimdec, "Got packet, payload size %d", payload_size);

    /* Wait until the full payload is buffered; the header stays in the
     * adapter so it is re-parsed on the next chain call. */
    if (gst_adapter_available (mimdec->adapter) < payload_size + 24)
      return GST_FLOW_OK;

    /* We have a whole packet and have read the header, lets flush it out */
    gst_adapter_flush (mimdec->adapter, 24);

    frame_body = gst_adapter_map (mimdec->adapter, payload_size);

    /* buffer_size < 0 marks an uninitialized decoder. */
    if (mimdec->buffer_size < 0) {
      /* Check if its a keyframe, otherwise skip it */
      if (GUINT32_FROM_LE (*((guint32 *) (frame_body + 12))) != 0) {
        gst_adapter_unmap (mimdec->adapter);
        gst_adapter_flush (mimdec->adapter, payload_size);
        return GST_FLOW_OK;
      }

      if (!mimic_decoder_init (mimdec->dec, frame_body)) {
        gst_adapter_unmap (mimdec->adapter);
        gst_adapter_flush (mimdec->adapter, payload_size);
        GST_ELEMENT_ERROR (mimdec, LIBRARY, INIT, (NULL),
            ("mimic_decoder_init error"));
        return GST_FLOW_ERROR;
      }

      if (!mimic_get_property (mimdec->dec, "buffer_size",
              &mimdec->buffer_size)) {
        gst_adapter_unmap (mimdec->adapter);
        gst_adapter_flush (mimdec->adapter, payload_size);
        GST_ELEMENT_ERROR (mimdec, LIBRARY, INIT, (NULL),
            ("mimic_get_property('buffer_size') error"));
        return GST_FLOW_ERROR;
      }

      mimic_get_property (mimdec->dec, "width", &width);
      mimic_get_property (mimdec->dec, "height", &height);
      GST_DEBUG_OBJECT (mimdec,
          "Initialised decoder with %d x %d payload size %d buffer_size %d",
          width, height, payload_size, mimdec->buffer_size);
      caps = gst_caps_new_simple ("video/x-raw",
          "format", G_TYPE_STRING, "RGB",
          "framerate", GST_TYPE_FRACTION, 0, 1,
          "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, NULL);
      gst_pad_set_caps (mimdec->srcpad, caps);
      gst_caps_unref (caps);
    }

    /* Push a TIME segment before the first output buffer, starting at
     * the incoming buffer timestamp when valid, else the packet's own
     * millisecond timestamp. */
    if (mimdec->need_segment) {
      GstSegment segment;

      gst_segment_init (&segment, GST_FORMAT_TIME);

      if (GST_CLOCK_TIME_IS_VALID (in_time))
        segment.start = in_time;
      else
        segment.start = current_ts * GST_MSECOND;

      event = gst_event_new_segment (&segment);
    }
    mimdec->need_segment = FALSE;

    if (event)
      result = gst_pad_push_event (mimdec->srcpad, event);
    event = NULL;

    if (!result) {
      GST_WARNING_OBJECT (mimdec, "gst_pad_push_event failed");
      return GST_FLOW_ERROR;
    }

    out_buf = gst_buffer_new_allocate (NULL, mimdec->buffer_size, NULL);
    gst_buffer_map (out_buf, &map, GST_MAP_READWRITE);

    if (!mimic_decode_frame (mimdec->dec, frame_body, map.data)) {
      GST_WARNING_OBJECT (mimdec, "mimic_decode_frame error\n");

      /* NOTE(review): unlike the other error paths, this one flushes
       * without a preceding gst_adapter_unmap() — verify whether the
       * unmap is required here. */
      gst_adapter_flush (mimdec->adapter, payload_size);
      gst_buffer_unmap (out_buf, &map);
      gst_buffer_unref (out_buf);

      GST_ELEMENT_ERROR (mimdec, STREAM, DECODE, (NULL),
          ("mimic_decode_frame error"));
      return GST_FLOW_ERROR;
    }
    gst_buffer_unmap (out_buf, &map);
    gst_adapter_flush (mimdec->adapter, payload_size);

    if (GST_CLOCK_TIME_IS_VALID (in_time))
      GST_BUFFER_TIMESTAMP (out_buf) = in_time;
    else
      GST_BUFFER_TIMESTAMP (out_buf) = current_ts * GST_MSECOND;

    res = gst_pad_push (mimdec->srcpad, out_buf);

    if (res != GST_FLOW_OK)
      break;
  }

  return res;
}
/* (Re)create the SDL screen surface and the YUV overlay used for
 * rendering. Any previously created surfaces are destroyed first.
 * Must be called with the sdl lock held — the lock is temporarily
 * dropped only while asking the app for an X window id. */
static gboolean
gst_sdlvideosink_create (GstSDLVideoSink * sdlvideosink)
{
  /* Fall back to the negotiated dimensions when no explicit video-sink
   * size was set. */
  if (GST_VIDEO_SINK_HEIGHT (sdlvideosink) <= 0)
    GST_VIDEO_SINK_HEIGHT (sdlvideosink) = sdlvideosink->height;
  if (GST_VIDEO_SINK_WIDTH (sdlvideosink) <= 0)
    GST_VIDEO_SINK_WIDTH (sdlvideosink) = sdlvideosink->width;

  gst_sdlvideosink_destroy (sdlvideosink);

  /* Ask the application for a window id; release the sdl lock while
   * doing so since the app may call back into this element. */
  if (sdlvideosink->is_xwindows && !sdlvideosink->xwindow_id) {
    g_mutex_unlock (sdlvideosink->lock);
    gst_x_overlay_prepare_xwindow_id (GST_X_OVERLAY (sdlvideosink));
    g_mutex_lock (sdlvideosink->lock);
  }

  /* create a SDL window of the size requested by the user */
  if (sdlvideosink->full_screen) {
    sdlvideosink->screen =
        SDL_SetVideoMode (GST_VIDEO_SINK_WIDTH (sdlvideosink),
        GST_VIDEO_SINK_HEIGHT (sdlvideosink), 0,
        SDL_SWSURFACE | SDL_FULLSCREEN);
  } else {
    sdlvideosink->screen =
        SDL_SetVideoMode (GST_VIDEO_SINK_WIDTH (sdlvideosink),
        GST_VIDEO_SINK_HEIGHT (sdlvideosink), 0,
        SDL_HWSURFACE | SDL_RESIZABLE);
  }
  if (sdlvideosink->screen == NULL)
    goto no_screen;

  /* create a new YUV overlay */
  sdlvideosink->overlay = SDL_CreateYUVOverlay (sdlvideosink->width,
      sdlvideosink->height, sdlvideosink->format, sdlvideosink->screen);
  if (sdlvideosink->overlay == NULL)
    goto no_overlay;

  GST_DEBUG ("Using a %dx%d %dbpp SDL screen with a %dx%d \'%"
      GST_FOURCC_FORMAT "\' YUV overlay", GST_VIDEO_SINK_WIDTH (sdlvideosink),
      GST_VIDEO_SINK_HEIGHT (sdlvideosink),
      sdlvideosink->screen->format->BitsPerPixel, sdlvideosink->width,
      sdlvideosink->height, GST_FOURCC_ARGS (sdlvideosink->format));

  sdlvideosink->rect.x = 0;
  sdlvideosink->rect.y = 0;
  sdlvideosink->rect.w = GST_VIDEO_SINK_WIDTH (sdlvideosink);
  sdlvideosink->rect.h = GST_VIDEO_SINK_HEIGHT (sdlvideosink);

  /*SDL_DisplayYUVOverlay (sdlvideosink->overlay, &(sdlvideosink->rect)); */

  GST_DEBUG ("sdlvideosink: setting %08x (%" GST_FOURCC_FORMAT ")",
      sdlvideosink->format, GST_FOURCC_ARGS (sdlvideosink->format));

  return TRUE;

  /* ERRORS */
no_screen:
  {
    GST_ELEMENT_ERROR (sdlvideosink, LIBRARY, TOO_LAZY, (NULL),
        ("SDL: Couldn't set %dx%d: %s", GST_VIDEO_SINK_WIDTH (sdlvideosink),
            GST_VIDEO_SINK_HEIGHT (sdlvideosink), SDL_GetError ()));
    return FALSE;
  }
no_overlay:
  {
    GST_ELEMENT_ERROR (sdlvideosink, LIBRARY, TOO_LAZY, (NULL),
        ("SDL: Couldn't create SDL YUV overlay (%dx%d \'%" GST_FOURCC_FORMAT
            "\'): %s", sdlvideosink->width, sdlvideosink->height,
            GST_FOURCC_ARGS (sdlvideosink->format), SDL_GetError ()));
    return FALSE;
  }
}
/* Parse a QuickTime sample description (sd) carried in the RTP stream.
 * For H.264 (avc1) sample entries this locates the 'avcC' child chunk,
 * wraps it in a buffer and pushes it downstream as codec_data caps.
 *
 * data/data_len describe the raw sample description bytes.  Returns
 * FALSE when the data is too short to parse, TRUE otherwise (including
 * when no avcC chunk was found). */
static gboolean
gst_rtp_quicktime_parse_sd (GstRtpXQTDepay * rtpxqtdepay, guint8 * data,
    guint data_len)
{
  gint len;
  guint32 fourcc;

  if (data_len < 8)
    goto too_short;

  /* total length of the sample description, stored big-endian */
  len = (data[0] << 24) | (data[1] << 16) | (data[2] << 8) | data[3];
  if (len > data_len)
    goto too_short;

  fourcc = QT_FOURCC (data + 4);

  GST_DEBUG_OBJECT (rtpxqtdepay, "parsing %" GST_FOURCC_FORMAT,
      GST_FOURCC_ARGS (fourcc));

  switch (fourcc) {
    case FOURCC_avc1:
    {
      guint32 chlen;

      /* skip the fixed-size part of the avc1 sample entry */
      if (len < 0x56)
        goto too_short;
      len -= 0x56;
      data += 0x56;

      /* find avcC among the child chunks */
      while (len >= 8) {
        chlen = QT_UINT32 (data);
        fourcc = QT_FOURCC (data + 4);
        if (fourcc == FOURCC_avcC) {
          GstBuffer *buf;
          gint size;
          GstCaps *caps;

          GST_DEBUG_OBJECT (rtpxqtdepay, "found avcC codec_data in sd, %u",
              chlen);

          /* Use the chunk length when it is sane, otherwise fall back to
           * the remaining bytes.  Chunk lengths below 8 would produce a
           * negative size, so they take the fallback path too. */
          if (chlen >= 8 && chlen < (guint32) len)
            size = chlen - 8;
          else
            size = len - 8;

          buf = gst_buffer_new_and_alloc (size);
          gst_buffer_fill (buf, 0, data + 8, size);
          caps = gst_caps_new_simple ("video/x-h264",
              "codec_data", GST_TYPE_BUFFER, buf, NULL);
          gst_buffer_unref (buf);
          gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD (rtpxqtdepay)->srcpad,
              caps);
          gst_caps_unref (caps);
          break;
        }
        /* FIX: a bogus chunk length of 0 previously left len/data
         * untouched and spun this loop forever on malformed input;
         * anything below 8 cannot be a valid chunk header. */
        if (chlen < 8)
          break;
        len -= chlen;
        data += chlen;
      }
      break;
    }
    default:
      break;
  }
  return TRUE;

  /* ERRORS */
too_short:
  {
    return FALSE;
  }
}
/**
 * gst_riff_parse_info:
 * @element: caller element (used for debugging/error).
 * @buf: input data to be used for parsing, stripped from header.
 * @taglist: a pointer to a taglist (returned by this function)
 *           containing information about this stream. May be
 *           NULL if no supported tags were found.
 *
 * Parses stream metadata from input data.
 *
 * Walks the LIST/INFO chunk payload: each entry is an 8-byte header
 * (little-endian fourcc + little-endian size) followed by a string
 * value, padded to even length.  Known fourccs are mapped to GStreamer
 * tags; unknown ones are logged and skipped.  Ownership of the returned
 * non-NULL taglist transfers to the caller.
 */
void
gst_riff_parse_info (GstElement * element,
    GstBuffer * buf, GstTagList ** _taglist)
{
  guint8 *data;
  guint size, tsize;
  guint32 tag;
  const gchar *type;
  GstTagList *taglist;

  g_return_if_fail (_taglist != NULL);
  if (!buf) {
    *_taglist = NULL;
    return;
  }
  data = GST_BUFFER_DATA (buf);
  size = GST_BUFFER_SIZE (buf);
  taglist = gst_tag_list_new ();

  /* need more than just the 8-byte sub-chunk header to proceed */
  while (size > 8) {
    tag = GST_READ_UINT32_LE (data);
    tsize = GST_READ_UINT32_LE (data + 4);
    size -= 8;
    data += 8;

    GST_DEBUG ("tag %" GST_FOURCC_FORMAT ", size %u",
        GST_FOURCC_ARGS (tag), tsize);

    /* clamp a lying size field to what is actually available */
    if (tsize > size) {
      GST_WARNING_OBJECT (element,
          "Tagsize %d is larger than available data %d", tsize, size);
      tsize = size;
    }

    /* find out the type of metadata */
    switch (tag) {
      case GST_RIFF_INFO_IARL:
        type = GST_TAG_LOCATION;
        break;
      case GST_RIFF_INFO_IART:
        type = GST_TAG_ARTIST;
        break;
      case GST_RIFF_INFO_ICMS:
        type = NULL;            /*"Commissioner"; */
        break;
      case GST_RIFF_INFO_ICMT:
        type = GST_TAG_COMMENT;
        break;
      case GST_RIFF_INFO_ICOP:
        type = GST_TAG_COPYRIGHT;
        break;
      case GST_RIFF_INFO_ICRD:
        type = GST_TAG_DATE;
        break;
      case GST_RIFF_INFO_ICRP:
        type = NULL;            /*"Cropped"; */
        break;
      case GST_RIFF_INFO_IDIM:
        type = NULL;            /*"Dimensions"; */
        break;
      case GST_RIFF_INFO_IDPI:
        type = NULL;            /*"Dots per Inch"; */
        break;
      case GST_RIFF_INFO_IENG:
        type = NULL;            /*"Engineer"; */
        break;
      case GST_RIFF_INFO_IGNR:
        type = GST_TAG_GENRE;
        break;
      case GST_RIFF_INFO_IKEY:
        type = GST_TAG_KEYWORDS;
        break;
      case GST_RIFF_INFO_ILGT:
        type = NULL;            /*"Lightness"; */
        break;
      case GST_RIFF_INFO_IMED:
        type = NULL;            /*"Medium"; */
        break;
      case GST_RIFF_INFO_INAM:
        type = GST_TAG_TITLE;
        break;
      case GST_RIFF_INFO_IPLT:
        type = NULL;            /*"Palette"; */
        break;
      case GST_RIFF_INFO_IPRD:
        type = NULL;            /*"Product"; */
        break;
      case GST_RIFF_INFO_ISBJ:
        type = NULL;            /*"Subject"; */
        break;
      case GST_RIFF_INFO_ISFT:
        type = GST_TAG_ENCODER;
        break;
      case GST_RIFF_INFO_ISHP:
        type = NULL;            /*"Sharpness"; */
        break;
      case GST_RIFF_INFO_ISRC:
        type = GST_TAG_ISRC;
        break;
      case GST_RIFF_INFO_ISRF:
        type = NULL;            /*"Source Form"; */
        break;
      case GST_RIFF_INFO_ITCH:
        type = NULL;            /*"Technician"; */
        break;
      default:
        type = NULL;
        GST_WARNING_OBJECT (element,
            "Unknown INFO (metadata) tag entry %" GST_FOURCC_FORMAT,
            GST_FOURCC_ARGS (tag));
        break;
    }

    /* skip unmapped tags and empty strings */
    if (type != NULL && data[0] != '\0') {
      static const gchar *env_vars[] = { "GST_AVI_TAG_ENCODING",
        "GST_RIFF_TAG_ENCODING", "GST_TAG_ENCODING", NULL
      };
      gchar *val;

      /* value may be in a non-UTF8 encoding; env vars allow an override */
      val = gst_tag_freeform_string_to_utf8 ((gchar *) data, tsize, env_vars);

      if (val) {
        gst_tag_list_add (taglist, GST_TAG_MERGE_APPEND, type, val, NULL);
        g_free (val);
      } else {
        GST_WARNING_OBJECT (element, "could not extract %s tag", type);
      }
    }

    /* entries are padded to even size; clamp again after padding */
    if (tsize & 1) {
      tsize++;
      if (tsize > size)
        tsize = size;
    }

    data += tsize;
    size -= tsize;
  }

  if (!gst_tag_list_is_empty (taglist)) {
    *_taglist = taglist;
  } else {
    /* nothing usable found: return NULL and drop the empty list */
    *_taglist = NULL;
    gst_tag_list_free (taglist);
  }

  return;
}
/* Parse a video stream format ('strf') chunk into a gst_riff_strf_vids
 * structure.  On success *_strf receives a newly allocated, host-endian
 * copy (caller frees) and *data receives a sub-buffer with any trailing
 * extradata (or NULL if none).  @buf is consumed (unreffed) on both the
 * success and error paths.  Returns FALSE if the chunk is too small. */
gboolean
gst_riff_parse_strf_vids (GstElement * element,
    GstBuffer * buf, gst_riff_strf_vids ** _strf, GstBuffer ** data)
{
  gst_riff_strf_vids *strf;

  g_return_val_if_fail (buf != NULL, FALSE);
  g_return_val_if_fail (_strf != NULL, FALSE);
  g_return_val_if_fail (data != NULL, FALSE);

  if (GST_BUFFER_SIZE (buf) < sizeof (gst_riff_strf_vids))
    goto too_small;

  strf = g_memdup (GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));

  /* on-disk layout is little-endian; byte-swap on big-endian hosts only */
#if (G_BYTE_ORDER == G_BIG_ENDIAN)
  strf->size = GUINT32_FROM_LE (strf->size);
  strf->width = GUINT32_FROM_LE (strf->width);
  strf->height = GUINT32_FROM_LE (strf->height);
  strf->planes = GUINT16_FROM_LE (strf->planes);
  strf->bit_cnt = GUINT16_FROM_LE (strf->bit_cnt);
  strf->compression = GUINT32_FROM_LE (strf->compression);
  strf->image_size = GUINT32_FROM_LE (strf->image_size);
  strf->xpels_meter = GUINT32_FROM_LE (strf->xpels_meter);
  strf->ypels_meter = GUINT32_FROM_LE (strf->ypels_meter);
  strf->num_colors = GUINT32_FROM_LE (strf->num_colors);
  strf->imp_colors = GUINT32_FROM_LE (strf->imp_colors);
#endif

  /* size checking */
  *data = NULL;
  if (strf->size > GST_BUFFER_SIZE (buf)) {
    /* header claims more than we have; clamp to the real buffer size */
    GST_WARNING_OBJECT (element,
        "strf_vids header gave %d bytes data, only %d available",
        strf->size, GST_BUFFER_SIZE (buf));
    strf->size = GST_BUFFER_SIZE (buf);
  }
  /* anything past the fixed header is codec extradata */
  if (sizeof (gst_riff_strf_vids) < GST_BUFFER_SIZE (buf)) {
    *data = gst_buffer_create_sub (buf, sizeof (gst_riff_strf_vids),
        GST_BUFFER_SIZE (buf) - sizeof (gst_riff_strf_vids));
  }

  /* debug */
  GST_INFO_OBJECT (element, "strf tag found in context vids:");
  GST_INFO_OBJECT (element, " size        %d", strf->size);
  GST_INFO_OBJECT (element, " width       %d", strf->width);
  GST_INFO_OBJECT (element, " height      %d", strf->height);
  GST_INFO_OBJECT (element, " planes      %d", strf->planes);
  GST_INFO_OBJECT (element, " bit_cnt     %d", strf->bit_cnt);
  GST_INFO_OBJECT (element, " compression %" GST_FOURCC_FORMAT,
      GST_FOURCC_ARGS (strf->compression));
  GST_INFO_OBJECT (element, " image_size  %d", strf->image_size);
  GST_INFO_OBJECT (element, " xpels_meter %d", strf->xpels_meter);
  GST_INFO_OBJECT (element, " ypels_meter %d", strf->ypels_meter);
  GST_INFO_OBJECT (element, " num_colors  %d", strf->num_colors);
  GST_INFO_OBJECT (element, " imp_colors  %d", strf->imp_colors);
  if (*data)
    GST_INFO_OBJECT (element, " %d bytes extradata",
        GST_BUFFER_SIZE (*data));

  /* sub-buffer (if any) holds its own reference to buf's data */
  gst_buffer_unref (buf);

  *_strf = strf;

  return TRUE;

  /* ERRORS */
too_small:
  {
    GST_ERROR_OBJECT (element,
        "Too small strf_vids (%d available, %d needed)",
        GST_BUFFER_SIZE (buf), (int) sizeof (gst_riff_strf_vids));
    gst_buffer_unref (buf);
    return FALSE;
  }
}
/* Parse a stream header ('strh') chunk into a gst_riff_strh structure.
 * On success *_strh receives a newly allocated, host-endian copy (caller
 * frees).  @buf is consumed (unreffed) on both success and error paths.
 * Returns FALSE if the chunk is smaller than the strh layout. */
gboolean
gst_riff_parse_strh (GstElement * element,
    GstBuffer * buf, gst_riff_strh ** _strh)
{
  gst_riff_strh *strh;

  g_return_val_if_fail (buf != NULL, FALSE);
  g_return_val_if_fail (_strh != NULL, FALSE);

  if (GST_BUFFER_SIZE (buf) < sizeof (gst_riff_strh))
    goto too_small;

  strh = g_memdup (GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
  gst_buffer_unref (buf);

  /* on-disk layout is little-endian; byte-swap on big-endian hosts only */
#if (G_BYTE_ORDER == G_BIG_ENDIAN)
  strh->type = GUINT32_FROM_LE (strh->type);
  strh->fcc_handler = GUINT32_FROM_LE (strh->fcc_handler);
  strh->flags = GUINT32_FROM_LE (strh->flags);
  strh->priority = GUINT32_FROM_LE (strh->priority);
  strh->init_frames = GUINT32_FROM_LE (strh->init_frames);
  strh->scale = GUINT32_FROM_LE (strh->scale);
  strh->rate = GUINT32_FROM_LE (strh->rate);
  strh->start = GUINT32_FROM_LE (strh->start);
  strh->length = GUINT32_FROM_LE (strh->length);
  strh->bufsize = GUINT32_FROM_LE (strh->bufsize);
  strh->quality = GUINT32_FROM_LE (strh->quality);
  strh->samplesize = GUINT32_FROM_LE (strh->samplesize);
#endif

  /* avoid divisions by zero */
  if (!strh->scale)
    strh->scale = 1;
  if (!strh->rate)
    strh->rate = 1;

  /* debug */
  GST_INFO_OBJECT (element, "strh tag found:");
  GST_INFO_OBJECT (element, " type        %" GST_FOURCC_FORMAT,
      GST_FOURCC_ARGS (strh->type));
  GST_INFO_OBJECT (element, " fcc_handler %" GST_FOURCC_FORMAT,
      GST_FOURCC_ARGS (strh->fcc_handler));
  GST_INFO_OBJECT (element, " flags       0x%08x", strh->flags);
  GST_INFO_OBJECT (element, " priority    %d", strh->priority);
  GST_INFO_OBJECT (element, " init_frames %d", strh->init_frames);
  GST_INFO_OBJECT (element, " scale       %d", strh->scale);
  GST_INFO_OBJECT (element, " rate        %d", strh->rate);
  GST_INFO_OBJECT (element, " start       %d", strh->start);
  GST_INFO_OBJECT (element, " length      %d", strh->length);
  GST_INFO_OBJECT (element, " bufsize     %d", strh->bufsize);
  GST_INFO_OBJECT (element, " quality     %d", strh->quality);
  GST_INFO_OBJECT (element, " samplesize  %d", strh->samplesize);

  *_strh = strh;

  return TRUE;

  /* ERRORS */
too_small:
  {
    GST_ERROR_OBJECT (element,
        "Too small strh (%d available, %d needed)",
        GST_BUFFER_SIZE (buf), (int) sizeof (gst_riff_strh));
    gst_buffer_unref (buf);
    return FALSE;
  }
}
/* Read AIFF chunks until both the COMM (format) and SSND (data) chunks
 * have been seen, then set up caps, the segment and any pending seek.
 * Works in both push (aiff->streaming, adapter-based) and pull mode.
 * Returns GST_FLOW_OK when done or when more streaming data is needed;
 * a flow error (with an element error posted) otherwise. */
static GstFlowReturn
gst_aiff_parse_stream_headers (GstAiffParse * aiff)
{
  GstFlowReturn res;
  GstBuffer *buf;
  guint32 tag, size;
  gboolean gotdata = FALSE;
  gboolean done = FALSE;
  GstEvent **event_p;
  GstFormat bformat;
  gint64 upstream_size = 0;

  bformat = GST_FORMAT_BYTES;
  gst_pad_query_peer_duration (aiff->sinkpad, &bformat, &upstream_size);
  GST_DEBUG_OBJECT (aiff, "upstream size %" G_GUINT64_FORMAT, upstream_size);

  /* loop headers until we get data */
  while (!done) {
    if (aiff->streaming) {
      /* push mode: bail out (OK) until enough bytes are in the adapter */
      if (!gst_aiff_parse_peek_chunk_info (aiff, &tag, &size))
        return GST_FLOW_OK;
    } else {
      /* pull mode: read the 8-byte chunk header directly */
      if ((res = gst_pad_pull_range (aiff->sinkpad, aiff->offset, 8,
                  &buf)) != GST_FLOW_OK)
        goto header_read_error;
      tag = GST_READ_UINT32_LE (GST_BUFFER_DATA (buf));
      size = GST_READ_UINT32_BE (GST_BUFFER_DATA (buf) + 4);
    }

    GST_INFO_OBJECT (aiff,
        "Got TAG: %" GST_FOURCC_FORMAT ", offset %" G_GUINT64_FORMAT,
        GST_FOURCC_ARGS (tag), aiff->offset);

    /* We just keep reading chunks until we find the one we're
     * interested in. */
    switch (tag) {
      case GST_MAKE_FOURCC ('C', 'O', 'M', 'M'):{
        if (aiff->streaming) {
          if (!gst_aiff_parse_peek_chunk (aiff, &tag, &size))
            return GST_FLOW_OK;

          gst_adapter_flush (aiff->adapter, 8);
          aiff->offset += 8;

          buf = gst_adapter_take_buffer (aiff->adapter, size);
        } else {
          if ((res = gst_aiff_parse_read_chunk (aiff,
                      &aiff->offset, &tag, &buf)) != GST_FLOW_OK)
            return res;
        }

        if (!gst_aiff_parse_parse_comm (aiff, buf)) {
          gst_buffer_unref (buf);
          goto parse_header_error;
        }

        gst_buffer_unref (buf);

        /* do sanity checks of header fields */
        if (aiff->channels == 0)
          goto no_channels;
        if (aiff->rate == 0)
          goto no_rate;

        GST_DEBUG_OBJECT (aiff, "creating the caps");

        aiff->caps = gst_aiff_parse_create_caps (aiff);
        if (!aiff->caps)
          goto unknown_format;

        gst_pad_set_caps (aiff->srcpad, aiff->caps);

        aiff->bytes_per_sample = aiff->channels * aiff->width / 8;
        aiff->bps = aiff->bytes_per_sample * aiff->rate;

        if (aiff->bytes_per_sample <= 0)
          goto no_bytes_per_sample;

        aiff->got_comm = TRUE;
        break;
      }
      case GST_MAKE_FOURCC ('S', 'S', 'N', 'D'):{
        GstFormat fmt;
        GstBuffer *ssndbuf = NULL;
        const guint8 *ssnddata = NULL;
        guint32 datasize;

        GST_DEBUG_OBJECT (aiff, "Got 'SSND' TAG, size : %d", size);

        /* Now, read the 8-byte header in the SSND chunk */
        if (aiff->streaming) {
          if (!gst_aiff_parse_peek_data (aiff, 16, &ssnddata))
            return GST_FLOW_OK;
        } else {
          gst_buffer_unref (buf);
          if ((res = gst_pad_pull_range (aiff->sinkpad, aiff->offset, 16,
                      &ssndbuf)) != GST_FLOW_OK)
            goto header_read_error;
          ssnddata = GST_BUFFER_DATA (ssndbuf);
        }

        /* SSND payload starts with 4-byte offset and 4-byte blocksize */
        aiff->ssnd_offset = GST_READ_UINT32_BE (ssnddata + 8);
        aiff->ssnd_blocksize = GST_READ_UINT32_BE (ssnddata + 12);

        gotdata = TRUE;
        if (aiff->streaming) {
          gst_adapter_flush (aiff->adapter, 16);
        } else {
          gst_buffer_unref (ssndbuf);
        }
        /* 8 byte chunk header, 16 byte SSND header */
        aiff->offset += 24;
        datasize = size - 16;

        aiff->datastart = aiff->offset + aiff->ssnd_offset;
        /* file might be truncated */
        /* NOTE(review): fmt is assigned but never used below, and the
         * MIN() result is stored into 'size' which is not read again —
         * this looks like it was meant to clamp 'datasize' instead.
         * Left as-is; confirm against upstream before changing. */
        fmt = GST_FORMAT_BYTES;
        if (upstream_size) {
          size = MIN (datasize, (upstream_size - aiff->datastart));
        }
        aiff->datasize = (guint64) datasize;
        aiff->dataleft = (guint64) datasize;
        aiff->end_offset = datasize + aiff->datastart;
        if (!aiff->streaming) {
          /* We will continue looking at chunks until the end - to read
           * tags, etc. */
          aiff->offset += datasize;
        }
        GST_DEBUG_OBJECT (aiff, "datasize = %d", datasize);
        if (aiff->streaming) {
          done = TRUE;
        }
        break;
      }
      default:
        /* unknown chunk: skip its payload */
        gst_aiff_parse_ignore_chunk (aiff, buf, tag, size);
    }

    if (upstream_size && (aiff->offset >= upstream_size)) {
      /* Now we have gone through the whole file */
      done = TRUE;
    }
  }

  /* We read all the chunks (in pull mode) or reached the SSND chunk
   * (in push mode). We must have both COMM and SSND now; error out
   * otherwise. */
  if (!aiff->got_comm) {
    GST_WARNING_OBJECT (aiff, "Failed to find COMM chunk");
    goto no_header;
  }
  if (!gotdata) {
    GST_WARNING_OBJECT (aiff, "Failed to find SSND chunk");
    goto no_data;
  }

  GST_DEBUG_OBJECT (aiff, "Finished parsing headers");

  if (gst_aiff_parse_calculate_duration (aiff)) {
    gst_segment_init (&aiff->segment, GST_FORMAT_TIME);
    gst_segment_set_duration (&aiff->segment, GST_FORMAT_TIME,
        aiff->duration);
  } else {
    /* no bitrate, let downstream peer do the math, we'll feed it bytes. */
    gst_segment_init (&aiff->segment, GST_FORMAT_BYTES);
    gst_segment_set_duration (&aiff->segment, GST_FORMAT_BYTES,
        aiff->datasize);
  }

  /* now we have all the info to perform a pending seek if any, if no
   * event, this will still do the right thing and it will also send
   * the right newsegment event downstream. */
  gst_aiff_parse_perform_seek (aiff, aiff->seek_event);
  /* remove pending event */
  event_p = &aiff->seek_event;
  gst_event_replace (event_p, NULL);

  /* we just started, we are discont */
  aiff->discont = TRUE;

  aiff->state = AIFF_PARSE_DATA;

  return GST_FLOW_OK;

  /* ERROR */
no_header:
  {
    GST_ELEMENT_ERROR (aiff, STREAM, TYPE_NOT_FOUND, (NULL),
        ("Invalid AIFF header (no COMM found)"));
    return GST_FLOW_ERROR;
  }
no_data:
  {
    GST_ELEMENT_ERROR (aiff, STREAM, TYPE_NOT_FOUND, (NULL),
        ("Invalid AIFF: no SSND found"));
    return GST_FLOW_ERROR;
  }
parse_header_error:
  {
    GST_ELEMENT_ERROR (aiff, STREAM, DEMUX, (NULL),
        ("Couldn't parse audio header"));
    return GST_FLOW_ERROR;
  }
no_channels:
  {
    GST_ELEMENT_ERROR (aiff, STREAM, FAILED, (NULL),
        ("Stream claims to contain no channels - invalid data"));
    return GST_FLOW_ERROR;
  }
no_rate:
  {
    GST_ELEMENT_ERROR (aiff, STREAM, FAILED, (NULL),
        ("Stream with sample_rate == 0 - invalid data"));
    return GST_FLOW_ERROR;
  }
no_bytes_per_sample:
  {
    /* NOTE(review): "caluclate" typo in the user-visible message;
     * left unchanged here since it is runtime-emitted text. */
    GST_ELEMENT_ERROR (aiff, STREAM, FAILED, (NULL),
        ("Could not caluclate bytes per sample - invalid data"));
    return GST_FLOW_ERROR;
  }
unknown_format:
  {
    GST_ELEMENT_ERROR (aiff, STREAM, TYPE_NOT_FOUND, (NULL),
        ("No caps found for format 0x%x, %d channels, %d Hz",
            aiff->format, aiff->channels, aiff->rate));
    return GST_FLOW_ERROR;
  }
header_read_error:
  {
    GST_ELEMENT_ERROR (aiff, STREAM, DEMUX, (NULL),
        ("Couldn't read in header"));
    return GST_FLOW_ERROR;
  }
}
/* GstBaseParse::handle_frame implementation: locate the PNG signature,
 * walk the chunk list (IHDR for dimensions, IDAT to grow the minimum
 * frame size, IEND to finish the frame), negotiating caps when the
 * image size changes.  *skipsize is set when bytes before the signature
 * should be discarded; returns a flow result for the base class. */
static GstFlowReturn
gst_png_parse_handle_frame (GstBaseParse * parse,
    GstBaseParseFrame * frame, gint * skipsize)
{
  GstPngParse *pngparse = GST_PNG_PARSE (parse);
  GstMapInfo map;
  GstByteReader reader;
  GstFlowReturn ret = GST_FLOW_OK;
  guint64 signature;
  guint width = 0, height = 0;

  gst_buffer_map (frame->buffer, &map, GST_MAP_READ);
  gst_byte_reader_init (&reader, map.data, map.size);

  if (!gst_byte_reader_peek_uint64_be (&reader, &signature))
    goto beach;

  if (signature != PNG_SIGNATURE) {
    /* resync: scan for the first 4 signature bytes (0x89 'P' 'N' 'G') */
    for (;;) {
      guint offset;

      offset = gst_byte_reader_masked_scan_uint32 (&reader, 0xffffffff,
          0x89504E47, 0, gst_byte_reader_get_remaining (&reader));

      /* NOTE(review): offset is guint compared against -1; this relies
       * on the scan function's "not found" return converting to
       * G_MAXUINT — works, but is worth confirming against the
       * GstByteReader API. */
      if (offset == -1) {
        /* keep the last 3 bytes in case the signature straddles input */
        *skipsize = gst_byte_reader_get_remaining (&reader) - 4;
        goto beach;
      }

      gst_byte_reader_skip (&reader, offset);

      if (!gst_byte_reader_peek_uint64_be (&reader, &signature))
        goto beach;

      if (signature == PNG_SIGNATURE) {
        /* We're skipping, go out, we'll be back */
        *skipsize = gst_byte_reader_get_pos (&reader);
        goto beach;
      }
      gst_byte_reader_skip (&reader, 4);
    }
  }

  /* skip the 8-byte PNG signature */
  gst_byte_reader_skip (&reader, 8);

  for (;;) {
    guint32 length;
    guint32 code;

    /* each chunk: 4-byte length, 4-byte type, payload, 4-byte CRC */
    if (!gst_byte_reader_get_uint32_be (&reader, &length))
      goto beach;
    if (!gst_byte_reader_get_uint32_le (&reader, &code))
      goto beach;

    GST_TRACE_OBJECT (parse, "%" GST_FOURCC_FORMAT " chunk, %u bytes",
        GST_FOURCC_ARGS (code), length);

    if (code == GST_MAKE_FOURCC ('I', 'H', 'D', 'R')) {
      /* first 8 payload bytes are width and height */
      if (!gst_byte_reader_get_uint32_be (&reader, &width))
        goto beach;
      if (!gst_byte_reader_get_uint32_be (&reader, &height))
        goto beach;
      length -= 8;
    } else if (code == GST_MAKE_FOURCC ('I', 'D', 'A', 'T')) {
      /* ask the base class for at least this chunk plus a minimal IEND */
      gst_base_parse_set_min_frame_size (parse,
          gst_byte_reader_get_pos (&reader) + 4 + length + 12);
    }

    /* skip remaining payload + CRC */
    if (!gst_byte_reader_skip (&reader, length + 4))
      goto beach;

    if (code == GST_MAKE_FOURCC ('I', 'E', 'N', 'D')) {
      /* the start code and at least 2 empty frames (IHDR and IEND) */
      gst_base_parse_set_min_frame_size (parse, 8 + 12 + 12);

      if (pngparse->width != width || pngparse->height != height) {
        GstCaps *caps, *sink_caps;

        pngparse->height = height;
        pngparse->width = width;

        caps = gst_caps_new_simple ("image/png",
            "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, NULL);

        /* forward upstream framerate if the sink caps carry one */
        sink_caps =
            gst_pad_get_current_caps (GST_BASE_PARSE_SINK_PAD (pngparse));
        if (sink_caps) {
          GstStructure *st;
          gint fr_num, fr_denom;

          st = gst_caps_get_structure (sink_caps, 0);
          if (st
              && gst_structure_get_fraction (st, "framerate", &fr_num,
                  &fr_denom)) {
            gst_caps_set_simple (caps,
                "framerate", GST_TYPE_FRACTION, fr_num, fr_denom, NULL);
          } else {
            GST_WARNING_OBJECT (pngparse, "No framerate set");
          }

          gst_caps_unref (sink_caps);
        }

        if (!gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps))
          ret = GST_FLOW_NOT_NEGOTIATED;

        gst_caps_unref (caps);

        if (ret != GST_FLOW_OK)
          goto beach;
      }

      /* complete image parsed: hand the frame to the base class */
      gst_buffer_unmap (frame->buffer, &map);
      return gst_base_parse_finish_frame (parse, frame,
          gst_byte_reader_get_pos (&reader));
    }
  }

beach:
  gst_buffer_unmap (frame->buffer, &map);

  return ret;
}
/* Choose a conversion strategy on 'space' for converting between the
 * formats described by from_caps and to_caps.  Sets space->type (and,
 * where relevant, space->converter / space->destbpp or the Hermes
 * source/dest descriptors) and returns TRUE, or FALSE when the pair is
 * unsupported. */
static gboolean
colorspace_setup_converter (GstHermesColorspace * space, GstCaps * from_caps,
    GstCaps * to_caps)
{
  guint32 from_space, to_space;
  GstStructure *from_struct;
  GstStructure *to_struct;

  g_return_val_if_fail (to_caps != NULL, FALSE);
  g_return_val_if_fail (from_caps != NULL, FALSE);

  from_struct = gst_caps_get_structure (from_caps, 0);
  to_struct = gst_caps_get_structure (to_caps, 0);

  /* default to packed RGB when no fourcc "format" field is present */
  from_space = GST_MAKE_FOURCC ('R', 'G', 'B', ' ');
  gst_structure_get_fourcc (from_struct, "format", &from_space);

  to_space = GST_MAKE_FOURCC ('R', 'G', 'B', ' ');
  gst_structure_get_fourcc (to_struct, "format", &to_space);

  /* NOTE(review): GST_FOURCC_FORMAT is used here without a leading "%",
   * unlike everywhere else in this file ("%" GST_FOURCC_FORMAT) —
   * confirm which definition of the macro this code was written
   * against. */
  GST_INFO ("set up converter for " GST_FOURCC_FORMAT
      " (%08x) to " GST_FOURCC_FORMAT " (%08x)",
      GST_FOURCC_ARGS (from_space), from_space,
      GST_FOURCC_ARGS (to_space), to_space);

  switch (from_space) {
    case GST_MAKE_FOURCC ('R', 'G', 'B', ' '):
    {
      gint from_bpp;

      gst_structure_get_int (from_struct, "bpp", &from_bpp);

      switch (to_space) {
        case GST_MAKE_FOURCC ('R', 'G', 'B', ' '):
#ifdef HAVE_HERMES
        {
          gint to_bpp;

          gst_structure_get_int (to_struct, "bpp", &to_bpp);

          /* fill Hermes source descriptor from the input caps masks */
          gst_structure_get_int (from_struct, "red_mask", &space->source.r);
          gst_structure_get_int (from_struct, "green_mask", &space->source.g);
          gst_structure_get_int (from_struct, "blue_mask", &space->source.b);
          space->source.a = 0;
          space->srcbpp = space->source.bits = from_bpp;
          space->source.indexed = 0;
          space->source.has_colorkey = 0;

          GST_INFO ("source red mask   %08x", space->source.r);
          GST_INFO ("source green mask %08x", space->source.g);
          GST_INFO ("source blue mask  %08x", space->source.b);
          GST_INFO ("source bpp        %08x", space->srcbpp);

          /* fill Hermes destination descriptor from the output caps */
          gst_structure_get_int (to_struct, "red_mask", &space->dest.r);
          gst_structure_get_int (to_struct, "green_mask", &space->dest.g);
          gst_structure_get_int (to_struct, "blue_mask", &space->dest.b);
          space->dest.a = 0;
          space->destbpp = space->dest.bits = to_bpp;
          space->dest.indexed = 0;
          space->dest.has_colorkey = 0;

          GST_INFO ("dest red mask   %08x", space->dest.r);
          GST_INFO ("dest green mask %08x", space->dest.g);
          GST_INFO ("dest blue mask  %08x", space->dest.b);
          GST_INFO ("dest bpp        %08x", space->destbpp);

          if (!Hermes_ConverterRequest (space->h_handle, &space->source,
                  &space->dest)) {
            g_warning ("Hermes: could not get converter\n");
            return FALSE;
          }
          GST_INFO ("converter set up");
          space->type = GST_HERMES_COLORSPACE_HERMES;
          return TRUE;
        }
#else
          g_warning ("colorspace: compiled without hermes!");
          return FALSE;
#endif
        case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
          if (from_bpp == 32) {
            space->type = GST_HERMES_COLORSPACE_RGB32_YV12;
            space->destbpp = 12;
            return TRUE;
          }
          /* NOTE(review): no break here — bpp != 32 falls through to the
           * I420 case (and eventually the YUY2 "not implemented" path).
           * Possibly intentional, but undocumented; confirm. */
        case GST_MAKE_FOURCC ('I', '4', '2', '0'):
          if (from_bpp == 32) {
            space->type = GST_HERMES_COLORSPACE_RGB32_I420;
            space->destbpp = 12;
            return TRUE;
          }
          /* NOTE(review): same missing break / fallthrough as above. */
        case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
          GST_INFO ("colorspace: RGB to YUV with bpp %d not implemented!!",
              from_bpp);
          return FALSE;
      }
      break;
    }
    case GST_MAKE_FOURCC ('I', '4', '2', '0'):
      switch (to_space) {
        case GST_MAKE_FOURCC ('R', 'G', 'B', ' '):
          GST_INFO ("colorspace: YUV to RGB");

          gst_structure_get_int (to_struct, "bpp", &space->destbpp);
          space->converter =
              gst_hermes_colorspace_yuv2rgb_get_converter (from_caps,
              to_caps);
          space->type = GST_HERMES_COLORSPACE_YUV_RGB;
          return TRUE;
        case GST_MAKE_FOURCC ('I', '4', '2', '0'):
          /* same format: passthrough */
          space->type = GST_HERMES_COLORSPACE_NONE;
          space->destbpp = 12;
          return TRUE;
        case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
          /* planar 4:2:0 with U/V planes swapped */
          space->type = GST_HERMES_COLORSPACE_420_SWAP;
          space->destbpp = 12;
          return TRUE;
      }
      break;
    case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
      switch (to_space) {
        case GST_MAKE_FOURCC ('I', '4', '2', '0'):
          space->type = GST_HERMES_COLORSPACE_YUY2_I420;
          space->destbpp = 12;
          return TRUE;
        case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
          /* same format: passthrough */
          space->type = GST_HERMES_COLORSPACE_NONE;
          space->destbpp = 16;
          return TRUE;
        case GST_MAKE_FOURCC ('R', 'G', 'B', ' '):
          GST_INFO ("colorspace: YUY2 to RGB not implemented!!");
          return FALSE;
      }
      break;
    case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
      switch (to_space) {
        case GST_MAKE_FOURCC ('R', 'G', 'B', ' '):
          GST_INFO ("colorspace: YV12 to RGB");

          gst_structure_get_int (to_struct, "bpp", &space->destbpp);
          space->converter =
              gst_hermes_colorspace_yuv2rgb_get_converter (from_caps,
              to_caps);
          space->type = GST_HERMES_COLORSPACE_YUV_RGB;
          return TRUE;
        case GST_MAKE_FOURCC ('I', '4', '2', '0'):
          /* planar 4:2:0 with U/V planes swapped */
          space->type = GST_HERMES_COLORSPACE_420_SWAP;
          space->destbpp = 12;
          return TRUE;
        case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
          /* same format: passthrough */
          space->type = GST_HERMES_COLORSPACE_NONE;
          space->destbpp = 12;
          return TRUE;
      }
      break;
  }
  /* no supported conversion found */
  return FALSE;
}