Example #1
int mic_create_mouse_connect(MediaControl* control, VideoSwitchSink* videoSwitch,
    MouseInput* input, MouseConnect** connect)
{
    MouseConnect* newConnect;

    CALLOC_ORET(newConnect, MouseConnect, 1);

    newConnect->control = control;
    newConnect->videoSwitch = videoSwitch;
    newConnect->input = input;

    newConnect->listener.data = newConnect;
    newConnect->listener.click = mic_click;

    mip_set_listener(input, &newConnect->listener);

    *connect = newConnect;
    return 1;
}
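
All of the constructors in these examples follow the same calling convention: they return 1 on success and 0 on failure, and they hand the new object back through the final out-parameter. A minimal usage sketch for Example #1 is shown below; the setup_mouse_control() wrapper and its callers are assumptions, not part of the player code.

/* Hedged usage sketch: creating the MediaControl, VideoSwitchSink and
 * MouseInput objects happens elsewhere and is not part of the example above. */
static int setup_mouse_control(MediaControl* control, VideoSwitchSink* videoSwitch,
    MouseInput* input, MouseConnect** mouseConnect)
{
    if (!mic_create_mouse_connect(control, videoSwitch, input, mouseConnect))
    {
        ml_log_error("Failed to create the mouse connect\n");
        return 0;
    }

    /* clicks received by the MouseInput are now forwarded through the
       registered listener to mic_click() */
    return 1;
}
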
Example #2
int cps_create(MediaSource* targetSource, const Rational* frameRate, int64_t start, int64_t duration, ClipSource** clipSource)
{
    ClipSource* newClipSource;

    if (start > 0 && !msc_seek(targetSource, start))
    {
        ml_log_warn("Failed to seek to start of the clip\n");
    }

    CALLOC_ORET(newClipSource, ClipSource, 1);

    newClipSource->targetSource = targetSource;
    newClipSource->start = (start < 0) ? 0 : start;
    newClipSource->duration = duration;
    newClipSource->frameRate = *frameRate; /* note: frameRate could be 0/0 */

    newClipSource->mediaSource.data = newClipSource;
    newClipSource->mediaSource.is_complete = cps_is_complete;
    newClipSource->mediaSource.post_complete = cps_post_complete;
    newClipSource->mediaSource.finalise_blank_source = cps_finalise_blank_source;
    newClipSource->mediaSource.get_num_streams = cps_get_num_streams;
    newClipSource->mediaSource.get_stream_info = cps_get_stream_info;
    newClipSource->mediaSource.set_frame_rate_or_disable = cps_set_frame_rate_or_disable;
    newClipSource->mediaSource.disable_stream = cps_disable_stream;
    newClipSource->mediaSource.disable_audio = cps_disable_audio;
    newClipSource->mediaSource.stream_is_disabled = cps_stream_is_disabled;
    newClipSource->mediaSource.read_frame = cps_read_frame;
    newClipSource->mediaSource.is_seekable = cps_is_seekable;
    newClipSource->mediaSource.seek = cps_seek;
    newClipSource->mediaSource.seek_timecode = cps_seek_timecode;
    newClipSource->mediaSource.get_length = cps_get_length;
    newClipSource->mediaSource.get_position = cps_get_position;
    newClipSource->mediaSource.get_available_length = cps_get_available_length;
    newClipSource->mediaSource.eof = cps_eof;
    newClipSource->mediaSource.set_source_name = cps_set_source_name;
    newClipSource->mediaSource.set_clip_id = cps_set_clip_id;
    newClipSource->mediaSource.close = cps_close;
    newClipSource->mediaSource.get_buffer_state = cps_get_buffer_state;
    newClipSource->mediaSource.convert_position = cps_convert_position;

    *clipSource = newClipSource;
    return 1;
}
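
cps_create() (and mls_create() in the next example) plugs a concrete source into a generic MediaSource interface by filling a table of function pointers plus an opaque data pointer. The interface definition itself is not part of these examples; judging from the assignments it presumably looks something like the sketch below, where the member signatures are guesses and the list is abbreviated.

/* Hedged sketch of the MediaSource interface implied by the assignments in
 * cps_create(); the real header may declare different signatures. */
struct MediaSource
{
    void* data;                          /* implementation object, e.g. a ClipSource* */
    int (*get_num_streams)(void* data);
    int (*is_seekable)(void* data);
    int (*seek)(void* data, int64_t position);
    int (*eof)(void* data);
    void (*close)(void* data);
    /* ... one function pointer per operation assigned above ... */
};
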
Example #3
int mls_create(const Rational* aspectRatio, int64_t maxLength, const Rational* maxLengthFrameRate, MultipleMediaSources** multSource)
{
    MultipleMediaSources* newMultSource;

    CALLOC_ORET(newMultSource, MultipleMediaSources, 1);

    if (aspectRatio != NULL)
    {
        newMultSource->aspectRatio = *aspectRatio;
    }
    newMultSource->maxLength = maxLength;
    newMultSource->maxLengthFrameRate = *maxLengthFrameRate;

    newMultSource->collectiveSource.data = newMultSource;
    newMultSource->collectiveSource.is_complete = mls_is_complete;
    newMultSource->collectiveSource.post_complete = mls_post_complete;
    newMultSource->collectiveSource.finalise_blank_source = mls_finalise_blank_source;
    newMultSource->collectiveSource.get_num_streams = mls_get_num_streams;
    newMultSource->collectiveSource.get_stream_info = mls_get_stream_info;
    newMultSource->collectiveSource.set_frame_rate_or_disable = mls_set_frame_rate_or_disable;
    newMultSource->collectiveSource.disable_stream = mls_disable_stream;
    newMultSource->collectiveSource.disable_audio = mls_disable_audio;
    newMultSource->collectiveSource.disable_video = mls_disable_video;
    newMultSource->collectiveSource.stream_is_disabled = mls_stream_is_disabled;
    newMultSource->collectiveSource.read_frame = mls_read_frame;
    newMultSource->collectiveSource.is_seekable = mls_is_seekable;
    newMultSource->collectiveSource.seek = mls_seek;
    newMultSource->collectiveSource.seek_timecode = mls_seek_timecode;
    newMultSource->collectiveSource.get_length = mls_get_length;
    newMultSource->collectiveSource.get_position = mls_get_position;
    newMultSource->collectiveSource.get_available_length = mls_get_available_length;
    newMultSource->collectiveSource.eof = mls_eof;
    newMultSource->collectiveSource.set_source_name = mls_set_source_name;
    newMultSource->collectiveSource.set_clip_id = mls_set_clip_id;
    newMultSource->collectiveSource.close = mls_close;
    newMultSource->collectiveSource.convert_position = mls_convert_position;

    *multSource = newMultSource;
    return 1;
}
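
Callers do not invoke the cps_* or mls_* functions directly; they go through msc_* wrappers, as the msc_seek() call at the top of Example #2 shows, and those wrappers dispatch through the function-pointer table. The wrapper bodies are not shown here; the following is only a plausible sketch of the dispatch pattern.

/* Illustrative sketch: the real msc_seek() is not shown in these examples
 * and may handle errors differently. */
int msc_seek(MediaSource* source, int64_t position)
{
    if (source == NULL || source->seek == NULL)
    {
        return 0;
    }
    return source->seek(source->data, position);
}
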
Example #4
int create_dv_connect(MediaSink* sink, int sinkStreamId, int sourceStreamId,
    const StreamInfo* streamInfo, int numFFMPEGThreads, int useWorkerThread, StreamConnect** connect)
{
    DVDecodeStreamConnect* newConnect;
    StreamInfo decodedStreamInfo;
    int result;

    /* register stream with sink */
    if (streamInfo->format == DV25_YUV420_FORMAT)
    {
        decodedStreamInfo = *streamInfo;
        decodedStreamInfo.format = YUV420_FORMAT;

        result = msk_accept_stream(sink, &decodedStreamInfo);
    }
    else if (streamInfo->format == DV25_YUV411_FORMAT)
    {
        decodedStreamInfo = *streamInfo;
        decodedStreamInfo.format = YUV411_FORMAT;

        result = msk_accept_stream(sink, &decodedStreamInfo);
    }
    else /* streamInfo->format == DV50_FORMAT || streamInfo->format == DV100_FORMAT */
    {
        decodedStreamInfo = *streamInfo;
        decodedStreamInfo.format = YUV422_FORMAT;

        result = msk_accept_stream(sink, &decodedStreamInfo);
    }

    /* try UYVY if default format is not accepted */
    if (!result)
    {
        decodedStreamInfo = *streamInfo;
        decodedStreamInfo.format = UYVY_FORMAT;

        result = msk_accept_stream(sink, &decodedStreamInfo);
    }

    if (!result)
    {
        /* shouldn't be here because a call to dv_connect_accept() should've returned false already */
        ml_log_error("Failed to create DV connector because format is not accepted\n");
        return 0;
    }


    if (!msk_register_stream(sink, sinkStreamId, &decodedStreamInfo))
    {
        /* could have failed if max streams exceeded for example */
        return 0;
    }


    CALLOC_ORET(newConnect, DVDecodeStreamConnect, 1);

    newConnect->useWorkerThread = useWorkerThread;
    newConnect->decodedFormat = decodedStreamInfo.format;

    if (streamInfo->format == DV25_YUV420_FORMAT || streamInfo->format == DV25_YUV411_FORMAT)
    {
        newConnect->dvDataSize = (stream_is_pal_frame_rate(streamInfo) ? 144000 : 120000);
    }
    else if (streamInfo->format == DV50_FORMAT)
    {
        newConnect->dvDataSize = (stream_is_pal_frame_rate(streamInfo) ? 288000 : 240000);
    }
    else /* streamInfo->format == DV100_FORMAT */
    {
        newConnect->dvDataSize = 576000;
    }
    if ((newConnect->dvData = (unsigned char*)calloc(
        newConnect->dvDataSize + FF_INPUT_BUFFER_PADDING_SIZE /* FFMPEG for some reason needs the extra space */,
        sizeof(unsigned char))) == NULL)
    {
        ml_log_error("Failed to allocate memory\n");
        goto fail;
    }

    newConnect->sink = sink;
    newConnect->sourceStreamId = sourceStreamId;
    newConnect->sinkStreamId = sinkStreamId;
    newConnect->streamInfo = *streamInfo;
    if (decodedStreamInfo.format == UYVY_FORMAT || decodedStreamInfo.format == YUV422_FORMAT)
    {
        newConnect->sinkBufferSize = streamInfo->width * streamInfo->height * 2;
    }
    else /* YUV420 / YUV411 */
    {
        newConnect->sinkBufferSize = streamInfo->width * streamInfo->height * 3 / 2;
    }

    newConnect->streamConnect.data = newConnect;
    newConnect->streamConnect.get_source_listener = ddc_get_source_listener;
    newConnect->streamConnect.sync = ddc_sync;
    newConnect->streamConnect.close = ddc_close;

    newConnect->sourceListener.data = newConnect;
    newConnect->sourceListener.accept_frame = ddc_accept_frame;
    newConnect->sourceListener.allocate_buffer = ddc_allocate_buffer;
    newConnect->sourceListener.deallocate_buffer = ddc_deallocate_buffer;
    newConnect->sourceListener.receive_frame = ddc_receive_frame;
    newConnect->sourceListener.receive_frame_const = ddc_receive_frame_const;



    /* create DV decoder */

    CHK_OFAIL(init_dv_decoder_resources());

    CHK_OFAIL(create_dv_decoder(streamInfo->format, streamInfo->width, streamInfo->height,
        numFFMPEGThreads, &newConnect->decoder));


    /* create worker thread */

    if (useWorkerThread)
    {
        CHK_OFAIL(init_mutex(&newConnect->workerMutex));
        CHK_OFAIL(init_cond_var(&newConnect->frameIsReadyCond));
        CHK_OFAIL(init_cond_var(&newConnect->workerIsBusyCond));

        CHK_OFAIL(create_joinable_thread(&newConnect->workerThreadId, worker_thread, newConnect));
    }


    *connect = &newConnect->streamConnect;
    return 1;

fail:
    ddc_close(newConnect);
    return 0;
}
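
Every constructor here relies on two helper macros: CALLOC_ORET allocates and zeroes a struct and returns 0 from the enclosing function if the allocation fails, while CHK_OFAIL jumps to the local fail: label when an expression evaluates to false. Their definitions are not included in these examples; they presumably expand to something close to this sketch.

/* Hedged sketch - the real macro definitions live in the player's utility
 * headers and may log differently. */
#define CALLOC_ORET(var, type, num)                                 \
    do {                                                            \
        if ((var = (type*)calloc(num, sizeof(type))) == NULL)       \
        {                                                           \
            ml_log_error("Failed to allocate memory\n");            \
            return 0;                                               \
        }                                                           \
    } while (0)

#define CHK_OFAIL(cmd)                                              \
    do {                                                            \
        if (!(cmd))                                                 \
        {                                                           \
            ml_log_error("'%s' failed\n", #cmd);                    \
            goto fail;                                              \
        }                                                           \
    } while (0)
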
Example #5
static int create_dv_decoder(StreamFormat format, int width, int height, int numFFMPEGThreads, DVDecoder** decoder)
{
    int decoderResourceRefCount = g_decoderResourceRefCount;
    int i;
    DVDecoder* newDecoder = NULL;
    int numDecoders = g_decoderResource.numDecoders;
    AVCodec* avDecoder = NULL;

    /* see if there is matching decoder not in use */
    if (decoderResourceRefCount > 0)
    {
        for (i = 0; i < numDecoders; i++)
        {
            if (!g_decoderResource.decoder[i]->inUse &&
                g_decoderResource.decoder[i]->format == format &&
                g_decoderResource.decoder[i]->width == width &&
                g_decoderResource.decoder[i]->height == height)
            {
                /* found one not in use */
                *decoder = g_decoderResource.decoder[i];
                g_decoderResource.decoder[i]->inUse = 1;
                return 1;
            }
        }
    }


    /* create a new one */

    CALLOC_ORET(newDecoder, DVDecoder, 1);

    newDecoder->inUse = 1;
    newDecoder->format = format;
    newDecoder->width = width;
    newDecoder->height = height;


    avDecoder = avcodec_find_decoder(CODEC_ID_DVVIDEO);
    if (!avDecoder)
    {
        ml_log_error("Could not find the DV decoder\n");
        goto fail;
    }

    newDecoder->dec = avcodec_alloc_context();
    if (!newDecoder->dec)
    {
        ml_log_error("Could not allocate DV decoder context\n");
        goto fail;
    }

    if (numFFMPEGThreads > 1)
    {
        avcodec_thread_init(newDecoder->dec, numFFMPEGThreads);
        newDecoder->isThreaded = 1;
    }


    avcodec_set_dimensions(newDecoder->dec, width, height);
    if (format == DV25_YUV420_FORMAT)
    {
        newDecoder->dec->pix_fmt = PIX_FMT_YUV420P;
    }
    else if (format == DV25_YUV411_FORMAT)
    {
        newDecoder->dec->pix_fmt = PIX_FMT_YUV411P;
    }
    else
    {
        newDecoder->dec->pix_fmt = PIX_FMT_YUV422P;
    }

    if (avcodec_open(newDecoder->dec, avDecoder) < 0)
    {
        ml_log_error("Could not open decoder\n");
        goto fail;
    }
    newDecoder->openedDecoder = 1;

    newDecoder->decFrame = avcodec_alloc_frame();
    if (!newDecoder->decFrame)
    {
        ml_log_error("Could not allocate decoded frame\n");
        goto fail;
    }


    /* add to static resources if they have been initialised */

    if (decoderResourceRefCount > 0)
    {
        if ((size_t)g_decoderResource.numDecoders >= sizeof(g_decoderResource.decoder) / sizeof(DVDecoder*))
        {
            /* more than x decoders? what are you doing? */
            ml_log_error("Number of DV decoders exceeded hard coded limit %d\n",
                sizeof(g_decoderResource.decoder) / sizeof(DVDecoder));
            goto fail;
        }

        PTHREAD_MUTEX_LOCK(&g_decoderResource.resourceMutex);
        g_decoderResource.decoder[g_decoderResource.numDecoders] = newDecoder;
        g_decoderResource.numDecoders++;
        PTHREAD_MUTEX_UNLOCK(&g_decoderResource.resourceMutex);
    }

    *decoder = newDecoder;
    return 1;

fail:
    free_dv_decoder(&newDecoder);
    return 0;
}
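
create_dv_decoder() hands out a pooled decoder when one with a matching format and dimensions is idle, so the matching release path presumably just clears the inUse flag instead of freeing the decoder while the shared resources are still referenced. The sketch below shows that counterpart under the assumption that it uses the same pool and mutex as above; the real function is not shown in these examples.

/* Hedged sketch of the release counterpart implied by the inUse flag;
 * the actual implementation may differ. */
static void release_dv_decoder(DVDecoder* decoder)
{
    int i;

    PTHREAD_MUTEX_LOCK(&g_decoderResource.resourceMutex);
    for (i = 0; i < g_decoderResource.numDecoders; i++)
    {
        if (g_decoderResource.decoder[i] == decoder)
        {
            g_decoderResource.decoder[i]->inUse = 0;   /* return it to the pool */
            break;
        }
    }
    PTHREAD_MUTEX_UNLOCK(&g_decoderResource.resourceMutex);
}
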
Example #6
// The multicast address is of the form 239.255.x.y:port
// where port by convention is:
//   2000 for channel 0
//   2001 for channel 1
//   ...
int udp_open(const char *address, MediaSource** source)
{
    UDPSource* newSource = NULL;
    int sourceId;

    // Extract address and port number from string e.g. "239.255.1.1:2000"
    char remote[FILENAME_MAX];
    strncpy(remote, address, sizeof(remote) - 1);
    remote[sizeof(remote) - 1] = '\0';  /* strncpy does not guarantee termination */
    char *p;
    int port;
    /* an IPv4 address is "addr:port"; an IPv6 address is "[addr]:port" */
    if ((p = strchr(remote, ']')) == NULL)
    {
        /* IPv4: look for the port separator */
        if ((p = strchr(remote, ':')) == NULL)
        {
            port = MULTICAST_DEFAULT_PORT;  /* no port given - default to port 2000 (channel 0) */
        }
        else
        {
            port = atol(p + 1);             /* extract the port */
            *p = '\0';                      /* terminate the address string */
        }
    }
    else
    {
        /* IPv6: copy everything after the '[' and look for a port after the ']' */
        const char *p_bracket = strchr(address, '[');
        strcpy(remote, p_bracket + 1);
        if ((p = strrchr(remote, ':')) == NULL || p < strchr(remote, ']'))
        {
            port = MULTICAST_DEFAULT_PORT;  /* no port given - default to port 2000 (channel 0) */
            *strchr(remote, ']') = '\0';    /* terminate the address string */
        }
        else
        {
            port = atol(p + 1);             /* extract the port */
            *(p - 1) = '\0';                /* terminate the address string before the ']' */
        }
    }
    // setup network socket
    int fd;
    printf("The UDP Address is: %s and the port is: %d \n",remote,port); // show address and port

    if ((fd = connect_to_multicast_address(remote, port)) == -1)
    {
        ml_log_error("Failed to connect to UDP address %s:%d\n", remote, port);
        return 0;
    }

    // Read video parameters from multicast stream
    IngexNetworkHeader header;
    if (udp_read_frame_header(fd, &header) == -1)
    {
        ml_log_error("Failed to read from UDP address %s:%d\n", remote, port);
        return 0;
    }

    /* TODO: handle varying NTSC audio frame sizes */
    if (header.framerate_numer != 25 || header.framerate_denom != 1)
    {
        ml_log_error("TODO: udp_source() only supports PAL frame rates\n");
        return 0;
    }


    CALLOC_ORET(newSource, UDPSource, 1);

    newSource->socket_fd = fd;
    newSource->frameRate.num = header.framerate_numer;
    newSource->frameRate.den = header.framerate_denom;

#ifndef MULTICAST_SINGLE_THREAD
    newSource->udp_reader.fd = fd;
    udp_init_reader(header.width, header.height, &newSource->udp_reader);
#endif

    newSource->prevLastFrame = -1;
    memset(newSource->sourceName, 0, MULTICAST_SOURCE_NAME_SIZE);

    char title[FILENAME_MAX];
    snprintf(title, sizeof(title), "%s from %s", header.source_name, address);


    /* FIXME: can this be done later? */
    /* allocate video and audio buffers for this source */
    newSource->video = (uint8_t *)malloc(header.width * header.height * 3/2);
    newSource->audio = (uint8_t *)malloc(header.audio_size);


    // setup media source
    newSource->mediaSource.data = newSource;
    newSource->mediaSource.get_num_streams = udp_get_num_streams;
    newSource->mediaSource.get_stream_info = udp_get_stream_info;
    newSource->mediaSource.set_frame_rate_or_disable = udp_set_frame_rate_or_disable;
    newSource->mediaSource.disable_stream = udp_disable_stream;
    newSource->mediaSource.disable_audio = udp_disable_audio;
    newSource->mediaSource.disable_video = udp_disable_video;
    newSource->mediaSource.stream_is_disabled = udp_stream_is_disabled;
    newSource->mediaSource.read_frame = udp_read_frame;
    newSource->mediaSource.is_seekable = udp_is_seekable;
    newSource->mediaSource.seek = udp_seek;
    newSource->mediaSource.get_length = udp_get_length;
    newSource->mediaSource.get_position = udp_get_position;
    newSource->mediaSource.get_available_length = udp_get_available_length;
    newSource->mediaSource.set_source_name = udp_set_source_name;
    newSource->mediaSource.set_clip_id = udp_set_clip_id;
    newSource->mediaSource.eof = udp_eof;
    newSource->mediaSource.close = udp_close;


    sourceId = msc_create_id();

    /* video track */
    CHK_OFAIL(initialise_stream_info(&newSource->tracks[newSource->numTracks].streamInfo));
    newSource->tracks[newSource->numTracks].streamInfo.type = PICTURE_STREAM_TYPE;
    newSource->tracks[newSource->numTracks].streamInfo.sourceId = sourceId;
    newSource->tracks[newSource->numTracks].streamInfo.frameRate = newSource->frameRate;
    newSource->tracks[newSource->numTracks].streamInfo.isHardFrameRate = 1;
    newSource->tracks[newSource->numTracks].streamInfo.width = header.width;
    newSource->tracks[newSource->numTracks].streamInfo.height = header.height;
    newSource->tracks[newSource->numTracks].streamInfo.aspectRatio.num = 4;
    newSource->tracks[newSource->numTracks].streamInfo.aspectRatio.den = 3;
    newSource->tracks[newSource->numTracks].frameSize = header.width * header.height * 3/2;
    newSource->tracks[newSource->numTracks].streamInfo.format = YUV420_FORMAT;
    CHK_OFAIL(add_known_source_info(&newSource->tracks[newSource->numTracks].streamInfo, SRC_INFO_TITLE, title));
    CHK_OFAIL(add_known_source_info(&newSource->tracks[newSource->numTracks].streamInfo, SRC_INFO_ORIGINAL_STREAM_FORMAT,
                                    get_stream_format_string(newSource->tracks[newSource->numTracks].streamInfo.format)));
    newSource->numTracks++;

    /* audio track 1 */
    CHK_OFAIL(initialise_stream_info(&newSource->tracks[newSource->numTracks].streamInfo));
    newSource->tracks[newSource->numTracks].streamInfo.type = SOUND_STREAM_TYPE;
    newSource->tracks[newSource->numTracks].streamInfo.format = PCM_FORMAT;
    newSource->tracks[newSource->numTracks].streamInfo.sourceId = sourceId;
    newSource->tracks[newSource->numTracks].streamInfo.frameRate = newSource->frameRate;
    newSource->tracks[newSource->numTracks].streamInfo.isHardFrameRate = 1;
    newSource->tracks[newSource->numTracks].streamInfo.samplingRate = g_profAudioSamplingRate;
    newSource->tracks[newSource->numTracks].streamInfo.numChannels = 1;
    newSource->tracks[newSource->numTracks].streamInfo.bitsPerSample = 16;
    CHK_OFAIL(add_known_source_info(&newSource->tracks[newSource->numTracks].streamInfo, SRC_INFO_TITLE, "UDP Audio 1"));
    CHK_OFAIL(add_known_source_info(&newSource->tracks[newSource->numTracks].streamInfo, SRC_INFO_ORIGINAL_STREAM_FORMAT,
                                    get_stream_format_string(newSource->tracks[newSource->numTracks].streamInfo.format)));
    newSource->tracks[newSource->numTracks].frameSize = 2 * 48000 / 25; // TODO: varying for NTSC frame rates
    newSource->numTracks++;

    /* audio track 2 */
    CHK_OFAIL(initialise_stream_info(&newSource->tracks[newSource->numTracks].streamInfo));
    newSource->tracks[newSource->numTracks].streamInfo.type = SOUND_STREAM_TYPE;
    newSource->tracks[newSource->numTracks].streamInfo.format = PCM_FORMAT;
    newSource->tracks[newSource->numTracks].streamInfo.sourceId = sourceId;
    newSource->tracks[newSource->numTracks].streamInfo.frameRate = newSource->frameRate;
    newSource->tracks[newSource->numTracks].streamInfo.isHardFrameRate = 1;
    newSource->tracks[newSource->numTracks].streamInfo.samplingRate = g_profAudioSamplingRate;
    newSource->tracks[newSource->numTracks].streamInfo.numChannels = 1;
    newSource->tracks[newSource->numTracks].streamInfo.bitsPerSample = 16;
    CHK_OFAIL(add_known_source_info(&newSource->tracks[newSource->numTracks].streamInfo, SRC_INFO_TITLE, "UDP Audio 2"));
    CHK_OFAIL(add_known_source_info(&newSource->tracks[newSource->numTracks].streamInfo, SRC_INFO_ORIGINAL_STREAM_FORMAT,
                                    get_stream_format_string(newSource->tracks[newSource->numTracks].streamInfo.format)));
    newSource->tracks[newSource->numTracks].frameSize = 2 * 48000 / 25; // TODO: varying for NTSC frame rates
    newSource->numTracks++;

    /* timecode track 1 */
    CHK_OFAIL(initialise_stream_info(&newSource->tracks[newSource->numTracks].streamInfo));
    newSource->tracks[newSource->numTracks].streamInfo.type = TIMECODE_STREAM_TYPE;
    newSource->tracks[newSource->numTracks].streamInfo.format = TIMECODE_FORMAT;
    newSource->tracks[newSource->numTracks].streamInfo.sourceId = sourceId;
    newSource->tracks[newSource->numTracks].streamInfo.frameRate = newSource->frameRate;
    newSource->tracks[newSource->numTracks].streamInfo.isHardFrameRate = 1;
    newSource->tracks[newSource->numTracks].streamInfo.timecodeType = SOURCE_TIMECODE_TYPE;
    newSource->tracks[newSource->numTracks].streamInfo.timecodeSubType = VITC_SOURCE_TIMECODE_SUBTYPE;
    CHK_OFAIL(add_known_source_info(&newSource->tracks[newSource->numTracks].streamInfo, SRC_INFO_TITLE, "UDP Timecode 1"));
    CHK_OFAIL(add_known_source_info(&newSource->tracks[newSource->numTracks].streamInfo, SRC_INFO_ORIGINAL_STREAM_FORMAT,
                                    get_stream_format_string(newSource->tracks[newSource->numTracks].streamInfo.format)));
    newSource->tracks[newSource->numTracks].frameSize = sizeof(Timecode);
    newSource->numTracks++;

    /* timecode track 2 */
    CHK_OFAIL(initialise_stream_info(&newSource->tracks[newSource->numTracks].streamInfo));
    newSource->tracks[newSource->numTracks].streamInfo.type = TIMECODE_STREAM_TYPE;
    newSource->tracks[newSource->numTracks].streamInfo.format = TIMECODE_FORMAT;
    newSource->tracks[newSource->numTracks].streamInfo.sourceId = sourceId;
    newSource->tracks[newSource->numTracks].streamInfo.frameRate = newSource->frameRate;
    newSource->tracks[newSource->numTracks].streamInfo.isHardFrameRate = 1;
    newSource->tracks[newSource->numTracks].streamInfo.timecodeType = SOURCE_TIMECODE_TYPE;
    newSource->tracks[newSource->numTracks].streamInfo.timecodeSubType = LTC_SOURCE_TIMECODE_SUBTYPE;
    CHK_OFAIL(add_known_source_info(&newSource->tracks[newSource->numTracks].streamInfo, SRC_INFO_TITLE, "UDP Timecode 2"));
    CHK_OFAIL(add_known_source_info(&newSource->tracks[newSource->numTracks].streamInfo, SRC_INFO_ORIGINAL_STREAM_FORMAT,
                                    get_stream_format_string(newSource->tracks[newSource->numTracks].streamInfo.format)));
    newSource->tracks[newSource->numTracks].frameSize = sizeof(Timecode);
    newSource->numTracks++;

    /* event track */
    CHK_OFAIL(initialise_stream_info(&newSource->tracks[newSource->numTracks].streamInfo));
    newSource->tracks[newSource->numTracks].streamInfo.type = EVENT_STREAM_TYPE;
    newSource->tracks[newSource->numTracks].streamInfo.format = SOURCE_EVENT_FORMAT;
    newSource->tracks[newSource->numTracks].streamInfo.sourceId = sourceId;
    newSource->tracks[newSource->numTracks].streamInfo.frameRate = newSource->frameRate;
    newSource->tracks[newSource->numTracks].streamInfo.isHardFrameRate = 1;
    CHK_OFAIL(add_known_source_info(&newSource->tracks[newSource->numTracks].streamInfo, SRC_INFO_TITLE, "UDP Event"));
    CHK_OFAIL(add_known_source_info(&newSource->tracks[newSource->numTracks].streamInfo, SRC_INFO_ORIGINAL_STREAM_FORMAT,
                                    get_stream_format_string(newSource->tracks[newSource->numTracks].streamInfo.format)));
    newSource->numTracks++;


    *source = &newSource->mediaSource;
    return 1;

fail:
    udp_close(newSource);
    return 0;
}
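
The comment above udp_open() documents the port convention: 2000 for channel 0, 2001 for channel 1, and so on. A caller therefore only needs the channel number to build the address string. The helper below is purely illustrative; its name and the fixed 239.255.1.1 group are assumptions taken from the example address in the comments.

/* Hypothetical helper, not part of the player API: builds the multicast
 * address for a channel using the "port = 2000 + channel" convention. */
static int open_channel(int channel, MediaSource** source)
{
    char address[64];

    snprintf(address, sizeof(address), "239.255.1.1:%d", 2000 + channel);
    return udp_open(address, source);
}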