Code Example #1
File: asfenc.c Project: cnh/FFmpeg
static int asf_write_packet(AVFormatContext *s, AVPacket *pkt)
{
    ASFContext *asf = s->priv_data;
    ASFStream *stream;
    AVCodecContext *codec;
    uint32_t packet_number;
    int64_t pts;
    int start_sec;
    int flags = pkt->flags;
    int ret;

    codec  = s->streams[pkt->stream_index]->codec;
    stream = &asf->streams[pkt->stream_index];

    if (codec->codec_type == AVMEDIA_TYPE_AUDIO)
        flags &= ~AV_PKT_FLAG_KEY;

    pts = (pkt->pts != AV_NOPTS_VALUE) ? pkt->pts : pkt->dts;
    av_assert0(pts != AV_NOPTS_VALUE);
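    /* scale pts to ASF's 100 ns time units (the ASF muxer timebase is 1 ms) */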
    pts *= 10000;
    asf->duration = FFMAX(asf->duration, pts + pkt->duration * 10000);

    packet_number = asf->nb_packets;
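    /* put_frame() may spread the payload over several ASF data packets;
     * the change in asf->nb_packets afterwards is what gets indexed below */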
    put_frame(s, stream, s->streams[pkt->stream_index],
              pkt->dts, pkt->data, pkt->size, flags);

    start_sec = (int)((PREROLL_TIME * 10000 + pts + ASF_INDEXED_INTERVAL - 1)
              / ASF_INDEXED_INTERVAL);

    /* check index */
    if ((!asf->is_streamed) && (flags & AV_PKT_FLAG_KEY)) {
        uint16_t packet_count = asf->nb_packets - packet_number;
        if ((ret = update_index(s, start_sec, packet_number, packet_count)) < 0)
            return ret;
    }
    asf->end_sec = start_sec;

    return 0;
}
Code Example #2
int main(int argc, char **argv)
{
    int stop_at = 0;
    char *yuv_path;
    if (argc == 2) {
        yuv_path = argv[1];
    } else if (argc == 3) {
        yuv_path = argv[1];
        stop_at = atoi(argv[2]);
    } else {
        printf("usage: %s input [max frames]\n", argv[0]);
        return -1;
    }

    printf("[test] initializing streams list\n");
    printf("[test] init_stream_list\n");
    stream_list_t *streams = init_stream_list();
    printf("[test] init_stream\n");
    stream_data_t *stream = init_stream(VIDEO, OUTPUT, 0, ACTIVE, "i2catrocks");
    printf("[test] set_stream_video_data\n");
    printf("[test] add_stream\n");
    add_stream(streams, stream);

    printf("[test] initializing transmitter\n");
    transmitter_t *transmitter = init_transmitter(streams, 20.0);
    start_transmitter(transmitter);

    rtsp_serv_t *server;
    server = malloc(sizeof(rtsp_serv_t));

    server->port = 8554;
    server->streams = streams;
    server->transmitter = transmitter;

    init_encoder(stream->video);

    c_start_server(server);
    c_update_server(server);

    // set up libav and load the input video
    AVFormatContext *pformat_ctx = avformat_alloc_context();
    AVCodecContext codec_ctx;
    int video_stream = -1;
    av_register_all();

    int width = 1280;
    int height = 534;

    load_video(yuv_path, pformat_ctx, &codec_ctx, &video_stream);

    uint8_t *b1 = (uint8_t *)av_malloc(avpicture_get_size(codec_ctx.pix_fmt,
                        codec_ctx.width, codec_ctx.height)*sizeof(uint8_t));
    
    int counter = 0;

    struct timeval a, b;
    video_data_frame_t *decoded_frame;

    // main loop: read one raw frame per iteration and queue it for the
    // transmitter with put_frame()
    while (1) {
    
        gettimeofday(&a, NULL);
        
        int ret = read_frame(pformat_ctx, video_stream, &codec_ctx, b1);
        if (stop_at > 0 && counter == stop_at) {
            break;
        }

        if (ret == 0) {
            counter++;
            
            // grab the next free slot in the decoded-frame queue;
            // if none is available, drop this frame
            decoded_frame = curr_in_frame(stream->video->decoded_frames);
            if (decoded_frame == NULL) {
                continue;
            }

            // copy the raw frame into the slot and publish it with put_frame()
            decoded_frame->buffer_len = vc_get_linesize(width, RGB)*height;
            memcpy(decoded_frame->buffer, b1, decoded_frame->buffer_len);

            put_frame(stream->video->decoded_frames);
        } else {
            break;
        }
        gettimeofday(&b, NULL);
        long diff = (b.tv_sec - a.tv_sec)*1000000 + b.tv_usec - a.tv_usec;

        // pace the loop to roughly 25 fps (40 ms per frame)
        if (diff < 40000) {
            usleep(40000 - diff);
        }
    }
    debug_msg(" deallocating resources and terminating threads\n");
    av_free(pformat_ctx);
    av_free(b1);
    debug_msg(" done!\n");

    stop_transmitter(transmitter);

    destroy_stream_list(streams);

    return 0;
}
Code Example #3
File: wcX.c Project: aji/nursery
int main(int argc, char *argv[]) {
	ModularContext mctx;
	Module *master;
	Module *output;
	int timer;

	srand(0);

	mctx.rate = SAMPLE_RATE;

	ModularInitialize(&mctx);
	master = ModularMaster(&mctx);
	output = ModularOutput(&mctx);

	Module *osc = NewModule(&mctx, &ModOscillator);
	Module *env = NewModule(&mctx, &ModADSR);

	OscillatorGet(osc)->waveform = OscBandlimitedSaw;

	*ADSRGet(env)->A = 0.006;
	*ADSRGet(env)->D = 0.700;
	*ADSRGet(env)->S = 0.000;
	*ADSRGet(env)->R = 0.100;
	*ADSRGet(env)->trig = 0.0;

	// the envelope output modulates the oscillator gain
	OscillatorGet(osc)->gain = env->out;
	AddDependency(&mctx, osc, env);

	Module *filt = NewModule(&mctx, &ModFilter);
	FilterSetInput(&mctx, filt, osc);

	Module *env2filt = NewModule(&mctx, &ModMatrix);
	MatrixSetInput(&mctx, env2filt, env);
	MatrixScale(env2filt, 0.0, 1.0, 10.0, 10000.0);
	FilterGet(filt)->cutoff = env2filt->out;
	AddDependency(&mctx, filt, env2filt);

	Module *env2reso = NewModule(&mctx, &ModMatrix);
	MatrixSetInput(&mctx, env2reso, env);
	MatrixScale(env2reso, 0.0, 1.0, 0.1, 2.0);
	FilterGet(filt)->reso = env2reso->out;
	AddDependency(&mctx, filt, env2reso);

	MixerSlot *slot = MixerAddSlot(&mctx, master, filt, 0.3, 0.0);

	KeyController *kc = NewMonoSynth();
	OscillatorGet(osc)->freq  = MonoSynthGet(kc)->freq;
	ADSRGet(env)->trig        = MonoSynthGet(kc)->trig;


	Module *bassosc = NewModule(&mctx, &ModOscillator);
	Module *bassenv = NewModule(&mctx, &ModADSR);

	OscillatorGet(bassosc)->waveform = OscBandlimitedSaw;

	*ADSRGet(bassenv)->A = 0.006;
	*ADSRGet(bassenv)->D = 0.200;
	*ADSRGet(bassenv)->S = 0.000;
	*ADSRGet(bassenv)->R = 0.100;
	*ADSRGet(bassenv)->trig = 0.0;

	//OscillatorGet(bassosc)->gain = bassenv->out;
	AddDependency(&mctx, bassosc, bassenv);

	Module *bassfilt = NewModule(&mctx, &ModFilter);
	FilterSetInput(&mctx, bassfilt, bassosc);

	Module *bassenv2filt = NewModule(&mctx, &ModMatrix);
	MatrixSetInput(&mctx, bassenv2filt, bassenv);
	MatrixScale(bassenv2filt, 0.0, 1.0, 50.0, 5000.0);
	FilterGet(bassfilt)->cutoff = bassenv2filt->out;
	AddDependency(&mctx, bassfilt, bassenv2filt);
	*FilterGet(bassfilt)->reso = 0.4;

	MixerSlot *bassslot = MixerAddSlot(&mctx, master, bassfilt, 0.3, 0.0);

	KeyController *bkc = NewMonoSynth();
	OscillatorGet(bassosc)->freq  = MonoSynthGet(bkc)->freq;
	ADSRGet(bassenv)->trig        = MonoSynthGet(bkc)->trig;


	for (timer=0; ; timer++) {
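		// render one sample per iteration: step the module graph, advance the
		// sequencers by sample count, and emit one stereo frame via put_frame()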
		ModularStep(&mctx);

		if (timer % 96000 == 84000)
			seq = seqs[((timer / 96000) % 2)];

		if (timer % 12000 == 0)
			KeyControllerKeyDown(kc, seq[(timer / 12000) % 4] - 12, 64);
		if (timer % 24000 == 22000)
			KeyControllerKeyUp(kc, seq[(timer / 12000) % 4] - 12);
		KeyControllerUpdate(kc);

		if (timer % 12000 == 0)
			KeyControllerKeyDown(bkc, bass[(timer / 12000) % 16], 64);
		if (timer % 12000 == 11000)
			KeyControllerKeyUp(bkc, bass[(timer / 12000) % 16]);
		KeyControllerUpdate(bkc);

		put_frame(output->out[0] * OUTPUT_SCALE,
		          output->out[1] * OUTPUT_SCALE);
	}

	return 0;
}
Code Example #4
File: link.cpp Project: auberj/ComputerNetworkB1
int SendPacket(char dest, char* Spacket) {
    /*
    Packet length = 122  => max 6 frames
    split packet into frames, send frame, await acknowledgement  
    */
    put_string("\r\nstring passed to DLL: ");
    put_string(Spacket);
    struct frame data[FRAMECOUNT];
    int no_frames;
    no_frames = makeframe(&data, dest, Spacket, 0);
    
    put_string("\r\n\nnumber of frames generated: ");
    put_number(no_frames);

    uint8_t *bufptr;
    char temp[50];

    int i,k, send_complete;
    struct frame ack;
    unsigned long time;

    for(i = 0; i < no_frames; i++) {
        send_complete = 0;
        if((uint8_t)dest != (uint8_t)BROADCAST) {
            put_string("\r\nSend message and await acknowledgement: ");
            while(!send_complete) {
                ///////////////////send//////////////////////
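                // put_frame() dumps the frame contents for debugging,
                // alongside put_string()/put_number()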
                put_frame(data[i]);

                SendFrame(data[i].frame);


                time = millis() + 1000;   // 1 s acknowledgement window

                while (millis() < time) {
                    ///////////check for acknowledgemt/////////////////
                    //put_char(':');
                    if(ReceiveFrame()) {
                        //send_complete = 1;
                        ReceiveFrame(temp);
                        // bufptr = rfm12_rx_buffer();
                        // for(k = 0; k < (rfm12_rx_len()); k++) {
                        //     temp[k] = bufptr[k];
                        // }
                        // temp[rfm12_rx_len()] = '\0';
                        // rfm12_rx_clear();
                        // put_string("\r\nRECEIVED: ");
                        // put_string(temp);
                        // put_string("\r\n\r\n");
                        ////////////////check if acknowledgemnt valid////////////////
                        int check_ack = decode_frame(&ack, temp);
                        if((check_ack & (1<<1)) && !(check_ack & 1<<2)) {
                            //if(strcmp(ack.data, data[i].data)) {
                            put_string("\r\nSend Complete!\r\n");
                            send_complete = 1;
                            break;  
                            //}
                        }
                     }
                }
                if(!send_complete) {
                    put_string("\r\ntimeout\r\n");
                }

            }
        }
        else {
            put_frame(data[i]);
            put_string("\n\rBROADCAST frame now : ");
            SendFrame(data[i].frame);

            _delay_ms(400);
        }
    }

    /*
    for frame in frames:
        send_complete = 0;
        while not send complete:
            send frame
            start timer
            while timer:
                if acknowledgement:
                    send_complete = 1;
    */

    return 0;
}
Code Example #5
File: link.cpp Project: auberj/ComputerNetworkB1
int RecievePacket(char* Rpacket) {
    /*
    see http://www.hansinator.de/rfm12lib/ for rfm12b libray details
    receive frame, send acknowledgement
    if received a frame:
        de-bytestuff
        check crc
        if recipient:
            acknowledge
            pass to network
    */
    int i = 0;
    put_string("\r\n>>>Begin receive packet: \r\n");
    if (ReceiveFrame()) { //Status complete 1 - passes this section
        put_char('.');
        uint8_t* bufptr;
        char Rframe[50], ackstr[50];
        struct frame ack, decode;
        struct frame Nrframe[FRAMECOUNT];
        struct frame ackarr[FRAMECOUNT];
        int Received_Final_frame = 0;
        unsigned long timeout = millis() + 20000;
        while(!Received_Final_frame && (millis() < timeout)){ //never passes this while statement
                                                                //Also maybe add RFM12B tick??
            //int Rframe_len;
            put_string("Trying to receive data");
            if (ReceiveFrame()) { //Status complete 2 - why?

                ReceiveFrame(Rframe);
                // bufptr = rfm12_rx_buffer();
                // put_string("Raw data received: ");
                // put_string((char*)bufptr);
                // for(uint8_t k = 0; k < (rfm12_rx_len()); k++) {
                //     Rframe[k] = bufptr[k];
                // }
                // Rframe[rfm12_rx_len()] = '\0';
                // rfm12_rx_clear();
                put_string("\r\nRframe: ");
                put_string(Rframe);
                //strcpy(ackstr, Rframe);
                decode = Nrframe[i];
                int Rframe_check = decode_frame(&decode, Rframe);
                Nrframe[i] = decode;
                put_string("\r\nReceived Data: ");
                put_frame(Nrframe[i]);

                if(Rframe_check & (1<<1)) {
                    if(Rframe_check & 1<<4) {
                        Received_Final_frame = 1;
                    }
                    put_string("\r\nFrame Number; ");
                    put_number(i);
                    /*
                    frame received, frame for me
                    acknowledge
                    */
                    if(!(Rframe_check & 1<<5)) {
                        put_string("\n\r>>>>START MAKE AND SEND ACKNOWLEDGEMENT\r\n");
                        ackarr[0] = Nrframe[i];

                        int framecount = makeframe(&ackarr, Nrframe[i].address[0], Nrframe[i].data, 1, 1);
                        put_string("\r\nNumber of acknowledgement frames generated: ");
                        put_number(framecount);
                        put_string("\r\nacknowledgement: ");
                        put_frame(ackarr[0]);
                        //_delay_ms(100);
                        SendFrame(ackarr[0].frame);
                        // rfm12_tx(strlen(ackarr[0].frame), 0, (uint8_t*)ackarr[0].frame);
                        // for (uint8_t j = 0; j < 100; j++)   
                        // {   
                        //     //put_string(". ");
                        //     rfm12_tick();   
                        //     _delay_us(500); 
                        // }
                    }
                    i++;
                    timeout = millis() + 20000;
                    
                }
                else if(!Rframe_check) {
                    // decode_frame() returned 0: ignore this frame and keep listening
                }
                else {
                    break;
                }

            }
        }
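        // reassemble whatever frames were received by concatenating the
        // per-frame payloads in order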
        if(i && i < FRAMECOUNT) {
            put_string("\r\nPacketComplete\n\r");
            strcpy(Rpacket, Nrframe[0].data);
            for(int l = 1; l<i; l++) {
                strcat(Rpacket, Nrframe[l].data);
            }
            strcat(Rpacket, "\0");
            put_string("\r\nReceived packet: ");
            put_string(Rpacket);
        }
    }
    put_string("\r\n>>>End ReceivePacket\r\n");
    return i;

}
Code Example #6
File: commercials.C Project: TravisKraatz/cinelerra
int Commercials::
put_clip(File *file, int track, double position, double length)
{
	if( file->asset->format != FILE_MPEG ) return -1;
	double framerate;  int pid, width, height;  char title[BCTEXTLEN];
	if( file->get_video_info(track, pid, framerate,
					width, height, title) ) return -1;
	if( file->set_layer(track) ) return -1;
	int64_t pos = position * framerate;
	if( file->set_video_position(pos, 0) ) return 1;
	time_t ct;  time(&ct);
	int64_t creation_time = (int64_t)ct, system_time;
	if( file->get_system_time(system_time) ) system_time = 0;
	int frames = length * framerate;
	int prefix_size = 2*framerate, length2 = frames/2;
	if( prefix_size > length2 ) prefix_size = length2;
	int suffix_size = prefix_size;

	if( mdb->new_clip_set(title, file->asset->path, position,
		framerate, frames, prefix_size, suffix_size,
		creation_time, system_time) ) return 1;

	clip_id = mdb->clip_id();
	cancelled = 0;
	scan_status = new ScanStatus(this, 30, 30, 1, 1,
		cancelled, "Cutting Ads");
	scan_status->update_length(0, frames);
	scan_status->update_position(0, 0);
	update_cut_info(track+1, position);

	clip_weights = mdb->clip_weights();
	frame_period = 1. / framerate;
	VFrame frame(width, height, BC_YUV420P);

	int i = 0, n = 0, result = 0;
	// first 2 secs of frame data and weights
	while( i < prefix_size && !result ) {
		if( (result=file->read_frame(&frame)) != 0 ) break;
		if( (result=put_frame(&frame, n++, 1, i/framerate)) != 0 ) break;
		if( (result=put_weight(&frame, i)) != 0 ) break;
		result = scan_status->update_position(0, ++i);
	}
	int suffix_start = frames - suffix_size;
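	// middle of the clip: weights only, no frame data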
	while( i < suffix_start && !result ) {
		if( (result=file->read_frame(&frame)) != 0 ) break;
		if( (result=put_weight(&frame, i)) != 0 ) break;
		result = scan_status->update_position(0, ++i);
		++n;
	}
	// last 2 secs of frame data and weights
	while( i < frames && !result ) {
		if( (result=file->read_frame(&frame)) != 0 ) break;
		if( (result=put_frame(&frame, n++, 2, i/framerate)) != 0 ) break;
		if( (result=put_weight(&frame, i)) != 0 ) break;
		result = scan_status->update_position(0, ++i);
	}

	double wt = 0;
	for( i=0; i<frames; ++i ) wt += clip_weights[i];
	mdb->clip_average_weight(wt/frames);

	delete scan_status;
	scan_status = 0;
	return result;
}