Example no. 1
0
/* Initialize the RTP muxer: check that there is exactly one, supported
 * stream, pick the RTP payload type, seed timestamps and SSRC, allocate the
 * packet buffer and set up per-codec packetization parameters. */
static int rtp_write_header(AVFormatContext *s1)
{
    RTPMuxContext *s = s1->priv_data;
    int max_packet_size, n;
    AVStream *st;

    if (s1->nb_streams != 1)
        return -1;
    st = s1->streams[0];
    if (!is_supported(st->codec->codec_id)) {
        av_log(s1, AV_LOG_ERROR, "Unsupported codec %x\n", st->codec->codec_id);

        return -1;
    }

    s->payload_type = ff_rtp_get_payload_type(st->codec);
    if (s->payload_type < 0)
        s->payload_type = RTP_PT_PRIVATE + (st->codec->codec_type == AVMEDIA_TYPE_AUDIO);

    s->base_timestamp = av_get_random_seed();
    s->timestamp = s->base_timestamp;
    s->cur_timestamp = 0;
    s->ssrc = av_get_random_seed();
    s->first_packet = 1;
    s->first_rtcp_ntp_time = ff_ntp_time();
    if (s1->start_time_realtime)
        /* Round the NTP time to whole milliseconds. */
        s->first_rtcp_ntp_time = (s1->start_time_realtime / 1000) * 1000 +
                                 NTP_OFFSET_US;

    max_packet_size = url_fget_max_packet_size(s1->pb);
    if (max_packet_size <= 12)
        return AVERROR(EIO);
    s->buf = av_malloc(max_packet_size);
    if (s->buf == NULL) {
        return AVERROR(ENOMEM);
    }
    s->max_payload_size = max_packet_size - 12;

    s->max_frames_per_packet = 0;
    if (s1->max_delay) {
        if (st->codec->codec_type == AVMEDIA_TYPE_AUDIO) {
            if (st->codec->frame_size == 0) {
                av_log(s1, AV_LOG_ERROR, "Cannot respect max delay: frame size = 0\n");
            } else {
                s->max_frames_per_packet = av_rescale_rnd(s1->max_delay, st->codec->sample_rate, AV_TIME_BASE * st->codec->frame_size, AV_ROUND_DOWN);
            }
        }
        if (st->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            /* FIXME: We should round down here... */
            s->max_frames_per_packet = av_rescale_q(s1->max_delay, (AVRational){1, 1000000}, st->codec->time_base);
        }
    }

    av_set_pts_info(st, 32, 1, 90000);
    switch(st->codec->codec_id) {
    case CODEC_ID_MP2:
    case CODEC_ID_MP3:
        s->buf_ptr = s->buf + 4;
        break;
    case CODEC_ID_MPEG1VIDEO:
    case CODEC_ID_MPEG2VIDEO:
        break;
    case CODEC_ID_MPEG2TS:
        n = s->max_payload_size / TS_PACKET_SIZE;
        if (n < 1)
            n = 1;
        s->max_payload_size = n * TS_PACKET_SIZE;
        s->buf_ptr = s->buf;
        break;
    case CODEC_ID_H264:
        /* check for H.264 MP4 syntax */
        if (st->codec->extradata_size > 4 && st->codec->extradata[0] == 1) {
            s->nal_length_size = (st->codec->extradata[4] & 0x03) + 1;
        }
        break;
    case CODEC_ID_AMR_NB:
    case CODEC_ID_AMR_WB:
        if (!s->max_frames_per_packet)
            s->max_frames_per_packet = 12;
        if (st->codec->codec_id == CODEC_ID_AMR_NB)
            n = 31;
        else
            n = 61;
        /* max_header_toc_size + the largest AMR payload must fit */
        if (1 + s->max_frames_per_packet + n > s->max_payload_size) {
            av_log(s1, AV_LOG_ERROR, "RTP max payload size too small for AMR\n");
            return -1;
        }
        if (st->codec->channels != 1) {
            av_log(s1, AV_LOG_ERROR, "Only mono is supported\n");
            return -1;
        }
    case CODEC_ID_AAC:
        s->num_frames = 0;
    default:
        if (st->codec->codec_type == AVMEDIA_TYPE_AUDIO) {
            av_set_pts_info(st, 32, 1, st->codec->sample_rate);
        }
        s->buf_ptr = s->buf;
        break;
    }

    return 0;
}
Example no. 2
0
static int rtp_write_header(AVFormatContext *s1)
{
    RTPMuxContext *s = s1->priv_data;
    int n;
    AVStream *st;

    if (s1->nb_streams != 1) {
        av_log(s1, AV_LOG_ERROR, "Only one stream supported in the RTP muxer\n");
        return AVERROR(EINVAL);
    }
    st = s1->streams[0];
    if (!is_supported(st->codec->codec_id)) {
        av_log(s1, AV_LOG_ERROR, "Unsupported codec %x\n", st->codec->codec_id);

        return -1;
    }

    if (s->payload_type < 0) {
        /* Re-validate non-dynamic payload types */
        if (st->id < RTP_PT_PRIVATE)
            st->id = ff_rtp_get_payload_type(s1, st->codec, -1);

        s->payload_type = st->id;
    } else {
        /* private option takes priority */
        st->id = s->payload_type;
    }

    s->base_timestamp = av_get_random_seed();
    s->timestamp = s->base_timestamp;
    s->cur_timestamp = 0;
    if (!s->ssrc)
        s->ssrc = av_get_random_seed();
    s->first_packet = 1;
    s->first_rtcp_ntp_time = ff_ntp_time();
    if (s1->start_time_realtime)
        /* Round the NTP time to whole milliseconds. */
        s->first_rtcp_ntp_time = (s1->start_time_realtime / 1000) * 1000 +
                                 NTP_OFFSET_US;
    // Pick a random sequence start number, but in the lower end of the
    // available range, so that any wraparound doesn't happen immediately.
    // (Immediate wraparound would be an issue for SRTP.)
    if (s->seq < 0)
        s->seq = av_get_random_seed() & 0x0fff;
    else
        s->seq &= 0xffff; // Use the given parameter, wrapped to the right interval

    if (s1->packet_size) {
        if (s1->pb->max_packet_size)
            s1->packet_size = FFMIN(s1->packet_size,
                                    s1->pb->max_packet_size);
    } else
        s1->packet_size = s1->pb->max_packet_size;
    if (s1->packet_size <= 12) {
        av_log(s1, AV_LOG_ERROR, "Max packet size %d too low\n", s1->packet_size);
        return AVERROR(EIO);
    }
    s->buf = av_malloc(s1->packet_size);
    if (s->buf == NULL) {
        return AVERROR(ENOMEM);
    }
    s->max_payload_size = s1->packet_size - 12;

    s->max_frames_per_packet = 0;
    if (s1->max_delay > 0) {
        if (st->codec->codec_type == AVMEDIA_TYPE_AUDIO) {
            int frame_size = av_get_audio_frame_duration(st->codec, 0);
            if (!frame_size)
                frame_size = st->codec->frame_size;
            if (frame_size == 0) {
                av_log(s1, AV_LOG_ERROR, "Cannot respect max delay: frame size = 0\n");
            } else {
                s->max_frames_per_packet =
                        av_rescale_q_rnd(s1->max_delay,
                                         AV_TIME_BASE_Q,
                                         (AVRational){ frame_size, st->codec->sample_rate },
                                         AV_ROUND_DOWN);
            }
        }
        if (st->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            /* FIXME: We should round down here... */
            s->max_frames_per_packet = av_rescale_q(s1->max_delay, (AVRational){1, 1000000}, st->codec->time_base);
        }
    }

    avpriv_set_pts_info(st, 32, 1, 90000);
    switch(st->codec->codec_id) {
    case AV_CODEC_ID_MP2:
    case AV_CODEC_ID_MP3:
        s->buf_ptr = s->buf + 4;
        break;
    case AV_CODEC_ID_MPEG1VIDEO:
    case AV_CODEC_ID_MPEG2VIDEO:
        break;
    case AV_CODEC_ID_MPEG2TS:
        n = s->max_payload_size / TS_PACKET_SIZE;
        if (n < 1)
            n = 1;
        s->max_payload_size = n * TS_PACKET_SIZE;
        s->buf_ptr = s->buf;
        break;
    case AV_CODEC_ID_H264:
        /* check for H.264 MP4 syntax */
        if (st->codec->extradata_size > 4 && st->codec->extradata[0] == 1) {
            s->nal_length_size = (st->codec->extradata[4] & 0x03) + 1;
        }
        break;
    case AV_CODEC_ID_VORBIS:
    case AV_CODEC_ID_THEORA:
        if (!s->max_frames_per_packet) s->max_frames_per_packet = 15;
        s->max_frames_per_packet = av_clip(s->max_frames_per_packet, 1, 15);
        s->max_payload_size -= 6; // ident+frag+tdt/vdt+pkt_num+pkt_length
        s->num_frames = 0;
        goto defaultcase;
    case AV_CODEC_ID_ADPCM_G722:
        /* Due to a historical error, the clock rate for G722 in RTP is
         * 8000, even if the sample rate is 16000. See RFC 3551. */
        avpriv_set_pts_info(st, 32, 1, 8000);
        break;
    case AV_CODEC_ID_OPUS:
        if (st->codec->channels > 2) {
            av_log(s1, AV_LOG_ERROR, "Multistream opus not supported in RTP\n");
            goto fail;
        }
        /* The opus RTP RFC says that all opus streams should use 48000 Hz
         * as clock rate, since all opus sample rates can be expressed in
         * this clock rate, and sample rate changes on the fly are supported. */
        avpriv_set_pts_info(st, 32, 1, 48000);
        break;
    case AV_CODEC_ID_ILBC:
        if (st->codec->block_align != 38 && st->codec->block_align != 50) {
            av_log(s1, AV_LOG_ERROR, "Incorrect iLBC block size specified\n");
            goto fail;
        }
        if (!s->max_frames_per_packet)
            s->max_frames_per_packet = 1;
        s->max_frames_per_packet = FFMIN(s->max_frames_per_packet,
                                         s->max_payload_size / st->codec->block_align);
        goto defaultcase;
    case AV_CODEC_ID_AMR_NB:
    case AV_CODEC_ID_AMR_WB:
        if (!s->max_frames_per_packet)
            s->max_frames_per_packet = 12;
        if (st->codec->codec_id == AV_CODEC_ID_AMR_NB)
            n = 31;
        else
            n = 61;
        /* max_header_toc_size + the largest AMR payload must fit */
        if (1 + s->max_frames_per_packet + n > s->max_payload_size) {
            av_log(s1, AV_LOG_ERROR, "RTP max payload size too small for AMR\n");
            goto fail;
        }
        if (st->codec->channels != 1) {
            av_log(s1, AV_LOG_ERROR, "Only mono is supported\n");
            goto fail;
        }
    case AV_CODEC_ID_AAC:
        s->num_frames = 0;
    default:
defaultcase:
        if (st->codec->codec_type == AVMEDIA_TYPE_AUDIO) {
            avpriv_set_pts_info(st, 32, 1, st->codec->sample_rate);
        }
        s->buf_ptr = s->buf;
        break;
    }

    return 0;

fail:
    av_freep(&s->buf);
    return AVERROR(EINVAL);
}
Example no. 3
0
static int rtp_write_header(AVFormatContext *s1)
{
    RTPMuxContext *s = s1->priv_data;
    int n, ret = AVERROR(EINVAL);
    AVStream *st;

    if (s1->nb_streams != 1) {
        av_log(s1, AV_LOG_ERROR, "Only one stream supported in the RTP muxer\n");
        return AVERROR(EINVAL);
    }
    st = s1->streams[0];
    if (!is_supported(st->codecpar->codec_id)) {
        av_log(s1, AV_LOG_ERROR, "Unsupported codec %s\n", avcodec_get_name(st->codecpar->codec_id));

        return -1;
    }

    if (s->payload_type < 0) {
        /* Re-validate non-dynamic payload types */
        if (st->id < RTP_PT_PRIVATE)
            st->id = ff_rtp_get_payload_type(s1, st->codecpar, -1);

        s->payload_type = st->id;
    } else {
        /* private option takes priority */
        st->id = s->payload_type;
    }

    s->base_timestamp = av_get_random_seed();
    s->timestamp = s->base_timestamp;
    s->cur_timestamp = 0;
    if (!s->ssrc)
        s->ssrc = av_get_random_seed();
    s->first_packet = 1;
    s->first_rtcp_ntp_time = ff_ntp_time();
    if (s1->start_time_realtime != 0  &&  s1->start_time_realtime != AV_NOPTS_VALUE)
        /* Round the NTP time to whole milliseconds. */
        s->first_rtcp_ntp_time = (s1->start_time_realtime / 1000) * 1000 +
                                 NTP_OFFSET_US;
    // Pick a random sequence start number, but in the lower end of the
    // available range, so that any wraparound doesn't happen immediately.
    // (Immediate wraparound would be an issue for SRTP.)
    if (s->seq < 0) {
        if (s1->flags & AVFMT_FLAG_BITEXACT) {
            s->seq = 0;
        } else
            s->seq = av_get_random_seed() & 0x0fff;
    } else
        s->seq &= 0xffff; // Use the given parameter, wrapped to the right interval

    if (s1->packet_size) {
        if (s1->pb->max_packet_size)
            s1->packet_size = FFMIN(s1->packet_size,
                                    s1->pb->max_packet_size);
    } else
        s1->packet_size = s1->pb->max_packet_size;
    if (s1->packet_size <= 12) {
        av_log(s1, AV_LOG_ERROR, "Max packet size %d too low\n", s1->packet_size);
        return AVERROR(EIO);
    }
    s->buf = av_malloc(s1->packet_size);
    if (!s->buf) {
        return AVERROR(ENOMEM);
    }
    s->max_payload_size = s1->packet_size - 12;

    if (st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
        avpriv_set_pts_info(st, 32, 1, st->codecpar->sample_rate);
    } else {
        avpriv_set_pts_info(st, 32, 1, 90000);
    }
    s->buf_ptr = s->buf;
    switch(st->codecpar->codec_id) {
    case AV_CODEC_ID_MP2:
    case AV_CODEC_ID_MP3:
        s->buf_ptr = s->buf + 4;
        avpriv_set_pts_info(st, 32, 1, 90000);
        break;
    case AV_CODEC_ID_MPEG1VIDEO:
    case AV_CODEC_ID_MPEG2VIDEO:
        break;
    case AV_CODEC_ID_MPEG2TS:
        n = s->max_payload_size / TS_PACKET_SIZE;
        if (n < 1)
            n = 1;
        s->max_payload_size = n * TS_PACKET_SIZE;
        break;
    case AV_CODEC_ID_H261:
        if (s1->strict_std_compliance > FF_COMPLIANCE_EXPERIMENTAL) {
            av_log(s, AV_LOG_ERROR,
                   "Packetizing H261 is experimental and produces incorrect "
                   "packetization for cases where GOBs don't fit into packets "
                   "(even though most receivers may handle it just fine). "
                   "Please set -f_strict experimental in order to enable it.\n");
            ret = AVERROR_EXPERIMENTAL;
            goto fail;
        }
        break;
    case AV_CODEC_ID_H264:
        /* check for H.264 MP4 syntax */
        if (st->codecpar->extradata_size > 4 && st->codecpar->extradata[0] == 1) {
            s->nal_length_size = (st->codecpar->extradata[4] & 0x03) + 1;
        }
        break;
    case AV_CODEC_ID_HEVC:
        /* Only check for the standardized hvcC version of extradata, keeping
         * things simple and similar to the avcC/H264 case above, instead
         * of trying to handle the pre-standardization versions (as in
         * libavcodec/hevc.c). */
        if (st->codecpar->extradata_size > 21 && st->codecpar->extradata[0] == 1) {
            s->nal_length_size = (st->codecpar->extradata[21] & 0x03) + 1;
        }
        break;
    case AV_CODEC_ID_VORBIS:
    case AV_CODEC_ID_THEORA:
        s->max_frames_per_packet = 15;
        break;
    case AV_CODEC_ID_ADPCM_G722:
        /* Due to a historical error, the clock rate for G722 in RTP is
         * 8000, even if the sample rate is 16000. See RFC 3551. */
        avpriv_set_pts_info(st, 32, 1, 8000);
        break;
    case AV_CODEC_ID_OPUS:
        if (st->codecpar->channels > 2) {
            av_log(s1, AV_LOG_ERROR, "Multistream opus not supported in RTP\n");
            goto fail;
        }
        /* The opus RTP RFC says that all opus streams should use 48000 Hz
         * as clock rate, since all opus sample rates can be expressed in
         * this clock rate, and sample rate changes on the fly are supported. */
        avpriv_set_pts_info(st, 32, 1, 48000);
        break;
    case AV_CODEC_ID_ILBC:
        if (st->codecpar->block_align != 38 && st->codecpar->block_align != 50) {
            av_log(s1, AV_LOG_ERROR, "Incorrect iLBC block size specified\n");
            goto fail;
        }
        s->max_frames_per_packet = s->max_payload_size / st->codecpar->block_align;
        break;
    case AV_CODEC_ID_AMR_NB:
    case AV_CODEC_ID_AMR_WB:
        s->max_frames_per_packet = 50;
        if (st->codecpar->codec_id == AV_CODEC_ID_AMR_NB)
            n = 31;
        else
            n = 61;
        /* max_header_toc_size + the largest AMR payload must fit */
        if (1 + s->max_frames_per_packet + n > s->max_payload_size) {
            av_log(s1, AV_LOG_ERROR, "RTP max payload size too small for AMR\n");
            goto fail;
        }
        if (st->codecpar->channels != 1) {
            av_log(s1, AV_LOG_ERROR, "Only mono is supported\n");
            goto fail;
        }
        break;
    case AV_CODEC_ID_AAC:
        s->max_frames_per_packet = 50;
        break;
    default:
        break;
    }

    return 0;

fail:
    av_freep(&s->buf);
    return ret;
}
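Example no. 4
/* Exercises EGL_KHR_lock_surface / EGL_KHR_lock_surface2: creates an X11
 * pixmap with a lockable EGL pixmap surface and an EGLImage-backed texture,
 * then locks the surface each frame to update its pixels on the CPU and
 * draws the texture bouncing around the window surface. */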
int egl_lock_surface(void)
{
	fprintf(stderr, "TEST_EXTENSIONS : EGL_KHR_lock_surface & EGL_KHR_lock_surface2 \n");
	int ret = 1;
	int i = 0;

	EGLImageKHR image;
	GLuint image_texture;
	int move_x = 0;
	int move_y = 0;
	int direction_x = 1;
	int direction_y = 1;
	

	/* CREATE CONTEXT STRUCTURE */
	X11Context* x11_ctx = (X11Context*)malloc(sizeof(X11Context));
	EglContext* egl_ctx = (EglContext*)malloc(sizeof(EglContext));
	RenderingContext* gles_ctx = (RenderingContext*)malloc(sizeof(RenderingContext));
	PFNEGLCREATEIMAGEKHRPROC p_eglCreateImageKHR;
	PFNEGLDESTROYIMAGEKHRPROC p_eglDestroyImageKHR;
	PFNGLEGLIMAGETARGETTEXTURE2DOESPROC p_glEGLImageTargetTexture2DOES;
	
	/* Initialize native x11 */
	if(!init_x11_native(x11_ctx))
		goto finish;

	/* Initialize egl */
	if(!init_egl(x11_ctx, egl_ctx))
		goto finish;
		
	if(!is_supported(egl_ctx->egl_display,"EGL_KHR_lock_surface"))
	{	
		/* EGL_KHR_lock_surface is not supported */
		fprintf(stderr, "EGL_KHR_lock_surface is not supported\n");
		goto finish;
	}

	/* Create native pixmap */
	int xDefaultScreen = DefaultScreen(x11_ctx->native_display);
	int xDefaultDepth = XDefaultDepth( x11_ctx->native_display, xDefaultScreen );
	Pixmap pixmap = XCreatePixmap(x11_ctx->native_display,x11_ctx->native_window, TEX_W, TEX_H, xDefaultDepth);
	if(pixmap == None)
	{
		fprintf(stderr, "FAIL to XCreatePixmap \n");
		goto finish;
	}
	
	PFNEGLLOCKSURFACEKHRPROC p_eglLockSurfaceKHR = (PFNEGLLOCKSURFACEKHRPROC)eglGetProcAddress( "eglLockSurfaceKHR" );
	PFNEGLUNLOCKSURFACEKHRPROC p_eglUnlockSurfaceKHR = (PFNEGLUNLOCKSURFACEKHRPROC)eglGetProcAddress( "eglUnlockSurfaceKHR" );
	if(!p_eglLockSurfaceKHR || !p_eglUnlockSurfaceKHR)
	{
		fprintf(stderr, "EGL_KHR_lock_surface is not supported \n");
		goto finish;
	}

	/* Create eglPixmapSurface */
	if(!create_lockable_pixmap_surface(egl_ctx, pixmap))
	{
		fprintf(stderr, "FAIL to Create PixmapSurface \n");
		goto finish;
	}

	/* Create eglImage and texture */
	if(!create_egl_image_texture(pixmap, &image_texture, egl_ctx))
	{
		fprintf(stderr, "FAIL to Create eglImage \n");
		goto finish;
	}

	/* vertices, color, texture coordinate info */
	static GLfloat vertices[] = {  -1.0,  1.0, 0.0,
									1.0,  1.0, 0.0,
								   -1.0, -1.0, 0.0, 
									1.0, -1.0, 0.0};

	static GLfloat colors[] = { 1.0f, 1.0f, 1.0f, 1.0f, 			
								1.0f, 1.0f, 1.0f, 1.0f,
								1.0f, 1.0f, 1.0f, 1.0f, 			
								1.0f, 1.0f, 1.0f, 1.0f};

	static GLfloat texcoord[] = {0.0, 1.0,
								 1.0, 1.0,
								 0.0, 0.0,
								 1.0, 0.0};
								 
	EGLint lock_surface_attrib [] = {EGL_MAP_PRESERVE_PIXELS_KHR, EGL_FALSE, 
									EGL_LOCK_USAGE_HINT_KHR, EGL_READ_SURFACE_BIT_KHR | EGL_WRITE_SURFACE_BIT_KHR,
									EGL_NONE};	
	unsigned char* p_eglimg_data = NULL;

	if(!init_gles(vertices, colors, texcoord, gles_ctx))
	{
		fprintf(stderr, "FAIL TO INIT GLES\n");
		goto finish;
	}

	/* Query lockable surface information */
	if(!eglMakeCurrent(egl_ctx->egl_display, egl_ctx->pixmap_sur, egl_ctx->pixmap_sur, egl_ctx->pixmap_ctx))
		goto finish;
		
	if(!p_eglLockSurfaceKHR( egl_ctx->egl_display, egl_ctx->pixmap_sur, lock_surface_attrib ))
	{
		p_eglUnlockSurfaceKHR( egl_ctx->egl_display, egl_ctx->pixmap_sur );
		if(!eglMakeCurrent(egl_ctx->egl_display, egl_ctx->wnd_sur, egl_ctx->wnd_sur, egl_ctx->wnd_ctx))
			goto finish;

		if(!p_eglLockSurfaceKHR( egl_ctx->egl_display, egl_ctx->pixmap_sur, lock_surface_attrib ))
		{
			fprintf(stderr, "FAIL to p_eglLockSurfaceKHR %x \n", eglGetError());	
			p_eglUnlockSurfaceKHR( egl_ctx->egl_display, egl_ctx->pixmap_sur );
		}
	}
	EGLint data[7];
	eglQuerySurface(egl_ctx->egl_display, egl_ctx->pixmap_sur,  EGL_BITMAP_PITCH_KHR, &data[0]);
	eglQuerySurface(egl_ctx->egl_display, egl_ctx->pixmap_sur,  EGL_BITMAP_ORIGIN_KHR, &data[1]);
	eglQuerySurface(egl_ctx->egl_display, egl_ctx->pixmap_sur,  EGL_BITMAP_PIXEL_RED_OFFSET_KHR, &data[2]);
	eglQuerySurface(egl_ctx->egl_display, egl_ctx->pixmap_sur,  EGL_BITMAP_PIXEL_GREEN_OFFSET_KHR , &data[3]);
	eglQuerySurface(egl_ctx->egl_display, egl_ctx->pixmap_sur,  EGL_BITMAP_PIXEL_BLUE_OFFSET_KHR, &data[4]);
	eglQuerySurface(egl_ctx->egl_display, egl_ctx->pixmap_sur,  EGL_BITMAP_PIXEL_ALPHA_OFFSET_KHR , &data[5]);
	fprintf(stderr, "eglSurface Information\n");
	fprintf(stderr, " EGL_BITMAP_PITCH_KHR %d\n EGL_BITMAP_ORIGIN_KHR 0x%x\n EGL_BITMAP_PIXEL_RED_OFFSET_KHR %d\n EGL_BITMAP_PIXEL_GREEN_OFFSET_KHR %d\n ", data[0], data[1], data[2], data[3]);
	fprintf(stderr, "EGL_BITMAP_PIXEL_BLUE_OFFSET_KHR %d\n EGL_BITMAP_PIXEL_ALPHA_OFFSET_KHR %d\n", data[4], data[5]);
	
	if(is_supported(egl_ctx->egl_display, "EGL_KHR_lock_surface2"))
	{
		eglQuerySurface(egl_ctx->egl_display, egl_ctx->pixmap_sur,  EGL_BITMAP_PIXEL_SIZE_KHR , &data[6]);
		fprintf(stderr, " EGL_BITMAP_PIXEL_SIZE_KHR %d\n", data[6]);
	}
	
	if(!p_eglUnlockSurfaceKHR( egl_ctx->egl_display, egl_ctx->pixmap_sur ))
	{
		fprintf(stderr, "FAIL to eglUnlockSurfaceKHR %x \n", eglGetError());
		goto finish;
	}

	for(i=0; i<FRAME; i++)
	{
		/* MakeCurrent eglPixmapSurface */
		if(!eglMakeCurrent(egl_ctx->egl_display, egl_ctx->pixmap_sur, egl_ctx->pixmap_sur, egl_ctx->pixmap_ctx))
			goto finish;
			
		if(!p_eglLockSurfaceKHR( egl_ctx->egl_display, egl_ctx->pixmap_sur, lock_surface_attrib ))
		{
			p_eglUnlockSurfaceKHR( egl_ctx->egl_display, egl_ctx->pixmap_sur );
			/* MakeCurrent eglWindowSurface */
			if(!eglMakeCurrent(egl_ctx->egl_display, egl_ctx->wnd_sur, egl_ctx->wnd_sur, egl_ctx->wnd_ctx))
				goto finish;

			if(!p_eglLockSurfaceKHR( egl_ctx->egl_display, egl_ctx->pixmap_sur, lock_surface_attrib ))
			{
				fprintf(stderr, "FAIL to p_eglLockSurfaceKHR %x \n", eglGetError());	
				p_eglUnlockSurfaceKHR( egl_ctx->egl_display, egl_ctx->pixmap_sur );
			}
		}
		if(!eglQuerySurface(egl_ctx->egl_display, egl_ctx->pixmap_sur, EGL_BITMAP_POINTER_KHR, (EGLint *) &p_eglimg_data))
		{
				fprintf(stderr, "FAIL to query surface %x \n", eglGetError());
				p_eglUnlockSurfaceKHR( egl_ctx->egl_display, egl_ctx->pixmap_sur );
				goto finish;
		}

		if(p_eglimg_data == NULL)
		{
			fprintf(stderr, "eglQuerySurface returned NULL for the locked surface\n");
			goto finish;
		}

		if(i%30 < 10)
			update_eglimg( p_eglimg_data, 'R', TEX_W, TEX_H, 4 );
		else if(i%30 >= 10 && i%30 <20)
			update_eglimg( p_eglimg_data, 'G', TEX_W, TEX_H, 4 );
		else
			update_eglimg( p_eglimg_data, 'B', TEX_W, TEX_H, 4 );
			
		if(!p_eglUnlockSurfaceKHR( egl_ctx->egl_display, egl_ctx->pixmap_sur ))
		{
			fprintf(stderr, "FAIL to eglUnlockSurfaceKHR %x \n", eglGetError());
			goto finish;
		}
		
		/* MakeCurrent eglWindowSurface */
		if(!eglMakeCurrent(egl_ctx->egl_display, egl_ctx->wnd_sur, egl_ctx->wnd_sur, egl_ctx->wnd_ctx))
			goto finish;
		
		/* Draw on to eglWindowSurface */
		glClearColor(1.0, 1.0, 1.0, 1.0);
		glClear(GL_COLOR_BUFFER_BIT);
		glViewport(move_x,move_y, RECT_W, RECT_H );
		glBindTexture(GL_TEXTURE_2D, image_texture);
		glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
		/* the frame is presented by the checked eglSwapBuffers() call below */

		if(direction_x == 1)
			move_x += SPEED;
		else 
			move_x -= SPEED;
			
		if(direction_y == 1)
			move_y += SPEED;
		else
			move_y -= SPEED;

		if(move_x+RECT_W > x11_ctx->width || move_x < 0)
			direction_x = - direction_x;
		if(move_y+RECT_H > x11_ctx->height || move_y < 0)
			direction_y = - direction_y;
		if(!eglSwapBuffers(egl_ctx->egl_display, egl_ctx->wnd_sur))
			goto finish;
	}
	destroy_egl_image_texture(&image_texture, egl_ctx);			
finish:
	/* Deinit gl */
	deinit_gles(gles_ctx);
	
	/* Deinit egl */
	deinit_egl(egl_ctx);
	
	/* Deinit native x11 */
	deinit_x11_native(x11_ctx);

	if(x11_ctx)
		free(x11_ctx);
	if(egl_ctx)
		free(egl_ctx);
	if(gles_ctx)
		free(gles_ctx);

	return ret;
}
Example no. 5
0
// Loads a volume from file_name (caching the result per file/flags combination)
// and returns a deep copy named node_name; falls back to an empty TransformNode
// if the file is missing or the volume type is unsupported.
std::shared_ptr<node::Node> VolumeLoader::create_volume_from_file(std::string const& node_name,
                                                                  std::string const& file_name,
                                                                  unsigned flags) {

  std::shared_ptr<node::Node> cached_node;
  std::string key(file_name + "_" + string_utils::to_string(flags));

  auto searched(loaded_files_.find(key));
  if (searched != loaded_files_.end()) {

    cached_node = searched->second;

  } else {

    std::ifstream f(file_name.c_str());

    if (!f.good()) {
      f.close();
      Logger::LOG_WARNING << "Unable to load " << file_name << ": File does not exist!" << std::endl;
      return std::make_shared<node::TransformNode>(node_name);
    }

    f.close();

    if (is_supported(file_name)) {
      cached_node = load(file_name, flags);
      cached_node->update_cache();
      loaded_files_.insert(std::make_pair(key, cached_node));

      // normalize volume position and rotation
      if (flags & VolumeLoader::NORMALIZE_POSITION || flags & VolumeLoader::NORMALIZE_SCALE) {
        auto bbox = cached_node->get_bounding_box();

        if (flags & VolumeLoader::NORMALIZE_POSITION) {
          auto center((bbox.min + bbox.max)*0.5);
          cached_node->translate(-center);
        }

        if (flags & VolumeLoader::NORMALIZE_SCALE) {
          auto size(bbox.max - bbox.min);
          auto max_size(std::max(std::max(size.x, size.y), size.z));
          cached_node->scale(1.f / max_size);
        }

      }
    }

    if (!cached_node) {
      Logger::LOG_WARNING << "Unable to load " << file_name << ": Volume Type is not supported!" << std::endl;
    }
  }

  if (cached_node) {
    auto copy(cached_node->deep_copy());

    copy->set_name(node_name);
    return copy;
  }

  return std::make_shared<node::TransformNode>(node_name);
}