Example #1
0
	/// Enables colour-keyed blits on this surface: pixels matching @p key
	/// become transparent. When @p rleaccel is true, RLE acceleration is
	/// requested as well. Throws exception_sdl on failure; returns *this
	/// to allow call chaining.
	surface& surface::set_color_key(color key,bool rleaccel) {
		// Build the flag word once instead of duplicating the SDL call.
		Uint32 flags = SDL_SRCCOLORKEY;
		if(rleaccel)
			flags |= SDL_RLEACCEL;
		// Map the key colour through this surface's own pixel format.
		if(SDL_SetColorKey(get_low(), flags, get_pixel_format().map_rgb(key)) != 0)
			throw exception_sdl();
		return *this;
	}
Example #2
0
/*
 * s5pv210_fb_init - initcall that probes and registers the S5PV210 LCD
 * framebuffer driver.
 *
 * Pulls the board-specific LCD description from the resource table, fills
 * the driver's surface descriptor from it, and registers the framebuffer.
 * Bails out silently (with a log message) if the resource, the bus clock,
 * or a supported pixel depth is missing.
 */
static __init void s5pv210_fb_init(void)
{
	struct s5pv210fb_lcd * lcd;

	/* Board code attaches a struct s5pv210fb_lcd under this driver's name. */
	s5pv210_fb.priv = resource_get_data(s5pv210_fb.info->name);
	lcd = (struct s5pv210fb_lcd *)(s5pv210_fb.priv);

	/* lcd is only a cast of priv and is not dereferenced until after this
	 * NULL check, so the early assignment above is safe. */
	if(! s5pv210_fb.priv)
	{
		LOG_W("can't get the resource of \'%s\'", s5pv210_fb.info->name);
		return;
	}

	/* The LCD controller sits on dsys-hclk; without it nothing will run. */
	if(! clk_get_rate("dsys-hclk", 0))
	{
		LOG_E("can't get the clock of \'dsys-hclk\'");
		return;
	}

	/* Only 16/24/32 bpp panels are supported by this driver. */
	if( (lcd->bits_per_pixel != 16) && (lcd->bits_per_pixel != 24) && (lcd->bits_per_pixel != 32) )
		return;

	/* Copy the panel's pixel layout into the exported surface descriptor. */
	info.surface.info.bits_per_pixel = lcd->bits_per_pixel;
	info.surface.info.bytes_per_pixel = lcd->bytes_per_pixel;
	info.surface.info.red_mask_size = lcd->rgba.r_mask;
	info.surface.info.red_field_pos = lcd->rgba.r_field;
	info.surface.info.green_mask_size = lcd->rgba.g_mask;
	info.surface.info.green_field_pos = lcd->rgba.g_field;
	info.surface.info.blue_mask_size = lcd->rgba.b_mask;
	info.surface.info.blue_field_pos = lcd->rgba.b_field;
	info.surface.info.alpha_mask_size = lcd->rgba.a_mask;
	info.surface.info.alpha_field_pos = lcd->rgba.a_field;
	info.surface.info.fmt = get_pixel_format(&(info.surface.info));

	/* Geometry: the pitch is a packed row (no padding between lines). */
	info.surface.w = lcd->width;
	info.surface.h = lcd->height;
	info.surface.pitch = lcd->width * lcd->bytes_per_pixel;
	/* VRAM belongs to the panel description, not to us — never free it. */
	info.surface.flag = SURFACE_PIXELS_DONTFREE;
	info.surface.pixels = lcd->vram_front;

	/* Default clip rectangle covers the whole screen. */
	info.surface.clip.x = 0;
	info.surface.clip.y = 0;
	info.surface.clip.w = lcd->width;
	info.surface.clip.h = lcd->height;

	/* Start from a clean map table, then install the software blitters. */
	memset(&info.surface.maps, 0, sizeof(struct surface_maps));
	surface_set_maps(&info.surface.maps);

	if(! register_framebuffer(&s5pv210_fb))
		LOG_E("failed to register framebuffer driver '%s'", s5pv210_fb.info->name);
}
Example #3
0
/// Builds a core::pixel_format_desc (plane list included) for an FFmpeg
/// pixel format at the given dimensions. Formats the core layer does not
/// understand come back with format == invalid and no planes.
core::pixel_format_desc pixel_format_desc(AVPixelFormat pix_fmt, int width, int height)
{
    // Ask FFmpeg to lay out the picture without allocating pixel data;
    // this yields the per-plane linesizes (and plane pointers relative
    // to a nullptr base) for the requested format/size.
    AVPicture dummy_pict;
    avpicture_fill(&dummy_pict, nullptr, pix_fmt, width, height);

    core::pixel_format_desc desc = get_pixel_format(pix_fmt);
    const int* ls = dummy_pict.linesize;

    switch (desc.format) {
        case core::pixel_format::gray:
        case core::pixel_format::luma:
            // Single 1-byte-per-pixel plane.
            desc.planes.push_back(core::pixel_format_desc::plane(ls[0], height, 1));
            break;

        case core::pixel_format::bgr:
        case core::pixel_format::rgb:
            // One packed 3-byte-per-pixel plane; width is linesize / 3.
            desc.planes.push_back(core::pixel_format_desc::plane(ls[0] / 3, height, 3));
            break;

        case core::pixel_format::bgra:
        case core::pixel_format::argb:
        case core::pixel_format::rgba:
        case core::pixel_format::abgr:
            // One packed 4-byte-per-pixel plane; width is linesize / 4.
            desc.planes.push_back(core::pixel_format_desc::plane(ls[0] / 4, height, 4));
            break;

        case core::pixel_format::ycbcr:
        case core::pixel_format::ycbcra: {
            // Chroma plane height is recovered from the byte distance between
            // the U and V plane pointers that avpicture_fill laid out.
            const auto chroma_size   = static_cast<int>(dummy_pict.data[2] - dummy_pict.data[1]);
            const auto chroma_height = chroma_size / ls[1];

            desc.planes.push_back(core::pixel_format_desc::plane(ls[0], height, 1));
            desc.planes.push_back(core::pixel_format_desc::plane(ls[1], chroma_height, 1));
            desc.planes.push_back(core::pixel_format_desc::plane(ls[2], chroma_height, 1));

            // The alpha plane (if any) is full resolution.
            if (desc.format == core::pixel_format::ycbcra)
                desc.planes.push_back(core::pixel_format_desc::plane(ls[3], height, 1));
            break;
        }

        default:
            desc.format = core::pixel_format::invalid;
            break;
    }
    return desc;
}
Example #4
0
void VideoEncoder::thread_loop() {
    int encnum;
    int res;
    auto screen = this->screen.lock();

    /* Convert picture from rgb to yuv420 planar

       two steps here:

       1) rgb24a or bgr24a to yuv422 interlaced (yuyv)
       2) yuv422 to yuv420 planar (yuv420p)

       to fix endiannes issues try adding #define ARCH_PPC
       and using
       mlt_convert_bgr24a_to_yuv422
       or
       mlt_convert_argb_to_yuv422
       (see mlt_frame.h in mltframework.org sourcecode)
       i can't tell as i don't have PPC, waiting for u mr.goil :)
     */

    uint8_t *surface = (uint8_t *)screen->get_surface();
    time_t *tm = (time_t *)malloc(sizeof(time_t));
    time(tm);
//   std::cerr << "-- ENC:" << asctime(localtime(tm));
    if(!surface) {
        fps->delay();
        /* std::cout << "fps->start_tv.tv_sec :" << fps->start_tv.tv_sec << \
           " tv_usec :" << fps->start_tv.tv_usec << "   \r" << std::endl; */
        return;
    }
    fps->delay();
    //uncomment this to see how long it takes between two frames in us.
    /*    timeval start_t;
        gettimeofday(&start_t,NULL);
        timeval did;
        timersub(&start_t, &m_lastTime, &did);
        m_lastTime.tv_sec = start_t.tv_sec;
        m_lastTime.tv_usec = start_t.tv_usec;
        std::cerr << "diff time :" << did.tv_usec << std::endl;*/
    screen->lock();
    auto & geo = screen->getGeometry();
    switch(screen->get_pixel_format()) {
    case ViewPort::RGBA32:
        mlt_convert_rgb24a_to_yuv422(surface,
                                     geo.getSize().x(), geo.getSize().y(),
                                     geo.getSize().x() << 2, (uint8_t*)enc_yuyv, NULL);
        break;

    case ViewPort::BGRA32:
        mlt_convert_bgr24a_to_yuv422(surface,
                                     geo.getSize().x(), geo.getSize().y(),
                                     geo.getSize().x() << 2, (uint8_t*)enc_yuyv, NULL);
        break;

    case ViewPort::ARGB32:
        mlt_convert_argb_to_yuv422(surface,
                                   geo.getSize().x(), geo.getSize().y(),
                                   geo.getSize().x() << 2, (uint8_t*)enc_yuyv, NULL);
        break;

    default:
        error("Video Encoder %s doesn't supports Screen %s pixel format",
              name.c_str(), screen->getName().c_str());
    }

    screen->unlock();

    ccvt_yuyv_420p(geo.getSize().x(), geo.getSize().y(), enc_yuyv, enc_y, enc_u, enc_v);

    ////// got the YUV, do the encoding
    res = encode_frame();
    if(res != 0) error("Can't encode frame");

    /// proceed writing and streaming encoded data in encpipe

    encnum = 0;
    if(write_to_disk || write_to_stream) {
        if((encnum = ringbuffer_read_space(ringbuffer)) > 0) {
            encbuf = (char *)realloc(encbuf, encnum);
//      encbuf = (char *)realloc(encbuf, (((audio_kbps + video_kbps)*1024)/24)); //doesn't change anything for shifting problem
            encnum = ringbuffer_read(ringbuffer, encbuf, encnum);
//      encnum = ringbuffer_read(ringbuffer, encbuf,
//                             ((audio_kbps + video_kbps)*1024)/24);
        }
    }

    if(encnum > 0) {
        //      func("%s has encoded %i bytes", name, encnum);
        if(write_to_disk && filedump_fd)
            fwrite(encbuf, 1, encnum, filedump_fd);

        if(write_to_stream && ice) {
            /*	int	wait_ms;
                wait_ms = shout_delay(ice);
                std::cerr << "---- shout delay :" << wait_ms << std::endl;*/
            shout_sync(ice);
            if(shout_send(ice, (const unsigned char*)encbuf, encnum)
               != SHOUTERR_SUCCESS) {
                error("shout_send: %s", shout_get_error(ice));
            } // else
              //printf("%d %d\n", encnum, (int)shout_queuelen(ice));
        }
        gettimeofday(&m_ActualTime, NULL);
        if(m_ActualTime.tv_sec == m_OldTime.tv_sec)
            m_ElapsedTime += ((double)(m_ActualTime.tv_usec - m_OldTime.tv_usec)) / 1000000.0;
        else
            m_ElapsedTime += ((double)(m_ActualTime.tv_sec - m_OldTime.tv_sec)) + \
                             (((double)(m_ActualTime.tv_usec - m_OldTime.tv_usec)) / 1000000.0);
        m_OldTime.tv_sec = m_ActualTime.tv_sec;
        m_OldTime.tv_usec = m_ActualTime.tv_usec;
        m_Streamed += encnum;
        if(m_ElapsedTime >= 3.0) {      //calculate stream rate every minimum 3 seconds
            m_StreamRate = ((double)m_Streamed / m_ElapsedTime) / 1000.0;
            m_ElapsedTime = 0;
            m_Streamed = 0;
        }
    }
}
Example #5
0
	bool Window::PlatformInit()
	{
		hInstance = (HINSTANCE)&__ImageBase;

		WNDCLASS winClass = {};
		winClass.hInstance = hInstance; // GetModuleHandle(0);
		winClass.style = CS_HREDRAW | CS_VREDRAW | CS_OWNDC;
		winClass.lpfnWndProc = (WNDPROC)WndProc;
		winClass.lpszClassName = "Sparky Win32 Window";
		winClass.hCursor = LoadCursor(NULL, IDC_ARROW);
		winClass.hIcon = LoadIcon(NULL, IDI_WINLOGO);

		if (!RegisterClassA(&winClass))
		{
			// TODO: Handle error
			SPARKY_ERROR("Could not register Win32 class!");
			return false;
		}

		RECT size = { 0, 0, m_Width, m_Height };
		AdjustWindowRectEx(&size, WS_OVERLAPPEDWINDOW | WS_CLIPSIBLINGS | WS_CLIPCHILDREN, false, WS_EX_APPWINDOW | WS_EX_WINDOWEDGE);

		hWnd = CreateWindowExA(WS_EX_APPWINDOW | WS_EX_WINDOWEDGE,
			winClass.lpszClassName, m_Title,
			WS_OVERLAPPEDWINDOW | WS_CLIPSIBLINGS | WS_CLIPCHILDREN,
			GetSystemMetrics(SM_CXSCREEN) / 2 - m_Width / 2,
			GetSystemMetrics(SM_CYSCREEN) / 2 - m_Height / 2,
			// TODO: This requires some... attention
			size.right + (-size.left), size.bottom + (-size.top), NULL, NULL, hInstance, NULL);
			
		if (!hWnd)
		{
			SPARKY_ERROR("Could not create window!");
			return false;
		}

		RegisterWindowClass(hWnd, this);

		hDc = GetDC(hWnd);
		PIXELFORMATDESCRIPTOR pfd = get_pixel_format();
		int pixelFormat = ChoosePixelFormat(hDc, &pfd);
		if (pixelFormat)
		{
			if (!SetPixelFormat(hDc, pixelFormat, &pfd))
			{
				SPARKY_ERROR("Failed setting pixel format!");
				return false;
			}
		}
		else
		{
			SPARKY_ERROR("Failed choosing pixel format!");
			return false;
		}

		HGLRC hrc = wglCreateContext(hDc);
		if (hrc)
		{
			if (!wglMakeCurrent(hDc, hrc))
			{
				SPARKY_ERROR("Failed setting OpenGL context!");
				return false;
			}
		}
		else
		{
			SPARKY_ERROR("Failed creating OpenGL context!");
			return false;
		}

		if (glewInit() != GLEW_OK)
		{
			SPARKY_FATAL("Could not initialize GLEW!");
			return false;
		}

		ShowWindow(hWnd, SW_SHOW);
		SetFocus(hWnd);
		// resize

		return true;
	}
Example #6
0
File: image.c  Project: coog009/myproject
/*
 * image_convert_libswscale - convert/scale an image between two FOURCC
 * formats using libswscale.
 *
 * Returns 0 on success, -1 on failure (unsupported format, allocation
 * failure, or sws context creation failure).
 *
 * libswscale has no AYUV support, so AYUV endpoints are repacked to/from
 * planar YUV444 in temporary buffers, and YV12's swapped U/V plane order
 * is translated to the YUV420P plane order sws expects.
 */
static int image_convert_libswscale(
    uint8_t     *arg_src[MAX_IMAGE_PLANES],
    int          arg_src_stride[MAX_IMAGE_PLANES],
    unsigned int src_width,
    unsigned int src_height,
    uint32_t     src_fourcc,
    uint8_t     *arg_dst[MAX_IMAGE_PLANES],
    int          arg_dst_stride[MAX_IMAGE_PLANES],
    unsigned int dst_width,
    unsigned int dst_height,
    uint32_t     dst_fourcc
)
{
    int error = -1;
    struct SwsContext *sws = NULL;
    enum PixelFormat src_pix_fmt, dst_pix_fmt;
    uint8_t *src[MAX_IMAGE_PLANES];
    uint8_t *dst[MAX_IMAGE_PLANES];
    int src_stride[MAX_IMAGE_PLANES];
    int dst_stride[MAX_IMAGE_PLANES];
    /* Initialized to all-NULL so the cleanup path can free() unconditionally. */
    uint8_t *tmp_src[MAX_IMAGE_PLANES] = { NULL, };
    uint8_t *tmp_dst[MAX_IMAGE_PLANES] = { NULL, };
    int tmp_src_stride[MAX_IMAGE_PLANES];
    int tmp_dst_stride[MAX_IMAGE_PLANES];
    int i, j;
    /* FIX: removed the dead local `stride`, which was assigned twice and never read. */

    /* XXX: libswscale does not support AYUV formats yet */
    switch (src_fourcc) {
    case IMAGE_AYUV:
        /* Unpack packed AYUV into three tightly-packed YUV444 planes
         * (the alpha channel is dropped). */
        src_pix_fmt = PIX_FMT_YUV444P;
        for (i = 0; i < 3; i++) {
            tmp_src[i] = malloc(src_width * src_height);
            if (!tmp_src[i])
                goto end;
            tmp_src_stride[i] = src_width;
            src[i] = tmp_src[i];
            src_stride[i] = tmp_src_stride[i];
        }
        for (j = 0; j < src_height; j++) {
            for (i = 0; i < src_width; i++) {
                const int src_offset = j * arg_src_stride[0] + 4 * i;
                const int dst_offset = j * src_width + i;
#ifdef WORDS_BIGENDIAN
                tmp_src[0][dst_offset] = arg_src[0][src_offset + 1];
                tmp_src[1][dst_offset] = arg_src[0][src_offset + 2];
                tmp_src[2][dst_offset] = arg_src[0][src_offset + 3];
#else
                tmp_src[0][dst_offset] = arg_src[0][src_offset + 2];
                tmp_src[1][dst_offset] = arg_src[0][src_offset + 1];
                tmp_src[2][dst_offset] = arg_src[0][src_offset + 0];
#endif
            }
        }
        break;
    case IMAGE_YV12:
        /* YV12 stores V before U; swap planes 1/2 into YUV420P order. */
        src_pix_fmt = PIX_FMT_YUV420P;
        src[0] = arg_src[0];
        src_stride[0] = arg_src_stride[0];
        src[1] = arg_src[2];
        src_stride[1] = arg_src_stride[2];
        src[2] = arg_src[1];
        src_stride[2] = arg_src_stride[1];
        break;
    default:
        src_pix_fmt = get_pixel_format(src_fourcc);
        for (i = 0; i < MAX_IMAGE_PLANES; i++) {
            src[i] = arg_src[i];
            src_stride[i] = arg_src_stride[i];
        }
        break;
    }

    /* XXX: libswscale does not support AYUV formats yet */
    switch (dst_fourcc) {
    case IMAGE_AYUV:
        /* Scale into temporary YUV444 planes; repacked to AYUV below. */
        dst_pix_fmt = PIX_FMT_YUV444P;
        for (i = 0; i < 3; i++) {
            tmp_dst[i] = malloc(dst_width * dst_height);
            if (!tmp_dst[i])
                goto end;
            tmp_dst_stride[i] = dst_width;
            dst[i] = tmp_dst[i];
            dst_stride[i] = tmp_dst_stride[i];
        }
        break;
    case IMAGE_YV12:
        /* Same V/U swap as on the source side. */
        dst_pix_fmt = PIX_FMT_YUV420P;
        dst[0] = arg_dst[0];
        dst_stride[0] = arg_dst_stride[0];
        dst[1] = arg_dst[2];
        dst_stride[1] = arg_dst_stride[2];
        dst[2] = arg_dst[1];
        dst_stride[2] = arg_dst_stride[1];
        break;
    default:
        dst_pix_fmt = get_pixel_format(dst_fourcc);
        for (i = 0; i < MAX_IMAGE_PLANES; i++) {
            dst[i] = arg_dst[i];
            dst_stride[i] = arg_dst_stride[i];
        }
        break;
    }

    if (src_pix_fmt == PIX_FMT_NONE || dst_pix_fmt == PIX_FMT_NONE)
        goto end;

    sws = sws_getContext(src_width, src_height, src_pix_fmt,
                         dst_width, dst_height, dst_pix_fmt,
                         SWS_BICUBIC, NULL, NULL, NULL);
    if (!sws)
        goto end;

    sws_scale(sws, src, src_stride, 0, src_height, dst, dst_stride);
    sws_freeContext(sws);

    /* XXX: libswscale does not support AYUV formats yet */
    switch (dst_fourcc) {
    case IMAGE_AYUV:
        /* Repack the temporary YUV444 planes into packed AYUV with an
         * opaque alpha channel. */
        for (j = 0; j < dst_height; j++) {
            for (i = 0; i < dst_width; i++) {
                const int src_offset = j * dst_width + i;
                const int dst_offset = j * arg_dst_stride[0] + 4 * i;
#ifdef WORDS_BIGENDIAN
                arg_dst[0][dst_offset + 0] = 0xff;
                arg_dst[0][dst_offset + 1] = tmp_dst[0][src_offset];
                arg_dst[0][dst_offset + 2] = tmp_dst[1][src_offset];
                arg_dst[0][dst_offset + 3] = tmp_dst[2][src_offset];
#else
                arg_dst[0][dst_offset + 0] = tmp_dst[2][src_offset];
                arg_dst[0][dst_offset + 1] = tmp_dst[1][src_offset];
                arg_dst[0][dst_offset + 2] = tmp_dst[0][src_offset];
                arg_dst[0][dst_offset + 3] = 0xff;
#endif
            }
        }
        break;
    }

    error = 0;
end:
    /* free(NULL) is a no-op, so unallocated planes are safe to pass. */
    for (i = 0; i < MAX_IMAGE_PLANES; i++) {
        free(tmp_src[i]);
        free(tmp_dst[i]);
    }
    return error;
}