Code example #1
File: decklink.cpp  Project: DigiDaz/exacore
RawFrame *create_raw_frame_from_decklink(IDeckLinkVideoFrame *frame,
        RawFrame::PixelFormat pf, bool rotate = false) {
    void *dp;
    /* allocate a RawFrame matching the captured frame's geometry and stride */
    RawFrame *ret = new RawFrame(
        frame->GetWidth( ),
        frame->GetHeight( ),
        pf, frame->GetRowBytes( )
    );

    if (frame->GetBytes(&dp) != S_OK) {
        throw std::runtime_error("Cannot get pointer to raw data");
    }

    /* copy the pixel data, through flip_copy( ) if rotation was requested */
    if (rotate) {
        flip_copy(ret, (uint8_t *) dp);
    } else {
        memcpy(ret->data( ), dp, ret->size( ));
    }

    return ret;
}
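
This helper copies a captured DeckLink frame into an exacore RawFrame. A minimal usage sketch, assuming it is called from a DeckLink input callback; the delegate class, its frame queue, and the choice of RawFrame::CbYCrY8422 for a UYVY-configured capture are assumptions, not part of the listing above:

HRESULT CaptureDelegate::VideoInputFrameArrived(
        IDeckLinkVideoInputFrame *video, IDeckLinkAudioInputPacket *audio) {
    if (video != NULL) {
        /* copy the DeckLink buffer into a RawFrame we own */
        RawFrame *rf = create_raw_frame_from_decklink(
            video, RawFrame::CbYCrY8422
        );
        frame_queue.push(rf);   /* hypothetical queue owned by the delegate */
    }
    return S_OK;
}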
Code example #2
File: freetype_font.cpp  Project: exavideo/exacore
RawFrame *FreetypeFont::render_string(const char *string) {
    int x;
    RawFrame *ret;
    FT_GlyphSlot slot = face->glyph;
    FT_Bool use_kerning = FT_HAS_KERNING(face);
    FT_UInt glyph_index, previous;

    uint8_t *glyph_scanline;

    x = 0;
    previous = 0;

    /* first compute the size of the resulting image */
    const char *scan_ptr = string;
    while (*scan_ptr != '\0') {
        glyph_index = FT_Get_Char_Index(face, *scan_ptr);
        scan_ptr++;

        /* apply kerning between the previous and current glyph */
        if (use_kerning && previous != 0 && glyph_index != 0) {
            FT_Vector delta;
            FT_Get_Kerning(face, previous, glyph_index, 
                    FT_KERNING_DEFAULT, &delta);
            x += delta.x / 64;
        }

        FTCHK(FT_Load_Glyph(face, glyph_index, FT_LOAD_DEFAULT));

        /* advance is in 26.6 fixed point; convert to whole pixels */
        x += slot->advance.x / 64;

        previous = glyph_index;
    }

    /* initialize a raw frame */
    ret = new RawFrame(x, _h, RawFrame::BGRAn8);
    
    /* second pass: draw it */
    scan_ptr = string;
    int xd = 0;
    previous = 0;
    uint8_t *dest_scanline = ret->data( );

    /* fill the whole frame with the background color */
    for (unsigned int i = 0; i < ret->size( ); i += 4) {
        dest_scanline[i] = bb;
        dest_scanline[i+1] = gb;
        dest_scanline[i+2] = rb;
        dest_scanline[i+3] = ab;
    }

    while (*scan_ptr != '\0') {
        glyph_index = FT_Get_Char_Index(face, *scan_ptr);
        scan_ptr++;

        if (use_kerning && previous != 0 && glyph_index != 0) {
            FT_Vector delta;
            FT_Get_Kerning(face, previous, glyph_index,
                    FT_KERNING_DEFAULT, &delta);
            xd += delta.x / 64;
        }

        FTCHK(FT_Load_Glyph(face, glyph_index, FT_LOAD_RENDER));

        //int yd = -(slot->bitmap_top);
        int yd = _baseline - slot->bitmap_top;
        for (unsigned int y = 0; y < slot->bitmap.rows && yd < _h; y++, yd++) {
            if (yd >= 0) {
                glyph_scanline = ((uint8_t *)slot->bitmap.buffer) 
                        + slot->bitmap.pitch * y;
                dest_scanline = ret->scanline(yd) + 4*xd;
                int xd2 = xd;
                for (unsigned int x = 0; x < slot->bitmap.width && xd2 < ret->w( ); 
                        x++, xd2++) {

                    /* blend foreground over background, weighted by the
                     * glyph coverage value */
                    dest_scanline[0] = (bf * glyph_scanline[x]
                            + bb * (255 - glyph_scanline[x])) / 255;
                    dest_scanline[1] = (gf * glyph_scanline[x]
                            + gb * (255 - glyph_scanline[x])) / 255;
                    dest_scanline[2] = (rf * glyph_scanline[x]
                            + rb * (255 - glyph_scanline[x])) / 255;
                    dest_scanline[3] = (af * glyph_scanline[x]
                            + ab * (255 - glyph_scanline[x])) / 255;
                    dest_scanline += 4;
                }
            }
        }

        xd += slot->advance.x / 64;
        previous = glyph_index;
    }

    return ret;
}
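
render_string( ) makes two passes over the text: the first measures the total advance, including kerning, to size the output frame, and the second renders each glyph, blending the configured foreground color over the background. A brief usage sketch; the FreetypeFont constructor arguments and the color setter are assumptions, only render_string( ) comes from the listing above:

/* hypothetical construction and color setup */
FreetypeFont font("/usr/share/fonts/DejaVuSans.ttf", 30);
font.set_fgcolor(255, 255, 255, 255);

RawFrame *caption = font.render_string("REPLAY 1");
fprintf(stderr, "caption width: %d px\n", caption->w( ));
delete caption;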
Code example #3
int ReplayPlayoutLavfSource::run_lavc( ) {
    AVPacket packet;
    int frame_finished = 0;
    int audio_finished = 0;

    /* 
     * read stream until we get a video frame, 
     * possibly also decoding some audio along the way
     */
    while (frame_finished == 0 && audio_finished == 0 &&
            av_read_frame(format_ctx, &packet) >= 0) {
        if (packet.stream_index == video_stream) {
            avcodec_decode_video2(video_codecctx, lavc_frame, 
                    &frame_finished, &packet);
        } else if (packet.stream_index == audio_stream) {
            avcodec_decode_audio4(audio_codecctx, audio_frame, 
                    &audio_finished, &packet);
        }

        av_free_packet(&packet);
    }

    if (frame_finished) {
        /* make a RawFrame out of lavc_frame */
        RawFrame *fr = new RawFrame(1920, 1080, RawFrame::CbYCrY8422);
        switch (lavc_frame->format) {
            case AV_PIX_FMT_YUVJ422P:
            case AV_PIX_FMT_YUV422P:
                fr->pack->YCbCr8P422(
                    lavc_frame->data[0], 
                    lavc_frame->data[1],
                    lavc_frame->data[2],
                    lavc_frame->linesize[0],
                    lavc_frame->linesize[1],
                    lavc_frame->linesize[2]
                );
                break;

            case AV_PIX_FMT_UYVY422:
                /* packed UYVY already matches CbYCrY8422; copy as-is */
                memcpy(fr->data( ), lavc_frame->data[0], fr->size( ));
                break;

            case AV_PIX_FMT_YUV422P10LE:
                fr->pack->YCbCr10P422(
                    (uint16_t *)lavc_frame->data[0],
                    (uint16_t *)lavc_frame->data[1],
                    (uint16_t *)lavc_frame->data[2],
                    lavc_frame->linesize[0] / 2,
                    lavc_frame->linesize[1] / 2,
                    lavc_frame->linesize[2] / 2
                );
                break;

            case AV_PIX_FMT_YUV420P:
                fr->pack->YCbCr8P420(
                    lavc_frame->data[0],
                    lavc_frame->data[1],
                    lavc_frame->data[2],
                    lavc_frame->linesize[0],
                    lavc_frame->linesize[1],
                    lavc_frame->linesize[2]
                );
                break;

            default:
                fprintf(stderr, "ReplayPlayoutLavfSource doesn't know how "
                    "to handle AVPixelFormat %d\n", lavc_frame->format);
                memset(fr->data( ), 128, fr->size( ));
                break;
        }

        pending_video_frames.push_back(fr);
        return 1;
    } else if (audio_finished) {
        PackedAudioPacket<int16_t> apkt(
            audio_frame->nb_samples,
            audio_codecctx->channels
        );

        if (audio_codecctx->sample_fmt == AV_SAMPLE_FMT_S16) {
            memcpy(apkt.data( ), audio_frame->data[0], apkt.size_bytes( ));
        } else if (audio_codecctx->sample_fmt == AV_SAMPLE_FMT_FLTP) {
            /* convert planar float (from AAC) to signed 16-bit */
            copy_fltp(audio_frame, apkt);
        } else {
            fprintf(stderr, "sample_fmt=%d\n", audio_codecctx->sample_fmt);
            throw std::runtime_error("don't understand sample format");
        }
        if (audio_codecctx->sample_rate != 48000) {
            throw std::runtime_error("need 48khz");
        }

        if (audio_codecctx->channels != 2) {
            /* mix down to 2 channels if needed */
            PackedAudioPacket<int16_t> *twoch = apkt.change_channels(2);
            pending_audio.add_packet(twoch);
            delete twoch;
        } else {
            pending_audio.add_packet(&apkt);
        }

        return 1;
    } else {
        return 0;
    }
}
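
run_lavc( ) returns 1 whenever it decoded either a video frame (queued in pending_video_frames) or an audio packet (added to pending_audio), and 0 at end of stream. A sketch of how a caller inside the class might pump it until a video frame is buffered; the helper name is hypothetical and it assumes pending_video_frames exposes empty( ):

/* hypothetical helper: decode until at least one video frame is queued */
bool ReplayPlayoutLavfSource::fill_video_queue( ) {
    while (pending_video_frames.empty( )) {
        if (run_lavc( ) == 0) {
            /* end of stream reached before another video frame appeared */
            return false;
        }
    }
    return true;
}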