void SvgSubprocessCharacterGenerator::run_thread( ) {
    RawFrame *frame;
    size_t svg_size;
    uint8_t alpha;
    char *svg_data;

    /* fork subprocess */
    do_fork( );

    for (;;) {
        /* request a SVG from the subprocess */
        request( );

        /* get the SVG */
        svg_size = read_item_from_fd<uint32_t>(recv_fd);
        alpha = read_item_from_fd<uint8_t>(recv_fd);
        _dirty_level = read_item_from_fd<uint8_t>(recv_fd);

        if (svg_size > 0) {
            svg_data = read_svg(svg_size);

            /* render SVG to frame */
            frame = RsvgFrame::render_svg(svg_data, svg_size);
            frame->set_global_alpha(alpha);
            free(svg_data);

            /* put frame down the pipe */
            _output_pipe.put(frame);
        } else {
            _output_pipe.put(NULL);
        }
    }
}
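/*
 * read_item_from_fd is not shown above. A minimal sketch of such a
 * helper, assuming it reads exactly sizeof(T) bytes from the pipe and
 * throws on error or EOF (the real implementation may differ):
 */
#include <unistd.h>
#include <cstdint>
#include <stdexcept>

template <typename T>
T read_item_from_fd(int fd) {
    T item;
    uint8_t *buf = reinterpret_cast<uint8_t *>(&item);
    size_t pos = 0;

    /* loop until the whole item has been read */
    while (pos < sizeof(T)) {
        ssize_t ret = read(fd, buf + pos, sizeof(T) - pos);
        if (ret < 0) {
            throw std::runtime_error("read_item_from_fd: read failed");
        } else if (ret == 0) {
            throw std::runtime_error("read_item_from_fd: unexpected EOF");
        }
        pos += ret;
    }

    return item;
}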
int main( ) {
    static char svg_buf[MAX_SVG];
    ssize_t read_ret;
    size_t read_pos = 0;

    /* slurp input */
    while (read_pos < MAX_SVG) {
        read_ret = read(STDIN_FILENO, svg_buf + read_pos, MAX_SVG - read_pos);

        if (read_ret < 0) {
            perror("read");
            exit(1);
        } else if (read_ret == 0) {
            break;
        } else {
            read_pos += read_ret;
        }
    }

    /* create SVG frame */
    RawFrame *svg = RsvgFrame::render_svg(svg_buf, read_pos);

    /* dump raw BGRAn8 video to stdout */
    fprintf(stderr, "w=%d h=%d\n", svg->w( ), svg->h( ));
    svg->write_to_fd(STDOUT_FILENO);
}
int main(int argc, char **argv) {
    if (argc > 1 && strcmp(argv[1], "-n") == 0) {
        cpu_force_no_simd( );
    }

    /* Read 540p frames on stdin; dump 1080p frames on stdout... */
    RawFrame frame(960, 540, RawFrame::CbYCrY8422);
    ssize_t ret;

    for (;;) {
        ret = frame.read_from_fd(STDIN_FILENO);

        if (ret < 0) {
            perror("frame.read_from_fd");
            exit(1);
        } else if (ret == 0) {
            break;
        } else {
            RawFrame *out = frame.convert->CbYCrY8422_scan_double( );
            if (out->write_to_fd(STDOUT_FILENO) < 0) {
                perror("write_to_fd");
                break;
            }
            delete out;
        }
    }
}
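/*
 * CbYCrY8422_scan_double is part of the RawFrame conversion API used
 * above. A rough sketch of the idea, assuming plain pixel doubling of
 * packed 4:2:2 data (Cb Y0 Cr Y1 macropixels) with each output line
 * emitted twice by the caller; the real converter may interpolate:
 */
#include <cstdint>

static void scan_double_line(uint8_t *dst, const uint8_t *src, int src_w) {
    /* src_w is in pixels; packed 4:2:2 carries 2 pixels per 4 bytes */
    for (int x = 0; x < src_w; x += 2) {
        uint8_t cb = src[0], y0 = src[1], cr = src[2], y1 = src[3];

        /* each source pixel becomes two output pixels,
           reusing the shared chroma pair */
        dst[0] = cb; dst[1] = y0; dst[2] = cr; dst[3] = y0;
        dst[4] = cb; dst[5] = y1; dst[6] = cr; dst[7] = y1;

        src += 4;
        dst += 8;
    }
}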
int main(int argc, char **argv) {
    int n_frames = 1000;

    if (argc > 1 && strcmp(argv[1], "-n") == 0) {
        cpu_force_no_simd( );
    }

    /* Read one 1080p frame, then convert it repeatedly to benchmark
       the CbYCrY8422 -> BGRAn8 conversion; output is compiled out. */
    RawFrame frame(1920, 1080, RawFrame::CbYCrY8422);
    ssize_t ret;

    ret = frame.read_from_fd(STDIN_FILENO);
    if (ret <= 0) {
        perror("frame.read_from_fd");
        exit(1);
    } else {
        while (n_frames != 0) {
            RawFrame *out = frame.convert->BGRAn8( );
#if 0
            if (out->write_to_fd(STDOUT_FILENO) < 0) {
                perror("write_to_fd");
                exit(1);
            }
#endif
            delete out;
            n_frames--;
        }
    }

    return 0;
}
int main(int argc, char **argv) {
    if (argc > 1 && strcmp(argv[1], "-n") == 0) {
        cpu_force_no_simd( );
    }

    build_lookup_tables( );

    /* Read 720x480 CbYCrY8422 frames on stdin.
       Dump upscaled 1080p frames on stdout. */
    RawFrame frame(720, 480, RawFrame::CbYCrY8422);
    ssize_t ret;

    for (;;) {
        ret = frame.read_from_fd(STDIN_FILENO);

        if (ret < 0) {
            perror("frame.read_from_fd");
            exit(1);
        } else if (ret == 0) {
            break;
        } else {
            RawFrame *out = upscale(&frame);
            if (out->write_to_fd(STDOUT_FILENO) < 0) {
                perror("write_to_fd");
                break;
            }
            delete out;
        }
    }
}
void VideoSource::deliverFrame(const RawFrame &frame) {
    for (auto capturer : capturers_)
        capturer->incomingArgbFrame(frame.getWidth(), frame.getHeight(),
                frame.getBuffer().get(), frame.getFrameSizeInBytes());

    // LogTrace("") << "delivered frame to " << capturers_.size() << " capturers" << endl;
}
virtual HRESULT VideoInputFrameArrived(IDeckLinkVideoInputFrame *in,
        IDeckLinkAudioInputPacket *audio_in) {
    RawFrame *out;
    AudioPacket *audio_out;
    void *data;
    uint8_t *bytes;
    size_t i, spitch, h;

    if (in != NULL) {
        if (in->GetFlags( ) & bmdFrameHasNoInputSource) {
            fprintf(stderr, "DeckLink input: no signal\n");
        } else {
            out = new RawFrame(in->GetWidth(), in->GetHeight(), pf);
            out->set_field_dominance(dominance);

            spitch = in->GetRowBytes( );
            h = in->GetHeight( );

            in->GetBytes(&data);
            bytes = (uint8_t *) data;

            /* copy scanline by scanline, since the source pitch may
               differ from the RawFrame pitch */
            for (i = 0; i < h; i++) {
                memcpy(out->scanline(i), bytes, out->pitch( ));
                bytes += spitch;
            }

            if (out_pipe.can_put( )) {
                out_pipe.put(out);
            } else {
                fprintf(stderr, "DeckLink: dropping input frame on floor\n");
                delete out;
            }
        }
    }

    if (audio_in != NULL && audio_pipe != NULL) {
        audio_out = new AudioPacket(audio_rate, n_channels, 2,
                audio_in->GetSampleFrameCount( ));

        if (audio_in->GetBytes(&data) != S_OK) {
            throw std::runtime_error(
                "DeckLink audio input: GetBytes failed"
            );
        }

        memcpy(audio_out->data( ), data, audio_out->size( ));
        audio_pipe->put(audio_out);
    }

    return S_OK;
}
int main(int argc, const char *argv[]) {
    RawFrame *frame;
    char *script;
    int script_fd;
    struct stat st;
    int n_frames;

    if (argc != 3) {
        fprintf(stderr, "usage: %s script.js n_frames\n", argv[0]);
        return 1;
    }

    n_frames = atoi(argv[2]);

    script_fd = open(argv[1], O_RDONLY);
    if (script_fd < 0) {
        perror("open");
        return 1;
    }

    if (fstat(script_fd, &st) < 0) {
        perror("fstat");
        return 1;
    }

    script = (char *) malloc(st.st_size + 1);
    if (script == NULL) {
        fprintf(stderr, "failed to allocate memory for script\n");
        return 1;
    }

    memset(script, 0, st.st_size + 1);

    if (read_all(script_fd, script, st.st_size) != 1) {
        fprintf(stderr, "failed to read script\n");
        return 1;
    }

    JsCharacterGeneratorScript cg_script(script, strlen(script));

    for (int i = 0; i < n_frames; i++) {
        frame = cg_script.render_frame( );
        frame->write_tga_to_fd(STDOUT_FILENO);
        delete frame;
    }

    return 0;
}
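/*
 * read_all is assumed to loop over read(2) until the requested byte
 * count has arrived, returning 1 on success and <= 0 on error or short
 * read. A minimal sketch under that assumption:
 */
#include <unistd.h>

static int read_all(int fd, char *buf, size_t size) {
    size_t pos = 0;

    while (pos < size) {
        ssize_t ret = read(fd, buf + pos, size - pos);
        if (ret < 0) {
            return -1;          /* I/O error */
        } else if (ret == 0) {
            return 0;           /* unexpected EOF */
        }
        pos += ret;
    }

    return 1;                   /* got everything */
}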
RawFrame *create_raw_frame_from_decklink(IDeckLinkVideoFrame *frame,
        RawFrame::PixelFormat pf, bool rotate = false) {
    void *dp;
    RawFrame *ret = new RawFrame(
        frame->GetWidth( ), frame->GetHeight( ), pf, frame->GetRowBytes( )
    );

    if (frame->GetBytes(&dp) != S_OK) {
        throw std::runtime_error("Cannot get pointer to raw data");
    }

    if (rotate) {
        flip_copy(ret, (uint8_t *) dp);
    } else {
        memcpy(ret->data( ), dp, ret->size( ));
    }

    return ret;
}
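/*
 * flip_copy is not shown here. A sketch, assuming it performs a
 * 180-degree rotation while copying, and assuming a 4-byte-per-pixel
 * format such as BGRAn8 (a packed 4:2:2 format would also need its
 * macropixels reordered). Since the RawFrame above is constructed with
 * the DeckLink row bytes as its pitch, both buffers share one stride:
 */
static void flip_copy(RawFrame *dst, uint8_t *src) {
    int w = dst->w( );

    for (int y = 0; y < dst->h( ); y++) {
        /* read source scanlines bottom-up */
        uint32_t *sl = (uint32_t *)(src + (dst->h( ) - 1 - y) * dst->pitch( ));
        uint32_t *dl = (uint32_t *) dst->scanline(y);

        /* reverse each scanline left-to-right */
        for (int x = 0; x < w; x++) {
            dl[x] = sl[w - 1 - x];
        }
    }
}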
RawFrame *upscale(RawFrame *in) {
    RawFrame *out = new RawFrame(1920, 1080, RawFrame::CbYCrY8422);
    uint8_t *scaled_1 = new uint8_t[2*1920];
    uint8_t *scaled_2 = new uint8_t[2*1920];

    for (int i = 0; i < out->h( ); i++) {
        /* 480 source lines map onto 1080 output lines (ratio 4/9) */
        int src_scan = i * 4 / 9;
        uint8_t *scanline_1 = in->scanline(src_scan);
        uint8_t *scanline_2;

        if (src_scan + 1 < in->h( )) {
            scanline_2 = in->scanline(src_scan + 1);
        } else {
            scanline_2 = in->scanline(src_scan);
        }

        upscale_scanline(scaled_1, scanline_1);
        upscale_scanline(scaled_2, scanline_2);

        interpolate_scanline(out->scanline(i), scaled_1, scaled_2,
                (4 * i) % 9);
    }

    delete [] scaled_1;
    delete [] scaled_2;

    return out;
}
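/*
 * interpolate_scanline is not shown here. Assuming the last argument
 * is the vertical phase in ninths (0..8, from (4 * i) % 9 above), a
 * plausible sketch is a straight linear blend of the two horizontally
 * upscaled lines; the real code likely uses the tables built by
 * build_lookup_tables( ) instead of dividing per byte:
 */
#include <cstdint>

static void interpolate_scanline(uint8_t *out, const uint8_t *line_1,
        const uint8_t *line_2, int phase) {
    /* output sits phase/9 of the way from line_1 down to line_2;
       2*1920 bytes = one packed 4:2:2 scanline at 1920 pixels */
    for (int x = 0; x < 2*1920; x++) {
        out[x] = ((9 - phase) * line_1[x] + phase * line_2[x]) / 9;
    }
}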
int main(int argc, char **argv) {
    if (argc > 1 && strcmp(argv[1], "-n") == 0) {
        cpu_force_no_simd( );
    }

    /* Read 1080p UYVY422 frames on stdin; run each through an M-JPEG
       encode/decode round trip and dump the decoded frames on stdout. */
    RawFrame frame(1920, 1080, RawFrame::CbYCrY8422);
    Mjpeg422Encoder enc(1920, 1080);
    Mjpeg422Decoder dec(1920, 1080);
    ssize_t ret;
    std::string comment;

    for (;;) {
        ret = frame.read_from_fd(STDIN_FILENO);

        if (ret < 0) {
            perror("frame.read_from_fd");
            exit(1);
        } else if (ret == 0) {
            break;
        } else {
            enc.set_comment("Hello JPEG world!");
            enc.encode(&frame);

            RawFrame *out = dec.decode(enc.get_data( ), enc.get_data_size( ));

            comment = " ";
            dec.get_comment(comment);
            fprintf(stderr, "comment: %s\n", comment.c_str( ));

            if (out->write_to_fd(STDOUT_FILENO) < 0) {
                perror("write_to_fd");
                break;
            }

            delete out;
        }
    }
}
RawFrame *FreetypeFont::render_string(const char *string) {
    int x;
    RawFrame *ret;
    FT_GlyphSlot slot = face->glyph;
    FT_Bool use_kerning = FT_HAS_KERNING(face);
    FT_UInt glyph_index, previous;
    uint8_t *glyph_scanline;

    x = 0;
    previous = 0;

    /* first compute the size of the resulting image */
    const char *scan_ptr = string;
    while (*scan_ptr != '\0') {
        glyph_index = FT_Get_Char_Index(face, *scan_ptr);
        scan_ptr++;

        if (use_kerning && previous != 0 && glyph_index != 0) {
            FT_Vector delta;
            FT_Get_Kerning(face, previous, glyph_index,
                    FT_KERNING_DEFAULT, &delta);
            x += delta.x / 64;
        }

        FTCHK(FT_Load_Glyph(face, glyph_index, FT_LOAD_DEFAULT));
        x += slot->advance.x / 64;

        previous = glyph_index;
    }

    /* initialize a raw frame */
    ret = new RawFrame(x, _h, RawFrame::BGRAn8);

    /* second pass: draw it */
    scan_ptr = string;
    int xd = 0;
    previous = 0;

    /* fill the frame with the background color */
    uint8_t *dest_scanline = ret->data( );
    for (unsigned int i = 0; i < ret->size( ); i += 4) {
        dest_scanline[i] = bb;
        dest_scanline[i+1] = gb;
        dest_scanline[i+2] = rb;
        dest_scanline[i+3] = ab;
    }

    while (*scan_ptr != '\0') {
        glyph_index = FT_Get_Char_Index(face, *scan_ptr);
        scan_ptr++;

        if (use_kerning && previous != 0 && glyph_index != 0) {
            FT_Vector delta;
            FT_Get_Kerning(face, previous, glyph_index,
                    FT_KERNING_DEFAULT, &delta);
            xd += delta.x / 64;
        }

        FTCHK(FT_Load_Glyph(face, glyph_index, FT_LOAD_RENDER));

        //int yd = -(slot->bitmap_top);
        int yd = _baseline - slot->bitmap_top;

        for (unsigned int y = 0; y < slot->bitmap.rows && yd < _h;
                y++, yd++) {
            if (yd >= 0) {
                glyph_scanline = ((uint8_t *)slot->bitmap.buffer)
                        + slot->bitmap.pitch * y;
                dest_scanline = ret->scanline(yd) + 4*xd;
                int xd2 = xd;

                /* blend between foreground and background colors
                   using the glyph coverage as alpha */
                for (unsigned int gx = 0;
                        gx < slot->bitmap.width && xd2 < ret->w( );
                        gx++, xd2++) {
                    dest_scanline[0] = (bf * glyph_scanline[gx]
                            + bb * (255 - glyph_scanline[gx])) / 255;
                    dest_scanline[1] = (gf * glyph_scanline[gx]
                            + gb * (255 - glyph_scanline[gx])) / 255;
                    dest_scanline[2] = (rf * glyph_scanline[gx]
                            + rb * (255 - glyph_scanline[gx])) / 255;
                    dest_scanline[3] = (af * glyph_scanline[gx]
                            + ab * (255 - glyph_scanline[gx])) / 255;
                    dest_scanline += 4;
                }
            }
        }

        xd += slot->advance.x / 64;
        previous = glyph_index;
    }

    return ret;
}
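/*
 * FTCHK is assumed to be an error-checking wrapper around FreeType
 * calls that return FT_Error. A minimal sketch of such a macro:
 */
#include <stdexcept>

#define FTCHK(expr) \
    do { \
        FT_Error ftchk_err = (expr); \
        if (ftchk_err != 0) { \
            throw std::runtime_error("FreeType call failed: " #expr); \
        } \
    } while (0)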
void KeyerApp::run( ) {
    RawFrame *frame = NULL;
    RawFrame *cgout = NULL;
    IOAudioPacket *audio = NULL;

    if (iadp == NULL) {
        throw std::runtime_error("cannot run with no input adapter");
    }

    if (oadps.size( ) == 0) {
        throw std::runtime_error("cannot run with no output adapter");
    }

    if (cgs.size( ) == 0) {
        fprintf(stderr, "Keyer warning: no CGs, just passing through video\n");
    }

    /* main loop */
    try {
        iadp->start( );

        for (;;) {
            /* get incoming frame */
            frame = iadp->output_pipe( ).get( );

            if (iadp->audio_output_pipe( ) != NULL) {
                audio = iadp->audio_output_pipe( )->get( );
            }

            clear_all_flags( ); /* we haven't keyed anything yet */
            std::vector<RawFrame *> stale_frames(oadps.size( ));

            for (unsigned int j = 0; j < oadps.size( ); j++) {
                /* get overlay from each CG */
                for (unsigned int i = 0; i < cgs.size( ); i++) {
                    CharacterGenerator *cg = cgs[i];

                    /*
                     * dirty_level determines which outputs will
                     * get keyed with this CG's output,
                     * so skip this CG if its dirty level is higher
                     * than that of the output.
                     * (For now, the output dirty level is just the
                     * order in which the output was added.)
                     *
                     * Also, if the key was already added in an
                     * earlier pass, skip it.
                     */
                    if (cg->dirty_level( ) > j || flags[i]) {
                        continue;
                    }

#if 0
                    if (!cg->output_pipe( ).data_ready( )) {
                        if (stale_frames[i] == NULL) {
                            fprintf(stderr, "not keying this frame "
                                    "on account of staleness\n");
                            continue;
                        } else {
                            cgout = stale_frames[i];
                        }
                    } else {
                        delete stale_frames[i];
                        cgout = cg->output_pipe( ).get( );
                        stale_frames[i] = cgout;
                    }
#endif
                    if (!cg->output_pipe( ).data_ready( )) {
                        fprintf(stderr, "no frame ready from keyer %u, "
                                "skipping\n", i);
                        continue;
                    } else {
                        cgout = cg->output_pipe( ).get( );
                    }

                    /*
                     * If no overlay is being rendered by this CG right
                     * now, the CG will output a NULL frame. We can
                     * safely ignore those.
                     */
                    if (cgout != NULL && cgout->global_alpha( ) != 0) {
                        frame->draw->alpha_key(cg->x( ), cg->y( ), cgout,
                                cgout->global_alpha( ));
                    }

                    delete cgout;

                    /*
                     * mark this CG as "done" so we don't waste
                     * time on subsequent passes
                     */
                    flags[i] = true;
                }

                /* Lastly, send output to the output adapter. */
                oadps[j]->input_pipe( ).put(frame->convert->CbYCrY8422( ));

                if (oadps[j]->audio_input_pipe( ) != NULL && audio != NULL) {
                    oadps[j]->audio_input_pipe( )->put(audio->copy<int16_t>( ));
                }
            }

            delete frame;
            if (audio != NULL) {
                delete audio;
            }
        }
    } catch (BrokenPipe &) {
        fprintf(stderr, "Unexpected component shutdown\n");
    }
}
virtual HRESULT VideoInputFrameArrived(IDeckLinkVideoInputFrame *in,
        IDeckLinkAudioInputPacket *audio_in) {
    RawFrame *out;
    IOAudioPacket *audio_out;
    void *data;

    if (in == NULL && audio_in == NULL) {
        fprintf(stderr, "VideoInputFrameArrived got nothing??\n");
    }

    /* Process video frame if available. */
    if (in != NULL) {
        if (in->GetFlags( ) & bmdFrameHasNoInputSource) {
            if (!signal_lost) {
                fprintf(stderr, "DeckLink input: signal lost\n");
                signal_lost = true;
            }
        } else {
            if (signal_lost) {
                fprintf(stderr, "DeckLink input: signal re-acquired\n");
                signal_lost = false;
            }
        }

        /*
         * we can't actually tell the card to just not send video,
         * so if video is disabled, we just ignore the video frames...
         */
        if (enable_video) {
            out = create_raw_frame_from_decklink(in, pf, rotate);
            out->set_field_dominance(dominance);

            if (out_pipe.can_put( ) && started) {
                out_pipe.put(out);
                avsync++;
            } else {
                /* if we drop a video frame, drop some audio later
                   to stay in sync */
                fprintf(stderr, "DeckLink: dropping input frame\n");
                drop_audio++;
                delete out;
            }
        }
    }

    /* Process audio, if available. */
    if (audio_in != NULL && audio_pipe != NULL) {
        audio_out = new IOAudioPacket(
            audio_in->GetSampleFrameCount( ), n_channels
        );

        if (audio_in->GetBytes(&data) != S_OK) {
            throw std::runtime_error(
                "DeckLink audio input: GetBytes failed"
            );
        }

        memcpy(audio_out->data( ), data, audio_out->size_bytes( ));

        if (drop_audio > 0) {
            fprintf(stderr, "DeckLink: dropping some audio "
                    "to get back in sync\n");
            delete audio_out;
            drop_audio--;
        } else if (audio_pipe->can_put( ) && started) {
            audio_pipe->put(audio_out);
            avsync--;
        } else {
            fprintf(stderr, "DeckLink: dropping input AudioPacket\n");
            delete audio_out;
        }
    }

    if ( (avsync > 10 || avsync < -10)
            && audio_pipe != NULL && enable_video ) {
        fprintf(stderr, "DeckLink warning: avsync drift = %d\n", avsync);
    }

    return S_OK;
}
int ReplayPlayoutLavfSource::run_lavc( ) {
    AVPacket packet;
    int frame_finished = 0;
    int audio_finished = 0;

    /*
     * read the stream until we get a video frame,
     * possibly also decoding some audio along the way
     */
    while (frame_finished == 0 && audio_finished == 0
            && av_read_frame(format_ctx, &packet) >= 0) {
        if (packet.stream_index == video_stream) {
            avcodec_decode_video2(video_codecctx, lavc_frame,
                    &frame_finished, &packet);
        } else if (packet.stream_index == audio_stream) {
            avcodec_decode_audio4(audio_codecctx, audio_frame,
                    &audio_finished, &packet);
        }

        av_free_packet(&packet);
    }

    if (frame_finished) {
        /* make a RawFrame out of lavc_frame */
        RawFrame *fr = new RawFrame(1920, 1080, RawFrame::CbYCrY8422);

        switch (lavc_frame->format) {
            case AV_PIX_FMT_YUVJ422P:
            case AV_PIX_FMT_YUV422P:
                fr->pack->YCbCr8P422(
                    lavc_frame->data[0],
                    lavc_frame->data[1],
                    lavc_frame->data[2],
                    lavc_frame->linesize[0],
                    lavc_frame->linesize[1],
                    lavc_frame->linesize[2]
                );
                break;
            case AV_PIX_FMT_UYVY422:
                /* already packed CbYCrY; just copy it over */
                memcpy(fr->data( ), lavc_frame->data[0], fr->size( ));
                break;
            case AV_PIX_FMT_YUV422P10LE:
                fr->pack->YCbCr10P422(
                    (uint16_t *)lavc_frame->data[0],
                    (uint16_t *)lavc_frame->data[1],
                    (uint16_t *)lavc_frame->data[2],
                    lavc_frame->linesize[0] / 2,
                    lavc_frame->linesize[1] / 2,
                    lavc_frame->linesize[2] / 2
                );
                break;
            case AV_PIX_FMT_YUV420P:
                fr->pack->YCbCr8P420(
                    lavc_frame->data[0],
                    lavc_frame->data[1],
                    lavc_frame->data[2],
                    lavc_frame->linesize[0],
                    lavc_frame->linesize[1],
                    lavc_frame->linesize[2]
                );
                break;
            default:
                fprintf(stderr, "ReplayPlayoutLavfSource doesn't know how "
                        "to handle AVPixelFormat %d\n", lavc_frame->format);
                memset(fr->data( ), 128, fr->size( ));
                break;
        }

        pending_video_frames.push_back(fr);
        return 1;
    } else if (audio_finished) {
        PackedAudioPacket<int16_t> apkt(
            audio_frame->nb_samples,
            audio_codecctx->channels
        );

        if (audio_codecctx->sample_fmt == AV_SAMPLE_FMT_S16) {
            memcpy(apkt.data( ), audio_frame->data[0], apkt.size_bytes( ));
        } else if (audio_codecctx->sample_fmt == AV_SAMPLE_FMT_FLTP) {
            /* convert planar float (from AAC) to signed 16-bit */
            copy_fltp(audio_frame, apkt);
        } else {
            fprintf(stderr, "sample_fmt=%d\n", audio_codecctx->sample_fmt);
            throw std::runtime_error("don't understand sample format");
        }

        if (audio_codecctx->sample_rate != 48000) {
            throw std::runtime_error("need 48khz");
        }

        if (audio_codecctx->channels != 2) {
            /* mix down to 2 channels if needed */
            PackedAudioPacket<int16_t> *twoch = apkt.change_channels(2);
            pending_audio.add_packet(twoch);
            delete twoch;
        } else {
            pending_audio.add_packet(&apkt);
        }

        return 1;
    } else {
        return 0;
    }
}
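/*
 * copy_fltp is not shown here. A minimal sketch, assuming it
 * interleaves libavcodec's planar float samples into the packed
 * int16_t audio packet with simple clipping (the channel-count
 * accessor name on PackedAudioPacket is an assumption):
 */
static void copy_fltp(AVFrame *audio_frame,
        PackedAudioPacket<int16_t> &apkt) {
    int channels = apkt.channels( );   /* accessor name assumed */
    int16_t *out = apkt.data( );

    for (int i = 0; i < audio_frame->nb_samples; i++) {
        for (int ch = 0; ch < channels; ch++) {
            float s = ((float *)audio_frame->data[ch])[i];

            /* clip to [-1, 1] and scale to the int16 range */
            if (s > 1.0f) s = 1.0f;
            if (s < -1.0f) s = -1.0f;
            out[i * channels + ch] = (int16_t)(s * 32767.0f);
        }
    }
}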