void for_loop_finish_iteration(Stack* context)
{
    Frame* frame = top_frame(context);
    Branch* contents = frame->branch;

    // Find list length
    caValue* listInput = get_frame_register(frame, 0);

    // Increment the loop index
    caValue* index = get_frame_register(frame, for_loop_find_index(contents)->index);
    set_int(index, as_int(index) + 1);

    // Check if we are finished
    if (as_int(index) >= list_length(listInput)) {
        frame->loop = false;
        finish_frame(context);
        return;
    }

    // If we're not finished yet, copy rebound outputs back to inputs.
    for (int i=1;; i++) {
        Term* input = get_input_placeholder(contents, i);
        if (input == NULL)
            break;
        Term* output = get_output_placeholder(contents, i);
        copy(get_frame_register(frame, output->index),
             get_frame_register(frame, input->index));
    }

    // Return to start of loop body
    frame->pc = 0;
    frame->nextPc = 0;
}
/* make sure that the current frame is of the type desired, and has sufficient
   space to add at least need_bytes bytes -- finishes the current frame if
   needed */
static void ensure_space(framer_state *st, size_t need_bytes) {
  if (st->output->length - st->output_length_at_start_of_frame + need_bytes <=
      st->max_frame_size) {
    return;
  }
  finish_frame(st, 0, 0);
  begin_frame(st);
}
/* make sure that the current frame is of the type desired, and has sufficient
   space to add at least need_bytes bytes -- finishes the current frame if
   needed */
static void ensure_space(framer_state *st, size_t need_bytes) {
  if (st->output->length - st->output_length_at_start_of_frame + need_bytes <=
      GRPC_CHTTP2_MAX_PAYLOAD_LENGTH) {
    return;
  }
  finish_frame(st, 0, 0);
  begin_frame(st);
}
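The arithmetic in both ensure_space variants tracks how many bytes have been appended to the output buffer since the current frame was begun, and starts a new frame once the next addition would overflow the per-frame limit. A standalone sketch of the same check, with the framer fields replaced by plain parameters (the function and parameter names are illustrative, not part of the gRPC API):

#include <stdbool.h>
#include <stddef.h>

/* Returns true when need_bytes more bytes would still fit into the current
   frame, given the total output length now and the output length recorded
   when the frame was begun. */
static bool fits_in_current_frame(size_t output_length,
                                  size_t output_length_at_start_of_frame,
                                  size_t need_bytes,
                                  size_t max_frame_size)
{
    size_t used_in_frame = output_length - output_length_at_start_of_frame;
    return used_in_frame + need_bytes <= max_frame_size;
}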
int main(int argc, char **argv)
{
    SDL_Surface *screen, *background, *pause_text, *press_enter_text, *game_over_text;
    const SDL_VideoInfo *video_info;
    Uint32 frame_start, frame_end = 0, game_frame_end = 0;

    systems_init();

    screen = SDL_SetVideoMode(800, 600, 0, SDL_HWSURFACE|SDL_DOUBLEBUF);
    video_info = SDL_GetVideoInfo();

    create_images(&press_enter_text, &pause_text, &game_over_text, &background);
    create_entities();

    for(;;) {
        start_frame(&frame_start, &frame_end);

        if(check_SDL_events())
            break;

        draw_background(background, screen, video_info);
        update_entities(game_frame_end);

        if(is_paused()) {
            if(is_game_over()) {
                draw_centered(screen, video_info, game_over_text);
            } else if(is_started()) {
                draw_centered(screen, video_info, pause_text);
            } else {
                draw_centered(screen, video_info, press_enter_text);
            }
        }

        SDL_Flip(screen);

        finish_frame(&frame_start, &frame_end, &game_frame_end);
    }

    SDL_FreeSurface(background);
    SDL_FreeSurface(pause_text);
    SDL_FreeSurface(press_enter_text);
    SDL_FreeSurface(game_over_text);

    systems_shutdown();

    return 0;
}
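The start_frame and finish_frame helpers used in this main loop are project-specific and not shown here. A minimal sketch of how such a pair could be implemented on top of SDL 1.2 timing calls, assuming a fixed target frame rate; the TARGET_FPS constant and the exact bookkeeping of the three tick variables are assumptions, not the original implementation:

#include <SDL/SDL.h>

#define TARGET_FPS 60
#define FRAME_MS   (1000 / TARGET_FPS)

/* Record the tick count at the top of the frame. */
static void start_frame(Uint32 *frame_start, Uint32 *frame_end)
{
    (void)frame_end;  /* kept only for signature parity with the caller */
    *frame_start = SDL_GetTicks();
}

/* Sleep out the remainder of the frame budget, then record when the frame
   actually ended so the game logic can measure elapsed time. */
static void finish_frame(Uint32 *frame_start, Uint32 *frame_end,
                         Uint32 *game_frame_end)
{
    Uint32 elapsed = SDL_GetTicks() - *frame_start;
    if (elapsed < FRAME_MS)
        SDL_Delay(FRAME_MS - elapsed);
    *frame_end = SDL_GetTicks();
    *game_frame_end = *frame_end;
}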
static void add_header_data(framer_state *st, gpr_slice slice) {
  size_t len = GPR_SLICE_LENGTH(slice);
  size_t remaining;
  if (len == 0) return;
  remaining = st->max_frame_size + st->output_length_at_start_of_frame -
              st->output->length;
  if (len <= remaining) {
    st->stats->header_bytes += len;
    gpr_slice_buffer_add(st->output, slice);
  } else {
    st->stats->header_bytes += remaining;
    gpr_slice_buffer_add(st->output, gpr_slice_split_head(&slice, remaining));
    finish_frame(st, 0, 0);
    begin_frame(st);
    add_header_data(st, slice);
  }
}
void grpc_chttp2_encode_header(grpc_exec_ctx *exec_ctx,
                               grpc_chttp2_hpack_compressor *c,
                               uint32_t stream_id,
                               grpc_metadata_batch *metadata, int is_eof,
                               size_t max_frame_size,
                               grpc_transport_one_way_stats *stats,
                               grpc_slice_buffer *outbuf) {
  framer_state st;
  grpc_linked_mdelem *l;
  gpr_timespec deadline;

  GPR_ASSERT(stream_id != 0);

  st.seen_regular_header = 0;
  st.stream_id = stream_id;
  st.output = outbuf;
  st.is_first_frame = 1;
  st.stats = stats;
  st.max_frame_size = max_frame_size;

  /* Encode a metadata batch; store the returned values, representing
     a metadata element that needs to be unreffed back into the metadata
     slot. THIS MAY NOT BE THE SAME ELEMENT (if a decoder table slot got
     updated). After this loop, we'll do a batch unref of elements. */
  begin_frame(&st);
  if (c->advertise_table_size_change != 0) {
    emit_advertise_table_size_change(c, &st);
  }
  grpc_metadata_batch_assert_ok(metadata);
  for (l = metadata->list.head; l; l = l->next) {
    hpack_enc(exec_ctx, c, l->md, &st);
  }
  deadline = metadata->deadline;
  if (gpr_time_cmp(deadline, gpr_inf_future(deadline.clock_type)) != 0) {
    deadline_enc(exec_ctx, c, deadline, &st);
  }

  finish_frame(&st, 1, is_eof);
}
void Renderer::renderclient(Mailbox in) {
  FpsCounter fpsc(20);
  fpsc.start();

  for (auto& cmd : gua::concurrent::pull_items_range<Item, Mailbox>(in)) {
    auto window_name(cmd.serialized_cam->config.get_output_window_name());
    if (window_name != "") {
      auto window = WindowDatabase::instance()->lookup(window_name);
      if (window && !window->get_is_open()) {
        window->open();
      }
      // update window if one is assigned
      if (window && window->get_is_open()) {
        window->set_active(true);
        window->start_frame();
        if (window->get_context()->framecount == 0) {
          display_loading_screen(*window);
        }

        // make sure pipeline was created
        std::shared_ptr<Pipeline> pipe = nullptr;
        auto pipe_iter = window->get_context()->render_pipelines.find(
            cmd.serialized_cam->uuid);
        if (pipe_iter == window->get_context()->render_pipelines.end()) {
          pipe = std::make_shared<Pipeline>(
              *window->get_context(),
              cmd.serialized_cam->config.get_resolution());
          window->get_context()->render_pipelines.insert(
              std::make_pair(cmd.serialized_cam->uuid, pipe));
        } else {
          pipe = pipe_iter->second;
        }

        window->rendering_fps = fpsc.fps;

        if (cmd.serialized_cam->config.get_enable_stereo()) {
          if (window->config.get_stereo_mode() == StereoMode::NVIDIA_3D_VISION) {
#ifdef GUACAMOLE_ENABLE_NVIDIA_3D_VISION
            if ((window->get_context()->framecount % 2) == 0) {
              auto img(pipe->render_scene(CameraMode::LEFT,
                                          *cmd.serialized_cam,
                                          *cmd.scene_graphs));
              if (img) window->display(img, true);
            } else {
              auto img(pipe->render_scene(CameraMode::RIGHT,
                                          *cmd.serialized_cam,
                                          *cmd.scene_graphs));
              if (img) window->display(img, false);
            }
#else
            Logger::LOG_WARNING << "guacamole has not been compiled with "
                                   "NVIDIA 3D Vision support!" << std::endl;
#endif
          } else {
            // TODO: add alternate frame rendering here? -> take clear and
            // render methods
            auto img(pipe->render_scene(CameraMode::LEFT,
                                        *cmd.serialized_cam,
                                        *cmd.scene_graphs));
            if (img) window->display(img, true);
            img = pipe->render_scene(CameraMode::RIGHT,
                                     *cmd.serialized_cam,
                                     *cmd.scene_graphs);
            if (img) window->display(img, false);
          }
        } else {
          auto img(pipe->render_scene(cmd.serialized_cam->config.get_mono_mode(),
                                      *cmd.serialized_cam, *cmd.scene_graphs));
          if (img) window->display(img,
              cmd.serialized_cam->config.get_mono_mode() != CameraMode::RIGHT);
        }

        pipe->clear_frame_cache();

        // swap buffers
        window->finish_frame();
        ++(window->get_context()->framecount);

        fpsc.step();
      }
    }
  }
}
void Renderer::draw_single_threaded(
    std::vector<SceneGraph const*> const& scene_graphs) {
  for (auto graph : scene_graphs) {
    graph->update_cache();
  }

  auto sgs = garbage_collected_copy(scene_graphs);

  for (auto graph : scene_graphs) {
    for (auto& cam : graph->get_camera_nodes()) {
      auto window_name(cam->config.get_output_window_name());
      auto serialized_cam(cam->serialize());
      if (window_name != "") {
        auto window = WindowDatabase::instance()->lookup(window_name);
        if (window && !window->get_is_open()) {
          window->open();
        }
        // update window if one is assigned
        if (window && window->get_is_open()) {
          window->set_active(true);
          window->start_frame();
          if (window->get_context()->framecount == 0) {
            display_loading_screen(*window);
          }

          // make sure pipeline was created
          std::shared_ptr<Pipeline> pipe = nullptr;
          auto pipe_iter = window->get_context()->render_pipelines.find(
              serialized_cam.uuid);
          if (pipe_iter == window->get_context()->render_pipelines.end()) {
            pipe = std::make_shared<Pipeline>(
                *window->get_context(),
                serialized_cam.config.get_resolution());
            window->get_context()->render_pipelines.insert(
                std::make_pair(serialized_cam.uuid, pipe));
          } else {
            pipe = pipe_iter->second;
          }

          window->rendering_fps = application_fps_.fps;

          if (serialized_cam.config.get_enable_stereo()) {
            if (window->config.get_stereo_mode() ==
                StereoMode::NVIDIA_3D_VISION) {
#ifdef GUACAMOLE_ENABLE_NVIDIA_3D_VISION
              if ((window->get_context()->framecount % 2) == 0) {
                auto img(pipe->render_scene(CameraMode::LEFT,
                                            serialized_cam, *sgs));
                if (img) window->display(img, true);
              } else {
                auto img(pipe->render_scene(CameraMode::RIGHT,
                                            serialized_cam, *sgs));
                if (img) window->display(img, false);
              }
#else
              Logger::LOG_WARNING << "guacamole has not been compiled with "
                                     "NVIDIA 3D Vision support!" << std::endl;
#endif
            } else {
              auto img(pipe->render_scene(CameraMode::LEFT,
                                          serialized_cam, *sgs));
              if (img) window->display(img, true);
              img = pipe->render_scene(CameraMode::RIGHT,
                                       serialized_cam, *sgs);
              if (img) window->display(img, false);
            }
          } else {
            auto img(pipe->render_scene(serialized_cam.config.get_mono_mode(),
                                        serialized_cam, *sgs));
            if (img) window->display(img,
                serialized_cam.config.get_mono_mode() != CameraMode::RIGHT);
          }

          pipe->clear_frame_cache();

          // swap buffers
          window->finish_frame();
          ++(window->get_context()->framecount);
        }
      }
    }
  }
  application_fps_.step();
}
void for_loop_finish_iteration(Stack* stack, bool enableLoopOutput)
{
    INCREMENT_STAT(LoopFinishIteration);

    Frame* frame = top_frame(stack);
    Branch* contents = frame->branch;

    // Find list length
    caValue* listInput = get_frame_register(frame, 0);

    // Increment the loop index
    caValue* index = get_top_register(stack, for_loop_find_index(contents));
    set_int(index, as_int(index) + 1);

    // Preserve list output
    if (enableLoopOutput && frame->exitType != name_Discard) {
        caValue* outputIndex = get_frame_register(frame, for_loop_find_output_index(contents));
        Term* outputPlaceholder = get_output_placeholder(contents, 0);
        caValue* outputList = get_frame_register(frame, outputPlaceholder);
        caValue* outputValue = find_stack_value_for_term(stack, outputPlaceholder->input(0), 0);

        if (!is_list(outputList))
            set_list(outputList);
        list_touch(outputList);
        copy(outputValue, list_get(outputList, as_int(outputIndex)));

        INCREMENT_STAT(LoopWriteOutput);

        // Advance output index
        set_int(outputIndex, as_int(outputIndex) + 1);
    }

    // Check if we are finished
    if (as_int(index) >= list_length(listInput)
            || frame->exitType == name_Break
            || frame->exitType == name_Return) {

        // Possibly truncate output list, in case any elements were discarded.
        if (enableLoopOutput) {
            caValue* outputIndex = get_frame_register(frame, for_loop_find_output_index(contents));
            Term* outputPlaceholder = get_output_placeholder(contents, 0);
            caValue* outputList = get_frame_register(frame, outputPlaceholder);
            list_resize(outputList, as_int(outputIndex));
        } else {
            Term* outputPlaceholder = get_output_placeholder(contents, 0);
            caValue* outputList = get_frame_register(frame, outputPlaceholder);
            set_list(outputList, 0);
        }

        finish_frame(stack);
        return;
    }

    // If we're not finished yet, copy rebound outputs back to inputs.
    for (int i=1;; i++) {
        Term* input = get_input_placeholder(contents, i);
        if (input == NULL)
            break;
        Term* output = get_output_placeholder(contents, i);
        copy(get_frame_register(frame, output), get_frame_register(frame, input));

        INCREMENT_STAT(Copy_LoopCopyRebound);
    }

    // Return to start of loop body
    frame->pc = 0;
    frame->nextPc = 0;
    frame->exitType = name_None;
}