void SliceEffect::set_gl_state(GLuint glsl_program_num, const string &prefix, unsigned *sampler_num)
{
	Effect::set_gl_state(glsl_program_num, prefix, sampler_num);

	unsigned out_width, out_height;
	get_output_size(&out_width, &out_height, &out_width, &out_height);

	// The uniforms only ever divide by the extent along the slicing axis,
	// so pick the relevant output/input extents once up front.
	const float output_extent = (direction == HORIZONTAL) ? float(out_width) : float(out_height);
	const float input_extent = (direction == HORIZONTAL) ? float(input_width) : float(input_height);

	uniform_output_coord_to_slice_num = output_extent / float(output_slice_size);
	uniform_slice_num_to_input_coord = float(input_slice_size) / input_extent;
	uniform_slice_offset_to_input_coord = float(output_slice_size) / input_extent;
	uniform_offset = float(offset) / input_extent;

	// Normalized coordinates could potentially cause blurring of the image.
	// It isn't critical, but still good practice.
	Node *self = chain->find_node_for_effect(this);
	glActiveTexture(chain->get_input_sampler(self, 0));
	check_error();
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
	check_error();
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
	check_error();
}
/* definitions */
/*
 * Thread entry point: compress one block of input with the "static" scheme.
 * io_struct is really an io*, carrying the input/output streams, the
 * dictionary queue (io_s->cqdict), the target block_size, and an isfinal
 * flag.  On exit the compressed-output size is stored in io_s->result and
 * the thread terminates via pthread_exit(NULL).
 */
void *static_deflate(void *io_struct) {
    io *io_s = (io *) io_struct;
    prepare_input_output(io_s);
    /* Look-ahead buffer for the data still to be matched against cqdict. */
    cyclic_queue *cqbuff = new_cyclic_queue(LEN_SIZE_Q, false);
    write_static_header(io_s);
    byte buff[LEN_MAX];
    /* Prime the look-ahead queue with up to LEN_MAX input elements. */
    size_t count = fread(buff, EL_SIZE, LEN_MAX, io_s->input);
    push_back_cyclic_queue(cqbuff, buff, count);
    byte front_b;
    size_t offset, length, last_count;
    while (!isempty_cyclic_queue(cqbuff)) {
        /* Find the longest dictionary match for the current look-ahead.
           NOTE(review): assumes search_cyclic_queue always sets *offset and
           *length (both are read unconditionally below) — confirm. */
        search_cyclic_queue(io_s->cqdict, cqbuff, &offset, &length);
        if (offset == 0 && length == 0) {
            /* No match: emit one literal and slide one element into the dictionary. */
            front_b = front_cyclic_queue(cqbuff);
            write_literal(io_s, front_b);
            move_front_cyclic_queue(cqbuff, io_s->cqdict, 1);
        } else {
            /* Match: emit a (length, offset) back-reference and slide it into the dictionary. */
            write_pointer(io_s, length, offset);
            move_front_cyclic_queue(cqbuff, io_s->cqdict, length);
        }
        /* A literal consumed exactly one element; refill by the amount consumed. */
        if (length == 0)
            length = 1;
        if (count < io_s->block_size) {
            /* Clamp the refill so the total read never exceeds block_size.
               NOTE(review): this sets length to the OVERSHOOT
               (count + length - block_size) rather than the REMAINDER
               (block_size - count) — looks suspicious; confirm intended. */
            if (count + length > io_s->block_size)
                length = count + length - io_s->block_size;
            last_count = fread(buff, EL_SIZE, length, io_s->input);
            push_back_cyclic_queue(cqbuff, buff, last_count);
            count += last_count;
        }
    }
    /* Record how much input actually made up this block (may be < requested on EOF). */
    io_s->block_size = count;
    write_end_of_block(io_s);
    /* Only the final block flushes any partially-filled output byte. */
    if (io_s->isfinal)
        byte_flush(io_s);
    delete_cyclic_queue(cqbuff);
    io_s->result = get_output_size(io_s);
    pthread_exit(NULL);
}
void run() { if (!setup_capture_source()) { std::cout << "Could not initialize capture source" << std::endl; return; } cv::Size output_size = get_output_size(); size_t full_image_dimensions = output_size.width * output_size.height * 3; m_framework = create_message_framework(m_direction, full_image_dimensions); if (m_framework == NULL) { std::cout << "Could not initialize message framework" << std::endl; return; } std::cout << "Set up camera. Starting image acquisition" << std::endl; long last_acq_time = 0; while (running) { std::experimental::optional<std::pair<cv::Mat, long>> next_image_op = acquire_next_image(); if (!next_image_op) { std::cout << "Error acquiring image! Trying again." << std::endl; continue; } cv::Mat next_image = next_image_op.value().first; long acq_time = next_image_op.value().second; write_frame(m_framework, next_image.data, acq_time, output_size.width, output_size.height, 3); long curr_time = get_time(); long time_since_last_acq = curr_time - last_acq_time; long min_acq_time = 1000 / m_max_fps; auto sleep_time = std::chrono::milliseconds(min_acq_time - time_since_last_acq); if (sleep_time > std::chrono::milliseconds(0)) { std::this_thread::sleep_for(sleep_time); } last_acq_time = acq_time; } destroy_capture_source(); std::cout << "Cleaned up capture source" << std::endl; cleanup_message_framework(m_framework); std::cout << "Cleaned up message framework" << std::endl; }
// Copies the completed task's output payload out of the worker thread's
// output buffer into caller-supplied memory. output_buffer must be at
// least get_output_size() bytes.
void ImpersonateSession::get_output(void* output_buffer) {
    const void* task_output = thread_->get_task_output_memory();
    std::memcpy(output_buffer, task_output, get_output_size());
}