Example #1
static int filter_frame(AVFilterLink *inlink, AVFrame *buf)
{
    AVFilterContext  *ctx = inlink->dst;
    MixContext       *s = ctx->priv;
    AVFilterLink *outlink = ctx->outputs[0];
    int i, ret = 0;

    for (i = 0; i < ctx->nb_inputs; i++)
        if (ctx->inputs[i] == inlink)
            break;
    if (i >= ctx->nb_inputs) {
        av_log(ctx, AV_LOG_ERROR, "unknown input link\n");
        ret = AVERROR(EINVAL);
        goto fail;
    }

    if (i == 0) {
        int64_t pts = av_rescale_q(buf->pts, inlink->time_base,
                                   outlink->time_base);
        ret = frame_list_add_frame(s->frame_list, buf->nb_samples, pts);
        if (ret < 0)
            goto fail;
    }

    ret = av_audio_fifo_write(s->fifos[i], (void **)buf->extended_data,
                              buf->nb_samples);

    av_frame_free(&buf);
    return output_frame(outlink);

fail:
    av_frame_free(&buf);

    return ret;
}
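A minimal sketch (an assumption, not part of the original filter) of how a filter_frame callback like the one above is attached to an audio input pad in libavfilter. The pad name is hypothetical, and a multi-input mixer like this one normally builds its input pads dynamically at init time, so a static pad table is shown purely for illustration:

static const AVFilterPad sketch_input_pads[] = {
    {
        .name         = "input0",            /* hypothetical pad name */
        .type         = AVMEDIA_TYPE_AUDIO,
        .filter_frame = filter_frame,        /* the callback shown above */
    },
};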
Example #2
static int request_frame(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    MixContext      *s = ctx->priv;
    int ret;
    int wanted_samples;

    ret = calc_active_inputs(s);
    if (ret < 0)
        return ret;

    if (!(s->input_state[0] & INPUT_ON))
        return request_samples(ctx, 1);

    if (s->frame_list->nb_frames == 0) {
        ret = ff_request_frame(ctx->inputs[0]);
        if (ret == AVERROR_EOF) {
            s->input_state[0] = 0;
            if (s->nb_inputs == 1)
                return AVERROR_EOF;
            return output_frame(ctx->outputs[0]);
        }
        return ret;
    }
    av_assert0(s->frame_list->nb_frames > 0);

    wanted_samples = frame_list_next_frame_size(s->frame_list);

    return request_samples(ctx, wanted_samples);
}
Example #3
/**
 * Requests a frame, if needed, from each input link other than the first.
 */
static int request_samples(AVFilterContext *ctx, int min_samples)
{
    MixContext *s = ctx->priv;
    int i, ret;

    av_assert0(s->nb_inputs > 1);

    for (i = 1; i < s->nb_inputs; i++) {
        ret = 0;
        if (!(s->input_state[i] & INPUT_ON))
            continue;
        if (av_audio_fifo_size(s->fifos[i]) >= min_samples)
            continue;
        ret = ff_request_frame(ctx->inputs[i]);
        if (ret == AVERROR_EOF) {
            s->input_state[i] |= INPUT_EOF;
            if (av_audio_fifo_size(s->fifos[i]) == 0) {
                s->input_state[i] = 0;
                continue;
            }
        } else if (ret < 0)
            return ret;
    }
    return output_frame(ctx->outputs[0]);
}
Example #4
static int fmt(uintptr_t pc, int sig, void *userdata)
{
    fmt_userdata_t *u = userdata;
    diag_backtrace_param_t *p = u->p;
    diag_output_t *o = u->o;
    int rc;
    Dl_info dlip = {0};

#ifdef BROKEN_SIGNAL_UCONTEXT_T
    if (u->skips) {
        --u->skips;
        return 0;
    }
#endif

    rc = dladdr1((void *)pc, &dlip, NULL, 0);
    if (rc != 0) {
        char buf[128];
        char addr_buf[20];
        char offset_buf[20];
        const char *module_path = dlip.dli_fname;
        const char *module = NULL;
        const char *function = dlip.dli_sname;

        module = module_path;
        if (module) {
            module = strrchr(module_path, '/');
            if (module) {
                module += 1;
            }
        }
        add_int(addr_buf, addr_buf + sizeof addr_buf - 1, (long long)pc, 16);
        add_int(offset_buf, offset_buf + sizeof offset_buf - 1,
                (long long)((char *)pc - (char *)dlip.dli_saddr), 16);

        output_frame(buf, buf + sizeof buf - 1,
                     p->backtrace_fields,
                     module_path, module, function,
                     offset_buf, addr_buf);

        if (o->output_mode == DIAG_CALL_FN) {
            o->output_fn(o->user_data, buf);
        }
        else {
            write(o->outfile, buf, strlen(buf));
            write(o->outfile, "\n", 1);
        }
    }
    else {
        /* printf("dladdr1 failed, errno %d\n", errno); */
    }

    ++u->cur;
    return u->cur >= u->count;
}
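A build note for the dladdr1() call above (general glibc facts, not taken from the original project): dladdr1() is a GNU extension, so _GNU_SOURCE must be defined before <dlfcn.h> is included, and glibc versions before 2.34 also need -ldl at link time. With NULL and 0 as the last two arguments, as used here, it resolves the same information as plain dladdr(). A minimal equivalent sketch:

/* Minimal sketch: the same lookup via the plain dladdr() interface. */
#define _GNU_SOURCE
#include <dlfcn.h>
#include <stdint.h>

static int resolve_pc(uintptr_t pc, Dl_info *dlip)
{
    return dladdr((const void *)pc, dlip);   /* nonzero on success, as with dladdr1() in fmt() */
}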
Example #5
/**
 * Requests a frame, if needed, from each input link other than the first.
 */
static int request_samples(AVFilterContext *ctx, int min_samples)
{
    MixContext *s = ctx->priv;
    int i;

    av_assert0(s->nb_inputs > 1);

    for (i = 1; i < s->nb_inputs; i++) {
        if (!(s->input_state[i] & INPUT_ON) ||
             (s->input_state[i] & INPUT_EOF))
            continue;
        if (av_audio_fifo_size(s->fifos[i]) >= min_samples)
            continue;
        ff_inlink_request_frame(ctx->inputs[i]);
    }
    return output_frame(ctx->outputs[0]);
}
Example #6
static void format_frameinfo(const char *s,
                             unsigned int fields,
                             char *buf,
                             size_t buf_size)
{
    char *outch = buf;
    char *lastoutch = buf + buf_size - 1;
    const char *module_path = NULL; /* not implemented */
    const char *module, *address, *function, *offset;

    /* skip over frame number to find module */
    module = s;
    while (!isspace(*module)) {
        ++module;
    }
    while (isspace(*module)) {
        ++module;
    }

    /* find address */
    address = strstr(module, "0x");

    /* find function */
    function = address;
    if (function) {
        while (!isspace(*function)) {
            ++function;
        }
        while (isspace(*function)) {
            ++function;
        }
    }

    /* find offset */
    offset = function;

    if (offset) {
        offset = strstr(function, " + ");
        if (offset) {
            offset += 3;
        }
    }

    output_frame(outch, lastoutch, fields, module_path,
                 module, function, offset, address);
}
Example #7
void *obs_video_thread(void *param)
{
	uint64_t last_time = 0;

	while (video_output_wait(obs->video.video)) {
		uint64_t cur_time = video_gettime(obs->video.video);

		tick_sources(cur_time, &last_time);

		render_displays();

		output_frame(cur_time);
	}

	UNUSED_PARAMETER(param);
	return NULL;
}
Example #8
/* 0x400ba7 <_init+807> at /usr/home/trawick/myhg/apache/mod/diag/testdiag */
static void format_frameinfo(const char *s,
                             unsigned int fields,
                             char *buf,
                             size_t buf_size)
{
    char *outch = buf;
    char *lastoutch = buf + buf_size - 1;
    const char *module_path, *module, *address, *function, *offset;

    address = s;

    function = address;
    function = strchr(function, '<');
    if (function) {
        function += 1;
    }

    offset = function;
    if (offset) {
        offset = strchr(offset, '+');
        if (offset) {
            offset += 1;
        }
    }

    module_path = offset;
    if (module_path) {
        module_path = strstr(module_path, " at ");
        if (module_path) {
            module_path += 4;
        }
    }

    module = module_path;
    if (module) {
        module = strrchr(module, '/');
        if (module) {
            module += 1;
        }
    }

    output_frame(outch, lastoutch, fields, module_path,
                 module, function, offset, address);
}
Example #9
void *obs_video_thread(void *param)
{
	uint64_t last_time = 0;
	uint64_t cur_time = os_gettime_ns();
	uint64_t interval = video_output_get_frame_time(obs->video.video);

	os_set_thread_name("libobs: graphics thread");

	while (!video_output_stopped(obs->video.video)) {
		last_time = tick_sources(cur_time, last_time);

		render_displays();

		output_frame(&cur_time, interval);
	}

	UNUSED_PARAMETER(param);
	return NULL;
}
Example #10
void *obs_video_thread(void *param)
{
	uint64_t last_time = 0;
	uint64_t interval = video_output_get_frame_time(obs->video.video);

	obs->video.video_time = os_gettime_ns();

	os_set_thread_name("libobs: graphics thread");

	const char *video_thread_name =
		profile_store_name(obs_get_profiler_name_store(),
			"obs_video_thread(%g ms)", interval / 1000000.);
	profile_register_root(video_thread_name, interval);

	while (!video_output_stopped(obs->video.video)) {
		profile_start(video_thread_name);

		profile_start(tick_sources_name);
		last_time = tick_sources(obs->video.video_time, last_time);
		profile_end(tick_sources_name);

		profile_start(render_displays_name);
		render_displays();
		profile_end(render_displays_name);

		profile_start(output_frame_name);
		output_frame();
		profile_end(output_frame_name);

		profile_end(video_thread_name);

		profile_reenable_thread();

		video_sleep(&obs->video, &obs->video.video_time, interval);
	}

	UNUSED_PARAMETER(param);
	return NULL;
}
Example #11
// 'slot' callback invoked when the timer times out.
// Takes the next frame, feeds it into the tracker, and displays
// the results in our Qt window.
void Tracker::nextFrame()
{
	// load frame.
	cv::Mat3b frame = getFrame();
	if (!frame.data)
	{
		return;
	}

	cv::Mat gray(frame);
	cv::cvtColor(frame, gray, CV_BGR2GRAY);

	if (frame_number % 20 == 0)
	{
		detectAndSeedTrackers(gray);
	}

	cv::Mat3b output_frame(frame.rows, frame.cols);
	frame.copyTo(output_frame);

	trackFrame(gray, output_frame);
	updateGUI(frame, output_frame);
	frame_number++;
}
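The comment above describes a timer-driven Qt slot. A minimal sketch (assumed wiring, not taken from the original source) of how such a slot is typically connected; the timer_ member and the 33 ms interval are hypothetical:

// Hypothetical setup in the Tracker constructor: call nextFrame() roughly 30 times per second.
connect(&timer_, &QTimer::timeout, this, &Tracker::nextFrame);
timer_.start(33);   // interval in milliseconds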
Example #12
int main(int argc, char **argv) {
  int i = 0, j = 0, addr = 0;
  int count = 10000;
  unsigned char frame[192] = {0};   /* three 64-byte color planes: green, red, blue */
  unsigned long delay;


  if (argc > 1) {
    delay = strtoul(argv[1], NULL, 0);
  } else {
    delay = 0;
  }

  srand(time(NULL));


  for (;;) {
    if (count > 2000) {
      count = 0;
      srand(time(NULL));

      for (i = 0; i < 48; i++) {
        for (j = 0; j < 24; j++) {
          alive[0][i][j] = rand()%2;
          neighbor_counts[0][i][j] = rand()%9;
        }
      }
    }
    count++;

    if (!(count % 2))
     step_sim();

    for (addr = 13; addr <= 30; addr++) {
      for (i = 0; i < 64; i++) {
        int xpos = FIRST_X(addr) + (i%8);
        int ypos = FIRST_Y(addr) + (i/8);

        int is_alive = alive[current_frame][xpos][ypos];
        int was_alive = alive[(1 - current_frame)][xpos][ypos];

        if (count % 2) {
          frame[i] = 0;
          frame[i+64] = 0;
          frame[i+128] = (is_alive) ? 0xF : 0;
        } else if (is_alive) {
          if (was_alive) {
            // blue
            frame[i] = 0;
            frame[64+i] = 0;
            frame[128+i] = 0xF;
          } else {
            // green
            frame[i] = 0xF;
            frame[64+i] = 0;
            frame[128+i] = 0;
          }
        } else {
          if (was_alive) {
            // red
            frame[i] = 0;
            frame[64+i] = 0xF;
            frame[128+i] = 0;
          } else {
            // black
            frame[i] = 0;
            frame[64+i] = 0;
            frame[128+i] = 0;
          }
        }
      }


      output_frame(addr, frame);

    }


    usleep(delay);
  }

}
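FIRST_X and FIRST_Y above are macros from the original project (not shown in this excerpt) that map a panel address in the range 13..30 to the top-left cell of that panel's 8x8 block inside the 48x24 grid. Purely as an illustration, one layout consistent with those bounds is six panels across by three down:

/* Hypothetical reconstruction -- the real macros may map addresses differently. */
#define FIRST_X(addr) ((((addr) - 13) % 6) * 8)   /* leftmost column of the panel */
#define FIRST_Y(addr) ((((addr) - 13) / 6) * 8)   /* topmost row of the panel     */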
Example #13
static int request_frame(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    MixContext      *s = ctx->priv;
    int ret;
    int wanted_samples, available_samples;

    ret = calc_active_inputs(s);
    if (ret < 0)
        return ret;

    if (s->input_state[0] == INPUT_OFF) {
        ret = request_samples(ctx, 1);
        if (ret < 0)
            return ret;

        ret = calc_active_inputs(s);
        if (ret < 0)
            return ret;

        available_samples = get_available_samples(s);
        if (!available_samples)
            return AVERROR(EAGAIN);

        return output_frame(outlink, available_samples);
    }

    if (s->frame_list->nb_frames == 0) {
        ret = ff_request_frame(ctx->inputs[0]);
        if (ret == AVERROR_EOF) {
            s->input_state[0] = INPUT_OFF;
            if (s->nb_inputs == 1)
                return AVERROR_EOF;
            else
                return AVERROR(EAGAIN);
        } else if (ret < 0)
            return ret;
    }
    av_assert0(s->frame_list->nb_frames > 0);

    wanted_samples = frame_list_next_frame_size(s->frame_list);

    if (s->active_inputs > 1) {
        ret = request_samples(ctx, wanted_samples);
        if (ret < 0)
            return ret;

        ret = calc_active_inputs(s);
        if (ret < 0)
            return ret;
    }

    if (s->active_inputs > 1) {
        available_samples = get_available_samples(s);
        if (!available_samples)
            return AVERROR(EAGAIN);
        available_samples = FFMIN(available_samples, wanted_samples);
    } else {
        available_samples = wanted_samples;
    }

    s->next_pts = frame_list_next_pts(s->frame_list);
    frame_list_remove_samples(s->frame_list, available_samples);

    return output_frame(outlink, available_samples);
}
Example #14
/* ./testdiag(diag_backtrace+0x75)[0x401824] */
static void format_frameinfo(const char *s,
                             unsigned int fields,
                             char *buf,
                             size_t buf_size)
{
    size_t s_len = strlen(s);
    char *outch = buf;
    char *lastoutch = buf + buf_size - 1;
    const char *lastslash, *firstparen, *firstbracket;
    const char *module_path, *module, *function, *offset, *address;
    
    lastslash = strrchr(s, '/');
    firstparen = strchr(s, '(');
    firstbracket = strchr(s, '[');
    
    if (!lastslash || !firstbracket) {
        /* format of string not recognized; just copy and get out */
        if (s_len < buf_size) {
            strcpy(buf, s);
        }
        else {
            memcpy(buf, s, buf_size - 1);
            buf[buf_size - 1] = 0;
        }
        return;
    }

    module_path = s;

    module = lastslash;
    if (module) {
        module += 1;
    }
    
    function = firstparen;
    if (function) {
        function += 1;
        if (*function == ')' || *function == '+') {
            /* here's one such scenario:
             * "/home/trawick/inst/24-64/modules/mod_backtrace.so(+0x2b6c) [0x7f2727df4b6c]"
             */
            function = NULL;
        }
    }

    offset = function;
    if (offset) {
        offset = strchr(function, '+');
        if (offset) {
            offset += 1;
        }
    }
    
    address = firstbracket;
    if (address) {
        address += 1;
    }
    
    output_frame(outch, lastoutch, fields, module_path,
                 module, function, offset, address);
}
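A usage sketch for the parser above, feeding it the backtrace_symbols()-style line quoted in the comment before the function; the fields value is a hypothetical placeholder for whatever DIAG_BTFIELDS_* mask a real caller would pass:

char out[256];
format_frameinfo("./testdiag(diag_backtrace+0x75)[0x401824]",
                 0 /* hypothetical: replace with a DIAG_BTFIELDS_* mask */,
                 out, sizeof out);
/* out then holds whichever of module ("testdiag"), function ("diag_backtrace"),
   offset ("0x75"), and address ("0x401824") output_frame() was asked to emit. */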
Example #15
void *obs_graphics_thread(void *param)
{
	uint64_t last_time = 0;
	uint64_t interval = video_output_get_frame_time(obs->video.video);
	uint64_t frame_time_total_ns = 0;
	uint64_t fps_total_ns = 0;
	uint32_t fps_total_frames = 0;

	obs->video.video_time = os_gettime_ns();

	os_set_thread_name("libobs: graphics thread");

	const char *video_thread_name =
		profile_store_name(obs_get_profiler_name_store(),
			"obs_graphics_thread(%g"NBSP"ms)", interval / 1000000.);
	profile_register_root(video_thread_name, interval);

	srand((unsigned int)time(NULL));

	while (!video_output_stopped(obs->video.video)) {
		uint64_t frame_start = os_gettime_ns();
		uint64_t frame_time_ns;

		profile_start(video_thread_name);

		profile_start(tick_sources_name);
		last_time = tick_sources(obs->video.video_time, last_time);
		profile_end(tick_sources_name);

		profile_start(render_displays_name);
		render_displays();
		profile_end(render_displays_name);

		profile_start(output_frame_name);
		output_frame();
		profile_end(output_frame_name);

		frame_time_ns = os_gettime_ns() - frame_start;

		profile_end(video_thread_name);

		profile_reenable_thread();

		video_sleep(&obs->video, &obs->video.video_time, interval);

		frame_time_total_ns += frame_time_ns;
		fps_total_ns += (obs->video.video_time - last_time);
		fps_total_frames++;

		if (fps_total_ns >= 1000000000ULL) {
			obs->video.video_fps = (double)fps_total_frames /
				((double)fps_total_ns / 1000000000.0);
			obs->video.video_avg_frame_time_ns =
				frame_time_total_ns / (uint64_t)fps_total_frames;

			frame_time_total_ns = 0;
			fps_total_ns = 0;
			fps_total_frames = 0;
		}
	}

	UNUSED_PARAMETER(param);
	return NULL;
}
Example #16
void View::output(string x) {
	if (x == "") {
		std::cout << "View: " << name << endl;
	}
	else {
		std::cout << "View: " << x << endl;
	}
	vector<int> longestEachCol;
	//pre-process
	int columnsSize = (int)columns.size();
	int i = 0;
	int row = 0;
	bool finish = false;
	while (i < columnsSize) {
		longestEachCol.push_back(columns[i].name.size());
		i++;
	}
	while (!finish) {
		i = 0;
		finish = true;
		while (i < columnsSize) {
			if (row < columns[i].spans.size()) {
				finish = false;
				int span_length = columns[i].spans[row].to - columns[i].spans[row].from;
				stringstream stream;
				string string_temp;
				stream << columns[i].spans[row].from;
				stream << columns[i].spans[row].to;
				string_temp = stream.str();
				span_length += string_temp.length();
				span_length += 6;
				if (span_length > longestEachCol[i]) {
					longestEachCol[i] = span_length;
				}
			}
			i++;
		}
		row++;
	}
	output_frame(columnsSize, longestEachCol);

	//output name
	i = 0;
	std::cout << "|";
	while (i < columnsSize) {
		std::cout << " " << columns[i].name;
		int j = 0;
		while (j < longestEachCol[i] - columns[i].name.size()-1) {
			std::cout << " ";
			j++;
		}
		std::cout << "|";
		i++;
	}
	std::cout << endl;
	
	output_frame(columnsSize, longestEachCol);

	i = 0;
	finish = false;
	row = 0;
	int rowSize = 0;   // stays 0 when there are no columns, so the row loop below is skipped
	if (columnsSize > 0) {
		rowSize = (int)columns[0].spans.size();
	}
	while (row < rowSize) {
		i = 0;
		std::cout << "|";
		finish = true;
		while (i < columnsSize) {
			if (row < columns[i].spans.size()) {
				stringstream stream_from;
				stringstream stream_to;
				string string_temp;
				stream_from << columns[i].spans[row].from;
				stream_to << columns[i].spans[row].to;
				string temp_span = " " + columns[i].spans[row].value + ":(" + stream_from.str() + "," + stream_to.str() +")";
				std::cout << temp_span;
				int j = 0;
				while (j < longestEachCol[i] - temp_span.size()) {
					std::cout << " ";
					j++;
				}
			}
			std::cout << "|";
			i++;
		}
		row++;
		std::cout << endl;
	}
	output_frame(columnsSize, longestEachCol);
	std::cout << rowSize << " rows in set" << endl << endl;
}
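The output_frame(columnsSize, longestEachCol) helper called three times above draws the horizontal rules around the header and the data rows; its body is not shown in this excerpt. A hypothetical sketch, sized to the per-column widths computed in the pre-processing loop:

// Hypothetical sketch -- the real View::output_frame may differ.
void View::output_frame(int columnsSize, vector<int> &longestEachCol) {
	std::cout << "+";
	for (int i = 0; i < columnsSize; i++) {
		for (int j = 0; j < longestEachCol[i]; j++) {
			std::cout << "-";
		}
		std::cout << "+";
	}
	std::cout << endl;
}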
Example #17
static int activate(AVFilterContext *ctx)
{
    AVFilterLink *outlink = ctx->outputs[0];
    MixContext *s = ctx->priv;
    AVFrame *buf = NULL;
    int i, ret;

    for (i = 0; i < s->nb_inputs; i++) {
        AVFilterLink *inlink = ctx->inputs[i];

        if ((ret = ff_inlink_consume_frame(ctx->inputs[i], &buf)) > 0) {
            if (i == 0) {
                int64_t pts = av_rescale_q(buf->pts, inlink->time_base,
                                           outlink->time_base);
                ret = frame_list_add_frame(s->frame_list, buf->nb_samples, pts);
                if (ret < 0) {
                    av_frame_free(&buf);
                    return ret;
                }
            }

            ret = av_audio_fifo_write(s->fifos[i], (void **)buf->extended_data,
                                      buf->nb_samples);
            if (ret < 0) {
                av_frame_free(&buf);
                return ret;
            }

            av_frame_free(&buf);

            ret = output_frame(outlink);
            if (ret < 0)
                return ret;
        }
    }

    for (i = 0; i < s->nb_inputs; i++) {
        int64_t pts;
        int status;

        if (ff_inlink_acknowledge_status(ctx->inputs[i], &status, &pts)) {
            if (status == AVERROR_EOF) {
                if (i == 0) {
                    s->input_state[i] = 0;
                    if (s->nb_inputs == 1) {
                        ff_outlink_set_status(outlink, status, pts);
                        return 0;
                    }
                } else {
                    s->input_state[i] |= INPUT_EOF;
                    if (av_audio_fifo_size(s->fifos[i]) == 0) {
                        s->input_state[i] = 0;
                    }
                }
            }
        }
    }

    if (calc_active_inputs(s)) {
        ff_outlink_set_status(outlink, AVERROR_EOF, s->next_pts);
        return 0;
    }

    if (ff_outlink_frame_wanted(outlink)) {
        int wanted_samples;

        if (!(s->input_state[0] & INPUT_ON))
            return request_samples(ctx, 1);

        if (s->frame_list->nb_frames == 0) {
            ff_inlink_request_frame(ctx->inputs[0]);
            return 0;
        }
        av_assert0(s->frame_list->nb_frames > 0);

        wanted_samples = frame_list_next_frame_size(s->frame_list);

        return request_samples(ctx, wanted_samples);
    }

    return 0;
}
Example #18
int diag_backtrace(diag_output_t *o, diag_backtrace_param_t *p, diag_context_t *c)
{
    char frame[128];
    char addr_buf[20];
    char offset_buf[20];
    char name_buf[80];
    char *name;
    const char *module_path, *module;
    int count, cur, rc;
    unw_context_t ctx;
    unw_cursor_t csr;
    unw_word_t ip, offp;
#if DIAG_PLATFORM_LINUX || DIAG_PLATFORM_FREEBSD || DIAG_PLATFORM_MACOSX
    Dl_info info;
#endif

    if (p->backtrace_count && p->backtrace_count < DIAG_BT_LIMIT) {
        count = p->backtrace_count;
    }
    else {
        count = DIAG_BT_LIMIT;
    }
    
    rc = unw_getcontext(&ctx);
    if (!rc) {
        rc = unw_init_local(&csr, &ctx);
    }

    if (rc) {
        return DIAG_ERR_INIT;
    }

    cur = 0;
    while ((rc = unw_step(&csr)) > 0) {

        cur++;
        if (cur > count) {
            break;
        }

        unw_get_reg(&csr, UNW_REG_IP, &ip);

        if (!ip) {
            break;
        }

        add_int(addr_buf, addr_buf + sizeof addr_buf - 1, ip, 16);

        rc = unw_get_proc_name(&csr, name_buf, sizeof name_buf, &offp);
        if (rc && rc != UNW_ENOMEM) {
            name = NULL;
        }
        else {
            name = name_buf;
        }

        module = module_path = NULL;
#if DIAG_PLATFORM_LINUX || DIAG_PLATFORM_FREEBSD || DIAG_PLATFORM_MACOSX
        if (p->backtrace_fields
            & (DIAG_BTFIELDS_MODULE_PATH | DIAG_BTFIELDS_MODULE_NAME)) {
            if ((rc = dladdr((void *)ip, &info)) != 0) {
                module_path = info.dli_fname;
                module = strrchr(module_path, '/');
                if (module) {
                    module += 1;
                }
            }
        }
#endif

        add_int(offset_buf, offset_buf + sizeof offset_buf - 1,
                offp, 16);
        output_frame(frame, frame + sizeof frame - 1,
                     p->backtrace_fields,
                     module_path, module, name, offset_buf, addr_buf);

        if (o->output_mode == DIAG_CALL_FN) {
            o->output_fn(o->user_data, frame);
        }
        else {
            write(o->outfile, frame, strlen(frame));
            write(o->outfile, "\n", 1);
        }
    }

    return 0;
}
Example #19
void *obs_graphics_thread(void *param)
{
	uint64_t last_time = 0;
	uint64_t interval = video_output_get_frame_time(obs->video.video);
	uint64_t frame_time_total_ns = 0;
	uint64_t fps_total_ns = 0;
	uint32_t fps_total_frames = 0;
	bool gpu_was_active = false;
	bool raw_was_active = false;
	bool was_active = false;

	obs->video.video_time = os_gettime_ns();

	os_set_thread_name("libobs: graphics thread");

	const char *video_thread_name =
		profile_store_name(obs_get_profiler_name_store(),
			"obs_graphics_thread(%g"NBSP"ms)", interval / 1000000.);
	profile_register_root(video_thread_name, interval);

	srand((unsigned int)time(NULL));

	while (!video_output_stopped(obs->video.video)) {
		uint64_t frame_start = os_gettime_ns();
		uint64_t frame_time_ns;
		bool raw_active = obs->video.raw_active > 0;
#ifdef _WIN32
		bool gpu_active = obs->video.gpu_encoder_active > 0;
#else
		const bool gpu_active = 0;
#endif
		bool active = raw_active || gpu_active;

		if (!was_active && active)
			clear_base_frame_data();
		if (!raw_was_active && raw_active)
			clear_raw_frame_data();
#ifdef _WIN32
		if (!gpu_was_active && gpu_active)
			clear_gpu_frame_data();
#endif
		raw_was_active = raw_active;
		gpu_was_active = gpu_active;
		was_active = active;

		profile_start(video_thread_name);

		profile_start(tick_sources_name);
		last_time = tick_sources(obs->video.video_time, last_time);
		profile_end(tick_sources_name);

		profile_start(output_frame_name);
		output_frame(raw_active, gpu_active);
		profile_end(output_frame_name);

		profile_start(render_displays_name);
		render_displays();
		profile_end(render_displays_name);

		frame_time_ns = os_gettime_ns() - frame_start;

		profile_end(video_thread_name);

		profile_reenable_thread();

		video_sleep(&obs->video, raw_active, gpu_active,
				&obs->video.video_time, interval);

		frame_time_total_ns += frame_time_ns;
		fps_total_ns += (obs->video.video_time - last_time);
		fps_total_frames++;

		if (fps_total_ns >= 1000000000ULL) {
			obs->video.video_fps = (double)fps_total_frames /
				((double)fps_total_ns / 1000000000.0);
			obs->video.video_avg_frame_time_ns =
				frame_time_total_ns / (uint64_t)fps_total_frames;

			frame_time_total_ns = 0;
			fps_total_ns = 0;
			fps_total_frames = 0;
		}
	}

	UNUSED_PARAMETER(param);
	return NULL;
}
Example #20
int diag_backtrace(diag_output_t *o, diag_backtrace_param_t *p, diag_context_t *c)
{
    int cur = 0, count;
    STACKFRAME64 stackframe;
    CONTEXT context;
    HANDLE process = GetCurrentProcess();
    HANDLE thread = GetCurrentThread();
    DWORD bytes_written;

    if (c) {
        context = *c->context;
    }
    else {
        RtlCaptureContext(&context);
    }

    if (p->backtrace_count && p->backtrace_count < DIAG_BT_LIMIT) {
        count = p->backtrace_count;
    }
    else {
        count = DIAG_BT_LIMIT;
    }

    memset(&stackframe, 0, sizeof stackframe);
    stackframe.AddrPC.Mode = 
        stackframe.AddrFrame.Mode =
            stackframe.AddrStack.Mode = AddrModeFlat;

#ifdef DIAG_BITS_64
    stackframe.AddrPC.Offset    = context.Rip;
    stackframe.AddrFrame.Offset = context.Rbp;
    stackframe.AddrStack.Offset = context.Rsp;
#else
    stackframe.AddrPC.Offset    = context.Eip;
    stackframe.AddrFrame.Offset = context.Ebp;
    stackframe.AddrStack.Offset = context.Esp;
#endif

    if (!p->symbols_initialized) {
        SymInitialize(process, NULL, TRUE);
    }

    while (StackWalk64(
#ifdef DIAG_BITS_64
                       IMAGE_FILE_MACHINE_AMD64,
#else
                       IMAGE_FILE_MACHINE_I386,
#endif
                       process, thread,
                       &stackframe,
                       &context,
                       NULL,                       /* ReadMemoryRoutine */
                       SymFunctionTableAccess64,   /* FunctionTableAccessRoutine */
                       SymGetModuleBase64,         /* GetModuleBaseRoutine */
                       NULL)                       /* TranslateAddress */
           == TRUE) {
        char symbol_buffer[128] = {0};
        IMAGEHLP_SYMBOL64 *symbol = (IMAGEHLP_SYMBOL64 *)&symbol_buffer;
        DWORD64 ignored;
        const char *function;
        const char *offset;
        char address_buf[20], offset_buf[20];
        char buf[128];
        char *outch = buf;
        char *lastoutch = buf + sizeof buf - 1;

        if (cur + 1 > count) { /* avoid loop on corrupted chain, respect caller's wishes */
            break;
        }
        symbol->SizeOfStruct = sizeof(IMAGEHLP_SYMBOL64);
        symbol->MaxNameLength = sizeof(symbol_buffer) - sizeof(IMAGEHLP_SYMBOL64);
        ignored = 0;
        if (SymGetSymFromAddr64(process, stackframe.AddrPC.Offset, &ignored, symbol) != TRUE) {
            function = NULL;
            offset = NULL;
        }
        else {
            function = symbol->Name;
            add_int(offset_buf, offset_buf + sizeof offset_buf - 1,
                    stackframe.AddrPC.Offset - symbol->Address, 16);
            offset = offset_buf;
        }

        add_int(address_buf, address_buf + sizeof address_buf - 1,
                stackframe.AddrPC.Offset, 16);

        if (function && !strcmp(function, "diag_backtrace")) {
            /* filter ourselves out */
            continue;
        }

        cur++; /* gonna keep this frame, so count it */

        output_frame(outch, lastoutch, p->backtrace_fields,
                     NULL, /* no module path */
                     NULL, /* no module */
                     function,
                     offset,
                     address_buf);

        if (o->output_mode == DIAG_CALL_FN) {
            o->output_fn(o->user_data, buf);
        }
        else {
            WriteFile(o->outfile, buf, strlen(buf), &bytes_written, NULL);
            WriteFile(o->outfile, "\r\n", 2, &bytes_written, NULL);
        }
    }

    return 0;
}
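A build note for the DbgHelp calls above (StackWalk64, SymInitialize, SymGetSymFromAddr64, SymFunctionTableAccess64, SymGetModuleBase64): they are declared in <dbghelp.h>, which must be included after <windows.h>, and the program has to link against dbghelp.lib, for example:

#include <windows.h>
#include <dbghelp.h>
#pragma comment(lib, "dbghelp.lib")   /* MSVC: pull in DbgHelp at link time */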