Code Example #1
File: audio-io.c Project: SpaderQueen/Gifscreen1
static void *audio_thread(void *param)
{
	struct audio_output *audio = param;
	uint64_t buffer_time = audio->info.buffer_ms * 1000000;
	uint64_t prev_time = os_gettime_ns() - buffer_time;
	uint64_t audio_time;

	os_set_thread_name("audio-io: audio thread");

	const char *audio_thread_name =
		profile_store_name(obs_get_profiler_name_store(),
				"audio_thread(%s)", audio->info.name);
	
	while (os_event_try(audio->stop_event) == EAGAIN) {
		os_sleep_ms(AUDIO_WAIT_TIME);

		profile_start(audio_thread_name);
		pthread_mutex_lock(&audio->line_mutex);

		audio_time = os_gettime_ns() - buffer_time;
		audio_time = mix_and_output(audio, audio_time, prev_time);
		prev_time  = audio_time;

		pthread_mutex_unlock(&audio->line_mutex);
		profile_end(audio_thread_name);

		profile_reenable_thread();
	}

	return NULL;
}
Code Example #2
File: obs-source.c Project: Alucard014/obs-studio
static void source_output_audio_line(obs_source_t source,
		const struct audio_data *data)
{
	struct audio_data in = *data;

	if (!in.timestamp) {
		in.timestamp = os_gettime_ns();
		if (!source->timing_set) {
			source->timing_set    = true;
			source->timing_adjust = 0;
		}
	}

	if (!source->timing_set) {
		source->timing_set    = true;
		source->timing_adjust = in.timestamp - os_gettime_ns();

		/* detects 'directly' set timestamps as long as they're within
		 * a certain threshold */
		if ((source->timing_adjust+MAX_VARIANCE) < MAX_VARIANCE*2)
			source->timing_adjust = 0;
	}

	in.timestamp += source->timing_adjust;
	audio_line_output(source->audio_line, &in);
}
Code Example #3
File: rtmp-stream.c Project: Birdboat/obs-studio
static bool send_remaining_packets(struct rtmp_stream *stream)
{
	struct encoder_packet packet;
	uint64_t max_ns = (uint64_t)stream->max_shutdown_time_sec * 1000000000;
	uint64_t begin_time_ns = os_gettime_ns();

	if (!stream->sent_headers) {
		if (!send_headers(stream))
			return false;
	}

	while (get_next_packet(stream, &packet)) {
		if (send_packet(stream, &packet, false, packet.track_idx) < 0)
			return false;

		/* Just disconnect if it takes too long to shut down */
		if ((os_gettime_ns() - begin_time_ns) > max_ns) {
			info("Took longer than %d second(s) to shut down, "
			     "automatically stopping connection",
			     stream->max_shutdown_time_sec);
			return false;
		}
	}

	return true;
}
Code Example #4
File: test-sinewave.c Project: gameroast/obs-studio
static void *sinewave_thread(void *pdata)
{
	struct sinewave_data *swd = pdata;
	uint64_t last_time = os_gettime_ns();
	uint64_t ts = 0;
	double cos_val = 0.0;
	uint8_t bytes[480];

	while (event_try(swd->event) == EAGAIN) {
		if (!os_sleepto_ns(last_time += 10000000))
			last_time = os_gettime_ns();

		/* 'rate' (tone frequency / sample rate) and M_PI_X2 (2 * M_PI)
		 * are file-scope constants in test-sinewave.c */
		for (size_t i = 0; i < 480; i++) {
			cos_val += rate * M_PI_X2;
			if (cos_val > M_PI_X2)
				cos_val -= M_PI_X2;

			double wave = cos(cos_val) * 0.5;
			bytes[i] = (uint8_t)((wave+1.0)*0.5 * 255.0);
		}

		struct source_audio data;
		data.data[0] = bytes;
		data.frames = 480;
		data.speakers = SPEAKERS_MONO;
		data.samples_per_sec = 48000;
		data.timestamp = ts;
		data.format = AUDIO_FORMAT_U8BIT;
		obs_source_output_audio(swd->source, &data);

		ts += 10000000;
	}

	return NULL;
}
Code Example #5
void OBSBasicStatusBar::UpdateBandwidth()
{
	if (!streamOutput)
		return;

	if (++bitrateUpdateSeconds < BITRATE_UPDATE_SECONDS)
		return;

	uint64_t bytesSent     = obs_output_get_total_bytes(streamOutput);
	uint64_t bytesSentTime = os_gettime_ns();
	uint64_t bitsBetween   = (bytesSent - lastBytesSent) * 8;

	double timePassed = double(bytesSentTime - lastBytesSentTime) /
		1000000000.0;

	double kbitsPerSec = double(bitsBetween) / timePassed / 1000.0;

	QString text;
	text += QString("kb/s: ") +
		QString::number(kbitsPerSec, 'f', 0);
	kbps->setText(text);
	kbps->setMinimumWidth(kbps->width());

	lastBytesSent        = bytesSent;
	lastBytesSentTime    = bytesSentTime;
	bitrateUpdateSeconds = 0;
}
Code Example #6
File: obs-video.c Project: Dead133/obs-studio
static inline void video_sleep(struct obs_core_video *video,
		bool raw_active, const bool gpu_active,
		uint64_t *p_time, uint64_t interval_ns)
{
	struct obs_vframe_info vframe_info;
	uint64_t cur_time = *p_time;
	uint64_t t = cur_time + interval_ns;
	int count;

	/* os_sleepto_ns() returns false if the target time already passed;
	 * the else branch then counts how many whole intervals were missed */
	if (os_sleepto_ns(t)) {
		*p_time = t;
		count = 1;
	} else {
		count = (int)((os_gettime_ns() - cur_time) / interval_ns);
		*p_time = cur_time + interval_ns * count;
	}

	video->total_frames += count;
	video->lagged_frames += count - 1;

	vframe_info.timestamp = cur_time;
	vframe_info.count = count;

	if (raw_active)
		circlebuf_push_back(&video->vframe_info_buffer, &vframe_info,
				sizeof(vframe_info));
	if (gpu_active)
		circlebuf_push_back(&video->vframe_info_buffer_gpu,
				&vframe_info, sizeof(vframe_info));
}
Code Example #7
File: video-io.c Project: GamingAtheist/obs-studio
static void *video_thread(void *param)
{
	struct video_output *video = param;
	uint64_t cur_time = os_gettime_ns();

	while (os_event_try(video->stop_event) == EAGAIN) {
		/* wait half a frame, update frame */
		cur_time += (video->frame_time/2);
		os_sleepto_ns(cur_time);

		video->cur_video_time = cur_time;
		os_event_signal(video->update_event);

		/* wait another half a frame, swap and output frames */
		cur_time += (video->frame_time/2);
		os_sleepto_ns(cur_time);

		pthread_mutex_lock(&video->data_mutex);

		video_swapframes(video);
		video_output_cur_frame(video);

		pthread_mutex_unlock(&video->data_mutex);
	}

	return NULL;
}
Code Example #8
File: jack-wrapper.c Project: AmesianX/obs-studio
int jack_process_callback(jack_nframes_t nframes, void* arg)
{
	struct jack_data* data = (struct jack_data*)arg;
	if (data == 0)
		return 0;

	pthread_mutex_lock(&data->jack_mutex);

	struct obs_source_audio out;
	out.speakers        = jack_channels_to_obs_speakers(data->channels);
	out.samples_per_sec = jack_get_sample_rate (data->jack_client);
	/* format is always 32 bit float for jack */
	out.format          = AUDIO_FORMAT_FLOAT_PLANAR;

	for (unsigned int i = 0; i < data->channels; ++i) {
		jack_default_audio_sample_t *jack_buffer =
			(jack_default_audio_sample_t *)jack_port_get_buffer(
				data->jack_ports[i], nframes);
		out.data[i] = (uint8_t *)jack_buffer;
	}

	out.frames    = nframes;
	out.timestamp = os_gettime_ns() -
				jack_frames_to_time(data->jack_client, nframes);

	obs_source_output_audio(data->source, &out);
	pthread_mutex_unlock(&data->jack_mutex);
	return 0;
}
Code Example #9
File: rtmp-stream.c Project: Eegee/obs-studio
static void droptest_cap_data_rate(struct rtmp_stream *stream, size_t size)
{
	uint64_t ts = os_gettime_ns();
	struct droptest_info info;

	info.ts = ts;
	info.size = size;

	circlebuf_push_back(&stream->droptest_info, &info, sizeof(info));
	stream->droptest_size += size;

	if (stream->droptest_info.size) {
		circlebuf_peek_front(&stream->droptest_info,
				&info, sizeof(info));

		/* if the byte cap was exceeded less than a second after the
		 * oldest buffered packet, sleep out the rest of that second
		 * before trimming the window */
		if (stream->droptest_size > DROPTEST_MAX_BYTES) {
			uint64_t elapsed = ts - info.ts;

			if (elapsed < 1000000000ULL) {
				elapsed = 1000000000ULL - elapsed;
				os_sleepto_ns(ts + elapsed);
			}

			while (stream->droptest_size > DROPTEST_MAX_BYTES) {
				circlebuf_pop_front(&stream->droptest_info,
						&info, sizeof(info));
				stream->droptest_size -= info.size;
			}
		}
	}
}
Code Example #10
void OBSBasicStatusBar::ReconnectSuccess()
{
	showMessage(QTStr("Basic.StatusBar.ReconnectSuccessful"), 4000);
	retries              = 0;
	bitrateUpdateSeconds = -1;
	lastBytesSent        = 0;
	lastBytesSentTime    = os_gettime_ns();
}
Code Example #11
static void *audio_thread(void *param)
{
	struct audio_output *audio = param;
	size_t rate = audio->info.samples_per_sec;
	uint64_t samples = 0;
	uint64_t start_time = os_gettime_ns();
	uint64_t prev_time = start_time;
	uint64_t audio_time = prev_time;
	uint32_t audio_wait_time =
		(uint32_t)(audio_frames_to_ns(rate, AUDIO_OUTPUT_FRAMES) /
				1000000);

	os_set_thread_name("audio-io: audio thread");

	const char *audio_thread_name =
		profile_store_name(obs_get_profiler_name_store(),
				"audio_thread(%s)", audio->info.name);

	while (os_event_try(audio->stop_event) == EAGAIN) {
		uint64_t cur_time;

		os_sleep_ms(audio_wait_time);

		profile_start(audio_thread_name);

		cur_time = os_gettime_ns();
		while (audio_time <= cur_time) {
			samples += AUDIO_OUTPUT_FRAMES;
			audio_time = start_time +
				audio_frames_to_ns(rate, samples);

			input_and_output(audio, audio_time, prev_time);
			prev_time = audio_time;
		}

		profile_end(audio_thread_name);

		profile_reenable_thread();
	}

	return NULL;
}
Code Example #12
File: obs-source.c Project: BraginWoW/obs-studio
/*
 * Ensures that cached frames are displayed on time.  If multiple frames
 * were cached between renders, then releases the unnecessary frames and uses
 * the frame with the closest timing to ensure sync.
 */
struct source_frame *obs_source_getframe(obs_source_t source)
{
    uint64_t last_frame_time = source->last_frame_timestamp;
    struct   source_frame *frame = NULL;
    struct   source_frame *next_frame;
    uint64_t sys_time, frame_time;

    pthread_mutex_lock(&source->video_mutex);

    if (!source->video_frames.num)
        goto unlock;

    next_frame = source->video_frames.array[0];
    sys_time   = os_gettime_ns();
    frame_time = next_frame->timestamp;

    if (!source->last_frame_timestamp) {
        frame = next_frame;
        da_erase(source->video_frames, 0);

        source->last_frame_timestamp = frame_time;
    } else {
        uint64_t sys_offset, frame_offset;
        sys_offset   = sys_time   - source->last_sys_timestamp;
        frame_offset = frame_time - last_frame_time;

        source->last_frame_timestamp += sys_offset;

        while (frame_offset <= sys_offset) {
            if (frame)
                source_frame_destroy(frame);

            frame = next_frame;
            da_erase(source->video_frames, 0);

            if (!source->video_frames.num)
                break;

            next_frame   = source->video_frames.array[0];
            frame_time   = next_frame->timestamp;
            frame_offset = frame_time - last_frame_time;
        }
    }

    source->last_sys_timestamp = sys_time;

unlock:
    pthread_mutex_unlock(&source->video_mutex);

    if (frame != NULL)
        obs_source_addref(source);

    return frame;
}
Code Example #13
File: media.c Project: AmesianX/obs-studio
static inline bool mp_media_sleepto(mp_media_t *m)
{
	bool timeout = false;

	if (!m->next_ns) {
		m->next_ns = os_gettime_ns();
	} else {
		uint64_t t = os_gettime_ns();
		const uint64_t timeout_ns = 200000000;

		if (m->next_ns > t && (m->next_ns - t) > timeout_ns) {
			os_sleepto_ns(t + timeout_ns);
			timeout = true;
		} else {
			os_sleepto_ns(m->next_ns);
		}
	}

	return timeout;
}
Code Example #14
File: rtmp-stream.c Project: Eegee/obs-studio
static inline bool can_shutdown_stream(struct rtmp_stream *stream,
		struct encoder_packet *packet)
{
	uint64_t cur_time = os_gettime_ns();
	bool timeout = cur_time >= stream->shutdown_timeout_ts;

	if (timeout)
		info("Stream shutdown timeout reached (%d second(s))",
				stream->max_shutdown_time_sec);

	return timeout || packet->sys_dts_usec >= (int64_t)stream->stop_ts;
}
Code Example #15
File: audio-io.c Project: ArnoldSchiller/obs-studio
static void *audio_thread(void *param)
{
	struct audio_output *audio = param;
	uint64_t buffer_time = audio->info.buffer_ms * 1000000;
	uint64_t prev_time = os_gettime_ns() - buffer_time;
	uint64_t audio_time;

	while (os_event_try(audio->stop_event) == EAGAIN) {
		os_sleep_ms(AUDIO_WAIT_TIME);

		pthread_mutex_lock(&audio->line_mutex);

		audio_time = os_gettime_ns() - buffer_time;
		audio_time = mix_and_output(audio, audio_time, prev_time);
		prev_time  = audio_time;

		pthread_mutex_unlock(&audio->line_mutex);
	}

	return NULL;
}
Code Example #16
File: text-freetype2.c Project: Demiguise/obs-studio
static void ft2_video_tick(void *data, float seconds)
{
	struct ft2_source *srcdata = data;
	if (srcdata == NULL) return;
	if (!srcdata->from_file || !srcdata->text_file) return;

	if (os_gettime_ns() - srcdata->last_checked >= 1000000000) {
		time_t t = get_modified_timestamp(srcdata->text_file);
		srcdata->last_checked = os_gettime_ns();

		if (srcdata->m_timestamp != t) {
			if (srcdata->log_mode)
				read_from_end(srcdata, srcdata->text_file);
			else
				load_text_from_file(srcdata,
					srcdata->text_file);
			set_up_vertex_buffer(srcdata);
		}
	}

	UNUSED_PARAMETER(seconds);
}
Code Example #17
void OBSBasicStatusBar::StreamStarted(obs_output_t *output)
{
	streamOutput = output;

	signal_handler_connect(obs_output_get_signal_handler(streamOutput),
			"reconnect", OBSOutputReconnect, this);
	signal_handler_connect(obs_output_get_signal_handler(streamOutput),
			"reconnect_success", OBSOutputReconnectSuccess, this);

	retries           = 0;
	lastBytesSent     = 0;
	lastBytesSentTime = os_gettime_ns();
	IncRef();
}
Code Example #18
void OBSBasicStatusBar::ReconnectSuccess()
{
	showMessage(QTStr("Basic.StatusBar.ReconnectSuccessful"), 4000);
	retries              = 0;
	reconnectTimeout     = 0;
	bitrateUpdateSeconds = -1;
	lastBytesSent        = 0;
	lastBytesSentTime    = os_gettime_ns();

	if (streamOutput) {
		delaySecTotal = obs_output_get_active_delay(streamOutput);
		UpdateDelayMsg();
	}
}
Code Example #19
void os_sleepto_ns(uint64_t time_target)
{
	uint64_t t = os_gettime_ns();
	uint32_t milliseconds;

	if (t >= time_target)
		return;

	milliseconds = (uint32_t)((time_target - t)/1000000);
	if (milliseconds > 1)
		os_sleep_ms(milliseconds);

	for (;;) {
		t = os_gettime_ns();
		if (t >= time_target)
			return;

#if 1
		Sleep(1);
#else
		Sleep(0);
#endif
	}
}
Code Example #20
bool os_sleepto_ns(uint64_t time_target)
{
	uint64_t t = os_gettime_ns();
	uint32_t milliseconds;

	if (t >= time_target)
		return false;

	milliseconds = (uint32_t)((time_target - t)/1000000);
	if (milliseconds > 1)
		Sleep(milliseconds-1);

	for (;;) {
		t = os_gettime_ns();
		if (t >= time_target)
			return true;

#if 1
		Sleep(1);
#else
		Sleep(0);
#endif
	}
}
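The boolean variant above returns true only when the sleep actually reached the target time; a false return means the deadline had already passed when it was called. Code Examples #4 and #6 use that return value to resynchronize their loop clock. Below is a minimal caller-side sketch of the pattern; it is not taken from the OBS tree, and fixed_interval_loop with its placeholder tick body is hypothetical.

#include <stdint.h>
#include <util/platform.h> /* os_gettime_ns(), os_sleepto_ns() */

static void fixed_interval_loop(int ticks, uint64_t interval_ns)
{
	uint64_t last_time = os_gettime_ns();

	for (int i = 0; i < ticks; i++) {
		/* advance the deadline by one interval; if it was already
		 * missed, rebase on the current time instead of drifting */
		if (!os_sleepto_ns(last_time += interval_ns))
			last_time = os_gettime_ns();

		/* per-tick work (mixing, rendering, ...) would go here */
	}
}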
Code Example #21
File: obs-source.c Project: BraginWoW/obs-studio
static void obs_source_render_async_video(obs_source_t source)
{
    struct source_frame *frame = obs_source_getframe(source);
    if (!frame)
        return;

    source->timing_adjust = frame->timestamp - os_gettime_ns();
    if (!source->timing_set && source->audio_wait_buffer.num)
        obs_source_flush_audio_wait_buffer(source);

    if (set_texture_size(source, frame))
        obs_source_draw_texture(source->output_texture, frame);

    obs_source_releaseframe(source, frame);
}
Code Example #22
/**
 * Callback for pulse which gets executed when new audio data is available
 *
 * @warning The function may be called even after disconnecting the stream
 */
static void pulse_stream_read(pa_stream *p, size_t nbytes, void *userdata)
{
	UNUSED_PARAMETER(p);
	UNUSED_PARAMETER(nbytes);
	PULSE_DATA(userdata);

	const void *frames;
	size_t bytes;
	int64_t latency;

	if (!data->stream)
		goto exit;

	pa_stream_peek(data->stream, &frames, &bytes);

	// check if we got data
	if (!bytes)
		goto exit;

	if (!frames) {
		blog(LOG_ERROR, "pulse-input: Got audio hole of %u bytes",
			(unsigned int) bytes);
		pa_stream_drop(data->stream);
		goto exit;
	}

	if (pulse_get_stream_latency(data->stream, &latency) < 0) {
		blog(LOG_ERROR, "pulse-input: Failed to get timing info !");
		pa_stream_drop(data->stream);
		goto exit;
	}

	struct source_audio out;
	out.speakers        = data->speakers;
	out.samples_per_sec = data->samples_per_sec;
	out.format          = pulse_to_obs_audio_format(data->format);
	out.data[0]         = (uint8_t *) frames;
	out.frames          = bytes / data->bytes_per_frame;
	/* back-date the timestamp by the reported stream latency (us -> ns) */
	out.timestamp       = os_gettime_ns() - (latency * 1000ULL);
	obs_source_output_audio(data->source, &out);

	data->packets++;
	data->frames += out.frames;

	pa_stream_drop(data->stream);
exit:
	pulse_signal(0);
}
Code Example #23
File: media.c Project: AmesianX/obs-studio
static int interrupt_callback(void *data)
{
	mp_media_t *m = data;
	bool stop = false;
	uint64_t ts = os_gettime_ns();

	if ((ts - m->interrupt_poll_ts) > 20000000) {
		pthread_mutex_lock(&m->mutex);
		stop = m->kill || m->stopping;
		pthread_mutex_unlock(&m->mutex);

		m->interrupt_poll_ts = ts;
	}

	return stop;
}
Code Example #24
File: platform-cocoa.c Project: bradparks/obs-studio
void os_sleepto_ns(uint64_t time_target)
{
	uint64_t current = os_gettime_ns();
	if(time_target < current)
		return;
	time_target -= current;
	struct timespec req,
			remain;
	memset(&req, 0, sizeof(req));
	memset(&remain, 0, sizeof(remain));
	req.tv_sec = time_target/1000000000;
	req.tv_nsec = time_target%1000000000;
	while(nanosleep(&req, &remain))
	{
		req = remain;
		memset(&remain, 0, sizeof(remain));
	}
}
Code Example #25
File: obs-video.c Project: Bl00drav3n/obs-studio
void *obs_video_thread(void *param)
{
	uint64_t last_time = 0;
	uint64_t cur_time = os_gettime_ns();
	uint64_t interval = video_output_get_frame_time(obs->video.video);

	os_set_thread_name("libobs: graphics thread");

	while (!video_output_stopped(obs->video.video)) {
		last_time = tick_sources(cur_time, last_time);

		render_displays();

		output_frame(&cur_time, interval);
	}

	UNUSED_PARAMETER(param);
	return NULL;
}
Code Example #26
File: video-io.c Project: Alucard014/obs-studio
static void *video_thread(void *param)
{
	struct video_output *video = param;
	uint64_t cur_time = os_gettime_ns();

	while (event_try(&video->stop_event) == EAGAIN) {
		/* wait half a frame, update frame */
		os_sleepto_ns(cur_time += (video->frame_time/2));
		video->cur_video_time = cur_time;
		event_signal(&video->update_event);

		/* wait another half a frame, swap and output frames */
		os_sleepto_ns(cur_time += (video->frame_time/2));
		video_swapframes(video);
		if (video->cur_frame)
			media_output_data(video->output, video->cur_frame);
	}

	return NULL;
}
Code Example #27
File: graphics-hook.c Project: RoyHP/obs-studio
static inline bool frame_ready(uint64_t interval)
{
	static uint64_t last_time = 0;
	uint64_t        elapsed;
	uint64_t        t;

	if (!interval) {
		return true;
	}

	t = os_gettime_ns();
	elapsed = t - last_time;

	if (elapsed < interval) {
		return false;
	}

	/* if the capture fell more than two intervals behind, rebase on the
	 * current time; otherwise advance by exactly one interval */
	last_time = (elapsed > interval * 2) ? t : last_time + interval;
	return true;
}
Code Example #28
File: obs-video.c Project: Bl00drav3n/obs-studio
static inline void video_sleep(struct obs_core_video *video,
		uint64_t *p_time, uint64_t interval_ns)
{
	struct obs_vframe_info vframe_info;
	uint64_t cur_time = *p_time;
	uint64_t t = cur_time + interval_ns;
	int count;

	if (os_sleepto_ns(t)) {
		*p_time = t;
		count = 1;
	} else {
		count = (int)((os_gettime_ns() - cur_time) / interval_ns);
		*p_time = cur_time + interval_ns * count;
	}

	vframe_info.timestamp = cur_time;
	vframe_info.count = count;
	circlebuf_push_back(&video->vframe_info_buffer, &vframe_info,
			sizeof(vframe_info));
}
Code Example #29
File: obs-source.c Project: Christicles/obs-studio
/*
 * Ensures that cached frames are displayed on time.  If multiple frames
 * were cached between renders, then releases the unnecessary frames and uses
 * the frame with the closest timing to ensure sync.  Also ensures that timing
 * with audio is synchronized.
 */
struct source_frame *obs_source_getframe(obs_source_t source)
{
	struct source_frame *frame = NULL;
	uint64_t sys_time;

	if (!source)
		return NULL;

	pthread_mutex_lock(&source->video_mutex);

	if (!source->video_frames.num)
		goto unlock;

	sys_time = os_gettime_ns();

	if (!source->last_frame_ts) {
		frame = source->video_frames.array[0];
		da_erase(source->video_frames, 0);

		source->last_frame_ts = frame->timestamp;
	} else {
		frame = get_closest_frame(source, sys_time);
	}

	/* reset timing to current system time */
	if (frame) {
		source->timing_adjust = sys_time - frame->timestamp;
		source->timing_set = true;
	}

	source->last_sys_timestamp = sys_time;

unlock:
	pthread_mutex_unlock(&source->video_mutex);

	if (frame)
		obs_source_addref(source);

	return frame;
}
Code Example #30
File: obs-video.c Project: dourgulf/biliobs
void *obs_video_thread(void *param)
{
	uint64_t last_time = 0;
	uint64_t interval = video_output_get_frame_time(obs->video.video);

	obs->video.video_time = os_gettime_ns();

	os_set_thread_name("libobs: graphics thread");

	const char *video_thread_name =
		profile_store_name(obs_get_profiler_name_store(),
			"obs_video_thread(%g ms)", interval / 1000000.);
	profile_register_root(video_thread_name, interval);

	while (!video_output_stopped(obs->video.video)) {
		profile_start(video_thread_name);

		profile_start(tick_sources_name);
		last_time = tick_sources(obs->video.video_time, last_time);
		profile_end(tick_sources_name);

		profile_start(render_displays_name);
		render_displays();
		profile_end(render_displays_name);

		profile_start(output_frame_name);
		output_frame();
		profile_end(output_frame_name);

		profile_end(video_thread_name);

		profile_reenable_thread();

		video_sleep(&obs->video, &obs->video.video_time, interval);
	}

	UNUSED_PARAMETER(param);
	return NULL;
}
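Taken together, these examples use os_gettime_ns() in two recurring ways: as an absolute clock to schedule against with os_sleepto_ns() (Code Examples #4, #6, #7, #26), and as a monotonic stopwatch, subtracting two readings to measure elapsed time (Code Examples #3, #16, #23). A minimal sketch of the stopwatch pattern follows; report_elapsed is a hypothetical helper, not part of libobs.

#include <stdio.h>
#include <stdint.h>
#include <util/platform.h> /* os_gettime_ns() */

static void report_elapsed(const char *label, void (*work)(void))
{
	uint64_t start = os_gettime_ns();
	work();
	uint64_t elapsed_ns = os_gettime_ns() - start;

	/* nanoseconds -> milliseconds for readability */
	printf("%s took %.3f ms\n", label, (double)elapsed_ns / 1000000.0);
}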