Example #1
// Find intersecting clips (or non-intersecting clips, when include == false)
vector<Clip*> Timeline::find_intersecting_clips(long int requested_frame, int number_of_frames, bool include)
{
	// Find matching clips
	vector<Clip*> matching_clips;

	// Calculate time of frame
	float min_requested_time = calculate_time(requested_frame, info.fps);
	float max_requested_time = calculate_time(requested_frame + (number_of_frames - 1), info.fps);
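	// Worked example (assuming calculate_time maps frame 1 to 0.0 s and divides by fps):
	// at 30 fps, requested_frame == 31 with number_of_frames == 8 gives a window of
	// roughly [1.0 s, 1.233 s] on the timeline.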

	// Re-Sort Clips (since they likely changed)
	sort_clips();

	// Find Clips at this time
	list<Clip*>::iterator clip_itr;
	for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
	{
		// Get clip object from the iterator
		Clip *clip = (*clip_itr);

		// Does the clip intersect the requested time window (either already playing at the
		// window start, or starting somewhere inside the window)?
		float clip_duration = clip->End() - clip->Start();
		bool does_clip_intersect = (clip->Position() <= min_requested_time && clip->Position() + clip_duration >= min_requested_time) ||
								   (clip->Position() > min_requested_time && clip->Position() <= max_requested_time);

		// Debug output
		AppendDebugMethod("Timeline::find_intersecting_clips (Is clip near or intersecting)", "requested_frame", requested_frame, "min_requested_time", min_requested_time, "max_requested_time", max_requested_time, "clip->Position()", clip->Position(), "clip_duration", clip_duration, "does_clip_intersect", does_clip_intersect);

		// Open (or schedule for closing) this clip, depending on whether it intersects
		#pragma omp critical (reader_lock)
		update_open_clips(clip, does_clip_intersect);


		// Add the clip to the results, depending on the include flag
		if (does_clip_intersect && include)
			// Add the intersecting clip
			matching_clips.push_back(clip);

		else if (!does_clip_intersect && !include)
			// Add the non-intersecting clip
			matching_clips.push_back(clip);

	} // end clip loop

	// Return the list of matching clips
	return matching_clips;
}
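
Both of the time comparisons above rely on calculate_time() converting a 1-based frame number into a timeline position in seconds. A minimal sketch of that helper, assuming it simply divides by the timeline frame rate (the real implementation may differ):

// Hypothetical sketch of the frame-to-seconds conversion assumed above.
// Fraction::ToFloat() is assumed to return the frame rate in frames per second.
float Timeline::calculate_time(long int number, Fraction rate)
{
	// Frame 1 maps to 0.0 seconds; each later frame adds one frame duration
	return float(number - 1) / rate.ToFloat();
}
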
Example #2
// Get an openshot::Frame object for a specific frame number of this reader.
tr1::shared_ptr<Frame> Timeline::GetFrame(long int requested_frame) throw(ReaderClosed, OutOfBoundsFrame)
{
	// Check for open reader (or throw exception)
	if (!is_open)
		throw ReaderClosed("The Timeline is closed.  Call Open() before calling this method.", "");

	// Adjust out-of-bounds frame number (clamp to the first frame)
	if (requested_frame < 1)
		requested_frame = 1;

	// Check cache
	tr1::shared_ptr<Frame> frame = final_cache.GetFrame(requested_frame);
	if (frame) {
		// Debug output
		AppendDebugMethod("Timeline::GetFrame (Cached frame found)", "requested_frame", requested_frame, "", -1, "", -1, "", -1, "", -1, "", -1);

		// Return cached frame
		return frame;
	}
	else
	{
		// Create a scoped lock, allowing only a single thread to run the following code at one time
		const GenericScopedLock<CriticalSection> lock(getFrameCriticalSection);

		// Check cache again (due to locking)
		frame = final_cache.GetFrame(requested_frame);
		if (frame) {
			// Debug output
			AppendDebugMethod("Timeline::GetFrame (Cached frame found on 2nd look)", "requested_frame", requested_frame, "", -1, "", -1, "", -1, "", -1, "", -1);

			// Return cached frame
			return frame;
		}

		// Minimum number of frames to process (for performance reasons)
		int minimum_frames = OPEN_MP_NUM_PROCESSORS;

		// Get a list of clips that intersect with the requested section of timeline
		// This also opens the readers for intersecting clips, and marks non-intersecting clips as 'needs closing'
		vector<Clip*> nearby_clips = find_intersecting_clips(requested_frame, minimum_frames, true);

		// Set the number of OpenMP threads, and allow nested OpenMP sections
		omp_set_num_threads(OPEN_MP_NUM_PROCESSORS);
		omp_set_nested(true);

		// Debug output
		AppendDebugMethod("Timeline::GetFrame", "requested_frame", requested_frame, "minimum_frames", minimum_frames, "OPEN_MP_NUM_PROCESSORS", OPEN_MP_NUM_PROCESSORS, "", -1, "", -1, "", -1);

		// GENERATE CACHE FOR CLIPS (IN FRAME # SEQUENCE)
		// Determine all clip frames, and request them in order (to keep resampled audio in sequence)
		for (long int frame_number = requested_frame; frame_number < requested_frame + minimum_frames; frame_number++)
		{
			// Calculate time of timeline frame
			float requested_time = calculate_time(frame_number, info.fps);
			// Loop through clips
			for (int clip_index = 0; clip_index < nearby_clips.size(); clip_index++)
			{
				// Get clip object from the vector
				Clip *clip = nearby_clips[clip_index];
				bool does_clip_intersect = (clip->Position() <= requested_time && clip->Position() + clip->Duration() >= requested_time);
				if (does_clip_intersect)
				{
					// Get clip frame #
					float time_diff = (requested_time - clip->Position()) + clip->Start();
					int clip_frame_number = round(time_diff * info.fps.ToFloat()) + 1;
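					// Worked example (assuming calculate_time maps frame 1 to 0.0 s): a clip at
					// Position() == 1.0 s with Start() == 0.0 s, at requested_time == 1.5 s and
					// 30 fps gives time_diff = 0.5 and clip_frame_number = round(0.5 * 30) + 1 = 16.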
					// Cache clip object
					clip->GetFrame(clip_frame_number);
				}
			}
		}

		#pragma omp parallel
		{
			// Loop through all requested frames
			#pragma omp for ordered firstprivate(nearby_clips, requested_frame, minimum_frames)
			for (long int frame_number = requested_frame; frame_number < requested_frame + minimum_frames; frame_number++)
			{
				// Debug output
				AppendDebugMethod("Timeline::GetFrame (processing frame)", "frame_number", frame_number, "omp_get_thread_num()", omp_get_thread_num(), "", -1, "", -1, "", -1, "", -1);

				// Init some basic properties about this frame
				int samples_in_frame = Frame::GetSamplesPerFrame(frame_number, info.fps, info.sample_rate, info.channels);
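				// e.g. at 44100 Hz and 30 fps this works out to 1470 samples (44100 / 30); the
				// helper is assumed to distribute any remainder across frames for non-integer ratios.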

				// Create blank frame (which will become the requested frame)
				tr1::shared_ptr<Frame> new_frame(new Frame(frame_number, info.width, info.height, "#000000", samples_in_frame, info.channels));
				new_frame->SampleRate(info.sample_rate);
				new_frame->ChannelsLayout(info.channel_layout);

				// Debug output
				AppendDebugMethod("Timeline::GetFrame (Adding solid color)", "frame_number", frame_number, "info.width", info.width, "info.height", info.height, "", -1, "", -1, "", -1);

				// Add Background Color to 1st layer (if animated or not black)
				if ((color.red.Points.size() > 1 || color.green.Points.size() > 1 || color.blue.Points.size() > 1) ||
					(color.red.GetValue(frame_number) != 0.0 || color.green.GetValue(frame_number) != 0.0 || color.blue.GetValue(frame_number) != 0.0))
					new_frame->AddColor(info.width, info.height, color.GetColorHex(frame_number));

				// Calculate time of frame
				float requested_time = calculate_time(frame_number, info.fps);

				// Debug output
				AppendDebugMethod("Timeline::GetFrame (Loop through clips)", "frame_number", frame_number, "requested_time", requested_time, "clips.size()", clips.size(), "nearby_clips.size()", nearby_clips.size(), "", -1, "", -1);

				// Find Clips near this time
				for (int clip_index = 0; clip_index < nearby_clips.size(); clip_index++)
				{
					// Get clip object from the vector
					Clip *clip = nearby_clips[clip_index];

					// Does clip intersect the current requested time
					bool does_clip_intersect = (clip->Position() <= requested_time && clip->Position() + clip->Duration() >= requested_time);

					// Debug output
					AppendDebugMethod("Timeline::GetFrame (Does clip intersect)", "frame_number", frame_number, "requested_time", requested_time, "clip->Position()", clip->Position(), "clip->Duration()", clip->Duration(), "does_clip_intersect", does_clip_intersect, "", -1);

					// Clip is visible
					if (does_clip_intersect)
					{
						// Determine if clip is "top" clip on this layer (only happens when multiple clips are overlapping)
						bool is_top_clip = true;
						for (int top_clip_index = 0; top_clip_index < nearby_clips.size(); top_clip_index++)
						{
							Clip *nearby_clip = nearby_clips[top_clip_index];
							if (clip->Id() != nearby_clip->Id() && clip->Layer() == nearby_clip->Layer() &&
									nearby_clip->Position() <= requested_time && nearby_clip->Position() + nearby_clip->Duration() >= requested_time &&
									nearby_clip->Position() > clip->Position()) {
								is_top_clip = false;
								break;
							}
						}

						// Determine the frame needed for this clip (based on the position on the timeline)
						float time_diff = (requested_time - clip->Position()) + clip->Start();
						int clip_frame_number = round(time_diff * info.fps.ToFloat()) + 1;

						// Debug output
						AppendDebugMethod("Timeline::GetFrame (Calculate clip's frame #)", "time_diff", time_diff, "requested_time", requested_time, "clip->Position()", clip->Position(), "clip->Start()", clip->Start(), "info.fps.ToFloat()", info.fps.ToFloat(), "clip_frame_number", clip_frame_number);

						// Add clip's frame as layer
						add_layer(new_frame, clip, clip_frame_number, frame_number, is_top_clip);

					} else
						// Debug output
						AppendDebugMethod("Timeline::GetFrame (clip does not intersect)", "frame_number", frame_number, "requested_time", requested_time, "does_clip_intersect", does_clip_intersect, "", -1, "", -1, "", -1);

				} // end clip loop

				// Debug output
				AppendDebugMethod("Timeline::GetFrame (Add frame to cache)", "frame_number", frame_number, "info.width", info.width, "info.height", info.height, "", -1, "", -1, "", -1);

				// Add final frame to cache
				final_cache.Add(frame_number, new_frame);

			} // end frame loop
		} // end parallel

		// Debug output
		AppendDebugMethod("Timeline::GetFrame (end parallel region)", "requested_frame", requested_frame, "omp_get_thread_num()", omp_get_thread_num(), "", -1, "", -1, "", -1, "", -1);

		// Return frame (or blank frame)
		return final_cache.GetFrame(requested_frame);
	}
}
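
A minimal usage sketch of the reader interface shown above (the Timeline and Clip constructor arguments here are assumptions for illustration, not taken from the code above):

// Hypothetical usage: build a timeline, add a clip, then request frames.
Timeline timeline(1280, 720, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);  // assumed constructor signature
Clip clip("video.mp4");          // assumed Clip(path) constructor
clip.Position(0.0);              // place the clip at the start of the timeline
timeline.AddClip(&clip);
timeline.Open();                 // required before GetFrame(), otherwise ReaderClosed is thrown
tr1::shared_ptr<Frame> f = timeline.GetFrame(1);
timeline.Close();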