Example #1
// Apply JSON diff to clips
void Timeline::apply_json_to_clips(Json::Value change) throw(InvalidJSONKey) {

	// Get key and type of change
	string change_type = change["type"].asString();
	string clip_id = "";
	Clip *existing_clip = NULL;

	// Find id of clip (if any)
	for (int x = 0; x < change["key"].size(); x++) {
		// Get each change
		Json::Value key_part = change["key"][x];

		if (key_part.isObject()) {
			// Check for id
			if (!key_part["id"].isNull()) {
				// Set the id
				clip_id = key_part["id"].asString();

				// Find matching clip in timeline (if any)
				list<Clip*>::iterator clip_itr;
				for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
				{
					// Get clip object from the iterator
					Clip *c = (*clip_itr);
					if (c->Id() == clip_id) {
						existing_clip = c;
						break; // clip found, exit loop
					}
				}
				break; // id found, exit loop
			}
		}
	}

	// Check for a more specific key (targeting this clip's effects)
	// For example: ["clips", {"id":"123"}, "effects", {"id":"432"}]
	if (existing_clip && change["key"].size() == 4 && change["key"][2] == "effects")
	{
		// This change is actually targeting a specific effect under a clip (and not the clip itself)
		EffectBase *existing_effect = NULL;
		Json::Value key_part = change["key"][3];

		if (key_part.isObject()) {
			// Check for id
			if (!key_part["id"].isNull())
			{
				// Set the id
				string effect_id = key_part["id"].asString();

				// Find matching effect in timeline (if any)
				list<EffectBase*>::iterator effect_itr;
				for (effect_itr=existing_clip->Effects().begin(); effect_itr != existing_clip->Effects().end(); ++effect_itr)
				{
					// Get effect object from the iterator
					EffectBase *e = (*effect_itr);
					if (e->Id() == effect_id) {
						existing_effect = e;

						// Apply the change to the effect directly
						apply_json_to_effects(change, existing_effect);
						return; // effect found, don't update clip
					}
				}
			}
		}
	}

	// Determine type of change operation
	if (change_type == "insert") {

		// Create new clip
		Clip *clip = new Clip();
		clip->SetJsonValue(change["value"]); // Set properties of new clip from JSON
		AddClip(clip); // Add clip to timeline

	} else if (change_type == "update") {

		// Update existing clip
		if (existing_clip)
			existing_clip->SetJsonValue(change["value"]); // Update clip properties from JSON

	} else if (change_type == "delete") {

		// Remove existing clip
		if (existing_clip)
			RemoveClip(existing_clip); // Remove clip from timeline

	}

}
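
As a rough illustration of how a single change entry could be built and handed to this method, here is a hypothetical caller sketch. The clip id ("CLIP1"), the "position" property, and its value are made-up assumptions; only the {"type", "key", "value"} structure comes from the code above, and in the full library this path is normally driven by a higher-level public JSON-diff method rather than by calling apply_json_to_clips() directly.

// Hedged sketch (not from the original source): build one "update" change
// entry by hand and apply it. "CLIP1", the "position" property, and 4.5
// are illustrative assumptions.
#include <json/json.h>  // jsoncpp header; the include path may vary by packaging

void example_update_clip(Timeline &timeline)
{
	Json::Value change;
	change["type"] = "update";

	// Key path: ["clips", {"id":"CLIP1"}]
	Json::Value id_part;
	id_part["id"] = "CLIP1";
	change["key"].append("clips");
	change["key"].append(id_part);

	// New property values handed to Clip::SetJsonValue()
	change["value"]["position"] = 4.5;

	// Update the matching clip (no-op if no clip with that id exists)
	timeline.apply_json_to_clips(change);
}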
Example #2
// Get an openshot::Frame object for a specific frame number of this reader.
tr1::shared_ptr<Frame> Timeline::GetFrame(long int requested_frame) throw(ReaderClosed, OutOfBoundsFrame)
{
	// Check for open reader (or throw exception)
	if (!is_open)
		throw ReaderClosed("The Timeline is closed.  Call Open() before calling this method.", "");

	// Adjust out of bounds frame number
	if (requested_frame < 1)
		requested_frame = 1;

	// Check cache
	tr1::shared_ptr<Frame> frame = final_cache.GetFrame(requested_frame);
	if (frame) {
		// Debug output
		AppendDebugMethod("Timeline::GetFrame (Cached frame found)", "requested_frame", requested_frame, "", -1, "", -1, "", -1, "", -1, "", -1);

		// Return cached frame
		return frame;
	}
	else
	{
		// Create a scoped lock, allowing only a single thread to run the following code at one time
		const GenericScopedLock<CriticalSection> lock(getFrameCriticalSection);

		// Check cache again (due to locking)
		frame = final_cache.GetFrame(requested_frame);
		if (frame) {
			// Debug output
			AppendDebugMethod("Timeline::GetFrame (Cached frame found on 2nd look)", "requested_frame", requested_frame, "", -1, "", -1, "", -1, "", -1, "", -1);

			// Return cached frame
			return frame;
		}

		// Minimum number of frames to process (for performance reasons)
		int minimum_frames = OPEN_MP_NUM_PROCESSORS;

		// Get a list of clips that intersect with the requested section of timeline
		// This also opens the readers for intersecting clips, and marks non-intersecting clips as 'needs closing'
		vector<Clip*> nearby_clips = find_intersecting_clips(requested_frame, minimum_frames, true);

		omp_set_num_threads(OPEN_MP_NUM_PROCESSORS);
		// Allow nested OpenMP sections
		omp_set_nested(true);

		// Debug output
		AppendDebugMethod("Timeline::GetFrame", "requested_frame", requested_frame, "minimum_frames", minimum_frames, "OPEN_MP_NUM_PROCESSORS", OPEN_MP_NUM_PROCESSORS, "", -1, "", -1, "", -1);

		// GENERATE CACHE FOR CLIPS (IN FRAME # SEQUENCE)
		// Determine all clip frames, and request them in order (to keep resampled audio in sequence)
		for (long int frame_number = requested_frame; frame_number < requested_frame + minimum_frames; frame_number++)
		{
			// Calculate time of timeline frame
			float requested_time = calculate_time(frame_number, info.fps);
			// Loop through clips
			for (int clip_index = 0; clip_index < nearby_clips.size(); clip_index++)
			{
				// Get clip object from the iterator
				Clip *clip = nearby_clips[clip_index];
				bool does_clip_intersect = (clip->Position() <= requested_time && clip->Position() + clip->Duration() >= requested_time);
				if (does_clip_intersect)
				{
					// Get clip frame #
					float time_diff = (requested_time - clip->Position()) + clip->Start();
					int clip_frame_number = round(time_diff * info.fps.ToFloat()) + 1;
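					// Worked example (illustrative numbers): with fps = 30, a clip at
					// Position() = 2.0s with Start() = 0.5s, and requested_time = 3.0s,
					// time_diff = (3.0 - 2.0) + 0.5 = 1.5s and
					// clip_frame_number = round(1.5 * 30) + 1 = 46.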
					// Cache clip object
					clip->GetFrame(clip_frame_number);
				}
			}
		}

		#pragma omp parallel
		{
			// Loop through all requested frames
			#pragma omp for ordered firstprivate(nearby_clips, requested_frame, minimum_frames)
			for (long int frame_number = requested_frame; frame_number < requested_frame + minimum_frames; frame_number++)
			{
				// Debug output
				AppendDebugMethod("Timeline::GetFrame (processing frame)", "frame_number", frame_number, "omp_get_thread_num()", omp_get_thread_num(), "", -1, "", -1, "", -1, "", -1);

				// Init some basic properties about this frame
				int samples_in_frame = Frame::GetSamplesPerFrame(frame_number, info.fps, info.sample_rate, info.channels);

				// Create blank frame (which will become the requested frame)
				tr1::shared_ptr<Frame> new_frame(new Frame(frame_number, info.width, info.height, "#000000", samples_in_frame, info.channels));
				new_frame->SampleRate(info.sample_rate);
				new_frame->ChannelsLayout(info.channel_layout);

				// Debug output
				AppendDebugMethod("Timeline::GetFrame (Adding solid color)", "frame_number", frame_number, "info.width", info.width, "info.height", info.height, "", -1, "", -1, "", -1);

				// Add Background Color to 1st layer (if animated or not black)
				if ((color.red.Points.size() > 1 || color.green.Points.size() > 1 || color.blue.Points.size() > 1) ||
					(color.red.GetValue(frame_number) != 0.0 || color.green.GetValue(frame_number) != 0.0 || color.blue.GetValue(frame_number) != 0.0))
					new_frame->AddColor(info.width, info.height, color.GetColorHex(frame_number));

				// Calculate time of frame
				float requested_time = calculate_time(frame_number, info.fps);

				// Debug output
				AppendDebugMethod("Timeline::GetFrame (Loop through clips)", "frame_number", frame_number, "requested_time", requested_time, "clips.size()", clips.size(), "nearby_clips.size()", nearby_clips.size(), "", -1, "", -1);

				// Find Clips near this time
				for (int clip_index = 0; clip_index < nearby_clips.size(); clip_index++)
				{
					// Get clip object from the iterator
					Clip *clip = nearby_clips[clip_index];

					// Does clip intersect the current requested time
					bool does_clip_intersect = (clip->Position() <= requested_time && clip->Position() + clip->Duration() >= requested_time);

					// Debug output
					AppendDebugMethod("Timeline::GetFrame (Does clip intersect)", "frame_number", frame_number, "requested_time", requested_time, "clip->Position()", clip->Position(), "clip->Duration()", clip->Duration(), "does_clip_intersect", does_clip_intersect, "", -1);

					// Clip is visible
					if (does_clip_intersect)
					{
						// Determine if clip is "top" clip on this layer (only happens when multiple clips are overlapping)
						bool is_top_clip = true;
						for (int top_clip_index = 0; top_clip_index < nearby_clips.size(); top_clip_index++)
						{
							Clip *nearby_clip = nearby_clips[top_clip_index];
							if (clip->Id() != nearby_clip->Id() && clip->Layer() == nearby_clip->Layer() &&
									nearby_clip->Position() <= requested_time && nearby_clip->Position() + nearby_clip->Duration() >= requested_time &&
									nearby_clip->Position() > clip->Position()) {
								is_top_clip = false;
								break;
							}
						}

						// Determine the frame needed for this clip (based on the position on the timeline)
						float time_diff = (requested_time - clip->Position()) + clip->Start();
						int clip_frame_number = round(time_diff * info.fps.ToFloat()) + 1;

						// Debug output
						AppendDebugMethod("Timeline::GetFrame (Calculate clip's frame #)", "time_diff", time_diff, "requested_time", requested_time, "clip->Position()", clip->Position(), "clip->Start()", clip->Start(), "info.fps.ToFloat()", info.fps.ToFloat(), "clip_frame_number", clip_frame_number);

						// Add clip's frame as layer
						add_layer(new_frame, clip, clip_frame_number, frame_number, is_top_clip);

					} else
						// Debug output
						AppendDebugMethod("Timeline::GetFrame (clip does not intersect)", "frame_number", frame_number, "requested_time", requested_time, "does_clip_intersect", does_clip_intersect, "", -1, "", -1, "", -1);

				} // end clip loop

				// Debug output
				AppendDebugMethod("Timeline::GetFrame (Add frame to cache)", "frame_number", frame_number, "info.width", info.width, "info.height", info.height, "", -1, "", -1, "", -1);

				// Add final frame to cache
				final_cache.Add(frame_number, new_frame);

			} // end frame loop
		} // end parallel

		// Debug output
		AppendDebugMethod("Timeline::GetFrame (end parallel region)", "requested_frame", requested_frame, "omp_get_thread_num()", omp_get_thread_num(), "", -1, "", -1, "", -1, "", -1);

		// Return frame (or blank frame)
		return final_cache.GetFrame(requested_frame);
	}
}
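
A minimal usage sketch for GetFrame(), assuming a Timeline object that has already been configured with clips. Open() is required first (otherwise the ReaderClosed exception above is thrown), and the frame count of 30 is an arbitrary illustration.

// Hedged sketch (not from the original source): read the first 30 composited
// frames from an already-configured Timeline.
void example_read_frames(Timeline &timeline)
{
	timeline.Open();

	for (long int frame_number = 1; frame_number <= 30; frame_number++)
	{
		tr1::shared_ptr<Frame> frame = timeline.GetFrame(frame_number);
		// ... hand the composited frame to an encoder or preview widget here ...
	}

	timeline.Close();
}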