Timeline.cpp
/**
 * @file
 * @brief Source file for Timeline class
 */

// Copyright (c) 2008-2019 OpenShot Studios, LLC
//
// SPDX-License-Identifier: LGPL-3.0-or-later

#include "Timeline.h"

#include "CacheBase.h"
#include "CacheDisk.h"
#include "CacheMemory.h"
#include "CrashHandler.h"
#include "FrameMapper.h"
#include "Exceptions.h"

#include <QDir>
#include <QFileInfo>

using namespace openshot;

// Default Constructor for the timeline (which sets the canvas width and height)
Timeline::Timeline(int width, int height, Fraction fps, int sample_rate, int channels, ChannelLayout channel_layout) :
    is_open(false), auto_map_clips(true), managed_cache(true), path(""),
    max_concurrent_frames(OPEN_MP_NUM_PROCESSORS), max_time(0.0)
{
    // Create CrashHandler and attach (in case of errors)
    CrashHandler::Instance();

    // Init viewport size (curve based, because it can be animated)
    viewport_scale = Keyframe(100.0);
    viewport_x = Keyframe(0.0);
    viewport_y = Keyframe(0.0);

    // Init background color
    color.red = Keyframe(0.0);
    color.green = Keyframe(0.0);
    color.blue = Keyframe(0.0);

    // Init FileInfo struct (clear all values)
    info.width = width;
    info.height = height;
    preview_width = info.width;
    preview_height = info.height;
    info.fps = fps;
    info.sample_rate = sample_rate;
    info.channels = channels;
    info.channel_layout = channel_layout;
    info.video_timebase = fps.Reciprocal();
    info.duration = 60 * 30; // 30 minute default duration
    info.has_audio = true;
    info.has_video = true;
    info.video_length = info.fps.ToFloat() * info.duration;
    info.display_ratio = openshot::Fraction(width, height);
    info.display_ratio.Reduce();
    info.pixel_ratio = openshot::Fraction(1, 1);
    info.acodec = "openshot::timeline";
    info.vcodec = "openshot::timeline";

    // Init max image size
    SetMaxSize(info.width, info.height);

    // Init cache
    final_cache = new CacheMemory();
    final_cache->SetMaxBytesFromInfo(max_concurrent_frames * 4, info.width, info.height, info.sample_rate, info.channels);
}
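
/* Usage sketch (editor's addition, not part of the original Timeline.cpp):
 * constructing a 1080p 30fps stereo timeline and pulling one frame. The clip
 * path "video.mp4" is hypothetical.
 *
 * @code
 * openshot::Clip c("video.mp4");  // reader auto-detected from the path
 * openshot::Timeline t(1920, 1080, openshot::Fraction(30, 1),
 *                      44100, 2, openshot::LAYOUT_STEREO);
 * t.AddClip(&c);
 * t.Open();
 * std::shared_ptr<openshot::Frame> f = t.GetFrame(1);
 * t.Close();
 * @endcode
 */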

// Delegating constructor that copies parameters from a provided ReaderInfo
Timeline::Timeline(const ReaderInfo info) : Timeline::Timeline(
    info.width, info.height, info.fps, info.sample_rate,
    info.channels, info.channel_layout) {}

// Constructor for the timeline (which loads a JSON structure from a file path, and initializes a timeline)
Timeline::Timeline(const std::string& projectPath, bool convert_absolute_paths) :
    is_open(false), auto_map_clips(true), managed_cache(true), path(projectPath),
    max_concurrent_frames(OPEN_MP_NUM_PROCESSORS), max_time(0.0) {

    // Create CrashHandler and attach (in case of errors)
    CrashHandler::Instance();

    // Init final cache as NULL (will be created after loading json)
    final_cache = NULL;

    // Init viewport size (curve based, because it can be animated)
    viewport_scale = Keyframe(100.0);
    viewport_x = Keyframe(0.0);
    viewport_y = Keyframe(0.0);

    // Init background color
    color.red = Keyframe(0.0);
    color.green = Keyframe(0.0);
    color.blue = Keyframe(0.0);

    // Check if path exists
    QFileInfo filePath(QString::fromStdString(path));
    if (!filePath.exists()) {
        throw InvalidFile("File could not be opened.", path);
    }

    // Check OpenShot Install Path exists
    Settings *s = Settings::Instance();
    QDir openshotPath(QString::fromStdString(s->PATH_OPENSHOT_INSTALL));
    if (!openshotPath.exists()) {
        throw InvalidFile("PATH_OPENSHOT_INSTALL could not be found.", s->PATH_OPENSHOT_INSTALL);
    }
    QDir openshotTransPath(openshotPath.filePath("transitions"));
    if (!openshotTransPath.exists()) {
        throw InvalidFile("PATH_OPENSHOT_INSTALL/transitions could not be found.", openshotTransPath.path().toStdString());
    }

    // Determine asset path
    QString asset_name = filePath.baseName().left(30) + "_assets";
    QDir asset_folder(filePath.dir().filePath(asset_name));
    if (!asset_folder.exists()) {
        // Create directory if needed
        asset_folder.mkpath(".");
    }

    // Load UTF-8 project file into QString
    QFile projectFile(QString::fromStdString(path));
    projectFile.open(QFile::ReadOnly);
    QString projectContents = QString::fromUtf8(projectFile.readAll());

    // Convert all relative paths into absolute paths (if requested)
    if (convert_absolute_paths) {

        // Find all "image" or "path" references in JSON (using regex). Must loop through match results
        // due to our path matching needs, which are not possible with the QString::replace() function.
        QRegularExpression allPathsRegex(QStringLiteral("\"(image|path)\":.*?\"(.*?)\""));
        std::vector<QRegularExpressionMatch> matchedPositions;
        QRegularExpressionMatchIterator i = allPathsRegex.globalMatch(projectContents);
        while (i.hasNext()) {
            QRegularExpressionMatch match = i.next();
            if (match.hasMatch()) {
                // Push all match objects into a vector (so we can reverse them later)
                matchedPositions.push_back(match);
            }
        }

        // Reverse the matches (bottom of file to top, so our replacements don't break our match positions)
        std::vector<QRegularExpressionMatch>::reverse_iterator itr;
        for (itr = matchedPositions.rbegin(); itr != matchedPositions.rend(); itr++) {
            QRegularExpressionMatch match = *itr;
            QString relativeKey = match.captured(1); // image or path
            QString relativePath = match.captured(2); // relative file path
            QString absolutePath = "";

            // Find absolute path of all path, image (including special replacements of @assets and @transitions)
            if (relativePath.startsWith("@assets")) {
                absolutePath = QFileInfo(asset_folder.absoluteFilePath(relativePath.replace("@assets", "."))).canonicalFilePath();
            } else if (relativePath.startsWith("@transitions")) {
                absolutePath = QFileInfo(openshotTransPath.absoluteFilePath(relativePath.replace("@transitions", "."))).canonicalFilePath();
            } else {
                absolutePath = QFileInfo(filePath.absoluteDir().absoluteFilePath(relativePath)).canonicalFilePath();
            }

            // Replace path in JSON content, if an absolute path was successfully found
            if (!absolutePath.isEmpty()) {
                projectContents.replace(match.capturedStart(0), match.capturedLength(0), "\"" + relativeKey + "\": \"" + absolutePath + "\"");
            }
        }
        // Clear matches
        matchedPositions.clear();
    }

    // Set JSON of project
    SetJson(projectContents.toStdString());

    // Calculate valid duration and set has_audio and has_video
    // based on content inside this Timeline's clips.
    float calculated_duration = 0.0;
    for (auto clip : clips)
    {
        float clip_last_frame = clip->Position() + clip->Duration();
        if (clip_last_frame > calculated_duration)
            calculated_duration = clip_last_frame;
        if (clip->Reader() && clip->Reader()->info.has_audio)
            info.has_audio = true;
        if (clip->Reader() && clip->Reader()->info.has_video)
            info.has_video = true;
    }
    info.video_length = calculated_duration * info.fps.ToFloat();
    info.duration = calculated_duration;

    // Init FileInfo settings
    info.acodec = "openshot::timeline";
    info.vcodec = "openshot::timeline";
    info.video_timebase = info.fps.Reciprocal();
    info.has_video = true;
    info.has_audio = true;

    // Init max image size
    SetMaxSize(info.width, info.height);

    // Init cache
    final_cache = new CacheMemory();
    final_cache->SetMaxBytesFromInfo(max_concurrent_frames * 4, info.width, info.height, info.sample_rate, info.channels);
}
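
/* Usage sketch (editor's addition): opening a saved project file. The second
 * argument asks the constructor to rewrite relative "path"/"image" references
 * (including the @assets and @transitions placeholders) into absolute paths.
 * The file name "project.osp" is hypothetical.
 *
 * @code
 * openshot::Timeline t("/home/user/project.osp", true);
 * t.Open();
 * // duration, has_audio, and has_video are now derived from the loaded clips
 * @endcode
 */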

// Destructor for the timeline
Timeline::~Timeline() {
    if (is_open) {
        // Auto Close if not already
        Close();
    }

    // Remove all clips, effects, and frame mappers
    Clear();

    // Destroy previous cache (if managed by timeline)
    if (managed_cache && final_cache) {
        delete final_cache;
        final_cache = NULL;
    }
}

// Add to the tracked_objects map a pointer to a tracked object (TrackedObjectBBox)
void Timeline::AddTrackedObject(std::shared_ptr<openshot::TrackedObjectBase> trackedObject){

    // Search for the tracked object on the map
    auto iterator = tracked_objects.find(trackedObject->Id());

    if (iterator != tracked_objects.end()){
        // Tracked object's id already present on the map, overwrite it
        iterator->second = trackedObject;
    }
    else{
        // Tracked object's id not present -> insert it on the map
        tracked_objects[trackedObject->Id()] = trackedObject;
    }

    return;
}

// Return tracked object pointer by its id
std::shared_ptr<openshot::TrackedObjectBase> Timeline::GetTrackedObject(std::string id) const{

    // Search for the tracked object on the map
    auto iterator = tracked_objects.find(id);

    if (iterator != tracked_objects.end()){
        // Id found, return the pointer to the tracked object
        std::shared_ptr<openshot::TrackedObjectBase> trackedObject = iterator->second;
        return trackedObject;
    }
    else {
        // Id not found, return a null pointer
        return nullptr;
    }
}

// Return the IDs of the tracked objects as a list of strings
std::list<std::string> Timeline::GetTrackedObjectsIds() const{

    // Create a list of strings
    std::list<std::string> trackedObjects_ids;

    // Iterate through the tracked_objects map
    for (auto const& it: tracked_objects){
        // Add the IDs to the list
        trackedObjects_ids.push_back(it.first);
    }

    return trackedObjects_ids;
}

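/* Usage sketch (editor's addition): registering and querying tracked objects.
 * Assumes TrackedObjectBBox is default-constructible here; in practice these
 * objects are usually produced by a tracker effect. "timeline" is a Timeline
 * instance.
 *
 * @code
 * auto obj = std::make_shared<openshot::TrackedObjectBBox>();
 * timeline.AddTrackedObject(obj);
 * for (const auto& id : timeline.GetTrackedObjectsIds()) {
 *     auto found = timeline.GetTrackedObject(id);  // nullptr if id unknown
 * }
 * @endcode
 */
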
#ifdef USE_OPENCV
// Return the trackedObject's properties as a JSON string
std::string Timeline::GetTrackedObjectValues(std::string id, int64_t frame_number) const {

    // Initialize the JSON object
    Json::Value trackedObjectJson;

    // Search for the tracked object on the map
    auto iterator = tracked_objects.find(id);

    if (iterator != tracked_objects.end())
    {
        // Id found, get the object pointer and cast it as a TrackedObjectBBox
        std::shared_ptr<TrackedObjectBBox> trackedObject = std::static_pointer_cast<TrackedObjectBBox>(iterator->second);

        // Get the trackedObject values for this frame (or fall back to its first frame)
        if (trackedObject->ExactlyContains(frame_number)){
            BBox box = trackedObject->GetBox(frame_number);
            float x1 = box.cx - (box.width/2);
            float y1 = box.cy - (box.height/2);
            float x2 = box.cx + (box.width/2);
            float y2 = box.cy + (box.height/2);
            float rotation = box.angle;

            trackedObjectJson["x1"] = x1;
            trackedObjectJson["y1"] = y1;
            trackedObjectJson["x2"] = x2;
            trackedObjectJson["y2"] = y2;
            trackedObjectJson["rotation"] = rotation;

        } else {
            BBox box = trackedObject->BoxVec.begin()->second;
            float x1 = box.cx - (box.width/2);
            float y1 = box.cy - (box.height/2);
            float x2 = box.cx + (box.width/2);
            float y2 = box.cy + (box.height/2);
            float rotation = box.angle;

            trackedObjectJson["x1"] = x1;
            trackedObjectJson["y1"] = y1;
            trackedObjectJson["x2"] = x2;
            trackedObjectJson["y2"] = y2;
            trackedObjectJson["rotation"] = rotation;
        }

    }
    else {
        // Id not found, return all 0 values
        trackedObjectJson["x1"] = 0;
        trackedObjectJson["y1"] = 0;
        trackedObjectJson["x2"] = 0;
        trackedObjectJson["y2"] = 0;
        trackedObjectJson["rotation"] = 0;
    }

    return trackedObjectJson.toStyledString();
}
#endif

// Add an openshot::Clip to the timeline
void Timeline::AddClip(Clip* clip)
{
    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

    // Assign timeline to clip
    clip->ParentTimeline(this);

    // Clear cache of clip and nested reader (if any)
    if (clip->Reader() && clip->Reader()->GetCache())
        clip->Reader()->GetCache()->Clear();

    // All clips should be converted to the frame rate of this timeline
    if (auto_map_clips) {
        // Apply framemapper (or update existing framemapper)
        apply_mapper_to_clip(clip);
    }

    // Add clip to list
    clips.push_back(clip);

    // Sort clips
    sort_clips();
}

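/* Usage sketch (editor's addition): clips are positioned in seconds and
 * stacked by layer number (higher layers composite on top). File names are
 * hypothetical. Clips added this way remain caller-owned; only clips the
 * timeline itself allocates (e.g. via SetJsonValue) are tracked in
 * allocated_clips and deleted by the timeline.
 *
 * @code
 * openshot::Clip* bg = new openshot::Clip("background.mp4");
 * bg->Position(0.0);
 * bg->Layer(1);
 * openshot::Clip* fg = new openshot::Clip("overlay.png");
 * fg->Position(2.5);     // starts 2.5 seconds into the timeline
 * fg->Layer(2);          // drawn above bg
 * timeline.AddClip(bg);
 * timeline.AddClip(fg);  // AddClip re-sorts clips by position
 * @endcode
 */
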
// Add an effect to the timeline
void Timeline::AddEffect(EffectBase* effect)
{
    // Assign timeline to effect
    effect->ParentTimeline(this);

    // Add effect to list
    effects.push_back(effect);

    // Sort effects
    sort_effects();
}

// Remove an effect from the timeline
void Timeline::RemoveEffect(EffectBase* effect)
{
    effects.remove(effect);

    // Delete effect object (if timeline allocated it)
    bool allocated = allocated_effects.count(effect);
    if (allocated) {
        allocated_effects.erase(effect);
        delete effect;
        effect = NULL;
    }

    // Sort effects
    sort_effects();
}

// Remove an openshot::Clip from the timeline
void Timeline::RemoveClip(Clip* clip)
{
    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

    clips.remove(clip);

    // Delete clip object (if timeline allocated it)
    bool allocated = allocated_clips.count(clip);
    if (allocated) {
        allocated_clips.erase(clip);
        delete clip;
        clip = NULL;
    }

    // Sort clips
    sort_clips();
}

// Look up a clip
openshot::Clip* Timeline::GetClip(const std::string& id)
{
    // Find the matching clip (if any)
    for (const auto& clip : clips) {
        if (clip->Id() == id) {
            return clip;
        }
    }
    return nullptr;
}

// Look up a timeline effect
openshot::EffectBase* Timeline::GetEffect(const std::string& id)
{
    // Find the matching effect (if any)
    for (const auto& effect : effects) {
        if (effect->Id() == id) {
            return effect;
        }
    }
    return nullptr;
}

// Look up an effect by ID (searching all clips on the timeline)
openshot::EffectBase* Timeline::GetClipEffect(const std::string& id)
{
    // Search all clips for matching effect ID
    for (const auto& clip : clips) {
        const auto e = clip->GetEffect(id);
        if (e != nullptr) {
            return e;
        }
    }
    return nullptr;
}

// Return the list of effects on all clips
std::list<openshot::EffectBase*> Timeline::ClipEffects() const {

    // Initialize the list
    std::list<EffectBase*> timelineEffectsList;

    // Loop through all clips
    for (const auto& clip : clips) {

        // Get the clip's list of effects
        std::list<EffectBase*> clipEffectsList = clip->Effects();

        // Append the clip's effects to the list
        timelineEffectsList.insert(timelineEffectsList.end(), clipEffectsList.begin(), clipEffectsList.end());
    }

    return timelineEffectsList;
}

// Compute the end time of the latest timeline element
double Timeline::GetMaxTime() {
    // Return cached max_time variable (threadsafe)
    return max_time;
}

// Compute the highest frame# based on the latest time and FPS
int64_t Timeline::GetMaxFrame() {
    double fps = info.fps.ToDouble();
    auto max_time = GetMaxTime();
    return std::round(max_time * fps) + 1;
}

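/* Worked example (editor's addition): with max_time = 10.0 seconds and
 * fps = 30/1, GetMaxFrame() returns round(10.0 * 30.0) + 1 = 301, reflecting
 * libopenshot's 1-based frame numbering.
 */
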
// Apply a FrameMapper to a clip which matches the settings of this timeline
void Timeline::apply_mapper_to_clip(Clip* clip)
{
    // Determine type of reader
    ReaderBase* clip_reader = NULL;
    if (clip->Reader()->Name() == "FrameMapper")
    {
        // Get the existing reader
        clip_reader = (ReaderBase*) clip->Reader();

        // Update the mapping
        FrameMapper* clip_mapped_reader = (FrameMapper*) clip_reader;
        clip_mapped_reader->ChangeMapping(info.fps, PULLDOWN_NONE, info.sample_rate, info.channels, info.channel_layout);

    } else {

        // Create a new FrameMapper to wrap the current reader
        FrameMapper* mapper = new FrameMapper(clip->Reader(), info.fps, PULLDOWN_NONE, info.sample_rate, info.channels, info.channel_layout);
        allocated_frame_mappers.insert(mapper);
        clip_reader = (ReaderBase*) mapper;
    }

    // Update clip reader
    clip->Reader(clip_reader);
}

// Apply the timeline's framerate and samplerate to all clips
void Timeline::ApplyMapperToClips()
{
    // Clear all cached frames
    ClearAllCache();

    // Loop through all clips
    for (auto clip : clips)
    {
        // Apply framemapper (or update existing framemapper)
        apply_mapper_to_clip(clip);
    }
}

// Calculate time of a frame number, based on a framerate
double Timeline::calculate_time(int64_t number, Fraction rate)
{
    // Get float version of fps fraction
    double raw_fps = rate.ToFloat();

    // Return the time (in seconds) of this frame
    return double(number - 1) / raw_fps;
}

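/* Worked example (editor's addition): frame numbers are 1-based, so frame 1
 * maps to 0.0 seconds. At rate = 30/1, calculate_time(31, rate) returns
 * (31 - 1) / 30.0 = 1.0 second.
 */
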
// Apply effects to the source frame (if any)
std::shared_ptr<Frame> Timeline::apply_effects(std::shared_ptr<Frame> frame, int64_t timeline_frame_number, int layer)
{
    // Debug output
    ZmqLogger::Instance()->AppendDebugMethod(
        "Timeline::apply_effects",
        "frame->number", frame->number,
        "timeline_frame_number", timeline_frame_number,
        "layer", layer);

    // Find Effects at this position and layer
    for (auto effect : effects)
    {
        // Does the effect intersect the current requested time
        long effect_start_position = round(effect->Position() * info.fps.ToDouble()) + 1;
        long effect_end_position = round((effect->Position() + (effect->Duration())) * info.fps.ToDouble());

        bool does_effect_intersect = (effect_start_position <= timeline_frame_number && effect_end_position >= timeline_frame_number && effect->Layer() == layer);

        // Debug output
        ZmqLogger::Instance()->AppendDebugMethod(
            "Timeline::apply_effects (Does effect intersect)",
            "effect->Position()", effect->Position(),
            "does_effect_intersect", does_effect_intersect,
            "timeline_frame_number", timeline_frame_number,
            "layer", layer);

        // Effect is visible
        if (does_effect_intersect)
        {
            // Determine the frame needed for this effect (based on the position on the timeline)
            long effect_start_frame = (effect->Start() * info.fps.ToDouble()) + 1;
            long effect_frame_number = timeline_frame_number - effect_start_position + effect_start_frame;

            // Debug output
            ZmqLogger::Instance()->AppendDebugMethod(
                "Timeline::apply_effects (Process Effect)",
                "effect_frame_number", effect_frame_number,
                "does_effect_intersect", does_effect_intersect);

            // Apply the effect to this frame
            frame = effect->GetFrame(frame, effect_frame_number);
        }

    } // end effect loop

    // Return modified frame
    return frame;
}

// Get or generate a blank frame
std::shared_ptr<Frame> Timeline::GetOrCreateFrame(std::shared_ptr<Frame> background_frame, Clip* clip, int64_t number, openshot::TimelineInfoStruct* options)
{
    std::shared_ptr<Frame> new_frame;

    // Init some basic properties about this frame
    int samples_in_frame = Frame::GetSamplesPerFrame(number, info.fps, info.sample_rate, info.channels);

    try {
        // Debug output
        ZmqLogger::Instance()->AppendDebugMethod(
            "Timeline::GetOrCreateFrame (from reader)",
            "number", number,
            "samples_in_frame", samples_in_frame);

        // Attempt to get a frame (but this could fail if a reader has just been closed)
        new_frame = std::shared_ptr<Frame>(clip->GetFrame(background_frame, number, options));

        // Return real frame
        return new_frame;

    } catch (const ReaderClosed & e) {
        // ...
    } catch (const OutOfBoundsFrame & e) {
        // ...
    }

    // Debug output
    ZmqLogger::Instance()->AppendDebugMethod(
        "Timeline::GetOrCreateFrame (create blank)",
        "number", number,
        "samples_in_frame", samples_in_frame);

    // Create blank frame
    return new_frame;
}

// Process a new layer of video or audio
void Timeline::add_layer(std::shared_ptr<Frame> new_frame, Clip* source_clip, int64_t clip_frame_number, bool is_top_clip, float max_volume)
{
    // Create timeline options (with details about this current frame request)
    TimelineInfoStruct* options = new TimelineInfoStruct();
    options->is_top_clip = is_top_clip;

    // Get the clip's frame, composited on top of the current timeline frame
    std::shared_ptr<Frame> source_frame;
    source_frame = GetOrCreateFrame(new_frame, source_clip, clip_frame_number, options);
    delete options;

    // No frame found... so bail
    if (!source_frame)
        return;

    // Debug output
    ZmqLogger::Instance()->AppendDebugMethod(
        "Timeline::add_layer",
        "new_frame->number", new_frame->number,
        "clip_frame_number", clip_frame_number);

    /* COPY AUDIO - with correct volume */
    if (source_clip->Reader()->info.has_audio) {
        // Debug output
        ZmqLogger::Instance()->AppendDebugMethod(
            "Timeline::add_layer (Copy Audio)",
            "source_clip->Reader()->info.has_audio", source_clip->Reader()->info.has_audio,
            "source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(),
            "info.channels", info.channels,
            "clip_frame_number", clip_frame_number);

        if (source_frame->GetAudioChannelsCount() == info.channels && source_clip->has_audio.GetInt(clip_frame_number) != 0)
            for (int channel = 0; channel < source_frame->GetAudioChannelsCount(); channel++)
            {
                // Get volume from previous frame and this frame
                float previous_volume = source_clip->volume.GetValue(clip_frame_number - 1);
                float volume = source_clip->volume.GetValue(clip_frame_number);
                int channel_filter = source_clip->channel_filter.GetInt(clip_frame_number); // optional channel to filter (if not -1)
                int channel_mapping = source_clip->channel_mapping.GetInt(clip_frame_number); // optional channel to map this channel to (if not -1)

                // Apply volume mixing strategy
                if (source_clip->mixing == VOLUME_MIX_AVERAGE && max_volume > 1.0) {
                    // Don't allow this clip to exceed 100% (divide volume equally between all overlapping clips with volume)
                    previous_volume = previous_volume / max_volume;
                    volume = volume / max_volume;
                }
                else if (source_clip->mixing == VOLUME_MIX_REDUCE && max_volume > 1.0) {
                    // Reduce clip volume by a bit, hoping it will prevent exceeding 100% (but it is very possible it will)
                    previous_volume = previous_volume * 0.77;
                    volume = volume * 0.77;
                }

                // If channel filter enabled, check for correct channel (and skip non-matching channels)
                if (channel_filter != -1 && channel_filter != channel)
                    continue; // skip to next channel

                // If no volume on this frame or previous frame, do nothing
                if (previous_volume == 0.0 && volume == 0.0)
                    continue; // skip to next channel

                // If channel mapping disabled, just use the current channel
                if (channel_mapping == -1)
                    channel_mapping = channel;

                // Apply ramp to source frame (if needed)
                if (!isEqual(previous_volume, 1.0) || !isEqual(volume, 1.0))
                    source_frame->ApplyGainRamp(channel_mapping, 0, source_frame->GetAudioSamplesCount(), previous_volume, volume);

                // TODO: Improve FrameMapper (or Timeline) to always get the correct number of samples per frame.
                // Currently, the ResampleContext sometimes leaves behind a few samples for the next call, and the
                // number of samples returned is variable... and does not match the number expected.
                // This is a crude solution at best. =)
                if (new_frame->GetAudioSamplesCount() != source_frame->GetAudioSamplesCount()){
                    // Force timeline frame to match the source frame
                    new_frame->ResizeAudio(info.channels, source_frame->GetAudioSamplesCount(), info.sample_rate, info.channel_layout);
                }
                // Copy audio samples (and set initial volume). Mix samples with existing audio samples;
                // the gains are added together, so set each gain carefully or the sum can exceed 1.0
                // (causing audio distortion).
                new_frame->AddAudio(false, channel_mapping, 0, source_frame->GetAudioSamples(channel), source_frame->GetAudioSamplesCount(), 1.0);
            }
        else
            // Debug output
            ZmqLogger::Instance()->AppendDebugMethod(
                "Timeline::add_layer (No Audio Copied - Wrong # of Channels)",
                "source_clip->Reader()->info.has_audio",
                source_clip->Reader()->info.has_audio,
                "source_frame->GetAudioChannelsCount()",
                source_frame->GetAudioChannelsCount(),
                "info.channels", info.channels,
                "clip_frame_number", clip_frame_number);
    }

    // Debug output
    ZmqLogger::Instance()->AppendDebugMethod(
        "Timeline::add_layer (Transform: Composite Image Layer: Completed)",
        "source_frame->number", source_frame->number,
        "new_frame->GetImage()->width()", new_frame->GetWidth(),
        "new_frame->GetImage()->height()", new_frame->GetHeight());
}

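/* Worked example (editor's addition): if two overlapping clips on the same
 * frame each have volume 1.0, max_volume accumulates to 2.0. With
 * VOLUME_MIX_AVERAGE each clip is scaled to 1.0 / 2.0 = 0.5, so the mixed sum
 * stays at 100%; with VOLUME_MIX_REDUCE each clip is scaled by the fixed 0.77
 * factor instead (sum 1.54, which can still clip).
 */
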
// Update the list of 'opened' clips
void Timeline::update_open_clips(Clip *clip, bool does_clip_intersect)
{
    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

    // Debug output
    ZmqLogger::Instance()->AppendDebugMethod(
        "Timeline::update_open_clips (before)",
        "does_clip_intersect", does_clip_intersect,
        "closing_clips.size()", closing_clips.size(),
        "open_clips.size()", open_clips.size());

    // is clip already in list?
    bool clip_found = open_clips.count(clip);

    if (clip_found && !does_clip_intersect)
    {
        // Remove clip from 'opened' list, because it's closed now
        open_clips.erase(clip);

        // Close clip
        clip->Close();
    }
    else if (!clip_found && does_clip_intersect)
    {
        // Add clip to 'opened' list, because it's missing
        open_clips[clip] = clip;

        try {
            // Open the clip
            clip->Open();

        } catch (const InvalidFile & e) {
            // ...
        }
    }

    // Debug output
    ZmqLogger::Instance()->AppendDebugMethod(
        "Timeline::update_open_clips (after)",
        "does_clip_intersect", does_clip_intersect,
        "clip_found", clip_found,
        "closing_clips.size()", closing_clips.size(),
        "open_clips.size()", open_clips.size());
}

// Calculate the max duration (in seconds) of the timeline, based on all the clips, and cache the value
void Timeline::calculate_max_duration() {
    double last_clip = 0.0;
    double last_effect = 0.0;

    if (!clips.empty()) {
        const auto max_clip = std::max_element(
            clips.begin(), clips.end(), CompareClipEndFrames());
        last_clip = (*max_clip)->Position() + (*max_clip)->Duration();
    }
    if (!effects.empty()) {
        const auto max_effect = std::max_element(
            effects.begin(), effects.end(), CompareEffectEndFrames());
        last_effect = (*max_effect)->Position() + (*max_effect)->Duration();
    }
    max_time = std::max(last_clip, last_effect);
}

// Sort clips by position on the timeline
void Timeline::sort_clips()
{
    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

    // Debug output
    ZmqLogger::Instance()->AppendDebugMethod(
        "Timeline::SortClips",
        "clips.size()", clips.size());

    // sort clips
    clips.sort(CompareClips());

    // calculate max timeline duration
    calculate_max_duration();
}

// Sort effects by position on the timeline
void Timeline::sort_effects()
{
    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

    // sort effects
    effects.sort(CompareEffects());

    // calculate max timeline duration
    calculate_max_duration();
}

// Clear all clips from timeline
void Timeline::Clear() {
    ZmqLogger::Instance()->AppendDebugMethod("Timeline::Clear");

    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

    // Close all open clips
    for (auto clip : clips)
    {
        update_open_clips(clip, false);

        // Delete clip object (if timeline allocated it)
        bool allocated = allocated_clips.count(clip);
        if (allocated) {
            delete clip;
        }
    }
    // Clear all clips
    clips.clear();
    allocated_clips.clear();

    // Close all effects
    for (auto effect : effects)
    {
        // Delete effect object (if timeline allocated it)
        bool allocated = allocated_effects.count(effect);
        if (allocated) {
            delete effect;
        }
    }
    // Clear all effects
    effects.clear();
    allocated_effects.clear();

    // Delete all FrameMappers
    for (auto mapper : allocated_frame_mappers)
    {
        mapper->Reader(NULL);
        mapper->Close();
        delete mapper;
    }
    allocated_frame_mappers.clear();
}

// Close the reader (and any resources it was consuming)
void Timeline::Close() {
    ZmqLogger::Instance()->AppendDebugMethod("Timeline::Close");

    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

    // Close all open clips
    for (auto clip : clips)
    {
        // Open or Close this clip, based on if it's intersecting or not
        update_open_clips(clip, false);
    }

    // Mark timeline as closed
    is_open = false;

    // Clear all cache (deep clear, including nested Readers)
    ClearAllCache(true);
}

// Open the reader (and start consuming resources)
void Timeline::Open()
{
    is_open = true;
}

// Compare 2 floating point numbers for equality
bool Timeline::isEqual(double a, double b)
{
    return fabs(a - b) < 0.000001;
}

// Get an openshot::Frame object for a specific frame number of this reader.
std::shared_ptr<Frame> Timeline::GetFrame(int64_t requested_frame)
{
    // Adjust out of bounds frame number
    if (requested_frame < 1)
        requested_frame = 1;

    // Check cache
    std::shared_ptr<Frame> frame;
    frame = final_cache->GetFrame(requested_frame);
    if (frame) {
        // Debug output
        ZmqLogger::Instance()->AppendDebugMethod(
            "Timeline::GetFrame (Cached frame found)",
            "requested_frame", requested_frame);

        // Return cached frame
        return frame;
    }
    else
    {
        // Prevent async calls to the following code
        const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);

        // Check cache a 2nd time (the frame may have been added while waiting for the lock)
        std::shared_ptr<Frame> frame;
        frame = final_cache->GetFrame(requested_frame);
        if (frame) {
            // Debug output
            ZmqLogger::Instance()->AppendDebugMethod(
                "Timeline::GetFrame (Cached frame found on 2nd check)",
                "requested_frame", requested_frame);

            // Return cached frame
            return frame;
        } else {
            // Get a list of clips that intersect with the requested section of timeline
            // This also opens the readers for intersecting clips, and marks non-intersecting clips as 'needs closing'
            std::vector<Clip *> nearby_clips;
            nearby_clips = find_intersecting_clips(requested_frame, 1, true);

            // Debug output
            ZmqLogger::Instance()->AppendDebugMethod(
                "Timeline::GetFrame (processing frame)",
                "requested_frame", requested_frame,
                "omp_get_thread_num()", omp_get_thread_num());

            // Init some basic properties about this frame
            int samples_in_frame = Frame::GetSamplesPerFrame(requested_frame, info.fps, info.sample_rate, info.channels);

            // Create blank frame (which will become the requested frame)
            std::shared_ptr<Frame> new_frame(std::make_shared<Frame>(requested_frame, preview_width, preview_height, "#000000", samples_in_frame, info.channels));
            new_frame->AddAudioSilence(samples_in_frame);
            new_frame->SampleRate(info.sample_rate);
            new_frame->ChannelsLayout(info.channel_layout);

            // Debug output
            ZmqLogger::Instance()->AppendDebugMethod(
                "Timeline::GetFrame (Adding solid color)",
                "requested_frame", requested_frame,
                "info.width", info.width,
                "info.height", info.height);

            // Add Background Color to 1st layer (if animated or not black)
            if ((color.red.GetCount() > 1 || color.green.GetCount() > 1 || color.blue.GetCount() > 1) ||
                (color.red.GetValue(requested_frame) != 0.0 || color.green.GetValue(requested_frame) != 0.0 ||
                 color.blue.GetValue(requested_frame) != 0.0))
                new_frame->AddColor(preview_width, preview_height, color.GetColorHex(requested_frame));

            // Debug output
            ZmqLogger::Instance()->AppendDebugMethod(
                "Timeline::GetFrame (Loop through clips)",
                "requested_frame", requested_frame,
                "clips.size()", clips.size(),
                "nearby_clips.size()", nearby_clips.size());

            // Find Clips near this time
            for (auto clip : nearby_clips) {
                long clip_start_position = round(clip->Position() * info.fps.ToDouble()) + 1;
                long clip_end_position = round((clip->Position() + clip->Duration()) * info.fps.ToDouble());
                bool does_clip_intersect = (clip_start_position <= requested_frame && clip_end_position >= requested_frame);

                // Debug output
                ZmqLogger::Instance()->AppendDebugMethod(
                    "Timeline::GetFrame (Does clip intersect)",
                    "requested_frame", requested_frame,
                    "clip->Position()", clip->Position(),
                    "clip->Duration()", clip->Duration(),
                    "does_clip_intersect", does_clip_intersect);

                // Clip is visible
                if (does_clip_intersect) {
                    // Determine if clip is "top" clip on this layer (only happens when multiple clips are overlapping)
                    bool is_top_clip = true;
                    float max_volume = 0.0;
                    for (auto nearby_clip : nearby_clips) {
                        long nearby_clip_start_position = round(nearby_clip->Position() * info.fps.ToDouble()) + 1;
                        long nearby_clip_end_position = round((nearby_clip->Position() + nearby_clip->Duration()) * info.fps.ToDouble()) + 1;
                        long nearby_clip_start_frame = (nearby_clip->Start() * info.fps.ToDouble()) + 1;
                        long nearby_clip_frame_number = requested_frame - nearby_clip_start_position + nearby_clip_start_frame;

                        // Determine if top clip
                        if (clip->Id() != nearby_clip->Id() && clip->Layer() == nearby_clip->Layer() &&
                            nearby_clip_start_position <= requested_frame && nearby_clip_end_position >= requested_frame &&
                            nearby_clip_start_position > clip_start_position && is_top_clip == true) {
                            is_top_clip = false;
                        }

                        // Determine max volume of overlapping clips
                        if (nearby_clip->Reader() && nearby_clip->Reader()->info.has_audio &&
                            nearby_clip->has_audio.GetInt(nearby_clip_frame_number) != 0 &&
                            nearby_clip_start_position <= requested_frame && nearby_clip_end_position >= requested_frame) {
                            max_volume += nearby_clip->volume.GetValue(nearby_clip_frame_number);
                        }
                    }

                    // Determine the frame needed for this clip (based on the position on the timeline)
                    long clip_start_frame = (clip->Start() * info.fps.ToDouble()) + 1;
                    long clip_frame_number = requested_frame - clip_start_position + clip_start_frame;

                    // Debug output
                    ZmqLogger::Instance()->AppendDebugMethod(
                        "Timeline::GetFrame (Calculate clip's frame #)",
                        "clip->Position()", clip->Position(),
                        "clip->Start()", clip->Start(),
                        "info.fps.ToFloat()", info.fps.ToFloat(),
                        "clip_frame_number", clip_frame_number);

                    // Add clip's frame as layer
                    add_layer(new_frame, clip, clip_frame_number, is_top_clip, max_volume);

                } else {
                    // Debug output
                    ZmqLogger::Instance()->AppendDebugMethod(
                        "Timeline::GetFrame (clip does not intersect)",
                        "requested_frame", requested_frame,
                        "does_clip_intersect", does_clip_intersect);
                }

            } // end clip loop

            // Debug output
            ZmqLogger::Instance()->AppendDebugMethod(
                "Timeline::GetFrame (Add frame to cache)",
                "requested_frame", requested_frame,
                "info.width", info.width,
                "info.height", info.height);

            // Set frame # on mapped frame
            new_frame->SetFrameNumber(requested_frame);

            // Add final frame to cache
            final_cache->Add(new_frame);

            // Return frame (or blank frame)
            return new_frame;
        }
    }
}

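/* Usage sketch (editor's addition): a simple sequential render loop. Pulling
 * frames in order keeps the final cache and the open-clip bookkeeping warm.
 * "timeline" is a Timeline instance.
 *
 * @code
 * timeline.Open();
 * for (int64_t n = 1; n <= timeline.GetMaxFrame(); ++n) {
 *     std::shared_ptr<openshot::Frame> f = timeline.GetFrame(n);
 *     // hand f to an encoder or preview widget here
 * }
 * timeline.Close();
 * @endcode
 */
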
// Find intersecting clips (or non intersecting clips)
std::vector<Clip*> Timeline::find_intersecting_clips(int64_t requested_frame, int number_of_frames, bool include)
{
    // Find matching clips
    std::vector<Clip*> matching_clips;

    // Calculate the requested frame range
    float min_requested_frame = requested_frame;
    float max_requested_frame = requested_frame + (number_of_frames - 1);

    // Find Clips at this time
    for (auto clip : clips)
    {
        // Does clip intersect the current requested time
        long clip_start_position = round(clip->Position() * info.fps.ToDouble()) + 1;
        long clip_end_position = round((clip->Position() + clip->Duration()) * info.fps.ToDouble()) + 1;

        bool does_clip_intersect =
            (clip_start_position <= min_requested_frame || clip_start_position <= max_requested_frame) &&
            (clip_end_position >= min_requested_frame || clip_end_position >= max_requested_frame);

        // Debug output
        ZmqLogger::Instance()->AppendDebugMethod(
            "Timeline::find_intersecting_clips (Is clip near or intersecting)",
            "requested_frame", requested_frame,
            "min_requested_frame", min_requested_frame,
            "max_requested_frame", max_requested_frame,
            "clip->Position()", clip->Position(),
            "does_clip_intersect", does_clip_intersect);

        // Open (or schedule for closing) this clip, based on if it's intersecting or not
        update_open_clips(clip, does_clip_intersect);

        // Clip is visible
        if (does_clip_intersect && include)
            // Add the intersecting clip
            matching_clips.push_back(clip);

        else if (!does_clip_intersect && !include)
            // Add the non-intersecting clip
            matching_clips.push_back(clip);

    } // end clip loop

    // return list
    return matching_clips;
}

// Set the cache object used by this reader
void Timeline::SetCache(CacheBase* new_cache) {
    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);

    // Destroy previous cache (if managed by timeline)
    if (managed_cache && final_cache) {
        delete final_cache;
        final_cache = NULL;
        managed_cache = false;
    }

    // Set new cache
    final_cache = new_cache;
}

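/* Usage sketch (editor's addition): installing a caller-owned cache. Once
 * SetCache() is used, managed_cache is false and the timeline will not delete
 * the cache. Assumes CacheMemory's byte-limited constructor; the 256 MB
 * figure is arbitrary.
 *
 * @code
 * auto* my_cache = new openshot::CacheMemory(256 * 1024 * 1024);
 * timeline.SetCache(my_cache);
 * // ... after timeline.Close(), the caller is responsible for deleting my_cache
 * @endcode
 */
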
// Generate JSON string of this object
std::string Timeline::Json() const {

    // Return formatted string
    return JsonValue().toStyledString();
}

// Generate Json::Value for this object
Json::Value Timeline::JsonValue() const {

    // Create root json object
    Json::Value root = ReaderBase::JsonValue(); // get parent properties
    root["type"] = "Timeline";
    root["viewport_scale"] = viewport_scale.JsonValue();
    root["viewport_x"] = viewport_x.JsonValue();
    root["viewport_y"] = viewport_y.JsonValue();
    root["color"] = color.JsonValue();
    root["path"] = path;

    // Add array of clips
    root["clips"] = Json::Value(Json::arrayValue);

    // Loop through clips
    for (const auto existing_clip : clips)
    {
        root["clips"].append(existing_clip->JsonValue());
    }

    // Add array of effects
    root["effects"] = Json::Value(Json::arrayValue);

    // Loop through effects
    for (const auto existing_effect: effects)
    {
        root["effects"].append(existing_effect->JsonValue());
    }

    // return JsonValue
    return root;
}

// Load JSON string into this object
void Timeline::SetJson(const std::string value) {

    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);

    // Parse JSON string into JSON objects
    try
    {
        const Json::Value root = openshot::stringToJson(value);
        // Set all values that match
        SetJsonValue(root);
    }
    catch (const std::exception& e)
    {
        // Error parsing JSON (or missing keys)
        throw InvalidJSON("JSON is invalid (missing keys or invalid data types)");
    }
}

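/* Usage sketch (editor's addition): loading a minimal JSON document. The keys
 * mirror JsonValue() above; this hand-written snippet is illustrative, not a
 * complete project file.
 *
 * @code
 * timeline.SetJson(R"({"duration": 10.0, "clips": [], "effects": []})");
 * @endcode
 */
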
// Load Json::Value into this object
void Timeline::SetJsonValue(const Json::Value root) {

    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);

    // Close timeline before we do anything (this closes all clips)
    bool was_open = is_open;
    Close();

    // Set parent data
    ReaderBase::SetJsonValue(root);

    // Set data from Json (if key is found)
    if (!root["path"].isNull())
        path = root["path"].asString();

    if (!root["clips"].isNull()) {
        // Clear existing clips
        clips.clear();

        // loop through clips
        for (const Json::Value existing_clip : root["clips"]) {
            // Create Clip
            Clip *c = new Clip();

            // Keep track of allocated clip objects
            allocated_clips.insert(c);

            // When a clip is attached to an object, it searches for the object
            // on its parent timeline. Setting the parent timeline of the clip here
            // allows attaching it to an object when exporting the project (because
            // the exporter script initializes the clip and its effects
            // before setting its parent timeline).
            c->ParentTimeline(this);

            // Load Json into Clip
            c->SetJsonValue(existing_clip);

            // Add Clip to Timeline
            AddClip(c);
        }
    }

    if (!root["effects"].isNull()) {
        // Clear existing effects
        effects.clear();

        // loop through effects
        for (const Json::Value existing_effect : root["effects"]) {
            // Create Effect
            EffectBase *e = NULL;

            if (!existing_effect["type"].isNull()) {
                // Create instance of effect
                if ( (e = EffectInfo().CreateEffect(existing_effect["type"].asString())) ) {

                    // Keep track of allocated effect objects
                    allocated_effects.insert(e);

                    // Load Json into Effect
                    e->SetJsonValue(existing_effect);

                    // Add Effect to Timeline
                    AddEffect(e);
                }
            }
        }
    }

    if (!root["duration"].isNull()) {
        // Update duration of timeline
        info.duration = root["duration"].asDouble();
        info.video_length = info.fps.ToFloat() * info.duration;
    }

    // Update preview settings
    preview_width = info.width;
    preview_height = info.height;

    // Re-open if needed
    if (was_open)
        Open();
}

// Apply a special formatted JSON object, which represents a change to the timeline (insert, update, delete)
void Timeline::ApplyJsonDiff(std::string value) {

    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);

    // Parse JSON string into JSON objects
    try
    {
        const Json::Value root = openshot::stringToJson(value);
        // Process the JSON change array, loop through each item
        for (const Json::Value change : root) {
            std::string change_key = change["key"][(uint)0].asString();

            // Process each type of change
            if (change_key == "clips")
                // Apply to CLIPS
                apply_json_to_clips(change);

            else if (change_key == "effects")
                // Apply to EFFECTS
                apply_json_to_effects(change);

            else
                // Apply to TIMELINE
                apply_json_to_timeline(change);

        }
    }
    catch (const std::exception& e)
    {
        // Error parsing JSON (or missing keys)
        throw InvalidJSON("JSON is invalid (missing keys or invalid data types)");
    }
}

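/* Usage sketch (editor's addition): a diff is a JSON array of changes, each
 * carrying a "type" (insert, update, or delete), a "key" path, and a "value".
 * The clip id "CLIP1" is hypothetical.
 *
 * @code
 * timeline.ApplyJsonDiff(R"([
 *   {"type": "update",
 *    "key": ["clips", {"id": "CLIP1"}],
 *    "value": {"id": "CLIP1", "position": 4.0, "start": 0.0, "end": 2.0}}
 * ])");
 * @endcode
 */
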
// Apply JSON diff to clips
void Timeline::apply_json_to_clips(Json::Value change) {

    // Get key and type of change
    std::string change_type = change["type"].asString();
    std::string clip_id = "";
    Clip *existing_clip = NULL;

    // Find id of clip (if any)
    for (auto key_part : change["key"]) {
        // Get each change
        if (key_part.isObject()) {
            // Check for id
            if (!key_part["id"].isNull()) {
                // Set the id
                clip_id = key_part["id"].asString();

                // Find matching clip in timeline (if any)
                for (auto c : clips)
                {
                    if (c->Id() == clip_id) {
                        existing_clip = c;
                        break; // clip found, exit loop
                    }
                }
                break; // id found, exit loop
            }
        }
    }

    // Check for a more specific key (targeting this clip's effects)
    // For example: ["clips", {"id":"123"}, "effects", {"id":"432"}]
    if (existing_clip && change["key"].size() == 4 && change["key"][2] == "effects")
    {
        // This change is actually targeting a specific effect under a clip (and not the clip)
        Json::Value key_part = change["key"][3];

        if (key_part.isObject()) {
            // Check for id
            if (!key_part["id"].isNull())
            {
                // Set the id
                std::string effect_id = key_part["id"].asString();

                // Find matching effect in timeline (if any)
                std::list<EffectBase*> effect_list = existing_clip->Effects();
                for (auto e : effect_list)
                {
                    if (e->Id() == effect_id) {
                        // Apply the change to the effect directly
                        apply_json_to_effects(change, e);

                        // Calculate start and end frames that this impacts, and remove those frames from the cache
                        int64_t new_starting_frame = (existing_clip->Position() * info.fps.ToDouble()) + 1;
                        int64_t new_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
                        final_cache->Remove(new_starting_frame - 8, new_ending_frame + 8);

                        return; // effect found, don't update clip
                    }
                }
            }
        }
    }

    // Calculate start and end frames that this impacts, and remove those frames from the cache
    if (!change["value"].isArray() && !change["value"]["position"].isNull()) {
        int64_t new_starting_frame = (change["value"]["position"].asDouble() * info.fps.ToDouble()) + 1;
        int64_t new_ending_frame = ((change["value"]["position"].asDouble() + change["value"]["end"].asDouble() - change["value"]["start"].asDouble()) * info.fps.ToDouble()) + 1;
        final_cache->Remove(new_starting_frame - 8, new_ending_frame + 8);
    }

    // Determine type of change operation
    if (change_type == "insert") {

        // Create clip
        Clip *clip = new Clip();

        // Keep track of allocated clip objects
        allocated_clips.insert(clip);

        // Set properties of clip from JSON
        clip->SetJsonValue(change["value"]);

        // Add clip to timeline
        AddClip(clip);

    } else if (change_type == "update") {

        // Update existing clip
        if (existing_clip) {

            // Calculate start and end frames that this impacts, and remove those frames from the cache
            int64_t old_starting_frame = (existing_clip->Position() * info.fps.ToDouble()) + 1;
            int64_t old_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
            final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);

            // Remove cache on clip's Reader (if found)
            if (existing_clip->Reader() && existing_clip->Reader()->GetCache())
                existing_clip->Reader()->GetCache()->Remove(old_starting_frame - 8, old_ending_frame + 8);

            // Update clip properties from JSON
            existing_clip->SetJsonValue(change["value"]);

            // Apply framemapper (or update existing framemapper)
            if (auto_map_clips) {
                apply_mapper_to_clip(existing_clip);
            }
        }

    } else if (change_type == "delete") {

        // Remove existing clip
        if (existing_clip) {

            // Calculate start and end frames that this impacts, and remove those frames from the cache
            int64_t old_starting_frame = (existing_clip->Position() * info.fps.ToDouble()) + 1;
            int64_t old_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
            final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);

            // Remove clip from timeline
            RemoveClip(existing_clip);
        }

    }

    // Re-Sort Clips (since they likely changed)
    sort_clips();
}

// Apply JSON diff to effects
void Timeline::apply_json_to_effects(Json::Value change) {

    // Get key and type of change
    std::string change_type = change["type"].asString();
    EffectBase *existing_effect = NULL;

    // Find id of an effect (if any)
    for (auto key_part : change["key"]) {

        if (key_part.isObject()) {
            // Check for id
            if (!key_part["id"].isNull())
            {
                // Set the id
                std::string effect_id = key_part["id"].asString();

                // Find matching effect in timeline (if any)
                for (auto e : effects)
                {
                    if (e->Id() == effect_id) {
                        existing_effect = e;
                        break; // effect found, exit loop
                    }
                }
                break; // id found, exit loop
            }
        }
    }

    // Now that we found the effect, apply the change to it
    if (existing_effect || change_type == "insert") {
        // Apply change to effect
        apply_json_to_effects(change, existing_effect);
    }
}

// Apply JSON diff to effects (if you already know which effect needs to be updated)
void Timeline::apply_json_to_effects(Json::Value change, EffectBase* existing_effect) {

    // Get key and type of change
    std::string change_type = change["type"].asString();

    // Calculate start and end frames that this impacts, and remove those frames from the cache
    if (!change["value"].isArray() && !change["value"]["position"].isNull()) {
        int64_t new_starting_frame = (change["value"]["position"].asDouble() * info.fps.ToDouble()) + 1;
        int64_t new_ending_frame = ((change["value"]["position"].asDouble() + change["value"]["end"].asDouble() - change["value"]["start"].asDouble()) * info.fps.ToDouble()) + 1;
        final_cache->Remove(new_starting_frame - 8, new_ending_frame + 8);
    }

    // Determine type of change operation
    if (change_type == "insert") {

        // Determine type of effect
        std::string effect_type = change["value"]["type"].asString();

        // Create Effect
        EffectBase *e = NULL;

        // Init the matching effect object
        if ( (e = EffectInfo().CreateEffect(effect_type)) ) {

            // Keep track of allocated effect objects
            allocated_effects.insert(e);

            // Load Json into Effect
            e->SetJsonValue(change["value"]);

            // Add Effect to Timeline
            AddEffect(e);
        }

    } else if (change_type == "update") {

        // Update existing effect
        if (existing_effect) {

            // Calculate start and end frames that this impacts, and remove those frames from the cache
            int64_t old_starting_frame = (existing_effect->Position() * info.fps.ToDouble()) + 1;
            int64_t old_ending_frame = ((existing_effect->Position() + existing_effect->Duration()) * info.fps.ToDouble()) + 1;
            final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);

            // Update effect properties from JSON
            existing_effect->SetJsonValue(change["value"]);
        }

    } else if (change_type == "delete") {

        // Remove existing effect
        if (existing_effect) {

            // Calculate start and end frames that this impacts, and remove those frames from the cache
            int64_t old_starting_frame = (existing_effect->Position() * info.fps.ToDouble()) + 1;
            int64_t old_ending_frame = ((existing_effect->Position() + existing_effect->Duration()) * info.fps.ToDouble()) + 1;
            final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);

            // Remove effect from timeline
            RemoveEffect(existing_effect);
        }

    }

    // Re-Sort Effects (since they likely changed)
    sort_effects();
}

// Apply JSON diff to timeline properties
void Timeline::apply_json_to_timeline(Json::Value change) {
    bool cache_dirty = true;

    // Get key and type of change
    std::string change_type = change["type"].asString();
    std::string root_key = change["key"][(uint)0].asString();
    std::string sub_key = "";
    if (change["key"].size() >= 2)
        sub_key = change["key"][(uint)1].asString();

    // Determine type of change operation
    if (change_type == "insert" || change_type == "update") {

        // INSERT / UPDATE
        // Check for valid property
        if (root_key == "color")
            // Set color
            color.SetJsonValue(change["value"]);
        else if (root_key == "viewport_scale")
            // Set viewport scale
            viewport_scale.SetJsonValue(change["value"]);
        else if (root_key == "viewport_x")
            // Set viewport x offset
            viewport_x.SetJsonValue(change["value"]);
        else if (root_key == "viewport_y")
            // Set viewport y offset
            viewport_y.SetJsonValue(change["value"]);
        else if (root_key == "duration") {
            // Update duration of timeline
            info.duration = change["value"].asDouble();
            info.video_length = info.fps.ToFloat() * info.duration;

            // We don't want to clear cache for duration adjustments
            cache_dirty = false;
        }
        else if (root_key == "width") {
            // Set width
            info.width = change["value"].asInt();
            preview_width = info.width;
        }
        else if (root_key == "height") {
            // Set height
            info.height = change["value"].asInt();
            preview_height = info.height;
        }
        else if (root_key == "fps" && sub_key == "" && change["value"].isObject()) {
            // Set fps fraction
            if (!change["value"]["num"].isNull())
                info.fps.num = change["value"]["num"].asInt();
            if (!change["value"]["den"].isNull())
                info.fps.den = change["value"]["den"].asInt();
        }
        else if (root_key == "fps" && sub_key == "num")
            // Set fps.num
            info.fps.num = change["value"].asInt();
        else if (root_key == "fps" && sub_key == "den")
            // Set fps.den
            info.fps.den = change["value"].asInt();
        else if (root_key == "display_ratio" && sub_key == "" && change["value"].isObject()) {
            // Set display_ratio fraction
            if (!change["value"]["num"].isNull())
                info.display_ratio.num = change["value"]["num"].asInt();
            if (!change["value"]["den"].isNull())
                info.display_ratio.den = change["value"]["den"].asInt();
        }
        else if (root_key == "display_ratio" && sub_key == "num")
            // Set display_ratio.num
            info.display_ratio.num = change["value"].asInt();
        else if (root_key == "display_ratio" && sub_key == "den")
            // Set display_ratio.den
            info.display_ratio.den = change["value"].asInt();
        else if (root_key == "pixel_ratio" && sub_key == "" && change["value"].isObject()) {
            // Set pixel_ratio fraction
            if (!change["value"]["num"].isNull())
                info.pixel_ratio.num = change["value"]["num"].asInt();
            if (!change["value"]["den"].isNull())
                info.pixel_ratio.den = change["value"]["den"].asInt();
        }
        else if (root_key == "pixel_ratio" && sub_key == "num")
            // Set pixel_ratio.num
            info.pixel_ratio.num = change["value"].asInt();
        else if (root_key == "pixel_ratio" && sub_key == "den")
            // Set pixel_ratio.den
            info.pixel_ratio.den = change["value"].asInt();

        else if (root_key == "sample_rate")
            // Set sample rate
            info.sample_rate = change["value"].asInt();
        else if (root_key == "channels")
            // Set channels
            info.channels = change["value"].asInt();
        else if (root_key == "channel_layout")
            // Set channel layout
            info.channel_layout = (ChannelLayout) change["value"].asInt();
        else
            // Error parsing JSON (or missing keys)
            throw InvalidJSONKey("JSON change key is invalid", change.toStyledString());


    } else if (change["type"].asString() == "delete") {

        // DELETE / RESET
        // Reset the following properties (since we can't delete them)
        if (root_key == "color") {
            color = Color();
            color.red = Keyframe(0.0);
            color.green = Keyframe(0.0);
            color.blue = Keyframe(0.0);
        }
        else if (root_key == "viewport_scale")
            viewport_scale = Keyframe(1.0);
        else if (root_key == "viewport_x")
            viewport_x = Keyframe(0.0);
        else if (root_key == "viewport_y")
            viewport_y = Keyframe(0.0);
        else
            // Error parsing JSON (or missing keys)
            throw InvalidJSONKey("JSON change key is invalid", change.toStyledString());

    }

    if (cache_dirty) {
        // Clear entire cache
        ClearAllCache();
    }
}

// Clear all caches
void Timeline::ClearAllCache(bool deep) {

    // Clear primary cache
    if (final_cache) {
        final_cache->Clear();
    }

    // Loop through all clips
    try {
        for (const auto clip : clips) {
            // Clear cache on clip's reader
            clip->Reader()->GetCache()->Clear();

            // Clear nested Reader (if deep clear requested)
            if (deep && clip->Reader()->Name() == "FrameMapper") {
                FrameMapper *nested_reader = static_cast<FrameMapper *>(clip->Reader());
                if (nested_reader->Reader() && nested_reader->Reader()->GetCache())
                    nested_reader->Reader()->GetCache()->Clear();
            }

            // Clear clip cache
            clip->GetCache()->Clear();
        }
    } catch (const ReaderClosed & e) {
        // ...
    }
}

// Set Max Image Size (used for performance optimization). Convenience function for setting
// Settings::Instance()->MAX_WIDTH and Settings::Instance()->MAX_HEIGHT.
void Timeline::SetMaxSize(int width, int height) {
    // Maintain aspect ratio regardless of what size is passed in
    QSize display_ratio_size = QSize(info.width, info.height);
    QSize proposed_size = QSize(std::min(width, info.width), std::min(height, info.height));

    // Scale QSize up to proposed size
    display_ratio_size.scale(proposed_size, Qt::KeepAspectRatio);

    // Update preview settings
    preview_width = display_ratio_size.width();
    preview_height = display_ratio_size.height();
}
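
/* Worked example (editor's addition): on a 1920x1080 timeline,
 * SetMaxSize(1280, 720) yields a 1280x720 preview, while SetMaxSize(800, 800)
 * keeps the 16:9 ratio and yields 800x450. Requested sizes larger than the
 * timeline are clamped to info.width x info.height first.
 */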
Header file for CacheBase class.
Header file for CacheDisk class.
Header file for CacheMemory class.
Header file for CrashHandler class.
Header file for all Exception classes.
Header file for the FrameMapper class.
#define OPEN_MP_NUM_PROCESSORS
Header file for Timeline class.
All cache managers in libopenshot are based on this CacheBase class.
Definition CacheBase.h:35
virtual void Clear()=0
Clear the cache of all frames.
virtual void Remove(int64_t frame_number)=0
Remove a specific frame.
virtual std::shared_ptr< openshot::Frame > GetFrame(int64_t frame_number)=0
Get a frame from the cache.
virtual void Add(std::shared_ptr< openshot::Frame > frame)=0
Add a Frame to the cache.
void SetMaxBytesFromInfo(int64_t number_of_frames, int width, int height, int sample_rate, int channels)
Set maximum bytes to a different amount based on a ReaderInfo struct.
Definition CacheBase.cpp:30
This class is a memory-based cache manager for Frame objects.
Definition CacheMemory.h:29
float Start() const
Get start position (in seconds) of clip (trim start of video)
Definition ClipBase.h:88
float Duration() const
Get the length of this clip (in seconds)
Definition ClipBase.h:90
virtual std::shared_ptr< openshot::Frame > GetFrame(int64_t frame_number)=0
This method is required for all derived classes of ClipBase, and returns a new openshot::Frame object...
std::string Id() const
Get the Id of this clip object.
Definition ClipBase.h:85
int Layer() const
Get layer of clip on timeline (lower number is covered by higher numbers)
Definition ClipBase.h:87
virtual void SetJsonValue(const Json::Value root)=0
Load Json::Value into this object.
Definition ClipBase.cpp:80
float Position() const
Get position on timeline (in seconds)
Definition ClipBase.h:86
virtual openshot::TimelineBase * ParentTimeline()
Get the associated Timeline pointer (if any)
Definition ClipBase.h:91
This class represents a clip (used to arrange readers on the timeline)
Definition Clip.h:89
openshot::VolumeMixType mixing
What strategy should be followed when mixing audio with other clips.
Definition Clip.h:169
openshot::Keyframe channel_filter
A number representing an audio channel to filter (clears all other channels)
Definition Clip.h:329
void SetJsonValue(const Json::Value root) override
Load Json::Value into this object.
Definition Clip.cpp:962
openshot::Keyframe has_audio
An optional override to determine if this clip has audio (-1=undefined, 0=no, 1=yes)
Definition Clip.h:333
openshot::TimelineBase * ParentTimeline() override
Get the associated Timeline pointer (if any)
Definition Clip.h:276
std::list< openshot::EffectBase * > Effects()
Return the list of effects on the timeline.
Definition Clip.h:225
openshot::Keyframe volume
Curve representing the volume (0 to 1)
Definition Clip.h:313
openshot::Keyframe channel_mapping
A number representing an audio channel to output (only works when filtering a channel)
Definition Clip.h:330
void Reader(openshot::ReaderBase *new_reader)
Set the current reader.
Definition Clip.cpp:272
This class represents a color (used on the timeline and clips)
Definition Color.h:27
std::string GetColorHex(int64_t frame_number)
Get the HEX value of a color at a specific frame.
Definition Color.cpp:47
openshot::Keyframe blue
Curve representing the blue value (0 - 255)
Definition Color.h:32
openshot::Keyframe red
Curve representing the red value (0 - 255)
Definition Color.h:30
openshot::Keyframe green
Curve representing the green value (0 - 255)
Definition Color.h:31
void SetJsonValue(const Json::Value root)
Load Json::Value into this object.
Definition Color.cpp:117
Json::Value JsonValue() const
Generate Json::Value for this object.
Definition Color.cpp:86
static CrashHandler * Instance()
This abstract class is the base class, used by all effects in libopenshot.
Definition EffectBase.h:53
virtual void SetJsonValue(const Json::Value root)
Load Json::Value into this object.
This class returns a listing of all effects supported by libopenshot.
Definition EffectInfo.h:29
EffectBase * CreateEffect(std::string effect_type)
Create an instance of an effect (factory style)
This class represents a fraction.
Definition Fraction.h:30
int num
Numerator for the fraction.
Definition Fraction.h:32
float ToFloat()
Return this fraction as a float (i.e. 1/2 = 0.5)
Definition Fraction.cpp:35
double ToDouble() const
Return this fraction as a double (i.e. 1/2 = 0.5)
Definition Fraction.cpp:40
void Reduce()
Reduce this fraction (i.e. 640/480 = 4/3)
Definition Fraction.cpp:65
Fraction Reciprocal() const
Return the reciprocal as a Fraction.
Definition Fraction.cpp:78
int den
Denominator for the fraction.
Definition Fraction.h:33
This class creates a mapping between 2 different frame rates, applying a specific pull-down technique...
void ChangeMapping(Fraction target_fps, PulldownType pulldown, int target_sample_rate, int target_channels, ChannelLayout target_channel_layout)
Change frame rate or audio mapping details.
ReaderBase * Reader()
Get the current reader.
void Close() override
Close the openshot::FrameMapper and internal reader.
int GetSamplesPerFrame(openshot::Fraction fps, int sample_rate, int channels)
Calculate the # of samples per video frame (for the current frame number)
Definition Frame.cpp:480
Exception for files that cannot be found or opened.
Definition Exceptions.h:188
Exception for missing JSON Change key.
Definition Exceptions.h:263
Exception for invalid JSON.
Definition Exceptions.h:218
A Keyframe is a collection of Point instances, which is used to vary a number or property over time.
Definition KeyFrame.h:53
int GetInt(int64_t index) const
Get the rounded INT value at a specific index.
Definition KeyFrame.cpp:282
void SetJsonValue(const Json::Value root)
Load Json::Value into this object.
Definition KeyFrame.cpp:372
double GetValue(int64_t index) const
Get the value at a specific index.
Definition KeyFrame.cpp:258
Json::Value JsonValue() const
Generate Json::Value for this object.
Definition KeyFrame.cpp:339
int64_t GetCount() const
Get the number of points in this keyframe's curve.
Definition KeyFrame.cpp:417
Exception for frames that are out of bounds.
Definition Exceptions.h:301
This abstract class is the base class, used by all readers in libopenshot.
Definition ReaderBase.h:76
openshot::ReaderInfo info
Information about the current media file.
Definition ReaderBase.h:88
virtual void SetJsonValue(const Json::Value root)=0
Load Json::Value into this object.
virtual Json::Value JsonValue() const =0
Generate Json::Value for this object.
std::recursive_mutex getFrameMutex
Mutex for multiple threads.
Definition ReaderBase.h:79
virtual openshot::CacheBase * GetCache()=0
Get the cache object used by this reader (note: not all readers use cache)
openshot::ClipBase * clip
Pointer to the parent clip instance (if any)
Definition ReaderBase.h:80
Exception when a reader is closed, and a frame is requested.
Definition Exceptions.h:364
This class contains settings used by libopenshot (and can be safely toggled at any point)
Definition Settings.h:26
std::string PATH_OPENSHOT_INSTALL
Definition Settings.h:108
static Settings * Instance()
Create or get an instance of this settings singleton (invoke the class with this method)
Definition Settings.cpp:23
int preview_height
Optional preview height of timeline image. If your preview window is smaller than the timeline,...
int preview_width
Optional preview width of timeline image. If your preview window is smaller than the timeline,...
This class represents a timeline.
Definition Timeline.h:150
void AddTrackedObject(std::shared_ptr< openshot::TrackedObjectBase > trackedObject)
Add a pointer to a tracked object (TrackedObjectBBox) to the tracked_objects map
Definition Timeline.cpp:223
Json::Value JsonValue() const override
Generate Json::Value for this object.
openshot::Keyframe viewport_scale
Curve representing the scale of the viewport (0 to 100)
Definition Timeline.h:314
void ApplyJsonDiff(std::string value)
Apply a specially formatted JSON object, which represents a change to the timeline (add,...
openshot::EffectBase * GetClipEffect(const std::string &id)
Look up a clip effect by ID.
Definition Timeline.cpp:431
void AddClip(openshot::Clip *clip)
Add an openshot::Clip to the timeline.
Definition Timeline.cpp:332
virtual ~Timeline()
Definition Timeline.cpp:206
std::list< openshot::EffectBase * > ClipEffects() const
Return the list of effects on all clips.
Definition Timeline.cpp:444
std::list< std::string > GetTrackedObjectsIds() const
Return the IDs of the tracked objects as a list of strings.
Definition Timeline.cpp:258
std::string Json() const override
Generate JSON string of this object.
int64_t GetMaxFrame()
Look up the end frame number of the latest element on the timeline.
Definition Timeline.cpp:469
std::shared_ptr< openshot::Frame > GetFrame(int64_t requested_frame) override
Definition Timeline.cpp:889
void ApplyMapperToClips()
Apply the timeline's frame rate and sample rate to all clips.
Definition Timeline.cpp:502
openshot::Color color
Background color of timeline canvas.
Definition Timeline.h:319
std::string GetTrackedObjectValues(std::string id, int64_t frame_number) const
Return the trackedObject's properties as a JSON string.
Definition Timeline.cpp:274
Timeline(int width, int height, openshot::Fraction fps, int sample_rate, int channels, openshot::ChannelLayout channel_layout)
Constructor for the timeline (which configures the default frame properties)
Definition Timeline.cpp:28
std::shared_ptr< openshot::TrackedObjectBase > GetTrackedObject(std::string id) const
Return a tracked object pointer by its ID.
Definition Timeline.cpp:241
openshot::EffectBase * GetEffect(const std::string &id)
Look up a timeline effect by ID.
Definition Timeline.cpp:420
void SetJsonValue(const Json::Value root) override
Load Json::Value into this object.
openshot::Clip * GetClip(const std::string &id)
Look up a single clip by ID.
Definition Timeline.cpp:408
void ClearAllCache(bool deep=false)
void AddEffect(openshot::EffectBase *effect)
Add an effect to the timeline.
Definition Timeline.cpp:358
std::shared_ptr< openshot::Frame > apply_effects(std::shared_ptr< openshot::Frame > frame, int64_t timeline_frame_number, int layer)
Apply global/timeline effects to the source frame (if any)
Definition Timeline.cpp:526
void SetCache(openshot::CacheBase *new_cache)
void Clear()
Clear all clips, effects, and frame mappers from timeline (and free memory)
Definition Timeline.cpp:809
openshot::Keyframe viewport_x
Curve representing the x coordinate for the viewport.
Definition Timeline.h:315
void RemoveClip(openshot::Clip *clip)
Remove an openshot::Clip from the timeline.
Definition Timeline.cpp:388
void SetMaxSize(int width, int height)
double GetMaxTime()
Look up the end time of the latest timeline element.
Definition Timeline.cpp:463
void RemoveEffect(openshot::EffectBase *effect)
Remove an effect from the timeline.
Definition Timeline.cpp:371
void Open() override
Open the reader (and start consuming resources)
Definition Timeline.cpp:877
void SetJson(const std::string value) override
Load JSON string into this object.
openshot::Keyframe viewport_y
Curve representing the y coordinate for the viewport.
Definition Timeline.h:316
void Close() override
Close the timeline reader (and any resources it was consuming)
Definition Timeline.cpp:855
void AppendDebugMethod(std::string method_name, std::string arg1_name="", float arg1_value=-1.0, std::string arg2_name="", float arg2_value=-1.0, std::string arg3_name="", float arg3_value=-1.0, std::string arg4_name="", float arg4_value=-1.0, std::string arg5_name="", float arg5_value=-1.0, std::string arg6_name="", float arg6_value=-1.0)
Append debug information.
static ZmqLogger * Instance()
Create or get an instance of this logger singleton (invoke the class with this method)
Definition ZmqLogger.cpp:35
This namespace is the default namespace for all code in the openshot library.
Definition Compressor.h:29
@ PULLDOWN_NONE
Do not apply pull-down techniques, just repeat or skip entire frames.
Definition FrameMapper.h:46
ChannelLayout
This enumeration determines the audio channel layout (such as stereo, mono, 5 point surround,...
@ VOLUME_MIX_AVERAGE
Evenly divide the overlapping clips' volume keyframes, so that the sum does not exceed 100%.
Definition Enums.h:63
@ VOLUME_MIX_REDUCE
Reduce volume by about 25%, and then mix (louder, but could cause pops if the sum exceeds 100%)
Definition Enums.h:64
const Json::Value stringToJson(const std::string value)
Definition Json.cpp:16
This struct holds the information of a bounding-box.
float cy
y-coordinate of the bounding box center
float height
bounding box height
float cx
x-coordinate of the bounding box center
float width
bounding box width
float angle
bounding box rotation angle [degrees]
Like CompareClipEndFrames, but for effects.
Definition Timeline.h:76
This struct contains info about a media file, such as height, width, frames per second,...
Definition ReaderBase.h:39
float duration
Length of time (in seconds)
Definition ReaderBase.h:43
int width
The width of the video (in pixels)
Definition ReaderBase.h:46
int channels
The number of audio channels used in the audio stream.
Definition ReaderBase.h:61
openshot::Fraction fps
Frames per second, as a fraction (i.e. 24/1 = 24 fps)
Definition ReaderBase.h:48
openshot::Fraction display_ratio
The ratio of width to height of the video stream (i.e. 640x480 has a ratio of 4/3)
Definition ReaderBase.h:51
int height
The height of the video (in pixels)
Definition ReaderBase.h:45
int64_t video_length
The number of frames in the video stream.
Definition ReaderBase.h:53
std::string acodec
The name of the audio codec used to encode / decode the video stream.
Definition ReaderBase.h:58
std::string vcodec
The name of the video codec used to encode / decode the video stream.
Definition ReaderBase.h:52
openshot::Fraction pixel_ratio
The pixel ratio of the video stream as a fraction (i.e. some pixels are not square)
Definition ReaderBase.h:50
openshot::ChannelLayout channel_layout
The channel layout (mono, stereo, 5 point surround, etc...)
Definition ReaderBase.h:62
bool has_video
Determines if this file has a video stream.
Definition ReaderBase.h:40
bool has_audio
Determines if this file has an audio stream.
Definition ReaderBase.h:41
openshot::Fraction video_timebase
The video timebase determines how long each frame stays on the screen.
Definition ReaderBase.h:55
int sample_rate
The number of audio samples per second (44100 is a common sample rate)
Definition ReaderBase.h:60
This struct contains info about the current Timeline clip instance.
bool is_top_clip
Is clip on top (if overlapping another clip)
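Taken together, the entries above cover the Timeline reader API end to end. A minimal sketch using only the signatures documented here (the media path is hypothetical, and clip lifetime management is elided):

#include "Timeline.h"
#include "Clip.h"
#include <memory>
using namespace openshot;

int main() {
    Timeline t(1280, 720, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);
    Clip c("example.mp4");   // hypothetical source file
    c.Position(0.0);         // place at the start of the timeline
    t.AddClip(&c);
    t.Open();                // open the reader (and start consuming resources)
    std::shared_ptr<Frame> f = t.GetFrame(1);
    t.Close();               // close the timeline reader
    return 0;
}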