OpenShot Library | libopenshot 0.2.7
Timeline.cpp
1/**
2 * @file
3 * @brief Source file for Timeline class
4 * @author Jonathan Thomas <jonathan@openshot.org>
5 *
6 * @ref License
7 */
8
9/* LICENSE
10 *
11 * Copyright (c) 2008-2019 OpenShot Studios, LLC
12 * <http://www.openshotstudios.com/>. This file is part of
13 * OpenShot Library (libopenshot), an open-source project dedicated to
14 * delivering high quality video editing and animation solutions to the
15 * world. For more information visit <http://www.openshot.org/>.
16 *
17 * OpenShot Library (libopenshot) is free software: you can redistribute it
18 * and/or modify it under the terms of the GNU Lesser General Public License
19 * as published by the Free Software Foundation, either version 3 of the
20 * License, or (at your option) any later version.
21 *
22 * OpenShot Library (libopenshot) is distributed in the hope that it will be
23 * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
24 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
25 * GNU Lesser General Public License for more details.
26 *
27 * You should have received a copy of the GNU Lesser General Public License
28 * along with OpenShot Library. If not, see <http://www.gnu.org/licenses/>.
29 */
30
31#include "Timeline.h"
32
33#include "CacheBase.h"
34#include "CacheDisk.h"
35#include "CacheMemory.h"
36#include "CrashHandler.h"
37#include "FrameMapper.h"
38#include "Exceptions.h"
39
40#include <QDir>
41#include <QFileInfo>
42
43using namespace openshot;
44
45// Default Constructor for the timeline (which sets the canvas width and height)
46Timeline::Timeline(int width, int height, Fraction fps, int sample_rate, int channels, ChannelLayout channel_layout) :
47 is_open(false), auto_map_clips(true), managed_cache(true), path(""),
48 max_concurrent_frames(OPEN_MP_NUM_PROCESSORS)
49{
 50 // Create CrashHandler and Attach (in case of errors)
 51 CrashHandler::Instance();
52
53 // Init viewport size (curve based, because it can be animated)
54 viewport_scale = Keyframe(100.0);
55 viewport_x = Keyframe(0.0);
56 viewport_y = Keyframe(0.0);
57
58 // Init background color
59 color.red = Keyframe(0.0);
60 color.green = Keyframe(0.0);
61 color.blue = Keyframe(0.0);
62
63 // Init FileInfo struct (clear all values)
64 info.width = width;
 65 info.height = height;
 66 preview_width = info.width;
 67 preview_height = info.height;
68 info.fps = fps;
69 info.sample_rate = sample_rate;
70 info.channels = channels;
 71 info.channel_layout = channel_layout;
 72 info.video_timebase = fps.Reciprocal();
73 info.duration = 60 * 30; // 30 minute default duration
74 info.has_audio = true;
 75 info.has_video = true;
 76 info.video_length = info.fps.ToFloat() * info.duration;
 77 info.display_ratio = openshot::Fraction(width, height);
 78 info.display_ratio.Reduce();
 79 info.pixel_ratio = openshot::Fraction(1, 1);
80 info.acodec = "openshot::timeline";
81 info.vcodec = "openshot::timeline";
82
83 // Init cache
84 final_cache = new CacheMemory();
85
 86 // Init max image size
 87 SetMaxSize(info.width, info.height);
88}
89
 90// Delegating constructor that copies parameters from a provided ReaderInfo
 91Timeline::Timeline(const ReaderInfo info) :
92 Timeline::Timeline(info.width, info.height, info.fps, info.sample_rate,
93 info.channels, info.channel_layout) {};
94
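A hedged usage sketch of the constructors above (Clip and LAYOUT_STEREO come from Clip.h and ChannelLayouts.h; "video.mp4" is a placeholder path):

    Timeline t(1920, 1080, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);
    Clip c("video.mp4");      // reader is auto-detected from the file
    c.Position(0.0);          // seconds on the timeline
    c.Layer(1);               // higher layers composite on top
    t.AddClip(&c);
    t.Open();                 // required before GetFrame()
    std::shared_ptr<Frame> f = t.GetFrame(1);
    t.Close();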
95// Constructor for the timeline (which loads a JSON structure from a file path, and initializes a timeline)
96Timeline::Timeline(const std::string& projectPath, bool convert_absolute_paths) :
97 is_open(false), auto_map_clips(true), managed_cache(true), path(projectPath),
98 max_concurrent_frames(OPEN_MP_NUM_PROCESSORS) {
99
 100 // Create CrashHandler and Attach (in case of errors)
 101 CrashHandler::Instance();
102
103 // Init final cache as NULL (will be created after loading json)
104 final_cache = NULL;
105
106 // Init viewport size (curve based, because it can be animated)
107 viewport_scale = Keyframe(100.0);
108 viewport_x = Keyframe(0.0);
109 viewport_y = Keyframe(0.0);
110
111 // Init background color
112 color.red = Keyframe(0.0);
113 color.green = Keyframe(0.0);
114 color.blue = Keyframe(0.0);
115
116 // Check if path exists
117 QFileInfo filePath(QString::fromStdString(path));
118 if (!filePath.exists()) {
119 throw InvalidFile("File could not be opened.", path);
120 }
121
 122 // Check OpenShot Install Path exists
 123 Settings *s = Settings::Instance();
124 QDir openshotPath(QString::fromStdString(s->PATH_OPENSHOT_INSTALL));
125 if (!openshotPath.exists()) {
126 throw InvalidFile("PATH_OPENSHOT_INSTALL could not be found.", s->PATH_OPENSHOT_INSTALL);
127 }
128 QDir openshotTransPath(openshotPath.filePath("transitions"));
129 if (!openshotTransPath.exists()) {
130 throw InvalidFile("PATH_OPENSHOT_INSTALL/transitions could not be found.", openshotTransPath.path().toStdString());
131 }
132
133 // Determine asset path
134 QString asset_name = filePath.baseName().left(30) + "_assets";
135 QDir asset_folder(filePath.dir().filePath(asset_name));
136 if (!asset_folder.exists()) {
137 // Create directory if needed
138 asset_folder.mkpath(".");
139 }
140
141 // Load UTF-8 project file into QString
142 QFile projectFile(QString::fromStdString(path));
143 projectFile.open(QFile::ReadOnly);
144 QString projectContents = QString::fromUtf8(projectFile.readAll());
145
146 // Convert all relative paths into absolute paths (if requested)
147 if (convert_absolute_paths) {
148
149 // Find all "image" or "path" references in JSON (using regex). Must loop through match results
150 // due to our path matching needs, which are not possible with the QString::replace() function.
151 QRegularExpression allPathsRegex(QStringLiteral("\"(image|path)\":.*?\"(.*?)\""));
152 std::vector<QRegularExpressionMatch> matchedPositions;
153 QRegularExpressionMatchIterator i = allPathsRegex.globalMatch(projectContents);
154 while (i.hasNext()) {
155 QRegularExpressionMatch match = i.next();
156 if (match.hasMatch()) {
157 // Push all match objects into a vector (so we can reverse them later)
158 matchedPositions.push_back(match);
159 }
160 }
161
162 // Reverse the matches (bottom of file to top, so our replacements don't break our match positions)
163 std::vector<QRegularExpressionMatch>::reverse_iterator itr;
164 for (itr = matchedPositions.rbegin(); itr != matchedPositions.rend(); itr++) {
165 QRegularExpressionMatch match = *itr;
166 QString relativeKey = match.captured(1); // image or path
167 QString relativePath = match.captured(2); // relative file path
168 QString absolutePath = "";
169
170 // Find absolute path of all path, image (including special replacements of @assets and @transitions)
171 if (relativePath.startsWith("@assets")) {
172 absolutePath = QFileInfo(asset_folder.absoluteFilePath(relativePath.replace("@assets", "."))).canonicalFilePath();
173 } else if (relativePath.startsWith("@transitions")) {
174 absolutePath = QFileInfo(openshotTransPath.absoluteFilePath(relativePath.replace("@transitions", "."))).canonicalFilePath();
175 } else {
176 absolutePath = QFileInfo(filePath.absoluteDir().absoluteFilePath(relativePath)).canonicalFilePath();
177 }
178
179 // Replace path in JSON content, if an absolute path was successfully found
180 if (!absolutePath.isEmpty()) {
181 projectContents.replace(match.capturedStart(0), match.capturedLength(0), "\"" + relativeKey + "\": \"" + absolutePath + "\"");
182 }
183 }
184 // Clear matches
185 matchedPositions.clear();
186 }
187
188 // Set JSON of project
189 SetJson(projectContents.toStdString());
190
191 // Calculate valid duration and set has_audio and has_video
192 // based on content inside this Timeline's clips.
193 float calculated_duration = 0.0;
194 for (auto clip : clips)
195 {
196 float clip_last_frame = clip->Position() + clip->Duration();
197 if (clip_last_frame > calculated_duration)
198 calculated_duration = clip_last_frame;
199 if (clip->Reader() && clip->Reader()->info.has_audio)
200 info.has_audio = true;
201 if (clip->Reader() && clip->Reader()->info.has_video)
202 info.has_video = true;
203
204 }
205 info.video_length = calculated_duration * info.fps.ToFloat();
206 info.duration = calculated_duration;
207
208 // Init FileInfo settings
209 info.acodec = "openshot::timeline";
 210 info.vcodec = "openshot::timeline";
 211 info.video_timebase = info.fps.Reciprocal();
212 info.has_video = true;
213 info.has_audio = true;
214
215 // Init cache
216 final_cache = new CacheMemory();
217
 218 // Init max image size
 219 SetMaxSize(info.width, info.height);
220}
 221
 222Timeline::~Timeline() {
223 if (is_open)
224 // Auto Close if not already
225 Close();
226
227 // Free all allocated frame mappers
228 std::set<FrameMapper *>::iterator it;
229 for (it = allocated_frame_mappers.begin(); it != allocated_frame_mappers.end(); ) {
230 // Dereference and clean up FrameMapper object
231 FrameMapper *mapper = (*it);
232 mapper->Reader(NULL);
233 mapper->Close();
234 delete mapper;
235 // Remove reference and proceed to next element
236 it = allocated_frame_mappers.erase(it);
237 }
238
239 // Destroy previous cache (if managed by timeline)
240 if (managed_cache && final_cache) {
241 delete final_cache;
242 final_cache = NULL;
243 }
244}
245
246// Add to the tracked_objects map a pointer to a tracked object (TrackedObjectBBox)
247void Timeline::AddTrackedObject(std::shared_ptr<openshot::TrackedObjectBase> trackedObject){
248
249 // Search for the tracked object on the map
250 auto iterator = tracked_objects.find(trackedObject->Id());
251
252 if (iterator != tracked_objects.end()){
253 // Tracked object's id already present on the map, overwrite it
254 iterator->second = trackedObject;
255 }
256 else{
257 // Tracked object's id not present -> insert it on the map
258 tracked_objects[trackedObject->Id()] = trackedObject;
259 }
260
261 return;
262}
263
 264// Return tracked object pointer by its id
265std::shared_ptr<openshot::TrackedObjectBase> Timeline::GetTrackedObject(std::string id) const{
266
267 // Search for the tracked object on the map
268 auto iterator = tracked_objects.find(id);
269
270 if (iterator != tracked_objects.end()){
271 // Id found, return the pointer to the tracked object
272 std::shared_ptr<openshot::TrackedObjectBase> trackedObject = iterator->second;
273 return trackedObject;
274 }
275 else {
276 // Id not found, return a null pointer
277 return nullptr;
278 }
279}
280
281// Return the ID's of the tracked objects as a list of strings
282std::list<std::string> Timeline::GetTrackedObjectsIds() const{
283
284 // Create a list of strings
285 std::list<std::string> trackedObjects_ids;
286
287 // Iterate through the tracked_objects map
288 for (auto const& it: tracked_objects){
289 // Add the IDs to the list
290 trackedObjects_ids.push_back(it.first);
291 }
292
293 return trackedObjects_ids;
294}
295
296#ifdef USE_OPENCV
297// Return the trackedObject's properties as a JSON string
298std::string Timeline::GetTrackedObjectValues(std::string id, int64_t frame_number) const {
299
300 // Initialize the JSON object
301 Json::Value trackedObjectJson;
302
303 // Search for the tracked object on the map
304 auto iterator = tracked_objects.find(id);
305
306 if (iterator != tracked_objects.end())
307 {
308 // Id found, Get the object pointer and cast it as a TrackedObjectBBox
309 std::shared_ptr<TrackedObjectBBox> trackedObject = std::static_pointer_cast<TrackedObjectBBox>(iterator->second);
310
 311 // Get the trackedObject's values for the requested frame (or fall back to its first frame)
312 if (trackedObject->ExactlyContains(frame_number)){
313 BBox box = trackedObject->GetBox(frame_number);
314 float x1 = box.cx - (box.width/2);
315 float y1 = box.cy - (box.height/2);
316 float x2 = box.cx + (box.width/2);
317 float y2 = box.cy + (box.height/2);
318 float rotation = box.angle;
319
320 trackedObjectJson["x1"] = x1;
321 trackedObjectJson["y1"] = y1;
322 trackedObjectJson["x2"] = x2;
323 trackedObjectJson["y2"] = y2;
324 trackedObjectJson["rotation"] = rotation;
325
326 } else {
327 BBox box = trackedObject->BoxVec.begin()->second;
328 float x1 = box.cx - (box.width/2);
329 float y1 = box.cy - (box.height/2);
330 float x2 = box.cx + (box.width/2);
331 float y2 = box.cy + (box.height/2);
332 float rotation = box.angle;
333
334 trackedObjectJson["x1"] = x1;
335 trackedObjectJson["y1"] = y1;
336 trackedObjectJson["x2"] = x2;
337 trackedObjectJson["y2"] = y2;
338 trackedObjectJson["rotation"] = rotation;
339 }
340
341 }
342 else {
343 // Id not found, return all 0 values
344 trackedObjectJson["x1"] = 0;
345 trackedObjectJson["y1"] = 0;
346 trackedObjectJson["x2"] = 0;
347 trackedObjectJson["y2"] = 0;
348 trackedObjectJson["rotation"] = 0;
349 }
350
351 return trackedObjectJson.toStyledString();
352}
353#endif
354
 355 // Add an openshot::Clip to the timeline
 356void Timeline::AddClip(Clip* clip)
357{
358 // Assign timeline to clip
359 clip->ParentTimeline(this);
360
361 // Clear cache of clip and nested reader (if any)
362 clip->cache.Clear();
363 if (clip->Reader() && clip->Reader()->GetCache())
364 clip->Reader()->GetCache()->Clear();
365
366 // All clips should be converted to the frame rate of this timeline
367 if (auto_map_clips)
368 // Apply framemapper (or update existing framemapper)
369 apply_mapper_to_clip(clip);
370
371 // Add clip to list
372 clips.push_back(clip);
373
374 // Sort clips
375 sort_clips();
376}
377
 378 // Add an effect to the timeline
 379void Timeline::AddEffect(EffectBase* effect)
380{
381 // Assign timeline to effect
382 effect->ParentTimeline(this);
383
384 // Add effect to list
385 effects.push_back(effect);
386
387 // Sort effects
388 sort_effects();
389}
390
 391 // Remove an effect from the timeline
 392void Timeline::RemoveEffect(EffectBase* effect)
393{
394 effects.remove(effect);
395}
396
 397 // Remove an openshot::Clip from the timeline
 398void Timeline::RemoveClip(Clip* clip)
399{
400 clips.remove(clip);
401}
402
403// Look up a clip
404openshot::Clip* Timeline::GetClip(const std::string& id)
405{
406 // Find the matching clip (if any)
407 for (const auto& clip : clips) {
408 if (clip->Id() == id) {
409 return clip;
410 }
411 }
412 return nullptr;
413}
414
 415 // Look up a timeline effect
 416openshot::EffectBase* Timeline::GetEffect(const std::string& id)
417{
418 // Find the matching effect (if any)
419 for (const auto& effect : effects) {
420 if (effect->Id() == id) {
421 return effect;
422 }
423 }
424 return nullptr;
425}
 426
 427openshot::EffectBase* Timeline::GetClipEffect(const std::string& id)
428{
429 // Search all clips for matching effect ID
430 for (const auto& clip : clips) {
431 const auto e = clip->GetEffect(id);
432 if (e != nullptr) {
433 return e;
434 }
435 }
436 return nullptr;
437}
438
439// Return the list of effects on all clips
440std::list<openshot::EffectBase*> Timeline::ClipEffects() const {
441
442 // Initialize the list
443 std::list<EffectBase*> timelineEffectsList;
444
445 // Loop through all clips
446 for (const auto& clip : clips) {
447
448 // Get the clip's list of effects
449 std::list<EffectBase*> clipEffectsList = clip->Effects();
450
451 // Append the clip's effects to the list
452 timelineEffectsList.insert(timelineEffectsList.end(), clipEffectsList.begin(), clipEffectsList.end());
453 }
454
455 return timelineEffectsList;
456}
457
 458 // Compute the end time of the latest timeline element
 459double Timeline::GetMaxTime() {
460 double last_clip = 0.0;
461 double last_effect = 0.0;
462
463 if (!clips.empty()) {
464 const auto max_clip = std::max_element(
465 clips.begin(), clips.end(), CompareClipEndFrames());
466 last_clip = (*max_clip)->Position() + (*max_clip)->Duration();
467 }
468 if (!effects.empty()) {
469 const auto max_effect = std::max_element(
470 effects.begin(), effects.end(), CompareEffectEndFrames());
471 last_effect = (*max_effect)->Position() + (*max_effect)->Duration();
472 }
473 return std::max(last_clip, last_effect);
474}
475
 476 // Compute the highest frame# based on the latest time and FPS
 477int64_t Timeline::GetMaxFrame() {
478 double fps = info.fps.ToDouble();
479 auto max_time = GetMaxTime();
480 return std::round(max_time * fps) + 1;
481}
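A worked example of the math above (illustrative values): with fps = 30000/1001 (about 29.97) and the latest element ending at 10.5 seconds, GetMaxFrame() returns round(10.5 * 29.97) + 1 = 315 + 1 = 316; the +1 reflects 1-based frame numbering.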
482
483// Apply a FrameMapper to a clip which matches the settings of this timeline
484void Timeline::apply_mapper_to_clip(Clip* clip)
485{
486 // Get lock (prevent getting frames while this happens)
487 const GenericScopedLock<CriticalSection> lock(getFrameCriticalSection);
488
489 // Determine type of reader
490 ReaderBase* clip_reader = NULL;
491 if (clip->Reader()->Name() == "FrameMapper")
492 {
493 // Get the existing reader
494 clip_reader = (ReaderBase*) clip->Reader();
495
496 } else {
497
 498 // Create a new FrameMapper to wrap the current reader
 499 FrameMapper* mapper = new FrameMapper(clip->Reader(), info.fps, PULLDOWN_NONE, info.sample_rate, info.channels, info.channel_layout);
500 allocated_frame_mappers.insert(mapper);
501 clip_reader = (ReaderBase*) mapper;
502 }
503
504 // Update the mapping
 505 FrameMapper* clip_mapped_reader = (FrameMapper*) clip_reader;
 506 clip_mapped_reader->ChangeMapping(info.fps, PULLDOWN_NONE, info.sample_rate, info.channels, info.channel_layout);
507
508 // Update clip reader
509 clip->Reader(clip_reader);
510}
511
 512 // Apply the timeline's framerate and samplerate to all clips
 513void Timeline::ApplyMapperToClips()
514{
 515 // Clear all cached frames
 516 ClearAllCache();
517
518 // Loop through all clips
519 for (auto clip : clips)
520 {
521 // Apply framemapper (or update existing framemapper)
522 apply_mapper_to_clip(clip);
523 }
524}
525
526// Calculate time of a frame number, based on a framerate
527double Timeline::calculate_time(int64_t number, Fraction rate)
528{
529 // Get float version of fps fraction
530 double raw_fps = rate.ToFloat();
531
532 // Return the time (in seconds) of this frame
533 return double(number - 1) / raw_fps;
534}
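calculate_time() is the inverse mapping: frame 316 at about 29.97 fps maps back to (316 - 1) / 29.97, roughly 10.51 seconds, and frame 1 maps to 0.0, again reflecting the 1-based numbering convention.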
535
536// Apply effects to the source frame (if any)
537std::shared_ptr<Frame> Timeline::apply_effects(std::shared_ptr<Frame> frame, int64_t timeline_frame_number, int layer)
538{
539 // Debug output
540 ZmqLogger::Instance()->AppendDebugMethod("Timeline::apply_effects", "frame->number", frame->number, "timeline_frame_number", timeline_frame_number, "layer", layer);
541
542 // Find Effects at this position and layer
543 for (auto effect : effects)
544 {
545 // Does clip intersect the current requested time
546 long effect_start_position = round(effect->Position() * info.fps.ToDouble()) + 1;
547 long effect_end_position = round((effect->Position() + (effect->Duration())) * info.fps.ToDouble()) + 1;
548
549 bool does_effect_intersect = (effect_start_position <= timeline_frame_number && effect_end_position >= timeline_frame_number && effect->Layer() == layer);
550
551 // Debug output
552 ZmqLogger::Instance()->AppendDebugMethod("Timeline::apply_effects (Does effect intersect)", "effect->Position()", effect->Position(), "does_effect_intersect", does_effect_intersect, "timeline_frame_number", timeline_frame_number, "layer", layer);
553
 554 // Effect is visible
555 if (does_effect_intersect)
556 {
557 // Determine the frame needed for this clip (based on the position on the timeline)
558 long effect_start_frame = (effect->Start() * info.fps.ToDouble()) + 1;
559 long effect_frame_number = timeline_frame_number - effect_start_position + effect_start_frame;
560
561 // Debug output
562 ZmqLogger::Instance()->AppendDebugMethod("Timeline::apply_effects (Process Effect)", "effect_frame_number", effect_frame_number, "does_effect_intersect", does_effect_intersect);
563
564 // Apply the effect to this frame
565 frame = effect->GetFrame(frame, effect_frame_number);
566 }
567
568 } // end effect loop
569
570 // Return modified frame
571 return frame;
572}
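A worked example of the position math above (illustrative values): an effect with Position() = 2.0 s and Start() = 0.5 s on a 30 fps timeline gives effect_start_position = round(2.0 * 30) + 1 = 61 and effect_start_frame = (0.5 * 30) + 1 = 16, so timeline frame 100 requests effect frame 100 - 61 + 16 = 55.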
573
574// Get or generate a blank frame
575std::shared_ptr<Frame> Timeline::GetOrCreateFrame(std::shared_ptr<Frame> background_frame, Clip* clip, int64_t number, openshot::TimelineInfoStruct* options)
576{
577 std::shared_ptr<Frame> new_frame;
578
579 // Init some basic properties about this frame
580 int samples_in_frame = Frame::GetSamplesPerFrame(number, info.fps, info.sample_rate, info.channels);
581
582 try {
583 // Debug output
584 ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetOrCreateFrame (from reader)", "number", number, "samples_in_frame", samples_in_frame);
585
586 // Attempt to get a frame (but this could fail if a reader has just been closed)
587 new_frame = std::shared_ptr<Frame>(clip->GetFrame(background_frame, number, options));
588
589 // Return real frame
590 return new_frame;
591
592 } catch (const ReaderClosed & e) {
593 // ...
594 } catch (const OutOfBoundsFrame & e) {
595 // ...
596 }
597
598 // Debug output
599 ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetOrCreateFrame (create blank)", "number", number, "samples_in_frame", samples_in_frame);
600
601 // Create blank frame
602 return new_frame;
603}
604
605// Process a new layer of video or audio
606void Timeline::add_layer(std::shared_ptr<Frame> new_frame, Clip* source_clip, int64_t clip_frame_number, bool is_top_clip, float max_volume)
607{
608 // Create timeline options (with details about this current frame request)
609 TimelineInfoStruct* options = new TimelineInfoStruct();
610 options->is_top_clip = is_top_clip;
611
612 // Get the clip's frame, composited on top of the current timeline frame
613 std::shared_ptr<Frame> source_frame;
614 source_frame = GetOrCreateFrame(new_frame, source_clip, clip_frame_number, options);
615 delete options;
616
617 // No frame found... so bail
618 if (!source_frame)
619 return;
620
621 // Debug output
622 ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer", "new_frame->number", new_frame->number, "clip_frame_number", clip_frame_number);
623
624 /* COPY AUDIO - with correct volume */
625 if (source_clip->Reader()->info.has_audio) {
626 // Debug output
627 ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Copy Audio)", "source_clip->Reader()->info.has_audio", source_clip->Reader()->info.has_audio, "source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(), "info.channels", info.channels, "clip_frame_number", clip_frame_number);
628
629 if (source_frame->GetAudioChannelsCount() == info.channels && source_clip->has_audio.GetInt(clip_frame_number) != 0)
630 for (int channel = 0; channel < source_frame->GetAudioChannelsCount(); channel++)
631 {
632 // Get volume from previous frame and this frame
633 float previous_volume = source_clip->volume.GetValue(clip_frame_number - 1);
634 float volume = source_clip->volume.GetValue(clip_frame_number);
635 int channel_filter = source_clip->channel_filter.GetInt(clip_frame_number); // optional channel to filter (if not -1)
636 int channel_mapping = source_clip->channel_mapping.GetInt(clip_frame_number); // optional channel to map this channel to (if not -1)
637
638 // Apply volume mixing strategy
639 if (source_clip->mixing == VOLUME_MIX_AVERAGE && max_volume > 1.0) {
 640 // Don't allow this clip to exceed 100% (divide volume equally between all overlapping clips with volume)
641 previous_volume = previous_volume / max_volume;
642 volume = volume / max_volume;
643 }
644 else if (source_clip->mixing == VOLUME_MIX_REDUCE && max_volume > 1.0) {
645 // Reduce clip volume by a bit, hoping it will prevent exceeding 100% (but it is very possible it will)
646 previous_volume = previous_volume * 0.77;
647 volume = volume * 0.77;
648 }
649
650 // If channel filter enabled, check for correct channel (and skip non-matching channels)
651 if (channel_filter != -1 && channel_filter != channel)
652 continue; // skip to next channel
653
654 // If no volume on this frame or previous frame, do nothing
655 if (previous_volume == 0.0 && volume == 0.0)
656 continue; // skip to next channel
657
658 // If channel mapping disabled, just use the current channel
659 if (channel_mapping == -1)
660 channel_mapping = channel;
661
662 // Apply ramp to source frame (if needed)
663 if (!isEqual(previous_volume, 1.0) || !isEqual(volume, 1.0))
664 source_frame->ApplyGainRamp(channel_mapping, 0, source_frame->GetAudioSamplesCount(), previous_volume, volume);
665
666 // TODO: Improve FrameMapper (or Timeline) to always get the correct number of samples per frame.
667 // Currently, the ResampleContext sometimes leaves behind a few samples for the next call, and the
668 // number of samples returned is variable... and does not match the number expected.
669 // This is a crude solution at best. =)
670 if (new_frame->GetAudioSamplesCount() != source_frame->GetAudioSamplesCount()){
671 // Force timeline frame to match the source frame
672 new_frame->ResizeAudio(info.channels, source_frame->GetAudioSamplesCount(), info.sample_rate, info.channel_layout);
673 }
 674 // Copy audio samples (and set initial volume). Mix samples with existing audio samples. The gains are added
 675 // together, so set the gains carefully to keep the sum from exceeding 1.0 (or audio distortion will happen).
676 new_frame->AddAudio(false, channel_mapping, 0, source_frame->GetAudioSamples(channel), source_frame->GetAudioSamplesCount(), 1.0);
677 }
678 else
679 // Debug output
680 ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (No Audio Copied - Wrong # of Channels)", "source_clip->Reader()->info.has_audio", source_clip->Reader()->info.has_audio, "source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(), "info.channels", info.channels, "clip_frame_number", clip_frame_number);
681 }
682
683 // Debug output
684 ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Transform: Composite Image Layer: Completed)", "source_frame->number", source_frame->number, "new_frame->GetImage()->width()", new_frame->GetImage()->width());
685}
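An illustration of the mixing strategies above (assumed values): two overlapping clips on the same layer with volume 1.0 each produce max_volume = 2.0. Under VOLUME_MIX_AVERAGE each clip is scaled to 1.0 / 2.0 = 0.5, holding the summed gain at 1.0; under VOLUME_MIX_REDUCE each is scaled by 0.77, for a combined gain of about 1.54, which can still clip, as the comment above concedes.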
686
687// Update the list of 'opened' clips
688void Timeline::update_open_clips(Clip *clip, bool does_clip_intersect)
689{
690 ZmqLogger::Instance()->AppendDebugMethod("Timeline::update_open_clips (before)", "does_clip_intersect", does_clip_intersect, "closing_clips.size()", closing_clips.size(), "open_clips.size()", open_clips.size());
691
692 // is clip already in list?
693 bool clip_found = open_clips.count(clip);
694
695 if (clip_found && !does_clip_intersect)
696 {
697 // Remove clip from 'opened' list, because it's closed now
698 open_clips.erase(clip);
699
700 // Close clip
701 clip->Close();
702 }
703 else if (!clip_found && does_clip_intersect)
704 {
705 // Add clip to 'opened' list, because it's missing
706 open_clips[clip] = clip;
707
708 try {
709 // Open the clip
710 clip->Open();
711
712 } catch (const InvalidFile & e) {
713 // ...
714 }
715 }
716
717 // Debug output
718 ZmqLogger::Instance()->AppendDebugMethod("Timeline::update_open_clips (after)", "does_clip_intersect", does_clip_intersect, "clip_found", clip_found, "closing_clips.size()", closing_clips.size(), "open_clips.size()", open_clips.size());
719}
720
721// Sort clips by position on the timeline
722void Timeline::sort_clips()
723{
724 // Debug output
725 ZmqLogger::Instance()->AppendDebugMethod("Timeline::SortClips", "clips.size()", clips.size());
726
727 // sort clips
728 clips.sort(CompareClips());
729}
730
731// Sort effects by position on the timeline
732void Timeline::sort_effects()
733{
 734 // sort effects
735 effects.sort(CompareEffects());
736}
737
 738 // Close the reader (and any resources it was consuming)
 739void Timeline::Close()
740{
741 ZmqLogger::Instance()->AppendDebugMethod("Timeline::Close");
742
743 // Close all open clips
744 for (auto clip : clips)
745 {
746 // Open or Close this clip, based on if it's intersecting or not
747 update_open_clips(clip, false);
748 }
749
750 // Mark timeline as closed
751 is_open = false;
752
753 // Clear cache
754 if (final_cache)
755 final_cache->Clear();
756}
757
 758 // Open the reader (and start consuming resources)
 759void Timeline::Open()
760{
761 is_open = true;
762}
763
764// Compare 2 floating point numbers for equality
765bool Timeline::isEqual(double a, double b)
766{
767 return fabs(a - b) < 0.000001;
768}
769
770// Get an openshot::Frame object for a specific frame number of this reader.
771std::shared_ptr<Frame> Timeline::GetFrame(int64_t requested_frame)
772{
773
774 // Adjust out of bounds frame number
775 if (requested_frame < 1)
776 requested_frame = 1;
777
778 // Check cache
779 std::shared_ptr<Frame> frame;
780 std::lock_guard<std::mutex> guard(get_frame_mutex);
781 frame = final_cache->GetFrame(requested_frame);
782 if (frame) {
783 // Debug output
784 ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Cached frame found)", "requested_frame", requested_frame);
785
786 // Return cached frame
787 return frame;
788 }
789 else
790 {
791 // Create a scoped lock, allowing only a single thread to run the following code at one time
792 const GenericScopedLock<CriticalSection> lock(getFrameCriticalSection);
793
794 // Check for open reader (or throw exception)
795 if (!is_open)
796 throw ReaderClosed("The Timeline is closed. Call Open() before calling this method.");
797
798 // Check cache again (due to locking)
799 frame = final_cache->GetFrame(requested_frame);
800 if (frame) {
801 // Debug output
802 ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Cached frame found on 2nd look)", "requested_frame", requested_frame);
803
804 // Return cached frame
805 return frame;
806 }
807
 808 // Check if the previous frame was cached. If not, assume we are seeking somewhere else on the Timeline and need
 809 // to clear the entire cache (for continuity's sake). For example, jumping back to a previous spot can cause issues
 810 // with audio data, where the new jump location doesn't match up with the previously cached audio data.
811 std::shared_ptr<Frame> previous_frame = final_cache->GetFrame(requested_frame - 1);
812 if (!previous_frame) {
 813 // Seeking to new place on timeline (destroy cache)
 814 ClearAllCache();
815 }
816
817 // Minimum number of frames to process (for performance reasons)
818 int minimum_frames = OPEN_MP_NUM_PROCESSORS;
819
820 // Get a list of clips that intersect with the requested section of timeline
821 // This also opens the readers for intersecting clips, and marks non-intersecting clips as 'needs closing'
822 std::vector<Clip*> nearby_clips;
823 nearby_clips = find_intersecting_clips(requested_frame, 1, true);
824
825 // Debug output
826 ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (processing frame)", "requested_frame", requested_frame, "omp_get_thread_num()", omp_get_thread_num());
827
828 // Init some basic properties about this frame
829 int samples_in_frame = Frame::GetSamplesPerFrame(requested_frame, info.fps, info.sample_rate, info.channels);
830
831 // Create blank frame (which will become the requested frame)
832 std::shared_ptr<Frame> new_frame(std::make_shared<Frame>(requested_frame, preview_width, preview_height, "#000000", samples_in_frame, info.channels));
833 new_frame->AddAudioSilence(samples_in_frame);
834 new_frame->SampleRate(info.sample_rate);
835 new_frame->ChannelsLayout(info.channel_layout);
836
837 // Debug output
838 ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Adding solid color)", "requested_frame", requested_frame, "info.width", info.width, "info.height", info.height);
839
840 // Add Background Color to 1st layer (if animated or not black)
841 if ((color.red.GetCount() > 1 || color.green.GetCount() > 1 || color.blue.GetCount() > 1) ||
842 (color.red.GetValue(requested_frame) != 0.0 || color.green.GetValue(requested_frame) != 0.0 || color.blue.GetValue(requested_frame) != 0.0))
843 new_frame->AddColor(preview_width, preview_height, color.GetColorHex(requested_frame));
844
845 // Debug output
846 ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Loop through clips)", "requested_frame", requested_frame, "clips.size()", clips.size(), "nearby_clips.size()", nearby_clips.size());
847
848 // Find Clips near this time
849 for (auto clip : nearby_clips)
850 {
851 long clip_start_position = round(clip->Position() * info.fps.ToDouble()) + 1;
852 long clip_end_position = round((clip->Position() + clip->Duration()) * info.fps.ToDouble()) + 1;
853
854 bool does_clip_intersect = (clip_start_position <= requested_frame && clip_end_position >= requested_frame);
855
856 // Debug output
857 ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Does clip intersect)", "requested_frame", requested_frame, "clip->Position()", clip->Position(), "clip->Duration()", clip->Duration(), "does_clip_intersect", does_clip_intersect);
858
859 // Clip is visible
860 if (does_clip_intersect)
861 {
862 // Determine if clip is "top" clip on this layer (only happens when multiple clips are overlapping)
863 bool is_top_clip = true;
864 float max_volume = 0.0;
865 for (auto nearby_clip : nearby_clips)
866 {
867 long nearby_clip_start_position = round(nearby_clip->Position() * info.fps.ToDouble()) + 1;
868 long nearby_clip_end_position = round((nearby_clip->Position() + nearby_clip->Duration()) * info.fps.ToDouble()) + 1;
869 long nearby_clip_start_frame = (nearby_clip->Start() * info.fps.ToDouble()) + 1;
870 long nearby_clip_frame_number = requested_frame - nearby_clip_start_position + nearby_clip_start_frame;
871
872 // Determine if top clip
873 if (clip->Id() != nearby_clip->Id() && clip->Layer() == nearby_clip->Layer() &&
874 nearby_clip_start_position <= requested_frame && nearby_clip_end_position >= requested_frame &&
875 nearby_clip_start_position > clip_start_position && is_top_clip == true) {
876 is_top_clip = false;
877 }
878
879 // Determine max volume of overlapping clips
880 if (nearby_clip->Reader() && nearby_clip->Reader()->info.has_audio &&
881 nearby_clip->has_audio.GetInt(nearby_clip_frame_number) != 0 &&
882 nearby_clip_start_position <= requested_frame && nearby_clip_end_position >= requested_frame) {
883 max_volume += nearby_clip->volume.GetValue(nearby_clip_frame_number);
884 }
885 }
886
887 // Determine the frame needed for this clip (based on the position on the timeline)
888 long clip_start_frame = (clip->Start() * info.fps.ToDouble()) + 1;
889 long clip_frame_number = requested_frame - clip_start_position + clip_start_frame;
890
891 // Debug output
892 ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Calculate clip's frame #)", "clip->Position()", clip->Position(), "clip->Start()", clip->Start(), "info.fps.ToFloat()", info.fps.ToFloat(), "clip_frame_number", clip_frame_number);
893
894 // Add clip's frame as layer
895 add_layer(new_frame, clip, clip_frame_number, is_top_clip, max_volume);
896
897 } else {
898 // Debug output
899 ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (clip does not intersect)",
900 "requested_frame", requested_frame, "does_clip_intersect",
901 does_clip_intersect);
902 }
903
904 } // end clip loop
905
906 // Debug output
907 ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Add frame to cache)", "requested_frame", requested_frame, "info.width", info.width, "info.height", info.height);
908
909 // Set frame # on mapped frame
910 new_frame->SetFrameNumber(requested_frame);
911
912 // Add final frame to cache
913 final_cache->Add(new_frame);
914
915 // Return frame (or blank frame)
916 return final_cache->GetFrame(requested_frame);
917 }
918}
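A hedged sketch of sequential playback or export built on GetFrame() (assuming a timeline t constructed and opened as in the earlier sketch):

    for (int64_t n = 1; n <= t.GetMaxFrame(); ++n) {
        std::shared_ptr<Frame> frame = t.GetFrame(n);
        // hand frame->GetImage() / frame audio buffers to a writer here
    }

Sequential requests keep the previous frame in final_cache, so the seek-detection branch above (which destroys the cache) only triggers on random access.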
919
920
 921// Find intersecting clips (or non-intersecting clips)
922std::vector<Clip*> Timeline::find_intersecting_clips(int64_t requested_frame, int number_of_frames, bool include)
923{
924 // Find matching clips
925 std::vector<Clip*> matching_clips;
926
 927 // Calculate the requested frame range
928 float min_requested_frame = requested_frame;
929 float max_requested_frame = requested_frame + (number_of_frames - 1);
930
931 // Re-Sort Clips (since they likely changed)
932 sort_clips();
933
934 // Find Clips at this time
935 for (auto clip : clips)
936 {
937 // Does clip intersect the current requested time
938 long clip_start_position = round(clip->Position() * info.fps.ToDouble()) + 1;
939 long clip_end_position = round((clip->Position() + clip->Duration()) * info.fps.ToDouble()) + 1;
940
941 bool does_clip_intersect =
942 (clip_start_position <= min_requested_frame || clip_start_position <= max_requested_frame) &&
943 (clip_end_position >= min_requested_frame || clip_end_position >= max_requested_frame);
944
945 // Debug output
946 ZmqLogger::Instance()->AppendDebugMethod("Timeline::find_intersecting_clips (Is clip near or intersecting)", "requested_frame", requested_frame, "min_requested_frame", min_requested_frame, "max_requested_frame", max_requested_frame, "clip->Position()", clip->Position(), "does_clip_intersect", does_clip_intersect);
947
948 // Open (or schedule for closing) this clip, based on if it's intersecting or not
949 update_open_clips(clip, does_clip_intersect);
950
951 // Clip is visible
952 if (does_clip_intersect && include)
953 // Add the intersecting clip
954 matching_clips.push_back(clip);
955
956 else if (!does_clip_intersect && !include)
957 // Add the non-intersecting clip
958 matching_clips.push_back(clip);
959
960 } // end clip loop
961
962 // return list
963 return matching_clips;
964}
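Example of the windowing above (illustrative values): at 30 fps, a call with requested_frame = 100 and number_of_frames = 1 tests only frame 100. A clip positioned at 3.0 seconds with a 2.0 second duration spans frames 91 through 151, so it intersects; update_open_clips() then opens its reader, while non-intersecting clips are closed.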
965
 966 // Set the cache object used by this reader
 967void Timeline::SetCache(CacheBase* new_cache) {
968 // Destroy previous cache (if managed by timeline)
969 if (managed_cache && final_cache) {
970 delete final_cache;
971 final_cache = NULL;
972 managed_cache = false;
973 }
974
975 // Set new cache
976 final_cache = new_cache;
977}
978
979// Generate JSON string of this object
980std::string Timeline::Json() const {
981
982 // Return formatted string
983 return JsonValue().toStyledString();
984}
985
986// Generate Json::Value for this object
987Json::Value Timeline::JsonValue() const {
988
989 // Create root json object
990 Json::Value root = ReaderBase::JsonValue(); // get parent properties
991 root["type"] = "Timeline";
992 root["viewport_scale"] = viewport_scale.JsonValue();
993 root["viewport_x"] = viewport_x.JsonValue();
994 root["viewport_y"] = viewport_y.JsonValue();
995 root["color"] = color.JsonValue();
996 root["path"] = path;
997
998 // Add array of clips
999 root["clips"] = Json::Value(Json::arrayValue);
1000
 1001 // Loop through all clips
1002 for (const auto existing_clip : clips)
1003 {
1004 root["clips"].append(existing_clip->JsonValue());
1005 }
1006
1007 // Add array of effects
1008 root["effects"] = Json::Value(Json::arrayValue);
1009
1010 // loop through effects
1011 for (const auto existing_effect: effects)
1012 {
1013 root["effects"].append(existing_effect->JsonValue());
1014 }
1015
1016 // return JsonValue
1017 return root;
1018}
1019
1020// Load JSON string into this object
1021void Timeline::SetJson(const std::string value) {
1022
1023 // Get lock (prevent getting frames while this happens)
1024 const GenericScopedLock<CriticalSection> lock(getFrameCriticalSection);
1025
1026 // Parse JSON string into JSON objects
1027 try
1028 {
1029 const Json::Value root = openshot::stringToJson(value);
1030 // Set all values that match
1031 SetJsonValue(root);
1032 }
1033 catch (const std::exception& e)
1034 {
1035 // Error parsing JSON (or missing keys)
1036 throw InvalidJSON("JSON is invalid (missing keys or invalid data types)");
1037 }
1038}
1039
1040// Load Json::Value into this object
1041void Timeline::SetJsonValue(const Json::Value root) {
1042
1043 // Close timeline before we do anything (this also removes all open and closing clips)
1044 bool was_open = is_open;
1045 Close();
1046
 1047 // Set parent data
 1048 ReaderBase::SetJsonValue(root);
1049
1050 // Set data from Json (if key is found)
1051 if (!root["path"].isNull())
1052 path = root["path"].asString();
1053
1054 if (!root["clips"].isNull()) {
1055 // Clear existing clips
1056 clips.clear();
1057
1058 // loop through clips
1059 for (const Json::Value existing_clip : root["clips"]) {
1060 // Create Clip
1061 Clip *c = new Clip();
1062
 1063 // When a clip is attached to an object, it searches for the object
 1064 // on its parent timeline. Setting the parent timeline of the clip here
 1065 // allows attaching it to an object when exporting the project (because
 1066 // the exporter script initializes the clip and its effects
 1067 // before setting its parent timeline).
1068 c->ParentTimeline(this);
1069
1070 // Load Json into Clip
1071 c->SetJsonValue(existing_clip);
1072
1073 // Add Clip to Timeline
1074 AddClip(c);
1075 }
1076 }
1077
1078 if (!root["effects"].isNull()) {
1079 // Clear existing effects
1080 effects.clear();
1081
1082 // loop through effects
1083 for (const Json::Value existing_effect :root["effects"]) {
1084 // Create Effect
1085 EffectBase *e = NULL;
1086
1087 if (!existing_effect["type"].isNull()) {
1088 // Create instance of effect
1089 if ( (e = EffectInfo().CreateEffect(existing_effect["type"].asString())) ) {
1090
1091 // Load Json into Effect
1092 e->SetJsonValue(existing_effect);
1093
1094 // Add Effect to Timeline
1095 AddEffect(e);
1096 }
1097 }
1098 }
1099 }
1100
1101 if (!root["duration"].isNull()) {
1102 // Update duration of timeline
 1103 info.duration = root["duration"].asDouble();
 1104 info.video_length = info.fps.ToFloat() * info.duration;
1105 }
1106
 1107 // Update preview settings
 1108 preview_width = info.width;
 1109 preview_height = info.height;
1110
1111 // Re-open if needed
1112 if (was_open)
1113 Open();
1114}
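A minimal project JSON accepted by SetJson()/SetJsonValue() above (hedged sketch: only the top-level keys read in this file are shown, the clip keys follow Clip::SetJsonValue(), and "a.mp4" is a placeholder):

    {
      "duration": 10.0,
      "clips": [
        { "position": 0.0, "layer": 1, "start": 0.0, "end": 10.0,
          "reader": { "type": "FFmpegReader", "path": "a.mp4" } }
      ],
      "effects": []
    }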
1115
1116// Apply a special formatted JSON object, which represents a change to the timeline (insert, update, delete)
1117void Timeline::ApplyJsonDiff(std::string value) {
1118
1119 // Get lock (prevent getting frames while this happens)
1120 const GenericScopedLock<CriticalSection> lock(getFrameCriticalSection);
1121
1122 // Parse JSON string into JSON objects
1123 try
1124 {
1125 const Json::Value root = openshot::stringToJson(value);
1126 // Process the JSON change array, loop through each item
1127 for (const Json::Value change : root) {
1128 std::string change_key = change["key"][(uint)0].asString();
1129
1130 // Process each type of change
1131 if (change_key == "clips")
1132 // Apply to CLIPS
1133 apply_json_to_clips(change);
1134
1135 else if (change_key == "effects")
1136 // Apply to EFFECTS
1137 apply_json_to_effects(change);
1138
1139 else
1140 // Apply to TIMELINE
1141 apply_json_to_timeline(change);
1142
1143 }
1144 }
1145 catch (const std::exception& e)
1146 {
1147 // Error parsing JSON (or missing keys)
1148 throw InvalidJSON("JSON is invalid (missing keys or invalid data types)");
1149 }
1150}
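An example of the diff format consumed above (hedged; the id values are placeholders). Each array element carries a "type" (insert, update, or delete), a "key" path whose first entry routes to clips, effects, or the timeline itself, and a "value" payload:

    [
      { "type": "update",
        "key": ["clips", { "id": "CLIP1" }],
        "value": { "id": "CLIP1", "position": 5.0, "layer": 2 } }
    ]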
1151
1152// Apply JSON diff to clips
1153void Timeline::apply_json_to_clips(Json::Value change) {
1154
1155 // Get key and type of change
1156 std::string change_type = change["type"].asString();
1157 std::string clip_id = "";
1158 Clip *existing_clip = NULL;
1159
1160 // Find id of clip (if any)
1161 for (auto key_part : change["key"]) {
1162 // Get each change
1163 if (key_part.isObject()) {
1164 // Check for id
1165 if (!key_part["id"].isNull()) {
1166 // Set the id
1167 clip_id = key_part["id"].asString();
1168
1169 // Find matching clip in timeline (if any)
1170 for (auto c : clips)
1171 {
1172 if (c->Id() == clip_id) {
1173 existing_clip = c;
1174 break; // clip found, exit loop
1175 }
1176 }
1177 break; // id found, exit loop
1178 }
1179 }
1180 }
1181
 1182 // Check for a more specific key (targeting this clip's effects)
 1183 // For example: ["clips", {"id": "123"}, "effects", {"id": "432"}]
1184 if (existing_clip && change["key"].size() == 4 && change["key"][2] == "effects")
1185 {
 1186 // This change is actually targeting a specific effect under a clip (and not the clip)
1187 Json::Value key_part = change["key"][3];
1188
1189 if (key_part.isObject()) {
1190 // Check for id
1191 if (!key_part["id"].isNull())
1192 {
1193 // Set the id
1194 std::string effect_id = key_part["id"].asString();
1195
1196 // Find matching effect in timeline (if any)
1197 std::list<EffectBase*> effect_list = existing_clip->Effects();
1198 for (auto e : effect_list)
1199 {
1200 if (e->Id() == effect_id) {
1201 // Apply the change to the effect directly
1202 apply_json_to_effects(change, e);
1203
1204 // Calculate start and end frames that this impacts, and remove those frames from the cache
1205 int64_t new_starting_frame = (existing_clip->Position() * info.fps.ToDouble()) + 1;
1206 int64_t new_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
1207 final_cache->Remove(new_starting_frame - 8, new_ending_frame + 8);
1208
1209 return; // effect found, don't update clip
1210 }
1211 }
1212 }
1213 }
1214 }
1215
1216 // Calculate start and end frames that this impacts, and remove those frames from the cache
1217 if (!change["value"].isArray() && !change["value"]["position"].isNull()) {
1218 int64_t new_starting_frame = (change["value"]["position"].asDouble() * info.fps.ToDouble()) + 1;
1219 int64_t new_ending_frame = ((change["value"]["position"].asDouble() + change["value"]["end"].asDouble() - change["value"]["start"].asDouble()) * info.fps.ToDouble()) + 1;
1220 final_cache->Remove(new_starting_frame - 8, new_ending_frame + 8);
1221 }
1222
1223 // Determine type of change operation
1224 if (change_type == "insert") {
1225
1226 // Create new clip
1227 Clip *clip = new Clip();
1228 clip->SetJsonValue(change["value"]); // Set properties of new clip from JSON
1229 AddClip(clip); // Add clip to timeline
1230
1231 // Apply framemapper (or update existing framemapper)
1232 apply_mapper_to_clip(clip);
1233
1234 } else if (change_type == "update") {
1235
1236 // Update existing clip
1237 if (existing_clip) {
1238
1239 // Calculate start and end frames that this impacts, and remove those frames from the cache
1240 int64_t old_starting_frame = (existing_clip->Position() * info.fps.ToDouble()) + 1;
1241 int64_t old_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
1242 final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);
1243
1244 // Remove cache on clip's Reader (if found)
1245 if (existing_clip->Reader() && existing_clip->Reader()->GetCache())
1246 existing_clip->Reader()->GetCache()->Remove(old_starting_frame - 8, old_ending_frame + 8);
1247
1248 // Update clip properties from JSON
1249 existing_clip->SetJsonValue(change["value"]);
1250
1251 // Apply framemapper (or update existing framemapper)
1252 apply_mapper_to_clip(existing_clip);
1253 }
1254
1255 } else if (change_type == "delete") {
1256
1257 // Remove existing clip
1258 if (existing_clip) {
1259
1260 // Calculate start and end frames that this impacts, and remove those frames from the cache
1261 int64_t old_starting_frame = (existing_clip->Position() * info.fps.ToDouble()) + 1;
1262 int64_t old_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
1263 final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);
1264
1265 // Remove clip from timeline
1266 RemoveClip(existing_clip);
1267 }
1268
1269 }
1270
1271}
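Note on the cache eviction above: the Remove() calls pad the affected range by 8 frames on each side. This reads as a safety margin around the seconds-to-frames conversion, so frames near the edited clip's boundaries are re-rendered instead of being served stale from final_cache.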
1272
1273// Apply JSON diff to effects
1274void Timeline::apply_json_to_effects(Json::Value change) {
1275
1276 // Get key and type of change
1277 std::string change_type = change["type"].asString();
1278 EffectBase *existing_effect = NULL;
1279
1280 // Find id of an effect (if any)
1281 for (auto key_part : change["key"]) {
1282
1283 if (key_part.isObject()) {
1284 // Check for id
1285 if (!key_part["id"].isNull())
1286 {
1287 // Set the id
1288 std::string effect_id = key_part["id"].asString();
1289
1290 // Find matching effect in timeline (if any)
1291 for (auto e : effects)
1292 {
1293 if (e->Id() == effect_id) {
1294 existing_effect = e;
1295 break; // effect found, exit loop
1296 }
1297 }
1298 break; // id found, exit loop
1299 }
1300 }
1301 }
1302
1303 // Now that we found the effect, apply the change to it
1304 if (existing_effect || change_type == "insert")
1305 // Apply change to effect
1306 apply_json_to_effects(change, existing_effect);
1307}
1308
1309// Apply JSON diff to effects (if you already know which effect needs to be updated)
1310void Timeline::apply_json_to_effects(Json::Value change, EffectBase* existing_effect) {
1311
1312 // Get key and type of change
1313 std::string change_type = change["type"].asString();
1314
1315 // Calculate start and end frames that this impacts, and remove those frames from the cache
1316 if (!change["value"].isArray() && !change["value"]["position"].isNull()) {
1317 int64_t new_starting_frame = (change["value"]["position"].asDouble() * info.fps.ToDouble()) + 1;
1318 int64_t new_ending_frame = ((change["value"]["position"].asDouble() + change["value"]["end"].asDouble() - change["value"]["start"].asDouble()) * info.fps.ToDouble()) + 1;
1319 final_cache->Remove(new_starting_frame - 8, new_ending_frame + 8);
1320 }
1321
1322 // Determine type of change operation
1323 if (change_type == "insert") {
1324
1325 // Determine type of effect
1326 std::string effect_type = change["value"]["type"].asString();
1327
1328 // Create Effect
1329 EffectBase *e = NULL;
1330
1331 // Init the matching effect object
1332 if ( (e = EffectInfo().CreateEffect(effect_type)) ) {
1333
1334 // Load Json into Effect
1335 e->SetJsonValue(change["value"]);
1336
1337 // Add Effect to Timeline
1338 AddEffect(e);
1339 }
1340
1341 } else if (change_type == "update") {
1342
1343 // Update existing effect
1344 if (existing_effect) {
1345
1346 // Calculate start and end frames that this impacts, and remove those frames from the cache
1347 int64_t old_starting_frame = (existing_effect->Position() * info.fps.ToDouble()) + 1;
1348 int64_t old_ending_frame = ((existing_effect->Position() + existing_effect->Duration()) * info.fps.ToDouble()) + 1;
1349 final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);
1350
1351 // Update effect properties from JSON
1352 existing_effect->SetJsonValue(change["value"]);
1353 }
1354
1355 } else if (change_type == "delete") {
1356
1357 // Remove existing effect
1358 if (existing_effect) {
1359
1360 // Calculate start and end frames that this impacts, and remove those frames from the cache
1361 int64_t old_starting_frame = (existing_effect->Position() * info.fps.ToDouble()) + 1;
1362 int64_t old_ending_frame = ((existing_effect->Position() + existing_effect->Duration()) * info.fps.ToDouble()) + 1;
1363 final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);
1364
1365 // Remove effect from timeline
1366 RemoveEffect(existing_effect);
1367 }
1368
1369 }
1370}
1371
1372// Apply JSON diff to timeline properties
1373void Timeline::apply_json_to_timeline(Json::Value change) {
1374
1375 // Get key and type of change
1376 std::string change_type = change["type"].asString();
1377 std::string root_key = change["key"][(uint)0].asString();
1378 std::string sub_key = "";
1379 if (change["key"].size() >= 2)
1380 sub_key = change["key"][(uint)1].asString();
1381
1382 // Clear entire cache
1383 ClearAllCache();
1384
1385 // Determine type of change operation
1386 if (change_type == "insert" || change_type == "update") {
1387
1388 // INSERT / UPDATE
1389 // Check for valid property
1390 if (root_key == "color")
1391 // Set color
1392 color.SetJsonValue(change["value"]);
1393 else if (root_key == "viewport_scale")
1394 // Set viewport scale
1395 viewport_scale.SetJsonValue(change["value"]);
1396 else if (root_key == "viewport_x")
1397 // Set viewport x offset
1398 viewport_x.SetJsonValue(change["value"]);
1399 else if (root_key == "viewport_y")
1400 // Set viewport y offset
1401 viewport_y.SetJsonValue(change["value"]);
1402 else if (root_key == "duration") {
1403 // Update duration of timeline
 1404 info.duration = change["value"].asDouble();
 1405 info.video_length = info.fps.ToFloat() * info.duration;
1406 }
1407 else if (root_key == "width") {
1408 // Set width
 1409 info.width = change["value"].asInt();
 1410 preview_width = info.width;
1411 }
1412 else if (root_key == "height") {
1413 // Set height
 1414 info.height = change["value"].asInt();
 1415 preview_height = info.height;
1416 }
1417 else if (root_key == "fps" && sub_key == "" && change["value"].isObject()) {
1418 // Set fps fraction
1419 if (!change["value"]["num"].isNull())
1420 info.fps.num = change["value"]["num"].asInt();
1421 if (!change["value"]["den"].isNull())
1422 info.fps.den = change["value"]["den"].asInt();
1423 }
1424 else if (root_key == "fps" && sub_key == "num")
1425 // Set fps.num
1426 info.fps.num = change["value"].asInt();
1427 else if (root_key == "fps" && sub_key == "den")
1428 // Set fps.den
1429 info.fps.den = change["value"].asInt();
1430 else if (root_key == "display_ratio" && sub_key == "" && change["value"].isObject()) {
1431 // Set display_ratio fraction
1432 if (!change["value"]["num"].isNull())
1433 info.display_ratio.num = change["value"]["num"].asInt();
1434 if (!change["value"]["den"].isNull())
1435 info.display_ratio.den = change["value"]["den"].asInt();
1436 }
1437 else if (root_key == "display_ratio" && sub_key == "num")
1438 // Set display_ratio.num
1439 info.display_ratio.num = change["value"].asInt();
1440 else if (root_key == "display_ratio" && sub_key == "den")
1441 // Set display_ratio.den
1442 info.display_ratio.den = change["value"].asInt();
1443 else if (root_key == "pixel_ratio" && sub_key == "" && change["value"].isObject()) {
1444 // Set pixel_ratio fraction
1445 if (!change["value"]["num"].isNull())
1446 info.pixel_ratio.num = change["value"]["num"].asInt();
1447 if (!change["value"]["den"].isNull())
1448 info.pixel_ratio.den = change["value"]["den"].asInt();
1449 }
1450 else if (root_key == "pixel_ratio" && sub_key == "num")
1451 // Set pixel_ratio.num
1452 info.pixel_ratio.num = change["value"].asInt();
1453 else if (root_key == "pixel_ratio" && sub_key == "den")
1454 // Set pixel_ratio.den
1455 info.pixel_ratio.den = change["value"].asInt();
1456
1457 else if (root_key == "sample_rate")
1458 // Set sample rate
1459 info.sample_rate = change["value"].asInt();
1460 else if (root_key == "channels")
1461 // Set channels
1462 info.channels = change["value"].asInt();
1463 else if (root_key == "channel_layout")
1464 // Set channel layout
1465 info.channel_layout = (ChannelLayout) change["value"].asInt();
1466 else
1467 // Error parsing JSON (or missing keys)
1468 throw InvalidJSONKey("JSON change key is invalid", change.toStyledString());
1469
1470
1471 } else if (change["type"].asString() == "delete") {
1472
1473 // DELETE / RESET
1474 // Reset the following properties (since we can't delete them)
1475 if (root_key == "color") {
1476 color = Color();
1477 color.red = Keyframe(0.0);
1478 color.green = Keyframe(0.0);
1479 color.blue = Keyframe(0.0);
1480 }
1481 else if (root_key == "viewport_scale")
1482 viewport_scale = Keyframe(1.0);
1483 else if (root_key == "viewport_x")
1484 viewport_x = Keyframe(0.0);
1485 else if (root_key == "viewport_y")
1486 viewport_y = Keyframe(0.0);
1487 else
1488 // Error parsing JSON (or missing keys)
1489 throw InvalidJSONKey("JSON change key is invalid", change.toStyledString());
1490
1491 }
1492
1493}
1494
 1495 // Clear all caches
 1496void Timeline::ClearAllCache() {
1497
1498 // Get lock (prevent getting frames while this happens)
1499 const GenericScopedLock<CriticalSection> lock(getFrameCriticalSection);
1500
1501 // Clear primary cache
1502 final_cache->Clear();
1503
1504 // Loop through all clips
1505 for (auto clip : clips)
1506 {
1507 // Clear cache on clip
1508 clip->Reader()->GetCache()->Clear();
1509
1510 // Clear nested Reader (if any)
1511 if (clip->Reader()->Name() == "FrameMapper") {
1512 FrameMapper* nested_reader = (FrameMapper*) clip->Reader();
1513 if (nested_reader->Reader() && nested_reader->Reader()->GetCache())
1514 nested_reader->Reader()->GetCache()->Clear();
1515 }
1516
1517 }
1518}
1519
1520// Set Max Image Size (used for performance optimization). Convenience function for setting
1521// Settings::Instance()->MAX_WIDTH and Settings::Instance()->MAX_HEIGHT.
1522void Timeline::SetMaxSize(int width, int height) {
1523 // Maintain aspect ratio regardless of what size is passed in
1524 QSize display_ratio_size = QSize(info.display_ratio.num * info.pixel_ratio.ToFloat(), info.display_ratio.den * info.pixel_ratio.ToFloat());
1525 QSize proposed_size = QSize(std::min(width, info.width), std::min(height, info.height));
1526
1527 // Scale QSize up to proposed size
1528 display_ratio_size.scale(proposed_size, Qt::KeepAspectRatio);
1529
1530 // Update preview settings
1531 preview_width = display_ratio_size.width();
1532 preview_height = display_ratio_size.height();
1533
1534 // Update timeline cache size
1535 final_cache->SetMaxBytesFromInfo(max_concurrent_frames * 4, preview_width, preview_height, info.sample_rate, info.channels);
1536}
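A worked example of the scaling above (assumed values): a 1920x1080 timeline has display_ratio 16:9 and pixel_ratio 1:1, so display_ratio_size starts at 16x9. SetMaxSize(800, 800) proposes 800x800, and QSize::scale() with Qt::KeepAspectRatio grows 16x9 to the largest size fitting inside it, yielding an 800x450 preview; the cache budget is then recomputed for that resolution.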
Definition: KeyFrame.h:72
int GetInt(int64_t index) const
Get the rounded INT value at a specific index.
Definition: KeyFrame.cpp:292
void SetJsonValue(const Json::Value root)
Load Json::Value into this object.
Definition: KeyFrame.cpp:368
double GetValue(int64_t index) const
Get the value at a specific index.
Definition: KeyFrame.cpp:268
Json::Value JsonValue() const
Generate Json::Value for this object.
Definition: KeyFrame.cpp:335
int64_t GetCount() const
Get the number of points (i.e. # of points)
Definition: KeyFrame.cpp:516
Exception for frames that are out of bounds.
Definition: Exceptions.h:286
This abstract class is the base class, used by all readers in libopenshot.
Definition: ReaderBase.h:98
juce::CriticalSection getFrameCriticalSection
Section lock for multiple threads.
Definition: ReaderBase.h:101
openshot::ReaderInfo info
Information about the current media file.
Definition: ReaderBase.h:111
virtual void SetJsonValue(const Json::Value root)=0
Load Json::Value into this object.
Definition: ReaderBase.cpp:171
virtual Json::Value JsonValue() const =0
Generate Json::Value for this object.
Definition: ReaderBase.cpp:116
virtual openshot::CacheBase * GetCache()=0
Get the cache object used by this reader (note: not all readers use cache)
openshot::ClipBase * clip
Pointer to the parent clip instance (if any)
Definition: ReaderBase.h:103
Exception when a reader is closed, and a frame is requested.
Definition: Exceptions.h:338
This class is contains settings used by libopenshot (and can be safely toggled at any point)
Definition: Settings.h:44
std::string PATH_OPENSHOT_INSTALL
Definition: Settings.h:108
static Settings * Instance()
Create or get an instance of this logger singleton (invoke the class with this method)
Definition: Settings.cpp:41
int preview_height
Optional preview width of timeline image. If your preview window is smaller than the timeline,...
Definition: TimelineBase.h:58
int preview_width
Optional preview width of timeline image. If your preview window is smaller than the timeline,...
Definition: TimelineBase.h:57
This class represents a timeline.
Definition: Timeline.h:168
void AddTrackedObject(std::shared_ptr< openshot::TrackedObjectBase > trackedObject)
Add to the tracked_objects map a pointer to a tracked object (TrackedObjectBBox)
Definition: Timeline.cpp:247
Json::Value JsonValue() const override
Generate Json::Value for this object.
Definition: Timeline.cpp:987
openshot::Keyframe viewport_scale
Curve representing the scale of the viewport (0 to 100)
Definition: Timeline.h:323
void ApplyJsonDiff(std::string value)
Apply a special formatted JSON object, which represents a change to the timeline (add,...
Definition: Timeline.cpp:1117
openshot::EffectBase * GetClipEffect(const std::string &id)
Look up a clip effect by ID.
Definition: Timeline.cpp:427
void AddClip(openshot::Clip *clip)
Add an openshot::Clip to the timeline.
Definition: Timeline.cpp:356
virtual ~Timeline()
Definition: Timeline.cpp:222
std::list< openshot::EffectBase * > ClipEffects() const
Return the list of effects on all clips.
Definition: Timeline.cpp:440
std::list< std::string > GetTrackedObjectsIds() const
Return the ID's of the tracked objects as a list of strings.
Definition: Timeline.cpp:282
std::string Json() const override
Generate JSON string of this object.
Definition: Timeline.cpp:980
int64_t GetMaxFrame()
Look up the end frame number of the latest element on the timeline.
Definition: Timeline.cpp:477
std::shared_ptr< openshot::Frame > GetFrame(int64_t requested_frame) override
Definition: Timeline.cpp:771
void ClearAllCache()
Clear all cache for this timeline instance, and all clips, mappers, and readers under it.
Definition: Timeline.cpp:1496
void ApplyMapperToClips()
Apply the timeline's framerate and samplerate to all clips.
Definition: Timeline.cpp:513
openshot::Color color
Background color of timeline canvas.
Definition: Timeline.h:328
std::string GetTrackedObjectValues(std::string id, int64_t frame_number) const
Return the trackedObject's properties as a JSON string.
Definition: Timeline.cpp:298
Timeline(int width, int height, openshot::Fraction fps, int sample_rate, int channels, openshot::ChannelLayout channel_layout)
Constructor for the timeline (which configures the default frame properties)
Definition: Timeline.cpp:46
std::shared_ptr< openshot::TrackedObjectBase > GetTrackedObject(std::string id) const
Return tracked object pointer by it's id.
Definition: Timeline.cpp:265
openshot::EffectBase * GetEffect(const std::string &id)
Look up a timeline effect by ID.
Definition: Timeline.cpp:416
void SetJsonValue(const Json::Value root) override
Load Json::Value into this object.
Definition: Timeline.cpp:1041
openshot::Clip * GetClip(const std::string &id)
Look up a single clip by ID.
Definition: Timeline.cpp:404
void AddEffect(openshot::EffectBase *effect)
Add an effect to the timeline.
Definition: Timeline.cpp:379
std::shared_ptr< openshot::Frame > apply_effects(std::shared_ptr< openshot::Frame > frame, int64_t timeline_frame_number, int layer)
Apply global/timeline effects to the source frame (if any)
Definition: Timeline.cpp:537
void SetCache(openshot::CacheBase *new_cache)
Definition: Timeline.cpp:967
openshot::Keyframe viewport_x
Curve representing the x coordinate for the viewport.
Definition: Timeline.h:324
void RemoveClip(openshot::Clip *clip)
Remove an openshot::Clip from the timeline.
Definition: Timeline.cpp:398
void SetMaxSize(int width, int height)
Definition: Timeline.cpp:1522
double GetMaxTime()
Look up the end time of the latest timeline element.
Definition: Timeline.cpp:459
void RemoveEffect(openshot::EffectBase *effect)
Remove an effect from the timeline.
Definition: Timeline.cpp:392
void Open() override
Open the reader (and start consuming resources)
Definition: Timeline.cpp:759
void SetJson(const std::string value) override
Load JSON string into this object.
Definition: Timeline.cpp:1021
openshot::Keyframe viewport_y
Curve representing the y coordinate for the viewport.
Definition: Timeline.h:325
void Close() override
Close the timeline reader (and any resources it was consuming)
Definition: Timeline.cpp:739
void AppendDebugMethod(std::string method_name, std::string arg1_name="", float arg1_value=-1.0, std::string arg2_name="", float arg2_value=-1.0, std::string arg3_name="", float arg3_value=-1.0, std::string arg4_name="", float arg4_value=-1.0, std::string arg5_name="", float arg5_value=-1.0, std::string arg6_name="", float arg6_value=-1.0)
Append debug information.
Definition: ZmqLogger.cpp:190
static ZmqLogger * Instance()
Create or get an instance of this logger singleton (invoke the class with this method)
Definition: ZmqLogger.cpp:52
This namespace is the default namespace for all code in the openshot library.
Definition: Compressor.h:47
@ PULLDOWN_NONE
Do not apply pull-down techniques, just repeat or skip entire frames.
Definition: FrameMapper.h:63
ChannelLayout
This enumeration determines the audio channel layout (such as stereo, mono, 5 point surround,...
@ VOLUME_MIX_AVERAGE
Evenly divide the overlapping clips volume keyframes, so that the sum does not exceed 100%.
Definition: Enums.h:80
@ VOLUME_MIX_REDUCE
Reduce volume by about %25, and then mix (louder, but could cause pops if the sum exceeds 100%)
Definition: Enums.h:81
const Json::Value stringToJson(const std::string value)
Definition: Json.cpp:34
This struct holds the information of a bounding-box.
float cy
y-coordinate of the bounding box center
float height
bounding box height
float cx
x-coordinate of the bounding box center
float width
bounding box width
float angle
bounding box rotation angle [degrees]
Like CompareClipEndFrames, but for effects.
Definition: Timeline.h:94
This struct contains info about a media file, such as height, width, frames per second,...
Definition: ReaderBase.h:61
float duration
Length of time (in seconds)
Definition: ReaderBase.h:65
int width
The width of the video (in pixesl)
Definition: ReaderBase.h:68
int channels
The number of audio channels used in the audio stream.
Definition: ReaderBase.h:83
openshot::Fraction fps
Frames per second, as a fraction (i.e. 24/1 = 24 fps)
Definition: ReaderBase.h:70
openshot::Fraction display_ratio
The ratio of width to height of the video stream (i.e. 640x480 has a ratio of 4/3)
Definition: ReaderBase.h:73
int height
The height of the video (in pixels)
Definition: ReaderBase.h:67
int64_t video_length
The number of frames in the video stream.
Definition: ReaderBase.h:75
std::string acodec
The name of the audio codec used to encode / decode the video stream.
Definition: ReaderBase.h:80
std::string vcodec
The name of the video codec used to encode / decode the video stream.
Definition: ReaderBase.h:74
openshot::Fraction pixel_ratio
The pixel ratio of the video stream as a fraction (i.e. some pixels are not square)
Definition: ReaderBase.h:72
openshot::ChannelLayout channel_layout
The channel layout (mono, stereo, 5 point surround, etc...)
Definition: ReaderBase.h:84
bool has_video
Determines if this file has a video stream.
Definition: ReaderBase.h:62
bool has_audio
Determines if this file has an audio stream.
Definition: ReaderBase.h:63
openshot::Fraction video_timebase
The video timebase determines how long each frame stays on the screen.
Definition: ReaderBase.h:77
int sample_rate
The number of audio samples per second (44100 is a common sample rate)
Definition: ReaderBase.h:82
This struct contains info about the current Timeline clip instance.
Definition: TimelineBase.h:47
bool is_top_clip
Is clip on top (if overlapping another clip)
Definition: TimelineBase.h:48