28 #include "../include/Timeline.h" 34 is_open(false), auto_map_clips(true)
73 apply_mapper_to_clip(clip);
76 clips.push_back(clip);
86 effects.push_back(effect);
95 effects.remove(effect);
105 void Timeline::apply_mapper_to_clip(
Clip* clip)
109 if (clip->
Reader()->Name() ==
"FrameMapper")
125 clip->
Reader(clip_reader);
132 final_cache->
Clear();
135 list<Clip*>::iterator clip_itr;
136 for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
139 Clip *clip = (*clip_itr);
142 apply_mapper_to_clip(clip);
147 float Timeline::calculate_time(
long int number,
Fraction rate)
150 float raw_fps = rate.
ToFloat();
153 return float(number - 1) / raw_fps;
157 tr1::shared_ptr<Frame> Timeline::apply_effects(tr1::shared_ptr<Frame> frame,
long int timeline_frame_number,
int layer)
160 float requested_time = calculate_time(timeline_frame_number,
info.
fps);
163 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::apply_effects",
"requested_time", requested_time,
"frame->number", frame->number,
"timeline_frame_number", timeline_frame_number,
"layer", layer,
"", -1,
"", -1);
166 list<EffectBase*>::iterator effect_itr;
167 for (effect_itr=effects.begin(); effect_itr != effects.end(); ++effect_itr)
173 float effect_duration = effect->
End() - effect->
Start();
174 bool does_effect_intersect = (effect->
Position() <= requested_time && effect->
Position() + effect_duration >= requested_time && effect->
Layer() == layer);
177 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::apply_effects (Does effect intersect)",
"effect->Position()", effect->
Position(),
"requested_time", requested_time,
"does_effect_intersect", does_effect_intersect,
"timeline_frame_number", timeline_frame_number,
"layer", layer,
"effect_duration", effect_duration);
180 if (does_effect_intersect)
183 float time_diff = (requested_time - effect->
Position()) + effect->
Start();
184 int effect_frame_number = round(time_diff *
info.
fps.
ToFloat()) + 1;
187 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::apply_effects (Process Effect)",
"time_diff", time_diff,
"effect_frame_number", effect_frame_number,
"effect_duration", effect_duration,
"does_effect_intersect", does_effect_intersect,
"", -1,
"", -1);
190 frame = effect->
GetFrame(frame, effect_frame_number);
200 tr1::shared_ptr<Frame> Timeline::GetOrCreateFrame(
Clip* clip,
long int number)
202 tr1::shared_ptr<Frame> new_frame;
209 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetOrCreateFrame (from reader)",
"number", number,
"samples_in_frame", samples_in_frame,
"", -1,
"", -1,
"", -1,
"", -1);
215 new_frame = tr1::shared_ptr<Frame>(clip->
GetFrame(number));
229 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetOrCreateFrame (create blank)",
"number", number,
"samples_in_frame", samples_in_frame,
"", -1,
"", -1,
"", -1,
"", -1);
239 void Timeline::add_layer(tr1::shared_ptr<Frame> new_frame,
Clip* source_clip,
long int clip_frame_number,
long int timeline_frame_number,
bool is_top_clip)
242 tr1::shared_ptr<Frame> source_frame = GetOrCreateFrame(source_clip, clip_frame_number);
249 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer",
"new_frame->number", new_frame->number,
"clip_frame_number", clip_frame_number,
"timeline_frame_number", timeline_frame_number,
"", -1,
"", -1,
"", -1);
255 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Generate Waveform Image)",
"source_frame->number", source_frame->number,
"source_clip->Waveform()", source_clip->
Waveform(),
"clip_frame_number", clip_frame_number,
"", -1,
"", -1,
"", -1);
264 tr1::shared_ptr<QImage> source_image = source_frame->GetWaveform(
info.
width,
info.
height, red, green, blue, alpha);
265 source_frame->AddImage(tr1::shared_ptr<QImage>(source_image));
270 if (is_top_clip && source_frame)
271 source_frame = apply_effects(source_frame, timeline_frame_number, source_clip->
Layer());
274 tr1::shared_ptr<QImage> source_image;
277 if (source_clip->
Reader()->info.has_audio) {
280 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Copy Audio)",
"source_clip->Reader()->info.has_audio", source_clip->
Reader()->info.has_audio,
"source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(),
"info.channels",
info.
channels,
"clip_frame_number", clip_frame_number,
"timeline_frame_number", timeline_frame_number,
"", -1);
282 if (source_frame->GetAudioChannelsCount() ==
info.
channels)
283 for (
int channel = 0; channel < source_frame->GetAudioChannelsCount(); channel++)
285 float initial_volume = 1.0f;
286 float previous_volume = source_clip->
volume.
GetValue(clip_frame_number - 1);
292 if (channel_filter != -1 && channel_filter != channel)
296 if (channel_mapping == -1)
297 channel_mapping = channel;
300 if (isEqual(previous_volume, volume))
301 initial_volume = volume;
304 if (!isEqual(previous_volume, volume))
305 source_frame->ApplyGainRamp(channel_mapping, 0, source_frame->GetAudioSamplesCount(), previous_volume, volume);
311 if (new_frame->GetAudioSamplesCount() != source_frame->GetAudioSamplesCount())
317 new_frame->AddAudio(
false, channel_mapping, 0, source_frame->GetAudioSamples(channel), source_frame->GetAudioSamplesCount(), initial_volume);
322 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (No Audio Copied - Wrong # of Channels)",
"source_clip->Reader()->info.has_audio", source_clip->
Reader()->info.has_audio,
"source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(),
"info.channels",
info.
channels,
"clip_frame_number", clip_frame_number,
"timeline_frame_number", timeline_frame_number,
"", -1);
327 if (!source_clip->
Waveform() && !source_clip->
Reader()->info.has_video)
332 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Get Source Image)",
"source_frame->number", source_frame->number,
"source_clip->Waveform()", source_clip->
Waveform(),
"clip_frame_number", clip_frame_number,
"", -1,
"", -1,
"", -1);
335 source_image = source_frame->GetImage();
338 int source_width = source_image->width();
339 int source_height = source_image->height();
344 float alpha = source_clip->
alpha.
GetValue(clip_frame_number);
347 unsigned char *pixels = (
unsigned char *) source_image->bits();
350 for (
int pixel = 0, byte_index=0; pixel < source_image->width() * source_image->height(); pixel++, byte_index+=4)
353 int A = pixels[byte_index + 3];
356 pixels[byte_index + 3] *= alpha;
360 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Set Alpha & Opacity)",
"alpha", alpha,
"source_frame->number", source_frame->number,
"clip_frame_number", clip_frame_number,
"", -1,
"", -1,
"", -1);
364 switch (source_clip->
scale)
368 source_image = tr1::shared_ptr<QImage>(
new QImage(source_image->scaled(
info.
width,
info.
height, Qt::KeepAspectRatio, Qt::SmoothTransformation)));
369 source_width = source_image->width();
370 source_height = source_image->height();
373 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Scale: SCALE_FIT)",
"source_frame->number", source_frame->number,
"source_width", source_width,
"source_height", source_height,
"", -1,
"", -1,
"", -1);
378 source_image = tr1::shared_ptr<QImage>(
new QImage(source_image->scaled(
info.
width,
info.
height, Qt::IgnoreAspectRatio, Qt::SmoothTransformation)));
379 source_width = source_image->width();
380 source_height = source_image->height();
383 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Scale: SCALE_STRETCH)",
"source_frame->number", source_frame->number,
"source_width", source_width,
"source_height", source_height,
"", -1,
"", -1,
"", -1);
387 QSize width_size(
info.
width, round(
info.
width / (
float(source_width) /
float(source_height))));
388 QSize height_size(round(
info.
height / (
float(source_height) /
float(source_width))),
info.
height);
392 source_image = tr1::shared_ptr<QImage>(
new QImage(source_image->scaled(width_size.width(), width_size.height(), Qt::KeepAspectRatio, Qt::SmoothTransformation)));
394 source_image = tr1::shared_ptr<QImage>(
new QImage(source_image->scaled(height_size.width(), height_size.height(), Qt::KeepAspectRatio, Qt::SmoothTransformation)));
395 source_width = source_image->width();
396 source_height = source_image->height();
399 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Scale: SCALE_CROP)",
"source_frame->number", source_frame->number,
"source_width", source_width,
"source_height", source_height,
"", -1,
"", -1,
"", -1);
410 float scaled_source_width = source_width * sx;
411 float scaled_source_height = source_height * sy;
416 x = (
info.
width - scaled_source_width) / 2.0;
422 y = (
info.
height - scaled_source_height) / 2.0;
425 x = (
info.
width - scaled_source_width) / 2.0;
426 y = (
info.
height - scaled_source_height) / 2.0;
430 y = (
info.
height - scaled_source_height) / 2.0;
436 x = (
info.
width - scaled_source_width) / 2.0;
446 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Gravity)",
"source_frame->number", source_frame->number,
"source_clip->gravity", source_clip->
gravity,
"info.width",
info.
width,
"source_width", source_width,
"info.height",
info.
height,
"source_height", source_height);
459 bool transformed =
false;
460 QTransform transform;
463 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Build QTransform - if needed)",
"source_frame->number", source_frame->number,
"x", x,
"y", y,
"r", r,
"sx", sx,
"sy", sy);
465 if (!isEqual(r, 0)) {
467 float origin_x = x + (source_width / 2.0);
468 float origin_y = y + (source_height / 2.0);
469 transform.translate(origin_x, origin_y);
471 transform.translate(-origin_x,-origin_y);
475 if (!isEqual(x, 0) || !isEqual(y, 0)) {
477 transform.translate(x, y);
481 if (!isEqual(sx, 0) || !isEqual(sy, 0)) {
483 transform.scale(sx, sy);
487 if (!isEqual(shear_x, 0) || !isEqual(shear_y, 0)) {
489 transform.shear(shear_x, shear_y);
494 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Transform: Composite Image Layer: Prepare)",
"source_frame->number", source_frame->number,
"offset_x", offset_x,
"offset_y", offset_y,
"new_frame->GetImage()->width()", new_frame->GetImage()->width(),
"transformed", transformed,
"", -1);
497 tr1::shared_ptr<QImage> new_image = new_frame->GetImage();
500 QPainter painter(new_image.get());
501 painter.setRenderHints(QPainter::Antialiasing | QPainter::SmoothPixmapTransform | QPainter::TextAntialiasing,
true);
505 painter.setTransform(transform);
508 painter.setCompositionMode(QPainter::CompositionMode_SourceOver);
509 painter.drawImage(0, 0, *source_image);
514 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Add transform selection handles)",
"source_frame->number", source_frame->number,
"offset_x", offset_x,
"offset_y", offset_y,
"new_frame->GetImage()->width()", new_frame->GetImage()->width(),
"transformed", transformed,
"", -1);
517 painter.fillRect(0.0, 0.0, 12.0/sx, 12.0/sy, QBrush(QColor(
"#53a0ed")));
518 painter.fillRect(source_width - (12.0/sx), 0, 12.0/sx, 12.0/sy, QBrush(QColor(
"#53a0ed")));
519 painter.fillRect(0.0, source_height - (12.0/sy), 12.0/sx, 12.0/sy, QBrush(QColor(
"#53a0ed")));
520 painter.fillRect(source_width - (12.0/sx), source_height - (12.0/sy), 12.0/sx, 12.0/sy, QBrush(QColor(
"#53a0ed")));
523 painter.fillRect(0.0 + (source_width / 2.0) - (6.0/sx), 0, 12.0/sx, 12.0/sy, QBrush(QColor(
"#53a0ed")));
524 painter.fillRect(0.0 + (source_width / 2.0) - (6.0/sx), source_height - (6.0/sy), 12.0/sx, 12.0/sy, QBrush(QColor(
"#53a0ed")));
525 painter.fillRect(0.0, (source_height / 2.0) - (6.0/sy), 12.0/sx, 12.0/sy, QBrush(QColor(
"#53a0ed")));
526 painter.fillRect(source_width - (12.0/sx), (source_height / 2.0) - (6.0/sy), 12.0/sx, 12.0/sy, QBrush(QColor(
"#53a0ed")));
530 painter.setBrush(QColor(83, 160, 237, 122));
531 painter.setPen(Qt::NoPen);
532 painter.drawEllipse((source_width / 2.0) - (25.0/sx), (source_height / 2.0) - (25.0/sy), 50.0/sx, 50.0/sy);
538 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Transform: Composite Image Layer: Completed)",
"source_frame->number", source_frame->number,
"offset_x", offset_x,
"offset_y", offset_y,
"new_frame->GetImage()->width()", new_frame->GetImage()->width(),
"transformed", transformed,
"", -1);
542 void Timeline::update_open_clips(
Clip *clip,
bool does_clip_intersect)
544 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::update_open_clips (before)",
"does_clip_intersect", does_clip_intersect,
"closing_clips.size()", closing_clips.size(),
"open_clips.size()", open_clips.size(),
"", -1,
"", -1,
"", -1);
547 bool clip_found = open_clips.count(clip);
549 if (clip_found && !does_clip_intersect)
552 open_clips.erase(clip);
557 else if (!clip_found && does_clip_intersect)
560 open_clips[clip] = clip;
567 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::update_open_clips (after)",
"does_clip_intersect", does_clip_intersect,
"clip_found", clip_found,
"closing_clips.size()", closing_clips.size(),
"open_clips.size()", open_clips.size(),
"", -1,
"", -1);
571 void Timeline::sort_clips()
574 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::SortClips",
"clips.size()", clips.size(),
"", -1,
"", -1,
"", -1,
"", -1,
"", -1);
581 void Timeline::sort_effects()
590 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::Close",
"", -1,
"", -1,
"", -1,
"", -1,
"", -1,
"", -1);
593 list<Clip*>::iterator clip_itr;
594 for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
597 Clip *clip = (*clip_itr);
600 update_open_clips(clip,
false);
607 final_cache->
Clear();
617 bool Timeline::isEqual(
double a,
double b)
619 return fabs(a - b) < 0.000001;
627 throw ReaderClosed(
"The Timeline is closed. Call Open() before calling this method.",
"");
630 if (requested_frame < 1)
634 tr1::shared_ptr<Frame> frame = final_cache->
GetFrame(requested_frame);
637 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Cached frame found)",
"requested_frame", requested_frame,
"", -1,
"", -1,
"", -1,
"", -1,
"", -1);
648 frame = final_cache->
GetFrame(requested_frame);
651 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Cached frame found on 2nd look)",
"requested_frame", requested_frame,
"", -1,
"", -1,
"", -1,
"", -1,
"", -1);
662 vector<Clip*> nearby_clips = find_intersecting_clips(requested_frame, minimum_frames,
true);
666 omp_set_nested(
true);
669 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame",
"requested_frame", requested_frame,
"minimum_frames", minimum_frames,
"OPEN_MP_NUM_PROCESSORS",
OPEN_MP_NUM_PROCESSORS,
"", -1,
"", -1,
"", -1);
673 for (
long int frame_number = requested_frame; frame_number < requested_frame + minimum_frames; frame_number++)
676 float requested_time = calculate_time(frame_number,
info.
fps);
678 for (
int clip_index = 0; clip_index < nearby_clips.size(); clip_index++)
681 Clip *clip = nearby_clips[clip_index];
682 bool does_clip_intersect = (clip->
Position() <= requested_time && clip->
Position() + clip->
Duration() >= requested_time);
683 if (does_clip_intersect)
686 float time_diff = (requested_time - clip->
Position()) + clip->
Start();
687 int clip_frame_number = round(time_diff *
info.
fps.
ToFloat()) + 1;
697 #pragma omp for ordered firstprivate(nearby_clips, requested_frame, minimum_frames) 698 for (
long int frame_number = requested_frame; frame_number < requested_frame + minimum_frames; frame_number++)
701 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (processing frame)",
"frame_number", frame_number,
"omp_get_thread_num()", omp_get_thread_num(),
"", -1,
"", -1,
"", -1,
"", -1);
708 new_frame->AddAudioSilence(samples_in_frame);
713 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Adding solid color)",
"frame_number", frame_number,
"info.width",
info.
width,
"info.height",
info.
height,
"", -1,
"", -1,
"", -1);
721 float requested_time = calculate_time(frame_number,
info.
fps);
724 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Loop through clips)",
"frame_number", frame_number,
"requested_time", requested_time,
"clips.size()", clips.size(),
"nearby_clips.size()", nearby_clips.size(),
"", -1,
"", -1);
727 for (
int clip_index = 0; clip_index < nearby_clips.size(); clip_index++)
730 Clip *clip = nearby_clips[clip_index];
733 bool does_clip_intersect = (clip->
Position() <= requested_time && clip->
Position() + clip->
Duration() >= requested_time);
736 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Does clip intersect)",
"frame_number", frame_number,
"requested_time", requested_time,
"clip->Position()", clip->
Position(),
"clip->Duration()", clip->
Duration(),
"does_clip_intersect", does_clip_intersect,
"", -1);
739 if (does_clip_intersect)
742 bool is_top_clip =
true;
743 for (
int top_clip_index = 0; top_clip_index < nearby_clips.size(); top_clip_index++)
745 Clip *nearby_clip = nearby_clips[top_clip_index];
746 if (clip->
Id() != nearby_clip->
Id() && clip->
Layer() == nearby_clip->
Layer() &&
755 float time_diff = (requested_time - clip->
Position()) + clip->
Start();
756 int clip_frame_number = round(time_diff *
info.
fps.
ToFloat()) + 1;
759 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Calculate clip's frame #)",
"time_diff", time_diff,
"requested_time", requested_time,
"clip->Position()", clip->
Position(),
"clip->Start()", clip->
Start(),
"info.fps.ToFloat()",
info.
fps.
ToFloat(),
"clip_frame_number", clip_frame_number);
762 add_layer(new_frame, clip, clip_frame_number, frame_number, is_top_clip);
766 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (clip does not intersect)",
"frame_number", frame_number,
"requested_time", requested_time,
"does_clip_intersect", does_clip_intersect,
"", -1,
"", -1,
"", -1);
771 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Add frame to cache)",
"frame_number", frame_number,
"info.width",
info.
width,
"info.height",
info.
height,
"", -1,
"", -1,
"", -1);
774 new_frame->SetFrameNumber(frame_number);
777 final_cache->
Add(new_frame);
783 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (end parallel region)",
"requested_frame", requested_frame,
"omp_get_thread_num()", omp_get_thread_num(),
"", -1,
"", -1,
"", -1,
"", -1);
786 return final_cache->
GetFrame(requested_frame);
792 vector<Clip*> Timeline::find_intersecting_clips(
long int requested_frame,
int number_of_frames,
bool include)
795 vector<Clip*> matching_clips;
798 float min_requested_time = calculate_time(requested_frame,
info.
fps);
799 float max_requested_time = calculate_time(requested_frame + (number_of_frames - 1),
info.
fps);
805 list<Clip*>::iterator clip_itr;
806 for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
809 Clip *clip = (*clip_itr);
812 float clip_duration = clip->
End() - clip->
Start();
813 bool does_clip_intersect = (clip->
Position() <= min_requested_time && clip->
Position() + clip_duration >= min_requested_time) ||
814 (clip->
Position() > min_requested_time && clip->
Position() <= max_requested_time);
817 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::find_intersecting_clips (Is clip near or intersecting)",
"requested_frame", requested_frame,
"min_requested_time", min_requested_time,
"max_requested_time", max_requested_time,
"clip->Position()", clip->
Position(),
"clip_duration", clip_duration,
"does_clip_intersect", does_clip_intersect);
820 #pragma omp critical (reader_lock) 821 update_open_clips(clip, does_clip_intersect);
825 if (does_clip_intersect && include)
827 matching_clips.push_back(clip);
829 else if (!does_clip_intersect && !include)
831 matching_clips.push_back(clip);
836 return matching_clips;
842 final_cache = new_cache;
857 root[
"type"] =
"Timeline";
864 root[
"clips"] = Json::Value(Json::arrayValue);
867 list<Clip*>::iterator clip_itr;
868 for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
871 Clip *existing_clip = (*clip_itr);
872 root[
"clips"].append(existing_clip->
JsonValue());
876 root[
"effects"] = Json::Value(Json::arrayValue);
879 list<EffectBase*>::iterator effect_itr;
880 for (effect_itr=effects.begin(); effect_itr != effects.end(); ++effect_itr)
884 root[
"effects"].append(existing_effect->
JsonValue());
897 bool success = reader.parse( value, root );
900 throw InvalidJSON(
"JSON could not be parsed (or is invalid)",
"");
910 throw InvalidJSON(
"JSON is invalid (missing keys or invalid data types)",
"");
923 if (!root[
"clips"].isNull()) {
928 for (
int x = 0; x < root[
"clips"].size(); x++) {
930 Json::Value existing_clip = root[
"clips"][x];
943 if (!root[
"effects"].isNull()) {
948 for (
int x = 0; x < root[
"effects"].size(); x++) {
950 Json::Value existing_effect = root[
"effects"][x];
955 if (!existing_effect[
"type"].isNull()) {
968 if (!root[
"duration"].isNull()) {
981 bool success = reader.parse( value, root );
982 if (!success || !root.isArray())
984 throw InvalidJSON(
"JSON could not be parsed (or is invalid).",
"");
989 for (
int x = 0; x < root.size(); x++) {
991 Json::Value change = root[x];
992 string root_key = change[
"key"][(uint)0].asString();
995 if (root_key ==
"clips")
997 apply_json_to_clips(change);
999 else if (root_key ==
"effects")
1001 apply_json_to_effects(change);
1005 apply_json_to_timeline(change);
1012 throw InvalidJSON(
"JSON is invalid (missing keys or invalid data types)",
"");
1017 void Timeline::apply_json_to_clips(Json::Value change)
throw(
InvalidJSONKey) {
1020 string change_type = change[
"type"].asString();
1021 string clip_id =
"";
1022 Clip *existing_clip = NULL;
1025 for (
int x = 0; x < change[
"key"].size(); x++) {
1027 Json::Value key_part = change[
"key"][x];
1029 if (key_part.isObject()) {
1031 if (!key_part[
"id"].isNull()) {
1033 clip_id = key_part[
"id"].asString();
1036 list<Clip*>::iterator clip_itr;
1037 for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
1040 Clip *c = (*clip_itr);
1041 if (c->
Id() == clip_id) {
1053 if (existing_clip && change[
"key"].size() == 4 && change[
"key"][2] ==
"effects")
1056 Json::Value key_part = change[
"key"][3];
1058 if (key_part.isObject()) {
1060 if (!key_part[
"id"].isNull())
1063 string effect_id = key_part[
"id"].asString();
1066 list<EffectBase*> effect_list = existing_clip->
Effects();
1067 list<EffectBase*>::iterator effect_itr;
1068 for (effect_itr=effect_list.begin(); effect_itr != effect_list.end(); ++effect_itr)
1072 if (e->
Id() == effect_id) {
1074 apply_json_to_effects(change, e);
1083 if (!change[
"value"].isArray() && !change[
"value"][
"position"].isNull()) {
1084 long int new_starting_frame = change[
"value"][
"position"].asDouble() *
info.
fps.
ToDouble();
1085 long int new_ending_frame = (change[
"value"][
"position"].asDouble() + change[
"value"][
"end"].asDouble() - change[
"value"][
"start"].asDouble()) *
info.
fps.
ToDouble();
1086 final_cache->
Remove(new_starting_frame - 2, new_ending_frame + 2);
1090 if (change_type ==
"insert") {
1097 }
else if (change_type ==
"update") {
1100 if (existing_clip) {
1105 final_cache->
Remove(old_starting_frame - 2, old_ending_frame + 2);
1111 }
else if (change_type ==
"delete") {
1114 if (existing_clip) {
1119 final_cache->
Remove(old_starting_frame - 2, old_ending_frame + 2);
1130 void Timeline::apply_json_to_effects(Json::Value change)
throw(
InvalidJSONKey) {
1133 string change_type = change[
"type"].asString();
1137 for (
int x = 0; x < change[
"key"].size(); x++) {
1139 Json::Value key_part = change[
"key"][x];
1141 if (key_part.isObject()) {
1143 if (!key_part[
"id"].isNull())
1146 string effect_id = key_part[
"id"].asString();
1149 list<EffectBase*>::iterator effect_itr;
1150 for (effect_itr=effects.begin(); effect_itr != effects.end(); ++effect_itr)
1154 if (e->
Id() == effect_id) {
1155 existing_effect =
e;
1165 if (existing_effect || change_type ==
"insert")
1167 apply_json_to_effects(change, existing_effect);
1174 string change_type = change[
"type"].asString();
1177 if (!change[
"value"].isArray() && !change[
"value"][
"position"].isNull()) {
1178 long int new_starting_frame = change[
"value"][
"position"].asDouble() *
info.
fps.
ToDouble();
1179 long int new_ending_frame = (change[
"value"][
"position"].asDouble() + change[
"value"][
"end"].asDouble() - change[
"value"][
"start"].asDouble()) *
info.
fps.
ToDouble();
1180 final_cache->
Remove(new_starting_frame - 2, new_ending_frame + 2);
1184 if (change_type ==
"insert") {
1187 string effect_type = change[
"value"][
"type"].asString();
1201 }
else if (change_type ==
"update") {
1204 if (existing_effect) {
1209 final_cache->
Remove(old_starting_frame - 2, old_ending_frame + 2);
1215 }
else if (change_type ==
"delete") {
1218 if (existing_effect) {
1223 final_cache->
Remove(old_starting_frame - 2, old_ending_frame + 2);
1233 void Timeline::apply_json_to_timeline(Json::Value change)
throw(
InvalidJSONKey) {
1236 string change_type = change[
"type"].asString();
1237 string root_key = change[
"key"][(uint)0].asString();
1238 string sub_key =
"";
1239 if (change[
"key"].size() >= 2)
1240 sub_key = change[
"key"][(uint)1].asString();
1243 final_cache->
Clear();
1246 if (change_type ==
"insert" || change_type ==
"update") {
1250 if (root_key ==
"color")
1253 else if (root_key ==
"viewport_scale")
1256 else if (root_key ==
"viewport_x")
1259 else if (root_key ==
"viewport_y")
1262 else if (root_key ==
"duration") {
1267 else if (root_key ==
"width")
1270 else if (root_key ==
"height")
1273 else if (root_key ==
"fps" && sub_key ==
"" && change[
"value"].isObject()) {
1275 if (!change[
"value"][
"num"].isNull())
1276 info.
fps.
num = change[
"value"][
"num"].asInt();
1277 if (!change[
"value"][
"den"].isNull())
1278 info.
fps.
den = change[
"value"][
"den"].asInt();
1280 else if (root_key ==
"fps" && sub_key ==
"num")
1283 else if (root_key ==
"fps" && sub_key ==
"den")
1286 else if (root_key ==
"sample_rate")
1289 else if (root_key ==
"channels")
1292 else if (root_key ==
"channel_layout")
1299 throw InvalidJSONKey(
"JSON change key is invalid", change.toStyledString());
1302 }
else if (change[
"type"].asString() ==
"delete") {
1306 if (root_key ==
"color") {
1312 else if (root_key ==
"viewport_scale")
1314 else if (root_key ==
"viewport_x")
1316 else if (root_key ==
"viewport_y")
1320 throw InvalidJSONKey(
"JSON change key is invalid", change.toStyledString());
void SetJsonValue(Json::Value root)
Load Json::JsonValue into this object.
tr1::shared_ptr< Frame > GetFrame(long int requested_frame)
void Close()
Close the internal reader.
string Json()
Get and Set JSON methods.
Json::Value JsonValue()
Generate Json::JsonValue for this object.
int num
Numerator for the fraction.
Keyframe scale_y
Curve representing the vertical scaling in percent (0 to 1)
CriticalSection getFrameCriticalSection
Section lock for multiple threads.
This abstract class is the base class, used by all effects in libopenshot.
EffectBase * CreateEffect(string effect_type)
Align clip to the right of its parent (middle aligned)
virtual void Add(tr1::shared_ptr< Frame > frame)=0
Add a Frame to the cache.
Keyframe green
Curve representing the green value (0 - 255)
Keyframe viewport_scale
Curve representing the scale of the viewport (0 to 100)
float End()
Override End() method.
virtual tr1::shared_ptr< Frame > GetFrame(long int frame_number)=0
Get a frame from the cache.
tr1::shared_ptr< Frame > GetFrame(long int requested_frame)
Get an openshot::Frame object for a specific frame number of this timeline.
Align clip to the bottom right of its parent.
void SetCache(CacheBase *new_cache)
Set the cache object used by this reader.
Json::Value JsonValue()
Generate Json::JsonValue for this object.
ChannelLayout channel_layout
The channel layout (mono, stereo, 5 point surround, etc...)
GravityType gravity
The gravity of a clip determines where it snaps to its parent.
Keyframe alpha
Curve representing the alpha value (0 - 255)
int width
The width of the video (in pixels)
Keyframe volume
Curve representing the volume (0 to 1)
This class represents a single frame of video (i.e. image & audio data)
float ToFloat()
Return this fraction as a float (i.e. 1/2 = 0.5)
Keyframe red
Curve representing the red value (0 - 255)
float duration
Length of time (in seconds)
Json::Value JsonValue()
Generate Json::JsonValue for this object.
Scale the clip until both height and width fill the canvas (cropping the overlap) ...
float End()
Get end position (in seconds) of clip (trim end of video)
Keyframe viewport_y
Curve representing the y coordinate for the viewport.
Fraction Reciprocal()
Return the reciprocal as a Fraction.
This abstract class is the base class, used by all readers in libopenshot.
int Layer()
Get layer of clip on timeline (lower number is covered by higher numbers)
#define OPEN_MP_NUM_PROCESSORS
Exception when a reader is closed, and a frame is requested.
bool has_video
Determines if this file has a video stream.
void SetMaxBytesFromInfo(long int number_of_frames, int width, int height, int sample_rate, int channels)
Set maximum bytes to a different amount based on a ReaderInfo struct.
virtual tr1::shared_ptr< Frame > GetFrame(tr1::shared_ptr< Frame > frame, long int frame_number)=0
This method is required for all derived classes of EffectBase, and returns a modified openshot::Frame...
Color wave_color
Curve representing the color of the audio wave form.
Align clip to the top right of its parent.
virtual Json::Value JsonValue()=0
Generate Json::JsonValue for this object.
Align clip to the bottom left of its parent.
void SetJsonValue(Json::Value root)
Load Json::JsonValue into this object.
void SetJsonValue(Json::Value root)
Load Json::JsonValue into this object.
Exception for missing JSON Change key.
Keyframe location_x
Curve representing the relative X position in percent based on the gravity (-1 to 1) ...
float GetValue(long int index)
Get the value at a specific index.
Keyframe location_y
Curve representing the relative Y position in percent based on the gravity (-1 to 1) ...
bool has_audio
Determines if this file has an audio stream.
This class represents a clip (used to arrange readers on the timeline)
void ChangeMapping(Fraction target_fps, PulldownType pulldown, int target_sample_rate, int target_channels, ChannelLayout target_channel_layout)
Change frame rate or audio mapping details.
Keyframe blue
Curve representing the blue value (0 - 255)
Keyframe shear_x
Curve representing X shear angle in degrees (-45.0=left, 45.0=right)
bool Waveform()
Waveform property.
void SetMaxSize(int width, int height)
Set Max Image Size (used for performance optimization)
ScaleType scale
The scale determines how a clip should be resized to fit its parent.
int height
The height of the video (in pixels)
Align clip to the bottom center of its parent.
Exception for files that can not be found or opened.
string Id()
Get basic properties.
Keyframe channel_filter
Audio channel filter and mappings.
TransformHandleType handles
The transform handle determines if selection handles are added to clip (to display the clips edges) ...
Add selection handles to clip (useful for editors to display edges of clip)
float Position()
Get position on timeline (in seconds)
static CrashHandler * Instance()
void ApplyMapperToClips()
Apply the timeline's framerate and samplerate to all clips.
void Reader(ReaderBase *new_reader)
Set the current reader.
list< EffectBase * > Effects()
Return the list of effects on the timeline.
void AppendDebugMethod(string method_name, string arg1_name, float arg1_value, string arg2_name, float arg2_value, string arg3_name, float arg3_value, string arg4_name, float arg4_value, string arg5_name, float arg5_value, string arg6_name, float arg6_value)
Append debug information.
This class represents a fraction.
All cache managers in libopenshot are based on this CacheBase class.
Keyframe channel_mapping
A number representing an audio channel to output (only works when filtering a channel) ...
ChannelLayout
This enumeration determines the audio channel layout (such as stereo, mono, 5 point surround...
Align clip to the left of its parent (middle aligned)
void AddClip(Clip *clip)
Add an openshot::Clip to the timeline.
virtual Json::Value JsonValue()=0
Generate Json::JsonValue for this object.
virtual void SetJsonValue(Json::Value root)=0
Load Json::JsonValue into this object.
void Close()
Close the timeline reader (and any resources it was consuming)
Keyframe rotation
Curve representing the rotation (0 to 360)
virtual void SetJsonValue(Json::Value root)=0
Load Json::JsonValue into this object.
Scale the clip until both height and width fill the canvas (distort to fit)
vector< Point > Points
Vector of all Points.
ReaderInfo info
Information about the current media file.
Keyframe shear_y
Curve representing Y shear angle in degrees (-45.0=down, 45.0=up)
Fraction fps
Frames per second, as a fraction (i.e. 24/1 = 24 fps)
Fraction video_timebase
The video timebase determines how long each frame stays on the screen.
Exception for frames that are out of bounds.
This class creates a mapping between 2 different frame rates, applying a specific pull-down technique...
void Open()
Open the internal reader.
This class represents a color (used on the timeline and clips)
static ZmqLogger * Instance()
Create or get an instance of this logger singleton (invoke the class with this method) ...
int GetInt(long int index)
Get the rounded INT value at a specific index.
Align clip to the center of its parent (middle aligned)
void Open()
Open the reader (and start consuming resources)
void ApplyJsonDiff(string value)
Apply a special formatted JSON object, which represents a change to the timeline (add, update, delete) This is primarily designed to keep the timeline (and its child objects... such as clips and effects) in sync with another application... such as OpenShot Video Editor (http://www.openshot.org).
This namespace is the default namespace for all code in the openshot library.
Do not apply pull-down techniques, just repeat or skip entire frames.
virtual void Clear()=0
Clear the cache of all frames.
void RemoveClip(Clip *clip)
Remove an openshot::Clip from the timeline.
void RemoveEffect(EffectBase *effect)
Remove an effect from the timeline.
Exception for invalid JSON.
Keyframe alpha
Curve representing the alpha (1 to 0)
Keyframe viewport_x
Curve representing the x coordinate for the viewport.
void SetJsonValue(Json::Value root)
Load Json::JsonValue into this object.
Keyframe scale_x
Curve representing the horizontal scaling in percent (0 to 1)
string GetColorHex(long int frame_number)
Get the HEX value of a color at a specific frame.
Color color
Background color of timeline canvas.
virtual void Remove(long int frame_number)=0
Remove a specific frame.
Timeline(int width, int height, Fraction fps, int sample_rate, int channels, ChannelLayout channel_layout)
Default Constructor for the timeline (which sets the canvas width and height and FPS) ...
This class returns a listing of all effects supported by libopenshot.
Align clip to the top center of its parent.
void SetJson(string value)
Load JSON string into this object.
int den
Denominator for the fraction.
int channels
The number of audio channels used in the audio stream.
A Keyframe is a collection of Point instances, which is used to vary a number or property over time...
Scale the clip until either height or width fills the canvas (with no cropping)
long int video_length
The number of frames in the video stream.
void AddEffect(EffectBase *effect)
Add an effect to the timeline.
int GetSamplesPerFrame(Fraction fps, int sample_rate, int channels)
Calculate the # of samples per video frame (for the current frame number)
Json::Value JsonValue()
Generate Json::JsonValue for this object.
float Duration()
Get the length of this clip (in seconds)
This class is a memory-based cache manager for Frame objects.
float Start()
Get start position (in seconds) of clip (trim start of video)
double ToDouble()
Return this fraction as a double (i.e. 1/2 = 0.5)
int sample_rate
The number of audio samples per second (44100 is a common sample rate)
Exception when too many seek attempts happen.