28 #include "../include/Timeline.h" 34 is_open(false), auto_map_clips(true)
69 apply_mapper_to_clip(clip);
72 clips.push_back(clip);
82 effects.push_back(effect);
91 effects.remove(effect);
101 void Timeline::apply_mapper_to_clip(
Clip* clip)
105 if (clip->
Reader()->Name() ==
"FrameMapper")
121 clip->
Reader(clip_reader);
131 list<Clip*>::iterator clip_itr;
132 for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
135 Clip *clip = (*clip_itr);
138 apply_mapper_to_clip(clip);
143 float Timeline::calculate_time(
long int number,
Fraction rate)
146 float raw_fps = rate.
ToFloat();
149 return float(number - 1) / raw_fps;
153 tr1::shared_ptr<Frame> Timeline::apply_effects(tr1::shared_ptr<Frame> frame,
long int timeline_frame_number,
int layer)
156 float requested_time = calculate_time(timeline_frame_number,
info.
fps);
159 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::apply_effects",
"requested_time", requested_time,
"frame->number", frame->number,
"timeline_frame_number", timeline_frame_number,
"layer", layer,
"", -1,
"", -1);
162 list<EffectBase*>::iterator effect_itr;
163 for (effect_itr=effects.begin(); effect_itr != effects.end(); ++effect_itr)
169 float effect_duration = effect->
End() - effect->
Start();
170 bool does_effect_intersect = (effect->
Position() <= requested_time && effect->
Position() + effect_duration >= requested_time && effect->
Layer() == layer);
173 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::apply_effects (Does effect intersect)",
"effect->Position()", effect->
Position(),
"requested_time", requested_time,
"does_effect_intersect", does_effect_intersect,
"timeline_frame_number", timeline_frame_number,
"layer", layer,
"effect_duration", effect_duration);
176 if (does_effect_intersect)
179 float time_diff = (requested_time - effect->
Position()) + effect->
Start();
180 int effect_frame_number = round(time_diff *
info.
fps.
ToFloat()) + 1;
183 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::apply_effects (Process Effect)",
"time_diff", time_diff,
"effect_frame_number", effect_frame_number,
"effect_duration", effect_duration,
"does_effect_intersect", does_effect_intersect,
"", -1,
"", -1);
186 frame = effect->
GetFrame(frame, effect_frame_number);
196 tr1::shared_ptr<Frame> Timeline::GetOrCreateFrame(
Clip* clip,
long int number)
198 tr1::shared_ptr<Frame> new_frame;
205 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetOrCreateFrame (from reader)",
"number", number,
"samples_in_frame", samples_in_frame,
"", -1,
"", -1,
"", -1,
"", -1);
208 new_frame = tr1::shared_ptr<Frame>(clip->
GetFrame(number));
222 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetOrCreateFrame (create blank)",
"number", number,
"samples_in_frame", samples_in_frame,
"", -1,
"", -1,
"", -1,
"", -1);
232 void Timeline::add_layer(tr1::shared_ptr<Frame> new_frame,
Clip* source_clip,
long int clip_frame_number,
long int timeline_frame_number,
bool is_top_clip)
235 tr1::shared_ptr<Frame> source_frame = GetOrCreateFrame(source_clip, clip_frame_number);
242 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer",
"new_frame->number", new_frame->number,
"clip_frame_number", clip_frame_number,
"timeline_frame_number", timeline_frame_number,
"", -1,
"", -1,
"", -1);
248 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Generate Waveform Image)",
"source_frame->number", source_frame->number,
"source_clip->Waveform()", source_clip->
Waveform(),
"clip_frame_number", clip_frame_number,
"", -1,
"", -1,
"", -1);
257 tr1::shared_ptr<QImage> source_image = source_frame->GetWaveform(
info.
width,
info.
height, red, green, blue, alpha);
258 source_frame->AddImage(tr1::shared_ptr<QImage>(source_image));
264 source_frame = apply_effects(source_frame, timeline_frame_number, source_clip->
Layer());
267 tr1::shared_ptr<QImage> source_image;
270 if (source_clip->
Reader()->info.has_audio) {
273 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Copy Audio)",
"source_clip->Reader()->info.has_audio", source_clip->
Reader()->info.has_audio,
"source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(),
"info.channels",
info.
channels,
"clip_frame_number", clip_frame_number,
"timeline_frame_number", timeline_frame_number,
"", -1);
275 if (source_frame->GetAudioChannelsCount() ==
info.
channels)
276 for (
int channel = 0; channel < source_frame->GetAudioChannelsCount(); channel++)
278 float initial_volume = 1.0f;
279 float previous_volume = source_clip->
volume.
GetValue(clip_frame_number - 1);
285 if (channel_filter != -1 && channel_filter != channel)
289 if (channel_mapping == -1)
290 channel_mapping = channel;
293 if (isEqual(previous_volume, volume))
294 initial_volume = volume;
297 if (!isEqual(previous_volume, volume))
298 source_frame->ApplyGainRamp(channel_mapping, 0, source_frame->GetAudioSamplesCount(), previous_volume, volume);
304 if (new_frame->GetAudioSamplesCount() != source_frame->GetAudioSamplesCount())
310 new_frame->AddAudio(
false, channel_mapping, 0, source_frame->GetAudioSamples(channel), source_frame->GetAudioSamplesCount(), initial_volume);
315 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (No Audio Copied - Wrong # of Channels)",
"source_clip->Reader()->info.has_audio", source_clip->
Reader()->info.has_audio,
"source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(),
"info.channels",
info.
channels,
"clip_frame_number", clip_frame_number,
"timeline_frame_number", timeline_frame_number,
"", -1);
320 if (!source_clip->
Waveform() && !source_clip->
Reader()->info.has_video)
325 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Get Source Image)",
"source_frame->number", source_frame->number,
"source_clip->Waveform()", source_clip->
Waveform(),
"clip_frame_number", clip_frame_number,
"", -1,
"", -1,
"", -1);
328 source_image = source_frame->GetImage();
331 int source_width = source_image->width();
332 int source_height = source_image->height();
337 float alpha = source_clip->
alpha.
GetValue(clip_frame_number);
340 unsigned char *pixels = (
unsigned char *) source_image->bits();
343 for (
int pixel = 0, byte_index=0; pixel < source_image->width() * source_image->height(); pixel++, byte_index+=4)
346 int A = pixels[byte_index + 3];
349 pixels[byte_index + 3] *= alpha;
353 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Set Alpha & Opacity)",
"alpha", alpha,
"source_frame->number", source_frame->number,
"clip_frame_number", clip_frame_number,
"", -1,
"", -1,
"", -1);
357 switch (source_clip->
scale)
361 source_image = tr1::shared_ptr<QImage>(
new QImage(source_image->scaled(
info.
width,
info.
height, Qt::KeepAspectRatio, Qt::SmoothTransformation)));
362 source_width = source_image->width();
363 source_height = source_image->height();
366 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Scale: SCALE_FIT)",
"source_frame->number", source_frame->number,
"source_width", source_width,
"source_height", source_height,
"", -1,
"", -1,
"", -1);
371 source_image = tr1::shared_ptr<QImage>(
new QImage(source_image->scaled(
info.
width,
info.
height, Qt::IgnoreAspectRatio, Qt::SmoothTransformation)));
372 source_width = source_image->width();
373 source_height = source_image->height();
376 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Scale: SCALE_STRETCH)",
"source_frame->number", source_frame->number,
"source_width", source_width,
"source_height", source_height,
"", -1,
"", -1,
"", -1);
380 QSize width_size(
info.
width, round(
info.
width / (
float(source_width) /
float(source_height))));
381 QSize height_size(round(
info.
height / (
float(source_height) /
float(source_width))),
info.
height);
385 source_image = tr1::shared_ptr<QImage>(
new QImage(source_image->scaled(width_size.width(), width_size.height(), Qt::KeepAspectRatio, Qt::SmoothTransformation)));
387 source_image = tr1::shared_ptr<QImage>(
new QImage(source_image->scaled(height_size.width(), height_size.height(), Qt::KeepAspectRatio, Qt::SmoothTransformation)));
388 source_width = source_image->width();
389 source_height = source_image->height();
392 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Scale: SCALE_CROP)",
"source_frame->number", source_frame->number,
"source_width", source_width,
"source_height", source_height,
"", -1,
"", -1,
"", -1);
403 float scaled_source_width = source_width * sx;
404 float scaled_source_height = source_height * sy;
409 x = (
info.
width - scaled_source_width) / 2.0;
415 y = (
info.
height - scaled_source_height) / 2.0;
418 x = (
info.
width - scaled_source_width) / 2.0;
419 y = (
info.
height - scaled_source_height) / 2.0;
423 y = (
info.
height - scaled_source_height) / 2.0;
429 x = (
info.
width - scaled_source_width) / 2.0;
439 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Gravity)",
"source_frame->number", source_frame->number,
"source_clip->gravity", source_clip->
gravity,
"info.width",
info.
width,
"source_width", source_width,
"info.height",
info.
height,
"source_height", source_height);
450 bool transformed =
false;
451 QTransform transform;
452 if ((!isEqual(x, 0) || !isEqual(y, 0)) && (isEqual(r, 0) && isEqual(sx, 1) && isEqual(sy, 1) && !is_x_animated && !is_y_animated))
455 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Transform: SIMPLE)",
"source_frame->number", source_frame->number,
"x", x,
"y", y,
"r", r,
"sx", sx,
"sy", sy);
461 transform.translate(x, y);
463 }
else if (!isEqual(r, 0) || !isEqual(x, 0) || !isEqual(y, 0) || !isEqual(sx, 1) || !isEqual(sy, 1))
466 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Transform: COMPLEX)",
"source_frame->number", source_frame->number,
"x", x,
"y", y,
"r", r,
"sx", sx,
"sy", sy);
472 if (!isEqual(r, 0)) {
474 float origin_x = x + (source_width / 2.0);
475 float origin_y = y + (source_height / 2.0);
476 transform.translate(origin_x, origin_y);
478 transform.translate(-origin_x,-origin_y);
482 if (!isEqual(x, 0) || !isEqual(y, 0)) {
484 transform.translate(x, y);
487 if (!isEqual(sx, 0) || !isEqual(sy, 0)) {
489 transform.scale(sx, sy);
493 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Transform: COMPLEX: Completed ScaleRotateTranslateDistortion)",
"source_frame->number", source_frame->number,
"x", x,
"y", y,
"r", r,
"sx", sx,
"sy", sy);
497 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Transform: Composite Image Layer: Prepare)",
"source_frame->number", source_frame->number,
"offset_x", offset_x,
"offset_y", offset_y,
"new_frame->GetImage()->width()", new_frame->GetImage()->width(),
"transformed", transformed,
"", -1);
500 tr1::shared_ptr<QImage> new_image = new_frame->GetImage();
503 QPainter painter(new_image.get());
504 painter.setRenderHints(QPainter::Antialiasing | QPainter::SmoothPixmapTransform | QPainter::TextAntialiasing,
true);
508 painter.setTransform(transform);
511 painter.setCompositionMode(QPainter::CompositionMode_SourceOver);
512 painter.drawImage(0, 0, *source_image);
516 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Transform: Composite Image Layer: Completed)",
"source_frame->number", source_frame->number,
"offset_x", offset_x,
"offset_y", offset_y,
"new_frame->GetImage()->width()", new_frame->GetImage()->width(),
"transformed", transformed,
"", -1);
520 void Timeline::update_open_clips(
Clip *clip,
bool does_clip_intersect)
522 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::update_open_clips (before)",
"does_clip_intersect", does_clip_intersect,
"closing_clips.size()", closing_clips.size(),
"open_clips.size()", open_clips.size(),
"", -1,
"", -1,
"", -1);
525 bool clip_found = open_clips.count(clip);
527 if (clip_found && !does_clip_intersect)
530 open_clips.erase(clip);
535 else if (!clip_found && does_clip_intersect)
538 open_clips[clip] = clip;
545 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::update_open_clips (after)",
"does_clip_intersect", does_clip_intersect,
"clip_found", clip_found,
"closing_clips.size()", closing_clips.size(),
"open_clips.size()", open_clips.size(),
"", -1,
"", -1);
549 void Timeline::sort_clips()
552 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::SortClips",
"clips.size()", clips.size(),
"", -1,
"", -1,
"", -1,
"", -1,
"", -1);
559 void Timeline::sort_effects()
568 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::Close",
"", -1,
"", -1,
"", -1,
"", -1,
"", -1,
"", -1);
571 list<Clip*>::iterator clip_itr;
572 for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
575 Clip *clip = (*clip_itr);
578 update_open_clips(clip,
false);
595 bool Timeline::isEqual(
double a,
double b)
597 return fabs(a - b) < 0.000001;
605 throw ReaderClosed(
"The Timeline is closed. Call Open() before calling this method.",
"");
608 if (requested_frame < 1)
612 tr1::shared_ptr<Frame> frame = final_cache.
GetFrame(requested_frame);
615 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Cached frame found)",
"requested_frame", requested_frame,
"", -1,
"", -1,
"", -1,
"", -1,
"", -1);
626 frame = final_cache.
GetFrame(requested_frame);
629 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Cached frame found on 2nd look)",
"requested_frame", requested_frame,
"", -1,
"", -1,
"", -1,
"", -1,
"", -1);
640 vector<Clip*> nearby_clips = find_intersecting_clips(requested_frame, minimum_frames,
true);
644 omp_set_nested(
true);
647 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame",
"requested_frame", requested_frame,
"minimum_frames", minimum_frames,
"OPEN_MP_NUM_PROCESSORS",
OPEN_MP_NUM_PROCESSORS,
"", -1,
"", -1,
"", -1);
651 for (
long int frame_number = requested_frame; frame_number < requested_frame + minimum_frames; frame_number++)
654 float requested_time = calculate_time(frame_number,
info.
fps);
656 for (
int clip_index = 0; clip_index < nearby_clips.size(); clip_index++)
659 Clip *clip = nearby_clips[clip_index];
660 bool does_clip_intersect = (clip->
Position() <= requested_time && clip->
Position() + clip->
Duration() >= requested_time);
661 if (does_clip_intersect)
664 float time_diff = (requested_time - clip->
Position()) + clip->
Start();
665 int clip_frame_number = round(time_diff *
info.
fps.
ToFloat()) + 1;
675 #pragma omp for ordered firstprivate(nearby_clips, requested_frame, minimum_frames) 676 for (
long int frame_number = requested_frame; frame_number < requested_frame + minimum_frames; frame_number++)
679 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (processing frame)",
"frame_number", frame_number,
"omp_get_thread_num()", omp_get_thread_num(),
"", -1,
"", -1,
"", -1,
"", -1);
690 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Adding solid color)",
"frame_number", frame_number,
"info.width",
info.
width,
"info.height",
info.
height,
"", -1,
"", -1,
"", -1);
698 float requested_time = calculate_time(frame_number,
info.
fps);
701 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Loop through clips)",
"frame_number", frame_number,
"requested_time", requested_time,
"clips.size()", clips.size(),
"nearby_clips.size()", nearby_clips.size(),
"", -1,
"", -1);
704 for (
int clip_index = 0; clip_index < nearby_clips.size(); clip_index++)
707 Clip *clip = nearby_clips[clip_index];
710 bool does_clip_intersect = (clip->
Position() <= requested_time && clip->
Position() + clip->
Duration() >= requested_time);
713 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Does clip intersect)",
"frame_number", frame_number,
"requested_time", requested_time,
"clip->Position()", clip->
Position(),
"clip->Duration()", clip->
Duration(),
"does_clip_intersect", does_clip_intersect,
"", -1);
716 if (does_clip_intersect)
719 bool is_top_clip =
true;
720 for (
int top_clip_index = 0; top_clip_index < nearby_clips.size(); top_clip_index++)
722 Clip *nearby_clip = nearby_clips[top_clip_index];
723 if (clip->
Id() != nearby_clip->
Id() && clip->
Layer() == nearby_clip->
Layer() &&
732 float time_diff = (requested_time - clip->
Position()) + clip->
Start();
733 int clip_frame_number = round(time_diff *
info.
fps.
ToFloat()) + 1;
736 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Calculate clip's frame #)",
"time_diff", time_diff,
"requested_time", requested_time,
"clip->Position()", clip->
Position(),
"clip->Start()", clip->
Start(),
"info.fps.ToFloat()",
info.
fps.
ToFloat(),
"clip_frame_number", clip_frame_number);
739 add_layer(new_frame, clip, clip_frame_number, frame_number, is_top_clip);
743 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (clip does not intersect)",
"frame_number", frame_number,
"requested_time", requested_time,
"does_clip_intersect", does_clip_intersect,
"", -1,
"", -1,
"", -1);
748 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Add frame to cache)",
"frame_number", frame_number,
"info.width",
info.
width,
"info.height",
info.
height,
"", -1,
"", -1,
"", -1);
751 final_cache.
Add(frame_number, new_frame);
757 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (end parallel region)",
"requested_frame", requested_frame,
"omp_get_thread_num()", omp_get_thread_num(),
"", -1,
"", -1,
"", -1,
"", -1);
760 return final_cache.
GetFrame(requested_frame);
766 vector<Clip*> Timeline::find_intersecting_clips(
long int requested_frame,
int number_of_frames,
bool include)
769 vector<Clip*> matching_clips;
772 float min_requested_time = calculate_time(requested_frame,
info.
fps);
773 float max_requested_time = calculate_time(requested_frame + (number_of_frames - 1),
info.
fps);
779 list<Clip*>::iterator clip_itr;
780 for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
783 Clip *clip = (*clip_itr);
786 float clip_duration = clip->
End() - clip->
Start();
787 bool does_clip_intersect = (clip->
Position() <= min_requested_time && clip->
Position() + clip_duration >= min_requested_time) ||
788 (clip->
Position() > min_requested_time && clip->
Position() <= max_requested_time);
791 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::find_intersecting_clips (Is clip near or intersecting)",
"requested_frame", requested_frame,
"min_requested_time", min_requested_time,
"max_requested_time", max_requested_time,
"clip->Position()", clip->
Position(),
"clip_duration", clip_duration,
"does_clip_intersect", does_clip_intersect);
794 #pragma omp critical (reader_lock) 795 update_open_clips(clip, does_clip_intersect);
799 if (does_clip_intersect && include)
801 matching_clips.push_back(clip);
803 else if (!does_clip_intersect && !include)
805 matching_clips.push_back(clip);
810 return matching_clips;
825 root[
"type"] =
"Timeline";
832 root[
"clips"] = Json::Value(Json::arrayValue);
835 list<Clip*>::iterator clip_itr;
836 for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
839 Clip *existing_clip = (*clip_itr);
840 root[
"clips"].append(existing_clip->
JsonValue());
844 root[
"effects"] = Json::Value(Json::arrayValue);
847 list<EffectBase*>::iterator effect_itr;
848 for (effect_itr=effects.begin(); effect_itr != effects.end(); ++effect_itr)
852 root[
"effects"].append(existing_effect->
JsonValue());
865 bool success = reader.parse( value, root );
868 throw InvalidJSON(
"JSON could not be parsed (or is invalid)",
"");
878 throw InvalidJSON(
"JSON is invalid (missing keys or invalid data types)",
"");
891 if (!root[
"clips"].isNull()) {
896 for (
int x = 0; x < root[
"clips"].size(); x++) {
898 Json::Value existing_clip = root[
"clips"][x];
911 if (!root[
"effects"].isNull()) {
916 for (
int x = 0; x < root[
"effects"].size(); x++) {
918 Json::Value existing_effect = root[
"effects"][x];
923 if (!existing_effect[
"type"].isNull()) {
936 if (!root[
"duration"].isNull()) {
952 bool success = reader.parse( value, root );
953 if (!success || !root.isArray())
955 throw InvalidJSON(
"JSON could not be parsed (or is invalid).",
"");
960 for (
int x = 0; x < root.size(); x++) {
962 Json::Value change = root[x];
963 string root_key = change[
"key"][(uint)0].asString();
966 if (root_key ==
"clips")
968 apply_json_to_clips(change);
970 else if (root_key ==
"effects")
972 apply_json_to_effects(change);
976 apply_json_to_timeline(change);
983 throw InvalidJSON(
"JSON is invalid (missing keys or invalid data types)",
"");
991 void Timeline::apply_json_to_clips(Json::Value change)
throw(
InvalidJSONKey) {
994 string change_type = change[
"type"].asString();
996 Clip *existing_clip = NULL;
999 for (
int x = 0; x < change[
"key"].size(); x++) {
1001 Json::Value key_part = change[
"key"][x];
1003 if (key_part.isObject()) {
1005 if (!key_part[
"id"].isNull()) {
1007 clip_id = key_part[
"id"].asString();
1010 list<Clip*>::iterator clip_itr;
1011 for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
1014 Clip *c = (*clip_itr);
1015 if (c->
Id() == clip_id) {
1027 if (existing_clip && change[
"key"].size() == 4 && change[
"key"][2] ==
"effects")
1030 Json::Value key_part = change[
"key"][3];
1032 if (key_part.isObject()) {
1034 if (!key_part[
"id"].isNull())
1037 string effect_id = key_part[
"id"].asString();
1040 list<EffectBase*> effect_list = existing_clip->
Effects();
1041 list<EffectBase*>::iterator effect_itr;
1042 for (effect_itr=effect_list.begin(); effect_itr != effect_list.end(); ++effect_itr)
1046 if (e->
Id() == effect_id) {
1048 apply_json_to_effects(change, e);
1057 if (change_type ==
"insert") {
1064 }
else if (change_type ==
"update") {
1070 }
else if (change_type ==
"delete") {
1081 void Timeline::apply_json_to_effects(Json::Value change)
throw(
InvalidJSONKey) {
1084 string change_type = change[
"type"].asString();
1088 for (
int x = 0; x < change[
"key"].size(); x++) {
1090 Json::Value key_part = change[
"key"][x];
1092 if (key_part.isObject()) {
1094 if (!key_part[
"id"].isNull())
1097 string effect_id = key_part[
"id"].asString();
1100 list<EffectBase*>::iterator effect_itr;
1101 for (effect_itr=effects.begin(); effect_itr != effects.end(); ++effect_itr)
1105 if (e->
Id() == effect_id) {
1106 existing_effect =
e;
1116 if (existing_effect || change_type ==
"insert")
1118 apply_json_to_effects(change, existing_effect);
1125 string change_type = change[
"type"].asString();
1128 if (change_type ==
"insert") {
1131 string effect_type = change[
"value"][
"type"].asString();
1145 }
else if (change_type ==
"update") {
1148 if (existing_effect)
1151 }
else if (change_type ==
"delete") {
1154 if (existing_effect)
1161 void Timeline::apply_json_to_timeline(Json::Value change)
throw(
InvalidJSONKey) {
1164 string change_type = change[
"type"].asString();
1165 string root_key = change[
"key"][(uint)0].asString();
1166 string sub_key =
"";
1167 if (change[
"key"].size() >= 2)
1168 sub_key = change[
"key"][(uint)1].asString();
1171 if (change_type ==
"insert" || change_type ==
"update") {
1175 if (root_key ==
"color")
1178 else if (root_key ==
"viewport_scale")
1181 else if (root_key ==
"viewport_x")
1184 else if (root_key ==
"viewport_y")
1187 else if (root_key ==
"duration") {
1192 else if (root_key ==
"width")
1195 else if (root_key ==
"height")
1198 else if (root_key ==
"fps" && sub_key ==
"" && change[
"value"].isObject()) {
1200 if (!change[
"value"][
"num"].isNull())
1201 info.
fps.
num = change[
"value"][
"num"].asInt();
1202 if (!change[
"value"][
"den"].isNull())
1203 info.
fps.
den = change[
"value"][
"den"].asInt();
1205 else if (root_key ==
"fps" && sub_key ==
"num")
1208 else if (root_key ==
"fps" && sub_key ==
"den")
1211 else if (root_key ==
"sample_rate")
1214 else if (root_key ==
"channels")
1217 else if (root_key ==
"channel_layout")
1224 throw InvalidJSONKey(
"JSON change key is invalid", change.toStyledString());
1227 }
else if (change[
"type"].asString() ==
"delete") {
1231 if (root_key ==
"color") {
1237 else if (root_key ==
"viewport_scale")
1239 else if (root_key ==
"viewport_x")
1241 else if (root_key ==
"viewport_y")
1245 throw InvalidJSONKey(
"JSON change key is invalid", change.toStyledString());
void SetJsonValue(Json::Value root)
Load Json::JsonValue into this object.
tr1::shared_ptr< Frame > GetFrame(long int requested_frame)
void Close()
Close the internal reader.
string Json()
Get and Set JSON methods.
Json::Value JsonValue()
Generate Json::JsonValue for this object.
int num
Numerator for the fraction.
Keyframe scale_y
Curve representing the vertical scaling in percent (0 to 1)
CriticalSection getFrameCriticalSection
Section lock for multiple threads.
This abstract class is the base class, used by all effects in libopenshot.
EffectBase * CreateEffect(string effect_type)
Align clip to the right of its parent (middle aligned)
void Add(long int frame_number, tr1::shared_ptr< Frame > frame)
Add a Frame to the cache.
Keyframe green
Curve representing the green value (0 - 255)
Keyframe viewport_scale
Curve representing the scale of the viewport (0 to 100)
float End()
Override End() method.
tr1::shared_ptr< Frame > GetFrame(long int requested_frame)
Get an openshot::Frame object for a specific frame number of this timeline.
Align clip to the bottom right of its parent.
Json::Value JsonValue()
Generate Json::JsonValue for this object.
ChannelLayout channel_layout
The channel layout (mono, stereo, 5 point surround, etc...)
GravityType gravity
The gravity of a clip determines where it snaps to its parent.
Keyframe alpha
Curve representing the alpha value (0 - 255)
int width
The width of the video (in pixels)
Keyframe volume
Curve representing the volume (0 to 1)
This class represents a single frame of video (i.e. image & audio data)
float ToFloat()
Return this fraction as a float (i.e. 1/2 = 0.5)
Keyframe red
Curve representing the red value (0 - 255)
float duration
Length of time (in seconds)
Json::Value JsonValue()
Generate Json::JsonValue for this object.
Scale the clip until both height and width fill the canvas (cropping the overlap) ...
float End()
Get end position (in seconds) of clip (trim end of video)
Keyframe viewport_y
Curve representing the y coordinate for the viewport.
Fraction Reciprocal()
Return the reciprocal as a Fraction.
This abstract class is the base class, used by all readers in libopenshot.
int Layer()
Get layer of clip on timeline (lower number is covered by higher numbers)
#define OPEN_MP_NUM_PROCESSORS
Exception when a reader is closed, and a frame is requested.
bool has_video
Determines if this file has a video stream.
virtual tr1::shared_ptr< Frame > GetFrame(tr1::shared_ptr< Frame > frame, long int frame_number)=0
This method is required for all derived classes of EffectBase, and returns a modified openshot::Frame...
Color wave_color
Curve representing the color of the audio wave form.
Align clip to the top right of its parent.
virtual Json::Value JsonValue()=0
Generate Json::JsonValue for this object.
Align clip to the bottom left of its parent.
void SetJsonValue(Json::Value root)
Load Json::JsonValue into this object.
void SetJsonValue(Json::Value root)
Load Json::JsonValue into this object.
Exception for missing JSON Change key.
void SetMaxBytesFromInfo(long int number_of_frames, int width, int height, int sample_rate, int channels)
Set maximum bytes to a different amount based on a ReaderInfo struct.
Keyframe location_x
Curve representing the relative X position in percent based on the gravity (-1 to 1) ...
float GetValue(long int index)
Get the value at a specific index.
Keyframe location_y
Curve representing the relative Y position in percent based on the gravity (-1 to 1) ...
bool has_audio
Determines if this file has an audio stream.
This class represents a clip (used to arrange readers on the timeline)
void ChangeMapping(Fraction target_fps, PulldownType pulldown, int target_sample_rate, int target_channels, ChannelLayout target_channel_layout)
Change frame rate or audio mapping details.
Keyframe blue
Curve representing the blue value (0 - 255)
bool Waveform()
Waveform property.
ScaleType scale
The scale determines how a clip should be resized to fit its parent.
int height
The height of the video (in pixels)
Align clip to the bottom center of its parent.
Exception for files that can not be found or opened.
string Id()
Get basic properties.
Keyframe channel_filter
Audio channel filter and mappings.
float Position()
Get position on timeline (in seconds)
void ApplyMapperToClips()
Apply the timeline's framerate and samplerate to all clips.
void Reader(ReaderBase *new_reader)
Set the current reader.
list< EffectBase * > Effects()
Return the list of effects on the timeline.
void AppendDebugMethod(string method_name, string arg1_name, float arg1_value, string arg2_name, float arg2_value, string arg3_name, float arg3_value, string arg4_name, float arg4_value, string arg5_name, float arg5_value, string arg6_name, float arg6_value)
Append debug information.
This class represents a fraction.
Keyframe channel_mapping
A number representing an audio channel to output (only works when filtering a channel) ...
ChannelLayout
This enumeration determines the audio channel layout (such as stereo, mono, 5 point surround...
Align clip to the left of its parent (middle aligned)
void AddClip(Clip *clip)
Add an openshot::Clip to the timeline.
virtual Json::Value JsonValue()=0
Generate Json::JsonValue for this object.
virtual void SetJsonValue(Json::Value root)=0
Load Json::JsonValue into this object.
void Close()
Close the timeline reader (and any resources it was consuming)
Keyframe rotation
Curve representing the rotation (0 to 360)
virtual void SetJsonValue(Json::Value root)=0
Load Json::JsonValue into this object.
Scale the clip until both height and width fill the canvas (distort to fit)
vector< Point > Points
Vector of all Points.
ReaderInfo info
Information about the current media file.
Fraction fps
Frames per second, as a fraction (i.e. 24/1 = 24 fps)
Fraction video_timebase
The video timebase determines how long each frame stays on the screen.
Exception for frames that are out of bounds.
This class creates a mapping between 2 different frame rates, applying a specific pull-down technique...
void Open()
Open the internal reader.
This class represents a color (used on the timeline and clips)
static ZmqLogger * Instance()
Create or get an instance of this logger singleton (invoke the class with this method) ...
int GetInt(long int index)
Get the rounded INT value at a specific index.
Align clip to the center of its parent (middle aligned)
void Open()
Open the reader (and start consuming resources)
void ApplyJsonDiff(string value)
Apply a special formatted JSON object, which represents a change to the timeline (add, update, delete) This is primarily designed to keep the timeline (and its child objects... such as clips and effects) in sync with another application... such as OpenShot Video Editor (http://www.openshot.org).
tr1::shared_ptr< Frame > GetFrame(long int frame_number)
Get a frame from the cache.
void Clear()
Clear the cache of all frames.
This namespace is the default namespace for all code in the openshot library.
Do not apply pull-down techniques, just repeat or skip entire frames.
void RemoveClip(Clip *clip)
Remove an openshot::Clip from the timeline.
void RemoveEffect(EffectBase *effect)
Remove an effect from the timeline.
Exception for invalid JSON.
Keyframe alpha
Curve representing the alpha (1 to 0)
Keyframe viewport_x
Curve representing the x coordinate for the viewport.
void SetJsonValue(Json::Value root)
Load Json::JsonValue into this object.
Keyframe scale_x
Curve representing the horizontal scaling in percent (0 to 1)
string GetColorHex(long int frame_number)
Get the HEX value of a color at a specific frame.
Color color
Background color of timeline canvas.
Timeline(int width, int height, Fraction fps, int sample_rate, int channels, ChannelLayout channel_layout)
Default Constructor for the timeline (which sets the canvas width and height and FPS) ...
This class returns a listing of all effects supported by libopenshot.
Align clip to the top center of its parent.
void SetJson(string value)
Load JSON string into this object.
int den
Denominator for the fraction.
int channels
The number of audio channels used in the audio stream.
A Keyframe is a collection of Point instances, which is used to vary a number or property over time...
Scale the clip until either height or width fills the canvas (with no cropping)
long int video_length
The number of frames in the video stream.
void AddEffect(EffectBase *effect)
Add an effect to the timeline.
int GetSamplesPerFrame(Fraction fps, int sample_rate, int channels)
Calculate the # of samples per video frame (for the current frame number)
Json::Value JsonValue()
Generate Json::JsonValue for this object.
float Duration()
Get the length of this clip (in seconds)
float Start()
Get start position (in seconds) of clip (trim start of video)
int sample_rate
The number of audio samples per second (44100 is a common sample rate)
Exception when too many seek attempts happen.