28 #include "../include/Timeline.h" 34 is_open(false), auto_map_clips(true)
76 apply_mapper_to_clip(clip);
79 clips.push_back(clip);
89 effects.push_back(effect);
98 effects.remove(effect);
108 void Timeline::apply_mapper_to_clip(
Clip* clip)
115 if (clip->
Reader()->Name() ==
"FrameMapper")
131 clip->
Reader(clip_reader);
141 list<Clip*>::iterator clip_itr;
142 for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
145 Clip *clip = (*clip_itr);
148 apply_mapper_to_clip(clip);
153 double Timeline::calculate_time(int64_t number,
Fraction rate)
156 double raw_fps = rate.
ToFloat();
159 return double(number - 1) / raw_fps;
163 std::shared_ptr<Frame> Timeline::apply_effects(std::shared_ptr<Frame> frame, int64_t timeline_frame_number,
int layer)
166 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::apply_effects",
"frame->number", frame->number,
"timeline_frame_number", timeline_frame_number,
"layer", layer,
"", -1,
"", -1,
"", -1);
169 list<EffectBase*>::iterator effect_itr;
170 for (effect_itr=effects.begin(); effect_itr != effects.end(); ++effect_itr)
179 bool does_effect_intersect = (effect_start_position <= timeline_frame_number && effect_end_position >= timeline_frame_number && effect->
Layer() == layer);
182 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::apply_effects (Does effect intersect)",
"effect->Position()", effect->
Position(),
"does_effect_intersect", does_effect_intersect,
"timeline_frame_number", timeline_frame_number,
"layer", layer,
"", -1,
"", -1);
185 if (does_effect_intersect)
189 long effect_frame_number = timeline_frame_number - effect_start_position + effect_start_frame;
192 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::apply_effects (Process Effect)",
"effect_frame_number", effect_frame_number,
"does_effect_intersect", does_effect_intersect,
"", -1,
"", -1,
"", -1,
"", -1);
195 frame = effect->
GetFrame(frame, effect_frame_number);
205 std::shared_ptr<Frame> Timeline::GetOrCreateFrame(
Clip* clip, int64_t number)
207 std::shared_ptr<Frame> new_frame;
214 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetOrCreateFrame (from reader)",
"number", number,
"samples_in_frame", samples_in_frame,
"", -1,
"", -1,
"", -1,
"", -1);
220 #pragma omp critical (T_GetOtCreateFrame) 221 new_frame = std::shared_ptr<Frame>(clip->
GetFrame(number));
235 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetOrCreateFrame (create blank)",
"number", number,
"samples_in_frame", samples_in_frame,
"", -1,
"", -1,
"", -1,
"", -1);
239 #pragma omp critical (T_GetOtCreateFrame) 248 void Timeline::add_layer(std::shared_ptr<Frame> new_frame,
Clip* source_clip, int64_t clip_frame_number, int64_t timeline_frame_number,
bool is_top_clip,
float max_volume)
251 std::shared_ptr<Frame> source_frame;
252 #pragma omp critical (T_addLayer) 253 source_frame = GetOrCreateFrame(source_clip, clip_frame_number);
260 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer",
"new_frame->number", new_frame->number,
"clip_frame_number", clip_frame_number,
"timeline_frame_number", timeline_frame_number,
"", -1,
"", -1,
"", -1);
266 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Generate Waveform Image)",
"source_frame->number", source_frame->number,
"source_clip->Waveform()", source_clip->
Waveform(),
"clip_frame_number", clip_frame_number,
"", -1,
"", -1,
"", -1);
275 std::shared_ptr<QImage> source_image;
276 #pragma omp critical (T_addLayer) 278 source_frame->AddImage(std::shared_ptr<QImage>(source_image));
283 if (is_top_clip && source_frame)
284 #pragma omp critical (T_addLayer) 285 source_frame = apply_effects(source_frame, timeline_frame_number, source_clip->
Layer());
288 std::shared_ptr<QImage> source_image;
291 if (source_clip->
Reader()->info.has_audio) {
293 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Copy Audio)",
"source_clip->Reader()->info.has_audio", source_clip->
Reader()->info.has_audio,
"source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(),
"info.channels",
info.
channels,
"clip_frame_number", clip_frame_number,
"timeline_frame_number", timeline_frame_number,
"", -1);
296 for (
int channel = 0; channel < source_frame->GetAudioChannelsCount(); channel++)
299 float previous_volume = source_clip->
volume.
GetValue(clip_frame_number - 1);
307 previous_volume = previous_volume / max_volume;
308 volume = volume / max_volume;
312 previous_volume = previous_volume * 0.77;
313 volume = volume * 0.77;
317 if (channel_filter != -1 && channel_filter != channel)
321 if (previous_volume == 0.0 && volume == 0.0)
325 if (channel_mapping == -1)
326 channel_mapping = channel;
329 if (!isEqual(previous_volume, 1.0) || !isEqual(volume, 1.0))
330 source_frame->ApplyGainRamp(channel_mapping, 0, source_frame->GetAudioSamplesCount(), previous_volume, volume);
336 if (new_frame->GetAudioSamplesCount() != source_frame->GetAudioSamplesCount())
338 #pragma omp critical (T_addLayer)
343 #pragma omp critical (T_addLayer) 344 new_frame->AddAudio(
false, channel_mapping, 0, source_frame->GetAudioSamples(channel), source_frame->GetAudioSamplesCount(), 1.0);
349 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (No Audio Copied - Wrong # of Channels)",
"source_clip->Reader()->info.has_audio", source_clip->
Reader()->info.has_audio,
"source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(),
"info.channels",
info.
channels,
"clip_frame_number", clip_frame_number,
"timeline_frame_number", timeline_frame_number,
"", -1);
354 if (!source_clip->
Waveform() && !source_clip->
Reader()->info.has_video)
359 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Get Source Image)",
"source_frame->number", source_frame->number,
"source_clip->Waveform()", source_clip->
Waveform(),
"clip_frame_number", clip_frame_number,
"", -1,
"", -1,
"", -1);
362 source_image = source_frame->GetImage();
367 float alpha = source_clip->
alpha.
GetValue(clip_frame_number);
370 unsigned char *pixels = (
unsigned char *) source_image->bits();
373 for (
int pixel = 0, byte_index=0; pixel < source_image->width() * source_image->height(); pixel++, byte_index+=4)
376 int A = pixels[byte_index + 3];
379 pixels[byte_index + 3] *= alpha;
383 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Set Alpha & Opacity)",
"alpha", alpha,
"source_frame->number", source_frame->number,
"clip_frame_number", clip_frame_number,
"", -1,
"", -1,
"", -1);
387 QSize source_size = source_image->size();
388 switch (source_clip->
scale)
395 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Scale: SCALE_FIT)",
"source_frame->number", source_frame->number,
"source_width", source_size.width(),
"source_height", source_size.height(),
"", -1,
"", -1,
"", -1);
403 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Scale: SCALE_STRETCH)",
"source_frame->number", source_frame->number,
"source_width", source_size.width(),
"source_height", source_size.height(),
"", -1,
"", -1,
"", -1);
407 QSize width_size(
max_width, round(
max_width / (
float(source_size.width()) /
float(source_size.height()))));
408 QSize height_size(round(
max_height / (
float(source_size.height()) /
float(source_size.width()))),
max_height);
412 source_size.scale(width_size.width(), width_size.height(), Qt::KeepAspectRatio);
414 source_size.scale(height_size.width(), height_size.height(), Qt::KeepAspectRatio);
417 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Scale: SCALE_CROP)",
"source_frame->number", source_frame->number,
"source_width", source_size.width(),
"source_height", source_size.height(),
"", -1,
"", -1,
"", -1);
428 float scaled_source_width = source_size.width() * sx;
429 float scaled_source_height = source_size.height() * sy;
434 x = (
max_width - scaled_source_width) / 2.0;
440 y = (
max_height - scaled_source_height) / 2.0;
443 x = (
max_width - scaled_source_width) / 2.0;
444 y = (
max_height - scaled_source_height) / 2.0;
448 y = (
max_height - scaled_source_height) / 2.0;
454 x = (
max_width - scaled_source_width) / 2.0;
464 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Gravity)",
"source_frame->number", source_frame->number,
"source_clip->gravity", source_clip->
gravity,
"info.width",
info.
width,
"scaled_source_width", scaled_source_width,
"info.height",
info.
height,
"scaled_source_height", scaled_source_height);
473 bool transformed =
false;
474 QTransform transform;
477 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Build QTransform - if needed)",
"source_frame->number", source_frame->number,
"x", x,
"y", y,
"r", r,
"sx", sx,
"sy", sy);
479 if (!isEqual(r, 0)) {
481 float origin_x = x + (scaled_source_width / 2.0);
482 float origin_y = y + (scaled_source_height / 2.0);
483 transform.translate(origin_x, origin_y);
485 transform.translate(-origin_x,-origin_y);
489 if (!isEqual(x, 0) || !isEqual(y, 0)) {
491 transform.translate(x, y);
496 float source_width_scale = (float(source_size.width()) /
float(source_image->width())) * sx;
497 float source_height_scale = (float(source_size.height()) /
float(source_image->height())) * sy;
499 if (!isEqual(source_width_scale, 1.0) || !isEqual(source_height_scale, 1.0)) {
500 transform.scale(source_width_scale, source_height_scale);
504 if (!isEqual(shear_x, 0) || !isEqual(shear_y, 0)) {
506 transform.shear(shear_x, shear_y);
511 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Transform: Composite Image Layer: Prepare)",
"source_frame->number", source_frame->number,
"new_frame->GetImage()->width()", new_frame->GetImage()->width(),
"transformed", transformed,
"", -1,
"", -1,
"", -1);
514 std::shared_ptr<QImage> new_image;
515 #pragma omp critical (T_addLayer) 516 new_image = new_frame->GetImage();
519 QPainter painter(new_image.get());
520 painter.setRenderHints(QPainter::Antialiasing | QPainter::SmoothPixmapTransform | QPainter::TextAntialiasing,
true);
524 painter.setTransform(transform);
527 painter.setCompositionMode(QPainter::CompositionMode_SourceOver);
528 painter.drawImage(0, 0, *source_image);
532 stringstream frame_number_str;
536 frame_number_str << clip_frame_number;
540 frame_number_str << timeline_frame_number;
544 frame_number_str << timeline_frame_number <<
" (" << clip_frame_number <<
")";
549 painter.setPen(QColor(
"#ffffff"));
550 painter.drawText(20, 20, QString(frame_number_str.str().c_str()));
556 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Transform: Composite Image Layer: Completed)",
"source_frame->number", source_frame->number,
"new_frame->GetImage()->width()", new_frame->GetImage()->width(),
"transformed", transformed,
"", -1,
"", -1,
"", -1);
560 void Timeline::update_open_clips(
Clip *clip,
bool does_clip_intersect)
562 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::update_open_clips (before)",
"does_clip_intersect", does_clip_intersect,
"closing_clips.size()", closing_clips.size(),
"open_clips.size()", open_clips.size(),
"", -1,
"", -1,
"", -1);
565 bool clip_found = open_clips.count(clip);
567 if (clip_found && !does_clip_intersect)
570 open_clips.erase(clip);
575 else if (!clip_found && does_clip_intersect)
578 open_clips[clip] = clip;
590 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::update_open_clips (after)",
"does_clip_intersect", does_clip_intersect,
"clip_found", clip_found,
"closing_clips.size()", closing_clips.size(),
"open_clips.size()", open_clips.size(),
"", -1,
"", -1);
594 void Timeline::sort_clips()
597 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::SortClips",
"clips.size()", clips.size(),
"", -1,
"", -1,
"", -1,
"", -1,
"", -1);
604 void Timeline::sort_effects()
613 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::Close",
"", -1,
"", -1,
"", -1,
"", -1,
"", -1,
"", -1);
616 list<Clip*>::iterator clip_itr;
617 for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
620 Clip *clip = (*clip_itr);
623 update_open_clips(clip,
false);
630 final_cache->
Clear();
640 bool Timeline::isEqual(
double a,
double b)
642 return fabs(a - b) < 0.000001;
649 if (requested_frame < 1)
653 std::shared_ptr<Frame> frame;
654 #pragma omp critical (T_GetFrame) 655 frame = final_cache->
GetFrame(requested_frame);
658 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Cached frame found)",
"requested_frame", requested_frame,
"", -1,
"", -1,
"", -1,
"", -1,
"", -1);
670 throw ReaderClosed(
"The Timeline is closed. Call Open() before calling this method.",
"");
673 #pragma omp critical (T_GetFrame) 674 frame = final_cache->
GetFrame(requested_frame);
677 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Cached frame found on 2nd look)",
"requested_frame", requested_frame,
"", -1,
"", -1,
"", -1,
"", -1,
"", -1);
688 vector<Clip*> nearby_clips;
689 #pragma omp critical (T_GetFrame) 690 nearby_clips = find_intersecting_clips(requested_frame, minimum_frames,
true);
694 omp_set_nested(
true);
697 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame",
"requested_frame", requested_frame,
"minimum_frames", minimum_frames,
"OPEN_MP_NUM_PROCESSORS",
OPEN_MP_NUM_PROCESSORS,
"", -1,
"", -1,
"", -1);
701 for (int64_t frame_number = requested_frame; frame_number < requested_frame + minimum_frames; frame_number++)
704 for (
int clip_index = 0; clip_index < nearby_clips.size(); clip_index++)
707 Clip *clip = nearby_clips[clip_index];
711 bool does_clip_intersect = (clip_start_position <= frame_number && clip_end_position >= frame_number);
712 if (does_clip_intersect)
716 long clip_frame_number = frame_number - clip_start_position + clip_start_frame;
726 #pragma omp for ordered firstprivate(nearby_clips, requested_frame, minimum_frames) schedule(static,1) 727 for (int64_t frame_number = requested_frame; frame_number < requested_frame + minimum_frames; frame_number++)
730 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (processing frame)",
"frame_number", frame_number,
"omp_get_thread_num()", omp_get_thread_num(),
"", -1,
"", -1,
"", -1,
"", -1);
737 #pragma omp critical (T_GetFrame) 739 new_frame->AddAudioSilence(samples_in_frame);
745 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Adding solid color)",
"frame_number", frame_number,
"info.width",
info.
width,
"info.height",
info.
height,
"", -1,
"", -1,
"", -1);
753 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Loop through clips)",
"frame_number", frame_number,
"clips.size()", clips.size(),
"nearby_clips.size()", nearby_clips.size(),
"", -1,
"", -1,
"", -1);
756 for (
int clip_index = 0; clip_index < nearby_clips.size(); clip_index++)
759 Clip *clip = nearby_clips[clip_index];
763 bool does_clip_intersect = (clip_start_position <= frame_number && clip_end_position >= frame_number);
766 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Does clip intersect)",
"frame_number", frame_number,
"clip->Position()", clip->
Position(),
"clip->Duration()", clip->
Duration(),
"does_clip_intersect", does_clip_intersect,
"", -1,
"", -1);
769 if (does_clip_intersect)
772 bool is_top_clip =
true;
773 float max_volume = 0.0;
774 for (
int top_clip_index = 0; top_clip_index < nearby_clips.size(); top_clip_index++)
776 Clip *nearby_clip = nearby_clips[top_clip_index];
780 long nearby_clip_frame_number = frame_number - nearby_clip_start_position + nearby_clip_start_frame;
783 if (clip->
Id() != nearby_clip->
Id() && clip->
Layer() == nearby_clip->
Layer() &&
784 nearby_clip_start_position <= frame_number && nearby_clip_end_position >= frame_number &&
785 nearby_clip_start_position > clip_start_position && is_top_clip ==
true) {
790 if (nearby_clip->
Reader() && nearby_clip->
Reader()->info.has_audio &&
792 nearby_clip_start_position <= frame_number && nearby_clip_end_position >= frame_number) {
793 max_volume += nearby_clip->
volume.
GetValue(nearby_clip_frame_number);
799 long clip_frame_number = frame_number - clip_start_position + clip_start_frame;
802 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Calculate clip's frame #)",
"clip->Position()", clip->
Position(),
"clip->Start()", clip->
Start(),
"info.fps.ToFloat()",
info.
fps.
ToFloat(),
"clip_frame_number", clip_frame_number,
"", -1,
"", -1);
805 add_layer(new_frame, clip, clip_frame_number, frame_number, is_top_clip, max_volume);
809 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (clip does not intersect)",
"frame_number", frame_number,
"does_clip_intersect", does_clip_intersect,
"", -1,
"", -1,
"", -1,
"", -1);
814 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Add frame to cache)",
"frame_number", frame_number,
"info.width",
info.
width,
"info.height",
info.
height,
"", -1,
"", -1,
"", -1);
819 new_frame->SetFrameNumber(frame_number);
822 final_cache->
Add(new_frame);
829 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (end parallel region)",
"requested_frame", requested_frame,
"omp_get_thread_num()", omp_get_thread_num(),
"", -1,
"", -1,
"", -1,
"", -1);
832 return final_cache->
GetFrame(requested_frame);
838 vector<Clip*> Timeline::find_intersecting_clips(int64_t requested_frame,
int number_of_frames,
bool include)
841 vector<Clip*> matching_clips;
844 float min_requested_frame = requested_frame;
845 float max_requested_frame = requested_frame + (number_of_frames - 1);
851 list<Clip*>::iterator clip_itr;
852 for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
855 Clip *clip = (*clip_itr);
861 bool does_clip_intersect =
862 (clip_start_position <= min_requested_frame || clip_start_position <= max_requested_frame) &&
863 (clip_end_position >= min_requested_frame || clip_end_position >= max_requested_frame);
866 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::find_intersecting_clips (Is clip near or intersecting)",
"requested_frame", requested_frame,
"min_requested_frame", min_requested_frame,
"max_requested_frame", max_requested_frame,
"clip->Position()", clip->
Position(),
"does_clip_intersect", does_clip_intersect,
"", -1);
869 #pragma omp critical (reader_lock) 870 update_open_clips(clip, does_clip_intersect);
873 if (does_clip_intersect && include)
875 matching_clips.push_back(clip);
877 else if (!does_clip_intersect && !include)
879 matching_clips.push_back(clip);
884 return matching_clips;
890 final_cache = new_cache;
905 root[
"type"] =
"Timeline";
912 root[
"clips"] = Json::Value(Json::arrayValue);
915 list<Clip*>::iterator clip_itr;
916 for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
919 Clip *existing_clip = (*clip_itr);
920 root[
"clips"].append(existing_clip->
JsonValue());
924 root[
"effects"] = Json::Value(Json::arrayValue);
927 list<EffectBase*>::iterator effect_itr;
928 for (effect_itr=effects.begin(); effect_itr != effects.end(); ++effect_itr)
932 root[
"effects"].append(existing_effect->
JsonValue());
948 bool success = reader.parse( value, root );
951 throw InvalidJSON(
"JSON could not be parsed (or is invalid)",
"");
961 throw InvalidJSON(
"JSON is invalid (missing keys or invalid data types)",
"");
969 bool was_open = is_open;
975 if (!root[
"clips"].isNull()) {
980 for (
int x = 0; x < root[
"clips"].size(); x++) {
982 Json::Value existing_clip = root[
"clips"][x];
995 if (!root[
"effects"].isNull()) {
1000 for (
int x = 0; x < root[
"effects"].size(); x++) {
1002 Json::Value existing_effect = root[
"effects"][x];
1007 if (!existing_effect[
"type"].isNull()) {
1021 if (!root[
"duration"].isNull()) {
1040 Json::Reader reader;
1041 bool success = reader.parse( value, root );
1042 if (!success || !root.isArray())
1044 throw InvalidJSON(
"JSON could not be parsed (or is invalid).",
"");
1049 for (
int x = 0; x < root.size(); x++) {
1051 Json::Value change = root[x];
1052 string root_key = change[
"key"][(uint)0].asString();
1055 if (root_key ==
"clips")
1057 apply_json_to_clips(change);
1059 else if (root_key ==
"effects")
1061 apply_json_to_effects(change);
1065 apply_json_to_timeline(change);
1072 throw InvalidJSON(
"JSON is invalid (missing keys or invalid data types)",
"");
1077 void Timeline::apply_json_to_clips(Json::Value change) {
1080 string change_type = change[
"type"].asString();
1081 string clip_id =
"";
1082 Clip *existing_clip = NULL;
1085 for (
int x = 0; x < change[
"key"].size(); x++) {
1087 Json::Value key_part = change[
"key"][x];
1089 if (key_part.isObject()) {
1091 if (!key_part[
"id"].isNull()) {
1093 clip_id = key_part[
"id"].asString();
1096 list<Clip*>::iterator clip_itr;
1097 for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
1100 Clip *c = (*clip_itr);
1101 if (c->
Id() == clip_id) {
1113 if (existing_clip && change[
"key"].size() == 4 && change[
"key"][2] ==
"effects")
1116 Json::Value key_part = change[
"key"][3];
1118 if (key_part.isObject()) {
1120 if (!key_part[
"id"].isNull())
1123 string effect_id = key_part[
"id"].asString();
1126 list<EffectBase*> effect_list = existing_clip->
Effects();
1127 list<EffectBase*>::iterator effect_itr;
1128 for (effect_itr=effect_list.begin(); effect_itr != effect_list.end(); ++effect_itr)
1132 if (e->
Id() == effect_id) {
1134 apply_json_to_effects(change, e);
1139 final_cache->
Remove(new_starting_frame - 8, new_ending_frame + 8);
1149 if (!change[
"value"].isArray() && !change[
"value"][
"position"].isNull()) {
1150 int64_t new_starting_frame = (change[
"value"][
"position"].asDouble() *
info.
fps.
ToDouble()) + 1;
1151 int64_t new_ending_frame = ((change[
"value"][
"position"].asDouble() + change[
"value"][
"end"].asDouble() - change[
"value"][
"start"].asDouble()) *
info.
fps.
ToDouble()) + 1;
1152 final_cache->
Remove(new_starting_frame - 8, new_ending_frame + 8);
1156 if (change_type ==
"insert") {
1164 apply_mapper_to_clip(clip);
1166 }
else if (change_type ==
"update") {
1169 if (existing_clip) {
1174 final_cache->
Remove(old_starting_frame - 8, old_ending_frame + 8);
1177 if (existing_clip->
Reader() && existing_clip->
Reader()->GetCache())
1178 existing_clip->
Reader()->GetCache()->Remove(old_starting_frame - 8, old_ending_frame + 8);
1184 apply_mapper_to_clip(existing_clip);
1188 if (existing_clip->
Reader()) {
1189 existing_clip->
Reader()->SetMaxSize(0, 0);
1190 if (existing_clip->
Reader()->Name() ==
"FrameMapper") {
1192 if (nested_reader->
Reader())
1198 }
else if (change_type ==
"delete") {
1201 if (existing_clip) {
1206 final_cache->
Remove(old_starting_frame - 8, old_ending_frame + 8);
1217 void Timeline::apply_json_to_effects(Json::Value change) {
1220 string change_type = change[
"type"].asString();
1224 for (
int x = 0; x < change[
"key"].size(); x++) {
1226 Json::Value key_part = change[
"key"][x];
1228 if (key_part.isObject()) {
1230 if (!key_part[
"id"].isNull())
1233 string effect_id = key_part[
"id"].asString();
1236 list<EffectBase*>::iterator effect_itr;
1237 for (effect_itr=effects.begin(); effect_itr != effects.end(); ++effect_itr)
1241 if (e->
Id() == effect_id) {
1242 existing_effect =
e;
1252 if (existing_effect || change_type ==
"insert")
1254 apply_json_to_effects(change, existing_effect);
1258 void Timeline::apply_json_to_effects(Json::Value change,
EffectBase* existing_effect) {
1261 string change_type = change[
"type"].asString();
1264 if (!change[
"value"].isArray() && !change[
"value"][
"position"].isNull()) {
1265 int64_t new_starting_frame = (change[
"value"][
"position"].asDouble() *
info.
fps.
ToDouble()) + 1;
1266 int64_t new_ending_frame = ((change[
"value"][
"position"].asDouble() + change[
"value"][
"end"].asDouble() - change[
"value"][
"start"].asDouble()) *
info.
fps.
ToDouble()) + 1;
1267 final_cache->
Remove(new_starting_frame - 8, new_ending_frame + 8);
1271 if (change_type ==
"insert") {
1274 string effect_type = change[
"value"][
"type"].asString();
1280 if (e =
EffectInfo().CreateEffect(effect_type)) {
1289 }
else if (change_type ==
"update") {
1292 if (existing_effect) {
1297 final_cache->
Remove(old_starting_frame - 8, old_ending_frame + 8);
1303 }
else if (change_type ==
"delete") {
1306 if (existing_effect) {
1311 final_cache->
Remove(old_starting_frame - 8, old_ending_frame + 8);
1321 void Timeline::apply_json_to_timeline(Json::Value change) {
1324 string change_type = change[
"type"].asString();
1325 string root_key = change[
"key"][(uint)0].asString();
1326 string sub_key =
"";
1327 if (change[
"key"].size() >= 2)
1328 sub_key = change[
"key"][(uint)1].asString();
1331 final_cache->
Clear();
1334 if (change_type ==
"insert" || change_type ==
"update") {
1338 if (root_key ==
"color")
1341 else if (root_key ==
"viewport_scale")
1344 else if (root_key ==
"viewport_x")
1347 else if (root_key ==
"viewport_y")
1350 else if (root_key ==
"duration") {
1355 else if (root_key ==
"width")
1358 else if (root_key ==
"height")
1361 else if (root_key ==
"fps" && sub_key ==
"" && change[
"value"].isObject()) {
1363 if (!change[
"value"][
"num"].isNull())
1364 info.
fps.
num = change[
"value"][
"num"].asInt();
1365 if (!change[
"value"][
"den"].isNull())
1366 info.
fps.
den = change[
"value"][
"den"].asInt();
1368 else if (root_key ==
"fps" && sub_key ==
"num")
1371 else if (root_key ==
"fps" && sub_key ==
"den")
1374 else if (root_key ==
"sample_rate")
1377 else if (root_key ==
"channels")
1380 else if (root_key ==
"channel_layout")
1387 throw InvalidJSONKey(
"JSON change key is invalid", change.toStyledString());
1390 }
else if (change[
"type"].asString() ==
"delete") {
1394 if (root_key ==
"color") {
1400 else if (root_key ==
"viewport_scale")
1402 else if (root_key ==
"viewport_x")
1404 else if (root_key ==
"viewport_y")
1408 throw InvalidJSONKey(
"JSON change key is invalid", change.toStyledString());
1421 final_cache->
Clear();
1424 list<Clip*>::iterator clip_itr;
1425 for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
1428 Clip *clip = (*clip_itr);
1431 clip->
Reader()->GetCache()->Clear();
1434 if (clip->
Reader()->Name() ==
"FrameMapper") {
void SetJsonValue(Json::Value root)
Load Json::JsonValue into this object.
int max_height
The maximum image height needed by this clip (used for optimizations)
Display the timeline's frame number.
void Close()
Close the internal reader.
string Json()
Get and Set JSON methods.
Json::Value JsonValue()
Generate Json::JsonValue for this object.
int num
Numerator for the fraction.
Keyframe scale_y
Curve representing the vertical scaling in percent (0 to 1)
ReaderBase * Reader()
Get the current reader.
CriticalSection getFrameCriticalSection
Section lock for multiple threads.
This abstract class is the base class, used by all effects in libopenshot.
EffectBase * CreateEffect(string effect_type)
Align clip to the right of its parent (middle aligned)
Keyframe green
Curve representing the green value (0 - 255)
Keyframe viewport_scale
Curve representing the scale of the viewport (0 to 100)
Align clip to the bottom right of its parent.
void SetCache(CacheBase *new_cache)
Set the cache object used by this reader.
Json::Value JsonValue()
Generate Json::JsonValue for this object.
ChannelLayout channel_layout
The channel layout (mono, stereo, 5 point surround, etc...)
GravityType gravity
The gravity of a clip determines where it snaps to its parent.
Keyframe alpha
Curve representing the alpha value (0 - 255)
int width
The width of the video (in pixels)
Keyframe volume
Curve representing the volume (0 to 1)
virtual std::shared_ptr< Frame > GetFrame(std::shared_ptr< Frame > frame, int64_t frame_number)=0
This method is required for all derived classes of EffectBase, and returns a modified openshot::Frame...
float ToFloat()
Return this fraction as a float (i.e. 1/2 = 0.5)
Keyframe red
Curve representing the red value (0 - 255)
float duration
Length of time (in seconds)
string GetColorHex(int64_t frame_number)
Get the HEX value of a color at a specific frame.
Json::Value JsonValue()
Generate Json::JsonValue for this object.
void SetMaxBytesFromInfo(int64_t number_of_frames, int width, int height, int sample_rate, int channels)
Set maximum bytes to a different amount based on a ReaderInfo struct.
Scale the clip until both height and width fill the canvas (cropping the overlap) ...
Evenly divide the overlapping clips volume keyframes, so that the sum does not exceed 100%...
Keyframe viewport_y
Curve representing the y coordinate for the viewport.
Fraction Reciprocal()
Return the reciprocal as a Fraction.
This abstract class is the base class, used by all readers in libopenshot.
int Layer()
Get layer of clip on timeline (lower number is covered by higher numbers)
#define OPEN_MP_NUM_PROCESSORS
Keyframe has_audio
Override has_video and has_audio properties of clip (and their readers)
Exception when a reader is closed, and a frame is requested.
bool has_video
Determines if this file has a video stream.
Do not display the frame number.
std::shared_ptr< Frame > GetFrame(int64_t requested_frame)
Get an openshot::Frame object for a specific frame number of this timeline.
Color wave_color
Curve representing the color of the audio wave form.
Align clip to the top right of its parent.
virtual Json::Value JsonValue()=0
Generate Json::JsonValue for this object.
Align clip to the bottom left of its parent.
void SetJsonValue(Json::Value root)
Load Json::JsonValue into this object.
void SetJsonValue(Json::Value root)
Load Json::JsonValue into this object.
Exception for missing JSON Change key.
Keyframe location_x
Curve representing the relative X position in percent based on the gravity (-1 to 1) ...
Keyframe location_y
Curve representing the relative Y position in percent based on the gravity (-1 to 1) ...
void SetMaxSize(int width, int height)
Set Max Image Size (used for performance optimization)
bool has_audio
Determines if this file has an audio stream.
This class represents a clip (used to arrange readers on the timeline)
void ChangeMapping(Fraction target_fps, PulldownType pulldown, int target_sample_rate, int target_channels, ChannelLayout target_channel_layout)
Change frame rate or audio mapping details.
Keyframe blue
Curve representing the blue value (0 - 255)
virtual std::shared_ptr< Frame > GetFrame(int64_t frame_number)=0
Get a frame from the cache.
Keyframe shear_x
Curve representing X shear angle in degrees (-45.0=left, 45.0=right)
bool Waveform()
Waveform property.
void SetMaxSize(int width, int height)
Set Max Image Size (used for performance optimization)
int64_t video_length
The number of frames in the video stream.
ScaleType scale
The scale determines how a clip should be resized to fit its parent.
int height
The height of the video (in pixels)
Align clip to the bottom center of its parent.
virtual void Remove(int64_t frame_number)=0
Remove a specific frame.
Exception for files that can not be found or opened.
string Id()
Get basic properties.
Keyframe channel_filter
Audio channel filter and mappings.
void ClearAllCache()
Clear all cache for this timeline instance, and all clips, mappers, and readers under it...
float Position()
Get position on timeline (in seconds)
static CrashHandler * Instance()
void ApplyMapperToClips()
Apply the timeline's framerate and samplerate to all clips.
void Reader(ReaderBase *new_reader)
Set the current reader.
list< EffectBase * > Effects()
Return the list of effects on the timeline.
void AppendDebugMethod(string method_name, string arg1_name, float arg1_value, string arg2_name, float arg2_value, string arg3_name, float arg3_value, string arg4_name, float arg4_value, string arg5_name, float arg5_value, string arg6_name, float arg6_value)
Append debug information.
FrameDisplayType display
The format to display the frame number (if any)
std::shared_ptr< Frame > GetFrame(int64_t requested_frame)
This class represents a fraction.
All cache managers in libopenshot are based on this CacheBase class.
Keyframe channel_mapping
A number representing an audio channel to output (only works when filtering a channel) ...
virtual void Add(std::shared_ptr< Frame > frame)=0
Add a Frame to the cache.
ChannelLayout
This enumeration determines the audio channel layout (such as stereo, mono, 5 point surround...
Align clip to the left of its parent (middle aligned)
void AddClip(Clip *clip)
Add an openshot::Clip to the timeline.
virtual Json::Value JsonValue()=0
Generate Json::JsonValue for this object.
virtual void SetJsonValue(Json::Value root)=0
Load Json::JsonValue into this object.
void Close()
Close the timeline reader (and any resources it was consuming)
int GetInt(int64_t index)
Get the rounded INT value at a specific index.
Keyframe rotation
Curve representing the rotation (0 to 360)
virtual void SetJsonValue(Json::Value root)=0
Load Json::JsonValue into this object.
Scale the clip until both height and width fill the canvas (distort to fit)
Display the clip's internal frame number.
vector< Point > Points
Vector of all Points.
ReaderInfo info
Information about the current media file.
Keyframe shear_y
Curve representing Y shear angle in degrees (-45.0=down, 45.0=up)
Fraction fps
Frames per second, as a fraction (i.e. 24/1 = 24 fps)
Fraction video_timebase
The video timebase determines how long each frame stays on the screen.
Exception for frames that are out of bounds.
This class creates a mapping between 2 different frame rates, applying a specific pull-down technique...
void Open()
Open the internal reader.
This class represents a color (used on the timeline and clips)
static ZmqLogger * Instance()
Create or get an instance of this logger singleton (invoke the class with this method) ...
Reduce volume by about 25%, and then mix (louder, but could cause pops if the sum exceeds 100%) ...
Align clip to the center of its parent (middle aligned)
void Open()
Open the reader (and start consuming resources)
void ApplyJsonDiff(string value)
Apply a special formatted JSON object, which represents a change to the timeline (add, update, delete) This is primarily designed to keep the timeline (and its child objects... such as clips and effects) in sync with another application... such as OpenShot Video Editor (http://www.openshot.org).
double GetValue(int64_t index)
Get the value at a specific index.
Display both the clip's and timeline's frame number.
This namespace is the default namespace for all code in the openshot library.
Do not apply pull-down techniques, just repeat or skip entire frames.
virtual void Clear()=0
Clear the cache of all frames.
void RemoveClip(Clip *clip)
Remove an openshot::Clip from the timeline.
void RemoveEffect(EffectBase *effect)
Remove an effect from the timeline.
Exception for invalid JSON.
Keyframe alpha
Curve representing the alpha (1 to 0)
virtual CacheBase * GetCache()=0
Get the cache object used by this reader (note: not all readers use cache)
Keyframe viewport_x
Curve representing the x coordinate for the viewport.
void SetJsonValue(Json::Value root)
Load Json::JsonValue into this object.
Keyframe scale_x
Curve representing the horizontal scaling in percent (0 to 1)
VolumeMixType mixing
What strategy should be followed when mixing audio with other clips.
Color color
Background color of timeline canvas.
Timeline(int width, int height, Fraction fps, int sample_rate, int channels, ChannelLayout channel_layout)
Default Constructor for the timeline (which sets the canvas width and height and FPS) ...
This class returns a listing of all effects supported by libopenshot.
Align clip to the top center of its parent.
void SetJson(string value)
Load JSON string into this object.
int den
Denominator for the fraction.
int channels
The number of audio channels used in the audio stream.
A Keyframe is a collection of Point instances, which is used to vary a number or property over time...
Scale the clip until either height or width fills the canvas (with no cropping)
void AddEffect(EffectBase *effect)
Add an effect to the timeline.
int max_width
The maximum image width needed by this clip (used for optimizations)
int GetSamplesPerFrame(Fraction fps, int sample_rate, int channels)
Calculate the # of samples per video frame (for the current frame number)
Json::Value JsonValue()
Generate Json::JsonValue for this object.
float Duration()
Get the length of this clip (in seconds)
This class is a memory-based cache manager for Frame objects.
float Start()
Get start position (in seconds) of clip (trim start of video)
double ToDouble()
Return this fraction as a double (i.e. 1/2 = 0.5)
int sample_rate
The number of audio samples per second (44100 is a common sample rate)
Exception when too many seek attempts happen.