diff --git a/fuzzer/fuzzing.cpp b/fuzzer/fuzzing.cpp
index bc791ed95..239eadee4 100644
--- a/fuzzer/fuzzing.cpp
+++ b/fuzzer/fuzzing.cpp
@@ -1,419 +1,422 @@
/***************************************************************************
* Copyright (C) 2019 by Nicolas Carion *
* This file is part of Kdenlive. See www.kdenlive.org. *
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) version 3 or any later version accepted by the *
* membership of KDE e.V. (or its successor approved by the membership *
* of KDE e.V.), which shall act as a proxy defined in Section 14 of *
* version 3 of the license. *
* *
* This program is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU General Public License for more details. *
* *
* You should have received a copy of the GNU General Public License *
* along with this program. If not, see . *
***************************************************************************/
#include "fuzzing.hpp"
#include "bin/model/markerlistmodel.hpp"
#include "doc/docundostack.hpp"
#include "fakeit_standalone.hpp"
#include "logger.hpp"
#include
#include
#include
#include
#include
#define private public
#define protected public
#include "assets/keyframes/model/keyframemodel.hpp"
#include "assets/model/assetparametermodel.hpp"
#include "bin/clipcreator.hpp"
#include "bin/projectclip.h"
#include "bin/projectfolder.h"
#include "bin/projectitemmodel.h"
#include "core.h"
#include "effects/effectsrepository.hpp"
#include "effects/effectstack/model/effectitemmodel.hpp"
#include "effects/effectstack/model/effectstackmodel.hpp"
#include "mltconnection.h"
#include "project/projectmanager.h"
#include "timeline2/model/clipmodel.hpp"
#include "timeline2/model/compositionmodel.hpp"
#include "timeline2/model/groupsmodel.hpp"
#include "timeline2/model/timelinefunctions.hpp"
#include "timeline2/model/timelineitemmodel.hpp"
#include "timeline2/model/timelinemodel.hpp"
#include "timeline2/model/trackmodel.hpp"
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-parameter"
#pragma GCC diagnostic ignored "-Wsign-conversion"
#pragma GCC diagnostic ignored "-Wfloat-equal"
#pragma GCC diagnostic ignored "-Wshadow"
#pragma GCC diagnostic ignored "-Wpedantic"
#include
#pragma GCC diagnostic pop
using namespace fakeit;
namespace {
QString createProducer(Mlt::Profile &prof, std::string color, std::shared_ptr binModel, int length, bool limited)
{
Logger::log_create_producer("test_producer", {color, binModel, length, limited});
std::shared_ptr producer = std::make_shared(prof, "color", color.c_str());
producer->set("length", length);
producer->set("out", length - 1);
Q_ASSERT(producer->is_valid());
QString binId = QString::number(binModel->getFreeClipId());
auto binClip = ProjectClip::construct(binId, QIcon(), binModel, producer);
if (limited) {
binClip->forceLimitedDuration();
}
Fun undo = []() { return true; };
Fun redo = []() { return true; };
Q_ASSERT(binModel->addItem(binClip, binModel->getRootFolder()->clipId(), undo, redo));
return binId;
}
QString createProducerWithSound(Mlt::Profile &prof, std::shared_ptr binModel)
{
Logger::log_create_producer("test_producer_sound", {binModel});
// std::shared_ptr producer = std::make_shared(prof,
// QFileInfo("../tests/small.mkv").absoluteFilePath().toStdString().c_str());
// In case the test system does not have avformat support, we can switch to the integrated blipflash producer
std::shared_ptr producer = std::make_shared(prof, "blipflash");
producer->set_in_and_out(0, 1);
producer->set("kdenlive:duration", 2);
Q_ASSERT(producer->is_valid());
QString binId = QString::number(binModel->getFreeClipId());
auto binClip = ProjectClip::construct(binId, QIcon(), binModel, producer);
Fun undo = []() { return true; };
Fun redo = []() { return true; };
Q_ASSERT(binModel->addItem(binClip, binModel->getRootFolder()->clipId(), undo, redo));
return binId;
}
inline int modulo(int a, int b)
{
const int result = a % b;
return result >= 0 ? result : result + b;
}
namespace {
bool isIthParamARef(const rttr::method &method, size_t i)
{
QString sig = QString::fromStdString(method.get_signature().to_string());
int deb = sig.indexOf("(");
int end = sig.lastIndexOf(")");
sig = sig.mid(deb + 1, deb - end - 1);
QStringList args = sig.split(QStringLiteral(","));
return args[(int)i].contains("&") && !args[(int)i].contains("const &");
}
} // namespace
} // namespace
void fuzz(const std::string &input)
{
Logger::init();
std::stringstream ss;
ss << input;
Mlt::Profile profile;
auto binModel = pCore->projectItemModel();
binModel->clean();
std::shared_ptr undoStack = std::make_shared(nullptr);
std::shared_ptr guideModel = std::make_shared(undoStack);
TimelineModel::next_id = 0;
Mock pmMock;
When(Method(pmMock, undoStack)).AlwaysReturn(undoStack);
ProjectManager &mocked = pmMock.get();
pCore->m_projectManager = &mocked;
std::vector> all_timelines;
std::unordered_map, std::vector> all_clips, all_tracks, all_compositions;
auto update_elems = [&]() {
all_clips.clear();
all_tracks.clear();
all_compositions.clear();
for (const auto &timeline : all_timelines) {
all_clips[timeline] = {};
all_tracks[timeline] = {};
all_compositions[timeline] = {};
auto &clips = all_clips[timeline];
clips.clear();
for (const auto &c : timeline->m_allClips) {
clips.push_back(c.first);
}
std::sort(clips.begin(), clips.end());
auto &compositions = all_compositions[timeline];
compositions.clear();
for (const auto &c : timeline->m_allCompositions) {
compositions.push_back(c.first);
}
std::sort(compositions.begin(), compositions.end());
auto &tracks = all_tracks[timeline];
tracks.clear();
for (const auto &c : timeline->m_iteratorTable) {
tracks.push_back(c.first);
}
std::sort(tracks.begin(), tracks.end());
}
};
auto get_timeline = [&]() -> std::shared_ptr {
int id = 0;
ss >> id;
if (all_timelines.size() == 0) return nullptr;
id = modulo(id, (int)all_timelines.size());
return all_timelines[size_t(id)];
};
auto get_clip = [&](std::shared_ptr timeline) {
int id = 0;
ss >> id;
if (!timeline) return -1;
if (timeline->isClip(id)) return id;
if (all_timelines.size() == 0) return -1;
if (all_clips.count(timeline) == 0) return -1;
if (all_clips[timeline].size() == 0) return -1;
id = modulo(id, (int)all_clips[timeline].size());
return all_clips[timeline][id];
};
auto get_compo = [&](std::shared_ptr timeline) {
int id = 0;
ss >> id;
if (!timeline) return -1;
if (timeline->isComposition(id)) return id;
if (all_timelines.size() == 0) return -1;
if (all_compositions.count(timeline) == 0) return -1;
if (all_compositions[timeline].size() == 0) return -1;
id = modulo(id, (int)all_compositions[timeline].size());
return all_compositions[timeline][id];
};
auto get_item = [&](std::shared_ptr timeline) {
int id = 0;
ss >> id;
if (!timeline) return -1;
if (timeline->isClip(id)) return id;
if (timeline->isComposition(id)) return id;
if (all_timelines.size() == 0) return -1;
int clip_count = 0;
if (all_clips.count(timeline) > 0) {
clip_count = all_clips[timeline].size();
}
int compo_count = 0;
if (all_compositions.count(timeline) > 0) {
compo_count = all_compositions[timeline].size();
}
if (clip_count + compo_count == 0) return -1;
id = modulo(id, clip_count + compo_count);
if (id < clip_count) {
return all_clips[timeline][id];
}
return all_compositions[timeline][id - clip_count];
};
auto get_track = [&](std::shared_ptr timeline) {
int id = 0;
ss >> id;
if (!timeline) return -1;
if (timeline->isTrack(id)) return id;
if (all_timelines.size() == 0) return -1;
if (all_tracks.count(timeline) == 0) return -1;
if (all_tracks[timeline].size() == 0) return -1;
id = modulo(id, (int)all_tracks[timeline].size());
return all_tracks[timeline][id];
};
std::string c;
while (ss >> c) {
if (Logger::back_translation_table.count(c) > 0) {
// std::cout << "found=" << c;
c = Logger::back_translation_table[c];
// std::cout << " tranlated=" << c << std::endl;
if (c == "constr_TimelineModel") {
all_timelines.emplace_back(TimelineItemModel::construct(&profile, guideModel, undoStack));
} else if (c == "constr_TrackModel") {
auto timeline = get_timeline();
int id, pos = 0;
std::string name;
bool audio = false;
ss >> id >> pos >> name >> audio;
if (name == "$$") {
name = "";
}
if (pos < -1) pos = 0;
pos = std::min((int)all_tracks[timeline].size(), pos);
if (timeline) {
TrackModel::construct(timeline, -1, pos, QString::fromStdString(name), audio);
}
} else if (c == "constr_test_producer") {
std::string color;
int length = 0;
bool limited = false;
ss >> color >> length >> limited;
createProducer(profile, color, binModel, length, limited);
} else if (c == "constr_test_producer_sound") {
createProducerWithSound(profile, binModel);
} else {
// std::cout << "executing " << c << std::endl;
rttr::type target_type = rttr::type::get();
bool found = false;
for (const std::string &t : {"TimelineModel"}) {
rttr::type current_type = rttr::type::get_by_name(t);
// std::cout << "type " << t << " has methods count=" << current_type.get_methods().size() << std::endl;
if (current_type.get_method(c).is_valid()) {
found = true;
target_type = current_type;
break;
}
}
if (found) {
bool valid = true;
rttr::method target_method = target_type.get_method(c);
std::vector arguments;
rttr::variant ptr;
if (target_type == rttr::type::get()) {
if (all_timelines.size() == 0) {
valid = false;
}
ptr = get_timeline();
}
int i = -1;
for (const auto &p : target_method.get_parameter_infos()) {
++i;
std::string arg_name = p.get_name().to_string();
// std::cout << arg_name << std::endl;
if (arg_name == "compoId") {
std::shared_ptr tim =
(ptr.can_convert>() ? ptr.convert>() : nullptr);
int compoId = get_compo(tim);
valid = valid && (compoId >= 0);
// std::cout << "got compo" << compoId << std::endl;
arguments.push_back(compoId);
} else if (arg_name == "clipId") {
std::shared_ptr tim =
(ptr.can_convert>() ? ptr.convert>() : nullptr);
int clipId = get_clip(tim);
valid = valid && (clipId >= 0);
arguments.push_back(clipId);
// std::cout << "got clipId" << clipId << std::endl;
} else if (arg_name == "trackId") {
std::shared_ptr tim =
(ptr.can_convert>() ? ptr.convert>() : nullptr);
int trackId = get_track(tim);
valid = valid && (trackId >= 0);
arguments.push_back(rttr::variant(trackId));
// std::cout << "got trackId" << trackId << std::endl;
} else if (arg_name == "itemId") {
std::shared_ptr tim =
(ptr.can_convert>() ? ptr.convert>() : nullptr);
int itemId = get_item(tim);
valid = valid && (itemId >= 0);
arguments.push_back(itemId);
// std::cout << "got itemId" << itemId << std::endl;
} else if (arg_name == "ids") {
int count = 0;
ss >> count;
// std::cout << "got ids. going to read count=" << count << std::endl;
if (count > 0) {
std::shared_ptr tim =
(ptr.can_convert>() ? ptr.convert>() : nullptr);
std::unordered_set ids;
for (int i = 0; i < count; ++i) {
int itemId = get_item(tim);
// std::cout << "\t read" << itemId << std::endl;
valid = valid && (itemId >= 0);
ids.insert(itemId);
}
arguments.push_back(ids);
} else {
valid = false;
}
} else if (!isIthParamARef(target_method, i)) {
rttr::type arg_type = p.get_type();
if (arg_type == rttr::type::get()) {
int a = 0;
ss >> a;
// std::cout << "read int " << a << std::endl;
arguments.push_back(a);
} else if (arg_type == rttr::type::get()) {
bool a = false;
ss >> a;
// std::cout << "read bool " << a << std::endl;
arguments.push_back(a);
} else if (arg_type == rttr::type::get()) {
std::string str = "";
ss >> str;
// std::cout << "read str " << str << std::endl;
if (str == "$$") {
str = "";
}
arguments.push_back(QString::fromStdString(str));
} else if (arg_type.is_enumeration()) {
int a = 0;
ss >> a;
rttr::variant var_a = a;
var_a.convert((const rttr::type &)arg_type);
// std::cout << "read enum " << arg_type.get_enumeration().value_to_name(var_a).to_string() << std::endl;
arguments.push_back(var_a);
} else {
assert(false);
}
} else {
if (p.get_type() == rttr::type::get()) {
arguments.push_back(-1);
} else {
assert(false);
}
}
}
if (valid) {
// std::cout << "VALID!!!" << std::endl;
std::vector args;
for (const auto &a : arguments) {
args.emplace_back(a);
// std::cout<<"argument="<checkConsistency());
}
}
}
all_clips.clear();
all_tracks.clear();
all_compositions.clear();
for (size_t i = 0; i < all_timelines.size(); ++i) {
all_timelines[i].reset();
}
pCore->m_projectManager = nullptr;
Core::m_self.reset();
MltConnection::m_self.reset();
+ std::cout << "---------------------------------------------------------------------------------------------------------------------------------------------"
+ "---------------"
+ << std::endl;
}
diff --git a/src/bin/projectclip.cpp b/src/bin/projectclip.cpp
index 77082e169..abfcf9e39 100644
--- a/src/bin/projectclip.cpp
+++ b/src/bin/projectclip.cpp
@@ -1,1444 +1,1444 @@
/*
Copyright (C) 2012 Till Theato
Copyright (C) 2014 Jean-Baptiste Mardelle
This file is part of Kdenlive. See www.kdenlive.org.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation; either version 2 of
the License or (at your option) version 3 or any later version
accepted by the membership of KDE e.V. (or its successor approved
by the membership of KDE e.V.), which shall act as a proxy
defined in Section 14 of version 3 of the license.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see .
*/
#include "projectclip.h"
#include "bin.h"
#include "core.h"
#include "doc/docundostack.hpp"
#include "doc/kdenlivedoc.h"
#include "doc/kthumb.h"
#include "effects/effectstack/model/effectstackmodel.hpp"
#include "jobs/jobmanager.h"
#include "jobs/loadjob.hpp"
#include "jobs/thumbjob.hpp"
#include "kdenlivesettings.h"
#include "lib/audio/audioStreamInfo.h"
#include "mltcontroller/clipcontroller.h"
#include "mltcontroller/clippropertiescontroller.h"
#include "model/markerlistmodel.hpp"
#include "profiles/profilemodel.hpp"
#include "project/projectcommands.h"
#include "project/projectmanager.h"
#include "projectfolder.h"
#include "projectitemmodel.h"
#include "projectsubclip.h"
#include "timecode.h"
#include "timeline2/model/snapmodel.hpp"
#include "utils/thumbnailcache.hpp"
#include "xml/xml.hpp"
#include
#include
#include
#include "kdenlive_debug.h"
#include "logger.hpp"
#include
#include
#include
#include
#include
#include
#include
#include
#include
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-parameter"
#pragma GCC diagnostic ignored "-Wsign-conversion"
#pragma GCC diagnostic ignored "-Wfloat-equal"
#pragma GCC diagnostic ignored "-Wshadow"
#pragma GCC diagnostic ignored "-Wpedantic"
#include
#pragma GCC diagnostic pop
RTTR_REGISTRATION
{
using namespace rttr;
registration::class_("ProjectClip");
}
ProjectClip::ProjectClip(const QString &id, const QIcon &thumb, std::shared_ptr model, std::shared_ptr producer)
: AbstractProjectItem(AbstractProjectItem::ClipItem, id, model)
, ClipController(id, producer)
, m_thumbsProducer(nullptr)
{
m_markerModel = std::make_shared(id, pCore->projectManager()->undoStack());
m_clipStatus = StatusReady;
m_name = clipName();
m_duration = getStringDuration();
m_inPoint = 0;
m_date = date;
m_description = ClipController::description();
if (m_clipType == ClipType::Audio) {
m_thumbnail = QIcon::fromTheme(QStringLiteral("audio-x-generic"));
} else {
m_thumbnail = thumb;
}
// Make sure we have a hash for this clip
hash();
connect(m_markerModel.get(), &MarkerListModel::modelChanged, [&]() { setProducerProperty(QStringLiteral("kdenlive:markers"), m_markerModel->toJson()); });
QString markers = getProducerProperty(QStringLiteral("kdenlive:markers"));
if (!markers.isEmpty()) {
QMetaObject::invokeMethod(m_markerModel.get(), "importFromJson", Qt::QueuedConnection, Q_ARG(const QString &, markers), Q_ARG(bool, true),
Q_ARG(bool, false));
}
connectEffectStack();
}
// static
std::shared_ptr ProjectClip::construct(const QString &id, const QIcon &thumb, std::shared_ptr model,
std::shared_ptr producer)
{
std::shared_ptr self(new ProjectClip(id, thumb, model, producer));
baseFinishConstruct(self);
self->m_effectStack->importEffects(producer, PlaylistState::Disabled, true);
model->loadSubClips(id, self->getPropertiesFromPrefix(QStringLiteral("kdenlive:clipzone.")));
return self;
}
ProjectClip::ProjectClip(const QString &id, const QDomElement &description, const QIcon &thumb, std::shared_ptr model)
: AbstractProjectItem(AbstractProjectItem::ClipItem, id, model)
, ClipController(id)
, m_thumbsProducer(nullptr)
{
m_clipStatus = StatusWaiting;
m_thumbnail = thumb;
m_markerModel = std::make_shared(m_binId, pCore->projectManager()->undoStack());
if (description.hasAttribute(QStringLiteral("type"))) {
m_clipType = (ClipType::ProducerType)description.attribute(QStringLiteral("type")).toInt();
if (m_clipType == ClipType::Audio) {
m_thumbnail = QIcon::fromTheme(QStringLiteral("audio-x-generic"));
}
}
m_temporaryUrl = getXmlProperty(description, QStringLiteral("resource"));
QString clipName = getXmlProperty(description, QStringLiteral("kdenlive:clipname"));
if (!clipName.isEmpty()) {
m_name = clipName;
} else if (!m_temporaryUrl.isEmpty()) {
m_name = QFileInfo(m_temporaryUrl).fileName();
} else {
m_name = i18n("Untitled");
}
connect(m_markerModel.get(), &MarkerListModel::modelChanged, [&]() { setProducerProperty(QStringLiteral("kdenlive:markers"), m_markerModel->toJson()); });
}
std::shared_ptr ProjectClip::construct(const QString &id, const QDomElement &description, const QIcon &thumb,
std::shared_ptr model)
{
std::shared_ptr self(new ProjectClip(id, description, thumb, model));
baseFinishConstruct(self);
return self;
}
ProjectClip::~ProjectClip()
{
// controller is deleted in bincontroller
m_thumbMutex.lock();
m_requestedThumbs.clear();
m_thumbMutex.unlock();
m_thumbThread.waitForFinished();
audioFrameCache.clear();
}
void ProjectClip::connectEffectStack()
{
connect(m_effectStack.get(), &EffectStackModel::modelChanged, this, &ProjectClip::updateChildProducers);
connect(m_effectStack.get(), &EffectStackModel::dataChanged, this, &ProjectClip::updateChildProducers);
connect(m_effectStack.get(), &EffectStackModel::dataChanged, [&]() {
if (auto ptr = m_model.lock()) {
std::static_pointer_cast(ptr)->onItemUpdated(std::static_pointer_cast(shared_from_this()),
AbstractProjectItem::IconOverlay);
}
});
/*connect(m_effectStack.get(), &EffectStackModel::modelChanged, [&](){
qDebug()<<"/ / / STACK CHANGED";
updateChildProducers();
});*/
}
QString ProjectClip::getToolTip() const
{
return url();
}
QString ProjectClip::getXmlProperty(const QDomElement &producer, const QString &propertyName, const QString &defaultValue)
{
QString value = defaultValue;
QDomNodeList props = producer.elementsByTagName(QStringLiteral("property"));
for (int i = 0; i < props.count(); ++i) {
if (props.at(i).toElement().attribute(QStringLiteral("name")) == propertyName) {
value = props.at(i).firstChild().nodeValue();
break;
}
}
return value;
}
void ProjectClip::updateAudioThumbnail(QVariantList audioLevels)
{
std::swap(audioFrameCache, audioLevels); // avoid second copy
m_audioThumbCreated = true;
if (auto ptr = m_model.lock()) {
emit std::static_pointer_cast(ptr)->refreshAudioThumbs(m_binId);
}
updateTimelineClips({TimelineModel::AudioLevelsRole});
}
bool ProjectClip::audioThumbCreated() const
{
return (m_audioThumbCreated);
}
ClipType::ProducerType ProjectClip::clipType() const
{
return m_clipType;
}
bool ProjectClip::hasParent(const QString &id) const
{
std::shared_ptr par = parent();
while (par) {
if (par->clipId() == id) {
return true;
}
par = par->parent();
}
return false;
}
std::shared_ptr ProjectClip::clip(const QString &id)
{
if (id == m_binId) {
return std::static_pointer_cast(shared_from_this());
}
return std::shared_ptr();
}
std::shared_ptr ProjectClip::folder(const QString &id)
{
Q_UNUSED(id)
return std::shared_ptr();
}
std::shared_ptr ProjectClip::getSubClip(int in, int out)
{
for (int i = 0; i < childCount(); ++i) {
std::shared_ptr clip = std::static_pointer_cast(child(i))->subClip(in, out);
if (clip) {
return clip;
}
}
return std::shared_ptr();
}
QStringList ProjectClip::subClipIds() const
{
QStringList subIds;
for (int i = 0; i < childCount(); ++i) {
std::shared_ptr clip = std::static_pointer_cast(child(i));
if (clip) {
subIds << clip->clipId();
}
}
return subIds;
}
std::shared_ptr ProjectClip::clipAt(int ix)
{
if (ix == row()) {
return std::static_pointer_cast(shared_from_this());
}
return std::shared_ptr();
}
/*bool ProjectClip::isValid() const
{
return m_controller->isValid();
}*/
bool ProjectClip::hasUrl() const
{
if ((m_clipType != ClipType::Color) && (m_clipType != ClipType::Unknown)) {
return (!clipUrl().isEmpty());
}
return false;
}
const QString ProjectClip::url() const
{
return clipUrl();
}
GenTime ProjectClip::duration() const
{
return getPlaytime();
}
size_t ProjectClip::frameDuration() const
{
GenTime d = duration();
return (size_t)d.frames(pCore->getCurrentFps());
}
void ProjectClip::reloadProducer(bool refreshOnly)
{
// we find if there are some loading job on that clip
int loadjobId = -1;
pCore->jobManager()->hasPendingJob(clipId(), AbstractClipJob::LOADJOB, &loadjobId);
QMutexLocker lock(&m_thumbMutex);
if (refreshOnly) {
// In that case, we only want a new thumbnail.
// We thus set up a thumb job. We must make sure that there is no pending LOADJOB
// Clear cache first
ThumbnailCache::get()->invalidateThumbsForClip(clipId());
pCore->jobManager()->discardJobs(clipId(), AbstractClipJob::THUMBJOB);
m_thumbsProducer.reset();
pCore->jobManager()->startJob({clipId()}, loadjobId, QString(), 150, -1, true, true);
} else {
// If another load job is running?
if (loadjobId > -1) {
pCore->jobManager()->discardJobs(clipId(), AbstractClipJob::LOADJOB);
}
QDomDocument doc;
QDomElement xml = toXml(doc);
if (!xml.isNull()) {
pCore->jobManager()->discardJobs(clipId(), AbstractClipJob::THUMBJOB);
m_thumbsProducer.reset();
ThumbnailCache::get()->invalidateThumbsForClip(clipId());
int loadJob = pCore->jobManager()->startJob({clipId()}, loadjobId, QString(), xml);
pCore->jobManager()->startJob({clipId()}, loadJob, QString(), 150, -1, true, true);
}
}
}
QDomElement ProjectClip::toXml(QDomDocument &document, bool includeMeta)
{
getProducerXML(document, includeMeta);
QDomElement prod = document.documentElement().firstChildElement(QStringLiteral("producer"));
if (m_clipType != ClipType::Unknown) {
prod.setAttribute(QStringLiteral("type"), (int)m_clipType);
}
return prod;
}
void ProjectClip::setThumbnail(const QImage &img)
{
QPixmap thumb = roundedPixmap(QPixmap::fromImage(img));
if (hasProxy() && !thumb.isNull()) {
// Overlay proxy icon
QPainter p(&thumb);
QColor c(220, 220, 10, 200);
QRect r(0, 0, thumb.height() / 2.5, thumb.height() / 2.5);
p.fillRect(r, c);
QFont font = p.font();
font.setPixelSize(r.height());
font.setBold(true);
p.setFont(font);
p.setPen(Qt::black);
p.drawText(r, Qt::AlignCenter, i18nc("The first letter of Proxy, used as abbreviation", "P"));
}
m_thumbnail = QIcon(thumb);
if (auto ptr = m_model.lock()) {
std::static_pointer_cast(ptr)->onItemUpdated(std::static_pointer_cast(shared_from_this()),
AbstractProjectItem::DataThumbnail);
}
}
bool ProjectClip::hasAudioAndVideo() const
{
return hasAudio() && hasVideo() && m_masterProducer->get_int("set.test_image") == 0 && m_masterProducer->get_int("set.test_audio") == 0;
}
bool ProjectClip::isCompatible(PlaylistState::ClipState state) const
{
switch (state) {
case PlaylistState::AudioOnly:
return hasAudio() && (m_masterProducer->get_int("set.test_audio") == 0);
case PlaylistState::VideoOnly:
return hasVideo() && (m_masterProducer->get_int("set.test_image") == 0);
default:
return true;
}
}
QPixmap ProjectClip::thumbnail(int width, int height)
{
return m_thumbnail.pixmap(width, height);
}
bool ProjectClip::setProducer(std::shared_ptr producer, bool replaceProducer)
{
Q_UNUSED(replaceProducer)
qDebug() << "################### ProjectClip::setproducer";
QMutexLocker locker(&m_producerMutex);
updateProducer(std::move(producer));
m_thumbsProducer.reset();
connectEffectStack();
// Update info
if (m_name.isEmpty()) {
m_name = clipName();
}
m_date = date;
m_description = ClipController::description();
m_temporaryUrl.clear();
if (m_clipType == ClipType::Audio) {
m_thumbnail = QIcon::fromTheme(QStringLiteral("audio-x-generic"));
} else if (m_clipType == ClipType::Image) {
if (producer->get_int("meta.media.width") < 8 || producer->get_int("meta.media.height") < 8) {
KMessageBox::information(QApplication::activeWindow(),
i18n("Image dimension smaller than 8 pixels.\nThis is not correctly supported by our video framework."));
}
}
m_duration = getStringDuration();
m_clipStatus = StatusReady;
if (!hasProxy()) {
if (auto ptr = m_model.lock()) emit std::static_pointer_cast(ptr)->refreshPanel(m_binId);
}
if (auto ptr = m_model.lock()) {
std::static_pointer_cast(ptr)->onItemUpdated(std::static_pointer_cast(shared_from_this()),
AbstractProjectItem::DataDuration);
std::static_pointer_cast(ptr)->updateWatcher(std::static_pointer_cast(shared_from_this()));
}
// Make sure we have a hash for this clip
getFileHash();
// set parent again (some info need to be stored in producer)
updateParent(parentItem().lock());
if (pCore->currentDoc()->getDocumentProperty(QStringLiteral("enableproxy")).toInt() == 1) {
QList> clipList;
// automatic proxy generation enabled
if (m_clipType == ClipType::Image && pCore->currentDoc()->getDocumentProperty(QStringLiteral("generateimageproxy")).toInt() == 1) {
if (getProducerIntProperty(QStringLiteral("meta.media.width")) >= KdenliveSettings::proxyimageminsize() &&
getProducerProperty(QStringLiteral("kdenlive:proxy")) == QStringLiteral()) {
clipList << std::static_pointer_cast(shared_from_this());
}
} else if (pCore->currentDoc()->getDocumentProperty(QStringLiteral("generateproxy")).toInt() == 1 &&
(m_clipType == ClipType::AV || m_clipType == ClipType::Video) && getProducerProperty(QStringLiteral("kdenlive:proxy")) == QStringLiteral()) {
bool skipProducer = false;
if (pCore->currentDoc()->getDocumentProperty(QStringLiteral("enableexternalproxy")).toInt() == 1) {
QStringList externalParams = pCore->currentDoc()->getDocumentProperty(QStringLiteral("externalproxyparams")).split(QLatin1Char(';'));
// We have a camcorder profile, check if we have opened a proxy clip
if (externalParams.count() >= 6) {
QFileInfo info(m_path);
QDir dir = info.absoluteDir();
dir.cd(externalParams.at(3));
QString fileName = info.fileName();
if (!externalParams.at(2).isEmpty()) {
fileName.chop(externalParams.at(2).size());
}
fileName.append(externalParams.at(5));
if (dir.exists(fileName)) {
setProducerProperty(QStringLiteral("kdenlive:proxy"), m_path);
m_path = dir.absoluteFilePath(fileName);
setProducerProperty(QStringLiteral("kdenlive:originalurl"), m_path);
getFileHash();
skipProducer = true;
}
}
}
if (!skipProducer && getProducerIntProperty(QStringLiteral("meta.media.width")) >= KdenliveSettings::proxyminsize()) {
clipList << std::static_pointer_cast(shared_from_this());
}
}
if (!clipList.isEmpty()) {
pCore->currentDoc()->slotProxyCurrentItem(true, clipList, false);
}
}
pCore->bin()->reloadMonitorIfActive(clipId());
for (auto &p : m_audioProducers) {
m_effectStack->removeService(p.second);
}
for (auto &p : m_videoProducers) {
m_effectStack->removeService(p.second);
}
for (auto &p : m_timewarpProducers) {
m_effectStack->removeService(p.second);
}
// Release audio producers
m_audioProducers.clear();
m_videoProducers.clear();
m_timewarpProducers.clear();
emit refreshPropertiesPanel();
replaceInTimeline();
return true;
}
std::shared_ptr ProjectClip::thumbProducer()
{
if (m_thumbsProducer) {
return m_thumbsProducer;
}
if (clipType() == ClipType::Unknown) {
return nullptr;
}
QMutexLocker lock(&m_thumbMutex);
std::shared_ptr prod = originalProducer();
if (!prod->is_valid()) {
return nullptr;
}
if (KdenliveSettings::gpu_accel()) {
// TODO: when the original producer changes, we must reload this thumb producer
m_thumbsProducer = softClone(ClipController::getPassPropertiesList());
Mlt::Filter converter(*prod->profile(), "avcolor_space");
m_thumbsProducer->attach(converter);
} else {
QString mltService = m_masterProducer->get("mlt_service");
const QString mltResource = m_masterProducer->get("resource");
if (mltService == QLatin1String("avformat")) {
mltService = QStringLiteral("avformat-novalidate");
}
m_thumbsProducer.reset(new Mlt::Producer(*pCore->thumbProfile(), mltService.toUtf8().constData(), mltResource.toUtf8().constData()));
if (m_thumbsProducer->is_valid()) {
Mlt::Properties original(m_masterProducer->get_properties());
Mlt::Properties cloneProps(m_thumbsProducer->get_properties());
cloneProps.pass_list(original, ClipController::getPassPropertiesList());
Mlt::Filter scaler(*pCore->thumbProfile(), "swscale");
Mlt::Filter padder(*pCore->thumbProfile(), "resize");
Mlt::Filter converter(*pCore->thumbProfile(), "avcolor_space");
m_thumbsProducer->set("audio_index", -1);
m_thumbsProducer->attach(scaler);
m_thumbsProducer->attach(padder);
m_thumbsProducer->attach(converter);
}
}
return m_thumbsProducer;
}
void ProjectClip::createDisabledMasterProducer()
{
if (!m_disabledProducer) {
m_disabledProducer = cloneProducer(&pCore->getCurrentProfile()->profile());
m_disabledProducer->set("set.test_audio", 1);
m_disabledProducer->set("set.test_image", 1);
m_effectStack->addService(m_disabledProducer);
}
}
std::shared_ptr ProjectClip::getTimelineProducer(int clipId, PlaylistState::ClipState state, double speed)
{
if (!m_masterProducer) {
return nullptr;
}
if (qFuzzyCompare(speed, 1.0)) {
// we are requesting a normal speed producer
// We can first cleen the speed producers we have for the current id
if (m_timewarpProducers.count(clipId) > 0) {
m_effectStack->removeService(m_timewarpProducers[clipId]);
m_timewarpProducers.erase(clipId);
}
if (state == PlaylistState::AudioOnly) {
// We need to get an audio producer, if none exists
if (m_audioProducers.count(clipId) == 0) {
m_audioProducers[clipId] = cloneProducer(&pCore->getCurrentProfile()->profile(), true);
m_audioProducers[clipId]->set("set.test_audio", 0);
m_audioProducers[clipId]->set("set.test_image", 1);
m_effectStack->addService(m_audioProducers[clipId]);
}
return std::shared_ptr(m_audioProducers[clipId]->cut());
}
if (m_audioProducers.count(clipId) > 0) {
m_effectStack->removeService(m_audioProducers[clipId]);
m_audioProducers.erase(clipId);
}
if (state == PlaylistState::VideoOnly) {
// we return the video producer
// We need to get an audio producer, if none exists
if (m_clipType == ClipType::Color || m_clipType == ClipType::Image || m_clipType == ClipType::Text) {
int duration = m_masterProducer->time_to_frames(m_masterProducer->get("kdenlive:duration"));
return std::shared_ptr(m_masterProducer->cut(-1, duration > 0 ? duration : -1));
}
if (m_videoProducers.count(clipId) == 0) {
m_videoProducers[clipId] = cloneProducer(&pCore->getCurrentProfile()->profile(), true);
m_videoProducers[clipId]->set("set.test_audio", 1);
m_videoProducers[clipId]->set("set.test_image", 0);
m_effectStack->addService(m_videoProducers[clipId]);
}
int duration = m_masterProducer->time_to_frames(m_masterProducer->get("kdenlive:duration"));
return std::shared_ptr(m_videoProducers[clipId]->cut(-1, duration > 0 ? duration : -1));
}
if (m_videoProducers.count(clipId) > 0) {
m_effectStack->removeService(m_videoProducers[clipId]);
m_videoProducers.erase(clipId);
}
Q_ASSERT(state == PlaylistState::Disabled);
createDisabledMasterProducer();
int duration = m_masterProducer->time_to_frames(m_masterProducer->get("kdenlive:duration"));
return std::shared_ptr(m_disabledProducer->cut(-1, duration > 0 ? duration : -1));
}
// in that case, we need to create a warp producer, if we don't have one
if (m_audioProducers.count(clipId) > 0) {
m_effectStack->removeService(m_audioProducers[clipId]);
m_audioProducers.erase(clipId);
}
if (m_videoProducers.count(clipId) > 0) {
m_effectStack->removeService(m_videoProducers[clipId]);
m_videoProducers.erase(clipId);
}
std::shared_ptr warpProducer;
if (m_timewarpProducers.count(clipId) > 0) {
// remove in all cases, we add it unconditionally anyways
m_effectStack->removeService(m_timewarpProducers[clipId]);
if (qFuzzyCompare(m_timewarpProducers[clipId]->get_double("warp_speed"), speed)) {
// the producer we have is good, use it !
warpProducer = m_timewarpProducers[clipId];
qDebug() << "Reusing producer!";
} else {
m_timewarpProducers.erase(clipId);
}
}
if (!warpProducer) {
QLocale locale;
QString resource(originalProducer()->get("resource"));
if (resource.isEmpty() || resource == QLatin1String("")) {
resource = m_service;
}
QString url = QString("timewarp:%1:%2").arg(locale.toString(speed)).arg(resource);
- warpProducer.reset(new Mlt::Producer(*originalProducer()->profile(), url.toUtf8().constData()));
+ warpProducer.reset(new Mlt::Producer(originalProducer()->get_profile(), url.toUtf8().constData()));
qDebug() << "new producer: " << url;
qDebug() << "warp LENGTH before" << warpProducer->get_length();
int original_length = originalProducer()->get_length();
// this is a workaround to cope with Mlt erroneous rounding
warpProducer->set("length", double(original_length) / speed);
}
qDebug() << "warp LENGTH" << warpProducer->get_length();
warpProducer->set("set.test_audio", 1);
warpProducer->set("set.test_image", 1);
if (state == PlaylistState::AudioOnly) {
warpProducer->set("set.test_audio", 0);
}
if (state == PlaylistState::VideoOnly) {
warpProducer->set("set.test_image", 0);
}
m_timewarpProducers[clipId] = warpProducer;
m_effectStack->addService(m_timewarpProducers[clipId]);
return std::shared_ptr(warpProducer->cut());
}
std::pair, bool> ProjectClip::giveMasterAndGetTimelineProducer(int clipId, std::shared_ptr master,
PlaylistState::ClipState state)
{
int in = master->get_in();
int out = master->get_out();
if (master->parent().is_valid()) {
// in that case, we have a cut
// check whether it's a timewarp
double speed = 1.0;
bool timeWarp = false;
if (QString::fromUtf8(master->parent().get("mlt_service")) == QLatin1String("timewarp")) {
speed = master->parent().get_double("warp_speed");
timeWarp = true;
}
if (master->parent().get_int("_loaded") == 1) {
// we already have a clip that shares the same master
if (state != PlaylistState::Disabled || timeWarp) {
// In that case, we must create copies
std::shared_ptr prod(getTimelineProducer(clipId, state, speed)->cut(in, out));
return {prod, false};
}
if (state == PlaylistState::Disabled && !m_disabledProducer) {
qDebug() << "Warning: weird, we found a disabled clip whose master is already loaded but we don't have any yet";
createDisabledMasterProducer();
return {std::shared_ptr(m_disabledProducer->cut(in, out)), false};
}
if (state == PlaylistState::Disabled && QString::fromUtf8(m_disabledProducer->get("id")) != QString::fromUtf8(master->parent().get("id"))) {
qDebug() << "Warning: weird, we found a disabled clip whose master is already loaded but doesn't match ours";
return {std::shared_ptr(m_disabledProducer->cut(in, out)), false};
}
// We have a good id, this clip can be used
return {master, true};
} else {
master->parent().set("_loaded", 1);
if (timeWarp) {
m_timewarpProducers[clipId] = std::shared_ptr(new Mlt::Producer(&master->parent()));
m_effectStack->loadService(m_timewarpProducers[clipId]);
return {master, true};
}
if (state == PlaylistState::AudioOnly) {
m_audioProducers[clipId] = std::shared_ptr(new Mlt::Producer(&master->parent()));
m_effectStack->loadService(m_audioProducers[clipId]);
return {master, true};
}
if (state == PlaylistState::VideoOnly) {
// good, we found a master video producer, and we didn't have any
m_videoProducers[clipId] = std::shared_ptr(new Mlt::Producer(&master->parent()));
m_effectStack->loadService(m_videoProducers[clipId]);
return {master, true};
}
if (state == PlaylistState::Disabled && !m_disabledProducer) {
// good, we found a master disabled producer, and we didn't have any
m_disabledProducer.reset(master->parent().cut());
m_effectStack->loadService(m_disabledProducer);
return {master, true};
}
qDebug() << "Warning: weird, we found a clip whose master is not loaded but we already have a master";
Q_ASSERT(false);
}
} else if (master->is_valid()) {
// in that case, we have a master
qDebug() << "Warning: weird, we received a master clip in lieue of a cut";
double speed = 1.0;
if (QString::fromUtf8(master->parent().get("mlt_service")) == QLatin1String("timewarp")) {
speed = master->get_double("warp_speed");
}
return {getTimelineProducer(clipId, state, speed), false};
}
// we have a problem
return {std::shared_ptr(ClipController::mediaUnavailable->cut()), false};
}
/*
std::shared_ptr ProjectClip::timelineProducer(PlaylistState::ClipState state, int track)
{
if (!m_service.startsWith(QLatin1String("avformat"))) {
std::shared_ptr prod(originalProducer()->cut());
int length = getProducerIntProperty(QStringLiteral("kdenlive:duration"));
if (length > 0) {
prod->set_in_and_out(0, length);
}
return prod;
}
if (state == PlaylistState::VideoOnly) {
if (m_timelineProducers.count(0) > 0) {
return std::shared_ptr(m_timelineProducers.find(0)->second->cut());
}
std::shared_ptr videoProd = cloneProducer();
videoProd->set("audio_index", -1);
m_timelineProducers[0] = videoProd;
return std::shared_ptr(videoProd->cut());
}
if (state == PlaylistState::AudioOnly) {
if (m_timelineProducers.count(-track) > 0) {
return std::shared_ptr(m_timelineProducers.find(-track)->second->cut());
}
std::shared_ptr audioProd = cloneProducer();
audioProd->set("video_index", -1);
m_timelineProducers[-track] = audioProd;
return std::shared_ptr(audioProd->cut());
}
if (m_timelineProducers.count(track) > 0) {
return std::shared_ptr(m_timelineProducers.find(track)->second->cut());
}
std::shared_ptr normalProd = cloneProducer();
m_timelineProducers[track] = normalProd;
return std::shared_ptr(normalProd->cut());
}*/
std::shared_ptr ProjectClip::cloneProducer(Mlt::Profile *destProfile, bool removeEffects)
{
Mlt::Profile master_profile(mlt_profile_clone(m_masterProducer->get_profile()));
Mlt::Consumer c(master_profile, "xml", "string");
Mlt::Service s(m_masterProducer->get_service());
int ignore = s.get_int("ignore_points");
if (ignore) {
s.set("ignore_points", 0);
}
c.connect(s);
c.set("time_format", "frames");
c.set("no_meta", 1);
c.set("no_root", 1);
c.set("no_profile", 1);
c.set("root", "/");
c.set("store", "kdenlive");
c.run();
if (ignore) {
s.set("ignore_points", ignore);
}
const QByteArray clipXml = c.get("string");
c.stop();
c.purge();
std::shared_ptr prod;
prod.reset(new Mlt::Producer(destProfile ? *destProfile : master_profile, "xml-string", clipXml.constData()));
if (strcmp(prod->get("mlt_service"), "avformat") == 0) {
prod->set("mlt_service", "avformat-novalidate");
}
if (removeEffects) {
int ct = 0;
Mlt::Filter *filter = prod->filter(ct);
while (filter) {
qDebug() << "// EFFECT " << ct << " : " << filter->get("mlt_service");
QString ix = QString::fromLatin1(filter->get("kdenlive_id"));
if (!ix.isEmpty()) {
qDebug() << "/ + + DELTING";
if (prod->detach(*filter) == 0) {
} else {
ct++;
}
} else {
ct++;
}
delete filter;
filter = prod->filter(ct);
}
}
prod->set("id", (char *)nullptr);
return prod;
}
std::shared_ptr ProjectClip::cloneProducer(std::shared_ptr producer)
{
Mlt::Consumer c(*producer->profile(), "xml", "string");
Mlt::Service s(producer->get_service());
int ignore = s.get_int("ignore_points");
if (ignore) {
s.set("ignore_points", 0);
}
c.connect(s);
c.set("time_format", "frames");
c.set("no_meta", 1);
c.set("no_root", 1);
c.set("no_profile", 1);
c.set("root", "/");
c.set("store", "kdenlive");
c.start();
if (ignore) {
s.set("ignore_points", ignore);
}
const QByteArray clipXml = c.get("string");
std::shared_ptr prod(new Mlt::Producer(*producer->profile(), "xml-string", clipXml.constData()));
if (strcmp(prod->get("mlt_service"), "avformat") == 0) {
prod->set("mlt_service", "avformat-novalidate");
}
return prod;
}
std::shared_ptr ProjectClip::softClone(const char *list)
{
QString service = QString::fromLatin1(m_masterProducer->get("mlt_service"));
QString resource = QString::fromLatin1(m_masterProducer->get("resource"));
std::shared_ptr clone(new Mlt::Producer(*m_masterProducer->profile(), service.toUtf8().constData(), resource.toUtf8().constData()));
Mlt::Properties original(m_masterProducer->get_properties());
Mlt::Properties cloneProps(clone->get_properties());
cloneProps.pass_list(original, list);
return clone;
}
bool ProjectClip::isReady() const
{
return m_clipStatus == StatusReady;
}
/*void ProjectClip::setZone(const QPoint &zone)
{
m_zone = zone;
}*/
QPoint ProjectClip::zone() const
{
int x = getProducerIntProperty(QStringLiteral("kdenlive:zone_in"));
int y = getProducerIntProperty(QStringLiteral("kdenlive:zone_out"));
if (y <= x) {
y = getFramePlaytime();
}
return QPoint(x, y);
}
const QString ProjectClip::hash()
{
QString clipHash = getProducerProperty(QStringLiteral("kdenlive:file_hash"));
if (!clipHash.isEmpty()) {
return clipHash;
}
return getFileHash();
}
const QString ProjectClip::getFileHash()
{
QByteArray fileData;
QByteArray fileHash;
switch (m_clipType) {
case ClipType::SlideShow:
fileData = clipUrl().toUtf8();
fileHash = QCryptographicHash::hash(fileData, QCryptographicHash::Md5);
break;
case ClipType::Text:
case ClipType::TextTemplate:
fileData = getProducerProperty(QStringLiteral("xmldata")).toUtf8();
fileHash = QCryptographicHash::hash(fileData, QCryptographicHash::Md5);
break;
case ClipType::QText:
fileData = getProducerProperty(QStringLiteral("text")).toUtf8();
fileHash = QCryptographicHash::hash(fileData, QCryptographicHash::Md5);
break;
case ClipType::Color:
fileData = getProducerProperty(QStringLiteral("resource")).toUtf8();
fileHash = QCryptographicHash::hash(fileData, QCryptographicHash::Md5);
break;
default:
QFile file(clipUrl());
if (file.open(QIODevice::ReadOnly)) { // write size and hash only if resource points to a file
/*
* 1 MB = 1 second per 450 files (or faster)
* 10 MB = 9 seconds per 450 files (or faster)
*/
if (file.size() > 2000000) {
fileData = file.read(1000000);
if (file.seek(file.size() - 1000000)) {
fileData.append(file.readAll());
}
} else {
fileData = file.readAll();
}
file.close();
ClipController::setProducerProperty(QStringLiteral("kdenlive:file_size"), QString::number(file.size()));
fileHash = QCryptographicHash::hash(fileData, QCryptographicHash::Md5);
}
break;
}
if (fileHash.isEmpty()) {
qDebug() << "// WARNING EMPTY CLIP HASH: ";
return QString();
}
QString result = fileHash.toHex();
ClipController::setProducerProperty(QStringLiteral("kdenlive:file_hash"), result);
return result;
}
double ProjectClip::getOriginalFps() const
{
return originalFps();
}
bool ProjectClip::hasProxy() const
{
QString proxy = getProducerProperty(QStringLiteral("kdenlive:proxy"));
return proxy.size() > 2;
}
void ProjectClip::setProperties(const QMap &properties, bool refreshPanel)
{
qDebug() << "// SETTING CLIP PROPERTIES: " << properties;
QMapIterator i(properties);
QMap passProperties;
bool refreshAnalysis = false;
bool reload = false;
bool refreshOnly = true;
if (properties.contains(QStringLiteral("templatetext"))) {
m_description = properties.value(QStringLiteral("templatetext"));
if (auto ptr = m_model.lock())
std::static_pointer_cast(ptr)->onItemUpdated(std::static_pointer_cast(shared_from_this()),
AbstractProjectItem::ClipStatus);
refreshPanel = true;
}
// Some properties also need to be passed to track producers
QStringList timelineProperties{QStringLiteral("force_aspect_ratio"), QStringLiteral("video_index"), QStringLiteral("audio_index"),
QStringLiteral("set.force_full_luma"), QStringLiteral("full_luma"), QStringLiteral("threads"),
QStringLiteral("force_colorspace"), QStringLiteral("force_tff"), QStringLiteral("force_progressive"),
QStringLiteral("video_index"), QStringLiteral("audio_index")};
QStringList forceReloadProperties{QStringLiteral("autorotate"), QStringLiteral("templatetext"), QStringLiteral("resource"),
QStringLiteral("force_fps"), QStringLiteral("set.test_image"), QStringLiteral("set.test_audio")};
QStringList keys{QStringLiteral("luma_duration"), QStringLiteral("luma_file"), QStringLiteral("fade"), QStringLiteral("ttl"),
QStringLiteral("softness"), QStringLiteral("crop"), QStringLiteral("animation")};
QVector updateRoles;
while (i.hasNext()) {
i.next();
setProducerProperty(i.key(), i.value());
if (m_clipType == ClipType::SlideShow && keys.contains(i.key())) {
reload = true;
refreshOnly = false;
}
if (i.key().startsWith(QLatin1String("kdenlive:clipanalysis"))) {
refreshAnalysis = true;
}
if (timelineProperties.contains(i.key())) {
passProperties.insert(i.key(), i.value());
}
}
if (properties.contains(QStringLiteral("kdenlive:proxy"))) {
QString value = properties.value(QStringLiteral("kdenlive:proxy"));
// If value is "-", that means user manually disabled proxy on this clip
if (value.isEmpty() || value == QLatin1String("-")) {
// reset proxy
int id;
if (pCore->jobManager()->hasPendingJob(clipId(), AbstractClipJob::PROXYJOB, &id)) {
// The proxy clip is being created, abort
pCore->jobManager()->discardJobs(clipId(), AbstractClipJob::PROXYJOB);
} else {
reload = true;
refreshOnly = false;
}
} else {
// A proxy was requested, make sure to keep original url
setProducerProperty(QStringLiteral("kdenlive:originalurl"), url());
pCore->jobManager()->startJob({clipId()}, -1, QString());
}
} else if (!reload) {
const QList propKeys = properties.keys();
for (const QString &k : propKeys) {
if (forceReloadProperties.contains(k)) {
if (m_clipType != ClipType::Color) {
reload = true;
refreshOnly = false;
} else {
// Clip resource changed, update thumbnail
reload = true;
refreshPanel = true;
updateRoles << TimelineModel::ResourceRole;
}
break;
}
}
}
if (!reload && (properties.contains(QStringLiteral("xmldata")) || !passProperties.isEmpty())) {
reload = true;
}
if (refreshAnalysis) {
emit refreshAnalysisPanel();
}
if (properties.contains(QStringLiteral("length")) || properties.contains(QStringLiteral("kdenlive:duration"))) {
m_duration = getStringDuration();
if (auto ptr = m_model.lock())
std::static_pointer_cast(ptr)->onItemUpdated(std::static_pointer_cast(shared_from_this()),
AbstractProjectItem::DataDuration);
refreshOnly = false;
reload = true;
}
if (properties.contains(QStringLiteral("kdenlive:clipname"))) {
m_name = properties.value(QStringLiteral("kdenlive:clipname"));
refreshPanel = true;
if (auto ptr = m_model.lock()) {
std::static_pointer_cast(ptr)->onItemUpdated(std::static_pointer_cast(shared_from_this()),
AbstractProjectItem::DataName);
}
// update timeline clips
updateTimelineClips(QVector() << TimelineModel::NameRole);
}
if (refreshPanel) {
// Some of the clip properties have changed through a command, update properties panel
emit refreshPropertiesPanel();
}
if (reload) {
// producer has changed, refresh monitor and thumbnail
if (hasProxy()) {
pCore->jobManager()->discardJobs(clipId(), AbstractClipJob::PROXYJOB);
setProducerProperty(QStringLiteral("_overwriteproxy"), 1);
pCore->jobManager()->startJob({clipId()}, -1, QString());
} else {
reloadProducer(refreshOnly);
}
if (refreshOnly) {
if (auto ptr = m_model.lock()) {
emit std::static_pointer_cast(ptr)->refreshClip(m_binId);
}
}
if (!updateRoles.isEmpty()) {
updateTimelineClips(updateRoles);
}
}
if (!passProperties.isEmpty()) {
if (auto ptr = m_model.lock()) emit std::static_pointer_cast(ptr)->updateTimelineProducers(m_binId, passProperties);
}
}
ClipPropertiesController *ProjectClip::buildProperties(QWidget *parent)
{
auto ptr = m_model.lock();
Q_ASSERT(ptr);
ClipPropertiesController *panel = new ClipPropertiesController(static_cast(this), parent);
connect(this, &ProjectClip::refreshPropertiesPanel, panel, &ClipPropertiesController::slotReloadProperties);
connect(this, &ProjectClip::refreshAnalysisPanel, panel, &ClipPropertiesController::slotFillAnalysisData);
connect(panel, &ClipPropertiesController::requestProxy, [this](bool doProxy) {
QList> clipList{std::static_pointer_cast(shared_from_this())};
pCore->currentDoc()->slotProxyCurrentItem(doProxy, clipList);
});
connect(panel, &ClipPropertiesController::deleteProxy, this, &ProjectClip::deleteProxy);
return panel;
}
void ProjectClip::deleteProxy()
{
// Disable proxy file
QString proxy = getProducerProperty(QStringLiteral("kdenlive:proxy"));
QList> clipList{std::static_pointer_cast(shared_from_this())};
pCore->currentDoc()->slotProxyCurrentItem(false, clipList);
// Delete
bool ok;
QDir dir = pCore->currentDoc()->getCacheDir(CacheProxy, &ok);
if (ok && proxy.length() > 2) {
proxy = QFileInfo(proxy).fileName();
if (dir.exists(proxy)) {
dir.remove(proxy);
}
}
}
void ProjectClip::updateParent(std::shared_ptr parent)
{
if (parent) {
auto item = std::static_pointer_cast(parent);
ClipController::setProducerProperty(QStringLiteral("kdenlive:folderid"), item->clipId());
}
AbstractProjectItem::updateParent(parent);
}
bool ProjectClip::matches(const QString &condition)
{
// TODO
Q_UNUSED(condition)
return true;
}
bool ProjectClip::rename(const QString &name, int column)
{
QMap newProperites;
QMap oldProperites;
bool edited = false;
switch (column) {
case 0:
if (m_name == name) {
return false;
}
// Rename clip
oldProperites.insert(QStringLiteral("kdenlive:clipname"), m_name);
newProperites.insert(QStringLiteral("kdenlive:clipname"), name);
m_name = name;
edited = true;
break;
case 2:
if (m_description == name) {
return false;
}
// Rename clip
if (m_clipType == ClipType::TextTemplate) {
oldProperites.insert(QStringLiteral("templatetext"), m_description);
newProperites.insert(QStringLiteral("templatetext"), name);
} else {
oldProperites.insert(QStringLiteral("kdenlive:description"), m_description);
newProperites.insert(QStringLiteral("kdenlive:description"), name);
}
m_description = name;
edited = true;
break;
}
if (edited) {
pCore->bin()->slotEditClipCommand(m_binId, oldProperites, newProperites);
}
return edited;
}
QVariant ProjectClip::getData(DataType type) const
{
switch (type) {
case AbstractProjectItem::IconOverlay:
return m_effectStack && m_effectStack->rowCount() > 0 ? QVariant("kdenlive-track_has_effect") : QVariant();
default:
return AbstractProjectItem::getData(type);
}
}
void ProjectClip::slotExtractImage(const QList &frames)
{
QMutexLocker lock(&m_thumbMutex);
for (int i = 0; i < frames.count(); i++) {
if (!m_requestedThumbs.contains(frames.at(i))) {
m_requestedThumbs << frames.at(i);
}
}
qSort(m_requestedThumbs);
if (!m_thumbThread.isRunning()) {
m_thumbThread = QtConcurrent::run(this, &ProjectClip::doExtractImage);
}
}
void ProjectClip::doExtractImage()
{
// TODO refac: we can probably move that into a ThumbJob
std::shared_ptr prod = thumbProducer();
if (prod == nullptr || !prod->is_valid()) {
return;
}
int frameWidth = 150 * prod->profile()->dar() + 0.5;
bool ok = false;
auto ptr = m_model.lock();
Q_ASSERT(ptr);
QDir thumbFolder = pCore->currentDoc()->getCacheDir(CacheThumbs, &ok);
int max = prod->get_length();
while (!m_requestedThumbs.isEmpty()) {
m_thumbMutex.lock();
int pos = m_requestedThumbs.takeFirst();
m_thumbMutex.unlock();
if (ok && thumbFolder.exists(hash() + QLatin1Char('#') + QString::number(pos) + QStringLiteral(".png"))) {
emit thumbReady(pos, QImage(thumbFolder.absoluteFilePath(hash() + QLatin1Char('#') + QString::number(pos) + QStringLiteral(".png"))));
continue;
}
if (pos >= max) {
pos = max - 1;
}
const QString path = url() + QLatin1Char('_') + QString::number(pos);
QImage img;
if (ThumbnailCache::get()->hasThumbnail(clipId(), pos, true)) {
img = ThumbnailCache::get()->getThumbnail(clipId(), pos, true);
}
if (!img.isNull()) {
emit thumbReady(pos, img);
continue;
}
prod->seek(pos);
Mlt::Frame *frame = prod->get_frame();
frame->set("deinterlace_method", "onefield");
frame->set("top_field_first", -1);
if (frame->is_valid()) {
img = KThumb::getFrame(frame, frameWidth, 150, !qFuzzyCompare(prod->profile()->sar(), 1));
ThumbnailCache::get()->storeThumbnail(clipId(), pos, img, false);
emit thumbReady(pos, img);
}
delete frame;
}
}
int ProjectClip::audioChannels() const
{
if (!audioInfo()) {
return 0;
}
return audioInfo()->channels();
}
void ProjectClip::discardAudioThumb()
{
QString audioThumbPath = getAudioThumbPath();
if (!audioThumbPath.isEmpty()) {
QFile::remove(audioThumbPath);
}
audioFrameCache.clear();
qCDebug(KDENLIVE_LOG) << "//////////////////// DISCARD AUIIO THUMBNS";
m_audioThumbCreated = false;
pCore->jobManager()->discardJobs(clipId(), AbstractClipJob::AUDIOTHUMBJOB);
}
const QString ProjectClip::getAudioThumbPath()
{
if (audioInfo() == nullptr) {
return QString();
}
int audioStream = audioInfo()->ffmpeg_audio_index();
QString clipHash = hash();
if (clipHash.isEmpty()) {
return QString();
}
bool ok = false;
QDir thumbFolder = pCore->currentDoc()->getCacheDir(CacheAudio, &ok);
if (!ok) {
return QString();
}
QString audioPath = thumbFolder.absoluteFilePath(clipHash);
if (audioStream > 0) {
audioPath.append(QLatin1Char('_') + QString::number(audioInfo()->audio_index()));
}
int roundedFps = (int)pCore->getCurrentFps();
audioPath.append(QStringLiteral("_%1_audio.png").arg(roundedFps));
return audioPath;
}
QStringList ProjectClip::updatedAnalysisData(const QString &name, const QString &data, int offset)
{
if (data.isEmpty()) {
// Remove data
return QStringList() << QString("kdenlive:clipanalysis." + name) << QString();
// m_controller->resetProperty("kdenlive:clipanalysis." + name);
}
QString current = getProducerProperty("kdenlive:clipanalysis." + name);
if (!current.isEmpty()) {
if (KMessageBox::questionYesNo(QApplication::activeWindow(), i18n("Clip already contains analysis data %1", name), QString(), KGuiItem(i18n("Merge")),
KGuiItem(i18n("Add"))) == KMessageBox::Yes) {
// Merge data
auto &profile = pCore->getCurrentProfile();
Mlt::Geometry geometry(current.toUtf8().data(), duration().frames(profile->fps()), profile->width(), profile->height());
Mlt::Geometry newGeometry(data.toUtf8().data(), duration().frames(profile->fps()), profile->width(), profile->height());
Mlt::GeometryItem item;
int pos = 0;
while (newGeometry.next_key(&item, pos) == 0) {
pos = item.frame();
item.frame(pos + offset);
pos++;
geometry.insert(item);
}
return QStringList() << QString("kdenlive:clipanalysis." + name) << geometry.serialise();
// m_controller->setProperty("kdenlive:clipanalysis." + name, geometry.serialise());
}
// Add data with another name
int i = 1;
QString previous = getProducerProperty("kdenlive:clipanalysis." + name + QString::number(i));
while (!previous.isEmpty()) {
++i;
previous = getProducerProperty("kdenlive:clipanalysis." + name + QString::number(i));
}
return QStringList() << QString("kdenlive:clipanalysis." + name + QString::number(i)) << geometryWithOffset(data, offset);
// m_controller->setProperty("kdenlive:clipanalysis." + name + QLatin1Char(' ') + QString::number(i), geometryWithOffset(data, offset));
}
return QStringList() << QString("kdenlive:clipanalysis." + name) << geometryWithOffset(data, offset);
// m_controller->setProperty("kdenlive:clipanalysis." + name, geometryWithOffset(data, offset));
}
QMap ProjectClip::analysisData(bool withPrefix)
{
return getPropertiesFromPrefix(QStringLiteral("kdenlive:clipanalysis."), withPrefix);
}
const QString ProjectClip::geometryWithOffset(const QString &data, int offset)
{
if (offset == 0) {
return data;
}
auto &profile = pCore->getCurrentProfile();
Mlt::Geometry geometry(data.toUtf8().data(), duration().frames(profile->fps()), profile->width(), profile->height());
Mlt::Geometry newgeometry(nullptr, duration().frames(profile->fps()), profile->width(), profile->height());
Mlt::GeometryItem item;
int pos = 0;
while (geometry.next_key(&item, pos) == 0) {
pos = item.frame();
item.frame(pos + offset);
pos++;
newgeometry.insert(item);
}
return newgeometry.serialise();
}
bool ProjectClip::isSplittable() const
{
return (m_clipType == ClipType::AV || m_clipType == ClipType::Playlist);
}
void ProjectClip::setBinEffectsEnabled(bool enabled)
{
ClipController::setBinEffectsEnabled(enabled);
}
void ProjectClip::registerService(std::weak_ptr timeline, int clipId, std::shared_ptr service, bool forceRegister)
{
if (!service->is_cut() || forceRegister) {
int hasAudio = service->get_int("set.test_audio") == 0;
int hasVideo = service->get_int("set.test_image") == 0;
if (hasVideo && m_videoProducers.count(clipId) == 0) {
// This is an undo producer, register it!
m_videoProducers[clipId] = service;
m_effectStack->addService(m_videoProducers[clipId]);
} else if (hasAudio && m_audioProducers.count(clipId) == 0) {
// This is an undo producer, register it!
m_audioProducers[clipId] = service;
m_effectStack->addService(m_audioProducers[clipId]);
}
}
registerTimelineClip(timeline, clipId);
}
void ProjectClip::registerTimelineClip(std::weak_ptr timeline, int clipId)
{
Q_ASSERT(m_registeredClips.count(clipId) == 0);
Q_ASSERT(!timeline.expired());
m_registeredClips[clipId] = std::move(timeline);
setRefCount((uint)m_registeredClips.size());
}
void ProjectClip::deregisterTimelineClip(int clipId)
{
qDebug() << " ** * DEREGISTERING TIMELINE CLIP: " << clipId;
Q_ASSERT(m_registeredClips.count(clipId) > 0);
m_registeredClips.erase(clipId);
if (m_videoProducers.count(clipId) > 0) {
m_effectStack->removeService(m_videoProducers[clipId]);
m_videoProducers.erase(clipId);
}
if (m_audioProducers.count(clipId) > 0) {
m_effectStack->removeService(m_audioProducers[clipId]);
m_audioProducers.erase(clipId);
}
setRefCount((uint)m_registeredClips.size());
}
QList ProjectClip::timelineInstances() const
{
QList ids;
for (std::map>::const_iterator it = m_registeredClips.begin(); it != m_registeredClips.end(); ++it) {
ids.push_back(it->first);
}
return ids;
}
bool ProjectClip::selfSoftDelete(Fun &undo, Fun &redo)
{
auto toDelete = m_registeredClips; // we cannot use m_registeredClips directly, because it will be modified during loop
for (const auto &clip : toDelete) {
if (m_registeredClips.count(clip.first) == 0) {
// clip already deleted, was probably grouped with another one
continue;
}
if (auto timeline = clip.second.lock()) {
timeline->requestItemDeletion(clip.first, undo, redo);
} else {
qDebug() << "Error while deleting clip: timeline unavailable";
Q_ASSERT(false);
return false;
}
}
return AbstractProjectItem::selfSoftDelete(undo, redo);
}
bool ProjectClip::isIncludedInTimeline()
{
return m_registeredClips.size() > 0;
}
void ProjectClip::updateChildProducers()
{
// TODO refac: the effect should be managed by an effectstack on the master
/*
// pass effect stack on all child producers
QMutexLocker locker(&m_producerMutex);
for (const auto &clip : m_timelineProducers) {
if (auto producer = clip.second) {
Clip clp(producer->parent());
clp.deleteEffects();
clp.replaceEffects(*m_masterProducer);
}
}
*/
}
void ProjectClip::replaceInTimeline()
{
for (const auto &clip : m_registeredClips) {
if (auto timeline = clip.second.lock()) {
timeline->requestClipReload(clip.first);
} else {
qDebug() << "Error while reloading clip: timeline unavailable";
Q_ASSERT(false);
}
}
}
void ProjectClip::updateTimelineClips(QVector roles)
{
for (const auto &clip : m_registeredClips) {
if (auto timeline = clip.second.lock()) {
timeline->requestClipUpdate(clip.first, roles);
} else {
qDebug() << "Error while reloading clip thumb: timeline unavailable";
Q_ASSERT(false);
return;
}
}
}
diff --git a/src/monitor/glwidget.cpp b/src/monitor/glwidget.cpp
index d5e8d6d5f..28c012f2a 100644
--- a/src/monitor/glwidget.cpp
+++ b/src/monitor/glwidget.cpp
@@ -1,1983 +1,1983 @@
/*
* Copyright (c) 2011-2016 Meltytech, LLC
* Original author: Dan Dennedy
* Modified for Kdenlive: Jean-Baptiste Mardelle
*
* GL shader based on BSD licensed code from Peter Bengtsson:
* http://www.fourcc.org/source/YUV420P-OpenGL-GLSLang.c
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see .
*/
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include "core.h"
#include "glwidget.h"
#include "kdenlivesettings.h"
#include "monitorproxy.h"
#include "profiles/profilemodel.hpp"
#include "qml/qmlaudiothumb.h"
#include "timeline2/view/qml/timelineitems.h"
#include
#ifndef GL_UNPACK_ROW_LENGTH
#ifdef GL_UNPACK_ROW_LENGTH_EXT
#define GL_UNPACK_ROW_LENGTH GL_UNPACK_ROW_LENGTH_EXT
#else
#error GL_UNPACK_ROW_LENGTH undefined
#endif
#endif
#ifdef QT_NO_DEBUG
#define check_error(fn) \
{ \
}
#else
#define check_error(fn) \
{ \
uint err = fn->glGetError(); \
if (err != GL_NO_ERROR) { \
qCCritical(KDENLIVE_LOG) << "GL error" << hex << err << dec << "at" << __FILE__ << ":" << __LINE__; \
} \
}
#endif
#ifndef GL_TIMEOUT_IGNORED
#define GL_TIMEOUT_IGNORED 0xFFFFFFFFFFFFFFFFull
#endif
using namespace Mlt;
GLWidget::GLWidget(int id, QObject *parent)
: QQuickView((QWindow *)parent)
, sendFrameForAnalysis(false)
, m_glslManager(nullptr)
, m_consumer(nullptr)
, m_producer(nullptr)
, m_id(id)
, m_rulerHeight(QFontMetrics(QApplication::font()).lineSpacing() * 0.7)
, m_shader(nullptr)
, m_initSem(0)
, m_analyseSem(1)
, m_isInitialized(false)
, m_threadStartEvent(nullptr)
, m_threadStopEvent(nullptr)
, m_threadCreateEvent(nullptr)
, m_threadJoinEvent(nullptr)
, m_displayEvent(nullptr)
, m_frameRenderer(nullptr)
, m_projectionLocation(0)
, m_modelViewLocation(0)
, m_vertexLocation(0)
, m_texCoordLocation(0)
, m_colorspaceLocation(0)
, m_zoom(1.0f)
, m_sendFrame(false)
, m_isZoneMode(false)
, m_isLoopMode(false)
, m_offset(QPoint(0, 0))
, m_audioWaveDisplayed(false)
, m_fbo(nullptr)
, m_shareContext(nullptr)
, m_openGLSync(false)
, m_ClientWaitSync(nullptr)
{
KDeclarative::KDeclarative kdeclarative;
kdeclarative.setDeclarativeEngine(engine());
#if KDECLARATIVE_VERSION >= QT_VERSION_CHECK(5, 45, 0)
kdeclarative.setupEngine(engine());
kdeclarative.setupContext();
#else
kdeclarative.setupBindings();
#endif
m_texture[0] = m_texture[1] = m_texture[2] = 0;
qRegisterMetaType("Mlt::Frame");
qRegisterMetaType("SharedFrame");
qmlRegisterType("AudioThumb", 1, 0, "QmlAudioThumb");
setPersistentOpenGLContext(true);
setPersistentSceneGraph(true);
setClearBeforeRendering(false);
setResizeMode(QQuickView::SizeRootObjectToView);
m_offscreenSurface.setFormat(QWindow::format());
m_offscreenSurface.create();
m_monitorProfile = new Mlt::Profile();
m_refreshTimer.setSingleShot(true);
m_refreshTimer.setInterval(50);
m_blackClip.reset(new Mlt::Producer(*m_monitorProfile, "color:black"));
m_blackClip->set("kdenlive:id", "black");
m_blackClip->set("out", 3);
connect(&m_refreshTimer, &QTimer::timeout, this, &GLWidget::refresh);
- m_producer = &*m_blackClip;
+ m_producer = m_blackClip;
if (!initGPUAccel()) {
disableGPUAccel();
}
connect(this, &QQuickWindow::sceneGraphInitialized, this, &GLWidget::initializeGL, Qt::DirectConnection);
connect(this, &QQuickWindow::beforeRendering, this, &GLWidget::paintGL, Qt::DirectConnection);
registerTimelineItems();
m_proxy = new MonitorProxy(this);
connect(m_proxy, &MonitorProxy::seekRequestChanged, this, &GLWidget::requestSeek);
rootContext()->setContextProperty("controller", m_proxy);
}
GLWidget::~GLWidget()
{
// C & D
delete m_glslManager;
delete m_threadStartEvent;
delete m_threadStopEvent;
delete m_threadCreateEvent;
delete m_threadJoinEvent;
delete m_displayEvent;
if (m_frameRenderer) {
if (m_frameRenderer->isRunning()) {
QMetaObject::invokeMethod(m_frameRenderer, "cleanup");
m_frameRenderer->quit();
m_frameRenderer->wait();
m_frameRenderer->deleteLater();
} else {
delete m_frameRenderer;
}
}
m_blackClip.reset();
delete m_shareContext;
delete m_shader;
// delete m_monitorProfile;
}
void GLWidget::updateAudioForAnalysis()
{
if (m_frameRenderer) {
m_frameRenderer->sendAudioForAnalysis = KdenliveSettings::monitor_audio();
}
}
void GLWidget::initializeGL()
{
if (m_isInitialized || !isVisible() || (openglContext() == nullptr)) return;
openglContext()->makeCurrent(&m_offscreenSurface);
initializeOpenGLFunctions();
qCDebug(KDENLIVE_LOG) << "OpenGL vendor: " << QString::fromUtf8((const char *)glGetString(GL_VENDOR));
qCDebug(KDENLIVE_LOG) << "OpenGL renderer: " << QString::fromUtf8((const char *)glGetString(GL_RENDERER));
qCDebug(KDENLIVE_LOG) << "OpenGL Threaded: " << openglContext()->supportsThreadedOpenGL();
qCDebug(KDENLIVE_LOG) << "OpenGL ARG_SYNC: " << openglContext()->hasExtension("GL_ARB_sync");
qCDebug(KDENLIVE_LOG) << "OpenGL OpenGLES: " << openglContext()->isOpenGLES();
// C & D
if (onlyGLESGPUAccel()) {
disableGPUAccel();
}
createShader();
m_openGLSync = initGPUAccelSync();
// C & D
if (m_glslManager) {
// Create a context sharing with this context for the RenderThread context.
// This is needed because openglContext() is active in another thread
// at the time that RenderThread is created.
// See this Qt bug for more info: https://bugreports.qt.io/browse/QTBUG-44677
// TODO: QTBUG-44677 is closed. still applicable?
m_shareContext = new QOpenGLContext;
m_shareContext->setFormat(openglContext()->format());
m_shareContext->setShareContext(openglContext());
m_shareContext->create();
}
m_frameRenderer = new FrameRenderer(openglContext(), &m_offscreenSurface, m_ClientWaitSync);
m_frameRenderer->sendAudioForAnalysis = KdenliveSettings::monitor_audio();
openglContext()->makeCurrent(this);
// openglContext()->blockSignals(false);
connect(m_frameRenderer, &FrameRenderer::frameDisplayed, this, &GLWidget::frameDisplayed, Qt::QueuedConnection);
connect(m_frameRenderer, &FrameRenderer::textureReady, this, &GLWidget::updateTexture, Qt::DirectConnection);
connect(m_frameRenderer, &FrameRenderer::frameDisplayed, this, &GLWidget::onFrameDisplayed, Qt::QueuedConnection);
connect(m_frameRenderer, &FrameRenderer::audioSamplesSignal, this, &GLWidget::audioSamplesSignal, Qt::QueuedConnection);
m_initSem.release();
m_isInitialized = true;
reconfigure();
}
void GLWidget::resizeGL(int width, int height)
{
int x, y, w, h;
height -= m_rulerHeight;
double this_aspect = (double)width / height;
double video_aspect = m_monitorProfile->dar();
// Special case optimization to negate odd effect of sample aspect ratio
// not corresponding exactly with image resolution.
if ((int)(this_aspect * 1000) == (int)(video_aspect * 1000)) {
w = width;
h = height;
}
// Use OpenGL to normalise sample aspect ratio
else if (height * video_aspect > width) {
w = width;
h = width / video_aspect;
} else {
w = height * video_aspect;
h = height;
}
x = (width - w) / 2;
y = (height - h) / 2;
m_rect.setRect(x, y, w, h);
double scalex = (double)m_rect.width() / m_monitorProfile->width() * m_zoom;
double scaley = (double)m_rect.width() / ((double)m_monitorProfile->height() * m_monitorProfile->dar() / m_monitorProfile->width()) /
m_monitorProfile->width() * m_zoom;
QPoint center = m_rect.center();
QQuickItem *rootQml = rootObject();
if (rootQml) {
rootQml->setProperty("center", center);
rootQml->setProperty("scalex", scalex);
rootQml->setProperty("scaley", scaley);
if (rootQml->objectName() == QLatin1String("rootsplit")) {
// Adjust splitter pos
rootQml->setProperty("splitterPos", x + (rootQml->property("realpercent").toDouble() * w));
}
}
emit rectChanged();
}
void GLWidget::resizeEvent(QResizeEvent *event)
{
resizeGL(event->size().width(), event->size().height());
QQuickView::resizeEvent(event);
}
void GLWidget::createGPUAccelFragmentProg()
{
m_shader->addShaderFromSourceCode(QOpenGLShader::Fragment, "uniform sampler2D tex;"
"varying highp vec2 coordinates;"
"void main(void) {"
" gl_FragColor = texture2D(tex, coordinates);"
"}");
m_shader->link();
m_textureLocation[0] = m_shader->uniformLocation("tex");
}
void GLWidget::createShader()
{
m_shader = new QOpenGLShaderProgram;
m_shader->addShaderFromSourceCode(QOpenGLShader::Vertex, "uniform highp mat4 projection;"
"uniform highp mat4 modelView;"
"attribute highp vec4 vertex;"
"attribute highp vec2 texCoord;"
"varying highp vec2 coordinates;"
"void main(void) {"
" gl_Position = projection * modelView * vertex;"
" coordinates = texCoord;"
"}");
// C & D
if (m_glslManager) {
createGPUAccelFragmentProg();
} else {
// A & B
createYUVTextureProjectFragmentProg();
}
m_projectionLocation = m_shader->uniformLocation("projection");
m_modelViewLocation = m_shader->uniformLocation("modelView");
m_vertexLocation = m_shader->attributeLocation("vertex");
m_texCoordLocation = m_shader->attributeLocation("texCoord");
}
void GLWidget::createYUVTextureProjectFragmentProg()
{
m_shader->addShaderFromSourceCode(QOpenGLShader::Fragment,
"uniform sampler2D Ytex, Utex, Vtex;"
"uniform lowp int colorspace;"
"varying highp vec2 coordinates;"
"void main(void) {"
" mediump vec3 texel;"
" texel.r = texture2D(Ytex, coordinates).r - 0.0625;" // Y
" texel.g = texture2D(Utex, coordinates).r - 0.5;" // U
" texel.b = texture2D(Vtex, coordinates).r - 0.5;" // V
" mediump mat3 coefficients;"
" if (colorspace == 601) {"
" coefficients = mat3("
" 1.1643, 1.1643, 1.1643," // column 1
" 0.0, -0.39173, 2.017," // column 2
" 1.5958, -0.8129, 0.0);" // column 3
" } else {" // ITU-R 709
" coefficients = mat3("
" 1.1643, 1.1643, 1.1643," // column 1
" 0.0, -0.213, 2.112," // column 2
" 1.793, -0.533, 0.0);" // column 3
" }"
" gl_FragColor = vec4(coefficients * texel, 1.0);"
"}");
m_shader->link();
m_textureLocation[0] = m_shader->uniformLocation("Ytex");
m_textureLocation[1] = m_shader->uniformLocation("Utex");
m_textureLocation[2] = m_shader->uniformLocation("Vtex");
m_colorspaceLocation = m_shader->uniformLocation("colorspace");
}
static void uploadTextures(QOpenGLContext *context, const SharedFrame &frame, GLuint texture[])
{
int width = frame.get_image_width();
int height = frame.get_image_height();
const uint8_t *image = frame.get_image();
QOpenGLFunctions *f = context->functions();
// The planes of pixel data may not be a multiple of the default 4 bytes.
f->glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
// Upload each plane of YUV to a texture.
if (texture[0] != 0u) {
f->glDeleteTextures(3, texture);
}
check_error(f);
f->glGenTextures(3, texture);
check_error(f);
f->glBindTexture(GL_TEXTURE_2D, texture[0]);
check_error(f);
f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
check_error(f);
f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
check_error(f);
f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
check_error(f);
f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
check_error(f);
f->glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, image);
check_error(f);
f->glBindTexture(GL_TEXTURE_2D, texture[1]);
check_error(f);
f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
check_error(f);
f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
check_error(f);
f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
check_error(f);
f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
check_error(f);
f->glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width / 2, height / 2, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, image + width * height);
check_error(f);
f->glBindTexture(GL_TEXTURE_2D, texture[2]);
check_error(f);
f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
check_error(f);
f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
check_error(f);
f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
check_error(f);
f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
check_error(f);
f->glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width / 2, height / 2, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, image + width * height + width / 2 * height / 2);
check_error(f);
}
void GLWidget::clear()
{
stopGlsl();
update();
}
void GLWidget::releaseAnalyse()
{
m_analyseSem.release();
}
bool GLWidget::acquireSharedFrameTextures()
{
// A
if ((m_glslManager == nullptr) && !openglContext()->supportsThreadedOpenGL()) {
QMutexLocker locker(&m_contextSharedAccess);
if (!m_sharedFrame.is_valid()) {
return false;
}
uploadTextures(openglContext(), m_sharedFrame, m_texture);
} else if (m_glslManager) {
// C & D
m_contextSharedAccess.lock();
if (m_sharedFrame.is_valid()) {
m_texture[0] = *((const GLuint *)m_sharedFrame.get_image());
}
}
if (!m_texture[0]) {
// C & D
if (m_glslManager) m_contextSharedAccess.unlock();
return false;
}
return true;
}
void GLWidget::bindShaderProgram()
{
m_shader->bind();
// C & D
if (m_glslManager) {
m_shader->setUniformValue(m_textureLocation[0], 0);
} else {
// A & B
m_shader->setUniformValue(m_textureLocation[0], 0);
m_shader->setUniformValue(m_textureLocation[1], 1);
m_shader->setUniformValue(m_textureLocation[2], 2);
m_shader->setUniformValue(m_colorspaceLocation, m_monitorProfile->colorspace());
}
}
void GLWidget::releaseSharedFrameTextures()
{
// C & D
if (m_glslManager) {
glFinish();
m_contextSharedAccess.unlock();
}
}
bool GLWidget::initGPUAccel()
{
if (!KdenliveSettings::gpu_accel()) return false;
m_glslManager = new Mlt::Filter(*m_monitorProfile, "glsl.manager");
return m_glslManager->is_valid();
}
// C & D
// TODO: insure safe, idempotent on all pipelines.
void GLWidget::disableGPUAccel()
{
delete m_glslManager;
m_glslManager = nullptr;
KdenliveSettings::setGpu_accel(false);
// Need to destroy MLT global reference to prevent filters from trying to use GPU.
mlt_properties_set_data(mlt_global_properties(), "glslManager", nullptr, 0, nullptr, nullptr);
emit gpuNotSupported();
}
bool GLWidget::onlyGLESGPUAccel() const
{
return (m_glslManager != nullptr) && openglContext()->isOpenGLES();
}
#if defined(Q_OS_WIN)
bool GLWidget::initGPUAccelSync()
{
// no-op
// TODO: getProcAddress is not working on Windows?
return false;
}
#else
bool GLWidget::initGPUAccelSync()
{
if (!KdenliveSettings::gpu_accel()) return false;
if (m_glslManager == nullptr) return false;
if (!openglContext()->hasExtension("GL_ARB_sync")) return false;
m_ClientWaitSync = (ClientWaitSync_fp)openglContext()->getProcAddress("glClientWaitSync");
if (m_ClientWaitSync) {
return true;
} else {
qCDebug(KDENLIVE_LOG) << " / / // NO GL SYNC, ERROR";
// fallback on A || B
// TODO: fallback on A || B || C?
disableGPUAccel();
return false;
}
}
#endif
void GLWidget::paintGL()
{
QOpenGLFunctions *f = openglContext()->functions();
int width = this->width() * devicePixelRatio();
int height = this->height() * devicePixelRatio();
f->glDisable(GL_BLEND);
f->glDisable(GL_DEPTH_TEST);
f->glDepthMask(GL_FALSE);
f->glViewport(0, (m_rulerHeight * devicePixelRatio() * 0.5 + 0.5), width, height);
check_error(f);
QColor color(KdenliveSettings::window_background());
f->glClearColor(color.redF(), color.greenF(), color.blueF(), color.alphaF());
f->glClear(GL_COLOR_BUFFER_BIT);
check_error(f);
if (!acquireSharedFrameTextures()) return;
// Bind textures.
for (uint i = 0; i < 3; ++i) {
if (m_texture[i] != 0u) {
f->glActiveTexture(GL_TEXTURE0 + i);
f->glBindTexture(GL_TEXTURE_2D, m_texture[i]);
check_error(f);
}
}
bindShaderProgram();
check_error(f);
// Setup an orthographic projection.
QMatrix4x4 projection;
projection.scale(2.0f / (float)width, 2.0f / (float)height);
m_shader->setUniformValue(m_projectionLocation, projection);
check_error(f);
// Set model view.
QMatrix4x4 modelView;
if (!qFuzzyCompare(m_zoom, 1.0f)) {
if ((offset().x() != 0) || (offset().y() != 0)) modelView.translate(-offset().x() * devicePixelRatio(), offset().y() * devicePixelRatio());
modelView.scale(zoom(), zoom());
}
m_shader->setUniformValue(m_modelViewLocation, modelView);
check_error(f);
// Provide vertices of triangle strip.
QVector vertices;
width = m_rect.width() * devicePixelRatio();
height = m_rect.height() * devicePixelRatio();
vertices << QVector2D(float(-width) / 2.0f, float(-height) / 2.0f);
vertices << QVector2D(float(-width) / 2.0f, float(height) / 2.0f);
vertices << QVector2D(float(width) / 2.0f, float(-height) / 2.0f);
vertices << QVector2D(float(width) / 2.0f, float(height) / 2.0f);
m_shader->enableAttributeArray(m_vertexLocation);
check_error(f);
m_shader->setAttributeArray(m_vertexLocation, vertices.constData());
check_error(f);
// Provide texture coordinates.
QVector texCoord;
texCoord << QVector2D(0.0f, 1.0f);
texCoord << QVector2D(0.0f, 0.0f);
texCoord << QVector2D(1.0f, 1.0f);
texCoord << QVector2D(1.0f, 0.0f);
m_shader->enableAttributeArray(m_texCoordLocation);
check_error(f);
m_shader->setAttributeArray(m_texCoordLocation, texCoord.constData());
check_error(f);
// Render
glDrawArrays(GL_TRIANGLE_STRIP, 0, vertices.size());
check_error(f);
if (m_sendFrame && m_analyseSem.tryAcquire(1)) {
// Render RGB frame for analysis
int fullWidth = m_monitorProfile->width();
int fullHeight = m_monitorProfile->height();
if ((m_fbo == nullptr) || m_fbo->size() != QSize(fullWidth, fullHeight)) {
delete m_fbo;
QOpenGLFramebufferObjectFormat fmt;
fmt.setSamples(1);
fmt.setInternalTextureFormat(GL_RGB); // GL_RGBA32F); // which one is the fastest ?
m_fbo = new QOpenGLFramebufferObject(fullWidth, fullHeight, fmt); // GL_TEXTURE_2D);
}
m_fbo->bind();
glViewport(0, 0, fullWidth, fullHeight);
QMatrix4x4 projection2;
projection2.scale(2.0f / (float)width, 2.0f / (float)height);
m_shader->setUniformValue(m_projectionLocation, projection2);
glDrawArrays(GL_TRIANGLE_STRIP, 0, vertices.size());
check_error(f);
m_fbo->release();
emit analyseFrame(m_fbo->toImage());
m_sendFrame = false;
}
// Cleanup
m_shader->disableAttributeArray(m_vertexLocation);
m_shader->disableAttributeArray(m_texCoordLocation);
m_shader->release();
for (uint i = 0; i < 3; ++i) {
if (m_texture[i] != 0u) {
f->glActiveTexture(GL_TEXTURE0 + i);
f->glBindTexture(GL_TEXTURE_2D, 0);
check_error(f);
}
}
glActiveTexture(GL_TEXTURE0);
check_error(f);
releaseSharedFrameTextures();
check_error(f);
}
void GLWidget::slotZoom(bool zoomIn)
{
if (zoomIn) {
if (qFuzzyCompare(m_zoom, 1.0f)) {
setZoom(2.0f);
} else if (qFuzzyCompare(m_zoom, 2.0f)) {
setZoom(3.0f);
} else if (m_zoom < 1.0f) {
setZoom(m_zoom * 2);
}
} else {
if (qFuzzyCompare(m_zoom, 3.0f)) {
setZoom(2.0);
} else if (qFuzzyCompare(m_zoom, 2.0f)) {
setZoom(1.0);
} else if (m_zoom > 0.2) {
setZoom(m_zoom / 2);
}
}
}
void GLWidget::wheelEvent(QWheelEvent *event)
{
if (((event->modifiers() & Qt::ControlModifier) != 0u) && ((event->modifiers() & Qt::ShiftModifier) != 0u)) {
slotZoom(event->delta() > 0);
return;
}
emit mouseSeek(event->delta(), (uint)event->modifiers());
event->accept();
}
void GLWidget::requestSeek()
{
- if (m_producer == nullptr) {
+ if (!m_producer) {
return;
}
if (m_proxy->seeking()) {
if (!qFuzzyIsNull(m_producer->get_speed())) {
m_consumer->purge();
}
m_producer->seek(m_proxy->seekPosition());
if (m_consumer->is_stopped()) {
m_consumer->start();
}
m_consumer->set("refresh", 1);
}
}
void GLWidget::seek(int pos)
{
// Testing puspose only
if (!m_proxy->seeking()) {
m_proxy->setSeekPosition(pos);
if (!qFuzzyIsNull(m_producer->get_speed())) {
m_consumer->purge();
}
m_producer->seek(pos);
if (m_consumer->is_stopped()) {
m_consumer->start();
}
m_consumer->set("refresh", 1);
} else {
m_proxy->setSeekPosition(pos);
}
}
void GLWidget::requestRefresh()
{
- if ((m_producer != nullptr) && qFuzzyIsNull(m_producer->get_speed())) {
+ if (m_producer && qFuzzyIsNull(m_producer->get_speed())) {
m_refreshTimer.start();
}
}
QString GLWidget::frameToTime(int frames) const
{
return m_consumer ? m_consumer->frames_to_time(frames, mlt_time_smpte_df) : QStringLiteral("-");
}
void GLWidget::refresh()
{
m_refreshTimer.stop();
QMutexLocker locker(&m_mltMutex);
if (m_consumer->is_stopped()) {
m_consumer->start();
}
m_consumer->set("refresh", 1);
}
bool GLWidget::checkFrameNumber(int pos, int offset)
{
emit consumerPosition(pos);
if (!m_proxy->setPosition(pos)) {
emit seekPosition(m_proxy->seekOrCurrentPosition());
}
const double speed = m_producer->get_speed();
if (m_proxy->seeking()) {
m_producer->set_speed(0);
m_producer->seek(m_proxy->seekPosition());
if (qFuzzyIsNull(speed)) {
m_consumer->set("refresh", 1);
} else {
m_producer->set_speed(speed);
}
} else if (qFuzzyIsNull(speed)) {
if (m_isLoopMode) {
if (pos >= m_producer->get_int("out") - offset) {
m_consumer->purge();
m_producer->seek(m_proxy->zoneIn());
m_producer->set_speed(1.0);
m_consumer->set("refresh", 1);
}
return true;
} else {
if (pos >= m_producer->get_int("out") - offset) {
return false;
}
return true;
}
} else if (speed < 0. && pos <= 0) {
m_producer->set_speed(0);
return false;
}
return true;
}
void GLWidget::mousePressEvent(QMouseEvent *event)
{
if ((rootObject() != nullptr) && rootObject()->objectName() != QLatin1String("root") && !(event->modifiers() & Qt::ControlModifier) &&
!(event->buttons() & Qt::MiddleButton)) {
event->ignore();
QQuickView::mousePressEvent(event);
return;
}
if ((event->button() & Qt::LeftButton) != 0u) {
if ((event->modifiers() & Qt::ControlModifier) != 0u) {
// Pan view
m_panStart = event->pos();
setCursor(Qt::ClosedHandCursor);
} else {
m_dragStart = event->pos();
}
} else if ((event->button() & Qt::RightButton) != 0u) {
emit showContextMenu(event->globalPos());
} else if ((event->button() & Qt::MiddleButton) != 0u) {
m_panStart = event->pos();
setCursor(Qt::ClosedHandCursor);
}
event->accept();
QQuickView::mousePressEvent(event);
}
void GLWidget::mouseMoveEvent(QMouseEvent *event)
{
if ((rootObject() != nullptr) && rootObject()->objectName() != QLatin1String("root") && !(event->modifiers() & Qt::ControlModifier) &&
!(event->buttons() & Qt::MiddleButton)) {
event->ignore();
QQuickView::mouseMoveEvent(event);
return;
}
/* if (event->modifiers() == Qt::ShiftModifier && m_producer) {
emit seekTo(m_producer->get_length() * event->x() / width());
return;
}*/
QQuickView::mouseMoveEvent(event);
if (!m_panStart.isNull()) {
emit panView(m_panStart - event->pos());
m_panStart = event->pos();
event->accept();
QQuickView::mouseMoveEvent(event);
return;
}
if (!(event->buttons() & Qt::LeftButton)) {
QQuickView::mouseMoveEvent(event);
return;
}
if (!event->isAccepted() && !m_dragStart.isNull() && (event->pos() - m_dragStart).manhattanLength() >= QApplication::startDragDistance()) {
m_dragStart = QPoint();
emit startDrag();
}
}
void GLWidget::keyPressEvent(QKeyEvent *event)
{
QQuickView::keyPressEvent(event);
if (!event->isAccepted()) {
emit passKeyEvent(event);
}
}
void GLWidget::createThread(RenderThread **thread, thread_function_t function, void *data)
{
#ifdef Q_OS_WIN
// On Windows, MLT event consumer-thread-create is fired from the Qt main thread.
while (!m_isInitialized) {
qApp->processEvents();
}
#else
if (!m_isInitialized) {
m_initSem.acquire();
}
#endif
(*thread) = new RenderThread(function, data, m_shareContext, &m_offscreenSurface);
(*thread)->start();
}
static void onThreadCreate(mlt_properties owner, GLWidget *self, RenderThread **thread, int *priority, thread_function_t function, void *data)
{
Q_UNUSED(owner)
Q_UNUSED(priority)
// self->clearFrameRenderer();
self->createThread(thread, function, data);
self->lockMonitor();
}
static void onThreadJoin(mlt_properties owner, GLWidget *self, RenderThread *thread)
{
Q_UNUSED(owner)
if (thread) {
thread->quit();
thread->wait();
delete thread;
// self->clearFrameRenderer();
self->releaseMonitor();
}
}
void GLWidget::startGlsl()
{
// C & D
if (m_glslManager) {
// clearFrameRenderer();
m_glslManager->fire_event("init glsl");
if (m_glslManager->get_int("glsl_supported") == 0) {
disableGPUAccel();
} else {
emit started();
}
}
}
static void onThreadStarted(mlt_properties owner, GLWidget *self)
{
Q_UNUSED(owner)
self->startGlsl();
}
void GLWidget::releaseMonitor()
{
emit lockMonitor(false);
}
void GLWidget::lockMonitor()
{
emit lockMonitor(true);
}
void GLWidget::stopGlsl()
{
if (m_consumer) {
m_consumer->purge();
}
// C & D
// TODO This is commented out for now because it is causing crashes.
// Technically, this should be the correct thing to do, but it appears
// some changes have created regression (see shotcut)
// with respect to restarting the consumer in GPU mode.
// m_glslManager->fire_event("close glsl");
m_texture[0] = 0;
}
static void onThreadStopped(mlt_properties owner, GLWidget *self)
{
Q_UNUSED(owner)
self->stopGlsl();
}
void GLWidget::slotSwitchAudioOverlay(bool enable)
{
KdenliveSettings::setDisplayAudioOverlay(enable);
if (m_audioWaveDisplayed && !enable) {
- if ((m_producer != nullptr) && m_producer->get_int("video_index") != -1) {
+ if (m_producer && m_producer->get_int("video_index") != -1) {
// We have a video producer, disable filter
removeAudioOverlay();
}
}
- if (enable && !m_audioWaveDisplayed && m_producer != nullptr) {
+ if (enable && !m_audioWaveDisplayed && m_producer) {
createAudioOverlay(m_producer->get_int("video_index") == -1);
}
}
-int GLWidget::setProducer(Mlt::Producer *producer, bool isActive, int position)
+int GLWidget::setProducer(std::shared_ptr producer, bool isActive, int position)
{
int error = 0;
QString currentId;
int consumerPosition = 0;
currentId = m_producer->parent().get("kdenlive:id");
- if (producer != nullptr) {
+ if (producer) {
m_producer = producer;
} else {
if (currentId == QLatin1String("black")) {
return 0;
}
if (m_audioWaveDisplayed) {
removeAudioOverlay();
}
- m_producer = &*m_blackClip;
+ m_producer = m_blackClip;
}
// redundant check. postcondition of above is m_producer != null
if (m_producer) {
m_producer->set_speed(0);
if (m_consumer) {
consumerPosition = m_consumer->position();
m_consumer->stop();
if (!m_consumer->is_stopped()) {
m_consumer->stop();
}
}
error = reconfigure();
if (error == 0) {
// The profile display aspect ratio may have changed.
resizeGL(width(), height());
}
} else {
return error;
}
if (!m_consumer) {
return error;
}
consumerPosition = m_consumer->position();
if (m_producer->get_int("video_index") == -1) {
// This is an audio only clip, attach visualization filter. Currently, the filter crashes MLT when Movit accel is used
if (!m_audioWaveDisplayed) {
createAudioOverlay(true);
} else if (m_consumer) {
if (KdenliveSettings::gpu_accel()) {
removeAudioOverlay();
} else {
adjustAudioOverlay(true);
}
}
} else if (m_audioWaveDisplayed && (m_consumer != nullptr)) {
// This is not an audio clip, hide wave
if (KdenliveSettings::displayAudioOverlay()) {
adjustAudioOverlay(m_producer->get_int("video_index") == -1);
} else {
removeAudioOverlay();
}
} else if (KdenliveSettings::displayAudioOverlay()) {
createAudioOverlay(false);
}
if (position == -1 && m_producer->parent().get("kdenlive:id") == currentId) {
position = consumerPosition;
}
if (isActive) {
startConsumer();
}
m_proxy->requestSeekPosition(position > 0 ? position : m_producer->position());
return error;
}
int GLWidget::droppedFrames() const
{
return (m_consumer ? m_consumer->get_int("drop_count") : 0);
}
void GLWidget::resetDrops()
{
if (m_consumer) {
m_consumer->set("drop_count", 0);
}
}
void GLWidget::createAudioOverlay(bool isAudio)
{
if (!m_consumer) {
return;
}
if (isAudio && KdenliveSettings::gpu_accel()) {
// Audiowaveform filter crashes on Movit + audio clips)
return;
}
Mlt::Filter f(*m_monitorProfile, "audiowaveform");
if (f.is_valid()) {
// f.set("show_channel", 1);
f.set("color.1", "0xffff0099");
f.set("fill", 1);
if (isAudio) {
// Fill screen
f.set("rect", "0,0,100%,100%");
} else {
// Overlay on lower part of the screen
f.set("rect", "0,80%,100%,20%");
}
m_consumer->attach(f);
m_audioWaveDisplayed = true;
}
}
void GLWidget::removeAudioOverlay()
{
Mlt::Service sourceService(m_consumer->get_service());
// move all effects to the correct producer
int ct = 0;
Mlt::Filter *filter = sourceService.filter(ct);
while (filter != nullptr) {
QString srv = filter->get("mlt_service");
if (srv == QLatin1String("audiowaveform")) {
sourceService.detach(*filter);
delete filter;
break;
} else {
ct++;
}
filter = sourceService.filter(ct);
}
m_audioWaveDisplayed = false;
}
void GLWidget::adjustAudioOverlay(bool isAudio)
{
Mlt::Service sourceService(m_consumer->get_service());
// move all effects to the correct producer
int ct = 0;
Mlt::Filter *filter = sourceService.filter(ct);
while (filter != nullptr) {
QString srv = filter->get("mlt_service");
if (srv == QLatin1String("audiowaveform")) {
if (isAudio) {
filter->set("rect", "0,0,100%,100%");
} else {
filter->set("rect", "0,80%,100%,20%");
}
break;
} else {
ct++;
}
filter = sourceService.filter(ct);
}
}
void GLWidget::stopCapture()
{
if (strcmp(m_consumer->get("mlt_service"), "multi") == 0) {
m_consumer->set("refresh", 0);
m_consumer->purge();
m_consumer->stop();
}
}
int GLWidget::reconfigureMulti(const QString ¶ms, const QString &path, Mlt::Profile *profile)
{
QString serviceName = property("mlt_service").toString();
if ((m_consumer == nullptr) || !m_consumer->is_valid() || strcmp(m_consumer->get("mlt_service"), "multi") != 0) {
if (m_consumer) {
m_consumer->purge();
m_consumer->stop();
m_consumer.reset();
}
m_consumer.reset(new Mlt::FilteredConsumer(*profile, "multi"));
delete m_threadStartEvent;
m_threadStartEvent = nullptr;
delete m_threadStopEvent;
m_threadStopEvent = nullptr;
delete m_threadCreateEvent;
delete m_threadJoinEvent;
if (m_consumer) {
m_threadCreateEvent = m_consumer->listen("consumer-thread-create", this, (mlt_listener)onThreadCreate);
m_threadJoinEvent = m_consumer->listen("consumer-thread-join", this, (mlt_listener)onThreadJoin);
}
}
if (m_consumer->is_valid()) {
// build sub consumers
// m_consumer->set("mlt_image_format", "yuv422");
reloadProfile();
int volume = KdenliveSettings::volume();
m_consumer->set("0", serviceName.toUtf8().constData());
m_consumer->set("0.mlt_image_format", "yuv422");
m_consumer->set("0.terminate_on_pause", 0);
// m_consumer->set("0.preview_off", 1);
m_consumer->set("0.real_time", 0);
m_consumer->set("0.volume", (double)volume / 100);
if (serviceName.startsWith(QLatin1String("sdl_audio"))) {
#ifdef Q_OS_WIN
m_consumer->set("0.audio_buffer", 2048);
#else
m_consumer->set("0.audio_buffer", 512);
#endif
QString audioDevice = KdenliveSettings::audiodevicename();
if (!audioDevice.isEmpty()) {
m_consumer->set("audio_device", audioDevice.toUtf8().constData());
}
QString audioDriver = KdenliveSettings::audiodrivername();
if (!audioDriver.isEmpty()) {
m_consumer->set("audio_driver", audioDriver.toUtf8().constData());
}
}
m_consumer->set("1", "avformat");
m_consumer->set("1.target", path.toUtf8().constData());
// m_consumer->set("1.real_time", -KdenliveSettings::mltthreads());
m_consumer->set("terminate_on_pause", 0);
m_consumer->set("1.terminate_on_pause", 0);
// m_consumer->set("1.terminate_on_pause", 0);// was commented out. restoring it fixes mantis#3415 - FFmpeg recording freezes
QStringList paramList = params.split(' ', QString::SkipEmptyParts);
for (int i = 0; i < paramList.count(); ++i) {
QString key = "1." + paramList.at(i).section(QLatin1Char('='), 0, 0);
QString value = paramList.at(i).section(QLatin1Char('='), 1, 1);
if (value == QLatin1String("%threads")) {
value = QString::number(QThread::idealThreadCount());
}
m_consumer->set(key.toUtf8().constData(), value.toUtf8().constData());
}
// Connect the producer to the consumer - tell it to "run" later
delete m_displayEvent;
// C & D
if (m_glslManager) {
// D
if (m_openGLSync) {
m_displayEvent = m_consumer->listen("consumer-frame-show", this, (mlt_listener)on_gl_frame_show);
} else {
// C
m_displayEvent = m_consumer->listen("consumer-frame-show", this, (mlt_listener)on_gl_nosync_frame_show);
}
} else {
// A & B
m_displayEvent = m_consumer->listen("consumer-frame-show", this, (mlt_listener)on_frame_show);
}
- m_consumer->connect(*m_producer);
+ m_consumer->connect(*m_producer.get());
m_consumer->start();
return 0;
}
return -1;
}
int GLWidget::reconfigure(Mlt::Profile *profile)
{
int error = 0;
// use SDL for audio, OpenGL for video
QString serviceName = property("mlt_service").toString();
if (profile) {
reloadProfile();
m_blackClip.reset(new Mlt::Producer(*profile, "color:black"));
m_blackClip->set("kdenlive:id", "black");
}
if ((m_consumer == nullptr) || !m_consumer->is_valid() || strcmp(m_consumer->get("mlt_service"), "multi") == 0) {
if (m_consumer) {
m_consumer->purge();
m_consumer->stop();
m_consumer.reset();
}
QString audioBackend = (KdenliveSettings::external_display()) ? QString("decklink:%1").arg(KdenliveSettings::blackmagic_output_device())
: KdenliveSettings::audiobackend();
if (serviceName.isEmpty() || serviceName != audioBackend) {
m_consumer.reset(new Mlt::FilteredConsumer(*m_monitorProfile, audioBackend.toLatin1().constData()));
if (m_consumer->is_valid()) {
serviceName = audioBackend;
setProperty("mlt_service", serviceName);
if (KdenliveSettings::external_display()) {
m_consumer->set("terminate_on_pause", 0);
}
} else {
// Warning, audio backend unavailable on system
m_consumer.reset();
QStringList backends = {"sdl2_audio", "sdl_audio", "rtaudio"};
for (const QString &bk : backends) {
if (bk == audioBackend) {
// Already tested
continue;
}
m_consumer.reset(new Mlt::FilteredConsumer(*m_monitorProfile, bk.toLatin1().constData()));
if (m_consumer->is_valid()) {
if (audioBackend == KdenliveSettings::sdlAudioBackend()) {
// switch sdl audio backend
KdenliveSettings::setSdlAudioBackend(bk);
}
qDebug() << "++++++++\nSwitching audio backend to: " << bk << "\n++++++++++";
KdenliveSettings::setAudiobackend(bk);
serviceName = bk;
setProperty("mlt_service", serviceName);
break;
} else {
m_consumer.reset();
}
}
if (!m_consumer) {
qWarning() << "WARNING, NO AUDIO BACKEND FOUND";
return -1;
}
}
}
delete m_threadStartEvent;
m_threadStartEvent = nullptr;
delete m_threadStopEvent;
m_threadStopEvent = nullptr;
delete m_threadCreateEvent;
delete m_threadJoinEvent;
if (m_consumer) {
m_threadCreateEvent = m_consumer->listen("consumer-thread-create", this, (mlt_listener)onThreadCreate);
m_threadJoinEvent = m_consumer->listen("consumer-thread-join", this, (mlt_listener)onThreadJoin);
}
}
if (m_consumer->is_valid()) {
// Connect the producer to the consumer - tell it to "run" later
if (m_producer) {
- m_consumer->connect(*m_producer);
+ m_consumer->connect(*m_producer.get());
// m_producer->set_speed(0.0);
}
int dropFrames = realTime();
if (!KdenliveSettings::monitor_dropframes()) {
dropFrames = -dropFrames;
}
m_consumer->set("real_time", dropFrames);
// C & D
if (m_glslManager) {
if (!m_threadStartEvent) {
m_threadStartEvent = m_consumer->listen("consumer-thread-started", this, (mlt_listener)onThreadStarted);
}
if (!m_threadStopEvent) {
m_threadStopEvent = m_consumer->listen("consumer-thread-stopped", this, (mlt_listener)onThreadStopped);
}
if (!serviceName.startsWith(QLatin1String("decklink"))) {
m_consumer->set("mlt_image_format", "glsl");
}
} else {
// A & B
m_consumer->set("mlt_image_format", "yuv422");
}
delete m_displayEvent;
// C & D
if (m_glslManager) {
m_displayEvent = m_consumer->listen("consumer-frame-show", this, (mlt_listener)on_gl_frame_show);
} else {
// A & B
m_displayEvent = m_consumer->listen("consumer-frame-show", this, (mlt_listener)on_frame_show);
}
int volume = KdenliveSettings::volume();
if (serviceName.startsWith(QLatin1String("sdl_audio"))) {
QString audioDevice = KdenliveSettings::audiodevicename();
if (!audioDevice.isEmpty()) {
m_consumer->set("audio_device", audioDevice.toUtf8().constData());
}
QString audioDriver = KdenliveSettings::audiodrivername();
if (!audioDriver.isEmpty()) {
m_consumer->set("audio_driver", audioDriver.toUtf8().constData());
}
}
/*if (!m_monitorProfile->progressive())
m_consumer->set("progressive", property("progressive").toBool());*/
m_consumer->set("volume", volume / 100.0);
// m_consumer->set("progressive", 1);
m_consumer->set("rescale", KdenliveSettings::mltinterpolation().toUtf8().constData());
m_consumer->set("deinterlace_method", KdenliveSettings::mltdeinterlacer().toUtf8().constData());
/*
#ifdef Q_OS_WIN
m_consumer->set("audio_buffer", 2048);
#else
m_consumer->set("audio_buffer", 512);
#endif
*/
m_consumer->set("buffer", 25);
m_consumer->set("prefill", 1);
m_consumer->set("scrub_audio", 1);
if (KdenliveSettings::monitor_gamma() == 0) {
m_consumer->set("color_trc", "iec61966_2_1");
} else {
m_consumer->set("color_trc", "bt709");
}
} else {
// Cleanup on error
error = 2;
}
return error;
}
float GLWidget::zoom() const
{
return m_zoom;
}
float GLWidget::scale() const
{
return (double)m_rect.width() / m_monitorProfile->width() * m_zoom;
}
Mlt::Profile *GLWidget::profile()
{
return m_monitorProfile;
}
void GLWidget::reloadProfile()
{
auto &profile = pCore->getCurrentProfile();
m_monitorProfile->get_profile()->description = strdup(profile->description().toUtf8().constData());
m_monitorProfile->set_colorspace(profile->colorspace());
m_monitorProfile->set_frame_rate(profile->frame_rate_num(), profile->frame_rate_den());
m_monitorProfile->set_height(profile->height());
m_monitorProfile->set_width(profile->width());
m_monitorProfile->set_progressive(static_cast(profile->progressive()));
m_monitorProfile->set_sample_aspect(profile->sample_aspect_num(), profile->sample_aspect_den());
m_monitorProfile->set_display_aspect(profile->display_aspect_num(), profile->display_aspect_den());
m_monitorProfile->set_explicit(1);
// The profile display aspect ratio may have changed.
resizeGL(width(), height());
refreshSceneLayout();
}
QSize GLWidget::profileSize() const
{
return QSize(m_monitorProfile->width(), m_monitorProfile->height());
}
QRect GLWidget::displayRect() const
{
return m_rect;
}
QPoint GLWidget::offset() const
{
return QPoint(m_offset.x() - ((int)((float)m_monitorProfile->width() * m_zoom) - width()) / 2,
m_offset.y() - ((int)((float)m_monitorProfile->height() * m_zoom) - height()) / 2);
}
void GLWidget::setZoom(float zoom)
{
double zoomRatio = zoom / m_zoom;
m_zoom = zoom;
emit zoomChanged();
if (rootObject()) {
rootObject()->setProperty("zoom", m_zoom);
double scalex = rootObject()->property("scalex").toDouble() * zoomRatio;
rootObject()->setProperty("scalex", scalex);
double scaley = rootObject()->property("scaley").toDouble() * zoomRatio;
rootObject()->setProperty("scaley", scaley);
}
update();
}
void GLWidget::onFrameDisplayed(const SharedFrame &frame)
{
m_contextSharedAccess.lock();
m_sharedFrame = frame;
m_sendFrame = sendFrameForAnalysis;
m_contextSharedAccess.unlock();
update();
}
void GLWidget::mouseReleaseEvent(QMouseEvent *event)
{
QQuickView::mouseReleaseEvent(event);
if (m_dragStart.isNull() && m_panStart.isNull() && (rootObject() != nullptr) && rootObject()->objectName() != QLatin1String("root") &&
!(event->modifiers() & Qt::ControlModifier)) {
event->ignore();
return;
}
if (!m_dragStart.isNull() && m_panStart.isNull() && ((event->button() & Qt::LeftButton) != 0u) && !event->isAccepted()) {
emit monitorPlay();
}
m_dragStart = QPoint();
m_panStart = QPoint();
setCursor(Qt::ArrowCursor);
}
void GLWidget::mouseDoubleClickEvent(QMouseEvent *event)
{
QQuickView::mouseDoubleClickEvent(event);
if (event->isAccepted()) {
return;
}
if ((rootObject() == nullptr) || rootObject()->objectName() != QLatin1String("rooteffectscene")) {
emit switchFullScreen();
}
event->accept();
}
void GLWidget::setOffsetX(int x, int max)
{
m_offset.setX(x);
emit offsetChanged();
if (rootObject()) {
rootObject()->setProperty("offsetx", m_zoom > 1.0f ? x - max / 2.0 - 10 : 0);
}
update();
}
void GLWidget::setOffsetY(int y, int max)
{
m_offset.setY(y);
if (rootObject()) {
rootObject()->setProperty("offsety", m_zoom > 1.0f ? y - max / 2.0 - 10 : 0);
}
update();
}
int GLWidget::realTime() const
{
// C & D
if (m_glslManager) {
return 1;
}
return KdenliveSettings::mltthreads();
}
std::shared_ptr GLWidget::consumer()
{
return m_consumer;
}
void GLWidget::updateGamma()
{
reconfigure();
}
void GLWidget::resetConsumer(bool fullReset)
{
if (fullReset && m_consumer) {
m_consumer->purge();
m_consumer->stop();
m_consumer.reset();
}
reconfigure();
}
const QString GLWidget::sceneList(const QString &root, const QString &fullPath)
{
QString playlist;
qCDebug(KDENLIVE_LOG) << " * * *Setting document xml root: " << root;
Mlt::Consumer xmlConsumer(*m_monitorProfile, "xml", fullPath.isEmpty() ? "kdenlive_playlist" : fullPath.toUtf8().constData());
if (!root.isEmpty()) {
xmlConsumer.set("root", root.toUtf8().constData());
}
if (!xmlConsumer.is_valid()) {
return QString();
}
m_producer->optimise();
xmlConsumer.set("terminate_on_pause", 1);
xmlConsumer.set("store", "kdenlive");
xmlConsumer.set("time_format", "clock");
// Disabling meta creates cleaner files, but then we don't have access to metadata on the fly (meta channels, etc)
// And we must use "avformat" instead of "avformat-novalidate" on project loading which causes a big delay on project opening
// xmlConsumer.set("no_meta", 1);
Mlt::Producer prod(m_producer->get_producer());
if (!prod.is_valid()) {
return QString();
}
xmlConsumer.connect(prod);
xmlConsumer.run();
playlist = fullPath.isEmpty() ? QString::fromUtf8(xmlConsumer.get("kdenlive_playlist")) : fullPath;
return playlist;
}
void GLWidget::updateTexture(GLuint yName, GLuint uName, GLuint vName)
{
m_texture[0] = yName;
m_texture[1] = uName;
m_texture[2] = vName;
m_sendFrame = sendFrameForAnalysis;
// update();
}
// MLT consumer-frame-show event handler
void GLWidget::on_frame_show(mlt_consumer, void *self, mlt_frame frame_ptr)
{
Mlt::Frame frame(frame_ptr);
if (frame.get_int("rendered") != 0) {
GLWidget *widget = static_cast(self);
int timeout = (widget->consumer()->get_int("real_time") > 0) ? 0 : 1000;
if ((widget->m_frameRenderer != nullptr) && widget->m_frameRenderer->semaphore()->tryAcquire(1, timeout)) {
QMetaObject::invokeMethod(widget->m_frameRenderer, "showFrame", Qt::QueuedConnection, Q_ARG(Mlt::Frame, frame));
}
}
}
void GLWidget::on_gl_nosync_frame_show(mlt_consumer, void *self, mlt_frame frame_ptr)
{
Mlt::Frame frame(frame_ptr);
if (frame.get_int("rendered") != 0) {
GLWidget *widget = static_cast(self);
int timeout = (widget->consumer()->get_int("real_time") > 0) ? 0 : 1000;
if ((widget->m_frameRenderer != nullptr) && widget->m_frameRenderer->semaphore()->tryAcquire(1, timeout)) {
QMetaObject::invokeMethod(widget->m_frameRenderer, "showGLNoSyncFrame", Qt::QueuedConnection, Q_ARG(Mlt::Frame, frame));
}
}
}
void GLWidget::on_gl_frame_show(mlt_consumer, void *self, mlt_frame frame_ptr)
{
Mlt::Frame frame(frame_ptr);
if (frame.get_int("rendered") != 0) {
GLWidget *widget = static_cast(self);
int timeout = (widget->consumer()->get_int("real_time") > 0) ? 0 : 1000;
if ((widget->m_frameRenderer != nullptr) && widget->m_frameRenderer->semaphore()->tryAcquire(1, timeout)) {
QMetaObject::invokeMethod(widget->m_frameRenderer, "showGLFrame", Qt::QueuedConnection, Q_ARG(Mlt::Frame, frame));
}
}
}
RenderThread::RenderThread(thread_function_t function, void *data, QOpenGLContext *context, QSurface *surface)
: QThread(nullptr)
, m_function(function)
, m_data(data)
, m_context(nullptr)
, m_surface(surface)
{
if (context) {
m_context = new QOpenGLContext;
m_context->setFormat(context->format());
m_context->setShareContext(context);
m_context->create();
m_context->moveToThread(this);
}
}
RenderThread::~RenderThread()
{
// would otherwise leak if RenderThread is allocated with a context but not run.
// safe post-run
delete m_context;
}
// TODO: missing some exception handling?
void RenderThread::run()
{
if (m_context) {
m_context->makeCurrent(m_surface);
}
m_function(m_data);
if (m_context) {
m_context->doneCurrent();
delete m_context;
m_context = nullptr;
}
}
FrameRenderer::FrameRenderer(QOpenGLContext *shareContext, QSurface *surface, GLWidget::ClientWaitSync_fp clientWaitSync)
: QThread(nullptr)
, m_semaphore(3)
, m_context(nullptr)
, m_surface(surface)
, m_ClientWaitSync(clientWaitSync)
, m_gl32(nullptr)
, sendAudioForAnalysis(false)
{
Q_ASSERT(shareContext);
m_renderTexture[0] = m_renderTexture[1] = m_renderTexture[2] = 0;
m_displayTexture[0] = m_displayTexture[1] = m_displayTexture[2] = 0;
// B & C & D
if (KdenliveSettings::gpu_accel() || shareContext->supportsThreadedOpenGL()) {
m_context = new QOpenGLContext;
m_context->setFormat(shareContext->format());
m_context->setShareContext(shareContext);
m_context->create();
m_context->moveToThread(this);
}
setObjectName(QStringLiteral("FrameRenderer"));
moveToThread(this);
start();
}
FrameRenderer::~FrameRenderer()
{
delete m_context;
delete m_gl32;
}
void FrameRenderer::showFrame(Mlt::Frame frame)
{
int width = 0;
int height = 0;
mlt_image_format format = mlt_image_yuv420p;
frame.get_image(format, width, height);
// Save this frame for future use and to keep a reference to the GL Texture.
m_displayFrame = SharedFrame(frame);
if ((m_context != nullptr) && m_context->isValid()) {
m_context->makeCurrent(m_surface);
// Upload each plane of YUV to a texture.
QOpenGLFunctions *f = m_context->functions();
uploadTextures(m_context, m_displayFrame, m_renderTexture);
f->glBindTexture(GL_TEXTURE_2D, 0);
check_error(f);
f->glFinish();
for (int i = 0; i < 3; ++i) {
std::swap(m_renderTexture[i], m_displayTexture[i]);
}
emit textureReady(m_displayTexture[0], m_displayTexture[1], m_displayTexture[2]);
m_context->doneCurrent();
}
// The frame is now done being modified and can be shared with the rest
// of the application.
emit frameDisplayed(m_displayFrame);
m_semaphore.release();
}
void FrameRenderer::showGLFrame(Mlt::Frame frame)
{
if ((m_context != nullptr) && m_context->isValid()) {
int width = 0;
int height = 0;
frame.set("movit.convert.use_texture", 1);
mlt_image_format format = mlt_image_glsl_texture;
frame.get_image(format, width, height);
m_context->makeCurrent(m_surface);
pipelineSyncToFrame(frame);
m_context->functions()->glFinish();
m_context->doneCurrent();
// Save this frame for future use and to keep a reference to the GL Texture.
m_displayFrame = SharedFrame(frame);
}
// The frame is now done being modified and can be shared with the rest
// of the application.
emit frameDisplayed(m_displayFrame);
m_semaphore.release();
}
void FrameRenderer::showGLNoSyncFrame(Mlt::Frame frame)
{
if ((m_context != nullptr) && m_context->isValid()) {
int width = 0;
int height = 0;
frame.set("movit.convert.use_texture", 1);
mlt_image_format format = mlt_image_glsl_texture;
frame.get_image(format, width, height);
m_context->makeCurrent(m_surface);
m_context->functions()->glFinish();
m_context->doneCurrent();
// Save this frame for future use and to keep a reference to the GL Texture.
m_displayFrame = SharedFrame(frame);
}
// The frame is now done being modified and can be shared with the rest
// of the application.
emit frameDisplayed(m_displayFrame);
m_semaphore.release();
}
void FrameRenderer::cleanup()
{
if ((m_renderTexture[0] != 0u) && (m_renderTexture[1] != 0u) && (m_renderTexture[2] != 0u)) {
m_context->makeCurrent(m_surface);
m_context->functions()->glDeleteTextures(3, m_renderTexture);
if ((m_displayTexture[0] != 0u) && (m_displayTexture[1] != 0u) && (m_displayTexture[2] != 0u)) {
m_context->functions()->glDeleteTextures(3, m_displayTexture);
}
m_context->doneCurrent();
m_renderTexture[0] = m_renderTexture[1] = m_renderTexture[2] = 0;
m_displayTexture[0] = m_displayTexture[1] = m_displayTexture[2] = 0;
}
}
// D
void FrameRenderer::pipelineSyncToFrame(Mlt::Frame &frame)
{
GLsync sync = (GLsync)frame.get_data("movit.convert.fence");
if (!sync) return;
#ifdef Q_OS_WIN
// On Windows, use QOpenGLFunctions_3_2_Core instead of getProcAddress.
// TODO: move to initialization of m_ClientWaitSync
if (!m_gl32) {
m_gl32 = m_context->versionFunctions();
if (m_gl32) {
m_gl32->initializeOpenGLFunctions();
}
}
if (m_gl32) {
m_gl32->glClientWaitSync(sync, 0, GL_TIMEOUT_IGNORED);
check_error(m_context->functions());
}
#else
if (m_ClientWaitSync) {
m_ClientWaitSync(sync, 0, GL_TIMEOUT_IGNORED);
check_error(m_context->functions());
}
#endif // Q_OS_WIN
}
void GLWidget::setAudioThumb(int channels, const QVariantList &audioCache)
{
if (!rootObject()) return;
QmlAudioThumb *audioThumbDisplay = rootObject()->findChild(QStringLiteral("audiothumb"));
if (!audioThumbDisplay) return;
QImage img(width(), height() / 6, QImage::Format_ARGB32_Premultiplied);
img.fill(Qt::transparent);
if (!audioCache.isEmpty() && channels > 0) {
int audioLevelCount = audioCache.count() - 1;
// simplified audio
QPainter painter(&img);
QRectF mappedRect(0, 0, img.width(), img.height());
int channelHeight = mappedRect.height();
double value;
double scale = (double)width() / (audioLevelCount / channels);
if (scale < 1) {
painter.setPen(QColor(80, 80, 150, 200));
for (int i = 0; i < img.width(); i++) {
int framePos = i / scale;
value = audioCache.at(qMin(framePos * channels, audioLevelCount)).toDouble() / 256;
for (int channel = 1; channel < channels; channel++) {
value = qMax(value, audioCache.at(qMin(framePos * channels + channel, audioLevelCount)).toDouble() / 256);
}
painter.drawLine(i, mappedRect.bottom() - (value * channelHeight), i, mappedRect.bottom());
}
} else {
QPainterPath positiveChannelPath;
positiveChannelPath.moveTo(0, mappedRect.bottom());
for (int i = 0; i < audioLevelCount / channels; i++) {
value = audioCache.at(qMin(i * channels, audioLevelCount)).toDouble() / 256;
for (int channel = 1; channel < channels; channel++) {
value = qMax(value, audioCache.at(qMin(i * channels + channel, audioLevelCount)).toDouble() / 256);
}
positiveChannelPath.lineTo(i * scale, mappedRect.bottom() - (value * channelHeight));
}
positiveChannelPath.lineTo(mappedRect.right(), mappedRect.bottom());
painter.setPen(Qt::NoPen);
painter.setBrush(QBrush(QColor(80, 80, 150, 200)));
painter.drawPath(positiveChannelPath);
}
painter.end();
}
audioThumbDisplay->setImage(img);
}
void GLWidget::refreshSceneLayout()
{
if (!rootObject()) {
return;
}
rootObject()->setProperty("profile", QPoint(m_monitorProfile->width(), m_monitorProfile->height()));
rootObject()->setProperty("scalex", (double)m_rect.width() / m_monitorProfile->width() * m_zoom);
rootObject()->setProperty("scaley", (double)m_rect.width() / (((double)m_monitorProfile->height() * m_monitorProfile->dar() / m_monitorProfile->width())) /
m_monitorProfile->width() * m_zoom);
}
void GLWidget::switchPlay(bool play, double speed)
{
m_proxy->setSeekPosition(-1);
- if ((m_producer == nullptr) || (m_consumer == nullptr)) {
+ if (!m_producer || !m_consumer) {
return;
}
if (m_isZoneMode) {
resetZoneMode();
}
if (play) {
if (m_id == Kdenlive::ClipMonitor && m_consumer->position() == m_producer->get_out()) {
m_producer->seek(0);
}
m_producer->set_speed(speed);
m_consumer->start();
m_consumer->set("refresh", 1);
} else {
m_producer->set_speed(0);
m_producer->seek(m_consumer->position() + 1);
m_consumer->purge();
m_consumer->start();
}
}
bool GLWidget::playZone(bool loop)
{
if (!m_producer) {
return false;
}
m_proxy->setSeekPosition(-1);
m_producer->seek(m_proxy->zoneIn());
m_producer->set_speed(0);
m_consumer->purge();
m_producer->set("out", m_proxy->zoneOut());
m_producer->set_speed(1.0);
if (m_consumer->is_stopped()) {
m_consumer->start();
}
m_consumer->set("refresh", 1);
m_isZoneMode = true;
m_isLoopMode = loop;
return true;
}
bool GLWidget::loopClip()
{
if (!m_producer) {
return false;
}
m_proxy->setSeekPosition(-1);
m_producer->seek(0);
m_producer->set_speed(0);
m_consumer->purge();
m_producer->set("out", m_producer->get_playtime());
m_producer->set_speed(1.0);
if (m_consumer->is_stopped()) {
m_consumer->start();
}
m_consumer->set("refresh", 1);
m_isZoneMode = true;
m_isLoopMode = true;
return true;
}
void GLWidget::resetZoneMode()
{
if (!m_isZoneMode && !m_isLoopMode) {
return;
}
m_producer->set("out", m_producer->get_length());
m_isZoneMode = false;
m_isLoopMode = false;
}
MonitorProxy *GLWidget::getControllerProxy()
{
return m_proxy;
}
int GLWidget::getCurrentPos() const
{
return m_proxy->seeking() ? m_proxy->seekPosition() : m_consumer->position();
}
void GLWidget::setRulerInfo(int duration, std::shared_ptr model)
{
rootObject()->setProperty("duration", duration);
if (model != nullptr) {
// we are resetting marker/snap model, reset zone
rootContext()->setContextProperty("markersModel", model.get());
}
}
void GLWidget::startConsumer()
{
if (m_consumer == nullptr) {
return;
}
if (m_consumer->is_stopped() && m_consumer->start() == -1) {
// ARGH CONSUMER BROKEN!!!!
KMessageBox::error(
qApp->activeWindow(),
i18n("Could not create the video preview window.\nThere is something wrong with your Kdenlive install or your driver settings, please fix it."));
if (m_displayEvent) {
delete m_displayEvent;
}
m_displayEvent = nullptr;
m_consumer.reset();
return;
}
m_consumer->set("refresh", 1);
}
void GLWidget::stop()
{
m_refreshTimer.stop();
m_proxy->setSeekPosition(-1);
// why this lock?
QMutexLocker locker(&m_mltMutex);
if (m_producer) {
if (m_isZoneMode) {
resetZoneMode();
}
m_producer->set_speed(0.0);
}
if (m_consumer) {
m_consumer->purge();
if (!m_consumer->is_stopped()) {
m_consumer->stop();
}
}
}
double GLWidget::playSpeed() const
{
if (m_producer) {
return m_producer->get_speed();
}
return 0.0;
}
void GLWidget::setDropFrames(bool drop)
{
// why this lock?
QMutexLocker locker(&m_mltMutex);
if (m_consumer) {
int dropFrames = realTime();
if (!drop) {
dropFrames = -dropFrames;
}
m_consumer->stop();
m_consumer->set("real_time", dropFrames);
if (m_consumer->start() == -1) {
qCWarning(KDENLIVE_LOG) << "ERROR, Cannot start monitor";
}
}
}
int GLWidget::volume() const
{
- if ((m_consumer == nullptr) || (m_producer == nullptr)) {
+ if ((!m_consumer) || (!m_producer)) {
return -1;
}
if (m_consumer->get("mlt_service") == QStringLiteral("multi")) {
return ((int)100 * m_consumer->get_double("0.volume"));
}
return ((int)100 * m_consumer->get_double("volume"));
}
void GLWidget::setVolume(double volume)
{
if (m_consumer) {
if (m_consumer->get("mlt_service") == QStringLiteral("multi")) {
m_consumer->set("0.volume", volume);
} else {
m_consumer->set("volume", volume);
}
}
}
int GLWidget::duration() const
{
- if (m_producer == nullptr) {
+ if (!m_producer) {
return 0;
}
return m_producer->get_playtime();
}
void GLWidget::setConsumerProperty(const QString &name, const QString &value)
{
QMutexLocker locker(&m_mltMutex);
if (m_consumer) {
m_consumer->set(name.toUtf8().constData(), value.toUtf8().constData());
if (m_consumer->start() == -1) {
qCWarning(KDENLIVE_LOG) << "ERROR, Cannot start monitor";
}
}
}
diff --git a/src/monitor/glwidget.h b/src/monitor/glwidget.h
index 95008dd03..4a40b3a69 100644
--- a/src/monitor/glwidget.h
+++ b/src/monitor/glwidget.h
@@ -1,342 +1,342 @@
/*
* Copyright (c) 2011-2014 Meltytech, LLC
* Author: Dan Dennedy
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see .
*/
#ifndef GLWIDGET_H
#define GLWIDGET_H
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include "bin/model/markerlistmodel.hpp"
#include "definitions.h"
#include "kdenlivesettings.h"
#include "scopes/sharedframe.h"
class QOpenGLFunctions_3_2_Core;
namespace Mlt {
class Filter;
class Producer;
class Consumer;
class Profile;
} // namespace Mlt
class RenderThread;
class FrameRenderer;
class MonitorProxy;
typedef void *(*thread_function_t)(void *);
/* QQuickView that renders an .
*
* Creates an MLT consumer and renders a GL view from the consumer. This pipeline is one of:
*
* A. YUV gl texture w/o GPU filter acceleration
* B. YUV gl texture multithreaded w/o GPU filter acceleration
* C. RGB gl texture multithreaded w/ GPU filter acceleration and no sync
* D. RGB gl texture multithreaded w/ GPU filter acceleration and sync
*/
class GLWidget : public QQuickView, protected QOpenGLFunctions
{
Q_OBJECT
Q_PROPERTY(QRect rect READ rect NOTIFY rectChanged)
Q_PROPERTY(float zoom READ zoom NOTIFY zoomChanged)
Q_PROPERTY(QPoint offset READ offset NOTIFY offsetChanged)
public:
friend class MonitorController;
friend class Monitor;
friend class MonitorProxy;
using ClientWaitSync_fp = GLenum (*)(GLsync, GLbitfield, GLuint64);
GLWidget(int id, QObject *parent = nullptr);
~GLWidget();
int requestedSeekPosition;
void createThread(RenderThread **thread, thread_function_t function, void *data);
void startGlsl();
void stopGlsl();
void clear();
// TODO: currently unused
int reconfigureMulti(const QString ¶ms, const QString &path, Mlt::Profile *profile);
void stopCapture();
int reconfigure(Mlt::Profile *profile = nullptr);
/** @brief Get the current MLT producer playlist.
* @return A string describing the playlist */
const QString sceneList(const QString &root, const QString &fullPath = QString());
int displayWidth() const { return m_rect.width(); }
void updateAudioForAnalysis();
int displayHeight() const { return m_rect.height(); }
QObject *videoWidget() { return this; }
Mlt::Filter *glslManager() const { return m_glslManager; }
QRect rect() const { return m_rect; }
QRect effectRect() const { return m_effectRect; }
float zoom() const;
float scale() const;
QPoint offset() const;
std::shared_ptr consumer();
Mlt::Producer *producer();
QSize profileSize() const;
QRect displayRect() const;
/** @brief set to true if we want to emit a QImage of the frame for analysis */
bool sendFrameForAnalysis;
void updateGamma();
/** @brief delete and rebuild consumer, for example when external display is switched */
void resetConsumer(bool fullReset);
Mlt::Profile *profile();
void reloadProfile();
void lockMonitor();
void releaseMonitor();
int realTime() const;
void setAudioThumb(int channels = 0, const QVariantList &audioCache = QList());
int droppedFrames() const;
void resetDrops();
bool checkFrameNumber(int pos, int offset);
/** @brief Return current timeline position */
int getCurrentPos() const;
/** @brief Requests a monitor refresh */
void requestRefresh();
void setRulerInfo(int duration, std::shared_ptr model = nullptr);
MonitorProxy *getControllerProxy();
bool playZone(bool loop = false);
bool loopClip();
void startConsumer();
void stop();
int rulerHeight() const;
/** @brief return current play producer's playing speed */
double playSpeed() const;
/** @brief Turn drop frame feature on/off */
void setDropFrames(bool drop);
/** @brief Returns current audio volume */
int volume() const;
/** @brief Set audio volume on consumer */
void setVolume(double volume);
/** @brief Returns current producer's duration in frames */
int duration() const;
/** @brief Set a property on the MLT consumer */
void setConsumerProperty(const QString &name, const QString &value);
protected:
void mouseReleaseEvent(QMouseEvent *event) override;
void mouseDoubleClickEvent(QMouseEvent *event) override;
void wheelEvent(QWheelEvent *event) override;
/** @brief Update producer, should ONLY be called from monitor */
- int setProducer(Mlt::Producer *producer, bool isActive, int position = -1);
+ int setProducer(std::shared_ptr producer, bool isActive, int position = -1);
QString frameToTime(int frames) const;
public slots:
void seek(int pos);
void requestSeek();
void setZoom(float zoom);
void setOffsetX(int x, int max);
void setOffsetY(int y, int max);
void slotSwitchAudioOverlay(bool enable);
void slotZoom(bool zoomIn);
void initializeGL();
void releaseAnalyse();
void switchPlay(bool play, double speed = 1.0);
signals:
void frameDisplayed(const SharedFrame &frame);
void dragStarted();
void seekTo(int x);
void gpuNotSupported();
void started();
void paused();
void playing();
void rectChanged();
void zoomChanged();
void offsetChanged();
void monitorPlay();
void switchFullScreen(bool minimizeOnly = false);
void mouseSeek(int eventDelta, uint modifiers);
void startDrag();
void analyseFrame(const QImage &);
void audioSamplesSignal(const audioShortVector &, int, int, int);
void showContextMenu(const QPoint &);
void lockMonitor(bool);
void passKeyEvent(QKeyEvent *);
void panView(const QPoint &diff);
void seekPosition(int);
void consumerPosition(int);
void activateMonitor();
protected:
Mlt::Filter *m_glslManager;
// TODO: MTL has lock/unlock of individual nodes. Use those.
// keeping this for refactoring ease.
QMutex m_mltMutex;
std::shared_ptr m_consumer;
- Mlt::Producer *m_producer;
+ std::shared_ptr m_producer;
Mlt::Profile *m_monitorProfile;
int m_id;
int m_rulerHeight;
private:
QRect m_rect;
QRect m_effectRect;
GLuint m_texture[3];
QOpenGLShaderProgram *m_shader;
QPoint m_panStart;
QPoint m_dragStart;
QSemaphore m_initSem;
QSemaphore m_analyseSem;
bool m_isInitialized;
Mlt::Event *m_threadStartEvent;
Mlt::Event *m_threadStopEvent;
Mlt::Event *m_threadCreateEvent;
Mlt::Event *m_threadJoinEvent;
Mlt::Event *m_displayEvent;
FrameRenderer *m_frameRenderer;
int m_projectionLocation;
int m_modelViewLocation;
int m_vertexLocation;
int m_texCoordLocation;
int m_colorspaceLocation;
int m_textureLocation[3];
QTimer m_refreshTimer;
float m_zoom;
bool m_sendFrame;
bool m_isZoneMode;
bool m_isLoopMode;
QPoint m_offset;
bool m_audioWaveDisplayed;
MonitorProxy *m_proxy;
- QScopedPointer m_blackClip;
+ std::shared_ptr m_blackClip;
static void on_frame_show(mlt_consumer, void *self, mlt_frame frame);
static void on_gl_frame_show(mlt_consumer, void *self, mlt_frame frame_ptr);
static void on_gl_nosync_frame_show(mlt_consumer, void *self, mlt_frame frame_ptr);
void createAudioOverlay(bool isAudio);
void removeAudioOverlay();
void adjustAudioOverlay(bool isAudio);
QOpenGLFramebufferObject *m_fbo;
void refreshSceneLayout();
void resetZoneMode();
/* OpenGL context management. Interfaces to MLT according to the configured render pipeline.
*/
private slots:
void resizeGL(int width, int height);
void updateTexture(GLuint yName, GLuint uName, GLuint vName);
void paintGL();
void onFrameDisplayed(const SharedFrame &frame);
void refresh();
protected:
QMutex m_contextSharedAccess;
QOffscreenSurface m_offscreenSurface;
SharedFrame m_sharedFrame;
QOpenGLContext *m_shareContext;
bool acquireSharedFrameTextures();
void bindShaderProgram();
void createGPUAccelFragmentProg();
void createShader();
void createYUVTextureProjectFragmentProg();
void disableGPUAccel();
void releaseSharedFrameTextures();
// pipeline A - YUV gl texture w/o GPU filter acceleration
// pipeline B - YUV gl texture multithreaded w/o GPU filter acceleration
// pipeline C - RGB gl texture multithreaded w/ GPU filter acceleration and no sync
// pipeline D - RGB gl texture multithreaded w/ GPU filter acceleration and sync
bool m_openGLSync;
bool initGPUAccelSync();
// pipeline C & D
bool initGPUAccel();
bool onlyGLESGPUAccel() const;
// pipeline A & B & C & D
// not null iff D
ClientWaitSync_fp m_ClientWaitSync;
protected:
void resizeEvent(QResizeEvent *event) override;
void mousePressEvent(QMouseEvent *) override;
void mouseMoveEvent(QMouseEvent *) override;
void keyPressEvent(QKeyEvent *event) override;
};
class RenderThread : public QThread
{
Q_OBJECT
public:
RenderThread(thread_function_t function, void *data, QOpenGLContext *context, QSurface *surface);
~RenderThread();
protected:
void run() override;
private:
thread_function_t m_function;
void *m_data;
QOpenGLContext *m_context;
QSurface *m_surface;
};
class FrameRenderer : public QThread
{
Q_OBJECT
public:
explicit FrameRenderer(QOpenGLContext *shareContext, QSurface *surface, GLWidget::ClientWaitSync_fp clientWaitSync);
~FrameRenderer();
QSemaphore *semaphore() { return &m_semaphore; }
QOpenGLContext *context() const { return m_context; }
Q_INVOKABLE void showFrame(Mlt::Frame frame);
Q_INVOKABLE void showGLFrame(Mlt::Frame frame);
Q_INVOKABLE void showGLNoSyncFrame(Mlt::Frame frame);
public slots:
void cleanup();
signals:
void textureReady(GLuint yName, GLuint uName = 0, GLuint vName = 0);
void frameDisplayed(const SharedFrame &frame);
void audioSamplesSignal(const audioShortVector &, int, int, int);
private:
QSemaphore m_semaphore;
SharedFrame m_displayFrame;
QOpenGLContext *m_context;
QSurface *m_surface;
GLWidget::ClientWaitSync_fp m_ClientWaitSync;
void pipelineSyncToFrame(Mlt::Frame &);
public:
GLuint m_renderTexture[3];
GLuint m_displayTexture[3];
QOpenGLFunctions_3_2_Core *m_gl32;
bool sendAudioForAnalysis;
};
#endif
diff --git a/src/monitor/monitor.cpp b/src/monitor/monitor.cpp
index 53848713c..8957e0b9b 100644
--- a/src/monitor/monitor.cpp
+++ b/src/monitor/monitor.cpp
@@ -1,2142 +1,2141 @@
/***************************************************************************
* Copyright (C) 2007 by Jean-Baptiste Mardelle (jb@kdenlive.org) *
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
* This program is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU General Public License for more details. *
* *
* You should have received a copy of the GNU General Public License *
* along with this program; if not, write to the *
* Free Software Foundation, Inc., *
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA *
***************************************************************************/
#include "monitor.h"
#include "bin/bin.h"
#include "bin/projectclip.h"
#include "core.h"
#include "dialogs/profilesdialog.h"
#include "doc/kdenlivedoc.h"
#include "doc/kthumb.h"
#include "glwidget.h"
#include "kdenlivesettings.h"
#include "lib/audio/audioStreamInfo.h"
#include "mainwindow.h"
#include "mltcontroller/clipcontroller.h"
#include "monitorproxy.h"
#include "project/projectmanager.h"
#include "qmlmanager.h"
#include "recmanager.h"
#include "scopes/monitoraudiolevel.h"
#include "timeline2/model/snapmodel.hpp"
#include "transitions/transitionsrepository.hpp"
#include "klocalizedstring.h"
#include
#include
#include
#include
#include
#include
#include
#include
#include "kdenlive_debug.h"
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#define SEEK_INACTIVE (-1)
QuickEventEater::QuickEventEater(QObject *parent)
: QObject(parent)
{
}
bool QuickEventEater::eventFilter(QObject *obj, QEvent *event)
{
switch (event->type()) {
case QEvent::DragEnter: {
QDragEnterEvent *ev = reinterpret_cast(event);
if (ev->mimeData()->hasFormat(QStringLiteral("kdenlive/effect"))) {
ev->acceptProposedAction();
return true;
}
break;
}
case QEvent::DragMove: {
QDragEnterEvent *ev = reinterpret_cast