Split video condition into multiple files

This commit is contained in:
WarmUpTill 2022-02-13 17:35:05 +01:00 committed by WarmUpTill
parent 03900e936b
commit d8dcd91ed9
9 changed files with 385 additions and 345 deletions

View File

@ -16,7 +16,15 @@ else()
endif()
include_directories("${CMAKE_CURRENT_SOURCE_DIR}/../../headers")
set(module_SOURCES macro-condition-video.cpp macro-condition-video.hpp)
# All sources and headers of the OpenCV-based video-condition module, split
# into one file pair per concern (condition logic, preview dialog, slider
# widget, OpenCV helpers).
set(module_SOURCES
    macro-condition-video.cpp
    macro-condition-video.hpp
    video-match-dialog.cpp
    video-match-dialog.hpp
    threshold-slider.cpp
    threshold-slider.hpp
    opencv-helpers.cpp
    opencv-helpers.hpp)
add_library(advanced-scene-switcher-opencv MODULE ${module_SOURCES})
if(BUILD_OUT_OF_TREE)

View File

@ -186,44 +186,6 @@ void MacroConditionVideo::GetScreenshot()
_getNextScreenshot = false;
}
// Assumption is that QImage uses Format_RGBA8888.
// Conversion from: https://github.com/dbzhang800/QtOpenCV
// Wraps the QImage's pixel buffer in a cv::Mat WITHOUT copying.
// NOTE: the returned Mat does not own the data - img must outlive it - and
// the cast drops const, so OpenCV calls may write into the QImage buffer.
cv::Mat QImageToMat(const QImage &img)
{
	if (img.isNull()) {
		return cv::Mat();
	}
	// depth() is bits per pixel, so depth() / 8 is the channel count
	// (4 for RGBA8888); bytesPerLine() preserves any row padding.
	return cv::Mat(img.height(), img.width(), CV_8UC(img.depth() / 8),
		       (uchar *)img.bits(), img.bytesPerLine());
}
// Wraps a cv::Mat holding RGBA data in a QImage WITHOUT copying.
// NOTE: the QImage does not own the data - mat must outlive it.
// Passing mat.step as bytes-per-line is required in case the Mat's rows are
// padded (step > cols * 4).
QImage MatToQImage(const cv::Mat &mat)
{
	if (mat.empty()) {
		return QImage();
	}
	return QImage(mat.data, mat.cols, mat.rows, mat.step,
		      QImage::Format::Format_RGBA8888);
}
// Precompute the matching data for a pattern image: the RGBA pattern, an
// RGB-only copy of it, and a binary mask derived from the alpha channel
// (any non-zero alpha counts as opaque). Returns empty data for a null
// pattern.
PatternMatchData createPatternData(QImage &pattern)
{
	PatternMatchData data;
	if (pattern.isNull()) {
		return data;
	}
	data.rgbaPattern = QImageToMat(pattern);
	std::vector<cv::Mat1b> rgbaChannelsPattern;
	cv::split(data.rgbaPattern, rgbaChannelsPattern);
	// Rebuild an RGB-only pattern from the first three channels.
	std::vector<cv::Mat1b> rgbChanlesPattern(
		rgbaChannelsPattern.begin(), rgbaChannelsPattern.begin() + 3);
	cv::merge(rgbChanlesPattern, data.rgbPattern);
	// Binarize the alpha channel (index 3) into the match mask.
	cv::threshold(rgbaChannelsPattern[3], data.mask, 0, 255,
		      cv::THRESH_BINARY);
	return data;
}
bool MacroConditionVideo::LoadImageFromFile()
{
if (!_matchImage.load(QString::fromStdString(_file))) {
@ -245,47 +207,6 @@ bool MacroConditionVideo::LoadModelData(std::string &path)
return !_objectCascade.empty();
}
// Template-match patternData within img, writing the correlation map to
// result; values below threshold are zeroed (THRESH_TOZERO), so any
// non-zero entry marks a match position. With useAlphaAsMask the match runs
// on the RGB channels only, using the alpha-derived mask so transparent
// pattern pixels are ignored; otherwise the full RGBA data is compared.
// Returns without touching result on invalid input or if the pattern is
// larger than the image.
void matchPattern(QImage &img, PatternMatchData &patternData, double threshold,
		  cv::Mat &result, bool useAlphaAsMask = true)
{
	if (img.isNull() || patternData.rgbaPattern.empty()) {
		return;
	}
	// The pattern must fit inside the image for matchTemplate.
	if (img.height() < patternData.rgbaPattern.rows ||
	    img.width() < patternData.rgbaPattern.cols) {
		return;
	}
	auto i = QImageToMat(img);
	if (useAlphaAsMask) {
		// Strip the alpha channel from the image so it can be
		// matched against the RGB-only pattern.
		std::vector<cv::Mat1b> rgbaChannelsImage;
		cv::split(i, rgbaChannelsImage);
		std::vector<cv::Mat1b> rgbChanlesImage(
			rgbaChannelsImage.begin(),
			rgbaChannelsImage.begin() + 3);
		cv::Mat3b rgbImage;
		cv::merge(rgbChanlesImage, rgbImage);
		// TM_CCORR_NORMED - presumably chosen because it supports a
		// mask argument.
		cv::matchTemplate(rgbImage, patternData.rgbPattern, result,
				  cv::TM_CCORR_NORMED, patternData.mask);
		cv::threshold(result, result, threshold, 0, cv::THRESH_TOZERO);
	} else {
		cv::matchTemplate(i, patternData.rgbaPattern, result,
				  cv::TM_CCOEFF_NORMED);
		cv::threshold(result, result, threshold, 0, cv::THRESH_TOZERO);
	}
}
// Convenience overload: build the PatternMatchData on the fly and run the
// pattern match with it.
void matchPattern(QImage &img, QImage &pattern, double threshold,
		  cv::Mat &result, bool useAlphaAsMask)
{
	auto data = createPatternData(pattern);
	matchPattern(img, data, threshold, result, useAlphaAsMask);
}
bool MacroConditionVideo::ScreenshotContainsPattern()
{
cv::Mat result;
@ -306,24 +227,6 @@ bool MacroConditionVideo::OutputChanged()
return _screenshotData.image != _matchImage;
}
// Run cascade-classifier object detection on the image and return the
// bounding rectangles of all detections; empty on invalid input.
std::vector<cv::Rect> matchObject(QImage &img, cv::CascadeClassifier &cascade,
				  double scaleFactor, int minNeighbors,
				  cv::Size minSize, cv::Size maxSize)
{
	if (img.isNull() || cascade.empty()) {
		return {};
	}
	auto i = QImageToMat(img);
	cv::Mat frameGray;
	// NOTE(review): the data is RGBA (see QImageToMat()), but
	// COLOR_BGR2GRAY assumes BGR order, so the red and blue luminance
	// weights are swapped here - likely should be COLOR_RGBA2GRAY.
	cv::cvtColor(i, frameGray, cv::COLOR_BGR2GRAY);
	// Equalize the histogram to improve detection robustness.
	equalizeHist(frameGray, frameGray);
	std::vector<cv::Rect> objects;
	cascade.detectMultiScale(frameGray, objects, scaleFactor, minNeighbors,
				 0, minSize, maxSize);
	return objects;
}
bool MacroConditionVideo::ScreenshotContainsObject()
{
auto objects = matchObject(_screenshotData.image, _objectCascade,
@ -363,54 +266,6 @@ static inline void populateConditionSelection(QComboBox *list)
}
}
// Slider widget selecting a double in [min, max], shown next to a label that
// displays the current value with _precision decimals.
ThresholdSlider::ThresholdSlider(double min, double max, const QString &label,
				 const QString &description, QWidget *parent)
	: QWidget(parent)
{
	_slider = new QSlider();
	_slider->setOrientation(Qt::Horizontal);
	// QSlider is integer-only, so the double range is scaled by _scale.
	_slider->setRange(min * _scale, max * _scale);
	_value = new QLabel();
	// Seed the label with "<label>0.00" so SetDoubleValueText() can
	// replace the fixed-width numeric suffix in place.
	QString labelText = label + QString("0.");
	for (int i = 0; i < _precision; i++) {
		labelText.append(QString("0"));
	}
	_value->setText(labelText);
	connect(_slider, SIGNAL(valueChanged(int)), this,
		SLOT(NotifyValueChanged(int)));
	QVBoxLayout *mainLayout = new QVBoxLayout();
	QHBoxLayout *sliderLayout = new QHBoxLayout();
	sliderLayout->addWidget(_value);
	sliderLayout->addWidget(_slider);
	mainLayout->addLayout(sliderLayout);
	if (!description.isEmpty()) {
		mainLayout->addWidget(new QLabel(description));
	}
	mainLayout->setContentsMargins(0, 0, 0, 0);
	setLayout(mainLayout);
}
// Programmatically set the slider position from a double and update the
// value label (setValue() may fire NotifyValueChanged first; the explicit
// SetDoubleValueText() afterwards shows the exact, unrounded value).
void ThresholdSlider::SetDoubleValue(double value)
{
	_slider->setValue(value * _scale);
	SetDoubleValueText(value);
}
// Slot for QSlider::valueChanged(int): convert the raw slider position back
// to a double, refresh the label, and re-emit as DoubleValueChanged.
void ThresholdSlider::NotifyValueChanged(int value)
{
	double doubleValue = value / _scale;
	SetDoubleValueText(doubleValue);
	emit DoubleValueChanged(doubleValue);
}
// Replace the numeric suffix of the value label with value formatted to
// _precision decimals (the rest of the label text is kept).
void ThresholdSlider::SetDoubleValueText(double value)
{
	QString labelText = _value->text();
	labelText.chop(_precision + 2); // 2 for the part left of the "."
	labelText.append(QString::number(value, 'f', _precision));
	_value->setText(labelText);
}
MacroConditionVideoEdit::MacroConditionVideoEdit(
QWidget *parent, std::shared_ptr<MacroConditionVideo> entryData)
: QWidget(parent), _matchDialog(this, entryData.get())
@ -825,34 +680,6 @@ void MacroConditionVideoEdit::ThrottleCountChanged(int value)
_entryData->_throttleCount = value / GetSwitcher()->interval;
}
// Draw a red rectangle at every position in matchResult that survived
// thresholding (non-zero) and return the annotated copy of the image.
// NOTE(review): matchTemplate's result has (W-w+1) x (H-h+1) valid cells;
// looping to rows - 1 / cols - 1 skips matches in the result's last row and
// column - looks like an off-by-one, verify intent.
QImage markPatterns(cv::Mat &matchResult, QImage &image, QImage &pattern)
{
	auto matchImg = QImageToMat(image);
	for (int row = 0; row < matchResult.rows - 1; row++) {
		for (int col = 0; col < matchResult.cols - 1; col++) {
			if (matchResult.at<float>(row, col) != 0.0) {
				rectangle(matchImg, {col, row},
					  cv::Point(col + pattern.width(),
						    row + pattern.height()),
					  cv::Scalar(255, 0, 0, 255), 2, 8, 0);
			}
		}
	}
	return MatToQImage(matchImg);
}
// Draw a red rectangle around every detected object rectangle and return the
// annotated copy of the image.
QImage markObjects(QImage &image, std::vector<cv::Rect> &objects)
{
	auto frame = QImageToMat(image);
	for (size_t i = 0; i < objects.size(); i++) {
		rectangle(frame, cv::Point(objects[i].x, objects[i].y),
			  cv::Point(objects[i].x + objects[i].width,
				    objects[i].y + objects[i].height),
			  cv::Scalar(255, 0, 0, 255), 2, 8, 0);
	}
	return MatToQImage(frame);
}
void MacroConditionVideoEdit::ShowMatchClicked()
{
_matchDialog.show();
@ -968,116 +795,3 @@ void MacroConditionVideoEdit::UpdateEntryData()
GetSwitcher()->interval);
SetWidgetVisibility();
}
// Dialog showing a periodically refreshed screenshot of the condition's
// video source with the current match results highlighted.
// conditionData is borrowed, not owned.
ShowMatchDialog::ShowMatchDialog(QWidget *parent,
				 MacroConditionVideo *conditionData)
	: QDialog(parent),
	  _conditionData(conditionData),
	  _imageLabel(new QLabel),
	  _scrollArea(new QScrollArea)
{
	setWindowTitle("Advanced Scene Switcher");
	_statusLabel = new QLabel(obs_module_text(
		"AdvSceneSwitcher.condition.video.showMatch.loading"));
	_scrollArea->setBackgroundRole(QPalette::Dark);
	// The scroll area lets the preview exceed the dialog size.
	_scrollArea->setWidget(_imageLabel);
	QVBoxLayout *layout = new QVBoxLayout;
	layout->addWidget(_statusLabel);
	layout->addWidget(_scrollArea);
	setLayout(layout);
}
// Signal the worker thread to stop and wait for it before the widgets it
// references are destroyed.
ShowMatchDialog::~ShowMatchDialog()
{
	_stop = true;
	if (_thread.joinable()) {
		_thread.join();
	}
}
// Start the background loop that refreshes the match preview; no-op if it is
// already running (a joinable thread means it was started before).
void ShowMatchDialog::ShowMatch()
{
	if (_thread.joinable()) {
		return;
	}
	if (!_conditionData) {
		DisplayMessage(obs_module_text(
			"AdvSceneSwitcher.condition.video.screenshotFail"));
		return;
	}
	_thread = std::thread(&ShowMatchDialog::CheckForMatchLoop, this);
}
// Display the given image; invoked on the GUI thread via
// QMetaObject::invokeMethod() from the worker loop.
void ShowMatchDialog::RedrawImage(QImage img)
{
	_imageLabel->setPixmap(QPixmap::fromImage(img));
	_imageLabel->adjustSize();
}
// Worker loop: roughly once per second grab a screenshot of the video
// source, mark the current matches, and hand the result to the GUI thread.
// Runs until _stop is set by the destructor.
void ShowMatchDialog::CheckForMatchLoop()
{
	while (!_stop) {
		auto source = obs_weak_source_get_source(
			_conditionData->_videoSource);
		ScreenshotHelper screenshot(source);
		obs_source_release(source);
		// Give the asynchronous screenshot capture time to finish.
		std::this_thread::sleep_for(std::chrono::seconds(1));
		if (!screenshot.done) {
			// NOTE(review): setText() is called from this worker
			// thread; Qt widgets should only be touched from the
			// GUI thread - consider invokeMethod() here too.
			_statusLabel->setText(obs_module_text(
				"AdvSceneSwitcher.condition.video.screenshotFail"));
			continue;
		}
		if (screenshot.image.width() == 0 ||
		    screenshot.image.height() == 0) {
			_statusLabel->setText(obs_module_text(
				"AdvSceneSwitcher.condition.video.screenshotEmpty"));
			continue;
		}
		auto image = MarkMatch(screenshot.image);
		if (_stop) {
			return;
		}
		// BlockingQueuedConnection: wait until the GUI thread has
		// drawn the image before starting the next iteration.
		QMetaObject::invokeMethod(this, "RedrawImage",
					  Qt::BlockingQueuedConnection,
					  Q_ARG(QImage, image));
	}
}
// Annotate the screenshot with the current pattern / object match results
// and update the status label. Returns a null QImage for conditions other
// than PATTERN and OBJECT. ("resultIamge" is a typo for resultImage.)
QImage ShowMatchDialog::MarkMatch(QImage &screenshot)
{
	QImage resultIamge;
	if (_conditionData->_condition == VideoCondition::PATTERN) {
		cv::Mat result;
		QImage pattern = _conditionData->GetMatchImage();
		matchPattern(screenshot, pattern,
			     _conditionData->_patternThreshold, result,
			     _conditionData->_useAlphaAsMask);
		// Any non-zero cell in result is a match position.
		if (countNonZero(result) == 0) {
			resultIamge = screenshot;
			_statusLabel->setText(obs_module_text(
				"AdvSceneSwitcher.condition.video.patternMatchFail"));
		} else {
			_statusLabel->setText(obs_module_text(
				"AdvSceneSwitcher.condition.video.patternMatchSuccess"));
			resultIamge = markPatterns(result, screenshot, pattern);
		}
	} else if (_conditionData->_condition == VideoCondition::OBJECT) {
		auto objects = matchObject(
			screenshot, _conditionData->_objectCascade,
			_conditionData->_scaleFactor,
			_conditionData->_minNeighbors,
			{_conditionData->_minSizeX, _conditionData->_minSizeY},
			{_conditionData->_maxSizeX, _conditionData->_maxSizeY});
		if (objects.empty()) {
			resultIamge = screenshot;
			_statusLabel->setText(obs_module_text(
				"AdvSceneSwitcher.condition.video.objectMatchFail"));
		} else {
			_statusLabel->setText(obs_module_text(
				"AdvSceneSwitcher.condition.video.objectMatchSuccess"));
			resultIamge = markObjects(screenshot, objects);
		}
	}
	return resultIamge;
}

View File

@ -1,18 +1,16 @@
#pragma once
#include <macro.hpp>
#include <screenshot-helper.hpp>
#include <opencv-helpers.hpp>
#include <file-selection.hpp>
#include <threshold-slider.hpp>
#include <video-match-dialog.hpp>
#include <QWidget>
#include <QComboBox>
#include <QCheckBox>
#include <QHBoxLayout>
#include <QLabel>
#include <QDialog>
#include <QScrollArea>
#include <chrono>
#undef NO // MacOS macro that can conflict with OpenCV
#include <opencv2/opencv.hpp>
enum class VideoCondition {
MATCH,
@ -24,15 +22,6 @@ enum class VideoCondition {
OBJECT,
};
// Preprocessed pattern image used for template matching.
struct PatternMatchData {
	cv::Mat4b rgbaPattern; // full RGBA pattern
	cv::Mat3b rgbPattern;  // pattern without the alpha channel
	cv::Mat1b mask;        // binary mask built from the alpha channel
};
// UI bounds for the cascade classifier's minNeighbors parameter.
constexpr int minMinNeighbors = 3;
constexpr int maxMinNeighbors = 6;
class MacroConditionVideo : public MacroCondition {
public:
MacroConditionVideo(Macro *m) : MacroCondition(m) {}
@ -91,50 +80,6 @@ private:
static const std::string id;
};
// Slider widget for selecting a double value (e.g. a match threshold).
// QSlider only handles ints, so the value is scaled by _scale internally.
class ThresholdSlider : public QWidget {
	Q_OBJECT

public:
	ThresholdSlider(double min = 0., double max = 1.,
			const QString &label = "threshold",
			const QString &description = "", QWidget *parent = 0);
	void SetDoubleValue(double);
public slots:
	void NotifyValueChanged(int value);
signals:
	// Emitted whenever the slider moves, with the rescaled double value.
	void DoubleValueChanged(double value);

private:
	void SetDoubleValueText(double);
	QLabel *_value;  // shows the current value next to the slider
	QSlider *_slider;
	double _scale = 100.0; // slider position = double value * _scale
	int _precision = 2;    // decimals shown in the label
};
// Dialog continuously previewing the match state of a MacroConditionVideo by
// overlaying detected patterns / objects on screenshots of its video source.
class ShowMatchDialog : public QDialog {
	Q_OBJECT

public:
	ShowMatchDialog(QWidget *parent, MacroConditionVideo *_conditionData);
	virtual ~ShowMatchDialog();
	// Start the background preview loop (no-op if already running).
	void ShowMatch();
private slots:
	void RedrawImage(QImage img);

private:
	void CheckForMatchLoop();
	QImage MarkMatch(QImage &screenshot);
	MacroConditionVideo *_conditionData; // borrowed, not owned
	QScrollArea *_scrollArea;
	QLabel *_statusLabel;
	QLabel *_imageLabel;
	std::thread _thread;              // runs CheckForMatchLoop()
	std::atomic_bool _stop = {false}; // set by the destructor
};
class MacroConditionVideoEdit : public QWidget {
Q_OBJECT

View File

@ -0,0 +1,98 @@
#include "opencv-helpers.hpp"
// Precompute the matching data for a pattern image: the RGBA pattern, an
// RGB-only copy of it, and a binary mask built from the alpha channel.
// Returns empty data for a null pattern.
PatternMatchData createPatternData(QImage &pattern)
{
	PatternMatchData data;
	if (pattern.isNull()) {
		return data;
	}
	data.rgbaPattern = QImageToMat(pattern);

	// Separate the four channels, rebuild an RGB-only pattern ...
	std::vector<cv::Mat1b> channels;
	cv::split(data.rgbaPattern, channels);
	std::vector<cv::Mat1b> rgbChannels(channels.begin(),
					   channels.begin() + 3);
	cv::merge(rgbChannels, data.rgbPattern);

	// ... and binarize the alpha channel into the match mask (any
	// non-zero alpha counts as opaque).
	cv::threshold(channels[3], data.mask, 0, 255, cv::THRESH_BINARY);
	return data;
}
// Template-match patternData within img, writing the correlation map to
// result; values below threshold are zeroed (THRESH_TOZERO), so any
// non-zero entry marks a match position. With useAlphaAsMask the match runs
// on the RGB channels only, using the alpha-derived mask so transparent
// pattern pixels are ignored; otherwise the full RGBA data is compared.
// Returns without touching result on invalid input or if the pattern is
// larger than the image.
void matchPattern(QImage &img, PatternMatchData &patternData, double threshold,
		  cv::Mat &result, bool useAlphaAsMask)
{
	if (img.isNull() || patternData.rgbaPattern.empty()) {
		return;
	}
	// The pattern must fit inside the image for matchTemplate.
	if (img.height() < patternData.rgbaPattern.rows ||
	    img.width() < patternData.rgbaPattern.cols) {
		return;
	}
	auto i = QImageToMat(img);
	if (useAlphaAsMask) {
		// Strip the alpha channel from the image so it can be
		// matched against the RGB-only pattern.
		std::vector<cv::Mat1b> rgbaChannelsImage;
		cv::split(i, rgbaChannelsImage);
		std::vector<cv::Mat1b> rgbChanlesImage(
			rgbaChannelsImage.begin(),
			rgbaChannelsImage.begin() + 3);
		cv::Mat3b rgbImage;
		cv::merge(rgbChanlesImage, rgbImage);
		// TM_CCORR_NORMED - presumably chosen because it supports a
		// mask argument.
		cv::matchTemplate(rgbImage, patternData.rgbPattern, result,
				  cv::TM_CCORR_NORMED, patternData.mask);
		cv::threshold(result, result, threshold, 0, cv::THRESH_TOZERO);
	} else {
		cv::matchTemplate(i, patternData.rgbaPattern, result,
				  cv::TM_CCOEFF_NORMED);
		cv::threshold(result, result, threshold, 0, cv::THRESH_TOZERO);
	}
}
// Convenience overload: build the PatternMatchData on the fly and run the
// pattern match with it.
void matchPattern(QImage &img, QImage &pattern, double threshold,
		  cv::Mat &result, bool useAlphaAsMask)
{
	PatternMatchData patternData = createPatternData(pattern);
	matchPattern(img, patternData, threshold, result, useAlphaAsMask);
}
// Run cascade-classifier object detection on the image and return the
// bounding rectangles of all detections; empty on invalid input.
std::vector<cv::Rect> matchObject(QImage &img, cv::CascadeClassifier &cascade,
				  double scaleFactor, int minNeighbors,
				  cv::Size minSize, cv::Size maxSize)
{
	if (img.isNull() || cascade.empty()) {
		return {};
	}
	auto i = QImageToMat(img);
	cv::Mat frameGray;
	// The image data is RGBA (Format_RGBA8888 - see QImageToMat()), so
	// convert with the RGBA code. COLOR_BGR2GRAY would swap the red and
	// blue luminance weights and yield a slightly wrong grayscale image.
	cv::cvtColor(i, frameGray, cv::COLOR_RGBA2GRAY);
	// Equalize the histogram to improve detection robustness.
	equalizeHist(frameGray, frameGray);
	std::vector<cv::Rect> objects;
	cascade.detectMultiScale(frameGray, objects, scaleFactor, minNeighbors,
				 0, minSize, maxSize);
	return objects;
}
// Assumption is that QImage uses Format_RGBA8888.
// Conversion from: https://github.com/dbzhang800/QtOpenCV
// Wraps the QImage's pixel buffer in a cv::Mat WITHOUT copying.
// NOTE: the returned Mat does not own the data - img must outlive it - and
// the cast drops const, so OpenCV calls may write into the QImage buffer.
cv::Mat QImageToMat(const QImage &img)
{
	if (img.isNull()) {
		return cv::Mat();
	}
	// depth() is bits per pixel, so depth() / 8 is the channel count
	// (4 for RGBA8888); bytesPerLine() preserves any row padding.
	return cv::Mat(img.height(), img.width(), CV_8UC(img.depth() / 8),
		       (uchar *)img.bits(), img.bytesPerLine());
}
// Wraps a cv::Mat holding RGBA data in a QImage WITHOUT copying.
// NOTE: the QImage does not take ownership - mat must outlive it.
QImage MatToQImage(const cv::Mat &mat)
{
	if (mat.empty()) {
		return QImage();
	}
	// Pass the row stride explicitly: cv::Mat rows may be padded
	// (mat.step can exceed mat.cols * 4). Omitting it makes QImage
	// assume tightly packed rows and shears / corrupts the image.
	// (The stride argument was present before this file split and was
	// dropped by accident.)
	return QImage(mat.data, mat.cols, mat.rows, mat.step,
		      QImage::Format::Format_RGBA8888);
}

View File

@ -0,0 +1,24 @@
#pragma once
#include <QImage>
#undef NO // MacOS macro that can conflict with OpenCV
#include <opencv2/opencv.hpp>
// UI bounds for the cascade classifier's minNeighbors parameter.
constexpr int minMinNeighbors = 3;
constexpr int maxMinNeighbors = 6;

// Preprocessed pattern image used for template matching.
struct PatternMatchData {
	cv::Mat4b rgbaPattern; // full RGBA pattern
	cv::Mat3b rgbPattern;  // pattern without the alpha channel
	cv::Mat1b mask;        // binary mask built from the alpha channel
};

// Build the PatternMatchData (RGB pattern + alpha mask) for a pattern image.
PatternMatchData createPatternData(QImage &pattern);
// Template-match within img; non-zero cells of result mark match positions.
void matchPattern(QImage &img, PatternMatchData &patternData, double threshold,
		  cv::Mat &result, bool useAlphaAsMask = true);
// Overload that builds the PatternMatchData from pattern on the fly.
void matchPattern(QImage &img, QImage &pattern, double threshold,
		  cv::Mat &result, bool useAlphaAsMask);
// Cascade-classifier detection; returns the detected objects' rectangles.
std::vector<cv::Rect> matchObject(QImage &img, cv::CascadeClassifier &cascade,
				  double scaleFactor, int minNeighbors,
				  cv::Size minSize, cv::Size maxSize);
// Zero-copy conversions between QImage (Format_RGBA8888) and cv::Mat; the
// source object must outlive the returned view.
cv::Mat QImageToMat(const QImage &img);
QImage MatToQImage(const cv::Mat &mat);

View File

@ -0,0 +1,50 @@
#include "threshold-slider.hpp"
#include <QHBoxLayout>
// Slider widget selecting a double in [min, max], shown next to a label that
// displays the current value with _precision decimals.
ThresholdSlider::ThresholdSlider(double min, double max, const QString &label,
				 const QString &description, QWidget *parent)
	: QWidget(parent)
{
	_slider = new QSlider();
	_slider->setOrientation(Qt::Horizontal);
	// QSlider is integer-only, so the double range is scaled by _scale.
	_slider->setRange(min * _scale, max * _scale);
	_value = new QLabel();
	// Seed the label with "<label>0.00" so SetDoubleValueText() can
	// replace the fixed-width numeric suffix in place.
	QString labelText = label + QString("0.");
	for (int i = 0; i < _precision; i++) {
		labelText.append(QString("0"));
	}
	_value->setText(labelText);
	connect(_slider, SIGNAL(valueChanged(int)), this,
		SLOT(NotifyValueChanged(int)));
	QVBoxLayout *mainLayout = new QVBoxLayout();
	QHBoxLayout *sliderLayout = new QHBoxLayout();
	sliderLayout->addWidget(_value);
	sliderLayout->addWidget(_slider);
	mainLayout->addLayout(sliderLayout);
	if (!description.isEmpty()) {
		mainLayout->addWidget(new QLabel(description));
	}
	mainLayout->setContentsMargins(0, 0, 0, 0);
	setLayout(mainLayout);
}
// Programmatically set the slider position from a double and refresh the
// value label. setValue() runs first (it may fire NotifyValueChanged with
// the rounded position); the explicit SetDoubleValueText() afterwards shows
// the exact, unrounded value.
void ThresholdSlider::SetDoubleValue(double value)
{
	const int sliderPosition = value * _scale;
	_slider->setValue(sliderPosition);
	SetDoubleValueText(value);
}
// Slot for QSlider::valueChanged(int): convert the raw slider position back
// to a double, refresh the label, and re-emit as DoubleValueChanged.
void ThresholdSlider::NotifyValueChanged(int value)
{
	double doubleValue = value / _scale;
	SetDoubleValueText(doubleValue);
	emit DoubleValueChanged(doubleValue);
}
// Replace the numeric suffix of the value label with value formatted to
// _precision decimals (the rest of the label text is kept).
void ThresholdSlider::SetDoubleValueText(double value)
{
	QString labelText = _value->text();
	labelText.chop(_precision + 2); // 2 for the part left of the "."
	labelText.append(QString::number(value, 'f', _precision));
	_value->setText(labelText);
}

View File

@ -0,0 +1,25 @@
#pragma once
#include <QWidget>
#include <QSlider>
#include <QLabel>
// Slider widget for selecting a double value (e.g. a match threshold).
// QSlider only handles ints, so the value is scaled by _scale internally.
class ThresholdSlider : public QWidget {
	Q_OBJECT

public:
	ThresholdSlider(double min = 0., double max = 1.,
			const QString &label = "threshold",
			const QString &description = "", QWidget *parent = 0);
	void SetDoubleValue(double);
public slots:
	void NotifyValueChanged(int value);
signals:
	// Emitted whenever the slider moves, with the rescaled double value.
	void DoubleValueChanged(double value);

private:
	void SetDoubleValueText(double);
	QLabel *_value;  // shows the current value next to the slider
	QSlider *_slider;
	double _scale = 100.0; // slider position = double value * _scale
	int _precision = 2;    // decimals shown in the label
};

View File

@ -0,0 +1,145 @@
#include "video-match-dialog.hpp"
#include "macro-condition-video.hpp"
#include "opencv-helpers.hpp"
#include "utility.hpp"
// Dialog showing a periodically refreshed screenshot of the condition's
// video source with the current match results highlighted.
// conditionData is borrowed, not owned.
ShowMatchDialog::ShowMatchDialog(QWidget *parent,
				 MacroConditionVideo *conditionData)
	: QDialog(parent),
	  _conditionData(conditionData),
	  _imageLabel(new QLabel),
	  _scrollArea(new QScrollArea)
{
	setWindowTitle("Advanced Scene Switcher");
	_statusLabel = new QLabel(obs_module_text(
		"AdvSceneSwitcher.condition.video.showMatch.loading"));
	_scrollArea->setBackgroundRole(QPalette::Dark);
	// The scroll area lets the preview exceed the dialog size.
	_scrollArea->setWidget(_imageLabel);
	QVBoxLayout *layout = new QVBoxLayout;
	layout->addWidget(_statusLabel);
	layout->addWidget(_scrollArea);
	setLayout(layout);
}
// Signal the worker thread to stop and wait for it before the widgets it
// references are destroyed.
ShowMatchDialog::~ShowMatchDialog()
{
	_stop = true;
	if (_thread.joinable()) {
		_thread.join();
	}
}
// Start the background loop that refreshes the match preview; no-op if it is
// already running (a joinable thread means it was started before).
void ShowMatchDialog::ShowMatch()
{
	if (_thread.joinable()) {
		return;
	}
	if (!_conditionData) {
		DisplayMessage(obs_module_text(
			"AdvSceneSwitcher.condition.video.screenshotFail"));
		return;
	}
	_thread = std::thread(&ShowMatchDialog::CheckForMatchLoop, this);
}
// Display the given image; invoked on the GUI thread via
// QMetaObject::invokeMethod() from the worker loop.
void ShowMatchDialog::RedrawImage(QImage img)
{
	_imageLabel->setPixmap(QPixmap::fromImage(img));
	_imageLabel->adjustSize();
}
// Worker loop: roughly once per second grab a screenshot of the video
// source, mark the current matches, and hand the result to the GUI thread.
// Runs until _stop is set by the destructor.
void ShowMatchDialog::CheckForMatchLoop()
{
	while (!_stop) {
		auto source = obs_weak_source_get_source(
			_conditionData->_videoSource);
		ScreenshotHelper screenshot(source);
		obs_source_release(source);
		// Give the asynchronous screenshot capture time to finish.
		std::this_thread::sleep_for(std::chrono::seconds(1));
		if (!screenshot.done) {
			// NOTE(review): setText() is called from this worker
			// thread; Qt widgets should only be touched from the
			// GUI thread - consider invokeMethod() here too.
			_statusLabel->setText(obs_module_text(
				"AdvSceneSwitcher.condition.video.screenshotFail"));
			continue;
		}
		if (screenshot.image.width() == 0 ||
		    screenshot.image.height() == 0) {
			_statusLabel->setText(obs_module_text(
				"AdvSceneSwitcher.condition.video.screenshotEmpty"));
			continue;
		}
		auto image = MarkMatch(screenshot.image);
		if (_stop) {
			return;
		}
		// BlockingQueuedConnection: wait until the GUI thread has
		// drawn the image before starting the next iteration.
		QMetaObject::invokeMethod(this, "RedrawImage",
					  Qt::BlockingQueuedConnection,
					  Q_ARG(QImage, image));
	}
}
// Draw a red rectangle at every position in matchResult that survived
// thresholding (non-zero after THRESH_TOZERO) and return the annotated copy
// of the image.
QImage markPatterns(cv::Mat &matchResult, QImage &image, QImage &pattern)
{
	auto matchImg = QImageToMat(image);
	// Every cell of the matchTemplate result ((W-w+1) x (H-h+1)) is a
	// valid top-left match position, so iterate over all of them. The
	// previous "rows - 1" / "cols - 1" bounds skipped matches in the
	// result's last row and column.
	for (int row = 0; row < matchResult.rows; row++) {
		for (int col = 0; col < matchResult.cols; col++) {
			if (matchResult.at<float>(row, col) != 0.0) {
				rectangle(matchImg, {col, row},
					  cv::Point(col + pattern.width(),
						    row + pattern.height()),
					  cv::Scalar(255, 0, 0, 255), 2, 8, 0);
			}
		}
	}
	return MatToQImage(matchImg);
}
// Draw a red rectangle around every detected object rectangle and return the
// annotated copy of the image.
QImage markObjects(QImage &image, std::vector<cv::Rect> &objects)
{
	auto frame = QImageToMat(image);
	for (const auto &obj : objects) {
		const cv::Point topLeft(obj.x, obj.y);
		const cv::Point bottomRight(obj.x + obj.width,
					    obj.y + obj.height);
		rectangle(frame, topLeft, bottomRight,
			  cv::Scalar(255, 0, 0, 255), 2, 8, 0);
	}
	return MatToQImage(frame);
}
// Annotate the screenshot with the current pattern / object match results
// and update the status label accordingly. Returns a null QImage for
// conditions other than PATTERN and OBJECT.
// NOTE(review): runs on the worker thread, but setText() touches a widget -
// confirm this is safe or route it through the GUI thread.
QImage ShowMatchDialog::MarkMatch(QImage &screenshot)
{
	QImage resultImage;
	if (_conditionData->_condition == VideoCondition::PATTERN) {
		cv::Mat matchResult;
		QImage pattern = _conditionData->GetMatchImage();
		matchPattern(screenshot, pattern,
			     _conditionData->_patternThreshold, matchResult,
			     _conditionData->_useAlphaAsMask);
		// Any non-zero cell in the result is a match position.
		const bool matchFound = countNonZero(matchResult) != 0;
		if (matchFound) {
			_statusLabel->setText(obs_module_text(
				"AdvSceneSwitcher.condition.video.patternMatchSuccess"));
			resultImage =
				markPatterns(matchResult, screenshot, pattern);
		} else {
			_statusLabel->setText(obs_module_text(
				"AdvSceneSwitcher.condition.video.patternMatchFail"));
			resultImage = screenshot;
		}
	} else if (_conditionData->_condition == VideoCondition::OBJECT) {
		auto objects = matchObject(
			screenshot, _conditionData->_objectCascade,
			_conditionData->_scaleFactor,
			_conditionData->_minNeighbors,
			{_conditionData->_minSizeX, _conditionData->_minSizeY},
			{_conditionData->_maxSizeX, _conditionData->_maxSizeY});
		if (!objects.empty()) {
			_statusLabel->setText(obs_module_text(
				"AdvSceneSwitcher.condition.video.objectMatchSuccess"));
			resultImage = markObjects(screenshot, objects);
		} else {
			_statusLabel->setText(obs_module_text(
				"AdvSceneSwitcher.condition.video.objectMatchFail"));
			resultImage = screenshot;
		}
	}
	return resultImage;
}

View File

@ -0,0 +1,31 @@
#pragma once
#include <QDialog>
#include <QLabel>
#include <QScrollArea>
#include <thread>
class MacroConditionVideo;

// Dialog continuously previewing the match state of a MacroConditionVideo by
// overlaying detected patterns / objects on screenshots of its video source.
class ShowMatchDialog : public QDialog {
	Q_OBJECT

public:
	ShowMatchDialog(QWidget *parent, MacroConditionVideo *_conditionData);
	virtual ~ShowMatchDialog();
	// Start the background preview loop (no-op if already running).
	void ShowMatch();
private slots:
	void RedrawImage(QImage img);

private:
	void CheckForMatchLoop();
	QImage MarkMatch(QImage &screenshot);
	MacroConditionVideo *_conditionData; // borrowed, not owned
	QScrollArea *_scrollArea;
	QLabel *_statusLabel;
	QLabel *_imageLabel;
	std::thread _thread;              // runs CheckForMatchLoop()
	std::atomic_bool _stop = {false}; // set by the destructor
};