Add temp variable support for video condition

This commit is contained in:
WarmUpTill 2023-12-04 21:17:12 +01:00 committed by WarmUpTill
parent c043053b2b
commit 43967f5ded
5 changed files with 177 additions and 28 deletions

View File

@ -1495,6 +1495,17 @@ AdvSceneSwitcher.tempVar.process.name="Process name"
AdvSceneSwitcher.tempVar.recording.durationSeconds="Recording duration"
AdvSceneSwitcher.tempVar.recording.durationSeconds.description="Recording duration in seconds.\nThis value does not change while the recording is paused and will be reset to zero if the recording is stopped."
AdvSceneSwitcher.tempVar.video.patternCount="Pattern count"
AdvSceneSwitcher.tempVar.video.patternCount.description="The number of times the given pattern has been found in a given video input frame."
AdvSceneSwitcher.tempVar.video.objectCount="Object count"
AdvSceneSwitcher.tempVar.video.objectCount.description="The number of objects the given model has identified in a given video input frame."
AdvSceneSwitcher.tempVar.video.brightness="Average brightness"
AdvSceneSwitcher.tempVar.video.brightness.description="The average brightness in a given video input frame in a range from 0 to 1 (dark to bright)."
AdvSceneSwitcher.tempVar.video.text="OCR text"
AdvSceneSwitcher.tempVar.video.text.description="The text detected in a given video input frame."
AdvSceneSwitcher.tempVar.video.color="Average color"
AdvSceneSwitcher.tempVar.video.color.description="The average RGB color in a given video input frame in HexArgb format."
AdvSceneSwitcher.selectScene="--select scene--"
AdvSceneSwitcher.selectPreviousScene="Previous Scene"
AdvSceneSwitcher.selectCurrentScene="Current Scene"

View File

@ -177,8 +177,8 @@ bool MacroConditionVideo::Load(obs_data_t *obj)
{
MacroCondition::Load(obj);
_video.Load(obj);
_condition =
static_cast<VideoCondition>(obs_data_get_int(obj, "condition"));
SetCondition(static_cast<VideoCondition>(
obs_data_get_int(obj, "condition")));
_file = obs_data_get_string(obj, "filePath");
_blockUntilScreenshotDone =
obs_data_get_bool(obj, "blockUntilScreenshotDone");
@ -261,6 +261,12 @@ bool MacroConditionVideo::SetLanguage(const std::string &language)
return _ocrParameters.SetLanguageCode(language);
}
// Sets the active video condition and rebuilds the temp vars exposed for
// it (e.g. "patternCount" for PATTERN), since the set of published temp
// vars depends on the selected condition (see SetupTempVars()).
void MacroConditionVideo::SetCondition(VideoCondition condition)
{
_condition = condition;
// Must run after _condition is updated - SetupTempVars() switches on it.
SetupTempVars();
}
bool MacroConditionVideo::ScreenshotContainsPattern()
{
cv::Mat result;
@ -269,9 +275,12 @@ bool MacroConditionVideo::ScreenshotContainsPattern()
_patternMatchParameters.useAlphaAsMask,
_patternMatchParameters.matchMode);
if (result.total() == 0) {
SetTempVarValue("patternCount", "0");
return false;
}
return countNonZero(result) > 0;
const auto count = countNonZero(result);
SetTempVarValue("patternCount", std::to_string(count));
return count > 0;
}
bool MacroConditionVideo::OutputChanged()
@ -300,12 +309,15 @@ bool MacroConditionVideo::ScreenshotContainsObject()
_objMatchParameters.minNeighbors,
_objMatchParameters.minSize.CV(),
_objMatchParameters.maxSize.CV());
return objects.size() > 0;
const auto count = objects.size();
SetTempVarValue("objectCount", std::to_string(count));
return count > 0;
}
bool MacroConditionVideo::CheckBrightnessThreshold()
{
_currentBrightness = GetAvgBrightness(_screenshotData.image) / 255.;
SetTempVarValue("brightness", std::to_string(_currentBrightness));
return _currentBrightness > _brightnessThreshold;
}
@ -318,6 +330,7 @@ bool MacroConditionVideo::CheckOCR()
auto text = RunOCR(_ocrParameters.GetOCR(), _screenshotData.image,
_ocrParameters.color, _ocrParameters.colorThreshold);
SetVariableValue(text);
SetTempVarValue("text", text);
if (!_ocrParameters.regex.Enabled()) {
return text == std::string(_ocrParameters.text);
}
@ -332,10 +345,18 @@ bool MacroConditionVideo::CheckOCR()
bool MacroConditionVideo::CheckColor()
{
return ContainsPixelsInColorRange(_screenshotData.image,
_colorParameters.color,
_colorParameters.colorThreshold,
_colorParameters.matchThreshold);
const bool ret = ContainsPixelsInColorRange(
_screenshotData.image, _colorParameters.color,
_colorParameters.colorThreshold,
_colorParameters.matchThreshold);
// Way too slow for now
//SetTempVarValue("dominantColor", GetDominantColor(_screenshotData.image, 3)
// .name(QColor::HexArgb)
// .toStdString());
SetTempVarValue("color", GetAverageColor(_screenshotData.image)
.name(QColor::HexArgb)
.toStdString());
return ret;
}
bool MacroConditionVideo::Compare()
@ -378,6 +399,58 @@ bool MacroConditionVideo::Compare()
return false;
}
void MacroConditionVideo::SetupTempVars()
{
MacroCondition::SetupTempVars();
switch (_condition) {
case VideoCondition::PATTERN:
AddTempvar(
"patternCount",
obs_module_text(
"AdvSceneSwitcher.tempVar.video.patternCount"),
obs_module_text(
"AdvSceneSwitcher.tempVar.video.patternCount.description"));
break;
case VideoCondition::OBJECT:
AddTempvar(
"objectCount",
obs_module_text(
"AdvSceneSwitcher.tempVar.video.objectCount"),
obs_module_text(
"AdvSceneSwitcher.tempVar.video.objectCount.description"));
break;
case VideoCondition::BRIGHTNESS:
AddTempvar(
"brightness",
obs_module_text(
"AdvSceneSwitcher.tempVar.video.brightness"),
obs_module_text(
"AdvSceneSwitcher.tempVar.video.brightness.description"));
break;
case VideoCondition::OCR:
AddTempvar(
"text",
obs_module_text("AdvSceneSwitcher.tempVar.video.text"),
obs_module_text(
"AdvSceneSwitcher.tempVar.video.text.description"));
break;
case VideoCondition::COLOR:
AddTempvar(
"color",
obs_module_text("AdvSceneSwitcher.tempVar.video.color"),
obs_module_text(
"AdvSceneSwitcher.tempVar.video.color.description"));
break;
case VideoCondition::MATCH:
case VideoCondition::DIFFER:
case VideoCondition::HAS_NOT_CHANGED:
case VideoCondition::HAS_CHANGED:
case VideoCondition::NO_IMAGE:
default:
break;
}
}
static inline void populateVideoInputSelection(QComboBox *list)
{
for (const auto &[_, name] : videoInputTypes) {
@ -1155,7 +1228,7 @@ void MacroConditionVideoEdit::UpdatePreviewTooltip()
return;
}
if (!requiresFileInput(_entryData->_condition)) {
if (!requiresFileInput(_entryData->GetCondition())) {
this->setToolTip("");
return;
}
@ -1224,7 +1297,7 @@ void MacroConditionVideoEdit::ConditionChanged(int cond)
}
auto lock = LockContext();
_entryData->_condition = static_cast<VideoCondition>(cond);
_entryData->SetCondition(static_cast<VideoCondition>(cond));
_entryData->ResetLastMatch();
SetWidgetVisibility();
@ -1239,7 +1312,7 @@ void MacroConditionVideoEdit::ConditionChanged(int cond)
_previewDialog.PatternMatchParametersChanged(
_entryData->_patternMatchParameters);
if (_entryData->_condition == VideoCondition::OBJECT) {
if (_entryData->GetCondition() == VideoCondition::OBJECT) {
auto path = _entryData->GetModelDataPath();
_entryData->_objMatchParameters.cascade =
initObjectCascade(path);
@ -1482,27 +1555,28 @@ void MacroConditionVideoEdit::SetWidgetVisibility()
_sources->setVisible(_entryData->_video.type ==
VideoInput::Type::SOURCE);
_scenes->setVisible(_entryData->_video.type == VideoInput::Type::SCENE);
_imagePath->setVisible(requiresFileInput(_entryData->_condition));
_imagePath->setVisible(requiresFileInput(_entryData->GetCondition()));
_usePatternForChangedCheck->setVisible(
patternControlIsOptional(_entryData->_condition));
_patternThreshold->setVisible(needsThreshold(_entryData->_condition));
_useAlphaAsMask->setVisible(_entryData->_condition ==
patternControlIsOptional(_entryData->GetCondition()));
_patternThreshold->setVisible(
needsThreshold(_entryData->GetCondition()));
_useAlphaAsMask->setVisible(_entryData->GetCondition() ==
VideoCondition::PATTERN);
SetLayoutVisible(_patternMatchModeLayout,
_entryData->_condition == VideoCondition::PATTERN);
_brightness->setVisible(_entryData->_condition ==
_entryData->GetCondition() == VideoCondition::PATTERN);
_brightness->setVisible(_entryData->GetCondition() ==
VideoCondition::BRIGHTNESS);
_showMatch->setVisible(needsShowMatch(_entryData->_condition));
_ocr->setVisible(_entryData->_condition == VideoCondition::OCR);
_objectDetect->setVisible(_entryData->_condition ==
_showMatch->setVisible(needsShowMatch(_entryData->GetCondition()));
_ocr->setVisible(_entryData->GetCondition() == VideoCondition::OCR);
_objectDetect->setVisible(_entryData->GetCondition() ==
VideoCondition::OBJECT);
_color->setVisible(_entryData->_condition == VideoCondition::COLOR);
_color->setVisible(_entryData->GetCondition() == VideoCondition::COLOR);
SetLayoutVisible(_throttleControlLayout,
needsThrottleControls(_entryData->_condition));
_area->setVisible(needsAreaControls(_entryData->_condition));
needsThrottleControls(_entryData->GetCondition()));
_area->setVisible(needsAreaControls(_entryData->GetCondition()));
if (_entryData->_condition == VideoCondition::HAS_CHANGED ||
_entryData->_condition == VideoCondition::HAS_NOT_CHANGED) {
if (_entryData->GetCondition() == VideoCondition::HAS_CHANGED ||
_entryData->GetCondition() == VideoCondition::HAS_NOT_CHANGED) {
_patternThreshold->setVisible(
_entryData->_patternMatchParameters.useForChangedCheck);
SetLayoutVisible(
@ -1528,7 +1602,7 @@ void MacroConditionVideoEdit::SetupPreviewDialogParams()
_previewDialog.VideoSelectionChanged(_entryData->_video);
_previewDialog.AreaParametersChanged(_entryData->_areaParameters);
_previewDialog.ConditionChanged(
static_cast<int>(_entryData->_condition));
static_cast<int>(_entryData->GetCondition()));
}
void MacroConditionVideoEdit::UpdateEntryData()
@ -1541,7 +1615,8 @@ void MacroConditionVideoEdit::UpdateEntryData()
static_cast<int>(_entryData->_video.type));
_scenes->SetScene(_entryData->_video.scene);
_sources->SetSource(_entryData->_video.source);
_condition->setCurrentIndex(static_cast<int>(_entryData->_condition));
_condition->setCurrentIndex(
static_cast<int>(_entryData->GetCondition()));
_reduceLatency->setChecked(_entryData->_blockUntilScreenshotDone);
_imagePath->SetPath(QString::fromStdString(_entryData->_file));
_usePatternForChangedCheck->setChecked(

View File

@ -45,8 +45,10 @@ public:
void SetPageSegMode(tesseract::PageSegMode);
bool SetLanguage(const std::string &);
void SetCondition(VideoCondition);
VideoCondition GetCondition() const { return _condition; }
VideoInput _video;
VideoCondition _condition = VideoCondition::MATCH;
std::string _file = obs_module_text("AdvSceneSwitcher.enterPath");
// Enabling this will reduce matching latency, but slow down the
// the condition checks of all macros overall.
@ -74,6 +76,10 @@ private:
bool Compare();
bool CheckShouldBeSkipped();
void SetupTempVars();
VideoCondition _condition = VideoCondition::MATCH;
bool _getNextScreenshot = true;
ScreenshotHelper _screenshotData;
QImage _matchImage;

View File

@ -225,6 +225,61 @@ bool ContainsPixelsInColorRange(const QImage &image, const QColor &color,
return matchPercentage >= totalPixelMatchThreshold;
}
// Returns the per-channel average color of the image, or an invalid
// QColor if the image is null.
QColor GetAverageColor(const QImage &img)
{
	if (img.isNull()) {
		return QColor();
	}
	// cv::mean() yields one average value per channel.
	// NOTE(review): channel 0 is treated as blue (BGR order) here, but
	// the QImageToMat conversion comment assumes Format_RGBA8888 -
	// confirm the channel layout matches.
	const cv::Scalar mean = cv::mean(QImageToMat(img));
	return QColor(cvRound(mean[2]), cvRound(mean[1]), cvRound(mean[0]));
}
// Returns the center of the largest of k k-means color clusters of the
// image, i.e. its most dominant color, or an invalid QColor on error.
// Note: k-means is expensive for large frames (see the "too slow"
// comment at the call site).
QColor GetDominantColor(const QImage &img, int k)
{
	if (img.isNull() || k <= 0) {
		return QColor();
	}
	auto image = QImageToMat(img);

	// One row per pixel, one column per channel, as floats for kmeans.
	cv::Mat samples = image.reshape(1, image.rows * image.cols);
	samples.convertTo(samples, CV_32F);

	// Apply k-means clustering to group similar colors.
	cv::TermCriteria criteria(
		cv::TermCriteria::EPS + cv::TermCriteria::MAX_ITER, 100, 0.2);
	cv::Mat labels, centers;
	cv::kmeans(samples, k, labels, criteria, 1,
		   cv::KMEANS_RANDOM_CENTERS, centers);

	// Count how many pixels were assigned to each cluster.
	cv::Mat counts = cv::Mat::zeros(1, k, CV_32SC1);
	for (int i = 0; i < labels.rows; i++) {
		counts.at<int>(0, labels.at<int>(i))++;
	}

	// counts is a 1 x k row vector, so the column (x) of the maximum
	// holds the dominant cluster index. (The previous code used
	// max_loc.y, which is always 0 for a single-row matrix, so it
	// always returned cluster 0.)
	cv::Point maxLoc;
	cv::minMaxLoc(counts, nullptr, nullptr, nullptr, &maxLoc);
	const int dominant = maxLoc.x;

	// kmeans() returns centers as a CV_32F matrix with one row per
	// cluster and one float per channel. Read individual floats - a
	// cv::Scalar access would reinterpret the row as four doubles and
	// produce garbage (which the old try/catch papered over).
	if (dominant < 0 || dominant >= centers.rows || centers.cols < 3) {
		return QColor();
	}
	const float *center = centers.ptr<float>(dominant);
	// NOTE(review): channel 0 treated as blue, matching GetAverageColor;
	// confirm against QImageToMat's channel layout.
	const int blue = cv::saturate_cast<int>(center[0]);
	const int green = cv::saturate_cast<int>(center[1]);
	const int red = cv::saturate_cast<int>(center[2]);
	const int alpha = centers.cols > 3 ? cv::saturate_cast<int>(center[3])
					   : 255;
	return QColor(red, green, blue, alpha);
}
// Assumption is that QImage uses Format_RGBA8888.
// Conversion from: https://github.com/dbzhang800/QtOpenCV
cv::Mat QImageToMat(const QImage &img)

View File

@ -66,6 +66,8 @@ std::string RunOCR(tesseract::TessBaseAPI *, const QImage &, const QColor &,
bool ContainsPixelsInColorRange(const QImage &image, const QColor &color,
double colorDeviationThreshold,
double totalPixelMatchThreshold);
QColor GetAverageColor(const QImage &img);
QColor GetDominantColor(const QImage &image, int k);
cv::Mat QImageToMat(const QImage &img);
QImage MatToQImage(const cv::Mat &mat);