forked from Smorodov/Multitarget-tracker
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathMotionDetector.cpp
More file actions
122 lines (107 loc) · 2.62 KB
/
MotionDetector.cpp
File metadata and controls
122 lines (107 loc) · 2.62 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
#include "MotionDetector.h"
///
/// \brief MotionDetector::MotionDetector
/// Constructs the motion detector and prepares its background-subtraction stage.
/// \param algType       background subtraction algorithm to run
/// \param collectPoints whether detected regions should collect interior sample points
/// \param gray          first frame; its size/type seed the foreground mask
///
MotionDetector::MotionDetector(
        BackgroundSubtract::BGFG_ALGS algType,
        bool collectPoints,
        cv::UMat& gray
        )
    : BaseDetector(collectPoints, gray)
{
    // The two initializations below are independent of each other.
    m_backgroundSubst = std::make_unique<BackgroundSubtract>(algType, gray.channels());
    m_fg = gray.clone();
}
///
/// \brief MotionDetector::~MotionDetector
/// Nothing to release by hand: members clean up via RAII.
///
MotionDetector::~MotionDetector() = default;
///
/// \brief MotionDetector::DetectContour
/// Finds external contours in the current foreground mask and turns every
/// contour whose bounding box meets the minimum object size into a CRegion.
/// When m_collectPoints is set, each region also receives a sparse grid of
/// points that lie strictly inside its contour (or its center as a fallback).
///
void MotionDetector::DetectContour()
{
    m_regions.clear();

    std::vector<std::vector<cv::Point>> contours;
    // cv::RETR_EXTERNAL / cv::CHAIN_APPROX_SIMPLE replace the legacy CV_*
    // macros, which were removed in OpenCV 4. The contour hierarchy was
    // computed but never used, so the hierarchy-free overload is enough.
    cv::findContours(m_fg, contours, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_SIMPLE, cv::Point());

    for (const auto& contour : contours)
    {
        cv::Rect r = cv::boundingRect(contour);
        if (r.width >= m_minObjectSize.width &&
            r.height >= m_minObjectSize.height)
        {
            CRegion region(r);
            cv::Point2f center(r.x + 0.5f * r.width, r.y + 0.5f * r.height);

            if (m_collectPoints)
            {
                // Sample the bounding box on a fixed 5x5-pixel grid, keeping
                // only samples strictly inside the contour (test result > 0).
                const int yStep = 5;
                const int xStep = 5;
                for (int y = r.y; y < r.y + r.height; y += yStep)
                {
                    cv::Point2f pt(0, static_cast<float>(y));
                    for (int x = r.x; x < r.x + r.width; x += xStep)
                    {
                        pt.x = static_cast<float>(x);
                        if (cv::pointPolygonTest(contour, pt, false) > 0)
                        {
                            region.m_points.push_back(pt);
                        }
                    }
                }
                // Guarantee at least one representative point per region.
                if (region.m_points.empty())
                {
                    region.m_points.push_back(center);
                }
            }
            m_regions.push_back(region);
        }
    }
}
///
/// \brief MotionDetector::Detect
/// Processes one frame: updates the foreground mask via background
/// subtraction, then segments the mask into detection regions.
/// \param gray current frame
///
void MotionDetector::Detect(cv::UMat& gray)
{
    m_backgroundSubst->subtract(gray, m_fg);
    DetectContour();
}
///
/// \brief MotionDetector::CalcMotionMap
/// Accumulates the current foreground mask into an exponentially decaying
/// motion "heat map" and overlays it onto the given frame.
/// \param frame image to draw on; cv::Mat is passed by value but shares its
/// pixel buffer, so writes here are visible to the caller
///
void MotionDetector::CalcMotionMap(cv::Mat frame)
{
// Lazily (re)allocate the float accumulator when the frame size changes.
if (m_motionMap.size() != frame.size())
{
m_motionMap = cv::Mat(frame.size(), CV_32FC1, cv::Scalar(0, 0, 0));
}
// Stretch the foreground mask to the [0, 255] range as float.
cv::Mat normFor;
cv::normalize(m_fg, normFor, 255, 0, cv::NORM_MINMAX, m_motionMap.type());
// Exponential moving average: the old map keeps 95%, the new mask adds 5%.
double alpha = 0.95;
cv::addWeighted(m_motionMap, alpha, normFor, 1 - alpha, 0, m_motionMap);
const int chans = frame.channels();
for (int y = 0; y < frame.rows; ++y)
{
uchar* imgPtr = frame.ptr(y);
float* moPtr = reinterpret_cast<float*>(m_motionMap.ptr(y));
for (int x = 0; x < frame.cols; ++x)
{
// NOTE(review): this loop executes exactly once (ci == chans - 1), so only
// the last channel (red for BGR frames) is brightened -- presumably a
// deliberate red tint for motion; confirm against callers before changing.
for (int ci = chans - 1; ci < chans; ++ci)
{
imgPtr[ci] = cv::saturate_cast<uchar>(imgPtr[ci] + moPtr[0]);
}
imgPtr += chans;
++moPtr;
}
}
}