// Factory dispatch: map a tracker-type name string onto the matching OpenCV
// tracker implementation (Ptr<Tracker> via ::create()).
// NOTE(review): the enclosing function's signature and its fallthrough/default
// return are outside this chunk — presumably it returns nullptr (or throws)
// for an unknown type; confirm against the full file.
51 if (trackerType ==
"BOOSTING")
52 return OPENCV_TRACKER_NS::TrackerBoosting::create();
53 if (trackerType ==
"MIL")
54 return OPENCV_TRACKER_NS::TrackerMIL::create();
55 if (trackerType ==
"KCF")
56 return OPENCV_TRACKER_NS::TrackerKCF::create();
57 if (trackerType ==
"TLD")
58 return OPENCV_TRACKER_NS::TrackerTLD::create();
59 if (trackerType ==
"MEDIANFLOW")
60 return OPENCV_TRACKER_NS::TrackerMedianFlow::create();
61 if (trackerType ==
"MOSSE")
62 return OPENCV_TRACKER_NS::TrackerMOSSE::create();
63 if (trackerType ==
"CSRT")
64 return OPENCV_TRACKER_NS::TrackerCSRT::create();
// Tail of the track-over-clip routine's parameter list; the function name and
// the earlier parameters (_start, _end, ...) fall outside this chunk.
73 bool process_interval)
// Choose the frame interval to process. If no explicit interval was requested,
// or the requested one is degenerate (end <= 1, or zero length), fall back to
// the clip's own start/end, converted from seconds to 1-based frame numbers
// via the reader's FPS.
77 start = _start; end = _end;
78 if (!process_interval || end <= 1 || end - start == 0) {
79 start = int(video.
Start() * video.
Reader()->info.fps.ToFloat()) + 1;
80 end = int(video.
End() * video.
Reader()->info.fps.ToFloat()) + 1;
// Otherwise offset the requested interval by the clip's start time (same
// seconds → frame-number conversion). Lines between the two branches are
// elided in this chunk (likely the closing brace / `else`).
83 start = int(start + video.
Start() * video.
Reader()->info.fps.ToFloat()) + 1;
84 end = int(end + video.
End() * video.
Reader()->info.fps.ToFloat()) + 1;
// Clear any stale error state on the UI-facing controller before starting.
87 processingController->SetError(
false,
"");
// Tracker is (re)initialised lazily on the first processed frame.
89 bool trackerInit =
false;
// Walk the chosen frame range; honour a user-requested cancel promptly.
92 for (
size_t frame = start; frame <= end; ++frame) {
93 if (processingController->ShouldStop())
return;
96 cv::Mat img = f->GetImageCV();
// Convert the stored normalised (0..1) bbox into pixel coordinates of this
// frame (the surrounding constructor call is elided from this chunk).
100 int(bbox.x * img.cols),
101 int(bbox.y * img.rows),
102 int(bbox.width * img.cols),
103 int(bbox.height * img.rows)
// Progress as a percentage of the interval. NOTE(review): the division by
// (end - start) relies on the degenerate-interval guard above; confirm the
// fallback path always yields end > start (e.g. zero-length clips).
120 processingController->SetProgress(
121 uint(100 * (frame - start) / (end - start))
// Normalise a bbox selected with negative width/height (user dragged the
// selection "backwards"): shift the origin and flip the sign so the rect is
// canonical before it reaches OpenCV.
133 if (bbox.width < 0) {
134 bbox.x -= bbox.width;
135 bbox.width = -bbox.width;
137 if (bbox.height < 0) {
138 bbox.y -= bbox.height;
139 bbox.height = -bbox.height;
// Clamp the box into the frame, then (re)initialise the OpenCV tracker on it.
143 clampRect(bbox, frame.cols, frame.rows);
146 tracker->init(frame, bbox);
148 float fw = float(frame.cols), fh = float(frame.rows);
// Remember the initial box size, and seed the smoothed centre used later for
// jitter suppression.
151 origWidth = bbox.width;
152 origHeight = bbox.height;
155 smoothC_x = bbox.x + bbox.width * 0.5;
156 smoothC_y = bbox.y + bbox.height * 0.5;
// Store the box in normalised 0..1 coordinates as (x1,y1,x2,y2) corners (the
// surrounding FrameData construction is elided from this chunk).
163 (bbox.x + bbox.width) / fw,
164 (bbox.y + bbox.height) / fh
// Later path: rebuild the previous frame's box in pixel space from its stored
// normalised corners. NOTE(review): operator[] on trackedDataById will
// default-insert if frameId-1 is absent — confirm the caller guarantees the
// previous frame exists.
174 const int W = frame.cols, H = frame.rows;
175 const auto& prev = trackedDataById[frameId - 1];
179 prev.x1 * W, prev.y1 * H,
180 (prev.x2 - prev.x1) * W,
181 (prev.y2 - prev.y1) * H
// Per-frame tracking with optical-flow assistance. Many interior lines are
// elided in this chunk; comments below describe only what is visible.
// Convert to grayscale for the flow computations.
186 cv::cvtColor(frame, gray, cv::COLOR_BGR2GRAY);
// --- Sparse KLT path: track last frame's feature points into this frame. ---
192 if (!prevGray.empty() && !prevPts.empty()) {
193 std::vector<cv::Point2f> currPts;
194 std::vector<uchar> status;
195 std::vector<float> err;
196 cv::calcOpticalFlowPyrLK(
201 cv::TermCriteria{cv::TermCriteria::COUNT|cv::TermCriteria::EPS,30,0.01},
202 cv::OPTFLOW_LK_GET_MIN_EIGENVALS, 1e-4
// Collect per-point displacements for points that tracked successfully.
// NOTE(review): with OPTFLOW_LK_GET_MIN_EIGENVALS, err[] holds minimum
// eigenvalues (typically << 1), so the `err[i] < 12.0` filter is effectively
// always true — this looks like a threshold written for the default
// patch-error metric. Verify the intended flag/threshold pairing.
206 std::vector<double> dx, dy;
207 for (
size_t i = 0; i < status.size(); ++i) {
208 if (status[i] && err[i] < 12.0) {
209 dx.push_back(currPts[i].x - prevPts[i].x);
210 dy.push_back(currPts[i].y - prevPts[i].y);
// With enough surviving points, shift the box by the median displacement
// (robust to outlier tracks). nth_element gives the element at the middle
// index; for even sizes this is the upper median, which is fine here.
214 if ((
int)dx.size() >= minKltPts) {
215 auto median = [&](
auto &v){
216 std::nth_element(v.begin(), v.begin()+v.size()/2, v.end());
217 return v[v.size()/2];
219 double mdx = median(dx), mdy = median(dy);
// Keep the original box size; only the position is updated by the flow.
224 cand.width = origWidth;
225 cand.height = origHeight;
// --- Dense fallback: Farneback flow over the whole frame when the sparse
// path is unavailable; use the mean flow vector as a global shift. ---
236 if (!fullPrevGray.empty()) {
238 cv::calcOpticalFlowFarneback(
239 fullPrevGray, gray, flow,
242 cv::Scalar avg = cv::mean(flow);
246 cand.width = origWidth;
247 cand.height = origHeight;
// After too many consecutive misses, give up / reacquire (body elided).
249 if (lostCount >= 10) {
// --- Jitter suppression: ignore sub-threshold movement of the measured
// centre; otherwise blend it into the smoothed centre (blend lines elided),
// then re-centre the candidate box on the smoothed position. ---
258 constexpr double JITTER_THRESH = 1.0;
259 double measCx = cand.x + cand.width * 0.5;
260 double measCy = cand.y + cand.height * 0.5;
261 double dx = measCx - smoothC_x;
262 double dy = measCy - smoothC_y;
264 if (std::abs(dx) > JITTER_THRESH || std::abs(dy) > JITTER_THRESH) {
269 cand.x = smoothC_x - cand.width * 0.5;
270 cand.y = smoothC_y - cand.height * 0.5;
// --- Re-seed KLT features inside the (clamped) candidate ROI. ---
278 int roiX = int(std::clamp(cand.x, 0.0,
double(W - 1)));
279 int roiY = int(std::clamp(cand.y, 0.0,
double(H - 1)));
280 int roiW = int(std::min(cand.width,
double(W - roiX)));
281 int roiH = int(std::min(cand.height,
double(H - roiY)));
282 roiW = std::max(0, roiW);
283 roiH = std::max(0, roiH);
285 if (roiW > 0 && roiH > 0) {
286 cv::Rect roi(roiX, roiY, roiW, roiH);
287 cv::goodFeaturesToTrack(
289 kltMaxCorners, kltQualityLevel,
290 kltMinDist, cv::Mat(), kltBlockSize
// goodFeaturesToTrack returns ROI-local coordinates; translate the points
// back into full-frame coordinates for the next LK call.
292 for (
auto &pt : prevPts)
293 pt += cv::Point2f(
float(roi.x),
float(roi.y));
// Keep this frame's grayscale image for both flow paths next iteration.
300 fullPrevGray = gray.clone();
301 prevGray = gray.clone();
// Store the result normalised to 0..1 corner coordinates (the surrounding
// FrameData construction is elided from this chunk).
303 float fw = float(W), fh = float(H);
308 (cand.x + cand.width) / fw,
309 (cand.y + cand.height) / fh
// Serialise the tracked per-frame data to a protobuf file. Interior lines
// (per-frame field population) are elided in this chunk.
319 pb_tracker::Tracker trackerMessage;
// Copy every tracked frame into the message.
// NOTE(review): an explicit-iterator loop; the rest of the file's style
// permitting, a range-for over trackedDataById would read better.
322 for(std::map<size_t,FrameData>::iterator it=trackedDataById.begin(); it!=trackedDataById.end(); ++it){
324 pb_tracker::Frame* pbFrameData;
// Stamp the message with the save time.
329 *trackerMessage.mutable_last_updated() = TimeUtil::SecondsToTimestamp(time(NULL));
// Overwrite the target file in binary mode.
// NOTE(review): the stream is not checked for open-failure before
// SerializeToOstream — a bad path reports only the generic write error.
333 std::fstream output(protobuf_data_path, ios::out | ios::trunc | ios::binary);
334 if (!trackerMessage.SerializeToOstream(&output)) {
335 std::cerr <<
"Failed to write protobuf message." << std::endl;
// Release protobuf's global allocations.
// NOTE(review): ShutdownProtobufLibrary is process-global; calling it after
// each save is unusual if protobuf is used again later — confirm intent.
341 google::protobuf::ShutdownProtobufLibrary();
// Load previously saved tracking data from the protobuf file and rebuild the
// in-memory frame map. Interior lines (e.g. coordinate extraction from the
// Box submessage into x1..y2) are elided in this chunk.
440 pb_tracker::Tracker trackerMessage;
441 std::fstream input(protobuf_data_path, ios::in | ios::binary);
// (Embedded listing shows these as lines 444-446 in the original file.)
444 std::fstream input(protobuf_data_path, ios::in | ios::binary);
445 if (!trackerMessage.ParseFromIstream(&input)) {
446 std::cerr <<
"Failed to parse protobuf message." << std::endl;
// Drop any data from a previous load before repopulating.
452 trackedDataById.clear();
// NOTE(review): frame_size() returns int; comparing it with a size_t loop
// index mixes signedness (harmless for valid sizes, but worth tidying).
455 for (
size_t i = 0; i < trackerMessage.frame_size(); i++) {
456 const pb_tracker::Frame& pbFrameData = trackerMessage.frame(i);
459 size_t id = pbFrameData.id();
460 float rotation = pbFrameData.rotation();
463 const pb_tracker::Frame::Box& box = pbFrameData.bounding_box();
// Rebuild the FrameData entry keyed by frame id.
470 trackedDataById[id] =
FrameData(
id, rotation, x1, y1, x2, y2);
// See the save-path note: ShutdownProtobufLibrary is process-global.
474 google::protobuf::ShutdownProtobufLibrary();