I decided to go a statistical route, but this probably won't work if there are multiple cells in your image.
My solution is fairly straightforward:
- Compute the keypoint locations
- Find the centroid of the keypoints' spatial positions
- Compute the Euclidean distance of every point to the centroid
- Keep only the original keypoints with distance < mu + 2*sigma (for roughly normally distributed distances this keeps about 95% of the points and drops the far outliers)
Here is the image I get with this algorithm (keypoints == green, centroid == red):
Finally, here is a code sample of how I did it:
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/features2d/features2d.hpp>

#include <cmath>
#include <iostream>
#include <vector>

using namespace cv;
using namespace std;

// Compute the Euclidean distance from each point to the centroid.
void distanceFromCentroid(const vector<Point2f>& points, Point2f centroid, vector<double>& distances)
{
    vector<Point2f>::const_iterator point;
    for(point = points.begin(); point != points.end(); ++point)
    {
        double distance = std::sqrt((point->x - centroid.x)*(point->x - centroid.x) + (point->y - centroid.y)*(point->y - centroid.y));
        distances.push_back(distance);
    }
}

int main(int argc, char* argv[])
{
    Mat input = imread("cell.jpg", 0); // load as grayscale

    // Detect SIFT keypoints (note: on OpenCV 2.4.x SiftFeatureDetector lives in the nonfree module).
    SiftFeatureDetector detector;
    vector<cv::KeyPoint> keypoints;
    detector.detect(input, keypoints);

    // Collect the spatial locations of the keypoints.
    vector<Point2f> points;
    vector<KeyPoint>::iterator keypoint;
    for(keypoint = keypoints.begin(); keypoint != keypoints.end(); ++keypoint)
    {
        points.push_back(keypoint->pt);
    }

    // Centroid of the keypoint locations from the spatial moments.
    Moments m = moments(points, true);
    Point2f centroid(m.m10 / m.m00, m.m01 / m.m00);

    // Distance of every keypoint to the centroid.
    vector<double> distances;
    distanceFromCentroid(points, centroid, distances);

    // Mean and standard deviation of those distances.
    Scalar mu, sigma;
    meanStdDev(distances, mu, sigma);
    cout << mu.val[0] << ", " << sigma.val[0] << endl;

    // Keep only keypoints within two standard deviations of the mean distance.
    vector<KeyPoint> filtered;
    for(size_t i = 0; i < distances.size(); ++i)
    {
        if(distances[i] < (mu.val[0] + 2.0*sigma.val[0]))
        {
            filtered.push_back(keypoints[i]);
        }
    }

    // Draw the surviving keypoints in green and the centroid in red.
    Mat out = input.clone();
    drawKeypoints(input, filtered, out, Scalar(0, 255, 0));
    circle(out, centroid, 7, Scalar(0, 0, 255), 1);

    imshow("kpts", out);
    waitKey();

    imwrite("statFilter.png", out);

    return 0;
}
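A side note if you are on a newer OpenCV: the old SiftFeatureDetector class is gone from the 3.x/4.x API. This is only a minimal sketch of the equivalent detection step, assuming OpenCV 4.4+ (where SIFT moved back into the main features2d module); the rest of the filtering stays the same:

#include <opencv2/core.hpp>
#include <opencv2/imgcodecs.hpp>
#include <opencv2/features2d.hpp>
#include <vector>

int main()
{
    cv::Mat input = cv::imread("cell.jpg", cv::IMREAD_GRAYSCALE);

    // SIFT is a cv::Feature2D; create() returns a cv::Ptr<cv::SIFT>.
    cv::Ptr<cv::SIFT> sift = cv::SIFT::create();

    std::vector<cv::KeyPoint> keypoints;
    sift->detect(input, keypoints);

    // ...then compute the centroid, distances, and the mu + 2*sigma filter as above.
    return 0;
}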
Hope that helps!