
I'm developing an Android app whose main purpose is to detect a requested object in a scene. To do this I'm using OpenCV's SURF algorithm. I'm not having much luck with the detection, because I don't know when an object should be considered "found".

I obtain a frame from my device's camera and follow these steps to get the object's keypoints and descriptors:

Java Code

public void onSnapClick(View v) {
    Imgproc.GaussianBlur(frameGray, frameGray, new Size(3, 3), 2);
    Imgproc.Canny(frameGray, frameGray, 40, 120);
    Imgproc.resize(frameGray, frameGray, new Size(320, 240));
    FindFeatures(frameGray.getNativeObjAddr()); //JNI call
    //Some code to store data in DB...
}

JNI call

double hessianThreshold=600;
int nOctaves=4;
int nOctaveLayers=2;
bool extended=true;
bool upright=false;

JNIEXPORT void JNICALL Java_es_ugr_reconocimiento_Juego_FindFeatures(JNIEnv* env, jobject, jlong addrGray) {
    Mat& frameGray= *(Mat*) addrGray;
    vector<KeyPoint> keyPoints;
    Mat descriptores;
    SurfFeatureDetector detector_Surf(hessianThreshold, nOctaves, nOctaveLayers, extended, upright);
    SurfDescriptorExtractor extractor_Surf;
    detector_Surf.detect(frameGray, keyPoints);
    if (keyPoints.size() > 0)
        extractor_Surf.compute(frameGray, keyPoints, descriptores);
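    // Presumably keyPoints and descriptores are then appended to the global
    // lstKeyPoints / lstDescriptors lists that FindObjects (below) reads from.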
}

Now I choose which object I want to find and follow these steps to do so:

Java Code

public void onSearchClick(View v) {
    Imgproc.GaussianBlur(frameGray, frameGray, new Size(3, 3), 2);
    Imgproc.Canny(frameGray, frameGray, 40, 120);
    Imgproc.resize(frameGray, frameGray, new Size(320, 240));
    nObject = FindObjects(frameGray.getNativeObjAddr()); //JNI call
    if (nObject == searchObject) {
        // draw the frame with a rectangle around the found object in the scene...
    }
}

JNI call

double hessianThreshold=600;
int nOctaves=4;
int nOctaveLayers=2;
bool extended=true;
bool upright=false;

JNIEXPORT jint JNICALL Java_es_ugr_reconocimiento_Juego_FindObjects(JNIEnv* env, jobject, jlong addrGray) {
    Mat& frameGray = *(Mat*) addrGray;
    vector<KeyPoint> keyPoints_esc;
    Mat descriptores_esc;
    SurfFeatureDetector detector_Surf(hessianThreshold, nOctaves, nOctaveLayers, extended, upright);
    SurfDescriptorExtractor extractor_Surf;
    detector_Surf.detect(frameGray , keyPoints_esc);
    if (keyPoints_esc.size() == 0) return -1;
    extractor_Surf.compute(frameGray , keyPoints_esc, descriptores_esc);
    if (descriptores_esc.rows == 0) return -1;

    for(int i=0;i<lstObjects.size();i++){
        Mat descriptores_obj = lstDescriptors.at(i);
        vector<KeyPoint> keyPoints_obj = lstKeyPoints.at(i);

        FlannBasedMatcher matcher;
        vector<vector<DMatch> > matches;
        matcher.knnMatch(descriptores_obj, descriptores_esc, matches, 2);
        // ----------------------------------------------------------------------
        // Keep only the "good" matches using the ratio test: the best match must
        // be clearly closer (distance < 0.6x) than the second-best match.
        // ----------------------------------------------------------------------
        vector<DMatch> good_matches;
        //THIS LOOP IS SENSITIVE TO SEGFAULTS
        for (int i = 0; i < min(descriptores_obj.rows - 1, (int) matches.size());i++){
            if ( (matches[i][0].distance < 0.6 * (matches[i][1].distance)) && 
                 ((int) matches[i].size() <= 2 && (int) matches[i].size() > 0) ) {
                    good_matches.push_back(matches[i][0]);
            }
        }

        if (good_matches.size() >= nThreshold) {
            vector < Point2f > obj;
            vector < Point2f > scene;

            for (int i = 0; i < good_matches.size(); i++) {
                //-- Get the keypoints from the good matches
                obj.push_back(keyPoints_obj[good_matches[i].queryIdx].pt);
                scene.push_back(keyPoints_esc[good_matches[i].trainIdx].pt);
            }

            Mat H = findHomography(obj, scene, CV_RANSAC);

            vector<Point2f> obj_corners(4);
            obj_corners[0] = cvPoint(0, 0);
            obj_corners[1] = cvPoint(240, 0);
            obj_corners[2] = cvPoint(240, 320);
            obj_corners[3] = cvPoint(0, 320);
            vector<Point2f> scene_corners(4);

            perspectiveTransform(obj_corners, scene_corners, H);

            line(frameGray, scene_corners[0], scene_corners[1], Scalar(255, 0, 0), 4);
            line(frameGray, scene_corners[1], scene_corners[2], Scalar(255, 0, 0), 4);
            line(frameGray, scene_corners[2], scene_corners[3], Scalar(255, 0, 0), 4);
            line(frameGray, scene_corners[3], scene_corners[0], Scalar(255, 0, 0), 4);

            for (unsigned int i = 0; i < scene.size(); i++) {
                const Point2f& kp = scene[i];
                circle(frameGray, Point(kp.x, kp.y), 10, Scalar(255, 255, 255, 255));
            }

            return i; //position of the matched object

        }

    }
    return -1; // no stored object matched this frame
}

I don't know what the best threshold for this comparison would be:

if (good_matches.size() >= nThreshold) // do findHomography...

I've been searching, and almost every piece of code I found used 4 as nThreshold, but that isn't working well for me. My code "finds" an object almost every time.

Is there a better way to do this? For example, using a different matcher, another threshold, or checking whether the homography actually produces something close to a rectangle (I mention this because sometimes it "finds" something but draws four lines that don't form a rectangle).
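
One possibility for that last idea would be to check whether the projected corners still form a sensible quadrilateral before accepting the detection. A minimal sketch (OpenCV 2.4 C++ API; looksLikeRectangle is a hypothetical helper and the 1000 px^2 area cutoff is an arbitrary value to tune), which could be called on scene_corners right after perspectiveTransform:

#include <vector>
#include <opencv2/imgproc/imgproc.hpp> // isContourConvex, contourArea
using namespace cv;
using namespace std;

// Hypothetical helper: reject homographies whose projected corners are degenerate.
bool looksLikeRectangle(const vector<Point2f>& scene_corners) {
    if (scene_corners.size() != 4)
        return false;
    // A perspective projection of a rectangle should still be a convex quadrilateral...
    if (!isContourConvex(scene_corners))
        return false;
    // ...and should not collapse into a sliver; 1000 px^2 is an arbitrary cutoff to tune.
    return contourArea(scene_corners) > 1000.0;
}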

You probably need 2 conditions: at least 4 points to do the homography, and a threshold for a good match (maybe something like 60% of the matches being good ones). - berak

@migue02 did my solution help you? - Darshan

@migue02 were you able to solve this? - Daniel
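
For reference, a minimal sketch of the two conditions berak suggests (at least 4 correspondences for the homography, plus a minimum fraction of knn matches surviving the ratio test; the 0.6 fraction is only an assumed starting value), reusing good_matches and matches from the loop in FindObjects:

// Hypothetical acceptance test combining berak's two conditions.
bool acceptMatch(const vector<DMatch>& good_matches,
                 const vector<vector<DMatch> >& matches) {
    // findHomography needs at least 4 point correspondences.
    if (good_matches.size() < 4 || matches.empty())
        return false;
    // Require that a reasonable share of all knn matches passed the ratio test.
    double goodRatio = (double) good_matches.size() / (double) matches.size();
    return goodRatio >= 0.6; // assumed threshold, tune per object
}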

1 Answer


Please make the following changes in your code:

int nThreshold = 100;

        if (good_matches.size() >= nThreshold) {
            // Too many "good" matches usually means a lot of ambiguous points,
            // which tends to produce a false match, so skip this object.
            continue;
        }

        vector<Point2f> obj;
        vector<Point2f> scene;

        for (int i = 0; i < good_matches.size(); i++) {
            //-- Get the keypoints from the good matches
            obj.push_back(keyPoints_obj[good_matches[i].queryIdx].pt);
            scene.push_back(keyPoints_esc[good_matches[i].trainIdx].pt);
        }

        // Skip the homography if there are fewer than four correspondences:
        // findHomography needs at least 4 points, and calling it with fewer
        // will throw and crash the app if the exception is not handled.
        if (obj.size() < 4 || scene.size() < 4) {
            continue;
        }

        Mat H = findHomography(obj, scene, CV_RANSAC);
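
Beyond that, it may also be worth validating the homography itself before drawing anything: findHomography can return an empty matrix, and the RANSAC inlier mask gives an extra confidence signal. A possible sketch (the 10-inlier minimum is only an assumption to tune) that would replace the plain findHomography call inside the object loop:

        Mat inlierMask;
        Mat H = findHomography(obj, scene, CV_RANSAC, 3, inlierMask);
        if (H.empty())
            continue; // RANSAC could not find a consistent model for this object
        if (countNonZero(inlierMask) < 10) // assumed minimum inlier count, tune as needed
            continue;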