OpenCV in Java: no matches are found and the findHomography function throws an exception as a result

I am new to OpenCV in Java and I need some help calculating a homography. The code is essentially the same as in this tutorial: https://docs.opencv.org/4.x/d7/dff/tutorial_feature_homography.html, except that I am required (for a project) to use the SIFT algorithm instead of SURF, and I have run into a roadblock.
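
For context, in the OpenCV 4.x Java bindings SIFT lives in org.opencv.features2d (no contrib module needed), and as far as I can tell the minimal way to construct the detector is just this (a sketch, not my actual code):

    SIFT detector = SIFT.create();  // all parameters left at their defaults

In my actual code below I instead pass the tutorial's SURF parameters into SIFT.create.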

Here is my code, split into two classes:

import java.util.ArrayList;
import java.util.List;
import org.opencv.calib3d.Calib3d;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.DMatch;
import org.opencv.core.KeyPoint;
import org.opencv.core.Mat;
import org.opencv.core.MatOfByte;
import org.opencv.core.MatOfDMatch;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.features2d.DescriptorMatcher;
import org.opencv.features2d.Features2d;
import org.opencv.features2d.SIFT;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

class SURFFLANNMatchingHomography {
    public void run(String[] args) {
        String filenameObject = args.length > 1 ? args[0] : "C:\\Users\\nicis\\IdeaProjects\\test3\\src\\hexagon.jpg";
        String filenameScene = args.length > 1 ? args[1] : "C:\\Users\\nicis\\IdeaProjects\\test3\\src\\hexagon.jpg";
        Mat imgObject = Imgcodecs.imread(filenameObject, Imgcodecs.IMREAD_GRAYSCALE);
        Mat imgScene = Imgcodecs.imread(filenameScene, Imgcodecs.IMREAD_GRAYSCALE);
        if (imgObject.empty() || imgScene.empty()) {
            System.err.println("Cannot read images!");
            System.exit(0);
        }

        //-- Step 1: Detect the keypoints using SIFT Detector, compute the descriptors
        int hessianThreshold = 400;
        int nOctaves = 4, nOctaveLayers = 3;
        boolean extended = false, upright = false;
        SIFT detector = SIFT.create(hessianThreshold, nOctaves, nOctaveLayers);
        MatOfKeyPoint keypointsObject = new MatOfKeyPoint(), keypointsScene = new MatOfKeyPoint();
        Mat descriptorsObject = new Mat(), descriptorsScene = new Mat();
        detector.detectAndCompute(imgObject, new Mat(), keypointsObject, descriptorsObject);
        detector.detectAndCompute(imgScene, new Mat(), keypointsScene, descriptorsScene);

        //-- Step 2: Matching descriptor vectors with a FLANN based matcher
        // Since SIFT is a floating-point descriptor NORM_L2 is used
        DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
        List<MatOfDMatch> knnMatches = new ArrayList<>();
        matcher.knnMatch(descriptorsObject, descriptorsScene, knnMatches, 2);
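        // (Aside, untested assumption: since SIFT produces floating-point descriptors,
        //  a brute-force matcher such as BFMatcher.create(Core.NORM_L2, false) should be
        //  an equivalent alternative here; I kept the tutorial's FLANN-based matcher.)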

        //-- Filter matches using the Lowe's ratio test
        float ratioThresh = 0.75f;
        List<DMatch> listOfGoodMatches = new ArrayList<>();
        for (int i = 0; i < knnMatches.size(); i++) {
            if (knnMatches.get(i).rows() > 1) {
                DMatch[] matches = knnMatches.get(i).toArray();
                if (matches[0].distance < ratioThresh * matches[1].distance) {
                    listOfGoodMatches.add(matches[0]);
                }
            }
        }
        MatOfDMatch goodMatches = new MatOfDMatch();
        goodMatches.fromList(listOfGoodMatches);


        //-- Draw matches
        Mat imgMatches = new Mat();
        Features2d.drawMatches(imgObject, keypointsObject, imgScene, keypointsScene, goodMatches, imgMatches, Scalar.all(-1),
                Scalar.all(-1), new MatOfByte(), Features2d.DrawMatchesFlags_NOT_DRAW_SINGLE_POINTS);

        //-- Localize the object
        List<Point> obj = new ArrayList<>();
        List<Point> scene = new ArrayList<>();
        List<KeyPoint> listOfKeypointsObject = keypointsObject.toList();
        List<KeyPoint> listOfKeypointsScene = keypointsScene.toList();
        for (int i = 0; i < listOfGoodMatches.size(); i++) {
            //-- Get the keypoints from the good matches
            obj.add(listOfKeypointsObject.get(listOfGoodMatches.get(i).queryIdx).pt);
            scene.add(listOfKeypointsScene.get(listOfGoodMatches.get(i).trainIdx).pt);
        }

        if (obj.isEmpty() || scene.isEmpty()) {
            System.out.println("No matches found at all.....");

        }

        MatOfPoint2f objMat = new MatOfPoint2f(), sceneMat = new MatOfPoint2f();
        objMat.fromList(obj);
        sceneMat.fromList(scene);
        double ransacReprojThreshold = 3.0;
        Mat H = Calib3d.findHomography( objMat, sceneMat, Calib3d.RANSAC, ransacReprojThreshold );
        //-- Get the corners from the image_1 ( the object to be "detected" )
        Mat objCorners = new Mat(4, 1, CvType.CV_32FC2), sceneCorners = new Mat();
        float[] objCornersData = new float[(int) (objCorners.total() * objCorners.channels())];
        objCorners.get(0, 0, objCornersData);
        objCornersData[0] = 0;
        objCornersData[1] = 0;
        objCornersData[2] = imgObject.cols();
        objCornersData[3] = 0;
        objCornersData[4] = imgObject.cols();
        objCornersData[5] = imgObject.rows();
        objCornersData[6] = 0;
        objCornersData[7] = imgObject.rows();
        objCorners.put(0, 0, objCornersData);
        Core.perspectiveTransform(objCorners, sceneCorners, H);
        float[] sceneCornersData = new float[(int) (sceneCorners.total() * sceneCorners.channels())];
        sceneCorners.get(0, 0, sceneCornersData);
        //-- Draw lines between the corners (the mapped object in the scene - image_2 )
        Imgproc.line(imgMatches, new Point(sceneCornersData[0] + imgObject.cols(), sceneCornersData[1]),
                new Point(sceneCornersData[2] + imgObject.cols(), sceneCornersData[3]), new Scalar(0, 255, 0), 4);
        Imgproc.line(imgMatches, new Point(sceneCornersData[2] + imgObject.cols(), sceneCornersData[3]),
                new Point(sceneCornersData[4] + imgObject.cols(), sceneCornersData[5]), new Scalar(0, 255, 0), 4);
        Imgproc.line(imgMatches, new Point(sceneCornersData[4] + imgObject.cols(), sceneCornersData[5]),
                new Point(sceneCornersData[6] + imgObject.cols(), sceneCornersData[7]), new Scalar(0, 255, 0), 4);
        Imgproc.line(imgMatches, new Point(sceneCornersData[6] + imgObject.cols(), sceneCornersData[7]),
                new Point(sceneCornersData[0] + imgObject.cols(), sceneCornersData[1]), new Scalar(0, 255, 0), 4);
        //-- Show detected matches
        HighGui.imshow("Good Matches & Object detection", imgMatches);
        HighGui.waitKey(0);
        System.exit(0);
    }
}

public class SURFFLANNMatchingHomographyDemo {
    public static void main(String[] args) {
        // Load the native OpenCV library
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        new SURFFLANNMatchingHomography().run(args);
        //System.out.println("An exception has been found");
    }
}

To be brutally honest, I simply swapped the SURF detector for SIFT and thought that would be enough, but apparently not.
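
The only substantive change relative to the tutorial is the detector construction, roughly this swap (the SURF line is quoted from the tutorial from memory, so treat it as approximate):

    // Tutorial version (SURF, from the opencv-contrib xfeatures2d module):
    // SURF detector = SURF.create(hessianThreshold, nOctaves, nOctaveLayers, extended, upright);
    // My version (SIFT, from org.opencv.features2d):
    SIFT detector = SIFT.create(hessianThreshold, nOctaves, nOctaveLayers);

With that change in place, I get the following error and exception: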

Exception in thread "main" CvException [org.opencv.core.CvException: cv::Exception: OpenCV(4.5.5) C:\build\master_winpack-bindings-win64-vc14-static\opencv\modules\calib3d\src\fundam.cpp:378: error: (-5:Bad argument) The input arrays should be 2D or 3D point sets in function 'cv::findHomography'
]
    at org.opencv.calib3d.Calib3d.findHomography_3(Native Method)
    at org.opencv.calib3d.Calib3d.findHomography(Calib3d.java:563)
    at SURFFLANNMatchingHomography.run(SURFFLANNMatching.java:88)
    at SURFFLANNMatchingHomographyDemo.main(SURFFLANNMatchingHomographyDemo.java:11)

I have checked, and both lists obj and scene are indeed empty; I assume that is why the findHomography call throws the exception. However, I am quite new to this and do not know how to solve it. I tried using different images (of different sizes), but that was not it. Any ideas?
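
For reference, the kind of guard I imagine adding right before the findHomography call would look roughly like this (just a sketch; it would avoid the crash, but I assume it would not fix whatever is making the lists empty in the first place):

    // Hypothetical early exit: findHomography needs at least 4 point correspondences,
    // so stop before calling it when the filtered matches cannot provide them.
    if (listOfGoodMatches.size() < 4) {
        System.err.println("Only " + listOfGoodMatches.size()
                + " good matches - too few to estimate a homography.");
        System.exit(0);
    }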

Thanks in advance



Source: Stack Overflow, licensed under CC BY-SA 3.0.