OpenCV assertion error when using the cv::sfm::computeOrientation function with OpenCV version 4.5.5

Here is my full code; I am trying to use OpenCV 4.5.5 for feature detection and relative pose calculation.

I tried using vector&lt;vector&gt; as the type of the first and second inputs, but it did not work.

I think this error is related to the input parameter type, but I do not know how to fix it — huge thanks for any help!

Detection and Descriptor calculation works fine.

#include <algorithm>
#include <chrono>
#include <iostream>
#include <limits>
#include <vector>

#include <opencv2/features2d.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/sfm/fundamental.hpp>

using std::vector;

// File-scope ORB feature detector/descriptor extractor; created once by init().
static cv::Ptr<cv::ORB>               mOrbTracker = nullptr;
// File-scope descriptor matcher (e.g. brute-force Hamming); created once by init().
static cv::Ptr<cv::DescriptorMatcher> mMatcher    = nullptr;

/// Creates the file-scope ORB detector (mOrbTracker) and descriptor matcher
/// (mMatcher). Must be called before any detection/matching is attempted.
///
/// @param nFeatures               maximum number of ORB features to retain
/// @param scaleFactor             pyramid decimation ratio (> 1)
/// @param nPyramid                number of pyramid levels
/// @param edgeThreshold           border size where features are not detected
/// @param sourceImgToPyramidLevel pyramid level the source image is put on
/// @param wta_k                   number of points producing each BRIEF element
/// @param sType                   HARRIS_SCORE or FAST_SCORE keypoint ranking
/// @param fastThreshold           FAST corner detector threshold
/// @param matcherType             which cv::DescriptorMatcher to create
/// @param patchSize               BRIEF descriptor patch size; if <= 0 (the
///                                default) it falls back to edgeThreshold,
///                                which preserves the original behavior where
///                                edgeThreshold was (silently) passed into the
///                                patchSize slot of cv::ORB::create().
void init(const unsigned int &nFeatures, const float &scaleFactor,
          const unsigned int &nPyramid, const unsigned int &edgeThreshold,
          const int &sourceImgToPyramidLevel, const int &wta_k,
          const cv::ORB::ScoreType &sType, const unsigned int &fastThreshold,
          const cv::DescriptorMatcher::MatcherType &matcherType,
          const int &patchSize = -1) {
    // Bug fix: the original call passed `edgeThreshold` twice — once as the
    // edgeThreshold argument and once in the patchSize slot — coupling the two
    // values. The ORB docs do recommend patchSize ~= edgeThreshold, so the
    // fallback keeps that coupling unless a caller overrides it explicitly.
    const int patch = patchSize > 0 ? patchSize : static_cast<int>(edgeThreshold);
    mOrbTracker = cv::ORB::create(nFeatures, scaleFactor, nPyramid,
                                  edgeThreshold, sourceImgToPyramidLevel, wta_k,
                                  sType, patch, fastThreshold);
    mMatcher    = cv::DescriptorMatcher::create(matcherType);
}

int main() {
    cv::Mat img1, img2;
    img1 = cv::imread("/home/wgf/docs/1.jpg", cv::IMREAD_COLOR);
    img2 = cv::imread("/home/wgf/docs/2.jpg", cv::IMREAD_COLOR);
    init(600, 1.2f, 8, 31, 0, 2,
         cv::ORB::HARRIS_SCORE, 20, cv::DescriptorMatcher::BRUTEFORCE_HAMMING);
    cv::Mat                                            firstDescriptors  = cv::Mat();
    cv::Mat                                            secondDescriptors = cv::Mat();
    vector<cv::KeyPoint>                               mTempFirst, mTempSecond;
    vector<cv::DMatch>                                 mTempMatches, mTempGoodMatches;
    std::chrono::time_point<std::chrono::system_clock> start             = std::chrono::system_clock::now();
    detectAndComputeDescriptors(img1, mTempFirst, firstDescriptors);
    detectAndComputeDescriptors(img2, mTempSecond, secondDescriptors);

    auto dur_feature_extraction = std::chrono::duration_cast<std::chrono::milliseconds>(
            std::chrono::system_clock::now() - start);

    mMatcher->match(firstDescriptors, secondDescriptors, mTempMatches);
    float    maxDist                   = std::numeric_limits<float>::min();
    for (int i                         = 0; i < mTempMatches.size(); ++i) {
        maxDist = std::max(maxDist, mTempMatches[i].distance);
    }
    float    mMatchesDistanceThreshold = 0.6f;
    for (int j                         = 0; j < mTempMatches.size(); ++j) {
        if (mTempMatches[j].distance < mMatchesDistanceThreshold * maxDist) {
            mTempGoodMatches.emplace_back(mTempMatches[j]);
        }
    }

    cv::Mat firstKeyPoints  = cv::Mat(2, mTempGoodMatches.size(), CV_32F);
    cv::Mat secondKeyPoints = cv::Mat(2, mTempGoodMatches.size(), CV_32F);

    for (int k     = 0; k < mTempGoodMatches.size(); k++) {
        firstKeyPoints.at<float>(0, k)  = mTempFirst[mTempGoodMatches[k].queryIdx].pt.x;
        firstKeyPoints.at<float>(1, k)  = mTempFirst[mTempGoodMatches[k].queryIdx].pt.y;
        secondKeyPoints.at<float>(0, k) = mTempSecond[mTempGoodMatches[k].trainIdx].pt.x;
        secondKeyPoints.at<float>(1, k) = mTempSecond[mTempGoodMatches[k].trainIdx].pt.y;
    }
    cv::Mat  R     = cv::Mat(3, 3, CV_32F);
    cv::Mat  t     = cv::Mat(3, 1, CV_32F);
    float    scale = 0.0f;

    cv::Mat currentPose;
    cv::sfm::computeOrientation(firstKeyPoints, secondKeyPoints, R, t, scale);
    currentPose = cv::Mat::eye(4, 4, CV_32F);
    currentPose.at<float>(0, 0) = R.at<float>(0, 0);
    currentPose.at<float>(0, 1) = R.at<float>(0, 1);
    currentPose.at<float>(0, 2) = R.at<float>(0, 2);
    currentPose.at<float>(1, 0) = R.at<float>(1, 0);
    currentPose.at<float>(1, 1) = R.at<float>(1, 1);
    currentPose.at<float>(1, 2) = R.at<float>(1, 2);
    currentPose.at<float>(2, 0) = R.at<float>(2, 0);
    currentPose.at<float>(2, 1) = R.at<float>(2, 1);
    currentPose.at<float>(2, 2) = R.at<float>(2, 2);

    currentPose.at<float>(0, 3) = t.at<float>(0, 0);
    currentPose.at<float>(1, 3) = t.at<float>(1, 0);
    currentPose.at<float>(2, 3) = t.at<float>(2, 0);
    cv::Mat                                            re;
    cv::drawMatches(img1, mTempFirst, img2, mTempSecond, mTempGoodMatches, re);
    cv::imshow("match", re);
    cv::waitKey();
    return 0;
}



Sources

This article follows the attribution requirements of Stack Overflow and is licensed under CC BY-SA 3.0.

Source: Stack Overflow

Solution Source