If you have a pair of images from a calibrated stereo camera, you can convert them to a depth image with the function below. It loads the stereo calibration from an XML file, rectifies both images, computes a disparity map with semi-global block matching (SGBM), reprojects the disparity to 3D using the Q matrix, and returns the Z coordinate of each pixel as the depth image.
cv::Mat getDepthImage(const cv::Mat& left_raw, const cv::Mat& right_raw)
{
// Build the path to the calibration XML file.
// workingFolder, str_cam, stereo_name and cam_no are assumed to be defined elsewhere.
std::string str = workingFolder + str_cam + "\\";
std::string name = stereo_name + cam_no;
std::string fpath = str + name + ".xml";
// Load the intrinsic and extrinsic calibration parameters.
// loadMatrix_XML is a user-defined helper that reads the named matrices from the XML file
// (for example via cv::FileStorage). The second call only needs the right camera matrix and
// distortion coefficients, so a3 serves as a dummy output.
cv::Mat M1, D1, M2, D2, R, T;
cv::Mat Rl, Pl, Rr, Pr, Q, a3;
loadMatrix_XML(fpath, M1, D1, R, T, "LMATRIX", "LDISTCOEFF", "R", "T");
loadMatrix_XML(fpath, M2, D2, a3, a3, "RMATRIX", "RDISTCOEFF");
cv::Size imSize = left_raw.size();
cv::Mat mapxl, mapyl, mapxr, mapyr;
double fov = 1.0; // fov_scale for cv::fisheye::stereoRectify, used only if the commented-out arguments below are enabled.
if (fisheye) // fisheye is assumed to be a flag set elsewhere indicating a fisheye lens model.
{
cv::fisheye::stereoRectify(M1, D1, M2, D2, imSize, R, T, Rl, Rr, Pl, Pr, Q, cv::CALIB_ZERO_DISPARITY/*, imSize, 0.0, fov*/);
cv::fisheye::initUndistortRectifyMap(M1, D1, Rl, Pl, imSize, CV_32FC1, mapxl, mapyl);
cv::fisheye::initUndistortRectifyMap(M2, D2, Rr, Pr, imSize, CV_32FC1, mapxr, mapyr);
}
else
{
cv::stereoRectify(M1, D1, M2, D2, imSize, R, T, Rl, Rr, Pl, Pr, Q, cv::CALIB_ZERO_DISPARITY);
cv::initUndistortRectifyMap(M1, D1, Rl, Pl, imSize, CV_32FC1, mapxl, mapyl);
cv::initUndistortRectifyMap(M2, D2, Rr, Pr, imSize, CV_32FC1, mapxr, mapyr);
}
// Rectify both images; after remapping, corresponding points lie on the same image row.
cv::Mat left, right;
cv::remap(left_raw, left, mapxl, mapyl, cv::INTER_NEAREST);
cv::remap(right_raw, right, mapxr, mapyr, cv::INTER_NEAREST);
// From here on, the semi-global block matching (StereoSGBM) class is used.
// The parameters below should be tuned for each setup; numDisparities must be divisible by 16.
int minDisparity = 0;
int numDisparities = 16 *10;
int blockSize = 5;
int P1 = 8 *3*9;
int P2 = 32 *3* 9;
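// Note: the OpenCV samples suggest P1 = 8*channels*blockSize*blockSize and
// P2 = 32*channels*blockSize*blockSize; the values above correspond to a 3x3 block
// even though blockSize is set to 5, so you may want to adjust them together.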
int disp12MaxDiff = 1;
int preFilterCap = 63;
int uniquenessRatio = 10;
int speckleWindowSize = 100;
int speckleRange = 32;
cv::Ptr<cv::StereoSGBM> sgbm = cv::StereoSGBM::create(minDisparity, numDisparities, blockSize,
P1, P2, disp12MaxDiff, preFilterCap,
uniquenessRatio, speckleWindowSize, speckleRange);
cv::Mat leftGray, rightGray, disparity, disparity8U;
cv::cvtColor(left, leftGray, cv::COLOR_BGR2GRAY);
cv::cvtColor(right, rightGray, cv::COLOR_BGR2GRAY);
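// Compute the disparity map on the rectified grayscale pair; SGBM matches along image rows
// (the epipolar lines), which is why the images were rectified above.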
sgbm->compute(leftGray, rightGray, disparity);
// StereoSGBM returns disparity values scaled by 16 (fixed point), so divide by 16 to get pixel disparities.
//disparity.convertTo(disparity8U, CV_8U, 255.0 / (numDisparities * 16));
cv::Mat disparity32F;
disparity.convertTo(disparity32F, CV_32F, 1.0 / 16.0);
// Reproject the disparity map to 3D using the Q matrix from stereoRectify
// (for a rectified pair this is effectively Z = f * baseline / disparity).
cv::Mat points3D;
cv::reprojectImageTo3D(disparity32F, points3D, Q, false, CV_32F);
// Keep the Z coordinate as the depth value, clamping negatives to 0 and capping at 30000
// (depth is expressed in the same units as the translation vector T from calibration).
cv::Mat depthImage(points3D.rows, points3D.cols, CV_32FC1);
for (int i = 0; i < points3D.rows; i++) {
for (int j = 0; j < points3D.cols; j++) {
cv::Vec3f point = points3D.at<cv::Vec3f>(i, j);
if (point[2] < 0)
depthImage.at<float>(i, j) = 0;
else if (point[2] > 30000)
depthImage.at<float>(i, j) = 30000;
else
depthImage.at<float>(i, j) = point[2];
//depthImage.at<float>(i, j) = (point[2] > 0) ? point[2] : 0;
}
}
return depthImage;
}
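
The helper loadMatrix_XML is not shown above; it just reads the named matrices from the calibration XML file. A minimal sketch, assuming the file was written with cv::FileStorage under those node names (the exact signature of the original helper is an assumption), could look like this:

void loadMatrix_XML(const std::string& fpath, cv::Mat& m1, cv::Mat& m2, cv::Mat& m3, cv::Mat& m4,
const std::string& n1, const std::string& n2, const std::string& n3 = "", const std::string& n4 = "")
{
// Read each requested node into the corresponding output matrix; empty names are skipped.
cv::FileStorage fs(fpath, cv::FileStorage::READ);
if (!n1.empty()) fs[n1] >> m1;
if (!n2.empty()) fs[n2] >> m2;
if (!n3.empty()) fs[n3] >> m3;
if (!n4.empty()) fs[n4] >> m4;
fs.release();
}

And here is a minimal usage sketch; the image file names and the visualization step are illustrative assumptions, while the returned depth values stay in the calibration units:

cv::Mat left_raw = cv::imread("left.png");   // hypothetical left image
cv::Mat right_raw = cv::imread("right.png"); // hypothetical right image
cv::Mat depth = getDepthImage(left_raw, right_raw);
// Scale to 8 bits only for display.
cv::Mat depth8U;
cv::normalize(depth, depth8U, 0, 255, cv::NORM_MINMAX, CV_8U);
cv::imshow("depth", depth8U);
cv::waitKey(0);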
