#include "SMReconstructionWorker.h"
#include "AlgorithmGrayCode.h"
#include "AlgorithmPhaseShift.h"
#include <QCoreApplication>
#include <QSettings>
#include <iostream>
#include <opencv2/opencv.hpp>
#include "cvtools.h"
#include <pcl/filters/statistical_outlier_removal.h>
#include <pcl/io/pcd_io.h>
void SMReconstructionWorker::setup(){

    QSettings settings;

    // Get current calibration
    calibration = settings.value("calibration/parameters").value<SMCalibrationParameters>();

    // Create algorithm
    dir = (CodingDir)settings.value("pattern/direction", CodingDirHorizontal).toInt();
    if(dir == CodingDirNone)
        std::cerr << "SMReconstructionWorker: invalid coding direction" << std::endl;

    int resX = settings.value("projector/resX").toInt();
    int resY = settings.value("projector/resY").toInt();

    QString codec = settings.value("codec", "GrayCode").toString();
    if(codec == "PhaseShift")
        algorithm = new AlgorithmPhaseShift(resX, resY, dir);
    else if(codec == "GrayCode")
        algorithm = new AlgorithmGrayCode(resX, resY, dir);
    else
        std::cerr << "SMReconstructionWorker: invalid codec " << codec.toStdString() << std::endl;

//    // Precompute lens correction maps (note: the second call must use the
//    // K1/k1 intrinsics for camera 1, not K0/k0 as in the earlier revision)
//    cv::Mat eye = cv::Mat::eye(3, 3, CV_32F);
//    cv::initUndistortRectifyMap(calibration.K0, calibration.k0, eye, calibration.K0, cv::Size(calibration.frameWidth, calibration.frameHeight), CV_32FC1, lensMap0Horz, lensMap0Vert);
//    cv::initUndistortRectifyMap(calibration.K1, calibration.k1, eye, calibration.K1, cv::Size(calibration.frameWidth, calibration.frameHeight), CV_32FC1, lensMap1Horz, lensMap1Vert);

//    cv::Mat mapHorz, mapVert;
//    cv::normalize(lensMap0Horz, mapHorz, 0, 255, cv::NORM_MINMAX, CV_8U);
//    cv::normalize(lensMap0Vert, mapVert, 0, 255, cv::NORM_MINMAX, CV_8U);
//    cv::imwrite("mapHorz.png", mapHorz);
//    cv::imwrite("mapVert.png", mapVert);
}
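
// A minimal sketch of how the precomputed maps above could be applied if the
// commented-out block were enabled: each camera frame is undistorted with
// cv::remap before decoding. This assumes lensMap0Horz/lensMap0Vert exist as
// members and that frames0 is a std::vector<cv::Mat>; it is not part of this
// revision.
//    for(unsigned int i=0; i<frameSequence.frames0.size(); i++){
//        cv::Mat frameUndistorted;
//        cv::remap(frameSequence.frames0[i], frameUndistorted, lensMap0Horz, lensMap0Vert, cv::INTER_LINEAR);
//        frameSequence.frames0[i] = frameUndistorted;
//    }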
void SMReconstructionWorker::reconstructPointCloud(SMFrameSequence frameSequence){

    time.start();

    // Get 3D points
    std::vector<cv::Point3f> Q;
    std::vector<cv::Vec3b> color;
    algorithm->get3DPoints(calibration, frameSequence.frames0, frameSequence.frames1, Q, color);

    // Convert point cloud to PCL format
    pcl::PointCloud<pcl::PointXYZRGB>::Ptr pointCloudPCL(new pcl::PointCloud<pcl::PointXYZRGB>);

    // Interpret as unorganized point cloud
    pointCloudPCL->width = Q.size();
    pointCloudPCL->height = 1;
    pointCloudPCL->is_dense = false;

    pointCloudPCL->points.resize(Q.size());

    for(unsigned int i=0; i<Q.size(); i++){
        pcl::PointXYZRGB point;
        point.x = Q[i].x; point.y = Q[i].y; point.z = Q[i].z;
        point.r = color[i][0]; point.g = color[i][1]; point.b = color[i][2];
        pointCloudPCL->points[i] = point;
    }

    SMPointCloud smPointCloud;
    smPointCloud.pointCloud = pointCloudPCL;
    smPointCloud.rotationAngle = frameSequence.rotationAngle;

    // Emit result
    emit newPointCloud(smPointCloud);

    std::cout << "SMReconstructionWorker: " << time.elapsed() << "ms" << std::endl;
}
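
// The statistical_outlier_removal and pcd_io headers included at the top are
// not used in this revision. A minimal sketch of how the cloud could be
// denoised and saved with them, e.g. just before the emit above; the parameter
// values and the pointCloudFiltered name are illustrative, not taken from this
// code base:
//    pcl::StatisticalOutlierRemoval<pcl::PointXYZRGB> filter;
//    filter.setInputCloud(pointCloudPCL);
//    filter.setMeanK(5);             // neighbors used in the mean-distance estimate
//    filter.setStddevMulThresh(1.0); // cutoff in standard deviations from the mean
//    pcl::PointCloud<pcl::PointXYZRGB>::Ptr pointCloudFiltered(new pcl::PointCloud<pcl::PointXYZRGB>);
//    filter.filter(*pointCloudFiltered);
//    pcl::io::savePCDFileBinary("pointcloud.pcd", *pointCloudFiltered);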
void SMReconstructionWorker::reconstructPointClouds(std::vector<SMFrameSequence> frameSequences){

    // Process sequentially
    for(unsigned int i=0; i<frameSequences.size(); i++){
        reconstructPointCloud(frameSequences[i]);
    }
}
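
// Usage sketch for the worker as a whole, assuming it runs in its own QThread
// and that SMPointCloud is registered with qRegisterMetaType so the queued
// signal can cross threads. The viewer object and its updatePointCloud slot
// are hypothetical names for whatever consumes the result:
//    SMReconstructionWorker *worker = new SMReconstructionWorker;
//    worker->moveToThread(&workerThread);
//    QObject::connect(worker, SIGNAL(newPointCloud(SMPointCloud)),
//                     viewer, SLOT(updatePointCloud(SMPointCloud)));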
void SMReconstructionWorker::triangulate(std::vector<cv::Point2f>& q0, std::vector<cv::Point2f>& q1, std::vector<cv::Point3f> &Q){

    // Camera 0 is the reference frame: P0 = K0*[I|0]
    cv::Mat P0(3,4,CV_32F,cv::Scalar(0.0));
    cv::Mat(calibration.K0).copyTo(P0(cv::Range(0,3), cv::Range(0,3)));

    // Camera 1 is placed by the stereo extrinsics: P1 = K1*[R1|T1]
    cv::Mat temp(3,4,CV_32F);
    cv::Mat(calibration.R1).copyTo(temp(cv::Range(0,3), cv::Range(0,3)));
    cv::Mat(calibration.T1).copyTo(temp(cv::Range(0,3), cv::Range(3,4)));
    cv::Mat P1 = cv::Mat(calibration.K1) * temp;

    // Triangulate in homogeneous coordinates, then dehomogenize
    cv::Mat QMatHomogenous, QMat;
    cv::triangulatePoints(P0, P1, q0, q1, QMatHomogenous);
    cvtools::convertMatFromHomogeneous(QMatHomogenous, QMat);
    cvtools::matToPoints3f(QMat, Q);
}
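
// Usage sketch, assuming q0[i] and q1[i] are matching, lens-corrected pixel
// coordinates of the same scene point in cameras 0 and 1 (the projection
// matrices above contain no distortion terms):
//    std::vector<cv::Point2f> q0, q1;    // filled by the decoding algorithm
//    std::vector<cv::Point3f> Q;
//    triangulate(q0, q1, Q);             // Q[i] is expressed in camera 0 coordinates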
//void SMReconstructionWorker::triangulateFromUpVp(cv::Mat &up, cv::Mat &vp, cv::Mat &xyz){

//    std::cerr << "WARNING! NOT FULLY IMPLEMENTED!" << std::endl;

//    int N = up.rows * up.cols;

//    // NOTE: uc and vc are not defined in this scope; they would have to be
//    // precomputed grids of camera pixel coordinates of the same size as up/vp.
//    cv::Mat projPointsCam(2, N, CV_32F);
//    uc.reshape(0,1).copyTo(projPointsCam.row(0));
//    vc.reshape(0,1).copyTo(projPointsCam.row(1));

//    cv::Mat projPointsProj(2, N, CV_32F);
//    up.reshape(0,1).copyTo(projPointsProj.row(0));
//    vp.reshape(0,1).copyTo(projPointsProj.row(1));

//    // Camera: Pc = Kc*[I|0]; projector: Pp = Kp*[Rp|Tp]
//    cv::Mat Pc(3,4,CV_32F,cv::Scalar(0.0));
//    cv::Mat(calibration.Kc).copyTo(Pc(cv::Range(0,3), cv::Range(0,3)));

//    cv::Mat Pp(3,4,CV_32F), temp(3,4,CV_32F);
//    cv::Mat(calibration.Rp).copyTo(temp(cv::Range(0,3), cv::Range(0,3)));
//    cv::Mat(calibration.Tp).copyTo(temp(cv::Range(0,3), cv::Range(3,4)));
//    Pp = cv::Mat(calibration.Kp) * temp;

//    cv::Mat xyzw;
//    cv::triangulatePoints(Pc, Pp, projPointsCam, projPointsProj, xyzw);

//    // Dehomogenize and reshape to the input image layout
//    xyz.create(3, N, CV_32F);
//    for(int i=0; i<N; i++){
//        xyz.at<float>(0,i) = xyzw.at<float>(0,i)/xyzw.at<float>(3,i);
//        xyz.at<float>(1,i) = xyzw.at<float>(1,i)/xyzw.at<float>(3,i);
//        xyz.at<float>(2,i) = xyzw.at<float>(2,i)/xyzw.at<float>(3,i);
//    }

//    xyz = xyz.t();
//    xyz = xyz.reshape(3, up.rows);
//}