#include "SMReconstructionWorker.h"

#include "AlgorithmGrayCode.h"
#include "AlgorithmPhaseShift.h"

#include <QCoreApplication>
#include <QSettings>

#include <iostream>
#include <opencv2/opencv.hpp>

#include "cvtools.h"

#include <pcl/filters/statistical_outlier_removal.h>
#include <pcl/io/pcd_io.h>
#include <pcl/features/normal_3d.h>
#include <pcl/search/kdtree.h>
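
// SMReconstructionWorker reconstructs a colored point cloud (with estimated surface
// normals) from a captured structured light frame sequence and emits the result
// through the newPointCloud signal.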

void SMReconstructionWorker::setup(){

    QSettings settings;

    // Get current calibration
    calibration = settings.value("calibration/parameters").value<SMCalibrationParameters>();

    // Create Algorithm
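    // Coding direction, projector resolution and codec are read from the application's
    // QSettings; the codec defaults to "GrayCode".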
    dir = (CodingDir)settings.value("pattern/direction", CodingDirHorizontal).toInt();
    if(dir == CodingDirNone)
        std::cerr << "SMReconstructionWorker: invalid coding direction" << std::endl;

    int resX = settings.value("projector/resX").toInt();
    int resY = settings.value("projector/resY").toInt();
    QString codec = settings.value("codec", "GrayCode").toString();
    if(codec == "PhaseShift")
        algorithm = new AlgorithmPhaseShift(resX, resY, dir);
    else if(codec == "GrayCode")
        algorithm = new AlgorithmGrayCode(resX, resY, dir);
    else
        std::cerr << "SMReconstructionWorker: invalid codec " << codec.toStdString() << std::endl;

//    // Precompute lens correction maps
//    cv::Mat eye = cv::Mat::eye(3, 3, CV_32F);
//    cv::initUndistortRectifyMap(calibration.K0, calibration.k0, eye, calibration.K0, cv::Size(calibration.frameWidth, calibration.frameHeight), CV_32FC1, lensMap0Horz, lensMap0Vert);
//    cv::initUndistortRectifyMap(calibration.K1, calibration.k1, eye, calibration.K1, cv::Size(calibration.frameWidth, calibration.frameHeight), CV_32FC1, lensMap1Horz, lensMap1Vert);

//    cv::Mat mapHorz, mapVert;
//    cv::normalize(lensMap0Horz, mapHorz, 0, 255, cv::NORM_MINMAX, CV_8U);
//    cv::normalize(lensMap0Vert, mapVert, 0, 255, cv::NORM_MINMAX, CV_8U);
//    cv::imwrite("mapHorz.png", mapHorz);
//    cv::imwrite("mapVert.png", mapVert);
}

void SMReconstructionWorker::reconstructPointCloud(SMFrameSequence frameSequence){

    time.start();

    // Get 3D Points
    std::vector<cv::Point3f> Q;
    std::vector<cv::Vec3b> color;
    algorithm->get3DPoints(calibration, frameSequence.frames0, frameSequence.frames1, Q, color);

    // Convert point cloud to PCL format
    pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr pointCloudPCL(new pcl::PointCloud<pcl::PointXYZRGBNormal>);

    // Interpret as unorganized point cloud
    pointCloudPCL->width = Q.size();
    pointCloudPCL->height = 1;
    pointCloudPCL->is_dense = false;

    pointCloudPCL->points.resize(Q.size());

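    // Copy coordinates and color per point; the color channels are used in the order
    // returned by get3DPoints (index 0 -> r, 1 -> g, 2 -> b).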
    for(unsigned int i=0; i<Q.size(); i++){
        pcl::PointXYZRGBNormal point;
        point.x = Q[i].x; point.y = Q[i].y; point.z = Q[i].z;
        point.r = color[i][0]; point.g = color[i][1]; point.b = color[i][2];
        pointCloudPCL->points[i] = point;
    }

    // Estimate surface normals
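    // Normals are computed with a kd-tree radius search (radius in the same units as
    // the reconstructed coordinates); the view point at the origin flips normals to
    // face camera 0, which sits at the origin of the world frame.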
    pcl::NormalEstimation<pcl::PointXYZRGBNormal, pcl::PointXYZRGBNormal> ne;
    pcl::search::KdTree<pcl::PointXYZRGBNormal>::Ptr tree(new pcl::search::KdTree<pcl::PointXYZRGBNormal>());
    ne.setSearchMethod(tree);
    ne.setRadiusSearch(3);
    ne.setViewPoint(0.0, 0.0, 0.0);
    ne.setInputCloud(pointCloudPCL);
    ne.compute(*pointCloudPCL);

    // Assemble SMPointCloud data structure
    SMPointCloud smPointCloud;
    smPointCloud.pointCloud = pointCloudPCL;
    smPointCloud.rotationAngle = frameSequence.rotationAngle;

    // Determine transform in world (camera0) coordinate system
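    // The turntable rotation (-angleRadians about the rotation stage's y axis) is
    // conjugated with the stage extrinsics (Rr, Tr), so R and T express this scan's
    // motion in the camera 0 (world) frame.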
    float angleRadians = frameSequence.rotationAngle/180.0*M_PI;
    cv::Vec3f rot_rvec(0.0, -angleRadians, 0.0);
    cv::Mat R;
    cv::Rodrigues(rot_rvec, R);
    smPointCloud.R = calibration.Rr.t()*cv::Matx33f(R)*calibration.Rr;
    smPointCloud.T = calibration.Rr.t()*cv::Matx33f(R)*calibration.Tr - calibration.Rr.t()*calibration.Tr;

    // Emit result
    emit newPointCloud(smPointCloud);

    std::cout << "SMReconstructionWorker: " << time.elapsed() << "ms" << std::endl;

}

void SMReconstructionWorker::reconstructPointClouds(std::vector<SMFrameSequence> frameSequences){

    // Process sequentially
    for(unsigned int i=0; i<frameSequences.size(); i++){
        reconstructPointCloud(frameSequences[i]);
    }

}

void SMReconstructionWorker::triangulate(std::vector<cv::Point2f>& q0, std::vector<cv::Point2f>& q1, std::vector<cv::Point3f> &Q){

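    // Linear triangulation from the stereo calibration: camera 0 is placed at the
    // world origin with P0 = K0*[I|0], and camera 1 has P1 = K1*[R1|T1].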
    cv::Mat P0(3,4,CV_32F,cv::Scalar(0.0));
    cv::Mat(calibration.K0).copyTo(P0(cv::Range(0,3), cv::Range(0,3)));

    cv::Mat temp(3,4,CV_32F);
    cv::Mat(calibration.R1).copyTo(temp(cv::Range(0,3), cv::Range(0,3)));
    cv::Mat(calibration.T1).copyTo(temp(cv::Range(0,3), cv::Range(3,4)));
    cv::Mat P1 = cv::Mat(calibration.K1) * temp;

    cv::Mat QMatHomogenous, QMat;
    cv::triangulatePoints(P0, P1, q0, q1, QMatHomogenous);
    cvtools::convertMatFromHomogeneous(QMatHomogenous, QMat);
    cvtools::matToPoints3f(QMat, Q);

}

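// The routine below is a commented-out draft for triangulating directly from projector
// coordinates (up, vp) using camera/projector calibration fields (Kc, Kp, Rp, Tp); it
// is not compiled.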
//void SMReconstructionWorker::triangulateFromUpVp(cv::Mat &up, cv::Mat &vp, cv::Mat &xyz){

//    std::cerr << "WARNING! NOT FULLY IMPLEMENTED!" << std::endl;
//    int N = up.rows * up.cols;

//    cv::Mat projPointsCam(2, N, CV_32F);
//    uc.reshape(0,1).copyTo(projPointsCam.row(0));
//    vc.reshape(0,1).copyTo(projPointsCam.row(1));

//    cv::Mat projPointsProj(2, N, CV_32F);
//    up.reshape(0,1).copyTo(projPointsProj.row(0));
//    vp.reshape(0,1).copyTo(projPointsProj.row(1));

//    cv::Mat Pc(3,4,CV_32F,cv::Scalar(0.0));
//    cv::Mat(calibration.Kc).copyTo(Pc(cv::Range(0,3), cv::Range(0,3)));

//    cv::Mat Pp(3,4,CV_32F), temp(3,4,CV_32F);
//    cv::Mat(calibration.Rp).copyTo(temp(cv::Range(0,3), cv::Range(0,3)));
//    cv::Mat(calibration.Tp).copyTo(temp(cv::Range(0,3), cv::Range(3,4)));
//    Pp = cv::Mat(calibration.Kp) * temp;

//    cv::Mat xyzw;
//    cv::triangulatePoints(Pc, Pp, projPointsCam, projPointsProj, xyzw);

//    xyz.create(3, N, CV_32F);
//    for(int i=0; i<N; i++){
//        xyz.at<float>(0,i) = xyzw.at<float>(0,i)/xyzw.at<float>(3,i);
//        xyz.at<float>(1,i) = xyzw.at<float>(1,i)/xyzw.at<float>(3,i);
//        xyz.at<float>(2,i) = xyzw.at<float>(2,i)/xyzw.at<float>(3,i);
//    }

//    xyz = xyz.t();
//    xyz = xyz.reshape(3, up.rows);
//}
