Subversion Repositories seema-scanner


Changes from Rev 242 to Rev 245, shown as a unified diff (unchanged lines prefixed with a space, removed lines with -, added lines with +).
 #include "SMReconstructionWorker.h"
 
 #include "AlgorithmGrayCode.h"
 #include "AlgorithmGrayCodeHorzVert.h"
 #include "AlgorithmPhaseShiftTwoFreq.h"
 #include "AlgorithmPhaseShiftTwoFreqHorzVert.h"
 #include "AlgorithmPhaseShiftThreeFreq.h"
 #include "AlgorithmPhaseShiftEmbedded.h"
 #include "AlgorithmLineShift.h"
 
 #include <QCoreApplication>
 #include <QSettings>
 
 #include <iostream>
 #include <opencv2/opencv.hpp>
 
 #include "cvtools.h"
 #include <opencv2/core/eigen.hpp>
 
 #include <pcl/filters/statistical_outlier_removal.h>
 #include <pcl/io/pcd_io.h>
 #include <pcl/features/normal_3d.h>
 #include <pcl/features/normal_3d_omp.h>
 #include <pcl/common/transforms.h>
 
+/* Convert everything to Debayered floating point frames */
+void debayerAndFloat(const std::vector<cv::Mat> &rawFrames, std::vector<cv::Mat> &frames){
+
+    unsigned int nFrames = rawFrames.size();
+    frames.resize(nFrames);
+
+    assert(rawFrames[0].type() == CV_8UC1);
+
+    // Debayer and convert to float
+    for(unsigned int i=0; i<nFrames; i++){
+        cv::cvtColor(rawFrames[i], frames[i], CV_BayerBG2RGB);
+        frames[i].convertTo(frames[i], CV_32FC3, 1.0/255.0);
+    }
+
+}
+
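
The new debayerAndFloat helper expects raw 8-bit single-channel Bayer frames (CV_8UC1, BG pattern) and returns CV_32FC3 RGB frames scaled to [0, 1]. A minimal usage sketch on synthetic data; the frame size and count below are made up for illustration:

    // Illustration only: run debayerAndFloat on three synthetic Bayer frames.
    std::vector<cv::Mat> rawFrames(3), frames;
    for(unsigned int i=0; i<rawFrames.size(); i++)
        rawFrames[i] = cv::Mat(480, 640, CV_8UC1, cv::Scalar(128));   // fake raw captures
    debayerAndFloat(rawFrames, frames);
    // each frames[i] is now CV_32FC3 with values in [0, 1]
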
+/* Merge exposures of HDR sequence */
+void mergeHDR(const std::vector<cv::Mat> &frames, const std::vector<float> &shutters, std::vector<cv::Mat> &hdrFrames){
+
+    int nShutters = shutters.size();
+    unsigned int nFrames = frames.size();
+    unsigned int nHDRFrames = nFrames/nShutters;
+
+    assert(nShutters * nHDRFrames == nFrames);
+
+    int nRows = frames[0].rows;
+    int nCols = frames[0].cols;
+
+    // Merge into HDR
+    std::vector<cv::Mat> outputFrames(nHDRFrames);
+
+    float shutterMin = shutters[0];
+
+    for(unsigned int i=0; i<nHDRFrames; i++){
+        outputFrames[i].create(nRows, nCols, CV_32FC3);
+        outputFrames[i].setTo(0.0);
+    }
+
+    #pragma omp parallel for
+    for(unsigned int j=0; j<nShutters; j++){
+
+        std::vector<cv::Mat> frameChannels;
+        cv::split(frames[j*nHDRFrames], frameChannels);
+
+        cv::Mat mask = (frameChannels[0] < 0.99) & (frameChannels[1] < 0.99) & (frameChannels[2] < 0.99);
+
+        for(unsigned int i=0; i<nHDRFrames; i++){
+            cv::Mat frameji = frames[j*nHDRFrames + i];
+
+            cv::add((shutterMin/shutters[j]) * frameji, outputFrames[i], outputFrames[i], mask);
+
+        }
+    }
+
+    hdrFrames = outputFrames;
+}
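
mergeHDR assumes the frames are grouped by shutter time, i.e. frames[j*nHDRFrames + i] is pattern i captured at shutters[j], with shutters[0] taken as the reference (shutterMin), so the shortest exposure is presumably listed first. Each exposure is scaled by shutterMin/shutters[j] and accumulated only where the first frame of that exposure is unsaturated (all channels below 0.99). A minimal calling sketch with made-up shutter times and pattern count:

    // Illustration only: two exposures of a hypothetical 20-pattern sequence.
    std::vector<float> shutters;
    shutters.push_back(16.7f);
    shutters.push_back(66.7f);
    std::vector<cv::Mat> frames(2*20), hdrFrames;
    for(unsigned int i=0; i<frames.size(); i++)
        frames[i] = cv::Mat(480, 640, CV_32FC3, cv::Scalar::all(0.5));   // placeholder data
    mergeHDR(frames, shutters, hdrFrames);
    // hdrFrames.size() == 20, one merged frame per pattern
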
 
 void SMReconstructionWorker::reconstructPointCloud(const SMFrameSequence &frameSequence){
 
+    time.start();
+
     QSettings settings;
 
     // Get current calibration
     calibration = settings.value("calibration/parameters").value<SMCalibrationParameters>();
 
     // Create Algorithm
     QString codec = frameSequence.codec;
     unsigned int resX = settings.value("projector/resX").toInt();
     unsigned int resY = settings.value("projector/resY").toInt();
 
     if(codec == "GrayCode")
         algorithm = new AlgorithmGrayCode(resX, resY);
     else if(codec == "GrayCodeHorzVert")
         algorithm = new AlgorithmGrayCodeHorzVert(resX, resY);
     else if(codec == "PhaseShiftTwoFreq")
         algorithm = new AlgorithmPhaseShiftTwoFreq(resX, resY);
     else if(codec == "PhaseShiftTwoFreqHorzVert")
         algorithm = new AlgorithmPhaseShiftTwoFreqHorzVert(resX, resY);
     else if(codec == "PhaseShiftThreeFreq")
         algorithm = new AlgorithmPhaseShiftThreeFreq(resX, resY);
     else if(codec == "PhaseShiftEmbedded")
         algorithm = new AlgorithmPhaseShiftEmbedded(resX, resY);
     else if(codec == "LineShift")
         algorithm = new AlgorithmLineShift(resX, resY);
     else{
         std::cerr << "SLScanWorker: invalid codec (Please set codec in preferences): " << codec.toStdString() << std::endl;
         return; // otherwise segfault TODO no default?
     }
 
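
The codec string comes from the frame sequence itself, while the projector resolution and the calibration are read from QSettings. A sketch of the keys involved; the values are placeholders, and SMCalibrationParameters is assumed to be declared as a Qt metatype elsewhere in the application so it can round-trip through QVariant:

    // Illustration only: the settings keys consumed above.
    QSettings settings;
    settings.setValue("projector/resX", 1920);
    settings.setValue("projector/resY", 1080);
    // "calibration/parameters" holds a QVariant-wrapped SMCalibrationParameters,
    // presumably written by the calibration part of the application.
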
-    assert(frameSequence.frames0.size() == algorithm->getNPatterns());
-    assert(frameSequence.frames1.size() == algorithm->getNPatterns());
+    // Convert data to floating point and debayer
+    unsigned int nFrames = frameSequence.frames0.size();
 
+    std::vector<cv::Mat> frames0, frames1;
+    debayerAndFloat(frameSequence.frames0, frames0);
+    debayerAndFloat(frameSequence.frames1, frames1);
+
-    // Print OpenCV build information
-    cv::setUseOptimized(true);
-    //std::cout << cv::getBuildInformation();
+    // If HDR sequence, merge frames
+    if(frameSequence.shutters.size() > 1){
+        mergeHDR(frames0, frameSequence.shutters, frames0);
+        mergeHDR(frames1, frameSequence.shutters, frames1);
+    }
 
-    time.start();
+    assert(frames0.size() == algorithm->getNPatterns());
+    assert(frames1.size() == algorithm->getNPatterns());
 
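
This hunk is the core of the revision: raw captures are now debayered and converted to floating point inside the worker, HDR sequences (more than one shutter time) are merged down to one frame per pattern, and the pattern-count asserts check the processed frames instead of the raw frameSequence vectors. With, say, two shutters and a 20-pattern codec, frameSequence.frames0 holds 40 raw frames while frames0 holds 20 after mergeHDR, which is what getNPatterns() expects. The local nFrames introduced here does not appear to be used further down in this diff.
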
     // Get 3D Points
     std::vector<cv::Point3f> Q;
-    std::vector<cv::Vec3b> color;
-    algorithm->get3DPoints(calibration, frameSequence.frames0, frameSequence.frames1, Q, color);
+    std::vector<cv::Vec3f> color;
+    algorithm->get3DPoints(calibration, frames0, frames1, Q, color);
 
     // Convert point cloud to PCL format
     pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr pointCloudPCL(new pcl::PointCloud<pcl::PointXYZRGBNormal>);
 
     pointCloudPCL->width = Q.size();
     pointCloudPCL->height = 1;
     pointCloudPCL->is_dense = true;
 
     pointCloudPCL->points.resize(Q.size());
 
     for(unsigned int i=0; i<Q.size(); i++){
         pcl::PointXYZRGBNormal point;
         point.x = Q[i].x; point.y = Q[i].y; point.z = Q[i].z;
-        point.r = color[i][0]; point.g = color[i][1]; point.b = color[i][2];
+        point.r = 255*color[i][0]; point.g = 255*color[i][1]; point.b = 255*color[i][2];
         pointCloudPCL->points[i] = point;
     }
 
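
Colors now come back from get3DPoints as cv::Vec3f, matching the floating point frames, and are scaled by 255 when packed into the 8-bit r/g/b fields of pcl::PointXYZRGBNormal. There is no clamping in that conversion, so a color channel above 1.0 would not fit in the 8-bit field. A defensive variant of the assignment, shown only as a sketch:

    // Sketch: clamp explicitly when packing float colors into 8-bit fields.
    point.r = cv::saturate_cast<uchar>(255.0f*color[i][0]);
    point.g = cv::saturate_cast<uchar>(255.0f*color[i][1]);
    point.b = cv::saturate_cast<uchar>(255.0f*color[i][2]);
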
     // Transform point cloud to rotation axis coordinate system
     /*cv::Mat TRCV(3, 4, CV_32F);
     cv::Mat(calibration.Rr).copyTo(TRCV.colRange(0, 3));
     cv::Mat(calibration.Tr).copyTo(TRCV.col(3));
     Eigen::Affine3f TR;
     cv::cv2eigen(TRCV, TR.matrix());
     pcl::transformPointCloud(*pointCloudPCL, *pointCloudPCL, TR);
 
     // Estimate surface normals (does not produce proper normals...)
     std::cout << "Estimating normals..." << std::endl;
     pcl::PointCloud<pcl::PointXYZ>::Ptr points(new pcl::PointCloud<pcl::PointXYZ>);
     pcl::copyPointCloud(*pointCloudPCL, *points);
     pcl::PointCloud<pcl::Normal>::Ptr normals(new pcl::PointCloud<pcl::Normal>);
     pcl::NormalEstimationOMP<pcl::PointXYZ, pcl::Normal> ne;
     pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ>());
     tree->setInputCloud(points);
     ne.setSearchMethod(tree);
     ne.setRadiusSearch(1.0);
     //ne.setKSearch(50);
     ne.setViewPoint(0.0, 0.0, 0.0);
     ne.setInputCloud(points);
     ne.compute(*normals);
     pcl::copyPointCloud(*normals, *pointCloudPCL);*/
 
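
The disabled block above first maps the cloud into the rotation-axis coordinate system ([Rr | Tr]) and then estimates normals with the viewpoint left at the origin; after that transform the camera is generally no longer at the origin, which may be one reason the normals do not come out properly oriented. If normals are estimated after the transform, passing the camera position in the new frame (which is Tr) would be one thing to try, sketched here:

    // Sketch: viewpoint at the camera0 position expressed in rotation-axis coordinates.
    ne.setViewPoint(calibration.Tr(0), calibration.Tr(1), calibration.Tr(2));
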
     // Assemble SMPointCloud data structure
     SMPointCloud smPointCloud;
     smPointCloud.id = frameSequence.id;
     smPointCloud.pointCloud = pointCloudPCL;
     smPointCloud.rotationAngle = frameSequence.rotationAngle;
 
     // Determine transform in world (camera0) coordinate system
     float angleRadians = frameSequence.rotationAngle/180.0*M_PI;
     cv::Vec3f rot_rvec(0.0, -angleRadians, 0.0);
     cv::Mat R;
     cv::Rodrigues(rot_rvec, R);
     smPointCloud.R = calibration.Rr.t()*cv::Matx33f(R)*calibration.Rr;
     smPointCloud.T = calibration.Rr.t()*cv::Matx33f(R)*calibration.Tr - calibration.Rr.t()*calibration.Tr;
 
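
Reading Rr, Tr as the camera0-to-rotation-axis transform used in the disabled block above (p_axis = Rr*p + Tr), the R and T assigned here are the turntable rotation expressed back in camera0 coordinates: rotating p_axis by R(theta) = Rodrigues((0, -theta, 0)) and mapping back gives

    p' = Rr^T * ( R(theta)*(Rr*p + Tr) - Tr )
       = (Rr^T*R(theta)*Rr)*p + (Rr^T*R(theta)*Tr - Rr^T*Tr)

which is exactly the smPointCloud.R and smPointCloud.T computed above.
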
 
     // Determine transform in world (camera0) coordinate system
     /*float angleRadians = frameSequence.rotationAngle/180.0*M_PI;
     cv::Vec3f rot_rvec(0.0, -angleRadians, 0.0);
     cv::Mat R;
     cv::Rodrigues(rot_rvec, R);
     smPointCloud.R = cv::Matx33f(R);
     smPointCloud.T = cv::Vec3f(0.0,0.0,0.0);*/
 
     // Emit result
     emit newPointCloud(smPointCloud);
     std::cout << "SMReconstructionWorker: " << time.elapsed() << "ms" << std::endl;
     std::cout << "SMReconstructionWorker: " << smPointCloud.pointCloud->size() << " Points" << std::endl;
 
 }
 
+
 void SMReconstructionWorker::reconstructPointClouds(const std::vector<SMFrameSequence> &frameSequences){
 
     // Process sequentially
     #pragma omp parallel for
     for(unsigned int i=0; i<frameSequences.size(); i++){
         if(!frameSequences[i].reconstructed) reconstructPointCloud(frameSequences[i]);
     }
 }
 
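
Note that the "// Process sequentially" comment and the "#pragma omp parallel for" directly below it disagree. reconstructPointCloud writes what appear to be shared members (calibration, algorithm, time) and emits a signal per sequence, so if the pragma is active those writes would race between threads; presumably either the comment or the pragma is stale.
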
 void SMReconstructionWorker::triangulate(const std::vector<cv::Point2f>& q0, const std::vector<cv::Point2f>& q1, std::vector<cv::Point3f> &Q){
     cv::Mat P0(3,4,CV_32F,cv::Scalar(0.0));
     cv::Mat(calibration.K0).copyTo(P0(cv::Range(0,3), cv::Range(0,3)));
 
     cv::Mat temp(3,4,CV_32F);
     cv::Mat(calibration.R1).copyTo(temp(cv::Range(0,3), cv::Range(0,3)));
     cv::Mat(calibration.T1).copyTo(temp(cv::Range(0,3), cv::Range(3,4)));
     cv::Mat P1 = cv::Mat(calibration.K1) * temp;
 
     cv::Mat QMatHomogenous, QMat;
     cv::triangulatePoints(P0, P1, q0, q1, QMatHomogenous);
     cvtools::convertMatFromHomogeneous(QMatHomogenous, QMat);
     cvtools::matToPoints3f(QMat, Q);
 }
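
triangulate builds the camera0 projection matrix as P0 = K0*[I | 0] and the camera1 matrix as P1 = K1*[R1 | T1], triangulates with cv::triangulatePoints, and dehomogenizes the result through the cvtools helpers, so the output points are in camera0 coordinates. A minimal calling sketch, where worker is a hypothetical SMReconstructionWorker instance whose calibration has been loaded and the pixel coordinates are made up:

    // Illustration only: triangulate a single stereo correspondence.
    std::vector<cv::Point2f> q0, q1;
    q0.push_back(cv::Point2f(512.3f, 384.7f));    // pixel in camera0
    q1.push_back(cv::Point2f(498.1f, 380.2f));    // matching pixel in camera1
    std::vector<cv::Point3f> Q;
    worker.triangulate(q0, q1, Q);                // Q[0]: 3D point in camera0 coordinates
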