Subversion Repositories seema-scanner

Rev

Rev 245 | Only display areas with differences | Ignore whitespace | Details | Blame | Last modification | View Log | RSS feed

Rev 245 Rev 255
1
#include "SMReconstructionWorker.h"
1
#include "SMReconstructionWorker.h"
2
 
2
 
3
#include "AlgorithmGrayCode.h"
3
#include "AlgorithmGrayCode.h"
4
#include "AlgorithmGrayCodeHorzVert.h"
4
#include "AlgorithmGrayCodeHorzVert.h"
5
#include "AlgorithmPhaseShiftTwoFreq.h"
5
#include "AlgorithmPhaseShiftTwoFreq.h"
6
#include "AlgorithmPhaseShiftTwoFreqHorzVert.h"
6
#include "AlgorithmPhaseShiftTwoFreqHorzVert.h"
7
#include "AlgorithmPhaseShiftThreeFreq.h"
7
#include "AlgorithmPhaseShiftThreeFreq.h"
8
#include "AlgorithmPhaseShiftEmbedded.h"
8
#include "AlgorithmPhaseShiftEmbedded.h"
9
#include "AlgorithmLineShift.h"
9
#include "AlgorithmLineShift.h"
10
 
10
 
11
#include <QCoreApplication>
11
#include <QCoreApplication>
12
#include <QSettings>
12
#include <QSettings>
13
 
13
 
14
#include <iostream>
14
#include <iostream>
15
#include <opencv2/opencv.hpp>
15
#include <opencv2/opencv.hpp>
16
 
16
 
17
#include "cvtools.h"
17
#include "cvtools.h"
18
#include <opencv2/core/eigen.hpp>
18
#include <opencv2/core/eigen.hpp>
19
 
19
 
20
#include <pcl/filters/statistical_outlier_removal.h>
20
#include <pcl/filters/statistical_outlier_removal.h>
21
#include <pcl/io/pcd_io.h>
21
#include <pcl/io/pcd_io.h>
22
#include <pcl/features/normal_3d.h>
22
#include <pcl/features/normal_3d.h>
23
#include <pcl/features/normal_3d_omp.h>
23
#include <pcl/features/normal_3d_omp.h>
24
#include <pcl/common/transforms.h>
24
#include <pcl/common/transforms.h>
25
 
25
 
26
/* Convert everything to Debayered floating point frames */
26
/* Convert everything to Debayered floating point frames */
27
void debayerAndFloat(const std::vector<cv::Mat> &rawFrames, std::vector<cv::Mat> &frames){
27
void debayerAndFloat(const std::vector<cv::Mat> &rawFrames, std::vector<cv::Mat> &frames){
28
 
28
 
29
    unsigned int nFrames = rawFrames.size();
29
    unsigned int nFrames = rawFrames.size();
30
    frames.resize(nFrames);
30
    frames.resize(nFrames);
31
 
31
 
32
    assert(rawFrames[0].type() == CV_8UC1);
32
    assert(rawFrames[0].type() == CV_8UC1);
33
 
33
 
34
    // Debayer and convert to float
34
    // Debayer and convert to float
35
    for(unsigned int i=0; i<nFrames; i++){
35
    for(unsigned int i=0; i<nFrames; i++){
36
        cv::cvtColor(rawFrames[i], frames[i], CV_BayerBG2RGB);
36
        cv::cvtColor(rawFrames[i], frames[i], CV_BayerBG2RGB);
37
        frames[i].convertTo(frames[i], CV_32FC3, 1.0/255.0);
37
        frames[i].convertTo(frames[i], CV_32FC3, 1.0/255.0);
38
    }
38
    }
39
 
39
 
40
}
40
}
41
 
41
 
42
/* Merge exposures of HDR sequence */
42
/* Merge exposures of HDR sequence */
43
void mergeHDR(const std::vector<cv::Mat> &frames, const std::vector<float> &shutters, std::vector<cv::Mat> &hdrFrames){
43
void mergeHDR(const std::vector<cv::Mat> &frames, const std::vector<float> &shutters, std::vector<cv::Mat> &hdrFrames){
44
 
44
 
45
    int nShutters = shutters.size();
45
    int nShutters = shutters.size();
46
    unsigned int nFrames = frames.size();
46
    unsigned int nFrames = frames.size();
47
    unsigned int nHDRFrames = nFrames/nShutters;
47
    unsigned int nHDRFrames = nFrames/nShutters;
48
 
48
 
49
    assert(nShutters * nHDRFrames == nFrames);
49
    assert(nShutters * nHDRFrames == nFrames);
50
 
50
 
51
    int nRows = frames[0].rows;
51
    int nRows = frames[0].rows;
52
    int nCols = frames[0].cols;
52
    int nCols = frames[0].cols;
53
 
53
 
54
    // Merge into HDR
54
    // Merge into HDR
55
    std::vector<cv::Mat> outputFrames(nHDRFrames);
55
    std::vector<cv::Mat> outputFrames(nHDRFrames);
56
 
56
 
57
    float shutterMin = shutters[0];
57
    float shutterMin = shutters[0];
58
 
58
 
59
    for(unsigned int i=0; i<nHDRFrames; i++){
59
    for(unsigned int i=0; i<nHDRFrames; i++){
60
        outputFrames[i].create(nRows, nCols, CV_32FC3);
60
        outputFrames[i].create(nRows, nCols, CV_32FC3);
61
        outputFrames[i].setTo(0.0);
61
        outputFrames[i].setTo(0.0);
62
    }
62
    }
63
 
63
 
64
    #pragma omp parallel for
64
    #pragma omp parallel for
65
    for(unsigned int j=0; j<nShutters; j++){
65
    for(int j=0; j<nShutters; j++){
66
 
66
 
67
        std::vector<cv::Mat> frameChannels;
67
        std::vector<cv::Mat> frameChannels;
68
        cv::split(frames[j*nHDRFrames], frameChannels);
68
        cv::split(frames[j*nHDRFrames], frameChannels);
69
 
69
 
70
        cv::Mat mask = (frameChannels[0] < 0.99) & (frameChannels[1] < 0.99) & (frameChannels[2] < 0.99);
70
        cv::Mat mask = (frameChannels[0] < 0.99) & (frameChannels[1] < 0.99) & (frameChannels[2] < 0.99);
71
 
71
 
72
        for(unsigned int i=0; i<nHDRFrames; i++){
72
        for(unsigned int i=0; i<nHDRFrames; i++){
73
            cv::Mat frameji = frames[j*nHDRFrames + i];
73
            cv::Mat frameji = frames[j*nHDRFrames + i];
74
 
74
 
75
            cv::add((shutterMin/shutters[j]) * frameji, outputFrames[i], outputFrames[i], mask);
75
            cv::add((shutterMin/shutters[j]) * frameji, outputFrames[i], outputFrames[i], mask);
76
 
76
 
77
        }
77
        }
78
    }
78
    }
79
 
79
 
80
    hdrFrames = outputFrames;
80
    hdrFrames = outputFrames;
81
}
81
}
82
 
82
 
83
// Reconstruct a 3D point cloud from one structured-light frame sequence:
// selects the decoding algorithm from the sequence's codec, debayers (and
// optionally HDR-merges) both cameras' frames, triangulates 3D points with
// per-point color, converts to a PCL cloud, attaches the rotation-stage
// transform, and emits the result via newPointCloud().
// NOTE(review): writes the shared members 'time', 'calibration' and
// 'algorithm' — not safe to call concurrently on the same worker object.
void SMReconstructionWorker::reconstructPointCloud(const SMFrameSequence &frameSequence){

    std::cout << "reconstructPointCloud" << std::endl;

    // Start timing the whole reconstruction (reported at the end).
    time.start();

    QSettings settings;

    // Get current calibration
    calibration = settings.value("calibration/parameters").value<SMCalibrationParameters>();

    // Create Algorithm matching the codec this sequence was captured with,
    // at the projector resolution from the application settings.
    QString codec = frameSequence.codec;
    unsigned int resX = settings.value("projector/resX").toInt();
    unsigned int resY = settings.value("projector/resY").toInt();

    // NOTE(review): 'algorithm' is assigned with raw new and no delete is
    // visible here — repeated calls presumably leak the previous instance;
    // verify ownership/cleanup elsewhere in the class.
    if(codec == "GrayCode")
        algorithm = new AlgorithmGrayCode(resX, resY);
    else if(codec == "GrayCodeHorzVert")
        algorithm = new AlgorithmGrayCodeHorzVert(resX, resY);
    else if(codec == "PhaseShiftTwoFreq")
        algorithm = new AlgorithmPhaseShiftTwoFreq(resX, resY);
    else if(codec == "PhaseShiftTwoFreqHorzVert")
        algorithm = new AlgorithmPhaseShiftTwoFreqHorzVert(resX, resY);
    else if(codec == "PhaseShiftThreeFreq")
        algorithm = new AlgorithmPhaseShiftThreeFreq(resX, resY);
    else if(codec == "PhaseShiftEmbedded")
        algorithm = new AlgorithmPhaseShiftEmbedded(resX, resY);
    else if(codec == "LineShift")
        algorithm = new AlgorithmLineShift(resX, resY);
    else{
        std::cerr << "SLScanWorker: invalid codec (Please set codec in preferences): " << codec.toStdString() << std::endl;
        return; // otherwise segfault TODO no default?
    }

    // Debayer both cameras' raw frames and convert to float RGB.
    std::vector<cv::Mat> frames0, frames1;
    debayerAndFloat(frameSequence.frames0, frames0);
    debayerAndFloat(frameSequence.frames1, frames1);

    // If HDR sequence, merge frames
    if(frameSequence.shutters.size() > 1){
        mergeHDR(frames0, frameSequence.shutters, frames0);
        mergeHDR(frames1, frameSequence.shutters, frames1);
    }

    // After merging, each camera must have exactly one frame per pattern.
    assert(frames0.size() == algorithm->getNPatterns());
    assert(frames1.size() == algorithm->getNPatterns());

    // Get 3D Points
    std::vector<cv::Point3f> Q;
    std::vector<cv::Vec3f> color;
    algorithm->get3DPoints(calibration, frames0, frames1, Q, color);

    // Convert point cloud to PCL format (unorganized cloud: width = #points, height = 1)
    pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr pointCloudPCL(new pcl::PointCloud<pcl::PointXYZRGBNormal>);

    pointCloudPCL->width = Q.size();
    pointCloudPCL->height = 1;
    pointCloudPCL->is_dense = true;

    pointCloudPCL->points.resize(Q.size());

    for(unsigned int i=0; i<Q.size(); i++){
        pcl::PointXYZRGBNormal point;
        point.x = Q[i].x; point.y = Q[i].y; point.z = Q[i].z;
        // Colors arrive as float [0,1]; scale to 8-bit for PCL RGB fields.
        point.r = 255*color[i][0]; point.g = 255*color[i][1]; point.b = 255*color[i][2];
        pointCloudPCL->points[i] = point;
    }

    // Transform point cloud to rotation axis coordinate system
    /*cv::Mat TRCV(3, 4, CV_32F);
    cv::Mat(calibration.Rr).copyTo(TRCV.colRange(0, 3));
    cv::Mat(calibration.Tr).copyTo(TRCV.col(3));
    Eigen::Affine3f TR;
    cv::cv2eigen(TRCV, TR.matrix());
    pcl::transformPointCloud(*pointCloudPCL, *pointCloudPCL, TR);

    // Estimate surface normals (does not produce proper normals...)
    std::cout << "Estimating normals..." << std::endl;
    pcl::PointCloud<pcl::PointXYZ>::Ptr points(new pcl::PointCloud<pcl::PointXYZ>);
    pcl::copyPointCloud(*pointCloudPCL, *points);
    pcl::PointCloud<pcl::Normal>::Ptr normals(new pcl::PointCloud<pcl::Normal>);
    pcl::NormalEstimationOMP<pcl::PointXYZ, pcl::Normal> ne;
    pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ>());
    tree->setInputCloud(points);
    ne.setSearchMethod(tree);
    ne.setRadiusSearch(1.0);
    //ne.setKSearch(50);
    ne.setViewPoint(0.0, 0.0, 0.0);
    ne.setInputCloud(points);
    ne.compute(*normals);
    pcl::copyPointCloud(*normals, *pointCloudPCL);*/

    // Assemble SMPointCloud data structure
    SMPointCloud smPointCloud;
    smPointCloud.id = frameSequence.id;
    smPointCloud.pointCloud = pointCloudPCL;
    smPointCloud.rotationAngle = frameSequence.rotationAngle;

    // Determine transform in world (camera0) coordinate system:
    // rotation of the stage by rotationAngle about its axis, conjugated by the
    // stage-to-camera calibration (Rr, Tr).
    float angleRadians = frameSequence.rotationAngle/180.0*M_PI;
    cv::Vec3f rot_rvec(0.0, -angleRadians, 0.0);
    cv::Mat R;
    cv::Rodrigues(rot_rvec, R);
    smPointCloud.R = calibration.Rr.t()*cv::Matx33f(R)*calibration.Rr;
    smPointCloud.T = calibration.Rr.t()*cv::Matx33f(R)*calibration.Tr - calibration.Rr.t()*calibration.Tr;


    // Determine transform in world (camera0) coordinate system
    /*float angleRadians = frameSequence.rotationAngle/180.0*M_PI;
    cv::Vec3f rot_rvec(0.0, -angleRadians, 0.0);
    cv::Mat R;
    cv::Rodrigues(rot_rvec, R);
    smPointCloud.R = cv::Matx33f(R);
    smPointCloud.T = cv::Vec3f(0.0,0.0,0.0);*/

    // Emit result
    emit newPointCloud(smPointCloud);
    std::cout << "SMReconstructionWorker: " << time.elapsed() << "ms" << std::endl;
    std::cout << "SMReconstructionWorker: " << smPointCloud.pointCloud->size() << " Points" << std::endl;

}
206
 
205
 
207
 
206
 
208
void SMReconstructionWorker::reconstructPointClouds(const std::vector<SMFrameSequence> &frameSequences){
207
void SMReconstructionWorker::reconstructPointClouds(const std::vector<SMFrameSequence> &frameSequences){
209
 
208
 
210
    // Process sequentially
209
    // Process sequentially
211
    #pragma omp parallel for
210
    #pragma omp parallel for
212
    for(unsigned int i=0; i<frameSequences.size(); i++){
211
    for(unsigned int i=0; i<frameSequences.size(); i++){
213
        if(!frameSequences[i].reconstructed) reconstructPointCloud(frameSequences[i]);
212
        if(!frameSequences[i].reconstructed) reconstructPointCloud(frameSequences[i]);
214
    }
213
    }
215
}
214
}
216
 
215
 
217
void SMReconstructionWorker::triangulate(const std::vector<cv::Point2f>& q0, const std::vector<cv::Point2f>& q1, std::vector<cv::Point3f> &Q){
216
void SMReconstructionWorker::triangulate(const std::vector<cv::Point2f>& q0, const std::vector<cv::Point2f>& q1, std::vector<cv::Point3f> &Q){
218
    cv::Mat P0(3,4,CV_32F,cv::Scalar(0.0));
217
    cv::Mat P0(3,4,CV_32F,cv::Scalar(0.0));
219
    cv::Mat(calibration.K0).copyTo(P0(cv::Range(0,3), cv::Range(0,3)));
218
    cv::Mat(calibration.K0).copyTo(P0(cv::Range(0,3), cv::Range(0,3)));
220
 
219
 
221
    cv::Mat temp(3,4,CV_32F);
220
    cv::Mat temp(3,4,CV_32F);
222
    cv::Mat(calibration.R1).copyTo(temp(cv::Range(0,3), cv::Range(0,3)));
221
    cv::Mat(calibration.R1).copyTo(temp(cv::Range(0,3), cv::Range(0,3)));
223
    cv::Mat(calibration.T1).copyTo(temp(cv::Range(0,3), cv::Range(3,4)));
222
    cv::Mat(calibration.T1).copyTo(temp(cv::Range(0,3), cv::Range(3,4)));
224
    cv::Mat P1 = cv::Mat(calibration.K1) * temp;
223
    cv::Mat P1 = cv::Mat(calibration.K1) * temp;
225
 
224
 
226
    cv::Mat QMatHomogenous, QMat;
225
    cv::Mat QMatHomogenous, QMat;
227
    cv::triangulatePoints(P0, P1, q0, q1, QMatHomogenous);
226
    cv::triangulatePoints(P0, P1, q0, q1, QMatHomogenous);
228
    cvtools::convertMatFromHomogeneous(QMatHomogenous, QMat);
227
    cvtools::convertMatFromHomogeneous(QMatHomogenous, QMat);
229
    cvtools::matToPoints3f(QMat, Q);
228
    cvtools::matToPoints3f(QMat, Q);
230
}
229
}
231
 
230