#include "SMReconstructionWorker.h"

#include "AlgorithmGrayCode.h"
#include "AlgorithmGrayCodeHorzVert.h"
#include "AlgorithmPhaseShiftTwoFreq.h"
#include "AlgorithmPhaseShiftTwoFreqHorzVert.h"
#include "AlgorithmPhaseShiftThreeFreq.h"
#include "AlgorithmPhaseShiftEmbedded.h"
#include "AlgorithmLineShift.h"

#include <QCoreApplication>
#include <QSettings>

#include <cassert> // assert()
#include <cmath>   // M_PI
#include <iostream>
#include <opencv2/opencv.hpp>

#include "cvtools.h"
#include <opencv2/core/eigen.hpp>

#include <pcl/filters/statistical_outlier_removal.h>
#include <pcl/io/pcd_io.h>
#include <pcl/features/normal_3d.h>
#include <pcl/features/normal_3d_omp.h>
#include <pcl/common/transforms.h>

/* Convert everything to Debayered floating point frames */
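/* Input is assumed to be 8-bit BayerBG-patterned; output frames are CV_32FC3 RGB scaled to [0,1]. */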
void debayerAndFloat(const std::vector<cv::Mat> &rawFrames, std::vector<cv::Mat> &frames){

    unsigned int nFrames = rawFrames.size();
    frames.resize(nFrames);

    assert(rawFrames[0].type() == CV_8UC1);

    // Debayer and convert to float
    for(unsigned int i=0; i<nFrames; i++){
        cv::cvtColor(rawFrames[i], frames[i], CV_BayerBG2RGB);
        frames[i].convertTo(frames[i], CV_32FC3, 1.0/255.0);
    }

}

/* Merge exposures of HDR sequence */
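// Frames are expected ordered by shutter time: the nHDRFrames pattern frames
// captured at shutters[0] come first, then the same patterns at shutters[1], etc.
// Each exposure is scaled by shutterMin/shutters[j] onto a common radiometric
// scale, with shutters[0] taken as the reference (shortest) exposure, and is
// accumulated only at pixels that are unsaturated (all channels < 0.99) in the
// first frame of that exposure group.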
void mergeHDR(const std::vector<cv::Mat> &frames, const std::vector<float> &shutters, std::vector<cv::Mat> &hdrFrames){

    int nShutters = shutters.size();
    unsigned int nFrames = frames.size();
    unsigned int nHDRFrames = nFrames/nShutters;

    assert(nShutters * nHDRFrames == nFrames);

    int nRows = frames[0].rows;
    int nCols = frames[0].cols;

    // Merge into HDR
    std::vector<cv::Mat> outputFrames(nHDRFrames);

    // shutters[0] serves as the reference (shortest) exposure
    float shutterMin = shutters[0];

    for(unsigned int i=0; i<nHDRFrames; i++){
        outputFrames[i].create(nRows, nCols, CV_32FC3);
        outputFrames[i].setTo(0.0);
    }

    for(int j=0; j<nShutters; j++){

        std::vector<cv::Mat> frameChannels;
        cv::split(frames[j*nHDRFrames], frameChannels);

        // Accumulate only pixels which are unsaturated at this shutter speed
        cv::Mat mask = (frameChannels[0] < 0.99) & (frameChannels[1] < 0.99) & (frameChannels[2] < 0.99);

        // Parallelize over output frames: each iteration writes a distinct
        // outputFrames[i], so no two threads accumulate into the same matrix.
        #pragma omp parallel for
        for(int i=0; i<static_cast<int>(nHDRFrames); i++){
            cv::Mat frameji = frames[j*nHDRFrames + i];

            cv::add((shutterMin/shutters[j]) * frameji, outputFrames[i], outputFrames[i], mask);
        }
    }

    hdrFrames = outputFrames;
}

void SMReconstructionWorker::reconstructPointCloud(const SMFrameSequence &frameSequence){

    std::cout << "reconstructPointCloud" << std::endl;

    time.start();

    QSettings settings;

    // Get current calibration
    calibration = settings.value("calibration/parameters").value<SMCalibrationParameters>();

    // Create Algorithm
    QString codec = frameSequence.codec;
    unsigned int resX = settings.value("projector/resX").toInt();
    unsigned int resY = settings.value("projector/resY").toInt();

    if(codec == "GrayCode")
        algorithm = new AlgorithmGrayCode(resX, resY);
    else if(codec == "GrayCodeHorzVert")
        algorithm = new AlgorithmGrayCodeHorzVert(resX, resY);
    else if(codec == "PhaseShiftTwoFreq")
        algorithm = new AlgorithmPhaseShiftTwoFreq(resX, resY);
    else if(codec == "PhaseShiftTwoFreqHorzVert")
        algorithm = new AlgorithmPhaseShiftTwoFreqHorzVert(resX, resY);
    else if(codec == "PhaseShiftThreeFreq")
        algorithm = new AlgorithmPhaseShiftThreeFreq(resX, resY);
    else if(codec == "PhaseShiftEmbedded")
        algorithm = new AlgorithmPhaseShiftEmbedded(resX, resY);
    else if(codec == "LineShift")
        algorithm = new AlgorithmLineShift(resX, resY);
    else{
        std::cerr << "SMReconstructionWorker: invalid codec (please set codec in preferences): " << codec.toStdString() << std::endl;
        return; // bail out; otherwise the null algorithm would be dereferenced below (TODO: no default?)
    }

    std::vector<cv::Mat> frames0, frames1;
    debayerAndFloat(frameSequence.frames0, frames0);
    debayerAndFloat(frameSequence.frames1, frames1);

    // If HDR sequence, merge frames
    if(frameSequence.shutters.size() > 1){
        mergeHDR(frames0, frameSequence.shutters, frames0);
        mergeHDR(frames1, frameSequence.shutters, frames1);
    }

    assert(frames0.size() == algorithm->getNPatterns());
    assert(frames1.size() == algorithm->getNPatterns());

    // Get 3D Points
    std::vector<cv::Point3f> Q;
    std::vector<cv::Vec3f> color;
    algorithm->get3DPoints(calibration, frames0, frames1, Q, color);

    // Convert point cloud to PCL format
    pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr pointCloudPCL(new pcl::PointCloud<pcl::PointXYZRGBNormal>);

    pointCloudPCL->width = Q.size();
    pointCloudPCL->height = 1;
    pointCloudPCL->is_dense = true;

    pointCloudPCL->points.resize(Q.size());

    for(unsigned int i=0; i<Q.size(); i++){
        pcl::PointXYZRGBNormal point;
        point.x = Q[i].x; point.y = Q[i].y; point.z = Q[i].z;
        point.r = 255*color[i][0]; point.g = 255*color[i][1]; point.b = 255*color[i][2];
        pointCloudPCL->points[i] = point;
    }

    // Transform point cloud to rotation axis coordinate system
    /*cv::Mat TRCV(3, 4, CV_32F);
    cv::Mat(calibration.Rr).copyTo(TRCV.colRange(0, 3));
    cv::Mat(calibration.Tr).copyTo(TRCV.col(3));
    Eigen::Affine3f TR;
    cv::cv2eigen(TRCV, TR.matrix());
    pcl::transformPointCloud(*pointCloudPCL, *pointCloudPCL, TR);

    // Estimate surface normals (does not produce proper normals...)
    std::cout << "Estimating normals..." << std::endl;
    pcl::PointCloud<pcl::PointXYZ>::Ptr points(new pcl::PointCloud<pcl::PointXYZ>);
    pcl::copyPointCloud(*pointCloudPCL, *points);
    pcl::PointCloud<pcl::Normal>::Ptr normals(new pcl::PointCloud<pcl::Normal>);
    pcl::NormalEstimationOMP<pcl::PointXYZ, pcl::Normal> ne;
    pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ>());
    tree->setInputCloud(points);
    ne.setSearchMethod(tree);
    ne.setRadiusSearch(1.0);
    //ne.setKSearch(50);
    ne.setViewPoint(0.0, 0.0, 0.0);
    ne.setInputCloud(points);
    ne.compute(*normals);
    pcl::copyPointCloud(*normals, *pointCloudPCL);*/

    // Assemble SMPointCloud data structure
    SMPointCloud smPointCloud;
    smPointCloud.id = frameSequence.id;
    smPointCloud.pointCloud = pointCloudPCL;
    smPointCloud.rotationAngle = frameSequence.rotationAngle;

    // Determine transform in world (camera0) coordinate system
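    // With x_axis = Rr*x_cam0 + Tr mapping camera0 coordinates into the rotation
    // axis frame, rotating by -angle about the axis frame's y axis and mapping back
    // gives x' = Rr^T*R*Rr*x + Rr^T*R*Tr - Rr^T*Tr, which is assembled below.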
    float angleRadians = frameSequence.rotationAngle/180.0*M_PI;
    cv::Vec3f rot_rvec(0.0, -angleRadians, 0.0);
    cv::Mat R;
    cv::Rodrigues(rot_rvec, R);
    smPointCloud.R = calibration.Rr.t()*cv::Matx33f(R)*calibration.Rr;
    smPointCloud.T = calibration.Rr.t()*cv::Matx33f(R)*calibration.Tr - calibration.Rr.t()*calibration.Tr;


    // Alternative (disabled): apply the turntable rotation directly, without the rotation axis conjugation
    /*float angleRadians = frameSequence.rotationAngle/180.0*M_PI;
    cv::Vec3f rot_rvec(0.0, -angleRadians, 0.0);
    cv::Mat R;
    cv::Rodrigues(rot_rvec, R);
    smPointCloud.R = cv::Matx33f(R);
    smPointCloud.T = cv::Vec3f(0.0,0.0,0.0);*/

    // Emit result
    emit newPointCloud(smPointCloud);
    std::cout << "SMReconstructionWorker: " << time.elapsed() << "ms" << std::endl;
    std::cout << "SMReconstructionWorker: " << smPointCloud.pointCloud->size() << " Points" << std::endl;

}


void SMReconstructionWorker::reconstructPointClouds(const std::vector<SMFrameSequence> &frameSequences){

    // Process frame sequences in parallel, skipping any that are already reconstructed
    #pragma omp parallel for
    for(unsigned int i=0; i<frameSequences.size(); i++){
        if(!frameSequences[i].reconstructed) reconstructPointCloud(frameSequences[i]);
    }
}

void SMReconstructionWorker::triangulate(const std::vector<cv::Point2f>& q0, const std::vector<cv::Point2f>& q1, std::vector<cv::Point3f> &Q){
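    // Camera0 is the reference frame, so its projection matrix is P0 = K0*[I|0];
    // camera1 is placed by the stereo calibration, giving P1 = K1*[R1|T1].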
    cv::Mat P0(3,4,CV_32F,cv::Scalar(0.0));
    cv::Mat(calibration.K0).copyTo(P0(cv::Range(0,3), cv::Range(0,3)));

    cv::Mat temp(3,4,CV_32F);
    cv::Mat(calibration.R1).copyTo(temp(cv::Range(0,3), cv::Range(0,3)));
    cv::Mat(calibration.T1).copyTo(temp(cv::Range(0,3), cv::Range(3,4)));
    cv::Mat P1 = cv::Mat(calibration.K1) * temp;

    cv::Mat QMatHomogenous, QMat;
    cv::triangulatePoints(P0, P1, q0, q1, QMatHomogenous);
    cvtools::convertMatFromHomogeneous(QMatHomogenous, QMat);
    cvtools::matToPoints3f(QMat, Q);
}