Subversion Repositories: seema-scanner


--- Rev 123
+++ Rev 128
 #include "SMReconstructionWorker.h"
 
 #include "AlgorithmGrayCode.h"
 #include "AlgorithmGrayCodeHorzVert.h"
-#include "AlgorithmPhaseShift.h"
+#include "AlgorithmPhaseShiftTwoFreq.h"
+#include "AlgorithmPhaseShiftThreeFreq.h"
 #include "AlgorithmLineShift.h"
 
 #include <QCoreApplication>
 #include <QSettings>
 
 #include <iostream>
 #include <opencv2/opencv.hpp>
 
 #include "cvtools.h"
 
 #include <pcl/filters/statistical_outlier_removal.h>
 #include <pcl/io/pcd_io.h>
 #include <pcl/features/normal_3d.h>
 
 
 void SMReconstructionWorker::setup(){
 
     QSettings settings;
 
     // Get current calibration
     calibration = settings.value("calibration/parameters").value<SMCalibrationParameters>();
 
     // Create Algorithm
     int resX = settings.value("projector/resX").toInt();
     int resY = settings.value("projector/resY").toInt();
     QString codec = settings.value("algorithm", "GrayCode").toString();
     if(codec == "GrayCode")
         algorithm = new AlgorithmGrayCode(resX, resY);
     else if(codec == "GrayCodeHorzVert")
         algorithm = new AlgorithmGrayCodeHorzVert(resX, resY);
-    else if(codec == "PhaseShift")
-        algorithm = new AlgorithmPhaseShift(resX, resY);
+    else if(codec == "PhaseShiftTwoFreq")
+        algorithm = new AlgorithmPhaseShiftTwoFreq(resX, resY);
+    else if(codec == "PhaseShiftThreeFreq")
+        algorithm = new AlgorithmPhaseShiftThreeFreq(resX, resY);
     else if(codec == "LineShift")
         algorithm = new AlgorithmLineShift(resX, resY);
     else
         std::cerr << "SLScanWorker: invalid codec " << codec.toStdString() << std::endl;
 
 
 //    // Precompute lens correction maps
 //    cv::Mat eye = cv::Mat::eye(3, 3, CV_32F);
 //    cv::initUndistortRectifyMap(calibration.K0, calibration.k0, eye, calibration.K0, cv::Size(calibration.frameWidth, calibration.frameHeight), CV_32FC1, lensMap0Horz, lensMap0Vert);
 //    cv::initUndistortRectifyMap(calibration.K0, calibration.k0, eye, calibration.K0, cv::Size(calibration.frameWidth, calibration.frameHeight), CV_32FC1, lensMap1Horz, lensMap1Vert);
 
     //cv::Mat mapHorz, mapVert;
     //cv::normalize(lensMap0Horz, mapHorz, 0, 255, cv::NORM_MINMAX, CV_8U);
     //cv::normalize(lensMap0Vert, mapVert, 0, 255, cv::NORM_MINMAX, CV_8U);
     //cv::imwrite("mapHorz.png", mapHorz);
     //cv::imwrite("mapVert.png", mapVert);
 }
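Rev 128 splits the former "PhaseShift" codec into "PhaseShiftTwoFreq" and "PhaseShiftThreeFreq", still selected through the "algorithm" key in QSettings. Below is a minimal sketch of how that selection could be written into the settings store that setup() reads; only the keys ("algorithm", "projector/resX", "projector/resY") come from the code above, while the organization/application names and the values are illustrative assumptions.

#include <QCoreApplication>
#include <QSettings>

int main(int argc, char *argv[]){
    QCoreApplication app(argc, argv);

    // QSettings must resolve to the same store that setup() reads;
    // the organization/application names here are assumptions.
    QCoreApplication::setOrganizationName("seema-scanner");
    QCoreApplication::setApplicationName("seema-scanner");

    QSettings settings;
    settings.setValue("projector/resX", 1920);              // example projector resolution
    settings.setValue("projector/resY", 1080);
    settings.setValue("algorithm", "PhaseShiftThreeFreq");  // one of the codecs accepted by setup()

    return 0;
}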
 
 void SMReconstructionWorker::reconstructPointCloud(SMFrameSequence frameSequence){
 
     time.start();
 
     // Get 3D Points
     std::vector<cv::Point3f> Q;
     std::vector<cv::Vec3b> color;
     algorithm->get3DPoints(calibration, frameSequence.frames0, frameSequence.frames1, Q, color);
 
     // Convert point cloud to PCL format
-    pcl::PointCloud<pcl::PointXYZRGB>::Ptr pointCloudPCL(new pcl::PointCloud<pcl::PointXYZRGB>);
+    pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr pointCloudPCL(new pcl::PointCloud<pcl::PointXYZRGBNormal>);
 
     pointCloudPCL->width = Q.size();
     pointCloudPCL->height = 1;
-    pointCloudPCL->is_dense = false;
+    pointCloudPCL->is_dense = true;
 
     pointCloudPCL->points.resize(Q.size());
 
     for(unsigned int i=0; i<Q.size(); i++){
-        pcl::PointXYZRGB point;
+        pcl::PointXYZRGBNormal point;
         point.x = Q[i].x; point.y = Q[i].y; point.z = Q[i].z;
         point.r = color[i][0]; point.g = color[i][1]; point.b = color[i][2];
         pointCloudPCL->points[i] = point;
     }
 
-//    // Estimate surface normals
-//    pcl::NormalEstimation<pcl::PointXYZRGB, pcl::PointXYZRGBNormal> ne;
-//    pcl::search::KdTree<pcl::PointXYZRGB>::Ptr tree(new pcl::search::KdTree<pcl::PointXYZRGB>());
-//    ne.setSearchMethod(tree);
-//    ne.setRadiusSearch(3);
-//    ne.setViewPoint(0.0, 0.0, 0.0);
-//    ne.setInputCloud(pointCloudPCL);
-//    ne.compute(*pointCloudPCL);
+    // Estimate surface normals
+    pcl::NormalEstimation<pcl::PointXYZRGBNormal, pcl::PointXYZRGBNormal> ne;
+    pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr pointCloudPCLCopy(new pcl::PointCloud<pcl::PointXYZRGBNormal>);
+    pcl::copyPointCloud(*pointCloudPCL, *pointCloudPCLCopy);
+    //ne.setKSearch(10);
+    ne.setRadiusSearch(0.5);
+    ne.setViewPoint(0.0, 0.0, 0.0);
+    ne.setInputCloud(pointCloudPCLCopy);
+    ne.compute(*pointCloudPCL);
 
     // Assemble SMPointCloud data structure
     SMPointCloud smPointCloud;
     smPointCloud.id = frameSequence.id;
     smPointCloud.pointCloud = pointCloudPCL;
     smPointCloud.rotationAngle = frameSequence.rotationAngle;
 
     // Determine transform in world (camera0) coordinate system
     float angleRadians = frameSequence.rotationAngle/180.0*M_PI;
     cv::Vec3f rot_rvec(0.0, -angleRadians, 0.0);
     cv::Mat R;
     cv::Rodrigues(rot_rvec, R);
     smPointCloud.R = calibration.Rr.t()*cv::Matx33f(R)*calibration.Rr;
     smPointCloud.T = calibration.Rr.t()*cv::Matx33f(R)*calibration.Tr - calibration.Rr.t()*calibration.Tr;
 
     // Emit result
     emit newPointCloud(smPointCloud);
 
     std::cout << "SMReconstructionWorker: " << time.elapsed() << "ms" << std::endl;
 
 }
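Rev 128 switches the cloud type to pcl::PointXYZRGBNormal and enables the previously commented-out normal estimation, searching on a copy of the cloud and writing the normals back into the original. The following condensed, self-contained sketch shows that PCL call sequence in isolation; it assumes a metric cloud in which a 0.5 search radius is meaningful, and the single hard-coded point exists only to make the example runnable.

#include <pcl/point_types.h>
#include <pcl/point_cloud.h>
#include <pcl/common/io.h>
#include <pcl/features/normal_3d.h>

int main(){
    // Cloud holding XYZ + RGB now, and normals after ne.compute().
    pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZRGBNormal>);
    pcl::PointXYZRGBNormal p;
    p.x = 0.0f; p.y = 0.0f; p.z = 1.0f;
    p.r = 255; p.g = 255; p.b = 255;
    cloud->push_back(p);

    // Search on a copy so the estimator does not build its search structure
    // on the cloud it is writing normals into (mirrors the Rev 128 change).
    pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr searchCloud(new pcl::PointCloud<pcl::PointXYZRGBNormal>);
    pcl::copyPointCloud(*cloud, *searchCloud);

    pcl::NormalEstimation<pcl::PointXYZRGBNormal, pcl::PointXYZRGBNormal> ne;
    ne.setInputCloud(searchCloud);
    ne.setRadiusSearch(0.5);        // neighborhood radius in the cloud's length unit
    ne.setViewPoint(0.0, 0.0, 0.0); // orient normals toward the camera at the origin
    ne.compute(*cloud);             // fills normal_x/y/z and curvature in place

    return 0;
}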
 
 void SMReconstructionWorker::reconstructPointClouds(std::vector<SMFrameSequence> frameSequences){
 
     // Process sequentially
     for(int i=0; i<frameSequences.size(); i++){
         reconstructPointCloud(frameSequences[i]);
     }
 
 }
 
 void SMReconstructionWorker::triangulate(std::vector<cv::Point2f>& q0, std::vector<cv::Point2f>& q1, std::vector<cv::Point3f> &Q){
 
     cv::Mat P0(3,4,CV_32F,cv::Scalar(0.0));
     cv::Mat(calibration.K0).copyTo(P0(cv::Range(0,3), cv::Range(0,3)));
 
     cv::Mat temp(3,4,CV_32F);
     cv::Mat(calibration.R1).copyTo(temp(cv::Range(0,3), cv::Range(0,3)));
     cv::Mat(calibration.T1).copyTo(temp(cv::Range(0,3), cv::Range(3,4)));
     cv::Mat P1 = cv::Mat(calibration.K1) * temp;
 
     cv::Mat QMatHomogenous, QMat;
     cv::triangulatePoints(P0, P1, q0, q1, QMatHomogenous);
     cvtools::convertMatFromHomogeneous(QMatHomogenous, QMat);
     cvtools::matToPoints3f(QMat, Q);
 
 
 }
 
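The triangulate() helper is unchanged between the two revisions: it builds the projection matrices P0 = K0 [I|0] and P1 = K1 [R1|T1] and passes the stereo correspondences to cv::triangulatePoints. The sketch below reproduces the same flow with only standard OpenCV calls, using cv::convertPointsFromHomogeneous in place of the project's cvtools helpers; the intrinsics, baseline, and point pair are made-up values for illustration.

#include <opencv2/opencv.hpp>
#include <iostream>
#include <vector>

int main(){
    // P0 = K [I|0] for camera 0, P1 = K [R|T] for camera 1 (shared intrinsics assumed here).
    cv::Mat K = (cv::Mat_<float>(3,3) << 1000,    0, 320,
                                            0, 1000, 240,
                                            0,    0,   1);
    cv::Mat P0 = cv::Mat::zeros(3, 4, CV_32F);
    K.copyTo(P0(cv::Range(0,3), cv::Range(0,3)));

    cv::Mat RT = cv::Mat::eye(3, 4, CV_32F);   // identity rotation ...
    RT.at<float>(0, 3) = -100.0f;              // ... with a 100-unit baseline along x
    cv::Mat P1 = K * RT;

    // One corresponding image point in each camera.
    std::vector<cv::Point2f> q0 = { cv::Point2f(320.0f, 240.0f) };
    std::vector<cv::Point2f> q1 = { cv::Point2f(220.0f, 240.0f) };

    cv::Mat QHomogeneous;                      // 4xN homogeneous result
    cv::triangulatePoints(P0, P1, q0, q1, QHomogeneous);

    // Normalize homogeneous coordinates to Euclidean 3D points.
    std::vector<cv::Point3f> Q;
    cv::convertPointsFromHomogeneous(QHomogeneous.t(), Q);
    std::cout << Q[0] << std::endl;            // expected: roughly (0, 0, 1000)
    return 0;
}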