Subversion Repositories seema-scanner


Diff between Rev 99 and Rev 100. Lines removed in Rev 100 are marked with '-', lines added with '+'; all other lines are unchanged.

#include "SMReconstructionWorker.h"

#include "AlgorithmGrayCode.h"
#include "AlgorithmGrayCodeHorzVert.h"
#include "AlgorithmPhaseShift.h"

#include <QCoreApplication>
#include <QSettings>

#include <iostream>
#include <opencv2/opencv.hpp>

#include "cvtools.h"

#include <pcl/filters/statistical_outlier_removal.h>
#include <pcl/io/pcd_io.h>
#include <pcl/features/normal_3d.h>


void SMReconstructionWorker::setup(){

    QSettings settings;

    // Get current calibration
    calibration = settings.value("calibration/parameters").value<SMCalibrationParameters>();

    // Create Algorithm
    int resX = settings.value("projector/resX").toInt();
    int resY = settings.value("projector/resY").toInt();
    QString codec = settings.value("algorithm", "GrayCode").toString();
    if(codec == "GrayCode")
        algorithm = new AlgorithmGrayCode(resX, resY);
    else if(codec == "GrayCodeHQ")
        algorithm = new AlgorithmGrayCodeHorzVert(resX, resY);
    else if(codec == "PhaseShift")
        algorithm = new AlgorithmPhaseShift(resX, resY);
    else
        std::cerr << "SLScanWorker: invalid codec " << codec.toStdString() << std::endl;


//    // Precompute lens correction maps
//    cv::Mat eye = cv::Mat::eye(3, 3, CV_32F);
//    cv::initUndistortRectifyMap(calibration.K0, calibration.k0, eye, calibration.K0, cv::Size(calibration.frameWidth, calibration.frameHeight), CV_32FC1, lensMap0Horz, lensMap0Vert);
//    cv::initUndistortRectifyMap(calibration.K0, calibration.k0, eye, calibration.K0, cv::Size(calibration.frameWidth, calibration.frameHeight), CV_32FC1, lensMap1Horz, lensMap1Vert);

    //cv::Mat mapHorz, mapVert;
    //cv::normalize(lensMap0Horz, mapHorz, 0, 255, cv::NORM_MINMAX, CV_8U);
    //cv::normalize(lensMap0Vert, mapVert, 0, 255, cv::NORM_MINMAX, CV_8U);
    //cv::imwrite("mapHorz.png", mapHorz);
    //cv::imwrite("mapVert.png", mapVert);
}
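
Editor's note (not repository code): setup() reads the codec and projector resolution from QSettings, so the algorithm is selected by writing those keys before the worker runs. A minimal sketch, assuming the Qt organization/application names are set elsewhere; the helper name and values are examples only:

    #include <QSettings>

    void selectPhaseShift(){  // hypothetical helper, not in the repository
        QSettings settings;
        settings.setValue("algorithm", "PhaseShift");  // or "GrayCode" / "GrayCodeHQ"
        settings.setValue("projector/resX", 1024);     // example projector resolution
        settings.setValue("projector/resY", 768);
    }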

void SMReconstructionWorker::reconstructPointCloud(SMFrameSequence frameSequence){

    time.start();

    // Decompress frames
    int nFrames = frameSequence.compressedFrames0.size();
    std::vector<cv::Mat> frames0(nFrames), frames1(nFrames);
    for(int i=0; i<nFrames; i++){
-        cv::imdecode(frameSequence.compressedFrames0[i], frames0[i]);
-        cv::imdecode(frameSequence.compressedFrames1[i], frames1[i]);
+        cv::imdecode(frameSequence.compressedFrames0[i], CV_LOAD_IMAGE_COLOR, &frames0[i]);
+        cv::imdecode(frameSequence.compressedFrames1[i], CV_LOAD_IMAGE_COLOR, &frames1[i]);
    }

    // Get 3D Points
    std::vector<cv::Point3f> Q;
    std::vector<cv::Vec3b> color;
    algorithm->get3DPoints(calibration, frames0, frames1, Q, color);

    // Convert point cloud to PCL format
    pcl::PointCloud<pcl::PointXYZRGB>::Ptr pointCloudPCL(new pcl::PointCloud<pcl::PointXYZRGB>);

    pointCloudPCL->width = Q.size();
    pointCloudPCL->height = 1;
    pointCloudPCL->is_dense = false;

    pointCloudPCL->points.resize(Q.size());

    for(unsigned int i=0; i<Q.size(); i++){
        pcl::PointXYZRGB point;
        point.x = Q[i].x; point.y = Q[i].y; point.z = Q[i].z;
        point.r = color[i][0]; point.g = color[i][1]; point.b = color[i][2];
        pointCloudPCL->points[i] = point;
    }

//    // Estimate surface normals
//    pcl::NormalEstimation<pcl::PointXYZRGB, pcl::PointXYZRGBNormal> ne;
//    pcl::search::KdTree<pcl::PointXYZRGB>::Ptr tree(new pcl::search::KdTree<pcl::PointXYZRGB>());
//    ne.setSearchMethod(tree);
//    ne.setRadiusSearch(3);
//    ne.setViewPoint(0.0, 0.0, 0.0);
//    ne.setInputCloud(pointCloudPCL);
//    ne.compute(*pointCloudPCL);

    // Assemble SMPointCloud data structure
    SMPointCloud smPointCloud;
    smPointCloud.id = frameSequence.id;
    smPointCloud.pointCloud = pointCloudPCL;
    smPointCloud.rotationAngle = frameSequence.rotationAngle;

    // Determine transform in world (camera0) coordinate system
    float angleRadians = frameSequence.rotationAngle/180.0*M_PI;
    cv::Vec3f rot_rvec(0.0, -angleRadians, 0.0);
    cv::Mat R;
    cv::Rodrigues(rot_rvec, R);
    smPointCloud.R = calibration.Rr.t()*cv::Matx33f(R)*calibration.Rr;
    smPointCloud.T = calibration.Rr.t()*cv::Matx33f(R)*calibration.Tr - calibration.Rr.t()*calibration.Tr;

    // Emit result
    emit newPointCloud(smPointCloud);

    std::cout << "SMReconstructionWorker: " << time.elapsed() << "ms" << std::endl;

}
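
Note on the change in this revision (a sketch, not repository code): the two-argument cv::imdecode overload takes an int flag as its second argument, not an output cv::Mat, so Rev 100 switches to the three-argument overload cv::imdecode(buf, flags, &dst), which decodes directly into the preallocated Mat. A self-contained illustration with a stand-in buffer, assuming OpenCV 2.x (CV_LOAD_IMAGE_COLOR); the helper name and image size are examples only:

    #include <vector>
    #include <opencv2/opencv.hpp>

    void decodeExample(){  // hypothetical helper, not in the repository
        std::vector<uchar> buf;
        cv::Mat img = cv::Mat::zeros(480, 640, CV_8UC3);
        cv::imencode(".png", img, buf);                  // stand-in for a compressed camera frame
        cv::Mat frame;
        cv::imdecode(buf, CV_LOAD_IMAGE_COLOR, &frame);  // decode in place, as in Rev 100
    }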

void SMReconstructionWorker::reconstructPointClouds(std::vector<SMFrameSequence> frameSequences){

    // Process sequentially
    for(int i=0; i<frameSequences.size(); i++){
        reconstructPointCloud(frameSequences[i]);
    }

}

void SMReconstructionWorker::triangulate(std::vector<cv::Point2f>& q0, std::vector<cv::Point2f>& q1, std::vector<cv::Point3f> &Q){

    cv::Mat P0(3,4,CV_32F,cv::Scalar(0.0));
    cv::Mat(calibration.K0).copyTo(P0(cv::Range(0,3), cv::Range(0,3)));

    cv::Mat temp(3,4,CV_32F);
    cv::Mat(calibration.R1).copyTo(temp(cv::Range(0,3), cv::Range(0,3)));
    cv::Mat(calibration.T1).copyTo(temp(cv::Range(0,3), cv::Range(3,4)));
    cv::Mat P1 = cv::Mat(calibration.K1) * temp;

    cv::Mat QMatHomogenous, QMat;
    cv::triangulatePoints(P0, P1, q0, q1, QMatHomogenous);
    cvtools::convertMatFromHomogeneous(QMatHomogenous, QMat);
    cvtools::matToPoints3f(QMat, Q);


}
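
Usage sketch for triangulate() (hypothetical; assumes the method is callable from outside the class and that the worker has been set up with a valid calibration): q0 and q1 hold corresponding pixel coordinates in camera 0 and camera 1, triangulated against P0 = K0*[I|0] and P1 = K1*[R1|T1] as built above. The coordinates shown are examples only.

    SMReconstructionWorker worker;
    worker.setup();
    std::vector<cv::Point2f> q0(1, cv::Point2f(512.3f, 384.7f));  // point seen by camera 0
    std::vector<cv::Point2f> q1(1, cv::Point2f(498.1f, 380.2f));  // same point seen by camera 1
    std::vector<cv::Point3f> Q;
    worker.triangulate(q0, q1, Q);  // Q receives one cv::Point3f per correspondence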