// Rev 245
#include "SMReconstructionWorker.h"
#include "AlgorithmGrayCode.h"
#include "AlgorithmGrayCodeHorzVert.h"
#include "AlgorithmPhaseShiftTwoFreq.h"
#include "AlgorithmPhaseShiftTwoFreqHorzVert.h"
#include "AlgorithmPhaseShiftThreeFreq.h"
#include "AlgorithmPhaseShiftEmbedded.h"
#include "AlgorithmLineShift.h"
#include <QCoreApplication>
#include <QSettings>
#include <iostream>
#include <opencv2/opencv.hpp>
#include "cvtools.h"
#include <opencv2/core/eigen.hpp>
#include <pcl/filters/statistical_outlier_removal.h>
#include <pcl/io/pcd_io.h>
#include <pcl/features/normal_3d.h>
#include <pcl/features/normal_3d_omp.h>
#include <pcl/common/transforms.h>
/* Convert everything to Debayered floating point frames */
/* Convert a sequence of raw Bayer-pattern frames (CV_8UC1, BayerBG layout) into
 * debayered floating-point RGB frames (CV_32FC3, values scaled to [0, 1]).
 * rawFrames: input 8-bit single-channel Bayer frames.
 * frames:    output; resized to rawFrames.size() and overwritten. */
void debayerAndFloat(const std::vector<cv::Mat> &rawFrames, std::vector<cv::Mat> &frames){
    unsigned int nFrames = rawFrames.size();
    frames.resize(nFrames);
    // Guard: the type assert below would index into an empty vector (UB).
    if(nFrames == 0)
        return;
    assert(rawFrames[0].type() == CV_8UC1);
    // Debayer and convert to float in [0,1]
    for(unsigned int i=0; i<nFrames; i++){
        cv::cvtColor(rawFrames[i], frames[i], CV_BayerBG2RGB);
        frames[i].convertTo(frames[i], CV_32FC3, 1.0/255.0);
    }
}
/* Merge exposures of HDR sequence */
/* Merge exposures of an HDR sequence.
 * frames:    nShutters * nHDRFrames LDR frames (CV_32FC3, [0,1]), grouped by
 *            shutter: frames[j*nHDRFrames + i] is output frame i at shutter j.
 * shutters:  shutter times; shutters[0] serves as the reference exposure.
 * hdrFrames: output of nHDRFrames merged frames. May alias `frames` — the
 *            result is built in a temporary and assigned at the end.
 *
 * Fix: the previous version put `#pragma omp parallel for` on the loop over
 * shutters j, so all threads accumulated concurrently into the same
 * outputFrames[i] Mats — a data race (and a nondeterministic float sum order).
 * The parallel loop now runs over output frames i, each of which is written by
 * exactly one thread; per-shutter masks are precomputed once, serially. */
void mergeHDR(const std::vector<cv::Mat> &frames, const std::vector<float> &shutters, std::vector<cv::Mat> &hdrFrames){
    int nShutters = shutters.size();
    unsigned int nFrames = frames.size();
    unsigned int nHDRFrames = nFrames/nShutters;
    assert(nShutters * nHDRFrames == nFrames);
    int nRows = frames[0].rows;
    int nCols = frames[0].cols;
    float shutterMin = shutters[0];
    // Per-shutter saturation masks: a pixel contributes only when no channel of
    // the first frame in that shutter group is (nearly) saturated. Computed up
    // front so the parallel loop below does not repeat the split per frame.
    std::vector<cv::Mat> masks(nShutters);
    for(int j=0; j<nShutters; j++){
        std::vector<cv::Mat> frameChannels;
        cv::split(frames[j*nHDRFrames], frameChannels);
        masks[j] = (frameChannels[0] < 0.99) & (frameChannels[1] < 0.99) & (frameChannels[2] < 0.99);
    }
    // Merge into HDR: each iteration owns its outputFrames[i], so no race.
    std::vector<cv::Mat> outputFrames(nHDRFrames);
    #pragma omp parallel for
    for(int i=0; i<(int)nHDRFrames; i++){
        outputFrames[i].create(nRows, nCols, CV_32FC3);
        outputFrames[i].setTo(0.0);
        for(int j=0; j<nShutters; j++){
            cv::Mat frameji = frames[j*nHDRFrames + i];
            // Scale relative to the shortest shutter and accumulate only where
            // the shutter's mask marks the pixel as unsaturated.
            cv::add((shutterMin/shutters[j]) * frameji, outputFrames[i], outputFrames[i], masks[j]);
        }
    }
    hdrFrames = outputFrames;
}
/* Reconstruct a 3D point cloud from one structured-light frame sequence.
 * Reads the current calibration and projector resolution from QSettings,
 * instantiates the decoding algorithm selected by frameSequence.codec,
 * optionally merges HDR exposures, triangulates 3D points with per-point
 * color, wraps them in an SMPointCloud (with the rotation-stage transform),
 * and emits the result via the newPointCloud signal. */
void SMReconstructionWorker::reconstructPointCloud(const SMFrameSequence &frameSequence){
std::cout << "reconstructPointCloud" << std::endl;
time.start();
QSettings settings;
// Get current calibration
calibration = settings.value("calibration/parameters").value<SMCalibrationParameters>();
// Create Algorithm
QString codec = frameSequence.codec;
unsigned int resX = settings.value("projector/resX").toInt();
unsigned int resY = settings.value("projector/resY").toInt();
// NOTE(review): `algorithm` is assigned a raw `new` on every call with no
// visible delete of the previous instance — looks like a leak across repeated
// reconstructions; confirm ownership in the class definition.
if(codec == "GrayCode")
algorithm = new AlgorithmGrayCode(resX, resY);
else if(codec == "GrayCodeHorzVert")
algorithm = new AlgorithmGrayCodeHorzVert(resX, resY);
else if(codec == "PhaseShiftTwoFreq")
algorithm = new AlgorithmPhaseShiftTwoFreq(resX, resY);
else if(codec == "PhaseShiftTwoFreqHorzVert")
algorithm = new AlgorithmPhaseShiftTwoFreqHorzVert(resX, resY);
else if(codec == "PhaseShiftThreeFreq")
algorithm = new AlgorithmPhaseShiftThreeFreq(resX, resY);
else if(codec == "PhaseShiftEmbedded")
algorithm = new AlgorithmPhaseShiftEmbedded(resX, resY);
else if(codec == "LineShift")
algorithm = new AlgorithmLineShift(resX, resY);
else{
std::cerr << "SLScanWorker: invalid codec (Please set codec in preferences): " << codec.toStdString() << std::endl;
return; // otherwise segfault TODO no default?
}
// Debayer both cameras' raw frames and convert to float RGB in [0,1]
std::vector<cv::Mat> frames0, frames1;
debayerAndFloat(frameSequence.frames0, frames0);
debayerAndFloat(frameSequence.frames1, frames1);
// If HDR sequence, merge frames (mergeHDR builds into a temporary, so
// passing frames0 as both input and output is safe)
if(frameSequence.shutters.size() > 1){
mergeHDR(frames0, frameSequence.shutters, frames0);
mergeHDR(frames1, frameSequence.shutters, frames1);
}
assert(frames0.size() == algorithm->getNPatterns());
assert(frames1.size() == algorithm->getNPatterns());
// Get 3D Points and per-point color by decoding/triangulating the patterns
std::vector<cv::Point3f> Q;
std::vector<cv::Vec3f> color;
algorithm->get3DPoints(calibration, frames0, frames1, Q, color);
// Convert point cloud to PCL format (unorganized cloud: width = nPoints, height = 1)
pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr pointCloudPCL(new pcl::PointCloud<pcl::PointXYZRGBNormal>);
pointCloudPCL->width = Q.size();
pointCloudPCL->height = 1;
pointCloudPCL->is_dense = true;
pointCloudPCL->points.resize(Q.size());
for(unsigned int i=0; i<Q.size(); i++){
pcl::PointXYZRGBNormal point;
point.x = Q[i].x; point.y = Q[i].y; point.z = Q[i].z;
// Colors come in as floats in [0,1]; scale to 8-bit channels
point.r = 255*color[i][0]; point.g = 255*color[i][1]; point.b = 255*color[i][2];
pointCloudPCL->points[i] = point;
}
// Transform point cloud to rotation axis coordinate system
/*cv::Mat TRCV(3, 4, CV_32F);
cv::Mat(calibration.Rr).copyTo(TRCV.colRange(0, 3));
cv::Mat(calibration.Tr).copyTo(TRCV.col(3));
Eigen::Affine3f TR;
cv::cv2eigen(TRCV, TR.matrix());
pcl::transformPointCloud(*pointCloudPCL, *pointCloudPCL, TR);
// Estimate surface normals (does not produce proper normals...)
std::cout << "Estimating normals..." << std::endl;
pcl::PointCloud<pcl::PointXYZ>::Ptr points(new pcl::PointCloud<pcl::PointXYZ>);
pcl::copyPointCloud(*pointCloudPCL, *points);
pcl::PointCloud<pcl::Normal>::Ptr normals(new pcl::PointCloud<pcl::Normal>);
pcl::NormalEstimationOMP<pcl::PointXYZ, pcl::Normal> ne;
pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ>());
tree->setInputCloud(points);
ne.setSearchMethod(tree);
ne.setRadiusSearch(1.0);
//ne.setKSearch(50);
ne.setViewPoint(0.0, 0.0, 0.0);
ne.setInputCloud(points);
ne.compute(*normals);
pcl::copyPointCloud(*normals, *pointCloudPCL);*/
// Assemble SMPointCloud data structure
SMPointCloud smPointCloud;
smPointCloud.id = frameSequence.id;
smPointCloud.pointCloud = pointCloudPCL;
smPointCloud.rotationAngle = frameSequence.rotationAngle;
// Determine transform in world (camera0) coordinate system:
// rotate by -angle about the stage's Y axis, conjugated by the
// camera-to-rotation-axis calibration (Rr, Tr).
float angleRadians = frameSequence.rotationAngle/180.0*M_PI;
cv::Vec3f rot_rvec(0.0, -angleRadians, 0.0);
cv::Mat R;
cv::Rodrigues(rot_rvec, R);
smPointCloud.R = calibration.Rr.t()*cv::Matx33f(R)*calibration.Rr;
smPointCloud.T = calibration.Rr.t()*cv::Matx33f(R)*calibration.Tr - calibration.Rr.t()*calibration.Tr;
// Determine transform in world (camera0) coordinate system
/*float angleRadians = frameSequence.rotationAngle/180.0*M_PI;
cv::Vec3f rot_rvec(0.0, -angleRadians, 0.0);
cv::Mat R;
cv::Rodrigues(rot_rvec, R);
smPointCloud.R = cv::Matx33f(R);
smPointCloud.T = cv::Vec3f(0.0,0.0,0.0);*/
// Emit result
emit newPointCloud(smPointCloud);
std::cout << "SMReconstructionWorker: " << time.elapsed() << "ms" << std::endl;
std::cout << "SMReconstructionWorker: " << smPointCloud.pointCloud->size() << " Points" << std::endl;
}
void SMReconstructionWorker::reconstructPointClouds(const std::vector<SMFrameSequence> &frameSequences){
// Process sequentially
#pragma omp parallel for
for(unsigned int i=0; i<frameSequences.size(); i++){
if(!frameSequences[i].reconstructed) reconstructPointCloud(frameSequences[i]);
}
}
/* Triangulate corresponding image points from camera 0 (q0) and camera 1 (q1)
 * into 3D points Q, using the worker's current stereo calibration. Camera 0 is
 * the world frame, so its extrinsics are the identity. */
void SMReconstructionWorker::triangulate(const std::vector<cv::Point2f>& q0, const std::vector<cv::Point2f>& q1, std::vector<cv::Point3f> &Q){
    // Projection matrix of camera 0: P0 = [K0 | 0]
    cv::Mat projection0(3, 4, CV_32F, cv::Scalar(0.0));
    cv::Mat(calibration.K0).copyTo(projection0(cv::Range(0,3), cv::Range(0,3)));
    // Projection matrix of camera 1: P1 = K1 * [R1 | T1]
    cv::Mat extrinsics1(3, 4, CV_32F);
    cv::Mat(calibration.R1).copyTo(extrinsics1(cv::Range(0,3), cv::Range(0,3)));
    cv::Mat(calibration.T1).copyTo(extrinsics1(cv::Range(0,3), cv::Range(3,4)));
    cv::Mat projection1 = cv::Mat(calibration.K1) * extrinsics1;
    // Linear triangulation in homogeneous coordinates, then dehomogenize and
    // repackage as a vector of cv::Point3f.
    cv::Mat pointsHomogeneous;
    cv::triangulatePoints(projection0, projection1, q0, q1, pointsHomogeneous);
    cv::Mat points;
    cvtools::convertMatFromHomogeneous(pointsHomogeneous, points);
    cvtools::matToPoints3f(points, Q);
}