#include "SMReconstructionWorker.h"

#include "AlgorithmGrayCode.h"
#include "AlgorithmGrayCodeHorzVert.h"
#include "AlgorithmPhaseShiftTwoFreq.h"
#include "AlgorithmPhaseShiftTwoFreqHorzVert.h"
#include "AlgorithmPhaseShiftThreeFreq.h"
#include "AlgorithmPhaseShiftEmbedded.h"
#include "AlgorithmLineShift.h"

#include <QCoreApplication>
#include <QSettings>

#include <iostream>
#include <cmath>
#include <opencv2/opencv.hpp>

#include "cvtools.h"
#include <opencv2/core/eigen.hpp>

#include <pcl/filters/statistical_outlier_removal.h>
#include <pcl/io/pcd_io.h>
#include <pcl/features/normal_3d.h>
#include <pcl/features/normal_3d_omp.h>
#include <pcl/common/transforms.h>


void SMReconstructionWorker::reconstructPointCloud(const SMFrameSequence &frameSequence){

    QSettings settings;

    // Get current calibration
    calibration = settings.value("calibration/parameters").value<SMCalibrationParameters>();

    // Create Algorithm
    QString codec = frameSequence.codec;
    int resX = settings.value("projector/resX").toInt();
    int resY = settings.value("projector/resY").toInt();

    if(codec == "GrayCode")
        algorithm = new AlgorithmGrayCode(resX, resY);
    else if(codec == "GrayCodeHorzVert")
        algorithm = new AlgorithmGrayCodeHorzVert(resX, resY);
    else if(codec == "PhaseShiftTwoFreq")
        algorithm = new AlgorithmPhaseShiftTwoFreq(resX, resY);
    else if(codec == "PhaseShiftTwoFreqHorzVert")
        algorithm = new AlgorithmPhaseShiftTwoFreqHorzVert(resX, resY);
    else if(codec == "PhaseShiftThreeFreq")
        algorithm = new AlgorithmPhaseShiftThreeFreq(resX, resY);
    else if(codec == "PhaseShiftEmbedded")
        algorithm = new AlgorithmPhaseShiftEmbedded(resX, resY);
    else if(codec == "LineShift")
        algorithm = new AlgorithmLineShift(resX, resY);
    else{
        std::cerr << "SMReconstructionWorker: invalid codec (please set the codec in preferences): " << codec.toStdString() << std::endl;
        return; // no algorithm was created; bail out to avoid a null-pointer dereference (TODO: default codec?)
    }

    assert(frameSequence.frames0.size() == algorithm->getNPatterns());
    assert(frameSequence.frames1.size() == algorithm->getNPatterns());
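    // i.e. both cameras must have captured exactly one frame per projected pattern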

    time.start();

    // Get 3D Points
    std::vector<cv::Point3f> Q;
    std::vector<cv::Vec3b> color;
    algorithm->get3DPoints(calibration, frameSequence.frames0, frameSequence.frames1, Q, color);

    // Convert point cloud to PCL format
    pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr pointCloudPCL(new pcl::PointCloud<pcl::PointXYZRGBNormal>);

    pointCloudPCL->width = Q.size();
    pointCloudPCL->height = 1;
    pointCloudPCL->is_dense = true;
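    // Unorganized cloud layout: width is the point count and height is 1; is_dense signals that no NaN/invalid points are expected.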

    pointCloudPCL->points.resize(Q.size());

    for(unsigned int i=0; i<Q.size(); i++){
        pcl::PointXYZRGBNormal point;
        point.x = Q[i].x; point.y = Q[i].y; point.z = Q[i].z;
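        // Color assignment assumes get3DPoints() returns channels in r,g,b order;
        // if the frames were OpenCV BGR, indices 0 and 2 would need to be swapped.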
        point.r = color[i][0]; point.g = color[i][1]; point.b = color[i][2];
        pointCloudPCL->points[i] = point;
    }

    // Transform point cloud to rotation axis coordinate system
    /*cv::Mat TRCV(3, 4, CV_32F);
    cv::Mat(calibration.Rr).copyTo(TRCV.colRange(0, 3));
    cv::Mat(calibration.Tr).copyTo(TRCV.col(3));
    Eigen::Affine3f TR;
    cv::cv2eigen(TRCV, TR.matrix());
    pcl::transformPointCloud(*pointCloudPCL, *pointCloudPCL, TR);

    // Estimate surface normals (does not produce proper normals...)
    std::cout << "Estimating normals..." << std::endl;
    pcl::PointCloud<pcl::PointXYZ>::Ptr points(new pcl::PointCloud<pcl::PointXYZ>);
    pcl::copyPointCloud(*pointCloudPCL, *points);
    pcl::PointCloud<pcl::Normal>::Ptr normals(new pcl::PointCloud<pcl::Normal>);
    pcl::NormalEstimationOMP<pcl::PointXYZ, pcl::Normal> ne;
    pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ>());
    tree->setInputCloud(points);
    ne.setSearchMethod(tree);
    ne.setRadiusSearch(1.0);
    //ne.setKSearch(50);
    ne.setViewPoint(0.0, 0.0, 0.0);
    ne.setInputCloud(points);
    ne.compute(*normals);
    pcl::copyPointCloud(*normals, *pointCloudPCL);*/

    // Assemble SMPointCloud data structure
    SMPointCloud smPointCloud;
    smPointCloud.id = frameSequence.id;
    smPointCloud.pointCloud = pointCloudPCL;
    smPointCloud.rotationAngle = frameSequence.rotationAngle;

    // Determine transform in world (camera0) coordinate system
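    // A point p in the camera0 frame maps into the rotation-axis frame as p_r = Rr*p + Tr
    // (cf. the commented-out transform above). Rotating by the stage angle and mapping back
    // gives p' = Rr^T*R*(Rr*p + Tr) - Rr^T*Tr = (Rr^T*R*Rr)*p + (Rr^T*R*Tr - Rr^T*Tr),
    // which is the R and T assembled below.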
    float angleRadians = frameSequence.rotationAngle/180.0*M_PI;
    cv::Vec3f rot_rvec(0.0, -angleRadians, 0.0);
    cv::Mat R;
    cv::Rodrigues(rot_rvec, R);
    smPointCloud.R = calibration.Rr.t()*cv::Matx33f(R)*calibration.Rr;
    smPointCloud.T = calibration.Rr.t()*cv::Matx33f(R)*calibration.Tr - calibration.Rr.t()*calibration.Tr;


    // Determine transform in world (camera0) coordinate system
    /*float angleRadians = frameSequence.rotationAngle/180.0*M_PI;
    cv::Vec3f rot_rvec(0.0, -angleRadians, 0.0);
    cv::Mat R;
    cv::Rodrigues(rot_rvec, R);
    smPointCloud.R = cv::Matx33f(R);
    smPointCloud.T = cv::Vec3f(0.0,0.0,0.0);*/

    // Emit result
    emit newPointCloud(smPointCloud);
    std::cout << "SMReconstructionWorker: " << time.elapsed() << "ms" << std::endl;
    std::cout << "SMReconstructionWorker: " << smPointCloud.pointCloud->size() << " Points" << std::endl;

}

void SMReconstructionWorker::reconstructPointClouds(const std::vector<SMFrameSequence> &frameSequences){

    // Process all sequences that have not been reconstructed yet. The loop is
    // parallelized with OpenMP, but note that reconstructPointCloud() assigns the
    // shared members 'calibration', 'algorithm' and 'time', so concurrent iterations
    // are not thread-safe as written.
    #pragma omp parallel for
    for(unsigned int i=0; i<frameSequences.size(); i++){
        if(!frameSequences[i].reconstructed) reconstructPointCloud(frameSequences[i]);
    }
}

void SMReconstructionWorker::triangulate(const std::vector<cv::Point2f>& q0, const std::vector<cv::Point2f>& q1, std::vector<cv::Point3f> &Q){
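    // Camera0 defines the world frame, so its projection matrix is P0 = K0*[I|0];
    // camera1 is placed by the stereo calibration, giving P1 = K1*[R1|T1].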
    cv::Mat P0(3,4,CV_32F,cv::Scalar(0.0));
    cv::Mat(calibration.K0).copyTo(P0(cv::Range(0,3), cv::Range(0,3)));

    cv::Mat temp(3,4,CV_32F);
    cv::Mat(calibration.R1).copyTo(temp(cv::Range(0,3), cv::Range(0,3)));
    cv::Mat(calibration.T1).copyTo(temp(cv::Range(0,3), cv::Range(3,4)));
    cv::Mat P1 = cv::Mat(calibration.K1) * temp;

    cv::Mat QMatHomogenous, QMat;
    cv::triangulatePoints(P0, P1, q0, q1, QMatHomogenous);
    cvtools::convertMatFromHomogeneous(QMatHomogenous, QMat);
    cvtools::matToPoints3f(QMat, Q);
}