#include "SMCaptureWorker.h"

#include "AlgorithmGrayCode.h"
#include "AlgorithmGrayCodeHorzVert.h"
#include "AlgorithmPhaseShiftTwoFreq.h"
#include "AlgorithmPhaseShiftTwoFreqHorzVert.h"
#include "AlgorithmPhaseShiftThreeFreq.h"
#include "AlgorithmPhaseShiftEmbedded.h"
#include "AlgorithmLineShift.h"

#include <QCoreApplication>
#include <QTime>
#include <QSettings>
#include <QtTest/QTest>

#include "cvtools.h"

void SMCaptureWorker::setup(){

    QSettings settings;

    // Create cameras
    int iNum0 = settings.value("camera0/interfaceNumber", -1).toInt();
    int cNum0 = settings.value("camera0/cameraNumber", -1).toInt();
    camera0.reset(CameraFactory::NewCamera(iNum0,cNum0,triggerModeSoftware));

    if(!camera0)
        return;

    int iNum1 = settings.value("camera1/interfaceNumber", -1).toInt();
    int cNum1 = settings.value("camera1/cameraNumber", -1).toInt();
    camera1.reset(CameraFactory::NewCamera(iNum1,cNum1,triggerModeSoftware));

    if(!camera1)
        return;

    // Set camera settings
    CameraSettings cameraSettings;
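    // shutter time is given in milliseconds (cf. the assert in acquireFrameSequenceHDR); gain is fixed at 0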
    cameraSettings.shutter = settings.value("camera/shutter", 16.666).toFloat();
    cameraSettings.gain = 0.0;

    camera0->setCameraSettings(cameraSettings);
    camera1->setCameraSettings(cameraSettings);

    // Start capturing
    camera0->startCapture();
    camera1->startCapture();

    // Create projector
    //int screenNum = settings.value("projector/screenNumber", -1).toInt();
    int screenNum = settings.value("projector/screenNumber", 1).toInt();
    if(screenNum != -1)
        projector.reset(new ProjectorOpenGL(screenNum));

    if(!projector){
        std::cerr << "SMCaptureWorker: could not create projector" << std::endl;
        return;
    }

    // Create rotation stage; the scanner remains usable without it
    try{
        rotationStage.reset(new RotationStage());
    }
    catch(...){
        std::cerr << "SMCaptureWorker: could not connect to rotation stage" << std::endl;
        rotationStage.reset();
    }

    // Create Algorithm
    unsigned int screenCols, screenRows;
    projector->getScreenRes(&screenCols, &screenRows);
    codec = settings.value("algorithm", "GrayCode").toString();
    if(codec == "GrayCode")
        algorithm.reset(new AlgorithmGrayCode(screenCols, screenRows));
    else if(codec == "GrayCodeHorzVert")
        algorithm.reset(new AlgorithmGrayCodeHorzVert(screenCols, screenRows));
    else if(codec == "PhaseShiftTwoFreq")
        algorithm.reset(new AlgorithmPhaseShiftTwoFreq(screenCols, screenRows));
    else if(codec == "PhaseShiftThreeFreq")
        algorithm.reset(new AlgorithmPhaseShiftThreeFreq(screenCols, screenRows));
    else if(codec == "PhaseShiftTwoFreqHorzVert")
        algorithm.reset(new AlgorithmPhaseShiftTwoFreqHorzVert(screenCols, screenRows));
    else if(codec == "PhaseShiftEmbedded")
        algorithm.reset(new AlgorithmPhaseShiftEmbedded(screenCols, screenRows));
    else if(codec == "LineShift")
        algorithm.reset(new AlgorithmLineShift(screenCols, screenRows));
    else{
        std::cerr << "SMCaptureWorker: invalid codec " << codec.toStdString() << std::endl;
        return;
    }

    // Upload patterns to projector/GPU
    for(unsigned int i=0; i<algorithm->getNPatterns(); i++){
        cv::Mat pattern = algorithm->getEncodingPattern(i);
        projector->setPattern(i, pattern.ptr(), pattern.cols, pattern.rows);
    }

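    // pattern settle delay (ms) and the number of frames averaged ("stacked") per calibration/acquisition exposure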
    delay = settings.value("trigger/delay", 50).toInt();
    stackingCalibration = settings.value("stacking/calibration", 1).toInt();
    stackingAcquisition = settings.value("stacking/acquisition", 1).toInt();

    setupSuccessful = true;
}


void SMCaptureWorker::doWork(){

    if(!setupSuccessful)
        return;

    working = true;

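    // 8x8 checkerboard texture, shown as a focusing aid when focusingPattern is enabled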
    cv::Mat checkerboard(8, 8, CV_8UC3);
    checkerboard.setTo(0);
    checkerboard.rowRange(0, 4).colRange(0, 4).setTo(cv::Vec3b(255,255,255));
    checkerboard.rowRange(4, 8).colRange(4, 8).setTo(cv::Vec3b(255,255,255));

    // Processing loop
//    QTime time;
//    time.start();
    while(working){

        if(focusingPattern)
            projector->displayTexture(checkerboard.ptr(), checkerboard.cols, checkerboard.rows);
        else
            projector->displayWhite();


        // prevent image acquisition timeout
        QTest::qSleep(100);

        CameraFrame frame;

        // trigger cameras
        camera0->trigger();
        camera1->trigger();

        // retrieve raw frames
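        // (the cv::Mat header wraps the driver-owned buffer; clone() makes a deep copy before the buffer is reused)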
        frame = camera0->getFrame();
        cv::Mat frameCV;
        frameCV  = cv::Mat(frame.height, frame.width, CV_8UC1, frame.memory);
        frameCV = frameCV.clone();
//        cvtools::rshift(frameCV, 8);
//        frameCV.convertTo(frameCV, CV_8UC1);
        emit newFrame(0, frameCV);

        frame = camera1->getFrame();
        frameCV  = cv::Mat(frame.height, frame.width, CV_8UC1, frame.memory);
        frameCV = frameCV.clone();
//        cvtools::rshift(frameCV, 8);
//        frameCV.convertTo(frameCV, CV_8UC1);
        emit newFrame(1, frameCV);

        //std::cout << "SMCaptureWorker idle " << time.restart() << "ms" << std::endl;

        // Process events e.g. perform a task
        QCoreApplication::processEvents();
    }

    emit finished();
}

void SMCaptureWorker::rotateTo(float angle){
    // TODO is this the right check
    if(!setupSuccessful || !rotationStage || !rotationStage->Handle)
        return;

    rotationStage->moveAbsolute(angle);
    while(rotationStage->isMoving()){

        // prevent grab timeout in flycapture
        QTest::qSleep(10);

        // trigger cameras
        camera0->trigger();
        camera1->trigger();

        // retrieve frames
        CameraFrame frame;
        frame = camera0->getFrame();
        cv::Mat frameCV;
        frameCV  = cv::Mat(frame.height, frame.width, CV_8UC1, frame.memory);
        frameCV = frameCV.clone();
        emit newFrame(0, frameCV);
        frame = camera1->getFrame();
        frameCV  = cv::Mat(frame.height, frame.width, CV_8UC1, frame.memory);
        frameCV = frameCV.clone();
        emit newFrame(1, frameCV);
    }

    emit rotatedTo(angle);
}

void SMCaptureWorker::acquireCalibrationSet(float angle){
    if(!setupSuccessful || !rotationStage || !rotationStage->Handle)
        return;

    if(angle != -1.0)
        rotateTo(angle);

    projector->displayWhite();

    // just for safe measures
    QTest::qSleep(500);

    CameraFrame frame;
    SMCalibrationSet calibrationSet;
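    // 32-bit accumulators so that summing the stacked 8-bit frames cannot overflow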
    cv::Mat frameCVStacked0(camera0->getFrameHeight(), camera0->getFrameWidth(), CV_32SC1, cv::Scalar(0));
    cv::Mat frameCVStacked1(camera1->getFrameHeight(), camera1->getFrameWidth(), CV_32SC1, cv::Scalar(0));

    for(int i=0; i<stackingCalibration; i++){
        // trigger cameras
        camera0->trigger();
        camera1->trigger();

        // retrieve frames
        frame = camera0->getFrame();
        cv::Mat frameCV;
        frameCV  = cv::Mat(frame.height, frame.width, CV_8UC1, frame.memory);
        frameCV = frameCV.clone();
        cv::add(frameCV, frameCVStacked0, frameCVStacked0, cv::noArray(), CV_32SC1);
//cvtools::writeMat(frameCV, "frameCV.mat", "frameCV");
//cvtools::writeMat(frameCVStacked0, "frameCVStacked0.mat", "frameCVStacked0");
        emit newFrame(0, frameCV);

        frame = camera1->getFrame();
        frameCV  = cv::Mat(frame.height, frame.width, CV_8UC1, frame.memory);
        frameCV = frameCV.clone();
        cv::add(frameCV, frameCVStacked1, frameCVStacked1, cv::noArray(), CV_32SC1);

        emit newFrame(1, frameCV);

    }

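    // divide the sums by the number of stacked frames to get 8-bit averages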
    frameCVStacked0.convertTo(frameCVStacked0, CV_8UC1, 1.0/stackingCalibration);
//cvtools::writeMat(frameCVStacked0, "frameCVStacked0a.mat", "frameCVStacked0a");
    frameCVStacked1.convertTo(frameCVStacked1, CV_8UC1, 1.0/stackingCalibration);

    calibrationSet.frame0 = frameCVStacked0;
    calibrationSet.frame1 = frameCVStacked1;

    calibrationSet.rotationAngle = rotationStage->getAngle();

    emit newCalibrationSet(calibrationSet);
}

void SMCaptureWorker::acquireCalibrationSets(std::vector<float> angles){

    if(!setupSuccessful)
        return;

    for(unsigned int i=0; i<angles.size(); i++)
        acquireCalibrationSet(angles[i]);
}

void SMCaptureWorker::acquireFrameSequenceLDR(SMFrameSequence &frameSequence){

    CameraFrame frame;

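    // project every pattern of the current algorithm and capture (optionally stacked) frames from both cameras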
    for(unsigned int i=0; i<algorithm->getNPatterns(); i++){

        // display pattern
        projector->displayPattern(i);

        QTest::qSleep(delay);

        cv::Mat frameCVStacked0(camera0->getFrameHeight(), camera0->getFrameWidth(), CV_32SC1, cv::Scalar(0));
        cv::Mat frameCVStacked1(camera1->getFrameHeight(), camera1->getFrameWidth(), CV_32SC1, cv::Scalar(0));
        for(int k=0; k<stackingAcquisition; k++){
            // trigger cameras
            camera0->trigger();
            camera1->trigger();

            // retrieve frames
            frame = camera0->getFrame();
            cv::Mat frameCV;
            frameCV  = cv::Mat(frame.height, frame.width, CV_8UC1, frame.memory);
            frameCV = frameCV.clone();
            cv::add(frameCV, frameCVStacked0, frameCVStacked0, cv::noArray(), CV_32SC1);

            emit newFrame(0, frameCV);

            frame = camera1->getFrame();
            frameCV  = cv::Mat(frame.height, frame.width, CV_8UC1, frame.memory);
            frameCV = frameCV.clone();
            cv::add(frameCV, frameCVStacked1, frameCVStacked1, cv::noArray(), CV_32SC1);

            emit newFrame(1, frameCV);

        }

        frameCVStacked0.convertTo(frameCVStacked0, CV_8UC1, 1.0/stackingAcquisition);
        frameCVStacked1.convertTo(frameCVStacked1, CV_8UC1, 1.0/stackingAcquisition);

        frameSequence.frames0.push_back(frameCVStacked0);
        frameSequence.frames1.push_back(frameCVStacked1);

    }


}

void SMCaptureWorker::acquireFrameSequenceHDR(SMFrameSequence &frameSequence){

    QSettings settings;
    QString shuttersString = settings.value("camera/shuttersHDR").toString();
    QStringList list = shuttersString.split("/", QString::SkipEmptyParts);
    std::vector<float> shutters(list.size());
    for(int i=0; i<list.size(); i++)
        shutters[i] = list[i].toFloat();
    if(shutters.empty()){
        std::cerr << "Could not read HDR shutter times" << std::endl;
        return;
    }

    int nShutters = shutters.size();

    std::vector<SMFrameSequence> frameSequences(nShutters);

    CameraSettings cameraSettings;

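    // acquire one complete LDR pattern sequence per shutter time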
    for(int i=0; i<nShutters; i++){

        // Set camera shutter
        cameraSettings.shutter = shutters[i];

        // assert that shutter is given in ms
        assert(cameraSettings.shutter < 2000.0);

        camera0->setCameraSettings(cameraSettings);
        camera1->setCameraSettings(cameraSettings);

        // Project/acquire sequence
        acquireFrameSequenceLDR(frameSequences[i]);

    }

    unsigned int nFrames = frameSequences[0].frames0.size();
    int nRows = frameSequences[0].frames0[0].rows;
    int nCols = frameSequences[0].frames0[0].cols;

    // Merge into HDR
    frameSequence.frames0.resize(nFrames);
    frameSequence.frames1.resize(nFrames);

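    // all exposures are normalized to the mean shutter time during the merge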
    float shutterMean = 0.0;
    for(unsigned int i=0; i<shutters.size(); i++)
        shutterMean += shutters[i]/shutters.size();

    for(unsigned int i=0; i<nFrames; i++){
        frameSequence.frames0[i] = cv::Mat::zeros(nRows, nCols, CV_32F);
        frameSequence.frames1[i] = cv::Mat::zeros(nRows, nCols, CV_32F);
    }

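    // per-pixel HDR merge: for each pixel, walk the shutters from the last listed (assumed to be the
    // longest exposure) towards the first, and use the first exposure whose first frame is not
    // saturated (< 250), scaled by shutterMean/shutter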
    #pragma omp parallel for
    for(int r=0; r<nRows; r++){
        for(int c=0; c<nCols; c++){

            for(int j=nShutters-1; j>=0; j--){

                uchar s0 = frameSequences[j].frames0[0].at<uchar>(r,c);

                if(s0 < 250){
                    for(unsigned int i=0; i<nFrames; i++)
                        frameSequence.frames0[i].at<float>(r,c) = (shutterMean/shutters[j]) * frameSequences[j].frames0[i].at<uchar>(r,c);

                    break;
                }
            }

            for(int j=nShutters-1; j>=0; j--){

                uchar s1 = frameSequences[j].frames1[0].at<uchar>(r,c);

                if(s1 < 250){
                    for(unsigned int i=0; i<nFrames; i++)
                        frameSequence.frames1[i].at<float>(r,c) = (shutterMean/shutters[j]) * frameSequences[j].frames1[i].at<uchar>(r,c);

                    break;
                }
            }
        }
        //cvtools::writeMat(frame0i, QString("frame0_%1.mat").arg(i).toLatin1(),  QString("frame0_%1").arg(i).toLatin1());
    }

    // Set camera shutter back to default
    cameraSettings.shutter = settings.value("camera/shutter", 16.666).toFloat();

    camera0->setCameraSettings(cameraSettings);
    camera1->setCameraSettings(cameraSettings);

    // TODO: we need to somehow make the debayer function cvtColor accept floating point images...
}


void SMCaptureWorker::acquireFrameSequence(float angle){

    if(!setupSuccessful)
        return;

    if(angle != -1.0)
        rotateTo(angle);

    SMFrameSequence frameSequence;

    QSettings settings;
    if(settings.contains("camera/shuttersHDR"))
        acquireFrameSequenceHDR(frameSequence);
    else
        acquireFrameSequenceLDR(frameSequence);


    if(rotationStage && rotationStage->Handle)// TODO is this the right check
        frameSequence.rotationAngle = rotationStage->getAngle();
    else
        frameSequence.rotationAngle = 0;

    frameSequence.codec = codec;

    emit newFrameSequence(frameSequence);

    projector->displayWhite();
}


void SMCaptureWorker::acquireFrameSequences(std::vector<float> angles){

    if(!setupSuccessful)
        return;

    for(unsigned int i=0; i<angles.size(); i++)
        acquireFrameSequence(angles[i]);
}

void SMCaptureWorker::abort(){}

void SMCaptureWorker::stopWork(){
    working = false;
}