I'm trying to implement a stereo camera calibration app using Emgu CV.

My problem is that when I try to undistort the images using CvInvoke.cvRemap, the function just hangs. There is no error and no crash; it simply hangs, and I have left it running for two hours in case it was just slow. Here is what I am doing:

  1. Capture 10 pairs of chessboard samples (left and right) and confirm that FindChessboardCorners succeeds on each of them. I do nothing special to synchronize the cameras; I just capture from both at the same time.
  2. Generate a set of object points based on the chessboard I'm using (a minimal helper is sketched right after this list).
  3. Run a separate CalibrateCamera on the left and right images of each sample, using the object points from 2 and the image points from 1.
  4. Run StereoCalibrate, using the IntrinsicCameraParameters produced by CalibrateCamera in 3, the object points from 2, and the image points captured from the chessboards in 1.
  5. Run StereoRectify using the IntrinsicCameraParameters from 3/4.
  6. Generate mapx and mapy for both left and right with cvInitUndistortRectifyMap, using the output from 5.
  7. Attempt cvRemap using the mapx and mapy from 6 and a fresh image captured from the cameras. Next up: StereoBM.FindStereoCorrespondence and PointCollection.ReprojectImageTo3D to (hopefully) generate a point cloud from the calibrated stereo data.
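
For step 2, the object points are just the chessboard's corner grid expressed in its own plane with z = 0, one identical set per sample. A minimal helper along these lines is what I mean (the helper name is only for illustration; the actual loop lives in buttonStereoCalibrate_Click further down):

// Illustrative helper (not part of the form code below): builds one identical
// grid of chessboard corners per sample, in chessboard-local coordinates,
// z = 0, one unit per chessboard square.
static MCvPoint3D32f[][] BuildObjectPoints(int numSamples, int cbWidth, int cbHeight)
{
    var objectPoints = new MCvPoint3D32f[numSamples][];
    for (int s = 0; s < numSamples; s++)
    {
        objectPoints[s] = new MCvPoint3D32f[cbWidth * cbHeight];
        for (int c = 0; c < cbWidth * cbHeight; c++)
        {
            // Integer division gives the row, modulo gives the column.
            objectPoints[s][c] = new MCvPoint3D32f(c / cbWidth, c % cbWidth, 0.0f);
        }
    }
    return objectPoints;
}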

So when I get to 7, cvRemap hangs. I have had cvRemap working when capturing from a single camera, so I know the function works to some degree with my setup.

I wrote a class to manage the multiple cameras:

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Drawing;
using System.Drawing.Drawing2D;
using System.Windows.Forms;

using Emgu.CV;
using Emgu.CV.UI;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Emgu.CV.VideoSurveillance;


namespace Capture2Cams
{
    class camData
    {
        public Capture capture;
        public Image<Bgr, Byte> lastFrame;
        public Image<Gray, Byte> lastFrameGray;
        public bool lastChessboardFound;
        public PointF[] lastChessboardCorners;
        public Image<Gray, Byte>[] samplesGray;
        public PointF[][] samplesChessboardCorners;
        public Size cbDimensions;
        public Size imageDimensions;
        public int cursampleIndex = 0;
        public ImageList sampleIcons;

        private Image<Gray, Byte> _chessBoardDisplay;
        private int _iconWidth = 160;
        private int _icnonHeight = 90;

        private int _numSamples = 0;
        public int numSamples()
        {
            return _numSamples;
        }

        public void numSamples(int val)
        {
            _numSamples = val;
            this.samplesGray = new Image<Gray, Byte>[val];
            this.samplesChessboardCorners = new PointF[val][];

            this.sampleIcons.ImageSize = new Size(_iconWidth, _icnonHeight);
            Bitmap tmp = new Bitmap(_iconWidth, _icnonHeight);
            this.sampleIcons.Images.Clear();
            for (int c = 0; c < _numSamples; c++) this.sampleIcons.Images.Add(tmp);
        }



        public camData(int camIndex, int capWidth, int capHeight, int pcbWidth, int pcbHeight, int pNumSamples)
        {
            this.sampleIcons = new ImageList();

            try
            {
                this.capture = new Capture(camIndex);
                this.capture.SetCaptureProperty(CAP_PROP.CV_CAP_PROP_FRAME_WIDTH, capWidth);
                this.capture.SetCaptureProperty(CAP_PROP.CV_CAP_PROP_FRAME_HEIGHT, capHeight);
            }
            catch (Exception e)
            {
                MessageBox.Show(e.Message);
                return;
            }
            this.imageDimensions = new Size(capWidth, capHeight);
            this.cbDimensions = new Size(pcbWidth, pcbHeight);
            this.numSamples(pNumSamples);

        }

        public Image<Gray, Byte> captureFrame()
        {
            this.lastFrame = this.capture.QueryFrame();
            this.lastFrameGray = this.lastFrame.Convert<Gray, Byte>();
            return this.lastFrameGray;
        }


        public int captureSample()
        {
            this.detectChessboard(true);  // detectChessboard calls -> captureFrame

            if (lastChessboardFound)
            {
                this.samplesGray[cursampleIndex] = this.lastFrameGray;
                this.samplesChessboardCorners[cursampleIndex] = this.lastChessboardCorners;
                this.sampleIcons.Images[this.cursampleIndex] = this.lastFrameGray.ToBitmap(_iconWidth, _icnonHeight);

                this.cursampleIndex++;
                if (this.cursampleIndex >= _numSamples) this.cursampleIndex = 0;

            }
            return cursampleIndex;
        }

        public void clearSamples()
        {
            this.cursampleIndex = 0;
            this.numSamples(_numSamples);
        }

        public Image<Gray, Byte> detectChessboard(bool pDoCapture)
        {
            if (pDoCapture) this.captureFrame();

            this.lastChessboardFound = CameraCalibration.FindChessboardCorners(this.lastFrameGray, this.cbDimensions, CALIB_CB_TYPE.ADAPTIVE_THRESH | CALIB_CB_TYPE.FILTER_QUADS, out this.lastChessboardCorners);

            _chessBoardDisplay = this.lastFrameGray.Clone();
            CameraCalibration.DrawChessboardCorners(this._chessBoardDisplay, this.cbDimensions, this.lastChessboardCorners, this.lastChessboardFound);

            return this._chessBoardDisplay;
        }

        public void saveSampleImages(string pPath, string pID)
        {
            for(int ic = 0; ic < this._numSamples; ic++)
            {
                this.samplesGray[ic].Save(pPath + pID + ic.ToString() + ".bmp");
            }
        }


        public void loadSampleImages(string pPath, string pID)
        {
            clearSamples();

            for (int ic = 0; ic < this._numSamples; ic++)
            {
                this.lastFrameGray = new Image<Gray, byte>(new Bitmap(pPath + pID + ic.ToString() + ".bmp"));
                this.detectChessboard(false);
                this.samplesChessboardCorners[ic] = this.lastChessboardCorners;
                this.sampleIcons.Images[ic] = this.lastFrameGray.ToBitmap(_iconWidth, _icnonHeight);

                this.samplesGray[ic] = this.lastFrameGray;

            }
        }

    }
}

And here is my form code, which contains the rest of the calibration logic:

using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Windows.Forms;
using System.Runtime.InteropServices;

using Emgu.CV.Util;
using Emgu.CV;
using Emgu.CV.UI;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Emgu.CV.VideoSurveillance;

namespace Capture2Cams
{
    public partial class CaptureForm : Form
    {
        private static camData camLeft;
        private static camData camRight;
        private int _numSamples = 10;  // Number of calibration samples
        private int _imageWidth = 1280;  // web cam resolution
        private int _imageHeight = 720; // web cam resolution
        private int _cbWidth = 9; // chessboard corner count
        private int _cbHeight = 5; // chessboard corner count

        // TODO: Test post calibration values, these will need to be loaded and saved
        private static Matrix<double> _foundamentalMatrix;
        private static Matrix<double> _essentialMatrix;
        private static IntrinsicCameraParameters _inPramsLeft;
        private static IntrinsicCameraParameters _inPramsRight;
        private static ExtrinsicCameraParameters _outExtParamsStereo;

        private  Matrix<float> _mapxLeft;
        private  Matrix<float> _mapyLeft;
        private  Matrix<float> _mapxRight;
        private  Matrix<float> _mapyRight;

        public CaptureForm()
        {
            InitializeComponent();
            Run();
        }

        void Run()
        {
            camLeft = new camData(0, _imageWidth, _imageHeight, _cbWidth, _cbHeight, _numSamples);
            camRight = new camData(1, _imageWidth, _imageHeight, _cbWidth, _cbHeight, _numSamples);

            this.listViewLeft.LargeImageList = camLeft.sampleIcons;
            for (int c = 0; c < _numSamples; c++)
            {
                ListViewItem curItem = new ListViewItem();
                curItem.ImageIndex = c;
                curItem.Text = "Sample" + c.ToString();
                this.listViewLeft.Items.Add(curItem);
            }


            this.listViewRight.LargeImageList = camRight.sampleIcons;
            for (int c = 0; c < _numSamples; c++)
            {
                ListViewItem curItem = new ListViewItem();
                curItem.ImageIndex = c;
                curItem.Text = "Sample" + c.ToString();
                this.listViewRight.Items.Add(curItem);
            }


            Application.Idle += ProcessFrame;
        }

        void ProcessFrame(object sender, EventArgs e)
        {
            if (!checkBoxRectify.Checked)
            {
                if (this.checkBoxCapCB.Checked)
                {
                    imageBoxLeft.Image = camLeft.detectChessboard(true);
                    imageBoxRight.Image = camRight.detectChessboard(true);
                }
                else
                {
                    imageBoxLeft.Image = camLeft.captureFrame();
                    imageBoxRight.Image = camRight.captureFrame();
                }
            }
            else
            {
                camLeft.captureFrame();
                camRight.captureFrame();
                Image<Gray, byte> imgLeft = camLeft.lastFrameGray.Clone();
                Image<Gray, byte> imgRight = camRight.lastFrameGray.Clone();

                CvInvoke.cvRemap(camLeft.lastFrameGray.Ptr, imgLeft.Ptr, _mapxLeft.Ptr, _mapyLeft.Ptr, (int)INTER.CV_INTER_LINEAR | (int)WARP.CV_WARP_FILL_OUTLIERS, new MCvScalar(0));
                CvInvoke.cvRemap(camRight.lastFrameGray.Ptr, imgRight.Ptr, _mapxRight.Ptr, _mapyRight.Ptr, (int)INTER.CV_INTER_LINEAR | (int)WARP.CV_WARP_FILL_OUTLIERS, new MCvScalar(0));

                imageBoxLeft.Image = imgLeft;
                imageBoxRight.Image = imgRight;
            }


            //checkBoxRectify
        }

        private void buttonCaptureSample_Click(object sender, EventArgs e)
        {
            camLeft.captureSample();            
            camRight.captureSample();

            this.listViewLeft.Refresh();
            this.listViewRight.Refresh();
        }

        private void buttonStereoCalibrate_Click(object sender, EventArgs e)
        {
            // We should have most of the data needed from the sampling with the camData objects
            int numCorners = _cbWidth * _cbHeight;

            // Calc intrinsics per camera
            _inPramsLeft = new IntrinsicCameraParameters();
            _inPramsRight = new IntrinsicCameraParameters();

            ExtrinsicCameraParameters[] outExtParamsLeft;
            ExtrinsicCameraParameters[] outExtParamsRight;

            //Matrix<double> foundamentalMatrix;
            //Matrix<double> essentialMatrix;


            outExtParamsLeft = new ExtrinsicCameraParameters[_numSamples];
            outExtParamsRight = new ExtrinsicCameraParameters[_numSamples];
            _outExtParamsStereo = new ExtrinsicCameraParameters();

            // Building object points
            // These are the points on the chessboard in local 3d coordinates
            // Requires one set per sample; if the same calibration object (chessboard) is used for each sample, just reuse the same set of points for every sample
            // Also running sub pixel analysis on the samples
            MCvPoint3D32f[][] objectPoints = new MCvPoint3D32f[_numSamples][];
            for (int sc = 0; sc < _numSamples; sc++) // Samples count
            {
                // individual cam setup
                outExtParamsLeft[sc] = new ExtrinsicCameraParameters();
                outExtParamsRight[sc] = new ExtrinsicCameraParameters();

                // Sub pixel analysis
                camLeft.samplesGray[sc].FindCornerSubPix(new PointF[][] { camLeft.samplesChessboardCorners[sc] }, new Size(10, 10), new Size(-1, -1), new MCvTermCriteria(300, 0.01));
                camRight.samplesGray[sc].FindCornerSubPix(new PointF[][] { camRight.samplesChessboardCorners[sc] }, new Size(10, 10), new Size(-1, -1), new MCvTermCriteria(300, 0.01));

                // Object points
                objectPoints[sc] = new MCvPoint3D32f[numCorners];

                for (int cc = 0; cc < numCorners; cc++)  // chessboard corners count
                {                    
                    objectPoints[sc][cc].x = cc / _cbWidth;
                    objectPoints[sc][cc].y = cc % _cbWidth;
                    objectPoints[sc][cc].z = 0.0f;
                }
            }
            Size imageSize = new Size(_imageWidth, _imageHeight);
            // Individual cam calibration

            CameraCalibration.CalibrateCamera(objectPoints, camLeft.samplesChessboardCorners, imageSize, _inPramsLeft, CALIB_TYPE.DEFAULT, out outExtParamsLeft);
            CameraCalibration.CalibrateCamera(objectPoints, camRight.samplesChessboardCorners, imageSize, _inPramsRight, CALIB_TYPE.DEFAULT, out outExtParamsRight);


            // Stereo Cam calibration
            CameraCalibration.StereoCalibrate(
                objectPoints, 
                camLeft.samplesChessboardCorners, 
                camRight.samplesChessboardCorners, 
                _inPramsLeft, 
                _inPramsRight,
                imageSize, 
                CALIB_TYPE.CV_CALIB_FIX_ASPECT_RATIO | CALIB_TYPE.CV_CALIB_ZERO_TANGENT_DIST | CALIB_TYPE.CV_CALIB_FIX_FOCAL_LENGTH, 
                new MCvTermCriteria(100, 0.001), 
                out _outExtParamsStereo, 
                out _foundamentalMatrix, 
                out _essentialMatrix
                );

            PrintIntrinsic(_inPramsLeft);
            PrintIntrinsic(_inPramsRight);
        }


        private void listViewLeft_ItemSelectionChanged(object sender, ListViewItemSelectionChangedEventArgs e)
        {

        }

        private void listViewRight_ItemSelectionChanged(object sender, ListViewItemSelectionChangedEventArgs e)
        {

        }

        private void buttonSaveSamples_Click(object sender, EventArgs e)
        {
            camLeft.saveSampleImages(textBoxSavePath.Text, "left");
            camRight.saveSampleImages(textBoxSavePath.Text, "right");
        }

        private void buttonLoadSamples_Click(object sender, EventArgs e)
        {
            camLeft.loadSampleImages(textBoxSavePath.Text, "left");
            camRight.loadSampleImages(textBoxSavePath.Text, "right");

            this.listViewLeft.Refresh();
            this.listViewRight.Refresh();
        }

        private void buttonCapture_Click(object sender, EventArgs e)
        {

        }

        private void buttonCaptureCurframe_Click(object sender, EventArgs e)
        {
            camLeft.captureFrame();
            camRight.captureFrame();
            camLeft.lastFrame.Save(textBoxSavePath.Text + "frameLeft" + ".bmp");
            camLeft.lastFrameGray.Save(textBoxSavePath.Text + "frameLeftGray" + ".bmp");
            camRight.lastFrame.Save(textBoxSavePath.Text + "frameRight" + ".bmp");
            camRight.lastFrameGray.Save(textBoxSavePath.Text + "frameRightGray" + ".bmp");
        }

        public void StereoRectify(
            IntrinsicCameraParameters intrinsicParam1,
            IntrinsicCameraParameters intrinsicParam2,
            Size imageSize,           
            ExtrinsicCameraParameters extrinsicParams,
            out Matrix<double> R1,
            out Matrix<double> R2,
            out Matrix<double> P1,
            out Matrix<double> P2,
            out Matrix<double> Q,
            STEREO_RECTIFY_TYPE flags,
            double alpha,
            Size newImageSize,
            ref Rectangle validPixROI1,
            ref Rectangle validPixROI2
            )
        {            
            R1 = new Matrix<double>(3, 3);
            R2 = new Matrix<double>(3, 3);
            P1 = new Matrix<double>(3, 4);
            P2 = new Matrix<double>(3, 4);
            Q = new Matrix<double>(4, 4);

            CvInvoke.cvStereoRectify(
                intrinsicParam1.IntrinsicMatrix.Ptr,
                intrinsicParam2.IntrinsicMatrix.Ptr,
                intrinsicParam1.DistortionCoeffs.Ptr,
                intrinsicParam2.DistortionCoeffs.Ptr,
                imageSize,
                extrinsicParams.RotationVector.Ptr,
                extrinsicParams.TranslationVector.Ptr,
                R1.Ptr,
                R2.Ptr,
                P1.Ptr,
                P2.Ptr,
                Q.Ptr,
                flags,             // honor the flags parameter instead of hard-coding DEFAULT
                alpha,
                newImageSize,
                ref validPixROI1,
                ref validPixROI2); // was validPixROI1 passed twice
        }

        public void InitUndistortRectifyMap(
            IntrinsicCameraParameters intrinsicParam,
            Matrix<double> R,
            Matrix<double> newCameraMatrix,
            out Matrix<float> mapx,
            out Matrix<float> mapy
            )
        {
            mapx = new Matrix<float>(new Size(_imageWidth, _imageHeight));
            mapy = new Matrix<float>(new Size(_imageWidth, _imageHeight));
            CvInvoke.cvInitUndistortRectifyMap(intrinsicParam.IntrinsicMatrix.Ptr, intrinsicParam.DistortionCoeffs.Ptr, R.Ptr, newCameraMatrix.Ptr, mapx.Ptr, mapy.Ptr);
        }


        private void buttonTestCalc_Click(object sender, EventArgs e)
        {
            // Stereo Rectify images           
            Matrix<double> R1;
            Matrix<double> R2;
            Matrix<double> P1;
            Matrix<double> P2;
            Matrix<double> Q;
            Rectangle validPixROI1, validPixROI2;
            validPixROI1 = new Rectangle();
            validPixROI2 = new Rectangle();

            StereoRectify(_inPramsLeft, _inPramsRight, new Size(_imageWidth, _imageHeight), _outExtParamsStereo, out R1, out R2, out P1, out P2, out Q, 0, 0, new Size(_imageWidth, _imageHeight), ref validPixROI1, ref validPixROI2);

            //InitUndistortRectifyMap(_inPramsLeft, R1, P1, out _mapxLeft, out _mapyLeft);
            //InitUndistortRectifyMap(_inPramsRight, R2, P2, out _mapxRight, out _mapyRight);            

            _inPramsLeft.InitUndistortMap(_imageWidth, _imageHeight, out _mapxLeft, out _mapyLeft);
            _inPramsRight.InitUndistortMap(_imageWidth, _imageHeight, out _mapxRight, out _mapyRight);

            Image<Gray, byte> imgLeft = camLeft.lastFrameGray.Clone();
            Image<Gray, byte> imgRight = camRight.lastFrameGray.Clone();

            // **** THIS IS WHERE IM UP TO, no errors, it just hangs ****
            CvInvoke.cvRemap(camLeft.lastFrameGray.Ptr, imgLeft.Ptr, _mapxLeft.Ptr, _mapyLeft.Ptr, (int)INTER.CV_INTER_LINEAR | (int)WARP.CV_WARP_FILL_OUTLIERS, new MCvScalar(0));

            // StereoBM stereoSolver = new StereoBM(Emgu.CV.CvEnum.STEREO_BM_TYPE.BASIC, 0);
            //stereoSolver.FindStereoCorrespondence(                      
        }


        public void PrintIntrinsic(IntrinsicCameraParameters CamIntrinsic)
        {
            // Prints the Intrinsic camera parameters to the command line

            Console.WriteLine("Intrinsic Matrix:");

            string outStr = "";
            int i = 0;
            int j = 0;

            for (i = 0; i < CamIntrinsic.IntrinsicMatrix.Height; i++)
            {
                for (j = 0; j < CamIntrinsic.IntrinsicMatrix.Width; j++)
                {
                    outStr = outStr + CamIntrinsic.IntrinsicMatrix.Data[i, j].ToString();
                    outStr = outStr + "  ";
                }

                Console.WriteLine(outStr);
                outStr = "";

            }

            Console.WriteLine("Distortion Coefficients: ");
            outStr = "";

            for (j = 0; j < CamIntrinsic.DistortionCoeffs.Height; j++)
            {
                outStr = outStr + CamIntrinsic.DistortionCoeffs.Data[j, 0].ToString();
                outStr = outStr + "  ";
            }

            Console.WriteLine(outStr);
        }


        public void PrintExtrinsic(ExtrinsicCameraParameters CamExtrinsic)
        {
            // Prints the Extrinsic camera parameters to the command line
            Console.WriteLine("Extrinsic Matrix:");
            string outStr = "";
            int i = 0;
            int j = 0;
            for (i = 0; i < CamExtrinsic.ExtrinsicMatrix.Height; i++)
            {
                for (j = 0; j < CamExtrinsic.ExtrinsicMatrix.Width; j++)
                {
                    outStr = outStr + CamExtrinsic.ExtrinsicMatrix.Data[i, j].ToString();
                    outStr = outStr + "  ";
                }
                Console.WriteLine(outStr);
                outStr = "";

            }

            Console.WriteLine("Rotation Vector: ");
            outStr = "";

            for (i = 0; i < CamExtrinsic.RotationVector.Height; i++)
            {
                for (j = 0; j < CamExtrinsic.RotationVector.Width; j++)
                {
                    outStr = outStr + CamExtrinsic.RotationVector.Data[i, j].ToString();
                    outStr = outStr + "  ";
                }

                Console.WriteLine(outStr);
                outStr = "";
            }


            Console.WriteLine("Translation Vector: ");
            outStr = "";

            for (i = 0; i < CamExtrinsic.TranslationVector.Height; i++)
            {
                for (j = 0; j < CamExtrinsic.TranslationVector.Width; j++)
                {
                    outStr = outStr + CamExtrinsic.TranslationVector.Data[i, j].ToString();
                    outStr = outStr + "  ";
                }

                Console.WriteLine(outStr);
                outStr = "";
            }
        }


    }
}

Thanks!

1 Answer

The maps must be images, not matrices.

Specifically, of the Gray, float type (Image<Gray, float>).
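
For example, keeping everything else the same (this is only a sketch; R1, P1, _inPramsLeft, _imageWidth, camLeft and imgLeft refer to the names in your buttonTestCalc_Click, and only the map types change):

// Maps allocated as Image<Gray, float> instead of Matrix<float>.
Image<Gray, float> mapxLeft = new Image<Gray, float>(_imageWidth, _imageHeight);
Image<Gray, float> mapyLeft = new Image<Gray, float>(_imageWidth, _imageHeight);

// Fill the rectification maps (R1 / P1 come from your cvStereoRectify call).
CvInvoke.cvInitUndistortRectifyMap(
    _inPramsLeft.IntrinsicMatrix.Ptr,
    _inPramsLeft.DistortionCoeffs.Ptr,
    R1.Ptr,
    P1.Ptr,
    mapxLeft.Ptr,
    mapyLeft.Ptr);

// Remap a freshly captured frame using the image-typed maps.
CvInvoke.cvRemap(
    camLeft.lastFrameGray.Ptr,
    imgLeft.Ptr,
    mapxLeft.Ptr,
    mapyLeft.Ptr,
    (int)INTER.CV_INTER_LINEAR | (int)WARP.CV_WARP_FILL_OUTLIERS,
    new MCvScalar(0));

The same change applies to the right camera's maps.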

answered 2011-08-25T10:35:30.780