
I am trying to detect a square object on Android using OpenCV through the NDK and crop it down to a flat 2D image. The square's corner points are detected correctly, but when I try to use getPerspectiveTransform(src, dst) I get this error message:

OpenCV Error: Assertion failed (src.checkVector(2, CV_32F) == 4 && dst.checkVector(2, CV_32F)  == 4) in cv::Mat cv::getPerspectiveTransform(cv::InputArray, cv::InputArray), file /home/reports/ci/slave/50-SDK/opencv/modules/imgproc/src/imgwarp.cpp, line 3607
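For context, the assertion is just getPerspectiveTransform's contract: both point arrays must contain exactly four 2-channel CV_32F points. A minimal standalone call that satisfies it looks like this (the coordinates are made up for illustration, this is not my actual code):

#include <opencv2/imgproc/imgproc.hpp>
#include <vector>

using namespace cv;

int main() {
    // getPerspectiveTransform asserts checkVector(2, CV_32F) == 4
    // on both inputs: exactly four 32-bit float 2D points each.
    std::vector<Point2f> srcPts(4), dstPts(4);
    srcPts[0] = Point2f(50, 50);   srcPts[1] = Point2f(250, 40);
    srcPts[2] = Point2f(270, 220); srcPts[3] = Point2f(40, 230);
    dstPts[0] = Point2f(0, 0);     dstPts[1] = Point2f(220, 0);
    dstPts[2] = Point2f(220, 300); dstPts[3] = Point2f(0, 300);

    // Both vectors hold exactly four Point2f, so the assertion
    // passes and a 3x3 transform matrix is returned.
    Mat transmtx = getPerspectiveTransform(srcPts, dstPts);
    return 0;
}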

Here is my Android activity:

package org.opencv.samples.tutorial1;

import java.io.ByteArrayOutputStream;

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.Utils;
import org.opencv.core.CvException;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;

import android.app.Activity;
import android.content.Intent;
import android.graphics.Bitmap;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.SurfaceView;
import android.view.View;
import android.view.WindowManager;
import android.view.View.OnClickListener;
import android.widget.Toast;

public class Tutorial1Activity extends Activity implements
        CvCameraViewListener2 {
    private static final String TAG = "OCVSample::Activity";

    private Mat mRgba;
    private Mat mGrayMat;
    private Mat imageTaken;

    private CameraBridgeViewBase mOpenCvCameraView;
    private boolean mIsJavaCamera = true;
    private MenuItem mItemSwitchCamera = null;

    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
            case LoaderCallbackInterface.SUCCESS: {
                Log.i(TAG, "OpenCV loaded successfully");

                System.loadLibrary("native_sample");

                mOpenCvCameraView.enableView();
            }
                break;
            default: {
                super.onManagerConnected(status);
            }
                break;
            }
        }
    };

    public Tutorial1Activity() {
        Log.i(TAG, "Instantiated new " + this.getClass());
    }

    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        Log.i(TAG, "called onCreate");
        super.onCreate(savedInstanceState);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

        setContentView(R.layout.tutorial1_surface_view);

        mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial1_activity_native_surface_view);

        mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);

        mOpenCvCameraView.setCvCameraViewListener(this);
    }

    @Override
    public void onPause() {
        super.onPause();
        if (mOpenCvCameraView != null)
            mOpenCvCameraView.disableView();
    }

    @Override
    public void onResume() {
        super.onResume();
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this,
                mLoaderCallback);
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        if (mOpenCvCameraView != null)
            mOpenCvCameraView.disableView();
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        Log.i(TAG, "called onCreateOptionsMenu");
        // mItemSwitchCamera = menu.add("Toggle Native/Java camera");
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        String toastMessage = "";
        Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);

        if (item == mItemSwitchCamera) {
            mOpenCvCameraView.setVisibility(SurfaceView.GONE);
            mIsJavaCamera = !mIsJavaCamera;

            if (mIsJavaCamera) {
                mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial1_activity_java_surface_view);
                toastMesage = "Java Camera";
            } else {
                mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial1_activity_native_surface_view);
                toastMesage = "Native Camera";
            }

            mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
            mOpenCvCameraView.setCvCameraViewListener(this);
            mOpenCvCameraView.enableView();

            mOpenCvCameraView.setOnClickListener(new OnClickListener() {

                @Override
                public void onClick(View v) {
                    takePicture();
                }
            });

            Toast toast = Toast.makeText(this, toastMessage, Toast.LENGTH_LONG);
            toast.show();
        }

        return true;
    }

    public void takePicture() {

        if (imageTaken != null) {

            Bitmap resultBitmap = null;

            try {
                // Imgproc.cvtColor(imageTaken, imageTaken,
                // Imgproc.COLOR_BGR2GRAY);
                // Imgproc.cvtColor(imageTaken, imageTaken,
                // Imgproc.COLOR_GRAY2RGBA, 4);

                /*
                 * Mat test =
                 * Imgproc.getPerspectiveTransform(ImageSrc,ImageDst);
                 * Imgproc.warpPerspective(ImageSrc, ImageDst, test,
                 * ImageDst.size());
                 */

                resultBitmap = Bitmap.createBitmap(imageTaken.cols(),
                        imageTaken.rows(), Bitmap.Config.ARGB_8888);
                //
                Utils.matToBitmap(imageTaken, resultBitmap);

                byte[] sendData = codec(resultBitmap,
                        Bitmap.CompressFormat.JPEG, 50);

                Intent i = new Intent(getApplicationContext(),
                        ShowImageActivity.class);
                i.putExtra("data", sendData);
                startActivity(i);

            } catch (CvException e) {
                // TODO: handle exception
                e.printStackTrace();
            }

        }

    }

    private byte[] codec(Bitmap src, Bitmap.CompressFormat format, int quality) {
        ByteArrayOutputStream os = new ByteArrayOutputStream();
        src.compress(format, quality, os);

        byte[] array = os.toByteArray();
        System.out.println(array.length);

//      return BitmapFactory.decodeByteArray(array, 0, array.length);
        return array;

    }

    public void onCameraViewStarted(int width, int height) {
        mRgba = new Mat();
        mGrayMat = new Mat();
        imageTaken = new Mat();
    }

    public void onCameraViewStopped() {
        mRgba.release();
        mGrayMat.release();
        imageTaken.release();
    }

    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

        /*long start = System.currentTimeMillis();

        Size originalSize = inputFrame.rgba().size();

        Imgproc.resize(inputFrame.rgba(), mRgba, new Size(800, 480));

        */
//      FindSquares(inputFrame.rgba().getNativeObjAddr(), 1);
        // imageTaken = inputFrame.clone();

//      System.out.println(inputFrame.rgba().type());

        findSquare(inputFrame.rgba().getNativeObjAddr(), imageTaken.getNativeObjAddr(),  1);

        // if (mDraw == 1) {
        /*Imgproc.resize(mRgba, inputFrame.rgba(), originalSize);
        // }

        long end = System.currentTimeMillis();
        Log.d("Frame time", "" + (end - start) + " ms");
*/
        return inputFrame.rgba();

    }

    public native void FindFeatures(long matAddrGr, long matAddrRgba);

    public native int FindSquares(long matAddrRgba, int draw);

    public native void findSquare(long matAddrRgba, long matAddrDescriptor, int draw);
}
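One thing that may just be an artifact of how I pasted this question together: the native findSquare declared above takes three arguments and returns void, while the JNI implementation below takes five and returns jint, and its mangled name refers to com.gconsent.opencv.MainActivity rather than this Tutorial1Activity. If the mismatch were real, the call would fail with an UnsatisfiedLinkError before OpenCV ran at all. For reference, a prototype that would actually match the three-argument Java declaration above (assuming the package and class are as in this activity) would be:

JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial1_Tutorial1Activity_findSquare(
        JNIEnv* env, jobject obj, jlong matAddrRgba, jlong matAddrDescriptor, jint draw);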

And here is my JNI code:

JNIEXPORT jint JNICALL Java_com_gconsent_opencv_MainActivity_findSquare(JNIEnv*,
        jobject, jlong addrRgba, jlong addrDescriptor, jlong addrSrc, jlong addrDst, jint draw){

    Mat& image = *(Mat*) addrRgba;

    Mat& imageCropped = *(Mat*) addrDescriptor;

    Mat& imageSrc = *(Mat*) addrSrc;
    Mat& imageDst = *(Mat*) addrDst;


    Mat newSrc = image.clone();
    imageCropped = image.clone();
    Mat testImage = image.clone();

    // blur will enhance edge detection
    Mat blurred(testImage);
    medianBlur(testImage, blurred, 9);

    Mat gray0(blurred.size(), CV_8U), gray;
    vector<vector<Point> > contours;

    // find squares in every color plane of the image
    for (int c = 0; c < 3; c++) {
        int ch[] = { c, 0 };
        mixChannels(&blurred, 1, &gray0, 1, ch, 1);

        // try several threshold levels
        const int threshold_level = 2;
        for (int l = 0; l < threshold_level; l++) {
            // Use Canny instead of zero threshold level!
            // Canny helps to catch squares with gradient shading
            if (l == 0) {
                Canny(gray0, gray, 10, 20, 3); //

                // Dilate helps to remove potential holes between edge segments
                dilate(gray, gray, Mat(), Point(-1, -1));
            } else {
                gray = gray0 >= (l + 1) * 255 / threshold_level;
            }

            // Find contours and store them in a list
            findContours(gray, contours, CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE);

            // Test contours
            vector<Point> approx;
            for (size_t i = 0; i < contours.size(); i++) {
                // approximate contour with accuracy proportional
                // to the contour perimeter
                approxPolyDP(Mat(contours[i]), approx,
                        arcLength(Mat(contours[i]), true) * 0.02, true);

                // Note: absolute value of an area is used because
                // area may be positive or negative - in accordance with the
                // contour orientation
                if (approx.size() == 4 && fabs(contourArea(Mat(approx))) > 1000
                        && isContourConvex(Mat(approx))) {
                    double maxCosine = 0;

                    for (int j = 2; j < 5; j++) {
                        double cosine = fabs(
                                angle(approx[j % 4], approx[j - 2],
                                        approx[j - 1]));
                        maxCosine = MAX(maxCosine, cosine);
                    }

                    if (maxCosine < 0.3) {

                        line(image, approx[0], approx[1],
                                Scalar(0, 255, 0, 255), 2, 4, 0);
                        line(image, approx[1], approx[2],
                                Scalar(0, 255, 0, 255), 2, 4, 0);
                        line(image, approx[2], approx[3],
                                Scalar(0, 255, 0, 255), 2, 4, 0);
                        line(image, approx[3], approx[0],
                                Scalar(0, 255, 0, 255), 2, 4, 0);



                        vector<Point2f> src(4);

//                      src.push_back(approx[0]);
//                      src.push_back(approx[1]);
//                      src.push_back(approx[2]);
//                      src.push_back(approx[3]);


                        src[0] = approx[0];
                        src[1] = approx[1];
                        src[2] = approx[2];
                        src[3] = approx[3];

                        cv::Mat quad = cv::Mat::zeros(300, 220, CV_8U);

                        // transformed quadrangle
                        vector<Point2f> quad_pts(4);

//                      Point2f quad_pts[4];

                        quad_pts.push_back(Point(0, 0));
                        quad_pts.push_back(Point(quad.cols, 0));
                        quad_pts.push_back(Point(quad.cols, quad.rows));
                        quad_pts.push_back(Point(0, quad.rows));
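                        // NOTE: quad_pts was constructed with four
                        // default-initialized elements, so the four
                        // push_back calls above grow it to eight points.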

//                      quad_pts[0] = Point(0, 0);
//                      quad_pts[1] = Point(quad.cols, 0);
//                      quad_pts[2] = Point(quad.cols, quad.rows);
//                      quad_pts[3] = Point(0, quad.rows);

                        imageSrc = Mat(src);
                        imageDst = Mat(quad_pts);


                        Mat transmtx = getPerspectiveTransform(src, quad_pts);
                        warpPerspective(src, quad, transmtx, quad.size());

                        imageCropped = quad.clone();

                    }
                }
            }
        }

    }

//  imageCropped = getPolygon(newSrc);

    return 1;

}
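I suspect, though I have not verified it yet, that the assertion comes from the block above: quad_pts is constructed with four default elements and push_back then appends four more, so by the time getPerspectiveTransform runs, dst.checkVector(2, CV_32F) is 8 rather than 4. warpPerspective is also being handed the point vector src as its input image instead of the frame itself. A sketch of that block with both of those changed, everything around it left as in the code above:

                        // Four source corners from the detected square
                        vector<Point2f> src(4);
                        src[0] = approx[0];
                        src[1] = approx[1];
                        src[2] = approx[2];
                        src[3] = approx[3];

                        cv::Mat quad = cv::Mat::zeros(300, 220, CV_8U);

                        // Exactly four destination corners: assign into the
                        // existing elements instead of push_back, which
                        // would append four more and break the assertion
                        vector<Point2f> quad_pts(4);
                        quad_pts[0] = Point2f(0, 0);
                        quad_pts[1] = Point2f(quad.cols, 0);
                        quad_pts[2] = Point2f(quad.cols, quad.rows);
                        quad_pts[3] = Point2f(0, quad.rows);

                        Mat transmtx = getPerspectiveTransform(src, quad_pts);
                        // Warp the untouched copy of the frame,
                        // not the vector of corner points
                        warpPerspective(newSrc, quad, transmtx, quad.size());

                        imageCropped = quad.clone();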