I am building a face detection app for Android that finds faces in a still image (not real-time). I first tried the FaceDetector class from android.media, which works but is not very accurate, so I downloaded the JavaCV and JavaCPP packages (built against OpenCV 2.3). The project runs, but it throws the following exception:
Thread [<1> main] (Suspended (exception UnsatisfiedLinkError))
Loader.loadLibrary(Class, String[], String) line: 463
Loader.load(Class) line: 368
Loader.load() line: 315
opencv_core.<clinit>() line: 131
Class.classForName(String, boolean, ClassLoader) line: not available [native method]
Class.forName(String, boolean, ClassLoader) line: 234
Loader.load(Class) line: 334
opencv_imgproc.<clinit>() line: 96
Class.classForName(String, boolean, ClassLoader) line: not available [native method]
Class.forName(String, boolean, ClassLoader) line: 234
Loader.load(Class) line: 334
opencv_objdetect.<clinit>() line: 96
Class.classForName(String, boolean, ClassLoader) line: not available [native method]
Class.forName(String, boolean, ClassLoader) line: 234
Loader.load(Class) line: 334
MyView.<init>(Context) line: 50
FaceSwAPPerActivity.onCreate(Bundle) line: 35
Instrumentation.callActivityOnCreate(Activity, Bundle) line: 1047
ActivityThread.performLaunchActivity(ActivityThread$ActivityClientRecord, Intent) line: 1611
ActivityThread.handleLaunchActivity(ActivityThread$ActivityClientRecord, Intent) line: 1663
ActivityThread.access$1500(ActivityThread, ActivityThread$ActivityClientRecord, Intent) line: 117
ActivityThread$H.handleMessage(Message) line: 931
ActivityThread$H(Handler).dispatchMessage(Message) line: 99
Looper.loop() line: 123
ActivityThread.main(String[]) line: 3683
Method.invokeNative(Object, Object[], Class, Class[], Class, int, boolean) line: not available [native method]
Method.invoke(Object, Object...) line: 507
ZygoteInit$MethodAndArgsCaller.run() line: 839
ZygoteInit.main(String[]) line: 597
NativeStart.main(String[]) line: not available [native method]
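For context, the earlier android.media.FaceDetector attempt looked roughly like this. This is only a simplified sketch of that approach (written inside a View, so getResources() is available), not my exact code; the maxFaces value and the drawing step are placeholders:

import android.graphics.PointF;
import android.media.FaceDetector;

// Simplified sketch of the android.media.FaceDetector approach.
// FaceDetector requires an RGB_565 bitmap whose width is even.
private void detectWithAndroidFaceDetector() {
    BitmapFactory.Options options = new BitmapFactory.Options();
    options.inPreferredConfig = Bitmap.Config.RGB_565;
    Bitmap bitmap = BitmapFactory.decodeResource(getResources(), R.drawable.kidsparty, options);

    int maxFaces = 10;
    FaceDetector detector = new FaceDetector(bitmap.getWidth(), bitmap.getHeight(), maxFaces);
    FaceDetector.Face[] faces = new FaceDetector.Face[maxFaces];
    int found = detector.findFaces(bitmap, faces);

    for (int i = 0; i < found; i++) {
        PointF midPoint = new PointF();
        faces[i].getMidPoint(midPoint);               // point between the eyes
        float eyesDistance = faces[i].eyesDistance(); // rough scale of the face
        // ...mark the region around midPoint / eyesDistance on a canvas...
    }
}

That version ran fine but was not accurate enough, which is why I switched to the JavaCV code below.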
Here is my JavaCV code:
import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.os.Bundle;
import android.view.View;
import com.googlecode.javacpp.Loader;
import com.googlecode.javacv.cpp.opencv_objdetect;
import com.googlecode.javacv.cpp.opencv_objdetect.*;
import com.googlecode.javacv.cpp.opencv_core.*;
import static com.googlecode.javacv.cpp.opencv_core.cvGetSeqElem;
import static com.googlecode.javacv.cpp.opencv_core.cvRectangle;
import static com.googlecode.javacv.cpp.opencv_core.cvPoint;
import static com.googlecode.javacv.cpp.opencv_objdetect.cvHaarDetectObjects;
import static com.googlecode.javacv.cpp.opencv_core.cvLoad;
import static com.googlecode.javacv.cpp.opencv_imgproc.cvCvtColor;
import static com.googlecode.javacv.cpp.opencv_imgproc.CV_BGR2GRAY;
import static com.googlecode.javacv.cpp.opencv_core.CV_AA;
import static com.googlecode.javacv.cpp.opencv_core.IPL_DEPTH_8U;
public class FaceSwAPPerActivity extends Activity {
    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);
        //setContentView(R.layout.main);
        setContentView(new MyView(this));
    }
}

// The cascade definition to be used for detection.
//private static final String CASCADE_FILE = "haarcascade_frontalface_alt.xml";

class MyView extends View {
    Bitmap myBitmap;

    public MyView(Context context)
    {
        super(context);
        Loader.load(opencv_objdetect.class);
        String CASCADE_FILE = "../haarcascade_frontalface_alt.xml";

        BitmapFactory.Options bitmapFactoryOptions = new BitmapFactory.Options();
        bitmapFactoryOptions.inPreferredConfig = Bitmap.Config.ARGB_8888;
        myBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.kidsparty, bitmapFactoryOptions);

        // Load the original image.
        IplImage originalImage = IplImage.create(200, 400, IPL_DEPTH_8U, 1);
        //IplImage originalImage = com.googlecode.javacv.cpp.opencv_highgui.cvLoadImage( imgPath , 1);
        myBitmap.copyPixelsToBuffer(originalImage.getByteBuffer());

        // We need a grayscale image in order to do the recognition, so we
        // create a new image of the same size as the original one.
        IplImage grayImage = IplImage.create(originalImage.width(), originalImage.height(), IPL_DEPTH_8U, 1);

        // We convert the original image to grayscale.
        cvCvtColor(originalImage, grayImage, CV_BGR2GRAY);

        CvMemStorage storage = CvMemStorage.create();

        // We instantiate a classifier cascade to be used for detection, using the cascade definition.
        CvHaarClassifierCascade cascade = new CvHaarClassifierCascade(cvLoad(CASCADE_FILE));

        // We detect the faces.
        CvSeq faces = cvHaarDetectObjects(grayImage, cascade, storage, 1.1, 1, 0);

        // We iterate over the discovered faces and draw yellow rectangles around them.
        for (int i = 0; i < faces.total(); i++) {
            CvRect r = new CvRect(cvGetSeqElem(faces, i));
            cvRectangle(originalImage, cvPoint(r.x(), r.y()),
                    cvPoint(r.x() + r.width(), r.y() + r.height()), CvScalar.YELLOW, 1, CV_AA, 0);
        }

        myBitmap.copyPixelsToBuffer(originalImage.getByteBuffer());

        // Save the image to a new file.
        //com.googlecode.javacv.cpp.opencv_highgui.cvSaveImage(args[1], originalImage);
    }

    @Override
    protected void onDraw(Canvas canvas)
    {
        canvas.drawBitmap(myBitmap, 0, 0, null);
    }
}
I can't figure out what is going wrong here, so any help would be appreciated.