
I am trying to place the CameraPreview defined in the Android Developer Guide onto the ApiDemos/OS/Sensors application screen. It works as expected half of the time. However, every time I resume this test application (after pausing it by pressing the device's Home icon), the following exception is logged:

06-13 14:10:17.369: D/SENSORS_TEST(11888): supported preview width x height: 640 x 480
06-13 14:10:17.369: D/SENSORS_TEST(11888): supported preview width x height: 320 x 240
06-13 14:10:17.369: D/SENSORS_TEST(11888): supported preview width x height: 176 x 144
06-13 14:10:17.600: D/dalvikvm(11888): GC_FOR_ALLOC freed 56K, 3% free 9091K/9347K, paused 22ms
06-13 14:10:17.600: I/dalvikvm-heap(11888): Grow heap (frag case) to 9.610MB for 695056-byte allocation
06-13 14:10:17.631: D/dalvikvm(11888): GC_CONCURRENT freed 1K, 3% free 9768K/10055K, paused 2ms+2ms
06-13 14:10:31.510: D/AndroidRuntime(11888): Shutting down VM
06-13 14:10:31.510: W/dalvikvm(11888): threadid=1: thread exiting with uncaught exception (group=0x40a351f8)
06-13 14:10:31.518: E/AndroidRuntime(11888): FATAL EXCEPTION: main
06-13 14:10:31.518: E/AndroidRuntime(11888): java.lang.RuntimeException: Method called after release()
06-13 14:10:31.518: E/AndroidRuntime(11888):    at android.hardware.Camera.setPreviewDisplay(Native Method)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at android.hardware.Camera.setPreviewDisplay(Camera.java:405)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at com.example.sensor.Sensors10Activity$CameraPreview.surfaceCreated(Sensors10Activity.java:221)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at android.view.SurfaceView.updateWindow(SurfaceView.java:533)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at android.view.SurfaceView.onWindowVisibilityChanged(SurfaceView.java:226)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at android.view.View.dispatchWindowVisibilityChanged(View.java:5839)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at android.view.ViewGroup.dispatchWindowVisibilityChanged(ViewGroup.java:945)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at android.view.ViewGroup.dispatchWindowVisibilityChanged(ViewGroup.java:945)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at android.view.ViewGroup.dispatchWindowVisibilityChanged(ViewGroup.java:945)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at android.view.ViewGroup.dispatchWindowVisibilityChanged(ViewGroup.java:945)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at android.view.ViewRootImpl.performTraversals(ViewRootImpl.java:965)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at android.view.ViewRootImpl.handleMessage(ViewRootImpl.java:2442)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at android.os.Handler.dispatchMessage(Handler.java:99)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at android.os.Looper.loop(Looper.java:137)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at android.app.ActivityThread.main(ActivityThread.java:4424)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at java.lang.reflect.Method.invokeNative(Native Method)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at java.lang.reflect.Method.invoke(Method.java:511)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:784)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:551)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at dalvik.system.NativeStart.main(Native Method)

The code is below. Most of it is taken directly from the camera example in the Dev Guide and from the ApiDemos/OS/Sensors sample. Does anyone know what should be done in the activity lifecycle to avoid the exception that occurs at line 221 (marked with a comment in the code)?

Thanks for taking a look,
Greg

public class Sensors10Activity extends Activity {
private final String TAG = "SENSORS_TEST";
private SensorManager mSensorManager;
private GraphView mGraphView;
private Camera mCamera;
private CameraPreview mCameraPreview;

public class GraphView extends View implements SensorEventListener {
    private Bitmap  mBitmap;
    private Paint   mPaint = new Paint();
    private Canvas  mCanvas = new Canvas();
    private Path    mPath = new Path();
    private RectF   mRect = new RectF();
    private float   mLastValues[] = new float[3*2];
    private float   mOrientationValues[] = new float[3];
    private int     mColors[] = new int[3*2];
    private float   mLastX;
    private float   mScale[] = new float[2];
    private float   mYOffset;
    private float   mMaxX;
    private float   mSpeed = 1.0f;
    private float   mWidth;
    private float   mHeight;
    public GraphView(Context context) {
        super(context);
        mColors[0] = Color.argb(192, 255, 64, 64);
        mColors[1] = Color.argb(192, 64, 128, 64);
        mColors[2] = Color.argb(192, 64, 64, 255);
        mColors[3] = Color.argb(192, 64, 255, 255);
        mColors[4] = Color.argb(192, 128, 64, 128);
        mColors[5] = Color.argb(192, 255, 255, 64);

        mPaint.setFlags(Paint.ANTI_ALIAS_FLAG);
        mRect.set(-0.5f, -0.5f, 0.5f, 0.5f);
        mPath.arcTo(mRect, 0, 180);
    }

    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
        mBitmap = Bitmap.createBitmap(w, h, Bitmap.Config.RGB_565);
        mCanvas.setBitmap(mBitmap);
        mCanvas.drawColor(0xFFFFFFFF);
        mYOffset = h * 0.5f;
        mScale[0] = - (h * 0.5f * (1.0f / (SensorManager.STANDARD_GRAVITY * 2)));
        mScale[1] = - (h * 0.5f * (1.0f / (SensorManager.MAGNETIC_FIELD_EARTH_MAX)));
        mWidth = w;
        mHeight = h;
        if (mWidth < mHeight) {
            mMaxX = w;
        } else {
            mMaxX = w-50;
        }
        mLastX = mMaxX;
        super.onSizeChanged(w, h, oldw, oldh);
    }

    @Override
    protected void onDraw(Canvas canvas) {
        synchronized (this) {
            if (mBitmap != null) {
                final Paint paint = mPaint;
                final Path path = mPath;
                final int outer = 0xFFC0C0C0;
                final int inner = 0xFFff7010;

                if (mLastX >= mMaxX) {
                    mLastX = 0;
                    final Canvas cachedCanvas = mCanvas;
                    final float yoffset = mYOffset;
                    final float maxx = mMaxX;
                    final float oneG = SensorManager.STANDARD_GRAVITY * mScale[0];
                    paint.setColor(0xFFAAAAAA);
                    cachedCanvas.drawColor(0xFFFFFFFF);
                    cachedCanvas.drawLine(0, yoffset,      maxx, yoffset,      paint);
                    cachedCanvas.drawLine(0, yoffset+oneG, maxx, yoffset+oneG, paint);
                    cachedCanvas.drawLine(0, yoffset-oneG, maxx, yoffset-oneG, paint);
                }
                canvas.drawBitmap(mBitmap, 0, 0, null);

                float[] values = mOrientationValues;
                if (mWidth < mHeight) {
                    float w0 = mWidth * 0.333333f;
                    float w  = w0 - 32;
                    float x = w0*0.5f;
                    for (int i=0 ; i<3 ; i++) {
                        canvas.save(Canvas.MATRIX_SAVE_FLAG);
                        canvas.translate(x, w*0.5f + 4.0f);
                        canvas.save(Canvas.MATRIX_SAVE_FLAG);
                        paint.setColor(outer);
                        canvas.scale(w, w);
                        canvas.drawOval(mRect, paint);
                        canvas.restore();
                        canvas.scale(w-5, w-5);
                        paint.setColor(inner);
                        canvas.rotate(-values[i]);
                        canvas.drawPath(path, paint);
                        canvas.restore();
                        x += w0;
                    }
                } else {
                    float h0 = mHeight * 0.333333f;
                    float h  = h0 - 32;
                    float y = h0*0.5f;
                    for (int i=0 ; i<3 ; i++) {
                        canvas.save(Canvas.MATRIX_SAVE_FLAG);
                        canvas.translate(mWidth - (h*0.5f + 4.0f), y);
                        canvas.save(Canvas.MATRIX_SAVE_FLAG);
                        paint.setColor(outer);
                        canvas.scale(h, h);
                        canvas.drawOval(mRect, paint);
                        canvas.restore();
                        canvas.scale(h-5, h-5);
                        paint.setColor(inner);
                        canvas.rotate(-values[i]);
                        canvas.drawPath(path, paint);
                        canvas.restore();
                        y += h0;
                    }
                }
            }
        }
    }

    public void onSensorChanged(SensorEvent event) {
        //Log.d(TAG, "sensor: " + sensor + ", x: " + values[0] + ", y: " + values[1] + ", z: " + values[2]);
        synchronized (this) {
            if (mBitmap != null) {
                final Canvas canvas = mCanvas;
                final Paint paint = mPaint;
                if (event.sensor.getType() == Sensor.TYPE_ORIENTATION) {
                    for (int i=0 ; i<3 ; i++) {
                        mOrientationValues[i] = event.values[i];
                    }
                } else {
                    float deltaX = mSpeed;
                    float newX = mLastX + deltaX;

                    int j = (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD) ? 1 : 0;
                    for (int i=0 ; i<3 ; i++) {
                        int k = i+j*3;
                        final float v = mYOffset + event.values[i] * mScale[j];
                        paint.setColor(mColors[k]);
                        canvas.drawLine(mLastX, mLastValues[k], newX, v, paint);
                        mLastValues[k] = v;
                    }
                    if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD)
                        mLastX += mSpeed;
                }
                invalidate();
            }
        }
    }

    public void onAccuracyChanged(Sensor sensor, int accuracy) {
    }
}

public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback {
    private SurfaceHolder mHolder;
    private Camera mCamera;

    public CameraPreview(Context context, Camera camera) {
        super(context);
        mCamera = camera;

        // Install a SurfaceHolder.Callback so we get notified when the
        // underlying surface is created and destroyed.
        mHolder = getHolder();
        mHolder.addCallback(this);
        // deprecated setting, but required on Android versions prior to 3.0
        mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    public void surfaceCreated(SurfaceHolder holder) {
        // The Surface has been created, now tell the camera where to draw the preview.
        try {
            mCamera.setPreviewDisplay(holder);  // !!! LINE 221: exception occurs here.
            mCamera.startPreview();
        } catch (IOException e) {
            Log.d(TAG, "Error setting camera preview: " + e.getMessage());
        }
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        // empty. Take care of releasing the Camera preview in your activity.
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
        // If your preview can change or rotate, take care of those events here.
        // Make sure to stop the preview before resizing or reformatting it.

        if (mHolder.getSurface() == null){
          // preview surface does not exist
          return;
        }

        // stop preview before making changes
        try {
            mCamera.stopPreview();
        } catch (Exception e){
          // ignore: tried to stop a non-existent preview
        }

        // set preview size and make any resize, rotate or
        // reformatting changes here

        // start preview with new settings
        try {
            mCamera.setPreviewDisplay(mHolder);
            mCamera.startPreview();

        } catch (Exception e){
            Log.d(TAG, "Error starting camera preview: " + e.getMessage());
        }
    }
}

//////////////
// LIFE CYCLE
//

/**
 * Initialization of the Activity after it is first created.  Must at least
 * call {@link android.app.Activity#setContentView setContentView()} to
 * describe what is to be displayed in the screen.
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
    // Be sure to call the super class.
    super.onCreate(savedInstanceState);

    mSensorManager = (SensorManager) getSystemService(SENSOR_SERVICE);
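    // Camera id 1 is typically the front-facing camera (open() will fail on devices without a second camera).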
    mCamera = Camera.open(1);
    mCameraPreview = new CameraPreview(this, mCamera);
    mGraphView = new GraphView(this);
    // setContentView(mGraphView);

    // Create RelativeLayout for layout root.
    RelativeLayout relativeLayout = new RelativeLayout(this);
    RelativeLayout.LayoutParams rlp = new RelativeLayout.LayoutParams(
            RelativeLayout.LayoutParams.FILL_PARENT,
            RelativeLayout.LayoutParams.FILL_PARENT);

    // Add GraphView to layout.
    RelativeLayout.LayoutParams lpGraph = new RelativeLayout.LayoutParams(
            RelativeLayout.LayoutParams.FILL_PARENT,
            RelativeLayout.LayoutParams.FILL_PARENT);
    mGraphView.setLayoutParams(lpGraph);
    relativeLayout.addView(mGraphView);

    // Add SurfaceView to layout.
    List<Camera.Size> ls = mCamera.getParameters().getSupportedPreviewSizes();
    int n = ls.size();
    int widthMin = 10000;
    int imin = -1;
    for (int i=0; i<n; i++) {
        Log.d(TAG, "supported preview width x height: " + ls.get(i).width + " x " + ls.get(i).height);
        if (widthMin > ls.get(i).width) {
            widthMin = ls.get(i).width;
            imin = i;
        }
    }
    if (imin >= 0) {
        RelativeLayout.LayoutParams lpSurface = new RelativeLayout.LayoutParams(
                ls.get(imin).width, ls.get(imin).height);
        lpSurface.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM);
        lpSurface.addRule(RelativeLayout.CENTER_HORIZONTAL);
        mCameraPreview.setLayoutParams(lpSurface);
        relativeLayout.addView(mCameraPreview);
    }

    // Provide Android framework with layout root.
    setContentView(relativeLayout, rlp);
}

@Override
protected void onResume() {
    super.onResume();
    mSensorManager.registerListener(mGraphView,
            mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER),
            SensorManager.SENSOR_DELAY_FASTEST);
    mSensorManager.registerListener(mGraphView,
            mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD),
            SensorManager.SENSOR_DELAY_FASTEST);
    mSensorManager.registerListener(mGraphView, 
            mSensorManager.getDefaultSensor(Sensor.TYPE_ORIENTATION),
            SensorManager.SENSOR_DELAY_FASTEST);

    if (mCamera == null)
        mCamera = Camera.open(1);
    if (mCameraPreview == null) 
        mCameraPreview = new CameraPreview(this, mCamera);
}


@Override
protected void onPause() {
    super.onPause();
    if (mCamera != null) {
        mCamera.stopPreview();
        mCamera.release();        // release the camera for other applications
        mCamera = null;
    }
    if (mCameraPreview != null) {
        mCameraPreview = null;
    }
}

@Override
protected void onStop() {
    mSensorManager.unregisterListener(mGraphView);
    super.onStop();
}

}


1 Answer


After several hours of trial and error, these lifecycle handlers appear to work (that is, they handle a pause via the power button, a stop via the Home icon, and a destroy via the Back icon, and they release the camera in onPause() so it becomes available to the system camera application).

// Fields referenced below that do not appear in the question's code:
private RelativeLayout mLayoutRoot;      // layout root, kept as a field so the preview view can be detached and re-attached
private Camera.Size mCameraPreviewSize;  // preview size chosen in onCreate(), reused when recreating the preview in onResume()

@Override
protected void onCreate(Bundle savedInstanceState) {
    // Be sure to call the super class.
    super.onCreate(savedInstanceState);

    mSensorManager = (SensorManager) getSystemService(SENSOR_SERVICE);
    mCamera = Camera.open(1);
    mCameraPreview = new CameraPreview(this, mCamera);
    mGraphView = new GraphView(this);
    // setContentView(mGraphView);

    // Create RelativeLayout for layout root.
    mLayoutRoot = new RelativeLayout(this);
    RelativeLayout.LayoutParams rlp = new RelativeLayout.LayoutParams(
            RelativeLayout.LayoutParams.FILL_PARENT,
            RelativeLayout.LayoutParams.FILL_PARENT);

    // Add GraphView to layout.
    RelativeLayout.LayoutParams lpGraph = new RelativeLayout.LayoutParams(
            RelativeLayout.LayoutParams.FILL_PARENT,
            RelativeLayout.LayoutParams.FILL_PARENT);
    mGraphView.setLayoutParams(lpGraph);
    mLayoutRoot.addView(mGraphView);

    // Add SurfaceView to layout.
    List<Camera.Size> ls = mCamera.getParameters().getSupportedPreviewSizes();
    int n = ls.size();
    int widthMin = 10000;
    int imin = -1;
    for (int i=0; i<n; i++) {
        Log.d(TAG, "supported preview width x height: " + ls.get(i).width + " x " + ls.get(i).height);
        if (widthMin > ls.get(i).width) {
            widthMin = ls.get(i).width;
            mCameraPreviewSize = ls.get(i);
            imin = i;
        }
    }
    if (imin >= 0) {
        RelativeLayout.LayoutParams lpSurface = new RelativeLayout.LayoutParams(
                ls.get(imin).width, ls.get(imin).height);
        lpSurface.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM);
        lpSurface.addRule(RelativeLayout.CENTER_HORIZONTAL);
        mCameraPreview.setLayoutParams(lpSurface);
        mLayoutRoot.addView(mCameraPreview);
    }

    // Provide Android framework with layout root.
    setContentView(mLayoutRoot, rlp);
    Log.d(TAG, "onCreate OUT mCamera, mCameraPreview: " + mCamera + ", " + mCameraPreview);
}

@Override
protected void onStart() {
    super.onStart();
    Log.d(TAG, "onStart OUT mCamera, mCameraPreview: " + mCamera + ", " + mCameraPreview);
}

@Override
protected void onResume() {
    super.onResume();
    mSensorManager.registerListener(mGraphView,
            mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER),
            SensorManager.SENSOR_DELAY_FASTEST);
    mSensorManager.registerListener(mGraphView,
            mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD),
            SensorManager.SENSOR_DELAY_FASTEST);
    mSensorManager.registerListener(mGraphView, 
            mSensorManager.getDefaultSensor(Sensor.TYPE_ORIENTATION),
            SensorManager.SENSOR_DELAY_FASTEST);

    if (mCamera == null)
        mCamera = Camera.open(1);
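    // Recreate and re-attach the preview view so that surfaceCreated()
    // fires against the newly opened Camera rather than the released one.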
    if (mCameraPreview == null) {
        mCameraPreview = new CameraPreview(this, mCamera);
        RelativeLayout.LayoutParams lpCameraPreview = new RelativeLayout.LayoutParams(
                mCameraPreviewSize.width, mCameraPreviewSize.height);
        lpCameraPreview.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM);
        lpCameraPreview.addRule(RelativeLayout.CENTER_HORIZONTAL);
        mCameraPreview.setLayoutParams(lpCameraPreview);
        mLayoutRoot.addView(mCameraPreview);
    }
    Log.d(TAG, "onResume OUT mCamera, mCameraPreview: " + mCamera + ", " + mCameraPreview);
}


@Override
protected void onPause() {
    if (mCamera != null) {
        mCamera.stopPreview();
        mCamera.release();        // release the camera for other applications
        mCamera = null;
    }
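    // Remove the stale SurfaceView so its surface callbacks cannot
    // run against the Camera that was just released.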
    if (mCameraPreview != null) {
        mLayoutRoot.removeView(mCameraPreview);
        mCameraPreview = null;
    }
    super.onPause();
    Log.d(TAG, "onPause OUT mCamera, mCameraPreview: " + mCamera + ", " + mCameraPreview);
}

@Override
protected void onStop() {
    mSensorManager.unregisterListener(mGraphView);
    super.onStop();
    Log.d(TAG, "onStop OUT mCamera, mCameraPreview: " + mCamera + ", " + mCameraPreview);
}

@Override
protected void onDestroy() {
    super.onDestroy();
    Log.d(TAG, "onDestroy OUT mCamera, mCameraPreview: " + mCamera + ", " + mCameraPreview);
}
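
For reference, the root cause appears to be that the original CameraPreview stayed attached to the window across the pause/resume cycle: on resume, the framework re-fired surfaceCreated() on the stale SurfaceView, whose mCamera field still pointed at the Camera instance released in onPause(), hence the "Method called after release()" exception. Removing the view in onPause() and recreating it in onResume(), as above, avoids that. A more defensive variant is to let the Activity detach the Camera from the preview before releasing it. The following is a minimal sketch; the setCamera() helper is an assumption for illustration, not part of the original code:

// Inside CameraPreview (sketch): let the Activity swap the Camera in
// and out, so stale surface callbacks become no-ops instead of crashing.
public void setCamera(Camera camera) {
    mCamera = camera;
}

public void surfaceCreated(SurfaceHolder holder) {
    if (mCamera == null) {
        return;  // camera was released in the Activity's onPause()
    }
    try {
        mCamera.setPreviewDisplay(holder);
        mCamera.startPreview();
    } catch (IOException e) {
        Log.d(TAG, "Error setting camera preview: " + e.getMessage());
    }
}

With this in place, onPause() would call mCameraPreview.setCamera(null) just before mCamera.release(), and onResume() would call mCameraPreview.setCamera(mCamera) after reopening the camera.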
answered 2012-06-14T00:33:12.703