I am trying to edit video frames in the onPreviewFrame callback, but the recorded video frames come out unmodified.
Here is my code:
public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {
private static final String TAG = "CameraPreview";
private final Camera camera;
private final CameraConfigurationManager configManager;
public CameraPreview(Context context, Camera camera) {
super(context);
this.camera = camera;
this.configManager = new CameraConfigurationManager(context);
super.getHolder().addCallback(this);
this.camera.setPreviewCallbackWithBuffer(CameraPreview.this);
// required for API <= 11
super.getHolder().setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.d(TAG, "surfaceCreated()");
// now that we have the surface, we can start the preview
try {
this.camera.setPreviewDisplay(holder);
this.camera.startPreview();
} catch (IOException e) {
Log.wtf(TAG, "Failed to start camera preview", e);
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// we will release the camera preview in our activity before this
// happens
Log.d(TAG, "surfaceDestroyed()");
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
// our activity runs with screenOrientation="landscape" so we don't
// care about surface changes
Log.d(TAG, "surfaceChanged()");
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
Log.d("HFI","In onPreviewFrame "+data.length);
// test edit: zero out a block of bytes to check whether the change shows up in the recording
for (int i = 1000; i < 5000; i++) {
data[i] = 0;
}
Point cameraResolution = configManager.getCameraResolution();
int maxwidth = cameraResolution.x;
int maxheight = cameraResolution.y;
// YuvImage img = new YuvImage(data, camera.getParameters().getPreviewFormat(), 640, 480, null);
YuvImage image = new YuvImage(data, camera.getParameters().getPreviewFormat(), maxwidth, maxheight, null);
Rect rectangle = new Rect();
rectangle.bottom = maxheight;
rectangle.top = 0;
rectangle.left = 0;
rectangle.right = maxwidth;
ByteArrayOutputStream output = new ByteArrayOutputStream();
image.compressToJpeg(rectangle, 95, output);
// Decode the JPEG byte array from 'output' to 'Bitmap' object
Bitmap bmp = BitmapFactory.decodeByteArray(output.toByteArray(), 0, output.size());
bmp = convertToMutable(bmp);
// Use 'Canvas' to draw text onto the 'Bitmap'
Canvas cv = new Canvas(bmp);
// Prepare 'Paint' for text drawing
Paint mPaint = new Paint();
mPaint.setColor( Color.RED );
mPaint.setStyle( Style.STROKE );
mPaint.setTextSize(30);
// Draw text on the 'Bitmap' image
cv.drawText("Hello Camera", 10, 10, mPaint);
// Reset the stream of 'output' for output writing.
output.reset();
// Compress current 'Bitmap' to 'output' as JPEG format
bmp.compress(CompressFormat.JPEG, 95, output);
data = getNV21(maxwidth, maxheight, bmp);
//camera.addCallbackBuffer(getNV21(640, 480, bmp));
}
// untested function
byte [] getNV21(int inputWidth, int inputHeight, Bitmap scaled) {
int [] argb = new int[inputWidth * inputHeight];
scaled.getPixels(argb, 0, inputWidth, 0, 0, inputWidth, inputHeight);
byte [] yuv = new byte[inputWidth*inputHeight*3/2];
encodeYUV420SP(yuv, argb, inputWidth, inputHeight);
scaled.recycle();
return yuv;
}
void encodeYUV420SP(byte[] yuv420sp, int[] argb, int width, int height) {
final int frameSize = width * height;
int yIndex = 0;
int uvIndex = frameSize;
int a, R, G, B, Y, U, V;
int index = 0;
for (int j = 0; j < height; j++) {
for (int i = 0; i < width; i++) {
a = (argb[index] & 0xff000000) >> 24; // a is not used obviously
R = (argb[index] & 0xff0000) >> 16;
G = (argb[index] & 0xff00) >> 8;
B = (argb[index] & 0xff) >> 0;
// well known RGB to YUV algorithm
Y = ( ( 66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
U = ( ( -38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
V = ( ( 112 * R - 94 * G - 18 * B + 128) >> 8) + 128;
// NV21 has a plane of Y and interleaved planes of VU each sampled by a factor of 2
// meaning for every 4 Y pixels there is 1 V and 1 U. Note the sampling is every other
// pixel AND every other scanline.
yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
if (j % 2 == 0 && index % 2 == 0) {
yuv420sp[uvIndex++] = (byte)((V<0) ? 0 : ((V > 255) ? 255 : V));
yuv420sp[uvIndex++] = (byte)((U<0) ? 0 : ((U > 255) ? 255 : U));
}
index ++;
}
}
}
/**
* Converts an immutable bitmap to a mutable bitmap. This operation doesn't allocate
* more memory than is already allocated.
*
* @param imgIn - Source image. It will be recycled and should not be used afterwards.
* @return a copy of imgIn, but mutable.
*/
public static Bitmap convertToMutable(Bitmap imgIn) {
try {
//This is a temporary file used to hold the raw pixel bytes.
// It is not an image file; it only stores the raw pixel data.
File file = new File(Environment.getExternalStorageDirectory() + File.separator + "temp.tmp");
//Open a RandomAccessFile
//Make sure you have added uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"
//to the AndroidManifest.xml file
RandomAccessFile randomAccessFile = new RandomAccessFile(file, "rw");
// get the width and height of the source bitmap.
int width = imgIn.getWidth();
int height = imgIn.getHeight();
Config type = imgIn.getConfig();
//Copy the byte to the file
//Assume source bitmap loaded using options.inPreferredConfig = Config.ARGB_8888;
FileChannel channel = randomAccessFile.getChannel();
MappedByteBuffer map = channel.map(MapMode.READ_WRITE, 0, imgIn.getRowBytes()*height);
imgIn.copyPixelsToBuffer(map);
//Recycle the source bitmap; it is no longer needed.
imgIn.recycle();
System.gc();// try to force the bytes from the imgIn to be released
//Create a new bitmap of the same size; the memory freed above should now be available.
imgIn = Bitmap.createBitmap(width, height, type);
map.position(0);
//Load the pixel data back from the temporary file
imgIn.copyPixelsFromBuffer(map);
//Close the channel and the temporary file, then delete the file
channel.close();
randomAccessFile.close();
// delete the temp file
file.delete();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
return imgIn;
}
}
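For reference, setPreviewCallbackWithBuffer only delivers frames into buffers that have previously been queued with addCallbackBuffer. A minimal sketch of that wiring, assuming a helper like this is added to the CameraPreview class above (with android.graphics.ImageFormat imported) and called after startPreview():

// Sketch only: hand the camera one buffer sized for a preview frame in the current format,
// so that setPreviewCallbackWithBuffer actually delivers frames.
private void queuePreviewBuffer() {
    Camera.Parameters params = camera.getParameters();
    Camera.Size size = params.getPreviewSize();
    int bitsPerPixel = ImageFormat.getBitsPerPixel(params.getPreviewFormat());
    byte[] buffer = new byte[size.width * size.height * bitsPerPixel / 8];
    camera.addCallbackBuffer(buffer);
    // Inside onPreviewFrame, the buffer has to be re-queued with
    // camera.addCallbackBuffer(data) to keep receiving callbacks.
}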
I also tried this.camera.setPreviewCallback(CameraPreview.this);
instead, but then I got the following error:
10-04 12:43:06.118: E/AndroidRuntime(3015): FATAL EXCEPTION: main
10-04 12:43:06.118: E/AndroidRuntime(3015): java.lang.RuntimeException: Method called after release()
10-04 12:43:06.118: E/AndroidRuntime(3015): at android.hardware.Camera.setHasPreviewCallback(Native Method)
10-04 12:43:06.118: E/AndroidRuntime(3015): at android.hardware.Camera.access$600(Camera.java:133)
10-04 12:43:06.118: E/AndroidRuntime(3015): at android.hardware.Camera$EventHandler.handleMessage(Camera.java:792)
10-04 12:43:06.118: E/AndroidRuntime(3015): at android.os.Handler.dispatchMessage(Handler.java:99)
10-04 12:43:06.118: E/AndroidRuntime(3015): at android.os.Looper.loop(Looper.java:137)
10-04 12:43:06.118: E/AndroidRuntime(3015): at android.app.ActivityThread.main(ActivityThread.java:4898)
10-04 12:43:06.118: E/AndroidRuntime(3015): at java.lang.reflect.Method.invokeNative(Native Method)
10-04 12:43:06.118: E/AndroidRuntime(3015): at java.lang.reflect.Method.invoke(Method.java:511)
10-04 12:43:06.118: E/AndroidRuntime(3015): at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1006)
10-04 12:43:06.118: E/AndroidRuntime(3015): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:773)
10-04 12:43:06.118: E/AndroidRuntime(3015): at dalvik.system.NativeStart.main(Native Method)
I need help with this. What I actually want to do is draw text and an image onto the frames.
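As context for the stack trace above, "Method called after release()" is typically raised when a preview callback is still registered at the moment the Camera object is released. A minimal teardown sketch, assuming the owning Activity holds the Camera instance:

// Sketch only: stop the preview and clear the callback before releasing the camera.
if (camera != null) {
    camera.stopPreview();
    camera.setPreviewCallback(null); // or setPreviewCallbackWithBuffer(null)
    camera.release();
    camera = null;
}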