Use List<Integer> list = parameters.getSupportedPreviewFormats(); (where parameters is the Camera.Parameters object returned by camera.getParameters()) to find out which PixelFormat types the phone supports; in most cases the preview format is NV21, i.e. the value 17. BitmapFactory.decodeByteArray cannot parse an NV21 byte[] directly and returns null, so the NV21 byte[] has to be converted first.
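For reference, the check could look roughly like the sketch below; the wrapper class PreviewFormatCheck and the method supportsNv21 are illustrative names of mine, not part of the project code.

import java.util.List;

import android.graphics.PixelFormat;
import android.hardware.Camera;

public final class PreviewFormatCheck {

    /** Returns true if the default camera reports NV21 among its supported preview formats. */
    public static boolean supportsNv21() {
        Camera camera = Camera.open();
        try {
            List<Integer> formats = camera.getParameters().getSupportedPreviewFormats();
            // On the 2.1-era API, NV21 is PixelFormat.YCbCr_420_SP, whose value is 0x11 == 17.
            return formats != null && formats.contains(PixelFormat.YCbCr_420_SP);
        } finally {
            camera.release();
        }
    }
}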
Finally: SDK 2.2 provides the YuvImage class, whose compressToJpeg method performs this conversion, but SDK 2.1 has no ready-made API for it (or at least I did not find one; if you did, please let me know), so on 2.1 the format conversion has to be done by hand. A minimal sketch of the 2.2 YuvImage path is shown below, followed by the full MainActivity (reposted from http://www.adobex.com/android/source/details/00000391.html).
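On SDK 2.2 (API 8) and later, the YuvImage path could look roughly like this, assuming the preview format really is NV21; the class Nv21Util and the method nv21ToJpeg are names made up for this sketch.

import java.io.ByteArrayOutputStream;

import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;

public final class Nv21Util {

    /** Compresses one NV21 preview frame to JPEG bytes (requires API 8 or later). */
    public static byte[] nv21ToJpeg(byte[] nv21, int width, int height, int quality) {
        YuvImage yuv = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // compressToJpeg performs the NV21-to-JPEG conversion in native code.
        yuv.compressToJpeg(new Rect(0, 0, width, height), quality, out);
        return out.toByteArray();
    }
}

The JPEG bytes returned here can be handed to BitmapFactory.decodeByteArray, which does understand JPEG; on SDK 2.1 the hand-written decodeYUV420SP in the listing below takes its place.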
package cn.itcast.picture;

import java.io.File;
import java.io.FileOutputStream;
import java.util.List;

import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Bitmap.CompressFormat;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.hardware.Camera.PictureCallback;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.Display;
import android.view.KeyEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.Window;
import android.view.WindowManager;

public class MainActivity extends Activity {

    private static final String TAG = "MainActivity";
    private SurfaceView surfaceView;
    private Camera camera;
    Bitmap mBitmap;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        Window window = getWindow();
        requestWindowFeature(Window.FEATURE_NO_TITLE); // no title bar
        window.setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN); // full screen
        window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON); // keep the screen on
        setContentView(R.layout.main);
        surfaceView = (SurfaceView) findViewById(R.id.surfaceView);
        surfaceView.getHolder().setFixedSize(176, 144); // set the resolution
        /*
         * The commented-out call below would tell the Surface not to maintain its own
         * buffers and instead wait for the screen's rendering engine to push content to it.
         */
        // surfaceView.getHolder().setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        surfaceView.getHolder().addCallback(new SurfaceCallback());
    }

    private final class SurfaceCallback implements SurfaceHolder.Callback {

        private boolean preview;

        public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        }

        public void surfaceCreated(SurfaceHolder holder) {
            // Open the camera.
            camera = Camera.open();
            WindowManager wm = (WindowManager) getSystemService(Context.WINDOW_SERVICE);
            Display display = wm.getDefaultDisplay();
            Camera.Parameters parameters = camera.getParameters();
            parameters.setPreviewSize(display.getWidth(), display.getHeight()); // preview size
            parameters.setPreviewFrameRate(3); // 3 frames per second
            parameters.setPictureFormat(PixelFormat.JPEG); // output format for pictures
            parameters.set("jpeg-quality", 85); // picture quality
            // parameters.setPreviewFormat(PixelFormat.JPEG); // only takes effect if the camera driver supports it
            // parameters.setPictureSize(display.getWidth(), display.getHeight()); // picture size
            camera.setParameters(parameters);
            // try {
            //     camera.setPreviewDisplay(surfaceView.getHolder()); // show the viewfinder through the SurfaceView
            camera.setPreviewCallback(new PreviewCallBack());
            camera.startPreview();
            preview = true;
            // } catch (IOException e) {
            //     Log.e(TAG, e.toString());
            // }
        }

        public void surfaceDestroyed(SurfaceHolder holder) {
            if (camera != null) {
                if (preview) {
                    camera.stopPreview();
                }
                camera.release();
            }
        }
    }

    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        if (camera != null && event.getRepeatCount() == 0) {
            switch (keyCode) {
            case KeyEvent.KEYCODE_SEARCH: // search key
                // Auto focus.
                camera.autoFocus(null);
                return true;
            case KeyEvent.KEYCODE_CAMERA: // camera key
            case KeyEvent.KEYCODE_DPAD_CENTER: // confirm key
                camera.takePicture(null, null, new TakePictureCallback());
                return true;
            }
        }
        return super.onKeyDown(keyCode, event);
    }

    private final class TakePictureCallback implements PictureCallback {

        public void onPictureTaken(byte[] data, Camera camera) {
            try {
                Bitmap bitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
                File file = new File(Environment.getExternalStorageDirectory(),
                        System.currentTimeMillis() + ".jpg");
                FileOutputStream outStream = new FileOutputStream(file);
                bitmap.compress(CompressFormat.JPEG, 100, outStream);
                outStream.close();
                camera.stopPreview();
                camera.startPreview();
            } catch (Exception e) {
                Log.e(TAG, e.toString());
            }
        }
    }

    private final class PreviewCallBack implements Camera.PreviewCallback {

        /*
         * (non-Javadoc)
         *
         * @see android.hardware.Camera.PreviewCallback#onPreviewFrame(byte[],
         * android.hardware.Camera)
         */
        public void onPreviewFrame(byte[] data, Camera camera) {
            Camera.Parameters parameters = camera.getParameters();
            int format = parameters.getPreviewFormat();
            List<Integer> list = parameters.getSupportedPreviewFormats();
            for (Integer i : list) {
                // Inspect the formats the camera driver supports.
                // Log.e(TAG, "support format:" + i);
            }
            // Log.e(TAG, "format:" + format);
            if (mBitmap != null) {
                mBitmap.recycle();
                mBitmap = null;
            }
            // YUV formats require more conversion.
            if (format == PixelFormat.YCbCr_420_SP || format == PixelFormat.YCbCr_422_I) {
                int w = parameters.getPreviewSize().width;
                int h = parameters.getPreviewSize().height;
                // Get the YUV image: YuvImage requires Android 2.2, so it cannot be used here.
                // YuvImage yuv_image = new YuvImage(data, format, w, h, null);
                // // Convert YUV to JPEG
                // Rect rect = new Rect(0, 0, w, h);
                // ByteArrayOutputStream output_stream = new ByteArrayOutputStream();
                // yuv_image.compressToJpeg(rect, 100, output_stream);
                // // Convert from JPEG to Bitmap
                // bitmap2 = BitmapFactory.decodeByteArray(output_stream.toByteArray(), 0, output_stream.size());
                int[] i = new int[data.length]; // only w * h entries are actually used
                decodeYUV420SP(i, data, w, h);
                mBitmap = Bitmap.createBitmap(i, w, h, Bitmap.Config.RGB_565);
                // mBitmap = renderCroppedGreyscaleBitmap(data, w, h);
            }
            // JPEG and RGB565 are supported by BitmapFactory.decodeByteArray.
            else if (format == PixelFormat.JPEG || format == PixelFormat.RGB_565) {
                mBitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
            }
            Bitmap bitmap = BitmapFactory.decodeResource(MainActivity.this.getResources(), R.drawable.test);
            // Log.e(TAG, "mBitmap:" + mBitmap + "format:" + format);
            Canvas canvas = surfaceView.getHolder().lockCanvas();
            // drawBitmap only when mBitmap is non-null.
            if (mBitmap != null) {
                canvas.drawBitmap(mBitmap, 0, 0, null);
            }
            canvas.drawBitmap(bitmap, 0, 0, null);
            surfaceView.getHolder().unlockCanvasAndPost(canvas);
        }
    }

    /**
     * Fixed-point BT.601 YUV (NV21) to RGB conversion: the coefficients are scaled by 1024
     * (e.g. 1192 is roughly 1.164 * 1024), the intermediate results are clamped to the
     * 18-bit range [0, 262143], and each pixel is packed as an 0xAARRGGBB int.
     */
    static public void decodeYUV420SP(int[] rgb, byte[] yuv420sp, int width, int height) {
        final int frameSize = width * height;
        for (int j = 0, yp = 0; j < height; j++) {
            // The interleaved VU plane starts at frameSize; each VU pair covers two columns and two rows.
            int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
            for (int i = 0; i < width; i++, yp++) {
                int y = (0xff & ((int) yuv420sp[yp])) - 16;
                if (y < 0)
                    y = 0;
                if ((i & 1) == 0) {
                    v = (0xff & yuv420sp[uvp++]) - 128;
                    u = (0xff & yuv420sp[uvp++]) - 128;
                }
                int y1192 = 1192 * y;
                int r = (y1192 + 1634 * v);
                int g = (y1192 - 833 * v - 400 * u);
                int b = (y1192 + 2066 * u);
                if (r < 0) r = 0; else if (r > 262143) r = 262143;
                if (g < 0) g = 0; else if (g > 262143) g = 262143;
                if (b < 0) b = 0; else if (b > 262143) b = 262143;
                rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
            }
        }
    }

    /**
     * This method came from the web; the result is too poor to use.
     *
     * @param yuvData
     * @param width
     * @param height
     * @return
     */
    public Bitmap renderCroppedGreyscaleBitmap(byte[] yuvData, int width, int height) {
        int[] pixels = new int[width * height];
        byte[] yuv = yuvData;
        int inputOffset = 0 * 320 + 40;
        for (int y = 0; y < height; y++) {
            int outputOffset = y * width;
            for (int x = 0; x < width; x++) {
                int grey = yuv[inputOffset + x] & 0xff;
                pixels[outputOffset + x] = 0xFF000000 | (grey * 0x00010101);
            }
            inputOffset += 320;
        }
        Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        bitmap.setPixels(pixels, 0, width, 0, 0, width, height);
        return bitmap;
    }

    /**
     * This method came from the web; in my tests it did not work well.
     *
     * @param rgbBuf
     * @param yuv420sp
     * @param width
     * @param height
     */
    static public void decodeYUV420SP(byte[] rgbBuf, byte[] yuv420sp, int width, int height) {
        final int frameSize = width * height;
        if (rgbBuf == null)
            throw new NullPointerException("buffer 'rgbBuf' is null");
        if (rgbBuf.length < frameSize * 3)
            throw new IllegalArgumentException("buffer 'rgbBuf' size " + rgbBuf.length
                    + " < minimum " + frameSize * 3);
        if (yuv420sp == null)
            throw new NullPointerException("buffer 'yuv420sp' is null");
        if (yuv420sp.length < frameSize * 3 / 2)
            throw new IllegalArgumentException("buffer 'yuv420sp' size " + yuv420sp.length
                    + " < minimum " + frameSize * 3 / 2);
        int i = 0, y = 0;
        int uvp = 0, u = 0, v = 0;
        int y1192 = 0, r = 0, g = 0, b = 0;
        for (int j = 0, yp = 0; j < height; j++) {
            uvp = frameSize + (j >> 1) * width;
            u = 0;
            v = 0;
            for (i = 0; i < width; i++, yp++) {
                y = (0xff & ((int) yuv420sp[yp])) - 16;
                if (y < 0)
                    y = 0;
                if ((i & 1) == 0) {
                    v = (0xff & yuv420sp[uvp++]) - 128;
                    u = (0xff & yuv420sp[uvp++]) - 128;
                }
                y1192 = 1192 * y;
                r = (y1192 + 1634 * v);
                g = (y1192 - 833 * v - 400 * u);
                b = (y1192 + 2066 * u);
                if (r < 0) r = 0; else if (r > 262143) r = 262143;
                if (g < 0) g = 0; else if (g > 262143) g = 262143;
                if (b < 0) b = 0; else if (b > 262143) b = 262143;
                rgbBuf[yp * 3] = (byte) (r >> 10);
                rgbBuf[yp * 3 + 1] = (byte) (g >> 10);
                rgbBuf[yp * 3 + 2] = (byte) (b >> 10);
            }
        }
    }
}
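As a usage note, the int[] overload of decodeYUV420SP above can also be used outside the preview callback, for example to turn a single NV21 frame into a JPEG file on SDK 2.1. A minimal sketch follows; the class FrameSaver, the method savePreviewFrame, and the file name preview.jpg are illustrative, not part of the original project.

package cn.itcast.picture; // same package as MainActivity, whose decodeYUV420SP is reused

import java.io.File;
import java.io.FileOutputStream;

import android.graphics.Bitmap;
import android.os.Environment;

public final class FrameSaver {

    /** Converts one NV21 frame with MainActivity.decodeYUV420SP and writes it out as a JPEG. */
    public static void savePreviewFrame(byte[] nv21, int width, int height) throws Exception {
        int[] rgb = new int[width * height];
        MainActivity.decodeYUV420SP(rgb, nv21, width, height);
        Bitmap bitmap = Bitmap.createBitmap(rgb, width, height, Bitmap.Config.RGB_565);
        File file = new File(Environment.getExternalStorageDirectory(), "preview.jpg");
        FileOutputStream out = new FileOutputStream(file);
        try {
            bitmap.compress(Bitmap.CompressFormat.JPEG, 85, out);
        } finally {
            out.close();
            bitmap.recycle();
        }
    }
}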