Commit ad7707c8 by maiqh

Fix the stuttering issue and remove redundant code

parent ae328dd2
......@@ -33,10 +33,6 @@
</intent-filter>
</activity>
<activity android:name=".Camera2Activity"/>
<!-- <activity android:name=".mtk.MTKCameraActivity"/>-->
<activity android:name=".video.CameraActivity"/>
<activity android:name=".getpreview.PreviewActivity"/>
<activity android:name=".camera1.Camera1"/>
<service android:name="com.autonavi.amapauto.gdarcameraservicedemo.GDArCameraService"
android:exported="true">
......@@ -45,7 +41,7 @@
</intent-filter>
</service>
<receiver android:name="com.autonavi.amapauto.ArCameraIdBroadcast" android:exported="true">
<receiver android:name="com.autonavi.amapauto.gdarcameraservicedemo.ArCameraIdBroadcast" android:exported="true">
<intent-filter>
<action android:name="AUTONAVI_STANDARD_BROADCAST_SEND"/>
</intent-filter>
......
package com.autonavi.amapauto.camera1;
import android.hardware.Camera;
import android.util.Log;
import android.view.SurfaceHolder;
import java.io.IOException;
public class Camera1Helper {
private final static String TAG = "Camera1Helper";
private Camera mCamera;
public Camera open(){
mCamera = null;
try {
mCamera = Camera.open(); // attempt to get a Camera instance
}
catch (Exception e){
// Camera is not available (in use or does not exist)
}
return mCamera; // returns null if camera is unavailable
}
public void setHolder(SurfaceHolder mHolder) {
mHolder.addCallback(callback);
// deprecated setting, but required on Android versions prior to 3.0
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
SurfaceHolder.Callback callback= new SurfaceHolder.Callback(){
@Override
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, now tell the camera where to draw the preview.
try {
mCamera.setPreviewDisplay(holder);
mCamera.setPreviewCallback(new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
Log.d(TAG, "onPreviewFrame : " + data);
}
});
mCamera.startPreview();
} catch (IOException e) {
Log.d(TAG, "Error setting camera preview: " + e.getMessage());
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
// If your preview can change or rotate, take care of those events here.
// Make sure to stop the preview before resizing or reformatting it.
if (holder.getSurface() == null) {
// preview surface does not exist
return;
}
// stop preview before making changes
try {
mCamera.stopPreview();
} catch (Exception e) {
// ignore: tried to stop a non-existent preview
}
// set preview size and make any resize, rotate or
// reformatting changes here
// start preview with new settings
try {
mCamera.setPreviewDisplay(holder);
mCamera.setPreviewCallback(new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
Log.d(TAG, "onPreviewFrame : " + data);
}
});
mCamera.startPreview();
} catch (Exception e) {
Log.d(TAG, "Error starting camera preview: " + e.getMessage());
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
// The surface is gone: stop the preview and drop the callback so no further frames arrive.
if (mCamera != null) {
mCamera.setPreviewCallback(null);
mCamera.stopPreview();
}
}
};
}
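For context, here is a minimal usage sketch of Camera1Helper from an Activity. It is not part of the commit: the hosting class is hypothetical, the SurfaceView is created in code rather than from a layout, and it assumes the CAMERA permission has already been granted. It only illustrates the intended call order (open() before setHolder(); the preview itself starts in surfaceCreated()).

// Illustrative only; Camera1UsageSketch is an assumption, not a class in this repository.
package com.autonavi.amapauto.camera1;

import android.app.Activity;
import android.hardware.Camera;
import android.os.Bundle;
import android.view.SurfaceView;

public class Camera1UsageSketch extends Activity {
    private final Camera1Helper mHelper = new Camera1Helper();
    private Camera mCamera;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        SurfaceView surfaceView = new SurfaceView(this);
        setContentView(surfaceView);
        mCamera = mHelper.open();                       // may return null if the camera is unavailable
        if (mCamera != null) {
            mHelper.setHolder(surfaceView.getHolder()); // preview starts once the surface is created
        }
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        if (mCamera != null) {
            mCamera.setPreviewCallback(null);
            mCamera.stopPreview();
            mCamera.release();
        }
    }
}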
package com.autonavi.amapauto.gdarcameraservicedemo;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import com.autonavi.amapauto.gdarcameraservicedemo.utils.BroadcastUtils;
public class ArCameraIdBroadcast extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
String action = intent.getAction();
if ("AUTONAVI_STANDARD_BROADCAST_SEND".equals(action)) {
int key_type = intent.getIntExtra("KEY_TYPE", -1);
if (key_type == 12116) {
// Per AMAP (Gaode): they do not only send this after receiving our request — they will request it themselves; in addition, we must proactively send this broadcast to them whenever our camera connection changes.
BroadcastUtils.sendCameraConfigBroadcast(context);
}
}
}
}
package com.autonavi.amapauto.gdarcameraservicedemo;
import android.content.Context;
import android.graphics.ImageFormat;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.TotalCaptureResult;
import android.media.Image;
import android.media.ImageReader;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.MemoryFile;
import android.os.ParcelFileDescriptor;
import android.os.RemoteException;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
import com.autonavi.amapauto.gdarcameraservice.IGDCameraStateCallBack;
import com.autonavi.amapauto.gdarcameraservice.model.ArCameraOpenResultParam;
import com.autonavi.amapauto.gdarcameraservicedemo.utils.SharedMemUtils;
import java.io.FileDescriptor;
import java.io.IOException;
import java.lang.reflect.Method;
import java.nio.ByteBuffer;
import java.util.Arrays;
public class CameraServiceHelper {
private static final String TAG = "CameraServiceHelper";
private static final String MEMORY_FILE = "cneeds_camera_memory";
private final static int PREVIEW_WIDTH = 720;
private final static int PREVIEW_HEIGHT = 1280;
public final static int SAVE_WIDTH = 720;
public final static int SAVE_HEIGHT = 1280;
private boolean mIsCameraOpen = false;
private final int MEMORY_SIZE = 3133440 + 1;
private CameraManager mCameraManager;
private ImageReader mImageReader;
private CameraDevice mCameraDevice;
private CameraCaptureSession mCameraCaptureSession;
private String mCameraId = "0";
private CameraCharacteristics mCameraCharacteristics;
private int mCameraSensorOrientation = 0; // camera sensor orientation
private int mCameraFacing = CameraCharacteristics.LENS_FACING_BACK; // use the back camera by default
private int mDisplayRotation; // device display rotation
private boolean canTakePic = true; // whether a picture can be taken
private boolean canExchangeCamera = false; // whether the camera can be switched
private Handler mCameraHandler;
private HandlerThread handlerThread = new HandlerThread("CameraThread");
private Size mPreviewSize = new Size(PREVIEW_WIDTH, PREVIEW_HEIGHT); // preview size
private Size mSavePicSize = new Size(SAVE_WIDTH, SAVE_HEIGHT); // saved picture size
private IGDCameraStateCallBack mStateCallBack;
private MemoryFile mMemoryFile;
private Surface mSurface;
public CameraServiceHelper() {
try {
mMemoryFile = new MemoryFile(MEMORY_FILE, MEMORY_SIZE);
} catch (IOException e) {
e.printStackTrace();
}
}
public void setStateCallBack(IGDCameraStateCallBack stateCallBack) {
mStateCallBack =stateCallBack;
}
public boolean initCameraInfo(Context context, Surface surface) {
handlerThread.start();
mCameraHandler = new Handler(handlerThread.getLooper());
mSurface = surface;
mCameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
String[] cameraIdList = new String[0];
try {
cameraIdList = mCameraManager.getCameraIdList();
if (cameraIdList.length == 0) {
Log.d(TAG, "没有相机可用");
return false;
}
for (int i = 0; i < cameraIdList.length; i++) {
CameraCharacteristics cameraCharacteristics = mCameraManager.getCameraCharacteristics(cameraIdList[i]);
Integer facing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING);
if (facing != null && facing == mCameraFacing) {
mCameraId = cameraIdList[i];
mCameraCharacteristics = cameraCharacteristics;
}
Log.d(TAG, "设备中的摄像头" + mCameraId);
}
Integer supportLevel = mCameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
if (supportLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
Log.d(TAG, "相机硬件不支持新特性");
}
// Get the camera sensor orientation
mCameraSensorOrientation = mCameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
mImageReader = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(), ImageFormat.YUV_420_888, 1);
mImageReader.setOnImageAvailableListener(onImageAvailableListener, mCameraHandler);
return true;
} catch (CameraAccessException e) {
e.printStackTrace();
}
return false;
}
private ImageReader.OnImageAvailableListener onImageAvailableListener =
new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
// Got the captured image data
Image image = reader.acquireNextImage();
if (image == null) {
return;
}
ByteBuffer byteBuffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[byteBuffer.remaining()];
byteBuffer.get(bytes);
image.close(); // close the Image, not the ImageReader, so the next frame can be acquired
log("Got captured image data");
byte[] buffer = new byte[20+bytes.length];
SharedMemUtils.initHeader(buffer);
boolean b = SharedMemUtils.canWrite(buffer);
if (b) {
SharedMemUtils.setOffset(buffer, 0);
SharedMemUtils.setLength(buffer, 0);
SharedMemUtils.setContentSize(buffer, bytes.length);
SharedMemUtils.setContent(buffer, bytes);
try {
// Write once: the data is cleared after it has been read
// Keep writing without reading: the data is not cleared, so watch out for overwriting (the offset value)
mMemoryFile.writeBytes(buffer, 0, 0, buffer.length);
Method getFileDescriptorMethod = mMemoryFile.getClass().getDeclaredMethod("getFileDescriptor");
if(getFileDescriptorMethod != null){
FileDescriptor fileDescriptor = (FileDescriptor) getFileDescriptorMethod.invoke(mMemoryFile);
// Must be parceled before it can be sent across processes
ParcelFileDescriptor pfd = ParcelFileDescriptor.dup(fileDescriptor);
ArCameraOpenResultParam openResultParam = new ArCameraOpenResultParam();
openResultParam.cameraId = mCameraId;
openResultParam.imageHeight = PREVIEW_HEIGHT;
openResultParam.imageWidth = PREVIEW_WIDTH;
mStateCallBack.onOpened(pfd, openResultParam, MEMORY_FILE);
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
};
public void openCamera() throws CameraAccessException {
mCameraManager.openCamera(mCameraId, new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
mCameraDevice = camera;
try {
createCaptureSession(camera);
} catch (CameraAccessException e) {
e.printStackTrace();
}
try {
mStateCallBack.onConnected();
} catch (RemoteException e) {
e.printStackTrace();
}
mIsCameraOpen = true;
}
@Override
public void onDisconnected(CameraDevice camera) {
log("onDisconnected");
try {
mStateCallBack.onDisconnected();
} catch (RemoteException e) {
e.printStackTrace();
}
mIsCameraOpen = false;
}
@Override
public void onError(CameraDevice camera, int error) {
log("onError" +error);
try {
mStateCallBack.onError(error, camera.getId());
} catch (RemoteException e) {
e.printStackTrace();
}
mIsCameraOpen = false;
releaseCamera();
releaseThread();
}
}, mCameraHandler);
}
private void createCaptureSession(CameraDevice cameraDevice) throws CameraAccessException {
final CaptureRequest.Builder builder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
builder.addTarget(mImageReader.getSurface());
builder.addTarget(mSurface);
builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
// The session must be configured with every surface that was added as a request target
cameraDevice.createCaptureSession(Arrays.asList(mImageReader.getSurface(), mSurface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
mCameraCaptureSession = session;
try {
session.setRepeatingRequest(builder.build(), captureCallback, mCameraHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
}
@Override
public void onClosed(CameraCaptureSession session) {
super.onClosed(session);
releaseCamera();
releaseThread();
try {
mStateCallBack.onClosed(0, "onClosed");
} catch (RemoteException e) {
e.printStackTrace();
}
}
}, mCameraHandler);
}
public void releaseCamera() {
if (mCameraCaptureSession != null) {
mCameraCaptureSession.close();
mCameraCaptureSession = null;
}
if (mCameraDevice != null) {
mCameraDevice.close();
mCameraDevice = null;
}
if (mImageReader != null) {
mImageReader.close();
mImageReader = null;
}
canExchangeCamera = false;
}
public void releaseThread() {
if (handlerThread == null) {
return;
}
handlerThread.quitSafely();
try {
handlerThread.join();
handlerThread = null;
mCameraHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
CameraCaptureSession.CaptureCallback captureCallback = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
canExchangeCamera = true;
canTakePic = true;
}
@Override
public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
super.onCaptureFailed(session, request, failure);
log("onCaptureFailed");
}
};
private void log(String msg) {
Log.d(TAG, msg);
}
public boolean isIsCameraOpen() {
return mIsCameraOpen;
}
public void setIsCameraOpen(boolean mIsCameraOpen) {
this.mIsCameraOpen = mIsCameraOpen;
}
}
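SharedMemUtils itself is not included in this diff. Purely as a hypothetical sketch (the real field layout may well differ), the calls above — initHeader, canWrite, setOffset, setLength, setContentSize, setContent on a buffer sized 20 + payload — suggest a small fixed-offset header written in front of the frame bytes, along these lines:

// Hypothetical illustration of a 20-byte fixed-offset header of the kind SharedMemUtils appears
// to manage; the field positions and flag semantics here are assumptions, not the real protocol.
package com.autonavi.amapauto.gdarcameraservicedemo.utils;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

final class SharedMemHeaderSketch {
    static final int HEADER_SIZE = 20; // header precedes the frame payload in the MemoryFile
    // assumed slots: [0] read/write flag, [4] offset, [8] length, [12] content size

    static void initHeader(byte[] buf) {
        ByteBuffer.wrap(buf).order(ByteOrder.LITTLE_ENDIAN).putInt(0, 0);
    }

    static boolean canWrite(byte[] buf) {
        // flag == 0 would mean the consumer has drained the previous frame
        return ByteBuffer.wrap(buf).order(ByteOrder.LITTLE_ENDIAN).getInt(0) == 0;
    }

    static void setContentSize(byte[] buf, int size) {
        ByteBuffer.wrap(buf).order(ByteOrder.LITTLE_ENDIAN).putInt(12, size);
    }

    static void setContent(byte[] buf, byte[] frame) {
        System.arraycopy(frame, 0, buf, HEADER_SIZE, frame.length);
    }
}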
......@@ -155,7 +155,6 @@ public class GDArCameraService extends Service {
mCamera.setPreviewCallback(new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
Log.d(TAG, "onPreviewFrame: " + data.toString());
try {
sendData(data, camera);
} catch (IOException e) {
......@@ -223,7 +222,6 @@ public class GDArCameraService extends Service {
try {
SharedMemUtils.setCanRead(buffer);
mMemoryFile.writeBytes(buffer, 0, 0, buffer.length);
Log.d(TAG, "CNEEDS_TEST sendData: " + Arrays.toString(buffer));
} catch (Exception e) {
e.printStackTrace();
Log.d(TAG, "sendDataError: " + e.getMessage());
......
package com.autonavi.amapauto;
package com.autonavi.amapauto.gdarcameraservicedemo.utils;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
public class ArCameraIdBroadcast extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
String action = intent.getAction();
if ("AUTONAVI_STANDARD_BROADCAST_SEND".equals(action)) {
int key_type = intent.getIntExtra("KEY_TYPE", -1);
if (key_type == 12116) {
// This is not only sent in response to our request — they will request it themselves; we must also proactively send this broadcast to them whenever our camera connection changes
Intent receiverIntent = new Intent();
receiverIntent.setAction("AUTONAVI_STANDARD_BROADCAST_RECV");
receiverIntent.putExtra("KEY_TYPE", 12116);
receiverIntent.putExtra("productName", "智能网联车盒");
receiverIntent.putExtra("productModel", "M2");
receiverIntent.putExtra("cameraDisplay", "1280x720");
receiverIntent.putExtra("cameraName", "C2390");
receiverIntent.putExtra("imu", "MPU-6050");
receiverIntent.putExtra("cameraConnect", "MIPI");
context.sendBroadcast(receiverIntent);
}
}
public class BroadcastUtils {
public static void sendCameraConfigBroadcast(Context context) {
Intent receiverIntent = new Intent();
receiverIntent.setAction("AUTONAVI_STANDARD_BROADCAST_RECV");
receiverIntent.putExtra("KEY_TYPE", 12116);
receiverIntent.putExtra("productName", "智能网联车盒");
receiverIntent.putExtra("productModel", "M2");
receiverIntent.putExtra("cameraDisplay", "1280x720");
receiverIntent.putExtra("cameraName", "C2390");
receiverIntent.putExtra("imu", "MPU-6050");
receiverIntent.putExtra("cameraConnect", "MIPI");
context.sendBroadcast(receiverIntent);
}
}
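The receiver above only fires on AUTONAVI_STANDARD_BROADCAST_SEND with KEY_TYPE 12116. A minimal way to exercise it during development (purely illustrative, not part of the commit) is to send that trigger broadcast yourself:

// Illustrative helper for local testing only; BroadcastTestSketch is an assumption.
package com.autonavi.amapauto.gdarcameraservicedemo.utils;

import android.content.Context;
import android.content.Intent;

public final class BroadcastTestSketch {
    public static void requestCameraConfig(Context context) {
        // Same action and key the receiver listens for; on Android 8.0+ an implicit broadcast to a
        // manifest-declared receiver may additionally need an explicit component to be delivered.
        Intent trigger = new Intent("AUTONAVI_STANDARD_BROADCAST_SEND");
        trigger.putExtra("KEY_TYPE", 12116);
        context.sendBroadcast(trigger);
        // ArCameraIdBroadcast.onReceive() then calls BroadcastUtils.sendCameraConfigBroadcast(context),
        // which replies with AUTONAVI_STANDARD_BROADCAST_RECV plus the camera configuration extras.
    }
}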
package com.autonavi.amapauto.proxy;
import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.media.Image;
import android.media.ImageReader;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.MemoryFile;
import android.os.RemoteException;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
import com.autonavi.amapauto.gdarcameraservice.IGDCameraStateCallBack;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
public class Camera2Proxy {
private static final String TAG = "Camera2Proxy";
private static final String MEMORY_FILE = "cneeds_camera_memory";
private Context mContext;
private final int MEMORY_SIZE = 3133440 + 1;
// camera
private int mCameraId = CameraCharacteristics.LENS_FACING_FRONT; // ID of the camera to open
private Size mPreviewSize = new Size(640, 480); // fixed 640*480 for the demo
private CameraDevice mCameraDevice; // camera device object
private CameraCaptureSession mCaptureSession;
// handler
private Handler mBackgroundHandler;
private HandlerThread mBackgroundThread;
// output
private Surface mPreviewSurface; // preview surface rendered to the screen
private ImageReader mImageReader; // receiver of preview frame callbacks
private IGDCameraStateCallBack mStateCallBack;
// private ImageReader.OnImageAvailableListener mOnImageAvailableListener;
private MemoryFile mMemoryFile;
public void setStateCallBack(IGDCameraStateCallBack stateCallBack) {
mStateCallBack =stateCallBack;
}
/**
* Callback for opening the camera
*/
private CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened( CameraDevice camera) {
Log.d(TAG, "onOpened");
mCameraDevice = camera;
initPreviewRequest();
try {
mStateCallBack.onConnected();
} catch (RemoteException e) {
e.printStackTrace();
}
}
@Override
public void onDisconnected( CameraDevice camera) {
Log.d(TAG, "onDisconnected");
releaseCamera();
try {
mStateCallBack.onDisconnected();
} catch (RemoteException e) {
e.printStackTrace();
}
}
@Override
public void onError( CameraDevice camera, int error) {
Log.e(TAG, "Camera Open failed, error: " + error);
releaseCamera();
try {
mStateCallBack.onError(error, "error : " +error);
} catch (RemoteException e) {
e.printStackTrace();
}
}
};
public Camera2Proxy(Context context) {
mContext = context;
try {
mMemoryFile = new MemoryFile(MEMORY_FILE, MEMORY_SIZE);
} catch (IOException e) {
e.printStackTrace();
}
}
public void setSurface(Surface surface) {
mPreviewSurface = surface;
}
public boolean openCamera() {
Log.v(TAG, "openCamera");
startBackgroundThread(); // paired with stopBackgroundThread() in releaseCamera()
try {
CameraManager cameraManager = (CameraManager) mContext.getSystemService(Context.CAMERA_SERVICE);
Log.d(TAG, "preview size: " + mPreviewSize.getWidth() + "*" + mPreviewSize.getHeight());
mImageReader = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(),
ImageFormat.YUV_420_888, 2);
mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, null);
// Open the camera
cameraManager.openCamera(Integer.toString(mCameraId), mStateCallback, mBackgroundHandler);
return true;
} catch (CameraAccessException e) {
e.printStackTrace();
}
return false;
}
public void releaseCamera() {
Log.v(TAG, "releaseCamera");
if (mImageReader != null) {
mImageReader.close();
mImageReader = null;
}
if (mCaptureSession != null) {
mCaptureSession.close();
mCaptureSession = null;
}
if (mCameraDevice != null) {
mCameraDevice.close();
mCameraDevice = null;
}
stopBackgroundThread(); // paired with startBackgroundThread() in openCamera()
}
public void setImageAvailableListener(ImageReader.OnImageAvailableListener onImageAvailableListener) {
mOnImageAvailableListener = onImageAvailableListener;
}
public void setPreviewSurface(SurfaceTexture surfaceTexture) {
// mPreviewSize must already be initialized at this point
surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
mPreviewSurface = new Surface(surfaceTexture);
}
private void initPreviewRequest() {
try {
final CaptureRequest.Builder builder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
// Add the surface that renders the preview to the screen
if (mPreviewSurface != null) {
builder.addTarget(mPreviewSurface);
}
// Add the ImageReader surface; preview data can then be obtained from the ImageReader
builder.addTarget(mImageReader.getSurface());
mCameraDevice.createCaptureSession(Arrays.asList(mPreviewSurface, mImageReader.getSurface()),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured( CameraCaptureSession session) {
mCaptureSession = session;
// Enable continuous auto-focus and auto-exposure
builder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
builder.set(CaptureRequest.CONTROL_AE_MODE,
CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
CaptureRequest captureRequest = builder.build();
try {
// Keep sending the repeating preview request
mCaptureSession.setRepeatingRequest(captureRequest, null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed( CameraCaptureSession session) {
Log.e(TAG, "ConfigureFailed. session: mCaptureSession");
}
}, mBackgroundHandler); // passing null as the handler would use the current thread's Looper
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
public Size getPreviewSize() {
return mPreviewSize;
}
public void switchCamera() {
mCameraId ^= 1;
Log.d(TAG, "switchCamera: mCameraId: " + mCameraId);
releaseCamera();
openCamera();
}
private void startBackgroundThread() {
if (mBackgroundThread == null || mBackgroundHandler == null) {
Log.v(TAG, "startBackgroundThread");
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
}
private void stopBackgroundThread() {
Log.v(TAG, "stopBackgroundThread");
if (mBackgroundThread == null) {
return;
}
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private byte[] mYuvBytes;
private boolean mIsShutter;
private ImageReader.OnImageAvailableListener mOnImageAvailableListener
= new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = reader.acquireLatestImage();
if (image == null) {
return;
}
int width = getPreviewSize().getWidth();
int height = getPreviewSize().getHeight();
if (mYuvBytes == null) {
// A YUV420 frame is always width * height * 3 / 2 bytes
mYuvBytes = new byte[width * height * 3 / 2];
}
// YUV_420_888
Image.Plane[] planes = image.getPlanes();
// Y channel, planes[0]
// Y size = width * height
// yBuffer.remaining() = width * height;
// pixelStride = 1
ByteBuffer yBuffer = planes[0].getBuffer();
int yLen = width * height;
yBuffer.get(mYuvBytes, 0, yLen);
// U channel, planes[1]
// U size = width * height / 4;
// uBuffer.remaining() = width * height / 2;
// pixelStride = 2
ByteBuffer uBuffer = planes[1].getBuffer();
int pixelStride = planes[1].getPixelStride(); // pixelStride = 2
for (int i = 0; i < uBuffer.remaining(); i+=pixelStride) {
mYuvBytes[yLen++] = uBuffer.get(i);
}
// V channel, planes[2]
// V size = width * height / 4;
// vBuffer.remaining() = width * height / 2;
// pixelStride = 2
ByteBuffer vBuffer = planes[2].getBuffer();
pixelStride = planes[2].getPixelStride(); // pixelStride = 2
for (int i = 0; i < vBuffer.remaining(); i+=pixelStride) {
mYuvBytes[yLen++] = vBuffer.get(i);
}
if (mIsShutter) {
mIsShutter = false;
// save yuv data
// String yuvPath = FileUtil.SAVE_DIR + System.currentTimeMillis() + ".yuv";
// FileUtil.saveBytes(mYuvBytes, yuvPath);
// save bitmap data
// String jpgPath = yuvPath.replace(".yuv", ".jpg");
// Bitmap bitmap = ColorConvertUtil.yuv420pToBitmap(mYuvBytes, width, height);
// FileUtil.saveBitmap(bitmap, jpgPath);
}
// byte[] buffer = new byte[20+mYuvBytes.length];
//
// SharedMemUtils.initHeader(buffer);
// boolean b = SharedMemUtils.canWrite(buffer);
// if (b) {
// SharedMemUtils.setOffset(buffer, 0);
// SharedMemUtils.setLength(buffer, 0);
// SharedMemUtils.setContentSize(buffer, mYuvBytes.length);
// SharedMemUtils.setContent(buffer, mYuvBytes);
// try {
// // Write once: the data is cleared after it has been read
// // Keep writing without reading: the data is not cleared, so watch out for overwriting (the offset value)
// mMemoryFile.writeBytes(buffer, 0, 0, buffer.length);
// Method getFileDescriptorMethod = mMemoryFile.getClass().getDeclaredMethod("getFileDescriptor");
// if(getFileDescriptorMethod != null){
// FileDescriptor fileDescriptor = (FileDescriptor) getFileDescriptorMethod.invoke(mMemoryFile);
// // Must be parceled before it can be sent across processes
// ParcelFileDescriptor pfd = ParcelFileDescriptor.dup(fileDescriptor);
//
// ArCameraOpenResultParam openResultParam = new ArCameraOpenResultParam();
// openResultParam.cameraId = "" + mCameraId;
// openResultParam.imageHeight = getPreviewSize().getHeight();
// openResultParam.imageWidth = getPreviewSize().getWidth();
// mStateCallBack.onOpened(pfd, openResultParam, MEMORY_FILE);
// }
//
// } catch (Exception e) {
// e.printStackTrace();
// }
// }
// Be sure to close the Image
image.close();
}
};
}
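A minimal sketch of how this Camera2Proxy is meant to be driven; the hosting component and where the preview Surface and AIDL callback come from are not shown in this diff, so the wiring below is an assumption, not the project's actual usage.

// Illustrative wiring only; Camera2ProxyUsageSketch is an assumption.
package com.autonavi.amapauto.proxy;

import android.content.Context;
import android.view.Surface;
import com.autonavi.amapauto.gdarcameraservice.IGDCameraStateCallBack;

final class Camera2ProxyUsageSketch {
    static Camera2Proxy start(Context context, Surface previewSurface, IGDCameraStateCallBack callBack) {
        Camera2Proxy proxy = new Camera2Proxy(context);
        proxy.setSurface(previewSurface);   // optional on-screen preview target
        proxy.setStateCallBack(callBack);   // AIDL callback notified on connect/disconnect/error
        proxy.openCamera();                 // starts the background thread and the repeating preview request
        return proxy;
    }

    static void stop(Camera2Proxy proxy) {
        proxy.releaseCamera();              // closes ImageReader, session and device, stops the thread
    }
}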
package com.example.cameraservicedemo;
import android.app.Activity;
import android.os.Bundle;
import android.view.TextureView;
import android.view.View;
import android.widget.Button;
public class Camera2Activity extends Activity {
private TextureView mTextureView;
private Button mBtnOpenCamera;
private Button mBtnCloseCamera;
private Camera2Helper mCamera2Helper;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_camera2);
mTextureView = (TextureView) findViewById(R.id.texture_view);
mBtnOpenCamera = (Button) findViewById(R.id.btn_open_camera);
mBtnCloseCamera = (Button) findViewById(R.id.btn_close_camera);
mCamera2Helper = new Camera2Helper(Camera2Activity.this, mTextureView);
mBtnOpenCamera.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
mCamera2Helper.initCameraInfo();
}
});
mBtnCloseCamera.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
mCamera2Helper.releaseCamera();
mCamera2Helper.releaseThread();
}
});
}
@Override
protected void onResume() {
super.onResume();
}
@Override
protected void onPause() {
super.onPause();
}
@Override
protected void onDestroy() {
super.onDestroy();
mCamera2Helper.releaseCamera();
mCamera2Helper.releaseThread();
}
}
\ No newline at end of file
package com.example.cameraservicedemo;
import android.app.Activity;
import android.content.Context;
import android.content.res.Configuration;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
import android.view.TextureView;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
public class Camera2Helper {
private static final String TAG = "Camera2Helper";
private final Activity mActivity;
private final TextureView mTextureView;
private final static int PREVIEW_WIDTH = 720;
private final static int PREVIEW_HEIGHT = 1280;
private final static int SAVE_WIDTH = 720;
private final static int SAVE_HEIGHT = 1280;
private CameraManager mCameraManager;
private ImageReader mImageReader;
private CameraDevice mCameraDevice;
private CameraCaptureSession mCameraCaptureSession;
private String mCameraId = "0";
private CameraCharacteristics mCameraCharacteristics;
private int mCameraSensorOrientation = 0; // camera sensor orientation
private int mCameraFacing = CameraCharacteristics.LENS_FACING_BACK; // use the back camera by default
private int mDisplayRotation; // device display rotation
private boolean canTakePic = true; // whether a picture can be taken
private boolean canExchangeCamera = false; // whether the camera can be switched
private Handler mCameraHandler;
private HandlerThread handlerThread = new HandlerThread("CameraThread");
private Size mPreviewSize = new Size(PREVIEW_WIDTH, PREVIEW_HEIGHT); // preview size
private Size mSavePicSize = new Size(SAVE_WIDTH, SAVE_HEIGHT); // saved picture size
public Camera2Helper(Activity activity, TextureView textureView) {
mActivity = activity;
mTextureView = textureView;
mDisplayRotation = mActivity.getWindowManager().getDefaultDisplay().getRotation();
handlerThread.start();
mCameraHandler = new Handler(handlerThread.getLooper());
mTextureView.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
initCameraInfo();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
releaseCamera();
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
});
}
public void initCameraInfo() {
mCameraManager = (CameraManager) mActivity.getSystemService(Context.CAMERA_SERVICE);
String[] cameraIdList = new String[0];
try {
cameraIdList = mCameraManager.getCameraIdList();
if (cameraIdList.length == 0) {
Log.d(TAG, "没有相机可用");
return;
}
for (int i = 0; i < cameraIdList.length; i++) {
CameraCharacteristics cameraCharacteristics = mCameraManager.getCameraCharacteristics(cameraIdList[i]);
Integer facing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING);
if (facing != null && facing == mCameraFacing) {
mCameraId = cameraIdList[i];
mCameraCharacteristics = cameraCharacteristics;
}
Log.d(TAG, "设备中的摄像头" + mCameraId);
}
Integer supportLevel = mCameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
if (supportLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
Log.d(TAG, "相机硬件不支持新特性");
}
// Get the camera sensor orientation
mCameraSensorOrientation = mCameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
// Get the StreamConfigurationMap, which manages all output formats and sizes supported by the camera
StreamConfigurationMap configurationMap = mCameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Size[] savePicSize = configurationMap.getOutputSizes(ImageFormat.JPEG);
Size[] previewSize = configurationMap.getOutputSizes(SurfaceTexture.class);
boolean exchange = exchangeWidthAndHeight(mDisplayRotation, mCameraSensorOrientation);
mSavePicSize = getBestSize(exchange ? mSavePicSize.getHeight() : mSavePicSize.getWidth(),
exchange ? mSavePicSize.getWidth() : mSavePicSize.getHeight(),
exchange ? mTextureView.getHeight() : mTextureView.getWidth(),
exchange ? mTextureView.getWidth() : mTextureView.getHeight(),
Arrays.asList(savePicSize));
mPreviewSize = getBestSize(exchange ? mPreviewSize.getHeight() : mPreviewSize.getWidth(),
exchange ? mPreviewSize.getWidth() : mPreviewSize.getHeight(),
exchange ? mTextureView.getHeight() : mTextureView.getWidth(),
exchange ? mTextureView.getWidth() : mTextureView.getHeight(),
Arrays.asList(previewSize));
mTextureView.getSurfaceTexture().setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
log("预览最优尺寸 :" + mPreviewSize.getWidth() * mPreviewSize.getHeight() + ", 比例 " + (float) mPreviewSize.getWidth() / mPreviewSize.getHeight());
log("保存图片最优尺寸 :" + mSavePicSize.getWidth() * mSavePicSize.getHeight() + ", 比例 " + (float) mSavePicSize.getWidth() / mSavePicSize.getHeight());
//根据预览的尺寸大小调整TextureView的大小,保证画面不被拉伸
// int orientation = mActivity.getResources().getConfiguration().orientation;
// if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
// mTextureView.setRotationX(mPreviewSize.getWidth());
// mTextureView.setRotationY(mPreviewSize.getHeight());
// } else {
// mTextureView.setRotationX(mPreviewSize.getHeight());
// mTextureView.setRotationX(mPreviewSize.getWidth());
// }
mImageReader = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(), ImageFormat.JPEG, 1);
mImageReader.setOnImageAvailableListener(onImageAvailableListener, mCameraHandler);
openCamera();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private ImageReader.OnImageAvailableListener onImageAvailableListener =
new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
// Got the captured picture data
Image image = reader.acquireNextImage();
if (image == null) {
return;
}
ByteBuffer byteBuffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[byteBuffer.remaining()];
byteBuffer.get(bytes);
image.close(); // close the Image, not the ImageReader, so later captures still work
log("Got captured picture data");
}
};
private void openCamera() throws CameraAccessException {
mCameraManager.openCamera(mCameraId, new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
mCameraDevice = camera;
try {
createCaptureSession(camera);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onDisconnected(CameraDevice camera) {
log("onDisconnected");
}
@Override
public void onError(CameraDevice camera, int error) {
log("onError" +error);
}
}, mCameraHandler);
}
private void createCaptureSession(CameraDevice cameraDevice) throws CameraAccessException {
final CaptureRequest.Builder builder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
Surface surface = new Surface(mTextureView.getSurfaceTexture());
builder.addTarget(surface);
// builder.addTarget(mImageReader.getSurface());
builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
cameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
mCameraCaptureSession = session;
try {
session.setRepeatingRequest(builder.build(), captureCallback, mCameraHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
}
@Override
public void onClosed(CameraCaptureSession session) {
super.onClosed(session);
releaseCamera();
releaseThread();
}
}, mCameraHandler);
}
public void releaseCamera() {
if (mCameraCaptureSession != null) {
mCameraCaptureSession.close();
mCameraCaptureSession = null;
}
if (mCameraDevice != null) {
mCameraDevice.close();
mCameraDevice = null;
}
if (mImageReader != null) {
mImageReader.close();
mImageReader = null;
}
canExchangeCamera = false;
}
public void releaseThread() {
if (handlerThread == null) {
return;
}
handlerThread.quitSafely();
try {
handlerThread.join();
handlerThread = null;
mCameraHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
CameraCaptureSession.CaptureCallback captureCallback = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
canExchangeCamera = true;
canTakePic = true;
}
@Override
public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
super.onCaptureFailed(session, request, failure);
log("onCaptureFailed");
}
};
/**
* Returns whether width and height need to be swapped, based on the given display rotation [displayRotation] and camera sensor orientation [sensorOrientation]
*/
private boolean exchangeWidthAndHeight(int displayRotation, int sensorOrientation) {
boolean exchange = false;
switch (displayRotation) {
case Surface.ROTATION_0:
case Surface.ROTATION_180:
if (sensorOrientation == 90 || sensorOrientation == 270) {
exchange = true;
}
break;
case Surface.ROTATION_90:
case Surface.ROTATION_270:
if (sensorOrientation == 0 || sensorOrientation == 180) {
exchange = true;
}
break;
default:
log("Display rotation is invalid: " + displayRotation);
break;
}
log("屏幕方向" + displayRotation);
log("相机方向" + sensorOrientation);
return exchange;
}
private void log(String msg) {
Log.d(TAG, msg);
}
/**
* Returns the supported size that equals or comes closest to the requested width and height
*
* @param targetWidth  target width
* @param targetHeight target height
* @param maxWidth     maximum width (the TextureView width)
* @param maxHeight    maximum height (the TextureView height)
* @param sizeList     list of supported sizes
* @return the size that equals or comes closest to the requested width and height
*/
private Size getBestSize(int targetWidth, int targetHeight, int maxWidth, int maxHeight, List<Size> sizeList) {
List<Size> bigEnough = new ArrayList<>(); // sizes at least as large as the target
List<Size> notBigEnough = new ArrayList<>(); // sizes smaller than the target
for (int i = 0; i < sizeList.size(); i++) {
Size size = sizeList.get(i);
// width <= maxWidth && height <= maxHeight && aspect ratio == target aspect ratio
if (size.getWidth() <= maxWidth && size.getHeight() <= maxHeight
&& size.getWidth() == size.getHeight() * targetWidth / targetHeight) {
if (size.getWidth() >= targetWidth && size.getHeight() >= targetHeight)
bigEnough.add(size);
else
notBigEnough.add(size);
}
log("系统支持的尺寸: " + size.getWidth() * size.getHeight() + " , 比例 :" + (float) size.getWidth() / size.getHeight());
}
log("最大尺寸 :" + maxWidth * maxHeight + ", 比例 :" + (float) targetWidth / targetHeight);
log("目标尺寸 :" + targetWidth * targetHeight + ", 比例 :" + (float) targetWidth / targetHeight);
//选择bigEnough中最小的值 或 notBigEnough中最大的值
Size size;
if (bigEnough.size() > 0) {
size = (Size) Collections.min(bigEnough, new CompareSizesByArea());
} else if (notBigEnough.size() > 0) {
size = (Size) Collections.min(notBigEnough, new CompareSizesByArea());
} else {
size = sizeList.get(0);
}
return size;
}
private class CompareSizesByArea implements Comparator<Size> {
@Override
public int compare(Size size1, Size size2) {
return Long.signum((long) size1.getWidth() * size1.getHeight() - (long) size2.getWidth() * size2.getHeight());
}
}
}
......@@ -3,74 +3,28 @@ package com.example.cameraservicedemo;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.TextureView;
import android.view.View;
import android.widget.Button;
import com.example.cameraservicedemo.camera1.Camera1;
import com.example.cameraservicedemo.getpreview.PreviewActivity;
import com.example.cameraservicedemo.video.CameraActivity;
public class MainActivity extends Activity implements View.OnClickListener {
private Button mBtnCamera2;
private Button mBtnVideo;
private Button mBtnMTKCamera;
private Button btn_preview;
private Button btn_camera1;
private Camera2Helper mCamera2Helper;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
mBtnCamera2 = (Button) findViewById(R.id.btn_camera2);
mBtnVideo = (Button) findViewById(R.id.btn_video);
mBtnMTKCamera = (Button) findViewById(R.id.btn_mtk_camera);
btn_preview = (Button) findViewById(R.id.btn_preview);
btn_camera1 = (Button) findViewById(R.id.btn_camera1);
mBtnCamera2.setOnClickListener(this);
mBtnMTKCamera.setOnClickListener(this);
mBtnVideo.setOnClickListener(this);
btn_preview.setOnClickListener(this);
btn_camera1.setOnClickListener(this);
}
@Override
protected void onResume() {
super.onResume();
}
@Override
protected void onPause() {
super.onPause();
}
@Override
protected void onDestroy() {
super.onDestroy();
}
@Override
public void onClick(View v) {
Intent intent;
switch (v.getId()) {
case R.id.btn_camera2:
intent = new Intent(MainActivity.this, Camera2Activity.class);
startActivity(intent);
break;
case R.id.btn_mtk_camera:
// intent = new Intent(MainActivity.this, MTKCameraActivity.class);
// startActivity(intent);
break;
case R.id.btn_video:
intent = new Intent(MainActivity.this, CameraActivity.class);
startActivity(intent);
break;
case R.id.btn_preview:
intent = new Intent(MainActivity.this, PreviewActivity.class);
startActivity(intent);
break;
case R.id.btn_camera1:
intent = new Intent(MainActivity.this, Camera1.class);
startActivity(intent);
......
package com.example.cameraservicedemo.getpreview;
import android.app.Activity;
import android.content.Context;
import android.util.AttributeSet;
import android.view.TextureView;
import com.example.cameraservicedemo.getpreview.util.Camera2Proxy;
public class Camera2View extends TextureView {
private static final String TAG = "Camera2View";
private Camera2Proxy mCameraProxy;
private int mRatioWidth = 0;
private int mRatioHeight = 0;
public Camera2View(Context context) {
this(context, null);
}
public Camera2View(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}
public Camera2View(Context context, AttributeSet attrs, int defStyleAttr) {
this(context, attrs, defStyleAttr, 0);
}
public Camera2View(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
super(context, attrs, defStyleAttr, defStyleRes);
init(context);
}
private void init(Context context) {
mCameraProxy = new Camera2Proxy((Activity) context);
}
public void setAspectRatio(int width, int height) {
if (width < 0 || height < 0) {
throw new IllegalArgumentException("Size cannot be negative.");
}
mRatioWidth = width;
mRatioHeight = height;
requestLayout();
}
public Camera2Proxy getCameraProxy() {
return mCameraProxy;
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
int width = MeasureSpec.getSize(widthMeasureSpec);
int height = MeasureSpec.getSize(heightMeasureSpec);
if (0 == mRatioWidth || 0 == mRatioHeight) {
setMeasuredDimension(width, height);
} else {
if (width < height * mRatioWidth / mRatioHeight) {
setMeasuredDimension(width, width * mRatioHeight / mRatioWidth);
} else {
setMeasuredDimension(height * mRatioWidth / mRatioHeight, height);
}
}
}
}
package com.example.cameraservicedemo.getpreview;
import android.app.Fragment;
import android.graphics.SurfaceTexture;
import android.media.Image;
import android.media.ImageReader;
import android.os.Bundle;
import android.util.Size;
import android.view.LayoutInflater;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import com.example.cameraservicedemo.R;
import com.example.cameraservicedemo.getpreview.util.Camera2Proxy;
import java.nio.ByteBuffer;
public class CameraFragment extends Fragment implements View.OnClickListener {
private static final String TAG = "CameraFragment";
private ImageView mCloseIv;
private ImageView mSwitchCameraIv;
private ImageView mTakePictureIv;
private Camera2View mCameraView;
private Camera2Proxy mCameraProxy;
private final TextureView.SurfaceTextureListener mSurfaceTextureListener
= new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) {
mCameraProxy.openCamera();
mCameraProxy.setPreviewSurface(texture);
// Size the view according to the camera preview to avoid a distorted image
Size previewSize = mCameraProxy.getPreviewSize();
mCameraView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth());
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture texture) {
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture texture) {
}
};
@Override
public View onCreateView( LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_camera, null);
initView(rootView);
return rootView;
}
private void initView(View rootView) {
mCloseIv = (ImageView) rootView.findViewById(R.id.toolbar_close_iv);
mSwitchCameraIv = (ImageView) rootView.findViewById(R.id.toolbar_switch_iv);
mTakePictureIv = (ImageView) rootView.findViewById(R.id.take_picture_iv);
mCameraView = (Camera2View) rootView.findViewById(R.id.camera_view);
mCameraProxy = mCameraView.getCameraProxy();
mCloseIv.setOnClickListener(this);
mSwitchCameraIv.setOnClickListener(this);
mTakePictureIv.setOnClickListener(this);
mCameraProxy.setImageAvailableListener(mOnImageAvailableListener);
}
@Override
public void onResume() {
super.onResume();
if (mCameraView.isAvailable()) {
mCameraProxy.openCamera();
} else {
mCameraView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
@Override
public void onPause() {
super.onPause();
mCameraProxy.releaseCamera();
}
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.toolbar_close_iv:
getActivity().finish();
break;
case R.id.toolbar_switch_iv:
mCameraProxy.switchCamera();
break;
case R.id.take_picture_iv:
mIsShutter = true;
break;
}
}
private byte[] mYuvBytes;
private boolean mIsShutter;
private ImageReader.OnImageAvailableListener mOnImageAvailableListener
= new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = reader.acquireLatestImage();
if (image == null) {
return;
}
int width = mCameraProxy.getPreviewSize().getWidth();
int height = mCameraProxy.getPreviewSize().getHeight();
if (mYuvBytes == null) {
// A YUV420 frame is always width * height * 3 / 2 bytes
mYuvBytes = new byte[width * height * 3 / 2];
}
// YUV_420_888
Image.Plane[] planes = image.getPlanes();
// Y channel, planes[0]
// Y size = width * height
// yBuffer.remaining() = width * height;
// pixelStride = 1
ByteBuffer yBuffer = planes[0].getBuffer();
int yLen = width * height;
yBuffer.get(mYuvBytes, 0, yLen);
// U channel, planes[1]
// U size = width * height / 4;
// uBuffer.remaining() = width * height / 2;
// pixelStride = 2
ByteBuffer uBuffer = planes[1].getBuffer();
int pixelStride = planes[1].getPixelStride(); // pixelStride = 2
for (int i = 0; i < uBuffer.remaining(); i+=pixelStride) {
mYuvBytes[yLen++] = uBuffer.get(i);
}
// V channel, planes[2]
// V size = width * height / 4;
// vBuffer.remaining() = width * height / 2;
// pixelStride = 2
ByteBuffer vBuffer = planes[2].getBuffer();
pixelStride = planes[2].getPixelStride(); // pixelStride = 2
for (int i = 0; i < vBuffer.remaining(); i+=pixelStride) {
mYuvBytes[yLen++] = vBuffer.get(i);
}
if (mIsShutter) {
mIsShutter = false;
// save yuv data
// String yuvPath = FileUtil.SAVE_DIR + System.currentTimeMillis() + ".yuv";
// FileUtil.saveBytes(mYuvBytes, yuvPath);
// save bitmap data
// String jpgPath = yuvPath.replace(".yuv", ".jpg");
// Bitmap bitmap = ColorConvertUtil.yuv420pToBitmap(mYuvBytes, width, height);
// FileUtil.saveBitmap(bitmap, jpgPath);
}
// Be sure to close the Image
image.close();
}
};
}
package com.example.cameraservicedemo.getpreview;
import android.app.Activity;
import android.os.Bundle;
import com.example.cameraservicedemo.R;
import com.example.cameraservicedemo.video.Camera2VideoFragment;
public class PreviewActivity extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_camera);
if (null == savedInstanceState) {
}
}
@Override
protected void onResume() {
super.onResume();
getFragmentManager().beginTransaction()
.replace(R.id.container, new CameraFragment())
.commit();
}
}
package com.example.cameraservicedemo.getpreview.util;
import android.app.Activity;
import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.media.ImageReader;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
import java.util.Arrays;
public class Camera2Proxy {
private static final String TAG = "Camera2Proxy";
private Activity mActivity;
// camera
private int mCameraId = CameraCharacteristics.LENS_FACING_FRONT; // ID of the camera to open
private Size mPreviewSize = new Size(640, 480); // fixed 640*480 for the demo
private CameraDevice mCameraDevice; // camera device object
private CameraCaptureSession mCaptureSession;
// handler
private Handler mBackgroundHandler;
private HandlerThread mBackgroundThread;
// output
private Surface mPreviewSurface; // preview surface rendered to the screen
private ImageReader mImageReader; // receiver of preview frame callbacks
private ImageReader.OnImageAvailableListener mOnImageAvailableListener;
/**
* Callback for opening the camera
*/
private CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened( CameraDevice camera) {
Log.d(TAG, "onOpened");
mCameraDevice = camera;
initPreviewRequest();
}
@Override
public void onDisconnected( CameraDevice camera) {
Log.d(TAG, "onDisconnected");
releaseCamera();
}
@Override
public void onError( CameraDevice camera, int error) {
Log.e(TAG, "Camera Open failed, error: " + error);
releaseCamera();
}
};
public Camera2Proxy(Activity activity) {
mActivity = activity;
}
public void openCamera() {
Log.v(TAG, "openCamera");
startBackgroundThread(); // paired with stopBackgroundThread() in releaseCamera()
try {
CameraManager cameraManager = (CameraManager) mActivity.getSystemService(Context.CAMERA_SERVICE);
Log.d(TAG, "preview size: " + mPreviewSize.getWidth() + "*" + mPreviewSize.getHeight());
mImageReader = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(),
ImageFormat.YUV_420_888, 2);
mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, null);
// Open the camera
cameraManager.openCamera(Integer.toString(mCameraId), mStateCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
public void releaseCamera() {
Log.v(TAG, "releaseCamera");
if (mImageReader != null) {
mImageReader.close();
mImageReader = null;
}
if (mCaptureSession != null) {
mCaptureSession.close();
mCaptureSession = null;
}
if (mCameraDevice != null) {
mCameraDevice.close();
mCameraDevice = null;
}
stopBackgroundThread(); // paired with startBackgroundThread() in openCamera()
}
public void setImageAvailableListener(ImageReader.OnImageAvailableListener onImageAvailableListener) {
mOnImageAvailableListener = onImageAvailableListener;
}
public void setPreviewSurface(SurfaceTexture surfaceTexture) {
// mPreviewSize must already be initialized at this point
surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
mPreviewSurface = new Surface(surfaceTexture);
}
private void initPreviewRequest() {
try {
final CaptureRequest.Builder builder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
// Add the surface that renders the preview to the screen
builder.addTarget(mPreviewSurface);
// Add the ImageReader surface; preview data can then be obtained from the ImageReader
builder.addTarget(mImageReader.getSurface());
mCameraDevice.createCaptureSession(Arrays.asList(mPreviewSurface, mImageReader.getSurface()),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured( CameraCaptureSession session) {
mCaptureSession = session;
// Enable continuous auto-focus and auto-exposure
builder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
builder.set(CaptureRequest.CONTROL_AE_MODE,
CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
CaptureRequest captureRequest = builder.build();
try {
// Keep sending the repeating preview request
mCaptureSession.setRepeatingRequest(captureRequest, null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed( CameraCaptureSession session) {
Log.e(TAG, "ConfigureFailed. session: mCaptureSession");
}
}, mBackgroundHandler); // passing null as the handler would use the current thread's Looper
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
public Size getPreviewSize() {
return mPreviewSize;
}
public void switchCamera() {
mCameraId ^= 1;
Log.d(TAG, "switchCamera: mCameraId: " + mCameraId);
releaseCamera();
openCamera();
}
private void startBackgroundThread() {
if (mBackgroundThread == null || mBackgroundHandler == null) {
Log.v(TAG, "startBackgroundThread");
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
}
private void stopBackgroundThread() {
Log.v(TAG, "stopBackgroundThread");
if (mBackgroundThread == null) {
return;
}
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
package com.example.cameraservicedemo.getpreview.util;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.util.Log;
import java.nio.ByteBuffer;
public class ColorConvertUtil {
private static final String TAG = "ColorConvertUtil";
public static Bitmap yuv420pToBitmap(byte[] yuv420p, int width, int height) {
if (yuv420p == null || width < 0 || height < 0) {
Log.e(TAG, "cropNv21ToBitmap failed: illegal para !");
return null;
}
byte[] rgba = new byte[width * height * 4];
ColorConvertUtil.yuv420pToRGBA(yuv420p, width, height, rgba);
Bitmap bitmap = byteArrayToBitmap(rgba, width, height);
return bitmap;
}
public static void yuv420pToRGBA(byte[] yuv420p, int width, int height, byte[] rgba) {
if (yuv420p == null || rgba == null) {
Log.e(TAG, "yuv420pToRGBA failed: yuv420p or rgba is null ");
return;
}
if (yuv420p.length != width * height * 3 / 2) {
Log.e(TAG, "yuv420p length: " + yuv420p.length);
Log.e(TAG, "yuv420pToRGBA failed: yuv420p length error!");
return;
}
// NativeLibrary.yuv420p2rgba(yuv420p, width, height, rgba);
}
/**
* Converts an rgba byte[] into a Bitmap
*
* @param rgba   input rgba data
* @param width  image width
* @param height image height
* @return the resulting bitmap
*/
public static Bitmap byteArrayToBitmap(byte[] rgba, int width, int height) {
ByteBuffer buffer = ByteBuffer.wrap(rgba);
Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
bitmap.copyPixelsFromBuffer(buffer);
return bitmap;
}
public static Bitmap rotateBitmap(Bitmap bitmap, int rotate, boolean mirrorX) {
Matrix matrix = new Matrix();
matrix.postRotate(rotate);
if (mirrorX) {
matrix.postScale(-1f, 1f);
}
Bitmap rotateBitmap = null;
if (bitmap != null) {
rotateBitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, false);
bitmap.recycle(); // recycle the old bitmap
}
return rotateBitmap;
}
}
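Note that yuv420pToRGBA above delegates to NativeLibrary.yuv420p2rgba, which is commented out further below, so the conversion is effectively a no-op in this tree. Purely as an illustration of what that step does (a plain-Java BT.601-style conversion, not the project's native implementation), a fallback could look like this:

// Hypothetical pure-Java fallback for the commented-out native yuv420p2rgba call; not part of the commit.
// Assumes I420 layout: Y plane (w*h), then U (w*h/4), then V (w*h/4).
package com.example.cameraservicedemo.getpreview.util;

public class ColorConvertFallbackSketch {
    public static void yuv420pToRgba(byte[] yuv420p, int width, int height, byte[] rgba) {
        int frameSize = width * height;
        int uStart = frameSize;                  // U plane follows the Y plane
        int vStart = frameSize + frameSize / 4;  // V plane follows the U plane
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                int yIndex = y * width + x;
                int uvIndex = (y / 2) * (width / 2) + (x / 2);
                int Y = yuv420p[yIndex] & 0xFF;
                int U = (yuv420p[uStart + uvIndex] & 0xFF) - 128;
                int V = (yuv420p[vStart + uvIndex] & 0xFF) - 128;
                int out = yIndex * 4;
                rgba[out]     = (byte) clamp(Y + (int) (1.402f * V));              // R
                rgba[out + 1] = (byte) clamp(Y - (int) (0.344f * U + 0.714f * V)); // G
                rgba[out + 2] = (byte) clamp(Y + (int) (1.772f * U));              // B
                rgba[out + 3] = (byte) 0xFF;                                       // A
            }
        }
    }

    private static int clamp(int v) {
        return v < 0 ? 0 : Math.min(v, 255);
    }
}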
package com.example.cameraservicedemo.getpreview.util;
import android.graphics.Bitmap;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
public class FileUtil {
public static final String SAVE_DIR = "/sdcard/DCIM/Camera2GetPreview/";
public static boolean saveBytes(byte[] bytes, String imagePath) {
File file = new File(imagePath);
File parentFile = file.getParentFile();
if (!parentFile.exists()) {
parentFile.mkdirs();
}
try {
FileOutputStream fos = new FileOutputStream(file);
fos.write(bytes);
fos.flush();
fos.close();
return true;
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
return false;
}
public static boolean saveBitmap(Bitmap bitmap, String imagePath) {
if (bitmap == null) {
return false;
}
File file = new File(imagePath);
File parentFile = file.getParentFile();
if (!parentFile.exists()) {
parentFile.mkdirs();
}
try {
FileOutputStream fos = new FileOutputStream(file);
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, fos);
fos.flush();
fos.close();
return true;
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
return false;
}
}
package com.example.cameraservicedemo.getpreview.util;
public class NativeLibrary {
// static {
// System.loadLibrary("native-lib");
// }
// public static native void yuv420p2rgba(byte[] yuv420p,
// int width,
// int height,
// byte[] rgba);
}
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.cameraservicedemo.video;
import android.content.Context;
import android.util.AttributeSet;
import android.view.TextureView;
/**
* A {@link TextureView} that can be adjusted to a specified aspect ratio.
*/
public class AutoFitTextureView extends TextureView {
private int mRatioWidth = 0;
private int mRatioHeight = 0;
public AutoFitTextureView(Context context) {
this(context, null);
}
public AutoFitTextureView(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}
public AutoFitTextureView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
}
/**
* Sets the aspect ratio for this view. The size of the view will be measured based on the ratio
* calculated from the parameters. Note that the actual sizes of parameters don't matter, that
* is, calling setAspectRatio(2, 3) and setAspectRatio(4, 6) make the same result.
*
* @param width Relative horizontal size
* @param height Relative vertical size
*/
public void setAspectRatio(int width, int height) {
if (width < 0 || height < 0) {
throw new IllegalArgumentException("Size cannot be negative.");
}
mRatioWidth = width;
mRatioHeight = height;
requestLayout();
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
int width = MeasureSpec.getSize(widthMeasureSpec);
int height = MeasureSpec.getSize(heightMeasureSpec);
if (0 == mRatioWidth || 0 == mRatioHeight) {
setMeasuredDimension(width, height);
} else {
if (width < height * mRatioWidth / mRatioHeight) {
setMeasuredDimension(width, width * mRatioHeight / mRatioWidth);
} else {
setMeasuredDimension(height * mRatioWidth / mRatioHeight, height);
}
}
}
}
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.cameraservicedemo.video;
import android.Manifest;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.DialogFragment;
import android.app.Fragment;
import android.content.Context;
import android.content.DialogInterface;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
import android.graphics.Matrix;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.Toast;
import com.example.cameraservicedemo.R;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
public class Camera2VideoFragment extends Fragment
implements View.OnClickListener {
private static final int SENSOR_ORIENTATION_DEFAULT_DEGREES = 90;
private static final int SENSOR_ORIENTATION_INVERSE_DEGREES = 270;
private static final SparseIntArray DEFAULT_ORIENTATIONS = new SparseIntArray();
private static final SparseIntArray INVERSE_ORIENTATIONS = new SparseIntArray();
private static final String TAG = "Camera2VideoFragment";
private static final int REQUEST_VIDEO_PERMISSIONS = 1;
private static final String FRAGMENT_DIALOG = "dialog";
private static final String[] VIDEO_PERMISSIONS = {
Manifest.permission.CAMERA,
Manifest.permission.RECORD_AUDIO,
};
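// These tables map the display rotation to a MediaRecorder orientation hint for a camera
// sensor mounted at 90 degrees (DEFAULT_ORIENTATIONS) or 270 degrees (INVERSE_ORIENTATIONS);
// see setUpMediaRecorder() below.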
static {
DEFAULT_ORIENTATIONS.append(Surface.ROTATION_0, 90);
DEFAULT_ORIENTATIONS.append(Surface.ROTATION_90, 0);
DEFAULT_ORIENTATIONS.append(Surface.ROTATION_180, 270);
DEFAULT_ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
static {
INVERSE_ORIENTATIONS.append(Surface.ROTATION_0, 270);
INVERSE_ORIENTATIONS.append(Surface.ROTATION_90, 180);
INVERSE_ORIENTATIONS.append(Surface.ROTATION_180, 90);
INVERSE_ORIENTATIONS.append(Surface.ROTATION_270, 0);
}
/**
* An {@link AutoFitTextureView} for camera preview.
*/
private AutoFitTextureView mTextureView;
/**
* Button to record video
*/
private Button mButtonVideo;
/**
* A reference to the opened {@link android.hardware.camera2.CameraDevice}.
*/
private CameraDevice mCameraDevice;
/**
* A reference to the current {@link android.hardware.camera2.CameraCaptureSession} for
* preview.
*/
private CameraCaptureSession mPreviewSession;
/**
* {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a
* {@link TextureView}.
*/
private TextureView.SurfaceTextureListener mSurfaceTextureListener
= new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture,
int width, int height) {
openCamera(width, height);
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture,
int width, int height) {
configureTransform(width, height);
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
}
};
/**
* The {@link android.util.Size} of camera preview.
*/
private Size mPreviewSize;
/**
* The {@link android.util.Size} of video recording.
*/
private Size mVideoSize;
/**
* MediaRecorder
*/
private MediaRecorder mMediaRecorder;
/**
* Whether the app is recording video now
*/
private boolean mIsRecordingVideo;
/**
* An additional thread for running tasks that shouldn't block the UI.
*/
private HandlerThread mBackgroundThread;
/**
* A {@link Handler} for running tasks in the background.
*/
private Handler mBackgroundHandler;
/**
* A {@link Semaphore} to prevent the app from exiting before closing the camera.
*/
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
/**
* {@link CameraDevice.StateCallback} is called when {@link CameraDevice} changes its status.
*/
private CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice cameraDevice) {
mCameraDevice = cameraDevice;
startPreview();
mCameraOpenCloseLock.release();
if (null != mTextureView) {
configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
}
}
@Override
public void onDisconnected(CameraDevice cameraDevice) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
}
@Override
public void onError(CameraDevice cameraDevice, int error) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
Activity activity = getActivity();
if (null != activity) {
activity.finish();
}
}
};
private Integer mSensorOrientation;
private String mNextVideoAbsolutePath;
private CaptureRequest.Builder mPreviewBuilder;
public static Camera2VideoFragment newInstance() {
return new Camera2VideoFragment();
}
/**
* In this sample, we choose a video size with a 4:3 aspect ratio and a width of at most
* 1080 pixels, since MediaRecorder cannot reliably handle higher-resolution video.
*
* @param choices The list of available sizes
* @return The video size
*/
private static Size chooseVideoSize(Size[] choices) {
for (Size size : choices) {
if (size.getWidth() == size.getHeight() * 4 / 3 && size.getWidth() <= 1080) {
return size;
}
}
Log.e(TAG, "Couldn't find any suitable video size");
return choices[choices.length - 1];
}
/**
* Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
* width and height are at least as large as the respective requested values, and whose aspect
* ratio matches with the specified value.
*
* @param choices The list of sizes that the camera supports for the intended output class
* @param width The minimum desired width
* @param height The minimum desired height
* @param aspectRatio The aspect ratio
* @return The optimal {@code Size}, or an arbitrary one if none were big enough
*/
private static Size chooseOptimalSize(Size[] choices, int width, int height, Size aspectRatio) {
// Collect the supported resolutions that are at least as big as the preview Surface
List<Size> bigEnough = new ArrayList<>();
int w = aspectRatio.getWidth();
int h = aspectRatio.getHeight();
for (Size option : choices) {
if (option.getHeight() == option.getWidth() * h / w &&
option.getWidth() >= width && option.getHeight() >= height) {
bigEnough.add(option);
}
}
// Pick the smallest of those, assuming we found any
if (bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizesByArea());
} else {
Log.e(TAG, "Couldn't find any suitable preview size");
return choices[0];
}
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
return inflater.inflate(R.layout.fragment_camera2_video, container, false);
}
@Override
public void onViewCreated(final View view, Bundle savedInstanceState) {
mTextureView = (AutoFitTextureView) view.findViewById(R.id.texture);
mButtonVideo = (Button) view.findViewById(R.id.video);
mButtonVideo.setOnClickListener(this);
view.findViewById(R.id.info).setOnClickListener(this);
}
@Override
public void onResume() {
super.onResume();
startBackgroundThread();
if (mTextureView.isAvailable()) {
openCamera(mTextureView.getWidth(), mTextureView.getHeight());
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
@Override
public void onPause() {
closeCamera();
stopBackgroundThread();
super.onPause();
}
@Override
public void onClick(View view) {
switch (view.getId()) {
case R.id.video: {
if (mIsRecordingVideo) {
stopRecordingVideo();
} else {
startRecordingVideo();
}
break;
}
case R.id.info: {
Activity activity = getActivity();
if (null != activity) {
new AlertDialog.Builder(activity)
.setMessage(R.string.intro_message)
.setPositiveButton(android.R.string.ok, null)
.show();
}
break;
}
}
}
/**
* Starts a background thread and its {@link Handler}.
*/
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
/**
* Stops the background thread and its {@link Handler}.
*/
private void stopBackgroundThread() {
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
/**
* Gets whether you should show UI with rationale for requesting permissions.
*
* @param permissions The permissions your app wants to request.
* @return Whether you can show permission rationale UI.
*/
// private boolean shouldShowRequestPermissionRationale(String[] permissions) {
// for (String permission : permissions) {
// if (FragmentCompat.shouldShowRequestPermissionRationale(this, permission)) {
// return true;
// }
// }
// return false;
// }
/**
* Requests permissions needed for recording video.
*/
// private void requestVideoPermissions() {
// if (shouldShowRequestPermissionRationale(VIDEO_PERMISSIONS)) {
// new ConfirmationDialog().show(getChildFragmentManager(), FRAGMENT_DIALOG);
// } else {
// FragmentCompat.requestPermissions(this, VIDEO_PERMISSIONS, REQUEST_VIDEO_PERMISSIONS);
// }
// }
// @Override
// public void onRequestPermissionsResult(int requestCode, String[] permissions,
// int[] grantResults) {
// Log.d(TAG, "onRequestPermissionsResult");
// if (requestCode == REQUEST_VIDEO_PERMISSIONS) {
// if (grantResults.length == VIDEO_PERMISSIONS.length) {
// for (int result : grantResults) {
// if (result != PackageManager.PERMISSION_GRANTED) {
// ErrorDialog.newInstance(getString(R.string.permission_request))
// .show(getChildFragmentManager(), FRAGMENT_DIALOG);
// break;
// }
// }
// } else {
// ErrorDialog.newInstance(getString(R.string.permission_request))
// .show(getChildFragmentManager(), FRAGMENT_DIALOG);
// }
// } else {
//// super.onRequestPermissionsResult(requestCode, permissions, grantResults);
// }
// }
// private boolean hasPermissionsGranted(String[] permissions) {
// for (String permission : permissions) {
// if (ActivityCompat.checkSelfPermission(getActivity(), permission)
// != PackageManager.PERMISSION_GRANTED) {
// return false;
// }
// }
// return true;
// }
/**
* Tries to open a {@link CameraDevice}. The result is reported to `mStateCallback`.
*/
@SuppressWarnings("MissingPermission")
private void openCamera(int width, int height) {
// if (!hasPermissionsGranted(VIDEO_PERMISSIONS)) {
//// requestVideoPermissions();
// Toast.makeText(getActivity(), "缺少权限", Toast.LENGTH_SHORT).show();
// return;
// }
final Activity activity = getActivity();
if (null == activity || activity.isFinishing()) {
return;
}
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
Log.d(TAG, "tryAcquire");
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
String cameraId = manager.getCameraIdList()[0];
// Choose the sizes for camera preview and video recording
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics
.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
if (map == null) {
throw new RuntimeException("Cannot get available preview/video sizes");
}
mVideoSize = chooseVideoSize(map.getOutputSizes(MediaRecorder.class));
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
width, height, mVideoSize);
int orientation = getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
} else {
mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
}
configureTransform(width, height);
mMediaRecorder = new MediaRecorder();
manager.openCamera(cameraId, mStateCallback, null);
} catch (CameraAccessException e) {
Toast.makeText(activity, "Cannot access the camera.", Toast.LENGTH_SHORT).show();
activity.finish();
} catch (NullPointerException e) {
// Currently an NPE is thrown when the Camera2 API is used but not supported on the
// device this code runs on.
ErrorDialog.newInstance(getString(R.string.camera_error))
.show(getChildFragmentManager(), FRAGMENT_DIALOG);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera opening.");
}
}
private void closeCamera() {
try {
mCameraOpenCloseLock.acquire();
closePreviewSession();
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
}
if (null != mMediaRecorder) {
mMediaRecorder.release();
mMediaRecorder = null;
}
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.");
} finally {
mCameraOpenCloseLock.release();
}
}
/**
* Start the camera preview.
*/
private void startPreview() {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
return;
}
try {
closePreviewSession();
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
Surface previewSurface = new Surface(texture);
mPreviewBuilder.addTarget(previewSurface);
mCameraDevice.createCaptureSession(Collections.singletonList(previewSurface),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
mPreviewSession = session;
updatePreview();
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
Activity activity = getActivity();
if (null != activity) {
Toast.makeText(activity, "Failed", Toast.LENGTH_SHORT).show();
}
}
@Override
public void onClosed(CameraCaptureSession session) {
super.onClosed(session);
stopBackgroundThread();
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
/**
* Update the camera preview. {@link #startPreview()} needs to be called in advance.
*/
private void updatePreview() {
if (null == mCameraDevice) {
return;
}
try {
setUpCaptureRequestBuilder(mPreviewBuilder);
// The repeating request is handled on the shared background handler created in startBackgroundThread().
mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void setUpCaptureRequestBuilder(CaptureRequest.Builder builder) {
builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
}
/**
* Configures the necessary {@link android.graphics.Matrix} transformation to `mTextureView`.
* This method should not be called until the camera preview size is determined in
* openCamera, or until the size of `mTextureView` is fixed.
*
* @param viewWidth The width of `mTextureView`
* @param viewHeight The height of `mTextureView`
*/
private void configureTransform(int viewWidth, int viewHeight) {
Activity activity = getActivity();
if (null == mTextureView || null == mPreviewSize || null == activity) {
return;
}
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
Matrix matrix = new Matrix();
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale = Math.max(
(float) viewHeight / mPreviewSize.getHeight(),
(float) viewWidth / mPreviewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
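// Surface.ROTATION_90 == 1 and Surface.ROTATION_270 == 3, so this rotates by -90 or +90 degrees.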
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
}
mTextureView.setTransform(matrix);
}
private void setUpMediaRecorder() throws IOException {
final Activity activity = getActivity();
if (null == activity) {
return;
}
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
if (mNextVideoAbsolutePath == null || mNextVideoAbsolutePath.isEmpty()) {
mNextVideoAbsolutePath = getVideoFilePath(getActivity());
}
mMediaRecorder.setOutputFile(mNextVideoAbsolutePath);
mMediaRecorder.setVideoEncodingBitRate(10000000);
mMediaRecorder.setVideoFrameRate(30);
mMediaRecorder.setVideoSize(mVideoSize.getWidth(), mVideoSize.getHeight());
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
switch (mSensorOrientation) {
case SENSOR_ORIENTATION_DEFAULT_DEGREES:
mMediaRecorder.setOrientationHint(DEFAULT_ORIENTATIONS.get(rotation));
break;
case SENSOR_ORIENTATION_INVERSE_DEGREES:
mMediaRecorder.setOrientationHint(INVERSE_ORIENTATIONS.get(rotation));
break;
}
mMediaRecorder.prepare();
}
private String getVideoFilePath(Context context) {
final File dir = context.getExternalFilesDir(null);
return (dir == null ? "" : (dir.getAbsolutePath() + "/"))
+ System.currentTimeMillis() + ".mp4";
}
private void startRecordingVideo() {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
return;
}
try {
closePreviewSession();
setUpMediaRecorder();
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
List<Surface> surfaces = new ArrayList<>();
// Set up Surface for the camera preview
Surface previewSurface = new Surface(texture);
surfaces.add(previewSurface);
mPreviewBuilder.addTarget(previewSurface);
// Set up Surface for the MediaRecorder
Surface recorderSurface = mMediaRecorder.getSurface();
surfaces.add(recorderSurface);
mPreviewBuilder.addTarget(recorderSurface);
// Start a capture session
// Once the session starts, we can update the UI and start recording
mCameraDevice.createCaptureSession(surfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession cameraCaptureSession) {
mPreviewSession = cameraCaptureSession;
updatePreview();
getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
// UI
mButtonVideo.setText(R.string.stop);
mIsRecordingVideo = true;
// Start recording
mMediaRecorder.start();
}
});
}
@Override
public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
Activity activity = getActivity();
if (null != activity) {
Toast.makeText(activity, "Failed", Toast.LENGTH_SHORT).show();
}
}
}, mBackgroundHandler);
} catch (CameraAccessException | IOException e) {
e.printStackTrace();
}
}
private void closePreviewSession() {
if (mPreviewSession != null) {
mPreviewSession.close();
mPreviewSession = null;
}
}
private void stopRecordingVideo() {
// UI
mIsRecordingVideo = false;
mButtonVideo.setText(R.string.record);
// Stop recording
mMediaRecorder.stop();
mMediaRecorder.reset();
Activity activity = getActivity();
if (null != activity) {
Toast.makeText(activity, "Video saved: " + mNextVideoAbsolutePath,
Toast.LENGTH_SHORT).show();
Log.d(TAG, "Video saved: " + mNextVideoAbsolutePath);
}
mNextVideoAbsolutePath = null;
startPreview();
}
/**
* Compares two {@code Size}s based on their areas.
*/
static class CompareSizesByArea implements Comparator<Size> {
@Override
public int compare(Size lhs, Size rhs) {
// We cast here to ensure the multiplications won't overflow
return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
(long) rhs.getWidth() * rhs.getHeight());
}
}
public static class ErrorDialog extends DialogFragment {
private static final String ARG_MESSAGE = "message";
public static ErrorDialog newInstance(String message) {
ErrorDialog dialog = new ErrorDialog();
Bundle args = new Bundle();
args.putString(ARG_MESSAGE, message);
dialog.setArguments(args);
return dialog;
}
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
final Activity activity = getActivity();
return new AlertDialog.Builder(activity)
.setMessage(getArguments().getString(ARG_MESSAGE))
.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialogInterface, int i) {
activity.finish();
}
})
.create();
}
}
// public static class ConfirmationDialog extends DialogFragment {
//
// @Override
// public Dialog onCreateDialog(Bundle savedInstanceState) {
// final Fragment parent = getParentFragment();
// return new AlertDialog.Builder(getActivity())
// .setMessage(R.string.permission_request)
// .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
// @Override
// public void onClick(DialogInterface dialog, int which) {
// FragmentCompat.requestPermissions(parent, VIDEO_PERMISSIONS,
// REQUEST_VIDEO_PERMISSIONS);
// }
// })
// .setNegativeButton(android.R.string.cancel,
// new DialogInterface.OnClickListener() {
// @Override
// public void onClick(DialogInterface dialog, int which) {
// parent.getActivity().finish();
// }
// })
// .create();
// }
//
// }
}
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.cameraservicedemo.video;
import android.app.Activity;
import android.os.Bundle;
import com.example.cameraservicedemo.R;
public class CameraActivity extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_camera);
if (null == savedInstanceState) {
getFragmentManager().beginTransaction()
.replace(R.id.container, Camera2VideoFragment.newInstance())
.commit();
}
}
}
<?xml version="1.0" encoding="utf-8"?><!--
Copyright 2014 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:id="@+id/container"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="#000" />
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="vertical" android:layout_width="match_parent"
android:layout_height="match_parent">
<TextureView
android:id="@+id/texture_view"
android:layout_width="match_parent"
android:layout_height="match_parent" />
<LinearLayout
android:layout_width="match_parent"
android:layout_height="match_parent">
<Button
android:id="@+id/btn_open_camera"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:padding="10dp"
android:text="打开摄像头"/>
<Button
android:id="@+id/btn_close_camera"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:padding="10dp"
android:text="关闭摄像头"/>
</LinearLayout>
</FrameLayout>
\ No newline at end of file
......@@ -3,14 +3,6 @@
android:orientation="vertical" android:layout_width="match_parent"
android:layout_height="match_parent"
>
<Button
android:id="@+id/btn_camera2"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:padding="20dp"
android:text="Camera2"
/>
<Button
android:id="@+id/btn_camera1"
android:layout_width="wrap_content"
......@@ -18,26 +10,4 @@
android:padding="20dp"
android:text="camera1"
/>
<Button
android:id="@+id/btn_preview"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:padding="20dp"
android:text="btn_preview"
/>
<Button
android:id="@+id/btn_video"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:padding="20dp"
android:text="video"
/>
<Button
android:id="@+id/btn_mtk_camera"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:padding="20dp"
android:text="MTK Camera"
/>
</LinearLayout>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="vertical" android:layout_width="match_parent"
android:layout_height="match_parent">
<FrameLayout
android:id="@+id/big_layout"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<SurfaceView
android:id="@+id/sv_mkt"
android:layout_width="match_parent"
android:layout_height="match_parent" />
</FrameLayout>
<LinearLayout
android:layout_width="match_parent"
android:layout_height="match_parent">
<Button
android:id="@+id/btn_open_camera"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:padding="10dp"
android:text="打开摄像头"/>
<Button
android:id="@+id/btn_close_camera"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:padding="10dp"
android:text="关闭摄像头"/>
</LinearLayout>
</FrameLayout>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="@color/colorPrimary">
<!-- <android.support.v7.widget.Toolbar-->
<!-- android:id="@+id/toolbar"-->
<!-- android:layout_width="match_parent"-->
<!-- android:layout_height="?android:actionBarSize"-->
<!-- android:background="@color/colorPrimary"-->
<!-- app:contentInsetStart="0dp">-->
<LinearLayout
android:id="@+id/toolbar"
android:layout_width="match_parent"
android:layout_height="?android:actionBarSize"
android:orientation="horizontal"
android:padding="10dp">
<ImageView
android:id="@+id/toolbar_close_iv"
android:layout_width="45dp"
android:layout_height="45dp"
android:layout_gravity="center_vertical"
android:padding="10dp"
android:src="@mipmap/ic_close"/>
<Space
android:layout_width="0dp"
android:layout_height="0dp"
android:layout_weight="1"/>
<ImageView
android:id="@+id/toolbar_switch_iv"
android:layout_width="45dp"
android:layout_height="45dp"
android:layout_gravity="center_vertical"
android:padding="10dp"
android:src="@mipmap/ic_camera_switch"/>
</LinearLayout>
<!-- </android.support.v7.widget.Toolbar>-->
<com.example.cameraservicedemo.getpreview.Camera2View
android:id="@+id/camera_view"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_below="@+id/toolbar">
</com.example.cameraservicedemo.getpreview.Camera2View>
<ImageView
android:id="@+id/take_picture_iv"
android:layout_width="60dp"
android:layout_height="60dp"
android:layout_alignParentBottom="true"
android:layout_centerHorizontal="true"
android:layout_marginBottom="20dp"
android:src="@mipmap/ic_launcher"/>
</RelativeLayout>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?><!--
Copyright 2014 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<com.example.cameraservicedemo.video.AutoFitTextureView
android:id="@+id/texture"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentStart="true"
android:layout_alignParentTop="true" />
<FrameLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_alignParentBottom="true"
android:layout_alignParentStart="true"
android:layout_below="@id/texture"
android:background="#4285f4">
<Button
android:id="@+id/video"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:text="@string/record" />
<ImageButton
android:id="@+id/info"
android:contentDescription="@string/description_info"
style="@android:style/Widget.Material.Light.Button.Borderless"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center_vertical|right"
android:padding="20dp"
android:src="@mipmap/ic_launcher" />
</FrameLayout>
</RelativeLayout>
There is only one important class:
com.autonavi.amapauto.gdarcameraservicedemo.GDArCameraService.java
The AIDL server-side source code is in the directory:
com.autonavi.amapauto.gdarcameraservicedemo
The important class is:
com.autonavi.amapauto.gdarcameraservicedemo.GDArCameraService.java
All the other classes only exercise the API for testing.
\ No newline at end of file
Everything under the directory com.example.cameraservicedemo is only a test demo.
\ No newline at end of file
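For reference, a client of this AIDL service would typically bind with an explicit Intent and convert the returned IBinder through the generated Stub. The sketch below is a hedged illustration: the application package passed to ComponentName and the IGDArCameraService interface name are assumptions (the real AIDL contract is defined in the service module); only the service class name quoted above comes from this repository.

package com.example.clientdemo; // hypothetical client package

import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.os.IBinder;

// Hedged sketch of binding to the AR camera service from a separate app.
public class ArCameraClient {
public void bind(Context context) {
Intent intent = new Intent();
intent.setComponent(new ComponentName(
"com.autonavi.amapauto.gdarcameraservicedemo", // assumed application package of the service APK
"com.autonavi.amapauto.gdarcameraservicedemo.GDArCameraService"));
context.bindService(intent, new ServiceConnection() {
@Override
public void onServiceConnected(ComponentName name, IBinder binder) {
// Convert the binder with the generated Stub, e.g.
// IGDArCameraService camera = IGDArCameraService.Stub.asInterface(binder);
// (interface name assumed; see the .aidl files shipped with the service)
}

@Override
public void onServiceDisconnected(ComponentName name) {
// The service process died; rebind if camera frames are still needed.
}
}, Context.BIND_AUTO_CREATE);
}
}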