Commit fdf568cc by maiqh

init

parents
*.iml
.gradle
/local.properties
/.idea/caches
/.idea/libraries
/.idea/modules.xml
/.idea/workspace.xml
/.idea/navEditor.xml
/.idea/assetWizardSettings.xml
.DS_Store
/build
/captures
.externalNativeBuild
.cxx
<!--
  Android Studio project code-style configuration (.idea/codeStyles/Project.xml).
  Defines the XML attribute arrangement order used when reformatting Android
  resource files: xmlns:android first, remaining xmlns:* by name, then
  android:id, android:name, bare name/style attributes, all other
  no-namespace attributes by name, Android-namespace attributes in the
  standard Android order, and finally everything else by name.
-->
<component name="ProjectCodeStyleConfiguration">
<code_scheme name="Project" version="173">
<codeStyleSettings language="XML">
<indentOptions>
<option name="CONTINUATION_INDENT_SIZE" value="4" />
</indentOptions>
<arrangement>
<rules>
<section>
<rule>
<match>
<AND>
<NAME>xmlns:android</NAME>
<XML_ATTRIBUTE />
<XML_NAMESPACE>^$</XML_NAMESPACE>
</AND>
</match>
</rule>
</section>
<section>
<rule>
<match>
<AND>
<NAME>xmlns:.*</NAME>
<XML_ATTRIBUTE />
<XML_NAMESPACE>^$</XML_NAMESPACE>
</AND>
</match>
<order>BY_NAME</order>
</rule>
</section>
<section>
<rule>
<match>
<AND>
<NAME>.*:id</NAME>
<XML_ATTRIBUTE />
<XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
</AND>
</match>
</rule>
</section>
<section>
<rule>
<match>
<AND>
<NAME>.*:name</NAME>
<XML_ATTRIBUTE />
<XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
</AND>
</match>
</rule>
</section>
<section>
<rule>
<match>
<AND>
<NAME>name</NAME>
<XML_ATTRIBUTE />
<XML_NAMESPACE>^$</XML_NAMESPACE>
</AND>
</match>
</rule>
</section>
<section>
<rule>
<match>
<AND>
<NAME>style</NAME>
<XML_ATTRIBUTE />
<XML_NAMESPACE>^$</XML_NAMESPACE>
</AND>
</match>
</rule>
</section>
<section>
<rule>
<match>
<AND>
<NAME>.*</NAME>
<XML_ATTRIBUTE />
<XML_NAMESPACE>^$</XML_NAMESPACE>
</AND>
</match>
<order>BY_NAME</order>
</rule>
</section>
<section>
<rule>
<match>
<AND>
<NAME>.*</NAME>
<XML_ATTRIBUTE />
<XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
</AND>
</match>
<order>ANDROID_ATTRIBUTE_ORDER</order>
</rule>
</section>
<section>
<rule>
<match>
<AND>
<NAME>.*</NAME>
<XML_ATTRIBUTE />
<XML_NAMESPACE>.*</XML_NAMESPACE>
</AND>
</match>
<order>BY_NAME</order>
</rule>
</section>
</rules>
</arrangement>
</codeStyleSettings>
</code_scheme>
</component>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<!--
  Android Studio Gradle integration settings (.idea/gradle.xml).
  Links the root project and the :app module; tests run on the IDE
  ("PLATFORM") runner and the wrapper distribution is used.
-->
<project version="4">
<component name="GradleMigrationSettings" migrationVersion="1" />
<component name="GradleSettings">
<option name="linkedExternalProjectsSettings">
<GradleProjectSettings>
<option name="testRunner" value="PLATFORM" />
<option name="distributionType" value="DEFAULT_WRAPPED" />
<option name="externalProjectPath" value="$PROJECT_DIR$" />
<option name="gradleJvm" value="1.8" />
<option name="modules">
<set>
<option value="$PROJECT_DIR$" />
<option value="$PROJECT_DIR$/app" />
</set>
</option>
<option name="resolveModulePerSourceSet" value="false" />
</GradleProjectSettings>
</option>
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<!--
  IDE artifact-repository cache (.idea/jarRepositories.xml).
  NOTE(review): the file:/D:/SDK/... entries are machine-specific Windows
  paths from the original author's SDK install; they are harmless on other
  machines but will simply not resolve there.
-->
<project version="4">
<component name="RemoteRepositoriesConfiguration">
<remote-repository>
<option name="id" value="central" />
<option name="name" value="Maven Central repository" />
<option name="url" value="https://repo1.maven.org/maven2" />
</remote-repository>
<remote-repository>
<option name="id" value="jboss.community" />
<option name="name" value="JBoss Community repository" />
<option name="url" value="https://repository.jboss.org/nexus/content/repositories/public/" />
</remote-repository>
<remote-repository>
<option name="id" value="BintrayJCenter" />
<option name="name" value="BintrayJCenter" />
<option name="url" value="https://jcenter.bintray.com/" />
</remote-repository>
<remote-repository>
<option name="id" value="Google" />
<option name="name" value="Google" />
<option name="url" value="https://dl.google.com/dl/android/maven2/" />
</remote-repository>
<remote-repository>
<option name="id" value="D:\SDK\extras\m2repository" />
<option name="name" value="D:\SDK\extras\m2repository" />
<option name="url" value="file:/D:/SDK/extras/m2repository" />
</remote-repository>
<remote-repository>
<option name="id" value="D:\SDK\extras\google\m2repository" />
<option name="name" value="D:\SDK\extras\google\m2repository" />
<option name="url" value="file:/D:/SDK/extras/google/m2repository" />
</remote-repository>
<remote-repository>
<option name="id" value="D:\SDK\extras\android\m2repository" />
<option name="name" value="D:\SDK\extras\android\m2repository" />
<option name="url" value="file:/D:/SDK/extras/android/m2repository/" />
</remote-repository>
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<!--
  IDE project settings (.idea/misc.xml): JDK 1.8 with language level 7,
  compiler output under build/classes, Android project type.
-->
<project version="4">
<component name="ProjectRootManager" version="2" languageLevel="JDK_1_7" project-jdk-name="1.8" project-jdk-type="JavaSDK">
<output url="file://$PROJECT_DIR$/build/classes" />
</component>
<component name="ProjectType">
<option name="id" value="Android" />
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<!--
  IDE run-configuration settings (.idea/runConfigurations.xml): suppresses
  the Gradle test-runner producers so tests run via the platform runner.
-->
<project version="4">
<component name="RunConfigurationProducerService">
<option name="ignoredProducers">
<set>
<option value="org.jetbrains.plugins.gradle.execution.test.runner.AllInPackageGradleConfigurationProducer" />
<option value="org.jetbrains.plugins.gradle.execution.test.runner.TestClassGradleConfigurationProducer" />
<option value="org.jetbrains.plugins.gradle.execution.test.runner.TestMethodGradleConfigurationProducer" />
</set>
</option>
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<!-- IDE VCS mapping (.idea/vcs.xml): Git root is the parent directory. -->
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$/.." vcs="Git" />
</component>
</project>
\ No newline at end of file
/build
\ No newline at end of file
// Module-level Gradle build script for the AR camera demo application.
apply plugin: 'com.android.application'

android {
    compileSdkVersion 22
    defaultConfig {
        applicationId "com.cneeds.arcamera"
        minSdkVersion 21
        targetSdkVersion 22
        versionCode 1
        versionName "1.0"
    }
    buildTypes {
        release {
            // Shrinking/obfuscation disabled; proguard-rules.pro is kept for future use.
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
        }
    }
}

dependencies {
    // Vendor framework jars, intentionally disabled (Windows-style paths).
    //implementation files('libs\\mediatek-framework.jar')
    // implementation files('libs\\framework.jar')
}
\ No newline at end of file
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<!--
  Demo app manifest: declares camera/audio/location permissions and exposes
  GDArCameraService plus the ArCameraIdBroadcast receiver to AMAP Auto clients.
  Fix: removed a stray literal "..." line (copied from documentation) that is
  invalid content inside <manifest> and breaks manifest parsing.
-->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.example.cameraservicedemo">

    <uses-permission android:name="android.permission.CAMERA" />
    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
    <uses-feature android:name="android.hardware.camera" />
    <uses-permission android:name="android.permission.RECORD_AUDIO" />
    <uses-permission android:name="android.permission.ACCESS_FINE_LOCATION" />
    <!-- Needed only if your app targets Android 5.0 (API level 21) or higher. -->
    <uses-feature android:name="android.hardware.location.gps" />

    <application
        android:allowBackup="true"
        android:icon="@mipmap/ic_launcher"
        android:label="@string/app_name"
        android:supportsRtl="true"
        android:theme="@style/AppTheme">
        <!-- Launcher-integration metadata consumed by the cneeds launcher. -->
        <meta-data
            android:name="cneeds_launcher_item_view"
            android:value="com.example.cameraservicedemo.view.ItemView" />
        <meta-data
            android:name="cneeds_launcher_divider_view"
            android:value="com.example.cameraservicedemo.view.ItemView" />
        <meta-data
            android:name="cneeds_launcher_item_view_order_id"
            android:resource="@integer/launcher_index" />
        <activity android:name=".MainActivity">
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
        <activity android:name=".Camera2Activity"/>
        <!-- <activity android:name=".mtk.MTKCameraActivity"/>-->
        <activity android:name=".video.CameraActivity"/>
        <activity android:name=".getpreview.PreviewActivity"/>
        <activity android:name=".camera1.Camera1"/>
        <!-- AIDL camera service bound by the AMAP Auto AR-navigation client. -->
        <service android:name="com.autonavi.amapauto.gdarcameraservicedemo.GDArCameraService"
            android:exported="true">
            <intent-filter>
                <action android:name="com.cneeds.arcamera.action.service" />
            </intent-filter>
        </service>
        <receiver android:name="com.autonavi.amapauto.ArCameraIdBroadcast" android:exported="true">
            <intent-filter>
                <action android:name="AUTONAVI_STANDARD_BROADCAST_SEND"/>
            </intent-filter>
        </receiver>
    </application>
</manifest>
\ No newline at end of file
// IGDCameraService.aidl
package com.autonavi.amapauto.gdarcameraservice;

import com.autonavi.amapauto.gdarcameraservice.IGDCameraStateCallBack;
import android.view.Surface;
import com.autonavi.amapauto.gdarcameraservice.IGDSize;
import com.autonavi.amapauto.gdarcameraservice.model.GDArCameraParam;

/**
 * AMAP (Gaode) AR-navigation standard AIDL protocol interface.
 *
 * Camera service interface.
 */
interface IGDCameraService {
    /**
     * Registers a camera-state listener.
     * @param clientId client ID
     * @param gdCameraStateCallBack state callback object
     *
     * @return true: success, false: failure
     */
    boolean registerCameraStateCallback(String clientId,IGDCameraStateCallBack gdCameraStateCallBack);
    /**
     * Unregisters a camera-state listener.
     * @param clientId client ID
     * @param gdCameraStateCallBack state callback object
     *
     * @return true: success, false: failure
     */
    boolean unregisterCameraStateCallback(String clientId,IGDCameraStateCallBack gdCameraStateCallBack);
    /**
     * Whether the device supports AR navigation.
     * @param clientId client ID
     *
     * @return true: supported, false: not supported
     */
    boolean isSupportArNavi(String clientId);
    /**
     * Gets the recommended supported image size.
     * @param clientId client ID
     *
     * @return IGDSize: recommended image-size object
     */
    IGDSize getRecommendSize(String clientId);
    /**
     * Whether the camera is connected. It is recommended to report "connected"
     * only once the camera has opened successfully. Note: this is about the
     * camera hardware, not about the IGDCameraService binder connection.
     * @param clientId client ID
     *
     * @return true: connected, false: not connected
     */
    boolean isCameraConnected(String clientId);
    /**
     * Whether the camera is open. It is recommended to report "open" only once
     * the camera has opened successfully and the first frame has been returned.
     * @param clientId client ID
     *
     * @return true: open, false: not open
     */
    boolean isCameraOpened(String clientId);
    /**
     * Initializes the camera parameters.
     * @param clientId client ID
     * @param gdArCameraParam camera parameters (image format, see {@link ImageFormat};
     *        data type, see {@link DataType}; camera use type, see {@link CameraUseType};
     *        image width/height)
     * @param surface Surface object used for rendering/preview
     *
     * @return true: success, false: failure
     */
    //boolean initCamera(String clientId,int imageFormat, int dataType, int cameraUseType,int imageWidth, int imageHeight, in Surface surface);
    boolean initCamera(String clientId,in GDArCameraParam gdArCameraParam, in Surface surface);
    /**
     * Opens the camera.
     * @param clientId client ID
     *
     * @return true: success, false: failure
     */
    boolean openCamera(String clientId);
    /**
     * Closes the camera.
     * @param clientId client ID
     *
     * @return true: success, false: failure
     */
    boolean closeCamera(String clientId);
    /**
     * Releases camera resources — mainly resources the client handed to the
     * service; resources the service created itself must be released by the
     * service at an appropriate time.
     * @param clientId client ID
     *
     * @return true: success, false: failure
     */
    boolean unInitCamera(String clientId);
}
// IGDCameraStateCallBack.aidl
package com.autonavi.amapauto.gdarcameraservice;

import android.os.ParcelFileDescriptor;
import com.autonavi.amapauto.gdarcameraservice.model.ArCameraOpenResultParam;

/**
 * AMAP (Gaode) AR-navigation standard AIDL protocol interface.
 *
 * Camera state callback interface.
 */
interface IGDCameraStateCallBack{
    /**
     * Called when the camera is connected.
     */
    void onConnected();
    /**
     * Called when the camera is disconnected.
     */
    void onDisconnected();
    /**
     * Called when the camera has been opened. It is recommended to invoke this
     * only after the camera opened successfully, the first frame was produced,
     * and the shared memory was opened successfully.
     * @param parcelFileDescriptor file descriptor (shared memory)
     * @param arCameraOpenResultParam result parameters returned after opening the camera
     * @param memoryfileName shared-memory file name
     */
    void onOpened(in ParcelFileDescriptor parcelFileDescriptor, in ArCameraOpenResultParam arCameraOpenResultParam, String memoryfileName);
    /**
     * Called when the camera is closed.
     * @param code close-reason code
     * @param message corresponding message
     */
    void onClosed(int code, String message);
    /**
     * Called on camera-related errors.
     * @param code error-reason code
     * @param message corresponding message
     */
    void onError(int code, String message);
}
\ No newline at end of file
// IGDSize.aidl
package com.autonavi.amapauto.gdarcameraservice;

/**
 * AMAP (Gaode) AR-navigation standard AIDL protocol interface.
 *
 * Image-size interface.
 */
interface IGDSize {
    /**
     * Gets the image width.
     * @return image width in pixels
     */
    int getWidth();
    /**
     * Gets the image height.
     * @return image height in pixels
     */
    int getHeight();
}
package com.autonavi.amapauto.gdarcameraservice.model;

// AIDL declaration for the Parcelable implemented in
// com.autonavi.amapauto.gdarcameraservice.model.ArCameraOpenResultParam.
parcelable ArCameraOpenResultParam;
\ No newline at end of file
package com.autonavi.amapauto.gdarcameraservice.model;

// AIDL declaration for the Parcelable implemented in
// com.autonavi.amapauto.gdarcameraservice.model.GDArCameraParam.
parcelable GDArCameraParam;
\ No newline at end of file
package com.autonavi.amapauto;

import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;

/**
 * Replies to the AMAP Auto standard broadcast (KEY_TYPE 12116) with this
 * device's AR-camera hardware description.
 */
public class ArCameraIdBroadcast extends BroadcastReceiver {

    @Override
    public void onReceive(Context context, Intent intent) {
        if (!"AUTONAVI_STANDARD_BROADCAST_SEND".equals(intent.getAction())) {
            return;
        }
        int keyType = intent.getIntExtra("KEY_TYPE", -1);
        if (keyType != 12116) {
            return;
        }
        // Sent both in reply to AMAP's query and proactively whenever the
        // camera connection state changes on our side.
        Intent reply = new Intent("AUTONAVI_STANDARD_BROADCAST_RECV");
        reply.putExtra("KEY_TYPE", 12116);
        reply.putExtra("productName", "智能网联车盒");
        reply.putExtra("productModel", "M2");
        reply.putExtra("cameraDisplay", "1280x720");
        reply.putExtra("cameraName", "C2390");
        reply.putExtra("imu", "MPU-6050");
        reply.putExtra("cameraConnect", "MIPI");
        context.sendBroadcast(reply);
    }
}
package com.autonavi.amapauto.camera1;

import android.hardware.Camera;
import android.util.Log;
import android.view.SurfaceHolder;

import java.io.IOException;

/**
 * Thin wrapper around the legacy android.hardware.Camera API that opens the
 * default camera and drives its preview through a SurfaceHolder.
 *
 * Fix: the preview-restart logic previously lived in surfaceDestroyed() —
 * restarting preview on a surface that is being destroyed — while
 * surfaceChanged() was empty. The restart now happens in surfaceChanged(),
 * and surfaceDestroyed() stops frame delivery. The duplicated preview-start
 * code is factored into startPreview().
 */
public class Camera1Helper {
    private final static String TAG = "Camera1Helper";
    private Camera mCamera;

    /**
     * Opens the default camera.
     *
     * @return the opened Camera, or null if it is unavailable (in use or absent)
     */
    public Camera open() {
        mCamera = null;
        try {
            mCamera = Camera.open(); // attempt to get a Camera instance
        } catch (Exception e) {
            // Camera is not available (in use or does not exist)
            Log.w(TAG, "Camera.open failed: " + e.getMessage());
        }
        return mCamera; // returns null if camera is unavailable
    }

    /** Attaches the surface callback that drives the preview lifecycle. */
    public void setHolder(SurfaceHolder mHolder) {
        mHolder.addCallback(callback);
        // deprecated setting, but required on Android versions prior to 3.0
        mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    /** Binds the camera preview to the surface and logs each preview frame. */
    private void startPreview(SurfaceHolder holder) throws IOException {
        mCamera.setPreviewDisplay(holder);
        mCamera.setPreviewCallback(new Camera.PreviewCallback() {
            @Override
            public void onPreviewFrame(byte[] data, Camera camera) {
                Log.d(TAG, "onPreviewFrame : " + data);
            }
        });
        mCamera.startPreview();
    }

    SurfaceHolder.Callback callback = new SurfaceHolder.Callback() {
        @Override
        public void surfaceCreated(SurfaceHolder holder) {
            // The Surface has been created, now tell the camera where to draw the preview.
            try {
                startPreview(holder);
            } catch (IOException e) {
                Log.d(TAG, "Error setting camera preview: " + e.getMessage());
            }
        }

        @Override
        public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
            // If your preview can change or rotate, take care of those events here.
            // Make sure to stop the preview before resizing or reformatting it.
            if (holder.getSurface() == null) {
                // preview surface does not exist
                return;
            }
            // stop preview before making changes
            try {
                mCamera.stopPreview();
            } catch (Exception e) {
                // ignore: tried to stop a non-existent preview
            }
            // start preview with the (possibly resized/reformatted) surface
            try {
                startPreview(holder);
            } catch (Exception e) {
                Log.d(TAG, "Error starting camera preview: " + e.getMessage());
            }
        }

        @Override
        public void surfaceDestroyed(SurfaceHolder holder) {
            // The surface is going away: stop delivering frames to it.
            if (mCamera != null) {
                try {
                    mCamera.setPreviewCallback(null);
                    mCamera.stopPreview();
                } catch (Exception e) {
                    // ignore: preview was not running
                }
            }
        }
    };
}
package com.autonavi.amapauto.gdarcameraservice;
public enum CameraUseType {
COMMON_CAMERA(0),
AR_NAVI_CAMERA(1);
private int type;
private CameraUseType(int type) {
this.type = type;
}
public int getType() {
return this.type;
}
}
package com.autonavi.amapauto.gdarcameraservice;
public enum DataType {
YUV(0),
H264(1);
private int type;
private DataType(int type) {
this.type = type;
}
public int getType() {
return this.type;
}
}
package com.autonavi.amapauto.gdarcameraservice;
public enum ImageFormat {
NV21_822(0),
I420_822(1),
RGB_888(2),
RGBA_8888(3),
YV12(4);
private int format;
private ImageFormat(int format) {
this.format = format;
}
public int getFormat() {
return this.format;
}
}
package com.autonavi.amapauto.gdarcameraservice;
public enum MemoryFileFlag {
CAN_READ((byte)0),
CAN_WRITE((byte)1);
private byte flag;
private MemoryFileFlag(byte flag) {
this.flag = flag;
}
public byte getFlag() {
return this.flag;
}
}
\ No newline at end of file
package com.autonavi.amapauto.gdarcameraservice;

import android.os.RemoteException;

/**
 * Simple mutable width/height pair describing a recommended image size.
 */
public class RecommendSize {

    private int width;
    private int height;

    /** Creates a size from explicit dimensions. */
    public RecommendSize(int width, int height) {
        this.width = width;
        this.height = height;
    }

    /**
     * Copies the dimensions out of a remote {@link IGDSize}.
     *
     * @throws RemoteException if the remote getWidth/getHeight calls fail
     */
    public RecommendSize(IGDSize gdSize) throws RemoteException {
        width = gdSize.getWidth();
        height = gdSize.getHeight();
    }

    public void setWidth(int width) {
        this.width = width;
    }

    public void setHeight(int height) {
        this.height = height;
    }

    public int getWidth() {
        return width;
    }

    public int getHeight() {
        return height;
    }
}
package com.autonavi.amapauto.gdarcameraservice.model;

import android.os.Parcel;
import android.os.Parcelable;

/**
 * Result parameters returned after opening the camera. The client uses the
 * values in this class, as returned by the service, to make the final
 * correction to its buffer sizes.
 *
 * Parcel order (read and write must stay in sync): imageFormat, dataType,
 * cameraUseType, imageWidth, imageHeight, cameraId, imageSize.
 */
public class ArCameraOpenResultParam implements Parcelable {
    /**
     * Image data format actually supported, as returned by the camera service.
     */
    public int imageFormat;
    /**
     * Data type actually supported, as returned by the camera service.
     */
    public int dataType;
    /**
     * Camera use type actually supported, as returned by the camera service.
     */
    public int cameraUseType;
    /**
     * Image width actually supported, as returned by the camera service.
     */
    public int imageWidth;
    /**
     * Image height actually supported, as returned by the camera service.
     */
    public int imageHeight;
    /**
     * Camera ID actually used, as returned by the camera service.
     */
    public String cameraId;
    /**
     * Size of the image payload inside shared memory, as returned by the
     * camera service.
     */
    public int imageSize;

    public ArCameraOpenResultParam() {
    }

    public ArCameraOpenResultParam(int imageWidth, int imageHeight, int imageSize) {
        this.imageWidth = imageWidth;
        this.imageHeight = imageHeight;
        this.imageSize = imageSize;
    }

    public ArCameraOpenResultParam(int imageFormat, int imageWidth, int imageHeight, int imageSize) {
        this.imageFormat = imageFormat;
        this.imageWidth = imageWidth;
        this.imageHeight = imageHeight;
        this.imageSize = imageSize;
    }

    public ArCameraOpenResultParam(int imageFormat, int imageWidth, int imageHeight, int imageSize, String cameraId) {
        this.imageFormat = imageFormat;
        this.imageWidth = imageWidth;
        this.imageHeight = imageHeight;
        this.imageSize = imageSize;
        this.cameraId = cameraId;
    }

    public ArCameraOpenResultParam(int imageFormat, int dataType, int cameraUseType, int imageWidth, int imageHeight, int imageSize, String cameraId) {
        this.imageFormat = imageFormat;
        this.dataType = dataType;
        this.cameraUseType = cameraUseType;
        this.imageWidth = imageWidth;
        this.imageHeight = imageHeight;
        this.imageSize = imageSize;
        this.cameraId = cameraId;
    }

    public int getImageFormat() {
        return imageFormat;
    }

    public void setImageFormat(int imageFormat) {
        this.imageFormat = imageFormat;
    }

    public int getDataType() {
        return dataType;
    }

    public void setDataType(int dataType) {
        this.dataType = dataType;
    }

    public int getCameraUseType() {
        return cameraUseType;
    }

    public void setCameraUseType(int cameraUseType) {
        this.cameraUseType = cameraUseType;
    }

    public int getImageWidth() {
        return imageWidth;
    }

    public void setImageWidth(int imageWidth) {
        this.imageWidth = imageWidth;
    }

    public int getImageHeight() {
        return imageHeight;
    }

    public void setImageHeight(int imageHeight) {
        this.imageHeight = imageHeight;
    }

    public String getCameraId() {
        return cameraId;
    }

    public void setCameraId(String cameraId) {
        this.cameraId = cameraId;
    }

    public int getImageSize() {
        return imageSize;
    }

    public void setImageSize(int imageSize) {
        this.imageSize = imageSize;
    }

    // Reads fields in the exact order writeToParcel() writes them.
    protected ArCameraOpenResultParam(Parcel in) {
        imageFormat = in.readInt();
        dataType = in.readInt();
        cameraUseType = in.readInt();
        imageWidth = in.readInt();
        imageHeight = in.readInt();
        cameraId = in.readString();
        imageSize = in.readInt();
    }

    public static final Creator<ArCameraOpenResultParam> CREATOR = new Creator<ArCameraOpenResultParam>() {
        @Override
        public ArCameraOpenResultParam createFromParcel(Parcel in) {
            return new ArCameraOpenResultParam(in);
        }

        @Override
        public ArCameraOpenResultParam[] newArray(int size) {
            return new ArCameraOpenResultParam[size];
        }
    };

    @Override
    public int describeContents() {
        return 0;
    }

    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeInt(imageFormat);
        dest.writeInt(dataType);
        dest.writeInt(cameraUseType);
        dest.writeInt(imageWidth);
        dest.writeInt(imageHeight);
        dest.writeString(cameraId);
        dest.writeInt(imageSize);
    }

    @Override
    public String toString() {
        // NOTE(review): prefix says "ArCameraParam" rather than the class
        // name "ArCameraOpenResultParam" — consider aligning if no log
        // consumers depend on the current text.
        return "ArCameraParam{" +
        "imageFormat=" + imageFormat +
        ", dataType=" + dataType +
        ", cameraUseType=" + cameraUseType +
        ", imageWidth=" + imageWidth +
        ", imageHeight=" + imageHeight +
        ", imageSize=" + imageSize +
        ", cameraId='" + cameraId + '\'' +
        '}';
    }
}
package com.autonavi.amapauto.gdarcameraservice.model;

import android.os.Parcel;
import android.os.Parcelable;

/**
 * Camera-initialization parameters the AR client sends to the camera service
 * via IGDCameraService.initCamera().
 *
 * Parcel order (read and write must stay in sync): imageFormat, dataType,
 * cameraUseType, imageWidth, imageHeight, cameraId.
 */
public class GDArCameraParam implements Parcelable {
    public int imageFormat;   // requested image format
    public int dataType;      // requested data type (e.g. YUV / H264)
    public int cameraUseType; // requested camera use type
    public int imageWidth;    // requested image width in pixels
    public int imageHeight;   // requested image height in pixels
    public String cameraId;   // requested camera id

    public GDArCameraParam() {
    }

    // Reads fields in the exact order writeToParcel() writes them.
    protected GDArCameraParam(Parcel in) {
        imageFormat = in.readInt();
        dataType = in.readInt();
        cameraUseType = in.readInt();
        imageWidth = in.readInt();
        imageHeight = in.readInt();
        cameraId = in.readString();
    }

    public static final Creator<GDArCameraParam> CREATOR = new Creator<GDArCameraParam>() {
        @Override
        public GDArCameraParam createFromParcel(Parcel in) {
            return new GDArCameraParam(in);
        }

        @Override
        public GDArCameraParam[] newArray(int size) {
            return new GDArCameraParam[size];
        }
    };

    @Override
    public int describeContents() {
        return 0;
    }

    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeInt(imageFormat);
        dest.writeInt(dataType);
        dest.writeInt(cameraUseType);
        dest.writeInt(imageWidth);
        dest.writeInt(imageHeight);
        dest.writeString(cameraId);
    }

    @Override
    public String toString() {
        // Fix: previously logged with the misleading prefix "ArCameraParam{"
        // (copy-paste from the sibling result class).
        return "GDArCameraParam{" +
                "imageFormat=" + imageFormat +
                ", dataType=" + dataType +
                ", cameraUseType=" + cameraUseType +
                ", imageWidth=" + imageWidth +
                ", imageHeight=" + imageHeight +
                ", cameraId='" + cameraId + '\'' +
                '}';
    }
}
package com.autonavi.amapauto.gdarcameraservicedemo;

import android.content.Context;
import android.graphics.ImageFormat;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.TotalCaptureResult;
import android.media.Image;
import android.media.ImageReader;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.MemoryFile;
import android.os.ParcelFileDescriptor;
import android.os.RemoteException;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
import com.autonavi.amapauto.gdarcameraservice.IGDCameraStateCallBack;
import com.autonavi.amapauto.gdarcameraservice.model.ArCameraOpenResultParam;
import com.autonavi.amapauto.gdarcameraservicedemo.utils.SharedMemUtils;
import java.io.FileDescriptor;
import java.io.IOException;
import java.lang.reflect.Method;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * Camera2-based helper that opens a camera on a background thread, copies
 * each frame into an ashmem MemoryFile, and publishes the frames to an AR
 * client through {@link IGDCameraStateCallBack#onOpened}.
 *
 * Fixes relative to the original:
 *  - onImageAvailable closed the ImageReader (reader.close()) after the
 *    first frame, killing the pipeline and leaking the acquired Image;
 *    the acquired Image is closed instead.
 *  - createCaptureSession added mSurface as a request target but did not
 *    include it in the session output list, which makes
 *    setRepeatingRequest() throw IllegalArgumentException; the output list
 *    now contains every request target.
 *  - CameraCharacteristics lookups are null-guarded.
 */
public class CameraServiceHelper {
    private static final String TAG = "CameraServiceHelper";
    /** Name of the ashmem region shared with the AR client. */
    private static final String MEMORY_FILE = "cneeds_camera_memory";
    private final static int PREVIEW_WIDTH = 720;
    private final static int PREVIEW_HEIGHT = 1280;
    public final static int SAVE_WIDTH = 720;
    public final static int SAVE_HEIGHT = 1280;
    private boolean mIsCameraOpen = false;
    // Frame payload size for the preview resolution plus one spare byte.
    private final int MEMORY_SIZE = 3133440 + 1;
    private CameraManager mCameraManager;
    private ImageReader mImageReader;
    private CameraDevice mCameraDevice;
    private CameraCaptureSession mCameraCaptureSession;
    private String mCameraId = "0";
    private CameraCharacteristics mCameraCharacteristics;
    private int mCameraSensorOrientation = 0; // camera sensor orientation
    private int mCameraFacing = CameraCharacteristics.LENS_FACING_BACK; // default: back-facing camera
    private int mDisplayRotation; // device orientation
    private boolean canTakePic = true; // whether a still capture may be taken
    private boolean canExchangeCamera = false; // whether switching cameras is allowed
    private Handler mCameraHandler;
    private HandlerThread handlerThread = new HandlerThread("CameraThread");
    private Size mPreviewSize = new Size(PREVIEW_WIDTH, PREVIEW_HEIGHT); // preview size
    private Size mSavePicSize = new Size(SAVE_WIDTH, SAVE_HEIGHT); // saved picture size
    private IGDCameraStateCallBack mStateCallBack;
    private MemoryFile mMemoryFile;
    private Surface mSurface;

    public CameraServiceHelper() {
        try {
            mMemoryFile = new MemoryFile(MEMORY_FILE, MEMORY_SIZE);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /** Sets the AIDL callback used to publish camera state and frames. */
    public void setStateCallBack(IGDCameraStateCallBack stateCallBack) {
        mStateCallBack = stateCallBack;
    }

    /**
     * Selects a camera matching {@code mCameraFacing}, starts the background
     * handler thread and creates the YUV ImageReader.
     *
     * @param context application context used to obtain the CameraManager
     * @param surface optional client surface used as an extra preview target
     * @return true on success, false when no camera is usable
     */
    public boolean initCameraInfo(Context context, Surface surface) {
        handlerThread.start();
        mCameraHandler = new Handler(handlerThread.getLooper());
        mSurface = surface;
        mCameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
        try {
            String[] cameraIdList = mCameraManager.getCameraIdList();
            if (cameraIdList.length == 0) {
                Log.d(TAG, "没有相机可用");
                return false;
            }
            for (String id : cameraIdList) {
                CameraCharacteristics cameraCharacteristics = mCameraManager.getCameraCharacteristics(id);
                Integer facing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING);
                if (facing != null && facing == mCameraFacing) {
                    mCameraId = id;
                    mCameraCharacteristics = cameraCharacteristics;
                }
                Log.d(TAG, "设备中的摄像头" + mCameraId);
            }
            if (mCameraCharacteristics == null) {
                // No camera matched the requested facing; fall back to the first one.
                mCameraId = cameraIdList[0];
                mCameraCharacteristics = mCameraManager.getCameraCharacteristics(mCameraId);
            }
            Integer supportLevel = mCameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
            if (supportLevel != null && supportLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
                Log.d(TAG, "相机硬件不支持新特性");
            }
            // Read the camera sensor orientation.
            Integer sensorOrientation = mCameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            if (sensorOrientation != null) {
                mCameraSensorOrientation = sensorOrientation;
            }
            mImageReader = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(), ImageFormat.YUV_420_888, 1);
            mImageReader.setOnImageAvailableListener(onImageAvailableListener, mCameraHandler);
            return true;
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
        return false;
    }

    /**
     * Copies each frame into the shared MemoryFile and notifies the client
     * via onOpened() with a dup'ed ParcelFileDescriptor.
     */
    private ImageReader.OnImageAvailableListener onImageAvailableListener =
    new ImageReader.OnImageAvailableListener() {
        @Override
        public void onImageAvailable(ImageReader reader) {
            Image image = reader.acquireNextImage();
            if (image == null) {
                return;
            }
            byte[] bytes;
            try {
                ByteBuffer byteBuffer = image.getPlanes()[0].getBuffer();
                bytes = new byte[byteBuffer.remaining()];
                byteBuffer.get(bytes);
            } finally {
                // Fix: close the acquired Image (the original closed the
                // ImageReader itself, which stops all further frames).
                image.close();
            }
            log("拿到拍照照片数据");
            byte[] buffer = new byte[20 + bytes.length];
            SharedMemUtils.initHeader(buffer);
            boolean writable = SharedMemUtils.canWrite(buffer);
            if (writable) {
                SharedMemUtils.setOffset(buffer, 0);
                SharedMemUtils.setLength(buffer, 0);
                SharedMemUtils.setContentSize(buffer, bytes.length);
                SharedMemUtils.setContent(buffer, bytes);
                try {
                    // Written once; the region is cleared after the reader
                    // consumes it. Continuous writes without reads are not
                    // cleared — beware of overwrites (offset value).
                    mMemoryFile.writeBytes(buffer, 0, 0, buffer.length);
                    // MemoryFile does not expose its FileDescriptor publicly;
                    // fetched reflectively so it can be dup'ed across Binder.
                    Method getFileDescriptorMethod = mMemoryFile.getClass().getDeclaredMethod("getFileDescriptor");
                    if (getFileDescriptorMethod != null) {
                        FileDescriptor fileDescriptor = (FileDescriptor) getFileDescriptorMethod.invoke(mMemoryFile);
                        // Must be parceled before it can be sent.
                        ParcelFileDescriptor pfd = ParcelFileDescriptor.dup(fileDescriptor);
                        ArCameraOpenResultParam openResultParam = new ArCameraOpenResultParam();
                        openResultParam.cameraId = mCameraId;
                        openResultParam.imageHeight = PREVIEW_HEIGHT;
                        openResultParam.imageWidth = PREVIEW_WIDTH;
                        mStateCallBack.onOpened(pfd, openResultParam, MEMORY_FILE);
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    };

    /**
     * Opens the selected camera and forwards connection state to the client
     * callback. Must be called after initCameraInfo().
     *
     * @throws CameraAccessException if the camera cannot be opened
     */
    public void openCamera() throws CameraAccessException {
        mCameraManager.openCamera(mCameraId, new CameraDevice.StateCallback() {
            @Override
            public void onOpened(CameraDevice camera) {
                mCameraDevice = camera;
                try {
                    createCaptureSession(camera);
                } catch (CameraAccessException e) {
                    e.printStackTrace();
                }
                try {
                    mStateCallBack.onConnected();
                } catch (RemoteException e) {
                    e.printStackTrace();
                }
                mIsCameraOpen = true;
            }

            @Override
            public void onDisconnected(CameraDevice camera) {
                log("onDisconnected");
                try {
                    mStateCallBack.onDisconnected();
                } catch (RemoteException e) {
                    e.printStackTrace();
                }
                mIsCameraOpen = false;
            }

            @Override
            public void onError(CameraDevice camera, int error) {
                log("onError" + error);
                try {
                    mStateCallBack.onError(error, camera.getId());
                } catch (RemoteException e) {
                    e.printStackTrace();
                }
                mIsCameraOpen = false;
                releaseCamera();
                releaseThread();
            }
        }, mCameraHandler);
    }

    /**
     * Creates a repeating capture session targeting the ImageReader and, when
     * present, the client surface.
     */
    private void createCaptureSession(CameraDevice cameraDevice) throws CameraAccessException {
        final CaptureRequest.Builder builder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        builder.addTarget(mImageReader.getSurface());
        builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
        builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
        // Fix: every request target must also be a session output, otherwise
        // setRepeatingRequest() throws IllegalArgumentException.
        List<Surface> outputs = new ArrayList<>();
        outputs.add(mImageReader.getSurface());
        if (mSurface != null) {
            builder.addTarget(mSurface);
            outputs.add(mSurface);
        }
        cameraDevice.createCaptureSession(outputs, new CameraCaptureSession.StateCallback() {
            @Override
            public void onConfigured(CameraCaptureSession session) {
                mCameraCaptureSession = session;
                try {
                    session.setRepeatingRequest(builder.build(), captureCallback, mCameraHandler);
                } catch (CameraAccessException e) {
                    e.printStackTrace();
                }
            }

            @Override
            public void onConfigureFailed(CameraCaptureSession session) {
            }

            @Override
            public void onClosed(CameraCaptureSession session) {
                super.onClosed(session);
                releaseCamera();
                releaseThread();
                try {
                    mStateCallBack.onClosed(0, "onClosed");
                } catch (RemoteException e) {
                    e.printStackTrace();
                }
            }
        }, mCameraHandler);
    }

    /** Closes the session, device and reader, in that order. */
    public void releaseCamera() {
        if (mCameraCaptureSession != null) {
            mCameraCaptureSession.close();
            mCameraCaptureSession = null;
        }
        if (mCameraDevice != null) {
            mCameraDevice.close();
            mCameraDevice = null;
        }
        if (mImageReader != null) {
            mImageReader.close();
            mImageReader = null;
        }
        canExchangeCamera = false;
    }

    /** Stops and joins the background camera thread. */
    public void releaseThread() {
        handlerThread.quitSafely();
        try {
            handlerThread.join();
            handlerThread = null;
            mCameraHandler = null;
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    CameraCaptureSession.CaptureCallback captureCallback = new CameraCaptureSession.CaptureCallback() {
        @Override
        public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
            super.onCaptureCompleted(session, request, result);
            canExchangeCamera = true;
            canTakePic = true;
        }

        @Override
        public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
            super.onCaptureFailed(session, request, failure);
            log("onCaptureFailed");
        }
    };

    private void log(String msg) {
        Log.d(TAG, msg);
    }

    public boolean isIsCameraOpen() {
        return mIsCameraOpen;
    }

    public void setIsCameraOpen(boolean mIsCameraOpen) {
        this.mIsCameraOpen = mIsCameraOpen;
    }
}
package com.autonavi.amapauto.gdarcameraservicedemo;
import android.app.Service;
import android.content.Intent;
import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.os.IBinder;
import android.os.MemoryFile;
import android.os.ParcelFileDescriptor;
import android.os.RemoteException;
import android.util.Log;
import android.view.Surface;
import com.autonavi.amapauto.gdarcameraservice.IGDCameraService;
import com.autonavi.amapauto.gdarcameraservice.IGDCameraStateCallBack;
import com.autonavi.amapauto.gdarcameraservice.IGDSize;
import com.autonavi.amapauto.gdarcameraservice.model.ArCameraOpenResultParam;
import com.autonavi.amapauto.gdarcameraservice.model.GDArCameraParam;
import com.autonavi.amapauto.gdarcameraservicedemo.utils.SharedMemUtils;
import java.io.FileDescriptor;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.Arrays;
/**
 * AIDL-exposed camera service: opens the legacy {@link Camera}, streams NV21
 * preview frames into a named shared {@link MemoryFile}, and reports state to
 * the bound client through {@link IGDCameraStateCallBack}.
 *
 * Shared-memory layout is defined by {@link SharedMemUtils}: a
 * {@code HEADER_SIZE}-byte header followed by one NV21 frame.
 */
public class GDArCameraService extends Service {
    private final static String TAG = "GDArCameraService";
    private Camera mCamera;
    private Surface mSurface;
    private IGDCameraStateCallBack mGdCameraStateCallBack;
    /** Name of the ashmem region the client maps. */
    private static final String MEMORY_NAME = "cneeds_camera_memory";
    /** Kept for callers of this constant; always equals {@link SharedMemUtils#HEADER_SIZE}. */
    public static final int HEADER_SIZE = SharedMemUtils.HEADER_SIZE;
    /**
     * Frame width. Note: the car box supports a width of 1080, but phones may
     * reject it and fail to open the camera, so 1280 is used here.
     */
    private int imageWidth = 1280;
    /** Frame height. */
    private int imageHeight = 720;
    /**
     * NV21 frame size in bytes (width * height * 3 / 2). Falls back to a
     * 1280x720 frame size when either dimension is unset (fixed: the original
     * guard used &&, which only tripped when BOTH were zero).
     */
    private int dataSize = (imageWidth == 0 || imageHeight == 0) ? 1382400 :
            imageWidth * imageHeight * 3 / 2;
    private final int MEMORY_SIZE = dataSize + SharedMemUtils.HEADER_SIZE;
    private MemoryFile mMemoryFile;
    // True until the first frame is written; the header does not exist in
    // shared memory yet on the first send.
    private boolean isFirstSend = true;
    private boolean mIsCameraOpened;
    private boolean mISCameraConnect;
    private Camera.Size mPreviewSize;

    @Override
    public IBinder onBind(Intent intent) {
        try {
            mMemoryFile = new MemoryFile(MEMORY_NAME, MEMORY_SIZE);
        } catch (IOException e) {
            e.printStackTrace();
        }
        return binder;
    }

    private final IGDCameraService.Stub binder = new IGDCameraService.Stub() {
        @Override
        public boolean registerCameraStateCallback(String clientId, IGDCameraStateCallBack gdCameraStateCallBack) throws RemoteException {
            mGdCameraStateCallBack = gdCameraStateCallBack;
            return true;
        }

        @Override
        public boolean unregisterCameraStateCallback(String clientId, IGDCameraStateCallBack gdCameraStateCallBack) throws RemoteException {
            return false;
        }

        @Override
        public boolean isSupportArNavi(String clientId) throws RemoteException {
            return false;
        }

        @Override
        public IGDSize getRecommendSize(String clientId) throws RemoteException {
            return new IGDSize.Stub() {
                @Override
                public int getWidth() throws RemoteException {
                    // mPreviewSize is only known once the preview has started;
                    // fall back to the configured size instead of NPE-ing.
                    return mPreviewSize != null ? mPreviewSize.width : imageWidth;
                }

                @Override
                public int getHeight() throws RemoteException {
                    return mPreviewSize != null ? mPreviewSize.height : imageHeight;
                }
            };
        }

        @Override
        public boolean isCameraConnected(String clientId) throws RemoteException {
            return mISCameraConnect;
        }

        @Override
        public boolean isCameraOpened(String clientId) throws RemoteException {
            return mIsCameraOpened;
        }

        @Override
        public boolean initCamera(String clientId, GDArCameraParam gdArCameraParam, Surface surface) throws RemoteException {
            mCamera = null;
            mSurface = surface;
            try {
                mCamera = Camera.open(); // attempt to get a Camera instance
            } catch (Exception e) {
                // Camera is not available (in use or does not exist)
                return false;
            }
            mISCameraConnect = true;
            return true;
        }

        @Override
        public boolean openCamera(String clientId) throws RemoteException {
            if (mCamera == null) {
                mIsCameraOpened = false;
                return false;
            }
            // Stop the preview before making changes.
            try {
                mCamera.stopPreview();
            } catch (Exception e) {
                // ignore: tried to stop a non-existent preview
            }
            try {
                if (mSurface != null) {
                    // Hook up the preview surface via reflection (hidden API);
                    // comment this out if no on-screen preview is wanted.
                    Method method = mCamera.getClass().getMethod("setPreviewSurface", Surface.class);
                    method.invoke(mCamera, mSurface);
                }
                // parameters.setPreviewSize(1080, 720) works on the car box but
                // crashes on phones, hence the configured 1280x720.
                Camera.Parameters parameters = mCamera.getParameters();
                parameters.setPreviewSize(imageWidth, imageHeight);
                parameters.setPreviewFormat(ImageFormat.NV21);
                mCamera.setParameters(parameters);
                mCamera.setPreviewCallback(new Camera.PreviewCallback() {
                    @Override
                    public void onPreviewFrame(byte[] data, Camera camera) {
                        Log.d(TAG, "onPreviewFrame: " + data.toString());
                        try {
                            sendData(data, camera);
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    }
                });
                mCamera.startPreview();
                // The actual preview size is needed later for format conversion.
                mPreviewSize = mCamera.getParameters().getPreviewSize();
                initMemoryFile();
                mIsCameraOpened = true;
            } catch (Exception e) {
                e.printStackTrace();
                Log.e(TAG, "Error starting camera preview: " + e.getMessage());
                mIsCameraOpened = false;
                return false;
            }
            return true;
        }

        @Override
        public boolean closeCamera(String clientId) throws RemoteException {
            if (mCamera != null) {
                mCamera.stopPreview();
                mCamera.setPreviewCallback(null);
            }
            mIsCameraOpened = false;
            mISCameraConnect = false;
            return true;
        }

        @Override
        public boolean unInitCamera(String clientId) throws RemoteException {
            mISCameraConnect = false;
            mIsCameraOpened = false;
            // Fixed: guard against release before a successful initCamera().
            if (mCamera != null) {
                mCamera.release();
                mCamera = null;
            }
            return true;
        }
    };

    /** Scratch buffer for re-reading the shared-memory header each frame. */
    private byte[] header = new byte[SharedMemUtils.HEADER_SIZE];

    /**
     * Copies one NV21 frame into the shared memory region if the reader has
     * marked it writable, then flips the flag to readable.
     *
     * @param bytes  one NV21 preview frame
     * @param camera the camera that produced the frame (unused, kept for the
     *               callback signature)
     * @throws IOException if the shared-memory header cannot be read
     */
    private void sendData(byte[] bytes, Camera camera) throws IOException {
        byte[] buffer = new byte[SharedMemUtils.HEADER_SIZE + bytes.length];
        boolean canWrite;
        if (isFirstSend) {
            // No header exists in shared memory yet: initialise a local one.
            SharedMemUtils.initHeader(buffer);
            canWrite = SharedMemUtils.canWrite(buffer);
            isFirstSend = false;
        } else {
            // Re-read the header to see whether the reader consumed the last frame.
            mMemoryFile.readBytes(header, 0, 0, SharedMemUtils.HEADER_SIZE);
            canWrite = SharedMemUtils.canWrite(header);
        }
        // Only write while the region is flagged writable.
        if (canWrite) {
            SharedMemUtils.setOffset(buffer, 0);
            SharedMemUtils.setLength(buffer, 0);
            SharedMemUtils.setContentSize(buffer, bytes.length);
            SharedMemUtils.setContent(buffer, bytes);
            try {
                SharedMemUtils.setCanRead(buffer);
                mMemoryFile.writeBytes(buffer, 0, 0, buffer.length);
                Log.d(TAG, "CNEEDS_TEST sendData: " + Arrays.toString(buffer));
            } catch (Exception e) {
                e.printStackTrace();
                Log.d(TAG, "sendDataError: " + e.getMessage());
            }
        }
    }

    /**
     * Dups the MemoryFile's descriptor (via reflection on the hidden
     * getFileDescriptor) and hands it to the client together with the open
     * parameters so the client can map the shared region.
     */
    private void initMemoryFile() throws Exception {
        Method getFileDescriptorMethod = mMemoryFile.getClass().getDeclaredMethod("getFileDescriptor");
        if (getFileDescriptorMethod != null) {
            FileDescriptor fileDescriptor = (FileDescriptor) getFileDescriptorMethod.invoke(mMemoryFile);
            // Must be parcelled before it can cross the binder boundary.
            ParcelFileDescriptor pfd = ParcelFileDescriptor.dup(fileDescriptor);
            ArCameraOpenResultParam openResultParam = new ArCameraOpenResultParam();
            openResultParam.cameraId = "" + getCameraId();
            openResultParam.imageHeight = mPreviewSize.height;
            openResultParam.imageWidth = mPreviewSize.width;
            /**
             * See {@link com.autonavi.amapauto.gdarcameraservice.ImageFormat}:
             * NV21 maps to 0 in the AMAP protocol.
             */
            openResultParam.imageFormat = 0;
            mGdCameraStateCallBack.onOpened(pfd, openResultParam, MEMORY_NAME);
        }
    }

    /**
     * @return the index of the first front-facing camera, or 0 if none is found.
     */
    private int getCameraId() {
        int cameraId = 0;
        int numberOfCameras = Camera.getNumberOfCameras();
        // Fixed off-by-one: the loop bound was "<=", which queried an invalid
        // camera index and made getCameraInfo throw.
        for (int i = 0; i < numberOfCameras; i++) {
            Camera.CameraInfo info = new Camera.CameraInfo();
            Camera.getCameraInfo(i, info);
            if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                cameraId = i;
                break;
            }
        }
        return cameraId;
    }
}
package com.autonavi.amapauto.gdarcameraservicedemo.utils;
/**
 * Read/write state flags stored in the first header byte of the shared
 * memory file.
 */
public enum MemoryFileFlag {
    /** The region holds a frame the reader may consume. */
    CAN_READ((byte) 0),
    /** The region has been consumed and may be overwritten. */
    CAN_WRITE((byte) 1);

    private final byte flag;

    MemoryFileFlag(byte flag) {
        this.flag = flag;
    }

    /** @return the single-byte wire value of this flag. */
    public byte getFlag() {
        return flag;
    }
}
\ No newline at end of file
package com.autonavi.amapauto.gdarcameraservicedemo.utils;
import android.util.Log;
/**
 * Shared-memory layout helpers. The region starts with a 20-byte HEADER
 * followed by CONTENT; the CONTENT is interpreted using header fields.
 *
 * HEADER layout (big-endian multi-byte fields):
 *   byte 0:      flags — currently the read/write flag ({@link MemoryFileFlag})
 *   byte 1:      version code; both sides must match exactly to parse
 *   bytes 2-5:   offset
 *   bytes 6-9:   length
 *   bytes 10-13: contentSize
 *
 * CONTENT: a raw byte array, interpreted via offset + length + contentSize.
 * For a YUV image, offset == 0, length is the amount to read, and
 * contentSize is the total size of the CONTENT section.
 */
public class SharedMemUtils {
    private static final String TAG = "SharedMemUtils";
    /** Size of the shared-file header, in bytes. */
    public static final int HEADER_SIZE = 20;
    /** Bump this whenever the parsing format changes. */
    public static final byte VERSION_CODE = 1;

    /**
     * Writes the initial header: flag = CAN_WRITE, current version code.
     * Logs and does nothing if {@code header} is null or too small.
     */
    public static void initHeader(byte[] header) {
        if (header == null || header.length < HEADER_SIZE) {
            Log.e(TAG, "initHeader ERROR!! --> header == null || header.length < HEADER_SIZE");
            return;
        }
        header[0] = MemoryFileFlag.CAN_WRITE.getFlag();
        header[1] = VERSION_CODE;
    }

    /**
     * @param header shared-memory header bytes
     * @return true if the flag byte marks the region readable
     */
    public static boolean canRead(byte[] header) {
        if (header == null || header.length < HEADER_SIZE) {
            return false;
        }
        return header[0] == MemoryFileFlag.CAN_READ.getFlag();
    }

    /**
     * Marks the region readable.
     *
     * @param header shared-memory header bytes
     * @return false if the header is null or too small
     */
    public static boolean setCanRead(byte[] header) {
        if (header == null || header.length < HEADER_SIZE) {
            return false;
        }
        header[0] = MemoryFileFlag.CAN_READ.getFlag();
        return true;
    }

    /**
     * @param header shared-memory header bytes
     * @return true if the flag byte marks the region writable
     */
    public static boolean canWrite(byte[] header) {
        if (header == null || header.length < HEADER_SIZE) {
            return false;
        }
        return header[0] == MemoryFileFlag.CAN_WRITE.getFlag();
    }

    /**
     * Marks the region writable.
     * (Name keeps the original spelling for API compatibility.)
     *
     * @param header shared-memory header bytes
     * @return false if the header is null or too small
     */
    public static boolean setWirtable(byte[] header) {
        if (header == null || header.length < HEADER_SIZE) {
            return false;
        }
        header[0] = MemoryFileFlag.CAN_WRITE.getFlag();
        return true;
    }

    /**
     * Stores the offset field (bytes 2-5, big-endian: high byte on the left).
     *
     * @param header shared-memory header bytes
     * @param start  offset value
     * @return false if the header is null or too small
     */
    public static boolean setOffset(byte[] header, int start) {
        if (header == null || header.length < HEADER_SIZE) {
            return false;
        }
        header[2] = (byte) (start >>> 24);
        header[3] = (byte) (start >>> 16);
        header[4] = (byte) (start >>> 8);
        header[5] = (byte) start;
        return true;
    }

    /**
     * Reads the offset field (bytes 2-5, big-endian).
     *
     * @param header shared-memory header bytes
     * @return the offset, or -1 if the header is null or too small
     */
    public static int getOffset(byte[] header) {
        if (header == null || header.length < HEADER_SIZE) {
            return -1;
        }
        int result = header[2] & 0xff;
        result = result << 8 | header[3] & 0xff;
        result = result << 8 | header[4] & 0xff;
        result = result << 8 | header[5] & 0xff;
        return result;
    }

    /**
     * Stores the length field (bytes 6-9, big-endian).
     *
     * @param header shared-memory header bytes
     * @param length number of bytes to read
     * @return false if the header is null or too small
     */
    public static boolean setLength(byte[] header, int length) {
        if (header == null || header.length < HEADER_SIZE) {
            return false;
        }
        header[6] = (byte) (length >>> 24);
        header[7] = (byte) (length >>> 16);
        header[8] = (byte) (length >>> 8);
        header[9] = (byte) length;
        return true;
    }

    /**
     * Reads the length field (bytes 6-9, big-endian).
     *
     * @param header shared-memory header bytes
     * @return the length, or -1 if the header is null or too small
     */
    public static int getLength(byte[] header) {
        if (header == null || header.length < HEADER_SIZE) {
            return -1;
        }
        int result = header[6] & 0xff;
        result = result << 8 | header[7] & 0xff;
        result = result << 8 | header[8] & 0xff;
        result = result << 8 | header[9] & 0xff;
        return result;
    }

    /**
     * Stores the contentSize field (bytes 10-13, big-endian).
     *
     * @param header      shared-memory header bytes
     * @param contentSize total size of the CONTENT section
     * @return false if the header is null or too small
     */
    public static boolean setContentSize(byte[] header, int contentSize) {
        if (header == null || header.length < HEADER_SIZE) {
            return false;
        }
        header[10] = (byte) (contentSize >>> 24);
        header[11] = (byte) (contentSize >>> 16);
        header[12] = (byte) (contentSize >>> 8);
        header[13] = (byte) contentSize;
        return true;
    }

    /**
     * Copies {@code tempData} into the CONTENT section (starting right after
     * the header).
     *
     * @param header   buffer holding header + content
     * @param tempData payload bytes
     * @return false if either array is null or {@code header} is too small to
     *         hold the header plus the payload (previously this threw
     *         ArrayIndexOutOfBoundsException)
     */
    public static boolean setContent(byte[] header, byte[] tempData) {
        if (header == null || tempData == null
                || header.length < HEADER_SIZE + tempData.length) {
            return false;
        }
        System.arraycopy(tempData, 0, header, HEADER_SIZE, tempData.length);
        return true;
    }

    /**
     * Reads the contentSize field (bytes 10-13, big-endian).
     *
     * @param header shared-memory header bytes
     * @return the content size, or -1 if the header is null or too small
     */
    public static int getContentSize(byte[] header) {
        if (header == null || header.length < HEADER_SIZE) {
            return -1;
        }
        int result = header[10] & 0xff;
        result = result << 8 | header[11] & 0xff;
        result = result << 8 | header[12] & 0xff;
        result = result << 8 | header[13] & 0xff;
        return result;
    }
}
\ No newline at end of file
package com.autonavi.amapauto.proxy;
import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.media.Image;
import android.media.ImageReader;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.MemoryFile;
import android.os.RemoteException;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
import com.autonavi.amapauto.gdarcameraservice.IGDCameraStateCallBack;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
public class Camera2Proxy {
private static final String TAG = "Camera2Proxy";
private static final String MEMORY_FILE = "cneeds_camera_memory";
private Context mContext;
private final int MEMORY_SIZE = 3133440 + 1;
// camera
private int mCameraId = CameraCharacteristics.LENS_FACING_FRONT; // 要打开的摄像头ID
private Size mPreviewSize = new Size(640, 480); // 固定640*480演示
private CameraDevice mCameraDevice; // 相机对象
private CameraCaptureSession mCaptureSession;
// handler
private Handler mBackgroundHandler;
private HandlerThread mBackgroundThread;
// output
private Surface mPreviewSurface; // 输出到屏幕的预览
private ImageReader mImageReader; // 预览回调的接收者
private IGDCameraStateCallBack mStateCallBack;
// private ImageReader.OnImageAvailableListener mOnImageAvailableListener;
private MemoryFile mMemoryFile;
public void setStateCallBack(IGDCameraStateCallBack stateCallBack) {
mStateCallBack =stateCallBack;
}
/**
* 打开摄像头的回调
*/
private CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened( CameraDevice camera) {
Log.d(TAG, "onOpened");
mCameraDevice = camera;
initPreviewRequest();
try {
mStateCallBack.onConnected();
} catch (RemoteException e) {
e.printStackTrace();
}
}
@Override
public void onDisconnected( CameraDevice camera) {
Log.d(TAG, "onDisconnected");
releaseCamera();
try {
mStateCallBack.onDisconnected();
} catch (RemoteException e) {
e.printStackTrace();
}
}
@Override
public void onError( CameraDevice camera, int error) {
Log.e(TAG, "Camera Open failed, error: " + error);
releaseCamera();
try {
mStateCallBack.onError(error, "error : " +error);
} catch (RemoteException e) {
e.printStackTrace();
}
}
};
public Camera2Proxy(Context context) {
mContext = context;
try {
mMemoryFile = new MemoryFile(MEMORY_FILE, MEMORY_SIZE);
} catch (IOException e) {
e.printStackTrace();
}
}
public void setSurface(Surface surface) {
mPreviewSurface = surface;
}
public boolean openCamera() {
Log.v(TAG, "openCamera");
startBackgroundThread(); // 对应 releaseCamera() 方法中的 stopBackgroundThread()
try {
CameraManager cameraManager = (CameraManager) mContext.getSystemService(Context.CAMERA_SERVICE);
Log.d(TAG, "preview size: " + mPreviewSize.getWidth() + "*" + mPreviewSize.getHeight());
mImageReader = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(),
ImageFormat.YUV_420_888, 2);
mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, null);
// 打开摄像头
cameraManager.openCamera(Integer.toString(mCameraId), mStateCallback, mBackgroundHandler);
return true;
} catch (CameraAccessException e) {
e.printStackTrace();
}
return false;
}
public void releaseCamera() {
Log.v(TAG, "releaseCamera");
if (mImageReader != null) {
mImageReader.close();
mImageReader = null;
}
if (mCaptureSession != null) {
mCaptureSession.close();
mCaptureSession = null;
}
if (mCameraDevice != null) {
mCameraDevice.close();
mCameraDevice = null;
}
stopBackgroundThread(); // 对应 openCamera() 方法中的 startBackgroundThread()
}
public void setImageAvailableListener(ImageReader.OnImageAvailableListener onImageAvailableListener) {
mOnImageAvailableListener = onImageAvailableListener;
}
public void setPreviewSurface(SurfaceTexture surfaceTexture) {
// mPreviewSize必须先初始化完成
surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
mPreviewSurface = new Surface(surfaceTexture);
}
private void initPreviewRequest() {
try {
final CaptureRequest.Builder builder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
// 添加输出到屏幕的surface
if (mPreviewSurface != null) {
builder.addTarget(mPreviewSurface);
}
// 添加输出到ImageReader的surface。然后我们就可以从ImageReader中获取预览数据了
builder.addTarget(mImageReader.getSurface());
mCameraDevice.createCaptureSession(Arrays.asList(mPreviewSurface, mImageReader.getSurface()),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured( CameraCaptureSession session) {
mCaptureSession = session;
// 设置连续自动对焦和自动曝光
builder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
builder.set(CaptureRequest.CONTROL_AE_MODE,
CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
CaptureRequest captureRequest = builder.build();
try {
// 一直发送预览请求
mCaptureSession.setRepeatingRequest(captureRequest, null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed( CameraCaptureSession session) {
Log.e(TAG, "ConfigureFailed. session: mCaptureSession");
}
}, mBackgroundHandler); // handle 传入 null 表示使用当前线程的 Looper
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
public Size getPreviewSize() {
return mPreviewSize;
}
public void switchCamera() {
mCameraId ^= 1;
Log.d(TAG, "switchCamera: mCameraId: " + mCameraId);
releaseCamera();
openCamera();
}
private void startBackgroundThread() {
if (mBackgroundThread == null || mBackgroundHandler == null) {
Log.v(TAG, "startBackgroundThread");
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
}
private void stopBackgroundThread() {
Log.v(TAG, "stopBackgroundThread");
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private byte[] mYuvBytes;
private boolean mIsShutter;
private ImageReader.OnImageAvailableListener mOnImageAvailableListener
= new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = reader.acquireLatestImage();
if (image == null) {
return;
}
int width = getPreviewSize().getWidth();
int height = getPreviewSize().getHeight();
if (mYuvBytes == null) {
// YUV420 大小总是 width * height * 3 / 2
mYuvBytes = new byte[width * height * 3 / 2];
}
// YUV_420_888
Image.Plane[] planes = image.getPlanes();
// Y通道,对应planes[0]
// Y size = width * height
// yBuffer.remaining() = width * height;
// pixelStride = 1
ByteBuffer yBuffer = planes[0].getBuffer();
int yLen = width * height;
yBuffer.get(mYuvBytes, 0, yLen);
// U通道,对应planes[1]
// U size = width * height / 4;
// uBuffer.remaining() = width * height / 2;
// pixelStride = 2
ByteBuffer uBuffer = planes[1].getBuffer();
int pixelStride = planes[1].getPixelStride(); // pixelStride = 2
for (int i = 0; i < uBuffer.remaining(); i+=pixelStride) {
mYuvBytes[yLen++] = uBuffer.get(i);
}
// V通道,对应planes[2]
// V size = width * height / 4;
// vBuffer.remaining() = width * height / 2;
// pixelStride = 2
ByteBuffer vBuffer = planes[2].getBuffer();
pixelStride = planes[2].getPixelStride(); // pixelStride = 2
for (int i = 0; i < vBuffer.remaining(); i+=pixelStride) {
mYuvBytes[yLen++] = vBuffer.get(i);
}
if (mIsShutter) {
mIsShutter = false;
// save yuv data
// String yuvPath = FileUtil.SAVE_DIR + System.currentTimeMillis() + ".yuv";
// FileUtil.saveBytes(mYuvBytes, yuvPath);
// save bitmap data
// String jpgPath = yuvPath.replace(".yuv", ".jpg");
// Bitmap bitmap = ColorConvertUtil.yuv420pToBitmap(mYuvBytes, width, height);
// FileUtil.saveBitmap(bitmap, jpgPath);
}
// byte[] buffer = new byte[20+mYuvBytes.length];
//
// SharedMemUtils.initHeader(buffer);
// boolean b = SharedMemUtils.canWrite(buffer);
// if (b) {
// SharedMemUtils.setOffset(buffer, 0);
// SharedMemUtils.setLength(buffer, 0);
// SharedMemUtils.setContentSize(buffer, mYuvBytes.length);
// SharedMemUtils.setContent(buffer, mYuvBytes);
// try {
// // 写一次 , 读取数据后 数据会被清空
// // 持续写,不读,数据不会清空,注意数据覆盖(offset值)
// mMemoryFile.writeBytes(buffer, 0, 0, buffer.length);
// Method getFileDescriptorMethod = mMemoryFile.getClass().getDeclaredMethod("getFileDescriptor");
// if(getFileDescriptorMethod != null){
// FileDescriptor fileDescriptor = (FileDescriptor) getFileDescriptorMethod.invoke(mMemoryFile);
// // 序列化,才可传送
// ParcelFileDescriptor pfd = ParcelFileDescriptor.dup(fileDescriptor);
//
// ArCameraOpenResultParam openResultParam = new ArCameraOpenResultParam();
// openResultParam.cameraId = "" + mCameraId;
// openResultParam.imageHeight = getPreviewSize().getHeight();
// openResultParam.imageWidth = getPreviewSize().getWidth();
// mStateCallBack.onOpened(pfd, openResultParam, MEMORY_FILE);
// }
//
// } catch (Exception e) {
// e.printStackTrace();
// }
// }
// 一定不能忘记close
image.close();
}
};
}
package com.example.cameraservicedemo;
import android.app.Activity;
import android.os.Bundle;
import android.view.TextureView;
import android.view.View;
import android.widget.Button;
/**
 * Demo screen for {@link Camera2Helper}: a preview TextureView plus
 * open/close buttons.
 */
public class Camera2Activity extends Activity {
    private TextureView mTextureView;
    private Button mBtnOpenCamera;
    private Button mBtnCloseCamera;
    private Camera2Helper mCamera2Helper;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_camera2);
        mTextureView = (TextureView) findViewById(R.id.texture_view);
        mBtnOpenCamera = (Button) findViewById(R.id.btn_open_camera);
        mBtnCloseCamera = (Button) findViewById(R.id.btn_close_camera);
        mCamera2Helper = new Camera2Helper(Camera2Activity.this, mTextureView);
        mBtnOpenCamera.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                mCamera2Helper.initCameraInfo();
            }
        });
        mBtnCloseCamera.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                tearDownCamera();
            }
        });
    }

    @Override
    protected void onResume() {
        super.onResume();
    }

    @Override
    protected void onPause() {
        super.onPause();
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        tearDownCamera();
    }

    /** Releases the camera pipeline and its background thread. */
    private void tearDownCamera() {
        mCamera2Helper.releaseCamera();
        mCamera2Helper.releaseThread();
    }
}
\ No newline at end of file
package com.example.cameraservicedemo;
import android.app.Activity;
import android.content.Context;
import android.content.res.Configuration;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
import android.view.TextureView;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
public class Camera2Helper {
private static final String TAG = "Camera2Helper";
private final Activity mActivity;
private final TextureView mTextureView;
private final static int PREVIEW_WIDTH = 720;
private final static int PREVIEW_HEIGHT = 1280;
private final static int SAVE_WIDTH = 720;
private final static int SAVE_HEIGHT = 1280;
private CameraManager mCameraManager;
private ImageReader mImageReader;
private CameraDevice mCameraDevice;
private CameraCaptureSession mCameraCaptureSession;
private String mCameraId = "0";
private CameraCharacteristics mCameraCharacteristics;
private int mCameraSensorOrientation = 0; //摄像头方向
private int mCameraFacing = CameraCharacteristics.LENS_FACING_BACK; //默认使用后置摄像头
private int mDisplayRotation; //手机方向
private boolean canTakePic = true; //是否可以拍照
private boolean canExchangeCamera = false; //是否可以切换摄像头
private Handler mCameraHandler;
private HandlerThread handlerThread = new HandlerThread("CameraThread");
private Size mPreviewSize = new Size(PREVIEW_WIDTH, PREVIEW_HEIGHT); //预览大小
private Size mSavePicSize = new Size(SAVE_WIDTH, SAVE_HEIGHT); //保存图片大小
public Camera2Helper(Activity activity, TextureView textureView) {
mActivity = activity;
mTextureView = textureView;
mDisplayRotation = mActivity.getWindowManager().getDefaultDisplay().getRotation();
handlerThread.start();
mCameraHandler = new Handler(handlerThread.getLooper());
mTextureView.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
initCameraInfo();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
releaseCamera();
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
});
}
public void initCameraInfo() {
mCameraManager = (CameraManager) mActivity.getSystemService(Context.CAMERA_SERVICE);
String[] cameraIdList = new String[0];
try {
cameraIdList = mCameraManager.getCameraIdList();
if (cameraIdList.length == 0) {
Log.d(TAG, "没有相机可用");
return;
}
for (int i = 0; i < cameraIdList.length; i++) {
CameraCharacteristics cameraCharacteristics = mCameraManager.getCameraCharacteristics(cameraIdList[i]);
Integer facing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING);
if (facing == mCameraFacing) {
mCameraId = cameraIdList[i];
mCameraCharacteristics = cameraCharacteristics;
}
Log.d(TAG, "设备中的摄像头" + mCameraId);
}
Integer supportLevel = mCameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
if (supportLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
Log.d(TAG, "相机硬件不支持新特性");
}
// 获取摄像头方向
mCameraSensorOrientation = mCameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
//获取StreamConfigurationMap,它是管理摄像头支持的所有输出格式和尺寸
StreamConfigurationMap configurationMap = mCameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Size[] savePicSize = configurationMap.getOutputSizes(ImageFormat.JPEG);
Size[] previewSize = configurationMap.getOutputSizes(SurfaceTexture.class);
boolean exchange = exchangeWidthAndHeight(mDisplayRotation, mCameraSensorOrientation);
mSavePicSize = getBestSize(exchange ? mSavePicSize.getHeight() : mSavePicSize.getWidth(),
exchange ? mSavePicSize.getWidth() : mSavePicSize.getHeight(),
exchange ? mTextureView.getHeight() : mTextureView.getWidth(),
exchange ? mTextureView.getWidth() : mTextureView.getHeight(),
Arrays.asList(savePicSize));
mPreviewSize = getBestSize(exchange ? mPreviewSize.getHeight() : mPreviewSize.getWidth(),
exchange ? mPreviewSize.getWidth() : mPreviewSize.getHeight(),
exchange ? mTextureView.getHeight() : mTextureView.getWidth(),
exchange ? mTextureView.getWidth() : mTextureView.getHeight(),
Arrays.asList(previewSize));
mTextureView.getSurfaceTexture().setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
log("预览最优尺寸 :" + mPreviewSize.getWidth() * mPreviewSize.getHeight() + ", 比例 " + (float) mPreviewSize.getWidth() / mPreviewSize.getHeight());
log("保存图片最优尺寸 :" + mSavePicSize.getWidth() * mSavePicSize.getHeight() + ", 比例 " + (float) mSavePicSize.getWidth() / mSavePicSize.getHeight());
//根据预览的尺寸大小调整TextureView的大小,保证画面不被拉伸
// int orientation = mActivity.getResources().getConfiguration().orientation;
// if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
// mTextureView.setRotationX(mPreviewSize.getWidth());
// mTextureView.setRotationY(mPreviewSize.getHeight());
// } else {
// mTextureView.setRotationX(mPreviewSize.getHeight());
// mTextureView.setRotationX(mPreviewSize.getWidth());
// }
mImageReader = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(), ImageFormat.JPEG, 1);
mImageReader.setOnImageAvailableListener(onImageAvailableListener, mCameraHandler);
openCamera();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private ImageReader.OnImageAvailableListener onImageAvailableListener =
new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
// 拿到拍照照片数据
Image image = reader.acquireNextImage();
ByteBuffer byteBuffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[byteBuffer.remaining()];
byteBuffer.get(bytes);
reader.close();
log("拿到拍照照片数据");
}
};
private void openCamera() throws CameraAccessException {
mCameraManager.openCamera(mCameraId, new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
mCameraDevice = camera;
try {
createCaptureSession(camera);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onDisconnected(CameraDevice camera) {
log("onDisconnected");
}
@Override
public void onError(CameraDevice camera, int error) {
log("onError" +error);
}
}, mCameraHandler);
}
private void createCaptureSession(CameraDevice cameraDevice) throws CameraAccessException {
final CaptureRequest.Builder builder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
Surface surface = new Surface(mTextureView.getSurfaceTexture());
builder.addTarget(surface);
// builder.addTarget(mImageReader.getSurface());
builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
cameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
mCameraCaptureSession = session;
try {
session.setRepeatingRequest(builder.build(), captureCallback, mCameraHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
}
@Override
public void onClosed(CameraCaptureSession session) {
super.onClosed(session);
releaseCamera();
releaseThread();
}
}, mCameraHandler);
}
public void releaseCamera() {
if (mCameraCaptureSession != null) {
mCameraCaptureSession.close();
mCameraCaptureSession = null;
}
if (mCameraDevice != null) {
mCameraDevice.close();
mCameraDevice = null;
}
if (mImageReader != null) {
mImageReader.close();
mImageReader = null;
}
canExchangeCamera = false;
}
public void releaseThread() {
if (handlerThread == null) {
return;
}
handlerThread.quitSafely();
try {
handlerThread.join();
handlerThread = null;
mCameraHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
CameraCaptureSession.CaptureCallback captureCallback = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
canExchangeCamera = true;
canTakePic = true;
}
@Override
public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
super.onCaptureFailed(session, request, failure);
log("onCaptureFailed");
}
};
/**
* 根据提供的屏幕方向 [displayRotation] 和相机方向 [sensorOrientation] 返回是否需要交换宽高
*/
private boolean exchangeWidthAndHeight(int displayRotation, int sensorOrientation) {
boolean exchange = false;
switch (displayRotation) {
case Surface.ROTATION_0:
case Surface.ROTATION_180:
if (sensorOrientation == 90 || sensorOrientation == 270) {
exchange = true;
}
break;
case Surface.ROTATION_90:
case Surface.ROTATION_270:
if (sensorOrientation == 0 || sensorOrientation == 180) {
exchange = true;
}
break;
default:
log("Display rotation is invalid: " + displayRotation);
break;
}
log("屏幕方向" + displayRotation);
log("相机方向" + sensorOrientation);
return exchange;
}
private void log(String msg) {
Log.d(TAG, msg);
}
/**
 * Picks the supported size that best matches the requested target size.
 * Candidates must fit within maxWidth x maxHeight and share the target's
 * aspect ratio; among those, the smallest size that is at least as large as
 * the target wins, otherwise the largest size that is smaller.
 *
 * @param targetWidth  target width
 * @param targetHeight target height
 * @param maxWidth     maximum width (the TextureView's width)
 * @param maxHeight    maximum height (the TextureView's height)
 * @param sizeList     list of supported sizes
 * @return the size equal or closest to the requested dimensions
 */
private Size getBestSize(int targetWidth, int targetHeight, int maxWidth, int maxHeight, List<Size> sizeList) {
    List<Size> bigEnough = new ArrayList<>();    // candidates >= target size
    List<Size> notBigEnough = new ArrayList<>(); // candidates < target size
    for (int i = 0; i < sizeList.size(); i++) {
        Size size = sizeList.get(i);
        // width <= max width && height <= max height && same aspect ratio as target
        if (size.getWidth() <= maxWidth && size.getHeight() <= maxHeight
                && size.getWidth() == size.getHeight() * targetWidth / targetHeight) {
            if (size.getWidth() >= targetWidth && size.getHeight() >= targetHeight)
                bigEnough.add(size);
            else
                notBigEnough.add(size);
        }
        log("系统支持的尺寸: " + size.getWidth() * size.getHeight() + " , 比例 :" + (float) size.getWidth() / size.getHeight());
    }
    log("最大尺寸 :" + maxWidth * maxHeight + ", 比例 :" + (float) targetWidth / targetHeight);
    log("目标尺寸 :" + targetWidth * targetHeight + ", 比例 :" + (float) targetWidth / targetHeight);
    // Smallest of bigEnough, or the LARGEST of notBigEnough. The original
    // called Collections.min on notBigEnough, contradicting its own comment
    // and selecting the worst (smallest) undersized candidate.
    if (bigEnough.size() > 0) {
        return Collections.min(bigEnough, new CompareSizesByArea());
    } else if (notBigEnough.size() > 0) {
        return Collections.max(notBigEnough, new CompareSizesByArea());
    } else {
        return sizeList.get(0);
    }
}
/**
 * Orders sizes by total pixel area, smallest first. Areas are computed in
 * long arithmetic to avoid int overflow for large sensor sizes.
 */
private class CompareSizesByArea implements Comparator<Size> {
    @Override
    public int compare(Size size1, Size size2) {
        long area1 = (long) size1.getWidth() * size1.getHeight();
        long area2 = (long) size2.getWidth() * size2.getHeight();
        return Long.compare(area1, area2);
    }
}
}
package com.example.cameraservicedemo;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.TextureView;
import android.view.View;
import android.widget.Button;
import com.example.cameraservicedemo.camera1.Camera1;
import com.example.cameraservicedemo.getpreview.PreviewActivity;
import com.example.cameraservicedemo.video.CameraActivity;
/**
 * Launcher screen: one button per camera demo, each starting the matching
 * demo activity.
 */
public class MainActivity extends Activity implements View.OnClickListener {
    private Button mBtnCamera2;
    private Button mBtnVideo;
    private Button mBtnMTKCamera;
    private Button btn_preview;
    private Button btn_camera1;
    private Camera2Helper mCamera2Helper;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        mBtnCamera2 = (Button) findViewById(R.id.btn_camera2);
        mBtnVideo = (Button) findViewById(R.id.btn_video);
        mBtnMTKCamera = (Button) findViewById(R.id.btn_mtk_camera);
        btn_preview = (Button) findViewById(R.id.btn_preview);
        btn_camera1 = (Button) findViewById(R.id.btn_camera1);
        // Every button routes through this activity's onClick().
        mBtnCamera2.setOnClickListener(this);
        mBtnMTKCamera.setOnClickListener(this);
        mBtnVideo.setOnClickListener(this);
        btn_preview.setOnClickListener(this);
        btn_camera1.setOnClickListener(this);
    }

    @Override
    protected void onResume() {
        super.onResume();
    }

    @Override
    protected void onPause() {
        super.onPause();
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
    }

    @Override
    public void onClick(View v) {
        int id = v.getId();
        if (id == R.id.btn_camera2) {
            launch(Camera2Activity.class);
        } else if (id == R.id.btn_mtk_camera) {
            // MTK camera demo is currently disabled.
        } else if (id == R.id.btn_video) {
            launch(CameraActivity.class);
        } else if (id == R.id.btn_preview) {
            launch(PreviewActivity.class);
        } else if (id == R.id.btn_camera1) {
            launch(Camera1.class);
        }
    }

    // Starts the given demo activity.
    private void launch(Class<?> target) {
        startActivity(new Intent(MainActivity.this, target));
    }
}
\ No newline at end of file
package com.example.cameraservicedemo.camera1;
import android.app.Activity;
import android.hardware.Camera;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.FrameLayout;
import android.widget.ImageView;
import com.example.cameraservicedemo.R;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
/**
 * Demo activity for the legacy android.hardware.Camera API: shows a live
 * preview inside a FrameLayout and saves a timestamped JPEG when the
 * capture button is pressed.
 */
public class Camera1 extends Activity {
    private final static String TAG = "Camera1";
    private Camera mCamera;
    private CameraPreview mPreview;
    private ImageView mIvPreview;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_camera1);
        mIvPreview = (ImageView) findViewById(R.id.iv_preview);
        // Create an instance of Camera (null when the camera is unavailable).
        mCamera = Camera1Helper.getCameraInstance();
        // Create our Preview view and set it as the content of our activity.
        mPreview = new CameraPreview(this, mCamera);
        FrameLayout preview = (FrameLayout) findViewById(R.id.camera_preview);
        preview.addView(mPreview);
        mPreview.setPreView(mIvPreview);
        // Add a listener to the Capture button
        Button captureButton = (Button) findViewById(R.id.button_capture);
        captureButton.setOnClickListener(
                new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        // get an image from the camera; the original NPE'd here
                        // when getCameraInstance() had returned null
                        if (mCamera != null) {
                            mCamera.takePicture(null, null, mPicture);
                        }
                    }
                }
        );
    }

    /** Writes the captured JPEG bytes to a timestamped file under DCIM/Pictures. */
    private Camera.PictureCallback mPicture = new Camera.PictureCallback() {
        @Override
        public void onPictureTaken(byte[] data, Camera camera) {
            File pictureFile = getOutputMediaFile(MEDIA_TYPE_IMAGE);
            if (pictureFile == null) {
                Log.d(TAG, "Error creating media file, check storage permissions");
                return;
            }
            // try-with-resources closes the stream even when write() throws;
            // the original leaked the FileOutputStream on IOException.
            try (FileOutputStream fos = new FileOutputStream(pictureFile)) {
                fos.write(data);
            } catch (FileNotFoundException e) {
                Log.d(TAG, "File not found: " + e.getMessage());
            } catch (IOException e) {
                Log.d(TAG, "Error accessing file: " + e.getMessage());
            }
        }
    };

    public static final int MEDIA_TYPE_IMAGE = 1;
    public static final int MEDIA_TYPE_VIDEO = 2;

    /** Create a file Uri for saving an image or video; null when no file could be created. */
    private static Uri getOutputMediaFileUri(int type) {
        File mediaFile = getOutputMediaFile(type);
        // Guard: Uri.fromFile(null) throws NullPointerException; the original
        // passed the possibly-null result straight through.
        return mediaFile == null ? null : Uri.fromFile(mediaFile);
    }

    /** Create a File for saving an image or video */
    private static File getOutputMediaFile(int type) {
        // To be safe, you should check that the SDCard is mounted
        // using Environment.getExternalStorageState() before doing this.
        File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(
                Environment.DIRECTORY_PICTURES), "MyCameraApp");
        // This location works best if you want the created images to be shared
        // between applications and persist after your app has been uninstalled.
        // Create the storage directory if it does not exist
        if (!mediaStorageDir.exists()) {
            if (!mediaStorageDir.mkdirs()) {
                Log.d("MyCameraApp", "failed to create directory");
                return null;
            }
        }
        // Create a media file name.
        // NOTE(review): SimpleDateFormat uses the default locale here; pass an
        // explicit Locale if non-ASCII digits ever become a concern.
        String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
        File mediaFile;
        if (type == MEDIA_TYPE_IMAGE) {
            mediaFile = new File(mediaStorageDir.getPath() + File.separator +
                    "IMG_" + timeStamp + ".jpg");
        } else if (type == MEDIA_TYPE_VIDEO) {
            mediaFile = new File(mediaStorageDir.getPath() + File.separator +
                    "VID_" + timeStamp + ".mp4");
        } else {
            return null;
        }
        return mediaFile;
    }

    @Override
    protected void onPause() {
        super.onPause();
        releaseMediaRecorder(); // if you are using MediaRecorder, release it first
        releaseCamera();        // release the camera immediately on pause event
    }

    // Placeholder: recorder support is currently disabled.
    private void releaseMediaRecorder() {
        // if (mediaRecorder != null) {
        // mediaRecorder.reset(); // clear recorder configuration
        // mediaRecorder.release(); // release the recorder object
        // mediaRecorder = null;
        // mCamera.lock(); // lock camera for later use
        // }
    }

    // Detaches the preview callback, stops the preview and releases the camera.
    private void releaseCamera() {
        if (mCamera != null) {
            mCamera.setPreviewCallback(null);
            mCamera.stopPreview();
            mCamera.release(); // release the camera for other applications
            mCamera = null;
        }
    }
}
package com.example.cameraservicedemo.camera1;
import android.content.Context;
import android.content.pm.PackageManager;
import android.hardware.Camera;
/**
 * Small utilities for working with the legacy android.hardware.Camera API.
 */
public class Camera1Helper {
    /** Check if this device has a camera */
    private boolean checkCameraHardware(Context context) {
        // Direct boolean return instead of the original if/else pair.
        return context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA);
    }

    /** A safe way to get an instance of the Camera object. */
    public static Camera getCameraInstance() {
        Camera camera = null;
        try {
            camera = Camera.open(); // attempt to get a Camera instance
        } catch (Exception ignored) {
            // Camera is not available (in use or does not exist)
        }
        return camera; // null when the camera is unavailable
    }
}
package com.example.cameraservicedemo.camera1;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.hardware.Camera;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.widget.ImageView;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
/**
 * A basic Camera preview class.
 * <p>
 * Renders the legacy Camera preview into this SurfaceView. Once the surface
 * geometry is known (surfaceChanged), the preview is restarted with an NV21
 * preview format and a callback that converts each frame to a JPEG-backed
 * Bitmap and displays it in an optional ImageView.
 */
public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback {
    private final static String TAG = "CameraPreview";
    private SurfaceHolder mHolder;
    private Camera mCamera;
    // Optional sink that displays the converted preview frames.
    private ImageView mImageView;
    // Preview size cached for the YUV -> JPEG conversion.
    private Camera.Size previewSize;

    public CameraPreview(Context context, Camera camera) {
        super(context);
        mCamera = camera;
        // Install a SurfaceHolder.Callback so we get notified when the
        // underlying surface is created and destroyed.
        mHolder = getHolder();
        mHolder.addCallback(this);
        // deprecated setting, but required on Android versions prior to 3.0
        mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    public void surfaceCreated(SurfaceHolder holder) {
        // The Surface has been created, now tell the camera where to draw the preview.
        try {
            mCamera.setPreviewDisplay(holder);
            // Log-only callback; the displaying callback is installed later in
            // surfaceChanged() once the preview format is set.
            mCamera.setPreviewCallback(new Camera.PreviewCallback() {
                @Override
                public void onPreviewFrame(byte[] data, Camera camera) {
                    Log.d(TAG, "onPreviewFrame : " + data);
                }
            });
            mCamera.startPreview();
        } catch (IOException e) {
            Log.d(TAG, "Error setting camera preview: " + e.getMessage());
        }
    }

    // Sets the ImageView that will show the converted preview frames.
    public void setPreView(ImageView imageView) {
        mImageView = imageView;
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        // empty. Take care of releasing the Camera preview in your activity.
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
        // If your preview can change or rotate, take care of those events here.
        // Make sure to stop the preview before resizing or reformatting it.
        if (mHolder.getSurface() == null) {
            // preview surface does not exist
            return;
        }
        // stop preview before making changes
        try {
            mCamera.stopPreview();
        } catch (Exception e) {
            // ignore: tried to stop a non-existent preview
        }
        // set preview size and make any resize, rotate or
        // reformatting changes here
        // start preview with new settings
        try {
            mCamera.setPreviewDisplay(mHolder);
            Camera.Parameters parameters = mCamera.getParameters();
            parameters.setPreviewFormat(ImageFormat.NV21);
            mCamera.setParameters(parameters);
            mCamera.setPreviewCallback(new Camera.PreviewCallback() {
                @Override
                public void onPreviewFrame(byte[] data, Camera camera) {
                    Log.d(TAG, "onPreviewFrame : " + data);
                    setPreViewFrame(data, camera);
                }
            });
            mCamera.startPreview();
        } catch (Exception e) {
            Log.d(TAG, "Error starting camera preview: " + e.getMessage());
        }
    }

    // Converts one NV21 preview frame to a Bitmap and shows it in mImageView.
    private void setPreViewFrame(byte[] data, Camera camera) {
        if (mImageView != null) {
            // process the frame data
            previewSize = camera.getParameters().getPreviewSize();// size needed for the format conversion below
            YuvImage yuvimage = new YuvImage(
                    data,
                    // ImageFormat.YV12,
                    ImageFormat.NV21,
                    previewSize.width,
                    previewSize.height,
                    null);
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            yuvimage.compressToJpeg(new Rect(0, 0, previewSize.width, previewSize.height), 100, baos);// JPEG quality [0-100], 100 is the highest
            byte[] rawImage = baos.toByteArray();
            // convert rawImage into a bitmap
            BitmapFactory.Options options = new BitmapFactory.Options();
            options.inPreferredConfig = Bitmap.Config.RGB_565;
            Bitmap bitmap = BitmapFactory.decodeByteArray(rawImage, 0, rawImage.length, options);
            // idx++;
            // text.setText(idx+"");
            // bitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
            mImageView.setImageBitmap(bitmap);
        }
    }
}
package com.example.cameraservicedemo.getpreview;
import android.app.Activity;
import android.content.Context;
import android.util.AttributeSet;
import android.view.TextureView;
import com.example.cameraservicedemo.getpreview.util.Camera2Proxy;
/**
 * TextureView that owns a Camera2Proxy bound to its hosting Activity and
 * can constrain its own measured size to a requested aspect ratio.
 */
public class Camera2View extends TextureView {
    private static final String TAG = "Camera2View";
    private Camera2Proxy mCameraProxy;
    private int mRatioWidth = 0;
    private int mRatioHeight = 0;

    public Camera2View(Context context) {
        this(context, null);
    }

    public Camera2View(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public Camera2View(Context context, AttributeSet attrs, int defStyleAttr) {
        this(context, attrs, defStyleAttr, 0);
    }

    public Camera2View(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
        super(context, attrs, defStyleAttr, defStyleRes);
        init(context);
    }

    // Creates the camera proxy; the context is expected to be an Activity.
    private void init(Context context) {
        mCameraProxy = new Camera2Proxy((Activity) context);
    }

    /**
     * Requests that this view keep the given aspect ratio on the next layout.
     */
    public void setAspectRatio(int width, int height) {
        if (width < 0 || height < 0) {
            throw new IllegalArgumentException("Size cannot be negative.");
        }
        mRatioWidth = width;
        mRatioHeight = height;
        requestLayout();
    }

    public Camera2Proxy getCameraProxy() {
        return mCameraProxy;
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        int measuredW = MeasureSpec.getSize(widthMeasureSpec);
        int measuredH = MeasureSpec.getSize(heightMeasureSpec);
        if (mRatioWidth == 0 || mRatioHeight == 0) {
            // No ratio requested yet: accept the measured size as-is.
            setMeasuredDimension(measuredW, measuredH);
            return;
        }
        if (measuredW < measuredH * mRatioWidth / mRatioHeight) {
            // Width-bound: derive the height from the requested ratio.
            setMeasuredDimension(measuredW, measuredW * mRatioHeight / mRatioWidth);
        } else {
            // Height-bound: derive the width from the requested ratio.
            setMeasuredDimension(measuredH * mRatioWidth / mRatioHeight, measuredH);
        }
    }
}
package com.example.cameraservicedemo.getpreview;
import android.app.Fragment;
import android.graphics.SurfaceTexture;
import android.media.Image;
import android.media.ImageReader;
import android.os.Bundle;
import android.util.Size;
import android.view.LayoutInflater;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import com.example.cameraservicedemo.R;
import com.example.cameraservicedemo.getpreview.util.Camera2Proxy;
import java.nio.ByteBuffer;
/**
 * Fullscreen Camera2 preview fragment: displays the preview in a
 * Camera2View, copies each YUV_420_888 frame into a packed YUV420p byte
 * array, and offers close / switch-camera / take-picture buttons.
 */
public class CameraFragment extends Fragment implements View.OnClickListener {
    private static final String TAG = "CameraFragment";
    private ImageView mCloseIv;
    private ImageView mSwitchCameraIv;
    private ImageView mTakePictureIv;
    private Camera2View mCameraView;
    private Camera2Proxy mCameraProxy;

    // Opens the camera as soon as the TextureView's surface becomes usable.
    private final TextureView.SurfaceTextureListener mSurfaceTextureListener
            = new TextureView.SurfaceTextureListener() {
        @Override
        public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) {
            mCameraProxy.openCamera();
            mCameraProxy.setPreviewSurface(texture);
            // Size the view from the camera preview size to avoid distortion.
            Size previewSize = mCameraProxy.getPreviewSize();
            mCameraView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth());
        }

        @Override
        public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) {
        }

        @Override
        public boolean onSurfaceTextureDestroyed(SurfaceTexture texture) {
            return true;
        }

        @Override
        public void onSurfaceTextureUpdated(SurfaceTexture texture) {
        }
    };

    @Override
    public View onCreateView( LayoutInflater inflater, ViewGroup container,
                              Bundle savedInstanceState) {
        View rootView = inflater.inflate(R.layout.fragment_camera, null);
        initView(rootView);
        return rootView;
    }

    // Binds views, wires click listeners and registers the frame listener.
    private void initView(View rootView) {
        mCloseIv = (ImageView) rootView.findViewById(R.id.toolbar_close_iv);
        mSwitchCameraIv = (ImageView) rootView.findViewById(R.id.toolbar_switch_iv);
        mTakePictureIv = (ImageView) rootView.findViewById(R.id.take_picture_iv);
        mCameraView = (Camera2View) rootView.findViewById(R.id.camera_view);
        mCameraProxy = mCameraView.getCameraProxy();
        mCloseIv.setOnClickListener(this);
        mSwitchCameraIv.setOnClickListener(this);
        mTakePictureIv.setOnClickListener(this);
        mCameraProxy.setImageAvailableListener(mOnImageAvailableListener);
    }

    @Override
    public void onResume() {
        super.onResume();
        // Surface may already exist after a pause/resume cycle; otherwise wait
        // for onSurfaceTextureAvailable to open the camera.
        if (mCameraView.isAvailable()) {
            mCameraProxy.openCamera();
        } else {
            mCameraView.setSurfaceTextureListener(mSurfaceTextureListener);
        }
    }

    @Override
    public void onPause() {
        super.onPause();
        mCameraProxy.releaseCamera();
    }

    @Override
    public void onClick(View v) {
        switch (v.getId()) {
            case R.id.toolbar_close_iv:
                getActivity().finish();
                break;
            case R.id.toolbar_switch_iv:
                mCameraProxy.switchCamera();
                break;
            case R.id.take_picture_iv:
                // Flag consumed by the frame listener below on the next frame.
                mIsShutter = true;
                break;
        }
    }

    // Reusable destination buffer for packed YUV420p frames.
    private byte[] mYuvBytes;
    // Set by the shutter button; consumed by the next available frame.
    private boolean mIsShutter;

    // Repacks each YUV_420_888 Image into mYuvBytes as planar YUV420p.
    private ImageReader.OnImageAvailableListener mOnImageAvailableListener
            = new ImageReader.OnImageAvailableListener() {
        @Override
        public void onImageAvailable(ImageReader reader) {
            Image image = reader.acquireLatestImage();
            if (image == null) {
                return;
            }
            int width = mCameraProxy.getPreviewSize().getWidth();
            int height = mCameraProxy.getPreviewSize().getHeight();
            if (mYuvBytes == null) {
                // A packed YUV420 frame is always width * height * 3 / 2 bytes.
                mYuvBytes = new byte[width * height * 3 / 2];
            }
            // YUV_420_888
            // NOTE(review): plane rowStride is assumed to equal width here;
            // confirm for devices with padded rows.
            // Y channel, planes[0]
            // Y size = width * height
            // yBuffer.remaining() = width * height;
            // pixelStride = 1
            ByteBuffer yBuffer = image.getPlanes()[0].getBuffer();
            int yLen = width * height;
            yBuffer.get(mYuvBytes, 0, yLen);
            // U channel, planes[1]
            // U size = width * height / 4;
            // uBuffer.remaining() = width * height / 2;
            // pixelStride = 2
            ByteBuffer uBuffer = image.getPlanes()[1].getBuffer();
            int pixelStride = image.getPlanes()[1].getPixelStride(); // pixelStride = 2
            for (int i = 0; i < uBuffer.remaining(); i += pixelStride) {
                mYuvBytes[yLen++] = uBuffer.get(i);
            }
            // V channel, planes[2]
            // V size = width * height / 4;
            // vBuffer.remaining() = width * height / 2;
            // pixelStride = 2
            ByteBuffer vBuffer = image.getPlanes()[2].getBuffer();
            pixelStride = image.getPlanes()[2].getPixelStride(); // pixelStride = 2
            for (int i = 0; i < vBuffer.remaining(); i += pixelStride) {
                mYuvBytes[yLen++] = vBuffer.get(i);
            }
            if (mIsShutter) {
                mIsShutter = false;
                // save yuv data
                // String yuvPath = FileUtil.SAVE_DIR + System.currentTimeMillis() + ".yuv";
                // FileUtil.saveBytes(mYuvBytes, yuvPath);
                // save bitmap data
                // String jpgPath = yuvPath.replace(".yuv", ".jpg");
                // Bitmap bitmap = ColorConvertUtil.yuv420pToBitmap(mYuvBytes, width, height);
                // FileUtil.saveBitmap(bitmap, jpgPath);
            }
            // Never forget to close the Image, or the reader stalls.
            image.close();
        }
    };
}
package com.example.cameraservicedemo.getpreview;
import android.app.Activity;
import android.os.Bundle;
import com.example.cameraservicedemo.R;
import com.example.cameraservicedemo.video.Camera2VideoFragment;
/**
 * Hosts the CameraFragment that renders the Camera2 preview.
 */
public class PreviewActivity extends Activity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_camera);
        // Attach the fragment only on first creation. The original committed a
        // brand-new CameraFragment in every onResume(), re-creating the
        // fragment after each pause/resume cycle; its empty
        // "if (null == savedInstanceState)" block shows this was the intent.
        if (null == savedInstanceState) {
            getFragmentManager().beginTransaction()
                    .replace(R.id.container, new CameraFragment())
                    .commit();
        }
    }
}
package com.example.cameraservicedemo.getpreview.util;
import android.app.Activity;
import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.media.ImageReader;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
import java.util.Arrays;
public class Camera2Proxy {
private static final String TAG = "Camera2Proxy";
private Activity mActivity;
// camera
private int mCameraId = CameraCharacteristics.LENS_FACING_FRONT; // 要打开的摄像头ID
private Size mPreviewSize = new Size(640, 480); // 固定640*480演示
private CameraDevice mCameraDevice; // 相机对象
private CameraCaptureSession mCaptureSession;
// handler
private Handler mBackgroundHandler;
private HandlerThread mBackgroundThread;
// output
private Surface mPreviewSurface; // 输出到屏幕的预览
private ImageReader mImageReader; // 预览回调的接收者
private ImageReader.OnImageAvailableListener mOnImageAvailableListener;
/**
* 打开摄像头的回调
*/
private CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened( CameraDevice camera) {
Log.d(TAG, "onOpened");
mCameraDevice = camera;
initPreviewRequest();
}
@Override
public void onDisconnected( CameraDevice camera) {
Log.d(TAG, "onDisconnected");
releaseCamera();
}
@Override
public void onError( CameraDevice camera, int error) {
Log.e(TAG, "Camera Open failed, error: " + error);
releaseCamera();
}
};
public Camera2Proxy(Activity activity) {
mActivity = activity;
}
public void openCamera() {
Log.v(TAG, "openCamera");
startBackgroundThread(); // 对应 releaseCamera() 方法中的 stopBackgroundThread()
try {
CameraManager cameraManager = (CameraManager) mActivity.getSystemService(Context.CAMERA_SERVICE);
Log.d(TAG, "preview size: " + mPreviewSize.getWidth() + "*" + mPreviewSize.getHeight());
mImageReader = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(),
ImageFormat.YUV_420_888, 2);
mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, null);
// 打开摄像头
cameraManager.openCamera(Integer.toString(mCameraId), mStateCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
public void releaseCamera() {
Log.v(TAG, "releaseCamera");
if (mImageReader != null) {
mImageReader.close();
mImageReader = null;
}
if (mCaptureSession != null) {
mCaptureSession.close();
mCaptureSession = null;
}
if (mCameraDevice != null) {
mCameraDevice.close();
mCameraDevice = null;
}
stopBackgroundThread(); // 对应 openCamera() 方法中的 startBackgroundThread()
}
public void setImageAvailableListener(ImageReader.OnImageAvailableListener onImageAvailableListener) {
mOnImageAvailableListener = onImageAvailableListener;
}
public void setPreviewSurface(SurfaceTexture surfaceTexture) {
// mPreviewSize必须先初始化完成
surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
mPreviewSurface = new Surface(surfaceTexture);
}
private void initPreviewRequest() {
try {
final CaptureRequest.Builder builder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
// 添加输出到屏幕的surface
builder.addTarget(mPreviewSurface);
// 添加输出到ImageReader的surface。然后我们就可以从ImageReader中获取预览数据了
builder.addTarget(mImageReader.getSurface());
mCameraDevice.createCaptureSession(Arrays.asList(mPreviewSurface, mImageReader.getSurface()),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured( CameraCaptureSession session) {
mCaptureSession = session;
// 设置连续自动对焦和自动曝光
builder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
builder.set(CaptureRequest.CONTROL_AE_MODE,
CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
CaptureRequest captureRequest = builder.build();
try {
// 一直发送预览请求
mCaptureSession.setRepeatingRequest(captureRequest, null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed( CameraCaptureSession session) {
Log.e(TAG, "ConfigureFailed. session: mCaptureSession");
}
}, mBackgroundHandler); // handle 传入 null 表示使用当前线程的 Looper
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
public Size getPreviewSize() {
return mPreviewSize;
}
public void switchCamera() {
mCameraId ^= 1;
Log.d(TAG, "switchCamera: mCameraId: " + mCameraId);
releaseCamera();
openCamera();
}
private void startBackgroundThread() {
if (mBackgroundThread == null || mBackgroundHandler == null) {
Log.v(TAG, "startBackgroundThread");
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
}
private void stopBackgroundThread() {
Log.v(TAG, "stopBackgroundThread");
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
package com.example.cameraservicedemo.getpreview.util;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.util.Log;
import java.nio.ByteBuffer;
/**
 * Helpers for converting packed YUV420p frames to RGBA bitmaps and for
 * rotating/mirroring bitmaps.
 */
public class ColorConvertUtil {
    private static final String TAG = "ColorConvertUtil";

    /**
     * Converts a packed YUV420p frame into a Bitmap.
     *
     * @return the decoded bitmap, or null on invalid arguments.
     */
    public static Bitmap yuv420pToBitmap(byte[] yuv420p, int width, int height) {
        if (yuv420p == null || width < 0 || height < 0) {
            // Fixed: the original logged the wrong method name ("cropNv21ToBitmap").
            Log.e(TAG, "yuv420pToBitmap failed: illegal para !");
            return null;
        }
        byte[] rgba = new byte[width * height * 4];
        ColorConvertUtil.yuv420pToRGBA(yuv420p, width, height, rgba);
        Bitmap bitmap = byteArrayToBitmap(rgba, width, height);
        return bitmap;
    }

    /**
     * Converts YUV420p bytes to RGBA into the supplied output buffer.
     * NOTE(review): the native conversion call is commented out, so this
     * currently only validates its arguments and leaves rgba untouched.
     */
    public static void yuv420pToRGBA(byte[] yuv420p, int width, int height, byte[] rgba) {
        if (yuv420p == null || rgba == null) {
            Log.e(TAG, "yuv420pToRGBA failed: yuv420p or rgba is null ");
            return;
        }
        // A packed YUV420p frame must be exactly width * height * 3 / 2 bytes.
        if (yuv420p.length != width * height * 3 / 2) {
            Log.e(TAG, "yuv420p length: " + yuv420p.length);
            Log.e(TAG, "yuv420pToRGBA failed: yuv420p length error!");
            return;
        }
        // NativeLibrary.yuv420p2rgba(yuv420p, width, height, rgba);
    }

    /**
     * Wraps raw RGBA bytes into an ARGB_8888 bitmap.
     *
     * @param rgba   input RGBA byte data
     * @param width  image width
     * @param height image height
     * @return the resulting bitmap
     */
    public static Bitmap byteArrayToBitmap(byte[] rgba, int width, int height) {
        ByteBuffer buffer = ByteBuffer.wrap(rgba);
        Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        bitmap.copyPixelsFromBuffer(buffer);
        return bitmap;
    }

    /**
     * Returns a rotated (and optionally horizontally mirrored) copy of the
     * bitmap, recycling the input when a new bitmap was actually allocated.
     */
    public static Bitmap rotateBitmap(Bitmap bitmap, int rotate, boolean mirrorX) {
        Matrix matrix = new Matrix();
        matrix.postRotate(rotate);
        if (mirrorX) {
            matrix.postScale(-1f, 1f);
        }
        Bitmap rotateBitmap = null;
        if (bitmap != null) {
            rotateBitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, false);
            // Bitmap.createBitmap may return the SOURCE bitmap when no
            // transformation is applied (rotate == 0, no mirror); recycling
            // unconditionally -- as the original did -- would invalidate the
            // very bitmap being returned.
            if (rotateBitmap != bitmap) {
                bitmap.recycle();
            }
        }
        return rotateBitmap;
    }
}
package com.example.cameraservicedemo.getpreview.util;
import android.graphics.Bitmap;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
/**
 * File helpers for dumping raw bytes and bitmaps under the camera save dir.
 */
public class FileUtil {
    public static final String SAVE_DIR = "/sdcard/DCIM/Camera2GetPreview/";

    /**
     * Writes raw bytes to imagePath, creating parent directories as needed.
     *
     * @return true on success, false on any I/O error.
     */
    public static boolean saveBytes(byte[] bytes, String imagePath) {
        File file = new File(imagePath);
        ensureParentDir(file);
        // try-with-resources closes the stream even when write() throws;
        // the original leaked the FileOutputStream on IOException.
        try (FileOutputStream fos = new FileOutputStream(file)) {
            fos.write(bytes);
            fos.flush();
            return true;
        } catch (IOException e) {
            // FileNotFoundException is an IOException; both were only printed before.
            e.printStackTrace();
        }
        return false;
    }

    /**
     * Compresses the bitmap as JPEG (quality 100) to imagePath.
     *
     * @return true on success, false when bitmap is null or on I/O error.
     */
    public static boolean saveBitmap(Bitmap bitmap, String imagePath) {
        if (bitmap == null) {
            return false;
        }
        File file = new File(imagePath);
        ensureParentDir(file);
        try (FileOutputStream fos = new FileOutputStream(file)) {
            bitmap.compress(Bitmap.CompressFormat.JPEG, 100, fos);
            fos.flush();
            return true;
        } catch (IOException e) {
            e.printStackTrace();
        }
        return false;
    }

    // Creates the parent directory of file if it does not exist yet.
    // getParentFile() may be null for a bare file name; the original NPE'd then.
    private static void ensureParentDir(File file) {
        File parentFile = file.getParentFile();
        if (parentFile != null && !parentFile.exists()) {
            parentFile.mkdirs();
        }
    }
}
package com.example.cameraservicedemo.getpreview.util;
// Placeholder for the JNI bridge referenced by ColorConvertUtil; both the
// native library load and the yuv420p -> RGBA native method are currently
// disabled (commented out).
public class NativeLibrary {
// static {
// System.loadLibrary("native-lib");
// }
// public static native void yuv420p2rgba(byte[] yuv420p,
// int width,
// int height,
// byte[] rgba);
}
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.cameraservicedemo.video;
import android.content.Context;
import android.util.AttributeSet;
import android.view.TextureView;
/**
 * A {@link TextureView} that can be adjusted to a specified aspect ratio.
 */
public class AutoFitTextureView extends TextureView {
    private int mRatioWidth = 0;
    private int mRatioHeight = 0;

    public AutoFitTextureView(Context context) {
        this(context, null);
    }

    public AutoFitTextureView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public AutoFitTextureView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
    }

    /**
     * Sets the aspect ratio for this view. The view is measured based on the
     * ratio of the parameters, not their absolute values: setAspectRatio(2, 3)
     * and setAspectRatio(4, 6) produce the same result.
     *
     * @param width  Relative horizontal size
     * @param height Relative vertical size
     */
    public void setAspectRatio(int width, int height) {
        if (width < 0 || height < 0) {
            throw new IllegalArgumentException("Size cannot be negative.");
        }
        mRatioWidth = width;
        mRatioHeight = height;
        requestLayout();
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        int measuredW = MeasureSpec.getSize(widthMeasureSpec);
        int measuredH = MeasureSpec.getSize(heightMeasureSpec);
        if (mRatioWidth == 0 || mRatioHeight == 0) {
            // No ratio set yet: keep the measured size.
            setMeasuredDimension(measuredW, measuredH);
            return;
        }
        if (measuredW < measuredH * mRatioWidth / mRatioHeight) {
            // Width-bound: derive the height from the ratio.
            setMeasuredDimension(measuredW, measuredW * mRatioHeight / mRatioWidth);
        } else {
            // Height-bound: derive the width from the ratio.
            setMeasuredDimension(measuredH * mRatioWidth / mRatioHeight, measuredH);
        }
    }
}
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.cameraservicedemo.video;
import android.Manifest;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.DialogFragment;
import android.app.Fragment;
import android.content.Context;
import android.content.DialogInterface;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
import android.graphics.Matrix;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.Toast;
import com.example.cameraservicedemo.R;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
public class Camera2VideoFragment extends Fragment
        implements View.OnClickListener {

    private static final int SENSOR_ORIENTATION_DEFAULT_DEGREES = 90;
    private static final int SENSOR_ORIENTATION_INVERSE_DEGREES = 270;
    private static final SparseIntArray DEFAULT_ORIENTATIONS = new SparseIntArray();
    private static final SparseIntArray INVERSE_ORIENTATIONS = new SparseIntArray();
    private static final String TAG = "Camera2VideoFragment";
    private static final int REQUEST_VIDEO_PERMISSIONS = 1;
    private static final String FRAGMENT_DIALOG = "dialog";
    private static final String[] VIDEO_PERMISSIONS = {
            Manifest.permission.CAMERA,
            Manifest.permission.RECORD_AUDIO,
    };

    // Display rotation -> MediaRecorder orientation hint, for sensors reporting
    // an orientation of 90 degrees (the common case).
    static {
        DEFAULT_ORIENTATIONS.append(Surface.ROTATION_0, 90);
        DEFAULT_ORIENTATIONS.append(Surface.ROTATION_90, 0);
        DEFAULT_ORIENTATIONS.append(Surface.ROTATION_180, 270);
        DEFAULT_ORIENTATIONS.append(Surface.ROTATION_270, 180);
    }

    // Same mapping for sensors reporting an orientation of 270 degrees.
    static {
        INVERSE_ORIENTATIONS.append(Surface.ROTATION_0, 270);
        INVERSE_ORIENTATIONS.append(Surface.ROTATION_90, 180);
        INVERSE_ORIENTATIONS.append(Surface.ROTATION_180, 90);
        INVERSE_ORIENTATIONS.append(Surface.ROTATION_270, 0);
    }

    /**
     * An {@link AutoFitTextureView} for camera preview.
     */
    private AutoFitTextureView mTextureView;

    /**
     * Button to record video
     */
    private Button mButtonVideo;

    /**
     * A reference to the opened {@link android.hardware.camera2.CameraDevice}.
     */
    private CameraDevice mCameraDevice;

    /**
     * A reference to the current {@link android.hardware.camera2.CameraCaptureSession} for
     * preview.
     */
    private CameraCaptureSession mPreviewSession;

    /**
     * {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a
     * {@link TextureView}.
     */
    private TextureView.SurfaceTextureListener mSurfaceTextureListener
            = new TextureView.SurfaceTextureListener() {
        @Override
        public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture,
                                              int width, int height) {
            openCamera(width, height);
        }

        @Override
        public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture,
                                                int width, int height) {
            configureTransform(width, height);
        }

        @Override
        public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
            return true;
        }

        @Override
        public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
        }
    };

    /**
     * The {@link android.util.Size} of camera preview.
     */
    private Size mPreviewSize;

    /**
     * The {@link android.util.Size} of video recording.
     */
    private Size mVideoSize;

    /**
     * MediaRecorder
     */
    private MediaRecorder mMediaRecorder;

    /**
     * Whether the app is recording video now
     */
    private boolean mIsRecordingVideo;

    /**
     * An additional thread for running tasks that shouldn't block the UI.
     */
    private HandlerThread mBackgroundThread;

    /**
     * A {@link Handler} for running tasks in the background.
     */
    private Handler mBackgroundHandler;

    /**
     * A {@link Semaphore} to prevent the app from exiting before closing the camera.
     */
    private Semaphore mCameraOpenCloseLock = new Semaphore(1);

    /**
     * {@link CameraDevice.StateCallback} is called when {@link CameraDevice} changes its status.
     */
    private CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {

        @Override
        public void onOpened(CameraDevice cameraDevice) {
            mCameraDevice = cameraDevice;
            startPreview();
            mCameraOpenCloseLock.release();
            if (null != mTextureView) {
                configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
            }
        }

        @Override
        public void onDisconnected(CameraDevice cameraDevice) {
            mCameraOpenCloseLock.release();
            cameraDevice.close();
            mCameraDevice = null;
        }

        @Override
        public void onError(CameraDevice cameraDevice, int error) {
            mCameraOpenCloseLock.release();
            cameraDevice.close();
            mCameraDevice = null;
            Activity activity = getActivity();
            if (null != activity) {
                activity.finish();
            }
        }
    };

    /** Sensor orientation from CameraCharacteristics; may be null on broken HALs. */
    private Integer mSensorOrientation;
    /** Absolute path of the next/last recorded video file. */
    private String mNextVideoAbsolutePath;
    private CaptureRequest.Builder mPreviewBuilder;

    public static Camera2VideoFragment newInstance() {
        return new Camera2VideoFragment();
    }

    /**
     * In this sample, we choose a video size with 3x4 aspect ratio. Also, we don't use sizes
     * larger than 1080p, since MediaRecorder cannot handle such a high-resolution video.
     *
     * @param choices The list of available sizes
     * @return The video size
     */
    private static Size chooseVideoSize(Size[] choices) {
        for (Size size : choices) {
            if (size.getWidth() == size.getHeight() * 4 / 3 && size.getWidth() <= 1080) {
                return size;
            }
        }
        Log.e(TAG, "Couldn't find any suitable video size");
        return choices[choices.length - 1];
    }

    /**
     * Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
     * width and height are at least as large as the respective requested values, and whose aspect
     * ratio matches with the specified value.
     *
     * @param choices     The list of sizes that the camera supports for the intended output class
     * @param width       The minimum desired width
     * @param height      The minimum desired height
     * @param aspectRatio The aspect ratio
     * @return The optimal {@code Size}, or an arbitrary one if none were big enough
     */
    private static Size chooseOptimalSize(Size[] choices, int width, int height, Size aspectRatio) {
        // Collect the supported resolutions that are at least as big as the preview Surface
        List<Size> bigEnough = new ArrayList<>();
        int w = aspectRatio.getWidth();
        int h = aspectRatio.getHeight();
        for (Size option : choices) {
            if (option.getHeight() == option.getWidth() * h / w &&
                    option.getWidth() >= width && option.getHeight() >= height) {
                bigEnough.add(option);
            }
        }
        // Pick the smallest of those, assuming we found any
        if (bigEnough.size() > 0) {
            return Collections.min(bigEnough, new CompareSizesByArea());
        } else {
            Log.e(TAG, "Couldn't find any suitable preview size");
            return choices[0];
        }
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        return inflater.inflate(R.layout.fragment_camera2_video, container, false);
    }

    @Override
    public void onViewCreated(final View view, Bundle savedInstanceState) {
        mTextureView = (AutoFitTextureView) view.findViewById(R.id.texture);
        mButtonVideo = (Button) view.findViewById(R.id.video);
        mButtonVideo.setOnClickListener(this);
        view.findViewById(R.id.info).setOnClickListener(this);
    }

    @Override
    public void onResume() {
        super.onResume();
        startBackgroundThread();
        if (mTextureView.isAvailable()) {
            openCamera(mTextureView.getWidth(), mTextureView.getHeight());
        } else {
            mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
        }
    }

    @Override
    public void onPause() {
        closeCamera();
        stopBackgroundThread();
        super.onPause();
    }

    @Override
    public void onClick(View view) {
        switch (view.getId()) {
            case R.id.video: {
                if (mIsRecordingVideo) {
                    stopRecordingVideo();
                } else {
                    startRecordingVideo();
                }
                break;
            }
            case R.id.info: {
                Activity activity = getActivity();
                if (null != activity) {
                    new AlertDialog.Builder(activity)
                            .setMessage(R.string.intro_message)
                            .setPositiveButton(android.R.string.ok, null)
                            .show();
                }
                break;
            }
        }
    }

    /**
     * Starts a background thread and its {@link Handler}.
     */
    private void startBackgroundThread() {
        mBackgroundThread = new HandlerThread("CameraBackground");
        mBackgroundThread.start();
        mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
    }

    /**
     * Stops the background thread and its {@link Handler}.
     */
    private void stopBackgroundThread() {
        mBackgroundThread.quitSafely();
        try {
            mBackgroundThread.join();
            mBackgroundThread = null;
            mBackgroundHandler = null;
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    /**
     * Gets whether you should show UI with rationale for requesting permissions.
     *
     * @param permissions The permissions your app wants to request.
     * @return Whether you can show permission rationale UI.
     */
//    private boolean shouldShowRequestPermissionRationale(String[] permissions) {
//        for (String permission : permissions) {
//            if (FragmentCompat.shouldShowRequestPermissionRationale(this, permission)) {
//                return true;
//            }
//        }
//        return false;
//    }

    /**
     * Requests permissions needed for recording video.
     */
//    private void requestVideoPermissions() {
//        if (shouldShowRequestPermissionRationale(VIDEO_PERMISSIONS)) {
//            new ConfirmationDialog().show(getChildFragmentManager(), FRAGMENT_DIALOG);
//        } else {
//            FragmentCompat.requestPermissions(this, VIDEO_PERMISSIONS, REQUEST_VIDEO_PERMISSIONS);
//        }
//    }

//    @Override
//    public void onRequestPermissionsResult(int requestCode, String[] permissions,
//                                           int[] grantResults) {
//        Log.d(TAG, "onRequestPermissionsResult");
//        if (requestCode == REQUEST_VIDEO_PERMISSIONS) {
//            if (grantResults.length == VIDEO_PERMISSIONS.length) {
//                for (int result : grantResults) {
//                    if (result != PackageManager.PERMISSION_GRANTED) {
//                        ErrorDialog.newInstance(getString(R.string.permission_request))
//                                .show(getChildFragmentManager(), FRAGMENT_DIALOG);
//                        break;
//                    }
//                }
//            } else {
//                ErrorDialog.newInstance(getString(R.string.permission_request))
//                        .show(getChildFragmentManager(), FRAGMENT_DIALOG);
//            }
//        } else {
////            super.onRequestPermissionsResult(requestCode, permissions, grantResults);
//        }
//    }

//    private boolean hasPermissionsGranted(String[] permissions) {
//        for (String permission : permissions) {
//            if (ActivityCompat.checkSelfPermission(getActivity(), permission)
//                    != PackageManager.PERMISSION_GRANTED) {
//                return false;
//            }
//        }
//        return true;
//    }

    /**
     * Tries to open a {@link CameraDevice}. The result is listened by `mStateCallback`.
     */
    @SuppressWarnings("MissingPermission")
    private void openCamera(int width, int height) {
//        if (!hasPermissionsGranted(VIDEO_PERMISSIONS)) {
////            requestVideoPermissions();
//            Toast.makeText(getActivity(), "缺少权限", Toast.LENGTH_SHORT).show();
//            return;
//        }
        final Activity activity = getActivity();
        if (null == activity || activity.isFinishing()) {
            return;
        }
        CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
        try {
            Log.d(TAG, "tryAcquire");
            if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
                throw new RuntimeException("Time out waiting to lock camera opening.");
            }
            String cameraId = manager.getCameraIdList()[0];
            // Choose the sizes for camera preview and video recording
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            if (map == null) {
                throw new RuntimeException("Cannot get available preview/video sizes");
            }
            mVideoSize = chooseVideoSize(map.getOutputSizes(MediaRecorder.class));
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
                    width, height, mVideoSize);
            int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }
            configureTransform(width, height);
            mMediaRecorder = new MediaRecorder();
            manager.openCamera(cameraId, mStateCallback, null);
        } catch (CameraAccessException e) {
            // This can only be thrown after the semaphore was acquired above;
            // release it so a later openCamera()/closeCamera() can't deadlock.
            mCameraOpenCloseLock.release();
            Toast.makeText(activity, "Cannot access the camera.", Toast.LENGTH_SHORT).show();
            activity.finish();
        } catch (NullPointerException e) {
            // Currently an NPE is thrown when the Camera2API is used but not supported on the
            // device this code runs.
            // NOTE(review): the semaphore may still be held here; we don't release it because
            // the NPE could also occur before tryAcquire's success is certain — confirm.
            ErrorDialog.newInstance(getString(R.string.camera_error))
                    .show(getChildFragmentManager(), FRAGMENT_DIALOG);
        } catch (InterruptedException e) {
            throw new RuntimeException("Interrupted while trying to lock camera opening.");
        }
    }

    /**
     * Closes the camera device and releases the MediaRecorder, holding the
     * open/close semaphore for the duration.
     */
    private void closeCamera() {
        try {
            mCameraOpenCloseLock.acquire();
            closePreviewSession();
            if (null != mCameraDevice) {
                mCameraDevice.close();
                mCameraDevice = null;
            }
            if (null != mMediaRecorder) {
                mMediaRecorder.release();
                mMediaRecorder = null;
            }
        } catch (InterruptedException e) {
            throw new RuntimeException("Interrupted while trying to lock camera closing.");
        } finally {
            mCameraOpenCloseLock.release();
        }
    }

    /**
     * Start the camera preview.
     */
    private void startPreview() {
        if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
            return;
        }
        try {
            closePreviewSession();
            SurfaceTexture texture = mTextureView.getSurfaceTexture();
            assert texture != null;
            texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            Surface previewSurface = new Surface(texture);
            mPreviewBuilder.addTarget(previewSurface);
            // Bugfix: the previous version overrode onClosed() here and stopped the
            // background thread whenever the session closed. closePreviewSession() is
            // called at the start of startPreview()/startRecordingVideo(), so that
            // killed mBackgroundHandler right before it was handed to the next
            // createCaptureSession() call. The background thread is owned by
            // onResume()/onPause() only.
            mCameraDevice.createCaptureSession(Collections.singletonList(previewSurface),
                    new CameraCaptureSession.StateCallback() {

                        @Override
                        public void onConfigured(CameraCaptureSession session) {
                            mPreviewSession = session;
                            updatePreview();
                        }

                        @Override
                        public void onConfigureFailed(CameraCaptureSession session) {
                            Activity activity = getActivity();
                            if (null != activity) {
                                Toast.makeText(activity, "Failed", Toast.LENGTH_SHORT).show();
                            }
                        }
                    }, mBackgroundHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    /**
     * Update the camera preview. {@link #startPreview()} needs to be called in advance.
     */
    private void updatePreview() {
        if (null == mCameraDevice) {
            return;
        }
        try {
            setUpCaptureRequestBuilder(mPreviewBuilder);
            // Bugfix: a throwaway HandlerThread("CameraPreview") used to be created and
            // started here but was never used and never quit — a thread leak on every
            // call. The repeating request runs on mBackgroundHandler.
            mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, mBackgroundHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    private void setUpCaptureRequestBuilder(CaptureRequest.Builder builder) {
        builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
    }

    /**
     * Configures the necessary {@link android.graphics.Matrix} transformation to `mTextureView`.
     * This method should not to be called until the camera preview size is determined in
     * openCamera, or until the size of `mTextureView` is fixed.
     *
     * @param viewWidth  The width of `mTextureView`
     * @param viewHeight The height of `mTextureView`
     */
    private void configureTransform(int viewWidth, int viewHeight) {
        Activity activity = getActivity();
        if (null == mTextureView || null == mPreviewSize || null == activity) {
            return;
        }
        int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        Matrix matrix = new Matrix();
        RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
        RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
        float centerX = viewRect.centerX();
        float centerY = viewRect.centerY();
        if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
            bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
            matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
            float scale = Math.max(
                    (float) viewHeight / mPreviewSize.getHeight(),
                    (float) viewWidth / mPreviewSize.getWidth());
            matrix.postScale(scale, scale, centerX, centerY);
            matrix.postRotate(90 * (rotation - 2), centerX, centerY);
        }
        mTextureView.setTransform(matrix);
    }

    /**
     * Configures the MediaRecorder (sources, format, encoders, output file,
     * orientation hint) and calls prepare().
     *
     * @throws IOException if prepare() fails
     */
    private void setUpMediaRecorder() throws IOException {
        final Activity activity = getActivity();
        if (null == activity) {
            return;
        }
        mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
        mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
        mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
        if (mNextVideoAbsolutePath == null || mNextVideoAbsolutePath.isEmpty()) {
            mNextVideoAbsolutePath = getVideoFilePath(getActivity());
        }
        mMediaRecorder.setOutputFile(mNextVideoAbsolutePath);
        mMediaRecorder.setVideoEncodingBitRate(10000000);
        mMediaRecorder.setVideoFrameRate(30);
        mMediaRecorder.setVideoSize(mVideoSize.getWidth(), mVideoSize.getHeight());
        mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
        mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
        int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        // Bugfix: mSensorOrientation is a (nullable) Integer; switching on it
        // unguarded would throw an NPE on devices that don't report it.
        if (mSensorOrientation != null) {
            switch (mSensorOrientation) {
                case SENSOR_ORIENTATION_DEFAULT_DEGREES:
                    mMediaRecorder.setOrientationHint(DEFAULT_ORIENTATIONS.get(rotation));
                    break;
                case SENSOR_ORIENTATION_INVERSE_DEGREES:
                    mMediaRecorder.setOrientationHint(INVERSE_ORIENTATIONS.get(rotation));
                    break;
            }
        }
        mMediaRecorder.prepare();
    }

    /**
     * Builds a timestamped .mp4 path in the app's external files directory.
     */
    private String getVideoFilePath(Context context) {
        final File dir = context.getExternalFilesDir(null);
        return (dir == null ? "" : (dir.getAbsolutePath() + "/"))
                + System.currentTimeMillis() + ".mp4";
    }

    /**
     * Recreates the capture session with both the preview surface and the
     * MediaRecorder surface, then starts recording once it is configured.
     */
    private void startRecordingVideo() {
        if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
            return;
        }
        try {
            closePreviewSession();
            setUpMediaRecorder();
            SurfaceTexture texture = mTextureView.getSurfaceTexture();
            assert texture != null;
            texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
            List<Surface> surfaces = new ArrayList<>();
            // Set up Surface for the camera preview
            Surface previewSurface = new Surface(texture);
            surfaces.add(previewSurface);
            mPreviewBuilder.addTarget(previewSurface);
            // Set up Surface for the MediaRecorder
            Surface recorderSurface = mMediaRecorder.getSurface();
            surfaces.add(recorderSurface);
            mPreviewBuilder.addTarget(recorderSurface);
            // Start a capture session
            // Once the session starts, we can update the UI and start recording
            mCameraDevice.createCaptureSession(surfaces, new CameraCaptureSession.StateCallback() {

                @Override
                public void onConfigured(CameraCaptureSession cameraCaptureSession) {
                    mPreviewSession = cameraCaptureSession;
                    updatePreview();
                    // Bugfix: the fragment may have been detached while the session was
                    // configuring; getActivity() would then be null.
                    Activity activity = getActivity();
                    if (null == activity) {
                        return;
                    }
                    activity.runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            // UI
                            mButtonVideo.setText(R.string.stop);
                            mIsRecordingVideo = true;
                            // Start recording
                            mMediaRecorder.start();
                        }
                    });
                }

                @Override
                public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
                    Activity activity = getActivity();
                    if (null != activity) {
                        Toast.makeText(activity, "Failed", Toast.LENGTH_SHORT).show();
                    }
                }
            }, mBackgroundHandler);
        } catch (CameraAccessException | IOException e) {
            e.printStackTrace();
        }
    }

    private void closePreviewSession() {
        if (mPreviewSession != null) {
            mPreviewSession.close();
            mPreviewSession = null;
        }
    }

    /**
     * Stops recording, restores the UI, and restarts the plain preview.
     */
    private void stopRecordingVideo() {
        // UI
        mIsRecordingVideo = false;
        mButtonVideo.setText(R.string.record);
        // Stop recording. MediaRecorder.stop() throws a RuntimeException when no
        // valid data has been received (e.g. stop tapped immediately after start);
        // don't let that crash the app.
        try {
            mMediaRecorder.stop();
        } catch (RuntimeException e) {
            Log.w(TAG, "MediaRecorder.stop() failed; output file may be unusable", e);
        }
        mMediaRecorder.reset();
        Activity activity = getActivity();
        if (null != activity) {
            Toast.makeText(activity, "Video saved: " + mNextVideoAbsolutePath,
                    Toast.LENGTH_SHORT).show();
            Log.d(TAG, "Video saved: " + mNextVideoAbsolutePath);
        }
        mNextVideoAbsolutePath = null;
        startPreview();
    }

    /**
     * Compares two {@code Size}s based on their areas.
     */
    static class CompareSizesByArea implements Comparator<Size> {

        @Override
        public int compare(Size lhs, Size rhs) {
            // We cast here to ensure the multiplications won't overflow
            return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
                    (long) rhs.getWidth() * rhs.getHeight());
        }
    }

    /**
     * Modal dialog showing a fatal error message; finishes the activity on OK.
     */
    public static class ErrorDialog extends DialogFragment {

        private static final String ARG_MESSAGE = "message";

        public static ErrorDialog newInstance(String message) {
            ErrorDialog dialog = new ErrorDialog();
            Bundle args = new Bundle();
            args.putString(ARG_MESSAGE, message);
            dialog.setArguments(args);
            return dialog;
        }

        @Override
        public Dialog onCreateDialog(Bundle savedInstanceState) {
            final Activity activity = getActivity();
            return new AlertDialog.Builder(activity)
                    .setMessage(getArguments().getString(ARG_MESSAGE))
                    .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
                        @Override
                        public void onClick(DialogInterface dialogInterface, int i) {
                            activity.finish();
                        }
                    })
                    .create();
        }
    }

//    public static class ConfirmationDialog extends DialogFragment {
//
//        @Override
//        public Dialog onCreateDialog(Bundle savedInstanceState) {
//            final Fragment parent = getParentFragment();
//            return new AlertDialog.Builder(getActivity())
//                    .setMessage(R.string.permission_request)
//                    .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
//                        @Override
//                        public void onClick(DialogInterface dialog, int which) {
//                            FragmentCompat.requestPermissions(parent, VIDEO_PERMISSIONS,
//                                    REQUEST_VIDEO_PERMISSIONS);
//                        }
//                    })
//                    .setNegativeButton(android.R.string.cancel,
//                            new DialogInterface.OnClickListener() {
//                                @Override
//                                public void onClick(DialogInterface dialog, int which) {
//                                    parent.getActivity().finish();
//                                }
//                            })
//                    .create();
//    }
//
//    }
}
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.cameraservicedemo.video;
import android.app.Activity;
import android.os.Bundle;
import com.example.cameraservicedemo.R;
public class CameraActivity extends Activity {

    /**
     * Hosts {@link Camera2VideoFragment}. The fragment is added only on the very
     * first creation; on configuration-change restarts the fragment manager
     * restores the existing instance.
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_camera);
        boolean firstCreation = (savedInstanceState == null);
        if (firstCreation) {
            getFragmentManager()
                    .beginTransaction()
                    .replace(R.id.container, Camera2VideoFragment.newInstance())
                    .commit();
        }
    }
}
package com.example.cameraservicedemo.view;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.util.AttributeSet;
import android.widget.RelativeLayout;
/**
* author: jack
* date: 2018/8/30 18:12
* desc:Launcher界面基类
*/
/**
 * Base class for launcher-screen views. (Original note: "Launcher 界面基类" —
 * launcher UI base class.)
 */
public abstract class BaseLauncherView extends RelativeLayout {
    private static final String TAG = "BaseLauncherView";

    // UI state constants.
    public static final int STATE_NO_CONNECT = 0;
    public static final int STATE_CONNECT = 1;
    public static final int STATE_CALL = 2;

    protected final Context mContext;
    // Context created against Constants.PACKAGE_NAME for reading that app's
    // resources; stays null when that package is not installed.
    protected Context mReadResContext;
    // view
    protected RelativeLayout mRlRoot;

    public BaseLauncherView(Context context) {
        this(context, null);
    }

    public BaseLauncherView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public BaseLauncherView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        this.mContext = context;
        try {
            mReadResContext = mContext.createPackageContext(Constants.PACKAGE_NAME,
                    Context.CONTEXT_IGNORE_SECURITY | Context.CONTEXT_INCLUDE_CODE);
        } catch (PackageManager.NameNotFoundException e) {
            // Target package not installed; mReadResContext remains null.
            e.printStackTrace();
        }
        onInit(context, attrs, defStyleAttr);
    }

    /** Called from the constructor so subclasses can inflate and bind their UI. */
    public abstract void onInit(Context context, AttributeSet attrs, int defStyleAttr);

    /**
     * Launches the target app ({@link Constants#PACKAGE_NAME}).
     */
    protected void startupApp() {
        Intent targetIntent = getContext().getPackageManager()
                .getLaunchIntentForPackage(Constants.PACKAGE_NAME);
        // Bugfix: getLaunchIntentForPackage() returns null when the package is not
        // installed or has no launcher activity; startActivity(null) would throw
        // a NullPointerException.
        if (targetIntent == null) {
            return;
        }
        mContext.startActivity(targetIntent);
    }

    /** Sends the user to the system home screen. */
    protected void toHome() {
        Intent home = new Intent(Intent.ACTION_MAIN);
        home.addCategory(Intent.CATEGORY_HOME);
        getContext().startActivity(home);
    }
}
package com.example.cameraservicedemo.view;
public class Constants {
    // Package name of the app launched from the launcher view.
    public static final String PACKAGE_NAME = "com.cneeds.arcamera";
}
package com.example.cameraservicedemo.view;
import android.content.Context;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.LinearLayout;
import com.example.cameraservicedemo.R;
/**
 * Launcher tile that starts the target app when its root is tapped.
 */
public class ItemView extends BaseLauncherView {

    /** For programmatic creation. */
    public ItemView(Context context) {
        super(context);
    }

    /**
     * Bugfix: required for XML inflation — LayoutInflater instantiates custom
     * views via the (Context, AttributeSet) constructor, and Java constructors
     * are not inherited, so this was previously missing and inflating
     * &lt;ItemView&gt; from a layout would crash.
     */
    public ItemView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    public ItemView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
    }

    @Override
    public void onInit(Context context, AttributeSet attrs, int defStyleAttr) {
        LayoutInflater.from(context).inflate(R.layout.launcher_view, this);
        LinearLayout root = (LinearLayout) findViewById(R.id.ll_root);
        root.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                startupApp();
            }
        });
    }
}
<?xml version="1.0" encoding="utf-8"?><!--
 Copyright 2014 The Android Open Source Project
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at
     http://www.apache.org/licenses/LICENSE-2.0
 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
-->
<!-- Single full-screen fragment container; CameraActivity installs
     Camera2VideoFragment into @id/container on first creation. -->
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:id="@+id/container"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:background="#000" />
<?xml version="1.0" encoding="utf-8"?>
<!-- Camera (API 1) screen: preview container on top, capture button and
     last-shot thumbnail below.
     Fix: "fill_parent" (deprecated since API 8) replaced with the identical
     "match_parent". -->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:orientation="vertical">

    <!-- The camera preview SurfaceView is added here at runtime. -->
    <FrameLayout
        android:id="@+id/camera_preview"
        android:layout_width="match_parent"
        android:layout_height="match_parent"
        android:layout_weight="1" />

    <Button
        android:id="@+id/button_capture"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_gravity="center"
        android:text="Capture" />

    <!-- Shows the most recently captured image. -->
    <ImageView
        android:id="@+id/iv_preview"
        android:layout_width="100dp"
        android:layout_height="100dp"
        android:src="@mipmap/ic_launcher" />
</LinearLayout>
<?xml version="1.0" encoding="utf-8"?>
<!-- Full-screen TextureView preview with open/close camera buttons
     overlaid on top (the inner LinearLayout sits above the preview). -->
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:orientation="vertical" android:layout_width="match_parent"
    android:layout_height="match_parent">
    <TextureView
        android:id="@+id/texture_view"
        android:layout_width="match_parent"
        android:layout_height="match_parent" />
    <!-- Horizontal row of control buttons (default LinearLayout orientation). -->
    <LinearLayout
        android:layout_width="match_parent"
        android:layout_height="match_parent">
        <Button
            android:id="@+id/btn_open_camera"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:padding="10dp"
            android:text="打开摄像头"/>
        <Button
            android:id="@+id/btn_close_camera"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:padding="10dp"
            android:text="关闭摄像头"/>
    </LinearLayout>
</FrameLayout>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<!-- Demo chooser menu: a vertical stack of buttons, one per demo screen. -->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:orientation="vertical">

    <Button
        android:id="@+id/btn_camera2"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:padding="20dp"
        android:text="Camera2" />

    <Button
        android:id="@+id/btn_camera1"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:padding="20dp"
        android:text="camera1" />

    <Button
        android:id="@+id/btn_preview"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:padding="20dp"
        android:text="btn_preview" />

    <Button
        android:id="@+id/btn_video"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:padding="20dp"
        android:text="video" />

    <Button
        android:id="@+id/btn_mtk_camera"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:padding="20dp"
        android:text="MTK Camera" />
</LinearLayout>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<!-- MTK camera demo: SurfaceView preview with open/close buttons overlaid. -->
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:orientation="vertical" android:layout_width="match_parent"
    android:layout_height="match_parent">
    <!-- Wrapper around the preview surface. -->
    <FrameLayout
        android:id="@+id/big_layout"
        android:layout_width="match_parent"
        android:layout_height="match_parent" >
        <SurfaceView
            android:id="@+id/sv_mkt"
            android:layout_width="match_parent"
            android:layout_height="match_parent" />
    </FrameLayout>
    <!-- Horizontal row of control buttons drawn above the preview. -->
    <LinearLayout
        android:layout_width="match_parent"
        android:layout_height="match_parent">
        <Button
            android:id="@+id/btn_open_camera"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:padding="10dp"
            android:text="打开摄像头"/>
        <Button
            android:id="@+id/btn_close_camera"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:padding="10dp"
            android:text="关闭摄像头"/>
    </LinearLayout>
</FrameLayout>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<!-- Still-capture screen: pseudo-toolbar (close / switch-camera icons) on top,
     Camera2View preview below it, shutter button anchored at the bottom. -->
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:app="http://schemas.android.com/apk/res-auto"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:background="@color/colorPrimary">
<!--    <android.support.v7.widget.Toolbar-->
<!--        android:id="@+id/toolbar"-->
<!--        android:layout_width="match_parent"-->
<!--        android:layout_height="?android:actionBarSize"-->
<!--        android:background="@color/colorPrimary"-->
<!--        app:contentInsetStart="0dp">-->
    <!-- Plain LinearLayout standing in for the support-library Toolbar above. -->
    <LinearLayout
        android:id="@+id/toolbar"
        android:layout_width="match_parent"
        android:layout_height="?android:actionBarSize"
        android:orientation="horizontal"
        android:padding="10dp">
        <ImageView
            android:id="@+id/toolbar_close_iv"
            android:layout_width="45dp"
            android:layout_height="45dp"
            android:layout_gravity="center_vertical"
            android:padding="10dp"
            android:src="@mipmap/ic_close"/>
        <!-- Flexible spacer pushing the switch icon to the right edge. -->
        <Space
            android:layout_width="0dp"
            android:layout_height="0dp"
            android:layout_weight="1"/>
        <ImageView
            android:id="@+id/toolbar_switch_iv"
            android:layout_width="45dp"
            android:layout_height="45dp"
            android:layout_gravity="center_vertical"
            android:padding="10dp"
            android:src="@mipmap/ic_camera_switch"/>
    </LinearLayout>
<!--    </android.support.v7.widget.Toolbar>-->
    <com.example.cameraservicedemo.getpreview.Camera2View
        android:id="@+id/camera_view"
        android:layout_width="match_parent"
        android:layout_height="match_parent"
        android:layout_below="@+id/toolbar">
    </com.example.cameraservicedemo.getpreview.Camera2View>
    <!-- Shutter button, centered at the bottom above the preview. -->
    <ImageView
        android:id="@+id/take_picture_iv"
        android:layout_width="60dp"
        android:layout_height="60dp"
        android:layout_alignParentBottom="true"
        android:layout_centerHorizontal="true"
        android:layout_marginBottom="20dp"
        android:src="@mipmap/ic_launcher"/>
</RelativeLayout>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?><!--
 Copyright 2014 The Android Open Source Project
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at
     http://www.apache.org/licenses/LICENSE-2.0
 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
-->
<!-- Camera2 video recording screen: auto-fitting preview at the top,
     record/stop button and info button in a bar below it. -->
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent">
    <!-- Sized at runtime by AutoFitTextureView.setAspectRatio(). -->
    <com.example.cameraservicedemo.video.AutoFitTextureView
        android:id="@+id/texture"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_alignParentStart="true"
        android:layout_alignParentTop="true" />
    <!-- Control bar filling the space between the preview and the bottom edge. -->
    <FrameLayout
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_alignParentBottom="true"
        android:layout_alignParentStart="true"
        android:layout_below="@id/texture"
        android:background="#4285f4">
        <Button
            android:id="@+id/video"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:layout_gravity="center"
            android:text="@string/record" />
        <ImageButton
            android:id="@+id/info"
            android:contentDescription="@string/description_info"
            style="@android:style/Widget.Material.Light.Button.Borderless"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:layout_gravity="center_vertical|right"
            android:padding="20dp"
            android:src="@mipmap/ic_launcher" />
    </FrameLayout>
</RelativeLayout>
<?xml version="1.0" encoding="utf-8"?>
<!-- Content inflated by ItemView: a centered label; tapping the root
     launches the target app.
     NOTE(review): the label text is hardcoded rather than a @string resource —
     consider extracting it. -->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:orientation="vertical" android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:id="@+id/ll_root"
    android:gravity="center">
    <TextView
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:textSize="25sp"
        android:text="CameraServiceDemo"/>
</LinearLayout>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<!-- App color palette. -->
<resources>
    <color name="colorPrimary">#6200EE</color>
    <color name="colorPrimaryDark">#3700B3</color>
    <color name="colorAccent">#03DAC5</color>
</resources>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<resources>
    <!-- NOTE(review): consumer of this value is not visible in this change;
         presumably a slot/index read by a hosting launcher — confirm. -->
    <integer name="launcher_index">7</integer>
</resources>
\ No newline at end of file
<resources>
    <string name="app_name">CameraServiceDemo</string>
    <!-- Shown by the info dialog in Camera2VideoFragment. -->
    <string name="intro_message">
        <![CDATA[
        This sample demonstrates how to record video using Camera2 API.
        ]]>
    </string>
    <!-- Record/stop toggle labels for the video button. -->
    <string name="record">Record</string>
    <string name="stop">Stop</string>
    <string name="description_info">Info</string>
    <!-- Error/permission messages. -->
    <string name="permission_request">This sample needs permission for camera and audio recording.</string>
    <string name="camera_error">This device doesn\'t support Camera2 API.</string>
</resources>
\ No newline at end of file
<resources>
    <!-- Base application theme. -->
    <style name="AppTheme" parent="@android:style/Theme.NoTitleBar.Fullscreen">
        <item name="android:windowAnimationStyle">@style/Animation</item>
    </style>
    <style name="MyAppTheme" parent="@android:style/Theme.NoTitleBar.Fullscreen">
        <item name="android:colorEdgeEffect">#666666</item>
    </style>
    <!-- Disables every activity/task transition animation (all set to @null),
         giving instant screen switches. -->
    <style name="Animation">
        <item name="android:activityOpenEnterAnimation">@null</item>
        <item name="android:activityOpenExitAnimation">@null</item>
        <item name="android:activityCloseEnterAnimation">@null</item>
        <item name="android:activityCloseExitAnimation">@null</item>
        <item name="android:taskOpenEnterAnimation">@null</item>
        <item name="android:taskOpenExitAnimation">@null</item>
        <item name="android:taskCloseEnterAnimation">@null</item>
        <item name="android:taskCloseExitAnimation">@null</item>
        <item name="android:taskToFrontEnterAnimation">@null</item>
        <item name="android:taskToFrontExitAnimation">@null</item>
        <item name="android:taskToBackEnterAnimation">@null</item>
        <item name="android:taskToBackExitAnimation">@null</item>
    </style>
</resources>
\ No newline at end of file
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
    repositories {
        google()
        // JCenter was sunset in 2021 and is read-only; mavenCentral() is the
        // recommended replacement. jcenter() is kept after it (backward
        // compatible) for any legacy artifacts not yet migrated.
        mavenCentral()
        jcenter()
    }
    dependencies {
        classpath "com.android.tools.build:gradle:3.1.2"
        // NOTE: Do not place your application dependencies here; they belong
        // in the individual module build.gradle files
    }
}
allprojects {
    repositories {
        google()
        mavenCentral()
        jcenter()
    }
}
// `gradle clean` deletes the root project's build directory.
task clean(type: Delete) {
    delete rootProject.buildDir
}
\ No newline at end of file
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx2048m
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
# AndroidX package structure to make it clearer which packages are bundled with the
# Android operating system, and which are packaged with your app's APK
# https://developer.android.com/topic/libraries/support-library/androidx-rn
android.useAndroidX=true
# Automatically convert third-party libraries to use AndroidX
android.enableJetifier=true
\ No newline at end of file
#Mon Jul 27 15:04:13 CST 2020
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-6.1.1-all.zip
#!/usr/bin/env sh
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# NOTE(review): this is the standard vendored Gradle wrapper launcher;
# avoid local edits so future `gradle wrapper` upgrades apply cleanly.
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
# Resolve the physical directory containing this script (-P follows
# symlinks), then restore the caller's working directory.
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
# Print a non-fatal diagnostic message.
warn () {
echo "$*"
}
# Print a message and terminate the script with exit status 1.
die () {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
# The wrapper jar that bootstraps the requested Gradle distribution.
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
# Rebuild the positional parameters from the converted args (max 9 supported).
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Escape application args
# (single-quotes each argument so the later `eval set --` cannot re-split it)
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=$(save "$@")
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
cd "$(dirname "$0")"
fi
exec "$JAVACMD" "$@"
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem NOTE(review): standard vendored Gradle wrapper launcher; avoid local
@rem edits so future `gradle wrapper` upgrades apply cleanly.
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
@rem Strip any surrounding quotes from JAVA_HOME before building the path.
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega
// Gradle settings: declares the modules in this build and the root project name.
include ':app'
rootProject.name = "CameraServiceDemo"
\ No newline at end of file
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment