Android Face Recognition Notes
The face recognition SDK used here is ArcSoft's ArcFace SDK, which is free to use.
ArcSoft official site: http://www.arcsoft.com.cn/ai/arcface.html
What the finished faceDemo looks like:
Below are notes on some problems encountered while implementing the project.
1. Calling the system camera
Here I use FileProvider.getUriForFile() to obtain the Uri instead of Uri.fromFile(). Android 7.0 tightened the system permission rules to improve the security of private files: an app is no longer allowed to expose a file:// Uri outside its own process, and doing so throws android.os.FileUriExposedException.
Since my phone runs Android 7.1, I use the FileProvider approach below. On devices below Android 7.0 you can call Uri.fromFile() directly; to stay compatible, check the current system version at runtime and pick the right call, as in the sketch below.
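A minimal sketch of such a version check (not from the original post; the method name openSystemCamera, the file name face.jpg, the REQUEST_CODE_CAMERA constant, and the ".provider" authority suffix are assumptions and must match the FileProvider setup described next):

// Hypothetical helper inside the Activity: launch the system camera and save the photo
// to a file under external storage (the directory mapped by provider_paths.xml below).
private void openSystemCamera() {
    File photoFile = new File(Environment.getExternalStorageDirectory(), "face.jpg");
    Uri photoUri;
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
        // Android 7.0+: the authority string must equal android:authorities in the manifest
        photoUri = FileProvider.getUriForFile(this, getPackageName() + ".provider", photoFile);
    } else {
        photoUri = Uri.fromFile(photoFile);
    }
    Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
    intent.putExtra(MediaStore.EXTRA_OUTPUT, photoUri);
    intent.addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION | Intent.FLAG_GRANT_WRITE_URI_PERMISSION);
    startActivityForResult(intent, REQUEST_CODE_CAMERA); // REQUEST_CODE_CAMERA: arbitrary request code
}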
Setting up FileProvider.getUriForFile()
Step 1: register the provider in the manifest
<!-- the provider authority declared here must match the one used in your code -->
<provider
    android:name="android.support.v4.content.FileProvider"
    android:authorities="your.package.name.provider"
    android:exported="false"
    android:grantUriPermissions="true">
    <meta-data
        android:name="android.support.FILE_PROVIDER_PATHS"
        android:resource="@xml/provider_paths" />
</provider>

Step 2: create res/xml/provider_paths.xml
<?xml version="1.0" encoding="utf-8"?>
<paths>
    <external-path name="external_files" path="." />
</paths>

Note: the provider authority used in your code must match the one declared in the manifest, otherwise you will get a null pointer error.
2. Receiving the camera photo in the Activity callback
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    // requestCode identifies which request this result belongs to;
    // compare it with the second argument passed to startActivityForResult()
    // resultCode is the system confirmation code: Activity.RESULT_OK means success,
    // Activity.RESULT_CANCELED means the operation was cancelled
}

3. Using the ArcSoft SDK
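The registration code below works on a Bitmap named bp; the original post does not show where it comes from. A minimal sketch, assuming the photo was written to the hypothetical photoFile used when launching the camera in section 1:

// Inside onActivityResult(), after checking the request code and resultCode == Activity.RESULT_OK
// (photoFile is the hypothetical file from the section 1 sketch):
Bitmap bp = BitmapFactory.decodeFile(photoFile.getAbsolutePath());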
// Convert the bitmap into NV21, the image format used by the ArcSoft SDK
byte[] data = new byte[bp.getWidth() * bp.getHeight() * 3 / 2];
ImageConverter convert = new ImageConverter();
convert.initial(bp.getWidth(), bp.getHeight(), ImageConverter.CP_PAF_NV21);
if (convert.convert(bp, data)) {
    Log.d(TAG, "convert ok!");
}
convert.destroy();

// Face detection: the result includes the position of each face rectangle
AFD_FSDKEngine FD_engine = new AFD_FSDKEngine();
AFD_FSDKVersion FD_version = new AFD_FSDKVersion();
// List holding the detected faces
List<AFD_FSDKFace> FD_result = new ArrayList<AFD_FSDKFace>();
// First initialize the face detection engine
AFD_FSDKError FD_error = FD_engine.AFD_FSDK_InitialFaceEngine(FaceDB.appid, FaceDB.fd_key, AFD_FSDKEngine.AFD_OPF_0_HIGHER_EXT, 16, 5);
if (FD_error.getCode() != AFD_FSDKError.MOK) {
    Toast.makeText(Register.this, "FD initialization failed, error code: " + FD_error.getCode(), Toast.LENGTH_SHORT).show();
}
// The input data is NV21; the detection results are stored in FD_result
FD_error = FD_engine.AFD_FSDK_StillImageFaceDetection(data, bp.getWidth(), bp.getHeight(), AFD_FSDKEngine.CP_PAF_NV21, FD_result);

// Draw the bounding boxes
Bitmap bitmap = Bitmap.createBitmap(bp.getWidth(), bp.getHeight(), bp.getConfig());
Canvas canvas = new Canvas(bitmap);
canvas.drawBitmap(bp, 0, 0, null);
Paint mPaint = new Paint();
for (AFD_FSDKFace face : FD_result) {
    mPaint.setColor(Color.RED);
    mPaint.setStrokeWidth(10.0f);
    mPaint.setStyle(Paint.Style.STROKE);
    canvas.drawRect(face.getRect(), mPaint);
}
// canvas.restore();
bp = bitmap;
imageView.setImageBitmap(bp);

if (!FD_result.isEmpty()) {
    // Extract the face feature data
    AFR_FSDKVersion FR_version1 = new AFR_FSDKVersion();
    AFR_FSDKEngine FR_engine1 = new AFR_FSDKEngine();
    // Holds the extracted face feature
    AFR_FSDKFace FR_result1 = new AFR_FSDKFace();
    // Initialize the recognition engine
    AFR_FSDKError FR_error1 = FR_engine1.AFR_FSDK_InitialEngine(FaceDB.appid, FaceDB.fr_key);
    if (FR_error1.getCode() != AFR_FSDKError.MOK) {
        Toast.makeText(Register.this, "FR initialization failed, error code: " + FR_error1.getCode(), Toast.LENGTH_SHORT).show();
    }
    // Extract the face feature
    FR_error1 = FR_engine1.AFR_FSDK_ExtractFRFeature(data, bp.getWidth(), bp.getHeight(), AFR_FSDKEngine.CP_PAF_NV21, new Rect(FD_result.get(0).getRect()), FD_result.get(0).getDegree(), FR_result1);
    if (FR_error1.getCode() != AFR_FSDKError.MOK) {
        Toast.makeText(Register.this, "Could not extract face features, please use a different picture", Toast.LENGTH_SHORT).show();
    } else {
        mAFR_FSDKFace = FR_result1.clone(); // copy the feature
        // Crop the face region
        int width = FD_result.get(0).getRect().width();
        int height = FD_result.get(0).getRect().height();
        Bitmap face_bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
        Canvas face_canvas = new Canvas(face_bitmap);
        face_canvas.drawBitmap(bp, FD_result.get(0).getRect(), new Rect(0, 0, width, height), null);
        // Display the cropped face
        imageView2.setImageBitmap(face_bitmap);
        // Add the face feature to the face database
        // App.mFaceDB.addFace("name", mAFR_FSDKFace);
    }
    // Release the recognition engine
    FR_error1 = FR_engine1.AFR_FSDK_UninitialEngine();
} else {
    Toast.makeText(Register.this, "No face detected", Toast.LENGTH_SHORT).show();
}
FD_error = FD_engine.AFD_FSDK_UninitialFaceEngine();

Face detection layout
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:app="http://schemas.android.com/apk/res-auto"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    tools:context="com.demo.cjh.facedemo.Detecter">

    <com.guo.android_extend.widget.CameraSurfaceView
        android:id="@+id/surfaceView"
        android:layout_width="1dp"
        android:layout_height="1dp" />

    <com.guo.android_extend.widget.CameraGLSurfaceView
        android:id="@+id/glsurfaceView"
        android:layout_width="144dp"
        android:layout_height="176dp"
        android:layout_marginTop="80dp"
        android:layout_centerHorizontal="true" />

    <TextView
        android:id="@+id/text1"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:text="faceDemo"
        android:layout_below="@id/glsurfaceView"
        android:layout_centerHorizontal="true"
        android:paddingTop="10dp" />

</RelativeLayout>
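The registration flow above stores face features in the face database (App.mFaceDB); the Detecter activity below then tracks faces in the camera preview and matches them against those features. It reads a "Camera" extra in onCreate() to choose the back (0) or front (non-zero) camera. A minimal sketch, not shown in the original post, of launching it from another activity:

// Launch the face detection screen; 0 = back camera, anything else = front camera
Intent intent = new Intent(this, Detecter.class);
intent.putExtra("Camera", 1); // use the front camera
startActivity(intent);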
Face detection code

public class Detecter extends AppCompatActivity implements View.OnTouchListener, CameraSurfaceView.OnCameraListener, Camera.AutoFocusCallback {

    private final String TAG = this.getClass().getSimpleName();
    private CameraSurfaceView mSurfaceView;
    private CameraGLSurfaceView mGLSurfaceView;
    private Camera mCamera;
    private TextView textView1;

    int mCameraID;
    int mCameraRotate;
    boolean mCameraMirror;
    private int mWidth, mHeight, mFormat;

    List<AFT_FSDKFace> result = new ArrayList<>();
    AFT_FSDKVersion version = new AFT_FSDKVersion();
    AFT_FSDKEngine engine = new AFT_FSDKEngine();

    byte[] mImageNV21 = null;
    FRAbsLoop mFRAbsLoop = null;
    AFT_FSDKFace mAFT_FSDKFace = null;
    boolean isOK = false;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mCameraID = getIntent().getIntExtra("Camera", 0) == 0 ? Camera.CameraInfo.CAMERA_FACING_BACK : Camera.CameraInfo.CAMERA_FACING_FRONT;
        mCameraRotate = getIntent().getIntExtra("Camera", 0) == 0 ? 90 : 270;
        mCameraMirror = getIntent().getIntExtra("Camera", 0) == 0 ? false : true;
        mWidth = 1280;
        mHeight = 960;
        mFormat = ImageFormat.NV21;
        setContentView(R.layout.activity_detecter);

        mGLSurfaceView = (CameraGLSurfaceView) findViewById(R.id.glsurfaceView);
        mGLSurfaceView.setOnTouchListener(this);
        mSurfaceView = (CameraSurfaceView) findViewById(R.id.surfaceView);
        mSurfaceView.setOnCameraListener(this);
        mSurfaceView.setupGLSurafceView(mGLSurfaceView, true, mCameraMirror, mCameraRotate);
        mSurfaceView.debug_print_fps(true, false);
        textView1 = (TextView) findViewById(R.id.text1);

        // Initialize the face tracking engine
        AFT_FSDKError err = engine.AFT_FSDK_InitialFaceEngine(FaceDB.appid, FaceDB.ft_key, AFT_FSDKEngine.AFT_OPF_0_HIGHER_EXT, 16, 5);
        Log.d(TAG, "AFT_FSDK_InitialFaceEngine =" + err.getCode());
        err = engine.AFT_FSDK_GetVersion(version);
        Log.d(TAG, "AFT_FSDK_GetVersion:" + version.toString() + "," + err.getCode());

        mFRAbsLoop = new FRAbsLoop(); // recognition thread
        mFRAbsLoop.start();
    }

    class FRAbsLoop extends AbsLoop {

        AFR_FSDKVersion version = new AFR_FSDKVersion();
        AFR_FSDKEngine engine = new AFR_FSDKEngine();
        AFR_FSDKFace result = new AFR_FSDKFace();
        List<FaceDB.FaceRegist> mResgist = App.mFaceDB.mRegister;
        List<ASAE_FSDKFace> face1 = new ArrayList<>();
        List<ASGE_FSDKFace> face2 = new ArrayList<>();

        @Override
        public void setup() {
            AFR_FSDKError error = engine.AFR_FSDK_InitialEngine(FaceDB.appid, FaceDB.fr_key);
            Log.d(TAG, "AFR_FSDK_InitialEngine = " + error.getCode());
            error = engine.AFR_FSDK_GetVersion(version);
            Log.d(TAG, "FR=" + version.toString() + "," + error.getCode());
        }

        @Override
        public void loop() {
            if (isOK) {
                try {
                    Thread.sleep(3000);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
                mFRAbsLoop.shutdown();
                Detecter.this.finish();
            }
            // Run face recognition on the latest captured frame
            if (mImageNV21 != null) {
                long time = System.currentTimeMillis();
                AFR_FSDKError error = engine.AFR_FSDK_ExtractFRFeature(mImageNV21, mWidth, mHeight, AFR_FSDKEngine.CP_PAF_NV21, mAFT_FSDKFace.getRect(), mAFT_FSDKFace.getDegree(), result);
                Log.d(TAG, "AFR_FSDK_ExtractFRFeature cost :" + (System.currentTimeMillis() - time) + "ms");
                Log.d(TAG, "Face=" + result.getFeatureData()[0] + "," + result.getFeatureData()[1] + "," + result.getFeatureData()[2] + "," + error.getCode());
                AFR_FSDKMatching score = new AFR_FSDKMatching();
                float max = 0.0f;
                String name = null;
                // Compare the extracted feature against every registered face and keep the best score
                for (FaceDB.FaceRegist fr : mResgist) {
                    for (AFR_FSDKFace face : fr.mFaceList) {
                        error = engine.AFR_FSDK_FacePairMatching(result, face, score);
                        Log.d(TAG, "Score:" + score.getScore() + ", AFR_FSDK_FacePairMatching=" + error.getCode());
                        if (max < score.getScore()) {
                            max = score.getScore();
                            name = fr.mName;
                        }
                    }
                }
                // age & gender
                face1.clear();
                face2.clear();
                // crop
                byte[] data = mImageNV21;
                YuvImage yuv = new YuvImage(data, ImageFormat.NV21, mWidth, mHeight, null);
                ExtByteArrayOutputStream ops = new ExtByteArrayOutputStream();
                yuv.compressToJpeg(mAFT_FSDKFace.getRect(), 80, ops);
                final Bitmap bmp = BitmapFactory.decodeByteArray(ops.getByteArray(), 0, ops.getByteArray().length);
                try {
                    ops.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
                if (max > 0.6f) { // a score above 0.6 counts as a successful match
                    // fr success.
                    final float max_score = max;
                    Log.d(TAG, "fit Score:" + max + ", NAME:" + name);
                    final String mNameShow = name;
                    runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            textView1.setText("Recognition succeeded!");
                            textView1.setTextColor(Color.BLACK);
                            isOK = true;
                        }
                    });
                } else {
                    final String mNameShow = "Not recognized";
                    Log.d(TAG, "Recognized: " + name);
                    runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            textView1.setText("Recognition failed!");
                            textView1.setTextColor(Color.RED);
                        }
                    });
                }
                mImageNV21 = null;
            }
        }

        @Override
        public void over() {
            AFR_FSDKError error = engine.AFR_FSDK_UninitialEngine();
            Log.d(TAG, "AFR_FSDK_UninitialEngine : " + error.getCode());
        }
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Release the engines
        mFRAbsLoop.shutdown();
        AFT_FSDKError err = engine.AFT_FSDK_UninitialFaceEngine();
        Log.d(TAG, "AFT_FSDK_UninitialFaceEngine =" + err.getCode());
    }

    @Override
    public boolean onTouch(View v, MotionEvent event) {
        CameraHelper.touchFocus(mCamera, event, v, this); // tap to focus
        return false;
    }

    @Override
    public Camera setupCamera() {
        // Configure the camera before preview starts: NV21 preview format plus the preview width/height
        mCamera = Camera.open(mCameraID);
        try {
            Camera.Parameters parameters = mCamera.getParameters();
            parameters.setPreviewSize(mWidth, mHeight);
            parameters.setPreviewFormat(mFormat);

            for (Camera.Size size : parameters.getSupportedPreviewSizes()) {
                Log.d(TAG, "SIZE:" + size.width + "x" + size.height);
            }
            for (Integer format : parameters.getSupportedPreviewFormats()) {
                Log.d(TAG, "FORMAT:" + format);
            }
            List<int[]> fps = parameters.getSupportedPreviewFpsRange();
            for (int[] count : fps) {
                Log.d(TAG, "T:");
                for (int data : count) {
                    Log.d(TAG, "V=" + data);
                }
            }
            //parameters.setPreviewFpsRange(15000, 30000);
            //parameters.setExposureCompensation(parameters.getMaxExposureCompensation());
            //parameters.setWhiteBalance(Camera.Parameters.WHITE_BALANCE_AUTO);
            //parameters.setAntibanding(Camera.Parameters.ANTIBANDING_AUTO);
            //parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
            //parameters.setSceneMode(Camera.Parameters.SCENE_MODE_AUTO);
            //parameters.setColorEffect(Camera.Parameters.EFFECT_NONE);
            mCamera.setParameters(parameters);
        } catch (Exception e) {
            e.printStackTrace();
        }
        if (mCamera != null) {
            mWidth = mCamera.getParameters().getPreviewSize().width;
            mHeight = mCamera.getParameters().getPreviewSize().height;
        }
        return mCamera;
    }

    @Override
    public void setupChanged(int format, int width, int height) {
    }

    @Override
    public boolean startPreviewLater() {
        return false;
    }

    @Override
    public Object onPreview(byte[] data, int width, int height, int format, long timestamp) {
        // For each preview frame, detect the face rectangles
        AFT_FSDKError err = engine.AFT_FSDK_FaceFeatureDetect(data, width, height, AFT_FSDKEngine.CP_PAF_NV21, result);
        Log.d(TAG, "AFT_FSDK_FaceFeatureDetect =" + err.getCode());
        Log.d(TAG, "Face=" + result.size());
        for (AFT_FSDKFace face : result) {
            Log.d(TAG, "Face:" + face.toString());
        }
        if (mImageNV21 == null) {
            if (!result.isEmpty()) {
                // Hand the first face and a copy of the frame to the recognition thread
                mAFT_FSDKFace = result.get(0).clone();
                mImageNV21 = data.clone();
            } else {
                //mHandler.postDelayed(hide, 3000);
            }
        }
        // copy rects
        Rect[] rects = new Rect[result.size()];
        for (int i = 0; i < result.size(); i++) {
            rects[i] = new Rect(result.get(i).getRect());
        }
        // clear result.
        result.clear();
        // return the rects for render.
        return rects;
    }

    @Override
    public void onBeforeRender(CameraFrameData data) {
    }

    @Override
    public void onAfterRender(CameraFrameData data) {
        // Draw the face rectangles
        mGLSurfaceView.getGLES2Render().draw_rect((Rect[]) data.getParams(), Color.GREEN, 2);
    }

    @Override
    public void onAutoFocus(boolean success, Camera camera) {
    }
}

Summary
The overall flow: register a face by detecting it in a still photo with the FD engine and extracting its feature with the FR engine into the face database; recognize by tracking faces in the camera preview with the FT engine, extracting a feature from the latest frame in a background loop, and matching it against every registered feature, treating a score above 0.6 as a successful match.