
refactor(update)
- Face login completed

周文健 4 months ago
commit fdc00b28b3

+ 36 - 4
app/src/main/java/com/grkj/iscs/features/login/dialog/LoginDialog.kt

@@ -2,6 +2,7 @@ package com.grkj.iscs.features.login.dialog
 
 import android.graphics.Bitmap
 import android.view.View
+import androidx.appcompat.widget.PopupMenu
 import androidx.lifecycle.LifecycleOwner
 import com.grkj.data.model.res.UserInfoRes
 import com.grkj.iscs.R
@@ -17,6 +18,7 @@ import com.kongzue.dialogx.interfaces.DialogLifecycleCallback
 import com.kongzue.dialogx.interfaces.OnBindView
 import com.sik.sikcore.SIKCore
 import com.sik.sikcore.activity.ActivityTracker
+import com.sik.sikcore.thread.ThreadUtils
 import com.sik.sikimage.ImageConvertUtils
 import com.sik.sikimage.ImageUtils
 
@@ -28,6 +30,7 @@ class LoginDialog(
 
     private var cardNo = ""
     private var mLoginType = 3 // 0: face 1: fingerprint 2: badge 3: account
+    private var inFaceChecking: Boolean = false
     private val mPairList = mutableListOf(
         Pair(
             SIKCore.getApplication().getString(com.grkj.ui_base.R.string.please_scan_face),
@@ -47,6 +50,12 @@ class LoginDialog(
 
     override fun onBind(customDialog: CustomDialog, contentView: View) {
         mBinding = DialogLoginBinding.bind(contentView)
+        customDialog.setDialogLifecycleCallback(object : DialogLifecycleCallback<CustomDialog>() {
+            override fun onDismiss(dialog: CustomDialog?) {
+                ArcSoftUtil.stop()
+                super.onDismiss(dialog)
+            }
+        })
         mBinding.tvLogin.setOnClickListener {
             if (mBinding.etAccount.text.toString().isEmpty()) {
                 PopTip.tip(CommonUtils.getStr(com.grkj.ui_base.R.string.please_input_account))
@@ -56,7 +65,10 @@ class LoginDialog(
                 PopTip.tip(CommonUtils.getStr(com.grkj.ui_base.R.string.please_input_password))
                 return@setOnClickListener
             }
-            LoadingEvent.sendLoadingEvent(CommonUtils.getStr(com.grkj.ui_base.R.string.doing_login),true)
+            LoadingEvent.sendLoadingEvent(
+                CommonUtils.getStr(com.grkj.ui_base.R.string.doing_login),
+                true
+            )
             viewModel.loginWithAccount(
                 mBinding.etAccount.text.toString(),
                 mBinding.etPassword.text.toString()
@@ -109,7 +121,10 @@ class LoginDialog(
                     FingerprintUtil.start()
                     FingerprintUtil.setScanListener(object : FingerprintUtil.OnScanListener {
                         override fun onScan(bitmap: Bitmap) {
-                            LoadingEvent.sendLoadingEvent(CommonUtils.getStr(com.grkj.ui_base.R.string.doing_login),true)
+                            LoadingEvent.sendLoadingEvent(
+                                CommonUtils.getStr(com.grkj.ui_base.R.string.doing_login),
+                                true
+                            )
                             viewModel.loginWithFingerprint(
                                 ImageConvertUtils.bitmapToBase64(bitmap).toString()
                             ).observe(lifecycleOwner) {
@@ -149,18 +164,35 @@ class LoginDialog(
     private fun startFace() {
         ActivityTracker.getCurrentActivity()?.let { context ->
             ArcSoftUtil.initEngine(context)
-            ArcSoftUtil.initCamera(context, context.windowManager, mBinding.preview!!) {bitmap, faceSize, alive ->
+            ArcSoftUtil.initCamera(
+                context,
+                context.windowManager,
+                mBinding.preview!!
+            ) { bitmap, faceSize, alive ->
                 bitmap?.let { itBitmap ->
+                    if (faceSize == 0) {
+                        return@let
+                    }
+                    if (inFaceChecking) {
+                        return@let
+                    }
+                    inFaceChecking = true
                     viewModel.loginWithFace(
                         ImageConvertUtils.bitmapToBase64(itBitmap).toString()
                     ).observe(lifecycleOwner) {
+                        if (it == false) {
+                            ThreadUtils.runOnMainDelayed(1000) {
+                                inFaceChecking = false
+                            }
+                        } else {
+                            ArcSoftUtil.stop()
+                        }
                         callBack?.invoke(it)
                     }
                     LoadingEvent.sendLoadingEvent(
                         context.getString(com.grkj.ui_base.R.string.face_detected_do_login),
                         true
                     )
-                    ArcSoftUtil.stop()
                 }
             }
         }
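
Note on the added `inFaceChecking` flag: it acts as a per-frame gate. Frames with no face are skipped, only one login attempt is in flight at a time, a failed attempt releases the gate after a one-second delay, and a successful attempt stops the camera. A minimal, self-contained sketch of the same pattern (FaceLoginGate, onFrame and attemptLogin are illustrative names, not part of this project):

```kotlin
import android.os.Handler
import android.os.Looper

// Illustrative sketch of the frame gating used in startFace();
// attemptLogin/onSuccess are hypothetical stand-ins, not project APIs.
class FaceLoginGate(
    private val attemptLogin: (frame: ByteArray, onResult: (Boolean) -> Unit) -> Unit
) {
    private val mainHandler = Handler(Looper.getMainLooper())
    private var inFaceChecking = false // only one login attempt in flight

    fun onFrame(frame: ByteArray, faceCount: Int, onSuccess: () -> Unit) {
        if (faceCount == 0 || inFaceChecking) return // skip empty or re-entrant frames
        inFaceChecking = true
        attemptLogin(frame) { ok ->
            if (ok) {
                onSuccess() // e.g. stop the camera and dismiss the dialog
            } else {
                // release the gate after a short cool-down so a later frame can retry
                mainHandler.postDelayed({ inFaceChecking = false }, 1_000)
            }
        }
    }
}
```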

+ 6 - 5
shared/src/main/java/com/grkj/shared/utils/ArcSoftUtil.kt

@@ -28,7 +28,6 @@ import com.grkj.shared.utils.face.arcsoft.CameraListener
 import com.grkj.shared.utils.face.arcsoft.NV21ToBitmap
 import org.slf4j.Logger
 import org.slf4j.LoggerFactory
-import kotlin.math.log
 
 /**
 * ArcSoft utility class
@@ -86,7 +85,7 @@ object ArcSoftUtil {
             DetectFaceOrientPriority.valueOf("ASF_OP_0_ONLY"),
             16,
             20,
-            FaceEngine.ASF_FACE_DETECT or FaceEngine.ASF_AGE or FaceEngine.ASF_FACE3DANGLE or FaceEngine.ASF_GENDER or FaceEngine.ASF_LIVENESS
+            FaceEngine.ASF_FACE_DETECT or FaceEngine.ASF_AGE or FaceEngine.ASF_FACE3DANGLE or FaceEngine.ASF_GENDER or FaceEngine.ASF_LIVENESS or FaceEngine.ASF_FACE_RECOGNITION
         )
         logger.info("initEngine:  init: $afCode")
         if (afCode != ErrorInfo.MOK) {
@@ -237,21 +236,22 @@ object ArcSoftUtil {
         val imgA = bitmapToBgr24(bmpA)
         val imgB = bitmapToBgr24(bmpB)
 
-        faceEngine?.detectFaces(
+        val imgADetectResultCode = faceEngine?.detectFaces(
             imgA,
             bmpA.width,
             bmpA.height,
             FaceEngine.CP_PAF_BGR24,
             facesA
         )
-        faceEngine?.detectFaces(
+        val imgBDetectResultCode = faceEngine?.detectFaces(
             imgB,
             bmpB.width,
             bmpB.height,
             FaceEngine.CP_PAF_BGR24,
             facesB
         )
-
+        logger.info("人脸检测结果1:${facesA.size},${facesA.size}")
+        logger.info("人脸检测结果2:${imgADetectResultCode},${imgBDetectResultCode}")
         if (facesA.isEmpty() || facesB.isEmpty()) {
             return false
         }
@@ -272,6 +272,7 @@ object ArcSoftUtil {
         val compareResult = FaceSimilar()
         val compareCode = faceEngine?.compareFaceFeature(ftA, ftB, compareResult)
         if (compareCode != ErrorInfo.MOK) {
+            logger.info("特征比对结果:${compareCode}")
             return false
         }
         logger.info("比对分数:${compareResult.score}")

+ 0 - 480
shared/src/main/java/com/grkj/shared/utils/face/arcsoft/FaceUtils.kt

@@ -1,480 +0,0 @@
-package com.grkj.shared.utils.face.arcsoft
-
-import android.content.Context
-import android.graphics.Bitmap
-import android.graphics.ImageFormat
-import android.graphics.Rect
-import android.hardware.camera2.CameraCharacteristics
-import android.hardware.camera2.CameraManager
-import android.media.Image
-import android.media.ImageReader
-import android.os.Environment
-import android.util.Log
-import android.util.Size
-import com.arcsoft.face.*
-import com.arcsoft.face.enums.DetectFaceOrientPriority
-import com.arcsoft.face.enums.DetectMode
-import com.arcsoft.imageutil.ArcSoftImageFormat
-import com.arcsoft.imageutil.ArcSoftImageUtil
-import com.arcsoft.imageutil.ArcSoftImageUtilError
-import com.arcsoft.imageutil.ArcSoftRotateDegree
-import java.io.File
-import java.io.FileInputStream
-import java.io.FileOutputStream
-import java.io.IOException
-import kotlin.math.abs
-
-object FaceUtils {
-    val EXT_ROOT_PATH : String = Environment.getExternalStorageDirectory().absolutePath
-    private var faceEngine: FaceEngine? = null
-//    private val faceDataList = ArrayList<FaceBean>()
-
-    /**
-     * Directory where face features are stored
-     */
-    private const val NEW_FEATURE_DIR = "face-features"
-
-    fun extFile(path: String) : File {
-        return File(EXT_ROOT_PATH + File.separator + "refuse-class" + File.separator + path)
-    }
-    
-    fun moveOldFolderToNew() {
-        val oldRegDir = File(EXT_ROOT_PATH + File.separator + "register")
-        if (!oldRegDir.exists() || !oldRegDir.isDirectory) {
-            Log.i("FACE", "旧目录 register 不存在")
-            return
-        }
-        val oldDir = File(oldRegDir.path + File.separator + "features")
-        if (!oldDir.exists() || !oldDir.isDirectory) {
-            Log.i("FACE", "旧目录 register/features 不存在")
-            return
-        }
-        val newDir = extFile(NEW_FEATURE_DIR)
-        if (!newDir.exists()) {
-            val success = newDir.mkdirs()
-            Log.i("FACE", "创建新目录 /refuse-class/face-features : ${success}")
-        }
-        for (oldFile in oldDir.listFiles()) {
-            val newFile = extFile(NEW_FEATURE_DIR + File.separator + oldFile.name)
-            if (!newFile.exists()) {
-                newFile.createNewFile()
-            }
-            val data = ByteArray(FaceFeature.FEATURE_SIZE)
-            val fis = FileInputStream(oldFile)
-            fis.read(data)
-            fis.close()
-            Log.i("FACE", "读取旧目录人脸特征:${oldFile.name}")
-            val fos = FileOutputStream(newFile)
-            fos.write(data)
-            fos.close()
-            Log.i("FACE", "写入新目录人脸特征:${oldFile.name}")
-            oldFile.delete()
-            Log.i("FACE", "删除旧目录人脸特征:${oldFile.name}")
-        }
-        oldDir.delete()
-        Log.i("FACE", "删除旧目录 register/features")
-        oldRegDir.delete()
-        Log.i("FACE", "删除旧目录 register")
-    }
-
-
-    /**
-     * Initialize the engine
-     *
-     * @param context context object
-     * @return init result code; ErrorInfo.MOK on success
-     */
-    fun init(context: Context): Int {
-        synchronized(this) {
-            if (faceEngine == null) {
-                val t0 = System.currentTimeMillis()
-                faceEngine = FaceEngine()
-                val engineCode = faceEngine!!.init(
-                    context,
-                    DetectMode.ASF_DETECT_MODE_VIDEO,
-                    DetectFaceOrientPriority.ASF_OP_ALL_OUT,
-                    16,
-                    1,
-                    // Face detection | face feature extraction | RGB liveness
-                    FaceEngine.ASF_FACE_DETECT or FaceEngine.ASF_FACE_RECOGNITION or FaceEngine.ASF_LIVENESS
-                )
-                val tt = System.currentTimeMillis() - t0
-                Log.i("face", "人脸识别引擎初始化:$engineCode, 耗时:$tt")
-                if (engineCode == ErrorInfo.MOK) {
-                    initFaceList()
-                } else {
-                    faceEngine = null
-                }
-                return engineCode
-            }
-            return ErrorInfo.MOK
-        }
-    }
-
-    /**
-     * Destroy
-     */
-//    fun unInit() {
-//        synchronized(this) {
-//            faceDataList.clear()
-//            faceEngine?.unInit()
-//            faceEngine = null
-//        }
-//    }
-
-    /**
-     * Clear all face features
-     */
-    fun clearFaces() {
-        extFile(NEW_FEATURE_DIR).delete()
-    }
-
-    /**
-     * Initialize face feature data and the registration images corresponding to the feature data
-     *
-     */
-    private fun initFaceList() {
-        synchronized(this) {
-            val featureDir = extFile(NEW_FEATURE_DIR)
-            if (!featureDir.exists() || !featureDir.isDirectory) {
-                return
-            }
-            for (featureFile in featureDir.listFiles()) {
-                try {
-                    val fis = FileInputStream(featureFile)
-                    val feature = ByteArray(FaceFeature.FEATURE_SIZE)
-                    fis.read(feature)
-                    fis.close()
-//                    faceDataList.add(FaceBean(featureFile.name, feature))
-                } catch (e: IOException) {
-                    Log.e("face", "加载人脸数据异常", e)
-                }
-            }
-        }
-    }
-
-    /**
-     * Detect face info
-     */
-    fun detectFace(nv21: ByteArray, width: Int, height: Int) : FaceInfo? {
-        return faceEngine?.run {
-            val faceInfos = ArrayList<FaceInfo>()
-            val code = detectFaces(nv21, width, height, FaceEngine.CP_PAF_NV21, faceInfos)
-            if (code == ErrorInfo.MOK && faceInfos.size > 0) {
-                Log.d("face", "检测到 ${faceInfos.size} 个人脸")
-                return faceInfos.maxByOrNull { faceInfo ->
-                    val r = faceInfo.rect
-                    abs((r.left - r.right) * (r.top - r.bottom))
-                }
-            } else {
-                Log.i("face", "未检测到人脸: $code")
-                return null
-            }
-        }
-    }
-
-    /**
-     * Liveness detection
-     */
-    fun detectLive(nv21: ByteArray, faceInfo: FaceInfo, width: Int, height: Int) : LivenessInfo? {
-        return faceEngine?.run {
-            val pcode = process(nv21, width, height, FaceEngine.CP_PAF_NV21, listOf(faceInfo), FaceEngine.ASF_LIVENESS)
-            val livenesses = ArrayList<LivenessInfo>()
-            if (pcode == ErrorInfo.MOK) {
-                val lcode = getLiveness(livenesses)
-                if (lcode == ErrorInfo.MOK && livenesses.size > 0) {
-                    return livenesses[0]
-                } else {
-                    Log.d("face", "提取 RGB 活体信息失败:$lcode")
-                }
-            } else {
-                Log.i("face", "人脸属性检测失败:$pcode")
-            }
-            return null
-        }
-    }
-
-    /**
-     * Face feature extraction
-     */
-    fun getFaceFeature(nv21: ByteArray, faceInfo: FaceInfo, width: Int, height: Int) : FaceFeature? {
-        return faceEngine?.run {
-            val faceFeature = FaceFeature()
-            val code = extractFaceFeature(nv21, width, height, FaceEngine.CP_PAF_NV21, faceInfo, faceFeature)
-            if (code == ErrorInfo.MOK) {
-                return faceFeature
-            } else {
-                Log.i("face", "人脸特征提取失败:$code")
-            }
-            return null
-        }
-    }
-
-    /**
-     * Search the feature library for the most similar face
-     * Original name: getTopOfFaceLib
-     * @param feature input feature data
-     * @return comparison result
-     */
-//    fun findMostSimilarFace(feature: FaceFeature): FaceResultBean? {
-//        if (faceDataList.size == 0) {
-//            return null
-//        }
-//        return faceEngine?.run {
-//            val tmpFeature = FaceFeature()
-//            val faceSimilar = FaceSimilar()
-//            var maxSimilarScore = 0f
-//            var maxSimilarIndex = -1
-//            for (i in faceDataList.indices) {
-//                tmpFeature.featureData = faceDataList[i].featureData
-//
-//                compareFaceFeature(feature, tmpFeature, faceSimilar)
-//
-//                if (faceSimilar.score > maxSimilarScore) {
-//                    maxSimilarScore = faceSimilar.score
-//                    maxSimilarIndex = i
-//                }
-//            }
-//            return if (maxSimilarIndex > -1) {
-//                FaceResultBean(faceDataList[maxSimilarIndex].name, maxSimilarScore)
-//            } else null
-//        }
-//    }
-
-    /**
-     * Get the face bitmap
-     */
-    fun getFaceBitmap(nv21: ByteArray, width: Int, height: Int, faceInfo: FaceInfo): Bitmap? {
-        if (width % 4 != 0 || nv21.size != width * height * 3 / 2) {
-            Log.e("face", "invalid params")
-            return null
-        }
-        return faceEngine?.run {
-            // Save the registration result (registration image and feature data)
-            // For a nicer crop, enlarge the rect before cropping the registration image
-            val cropRect = getBestRect(width, height, faceInfo.rect)
-            if (cropRect == null) {
-                Log.e("face", "registerNv21: cropRect is null!")
-                return null
-            }
-            cropRect.left = cropRect.left and 3.inv()
-            cropRect.top = cropRect.top and 3.inv()
-            cropRect.right = cropRect.right and 3.inv()
-            cropRect.bottom = cropRect.bottom and 3.inv()
-            // Create a head-image Bitmap to hold the rotated result
-            return getHeadImage(nv21, width, height, faceInfo.orient, cropRect)
-        }
-    }
-
-    /**
-     * Save a face feature
-     */
-//    fun saveFeature(feature: FaceFeature, name: String): Boolean {
-//        synchronized(this) {
-//            // Folder where features are stored
-//            val featureDir = extFile(NEW_FEATURE_DIR)
-//            if (!featureDir.exists() && !featureDir.mkdirs()) {
-//                Log.e("face", "registerNv21: can not create feature directory")
-//                return false
-//            }
-//            try {
-//                val fosFeature = FileOutputStream(featureDir.path + File.separator + name)
-//                fosFeature.write(feature.featureData)
-//                fosFeature.close()
-//                // Keep the in-memory data in sync
-//                faceDataList.add(FaceBean(name, feature.featureData))
-//                return true
-//            } catch (e: IOException) {
-//                Log.e("face", "保存图片特征异常", e)
-//                return false
-//            }
-//        }
-//    }
-
-    /**
-     * Crop a suitable head image, rotate it, and save it as the registration image
-     *
-     * @param originImageData original BGR24 data
-     * @param width           BGR24 image width
-     * @param height          BGR24 image height
-     * @param orient          face orientation
-     * @param cropRect        crop position
-     * @param imageFormat     image format
-     * @return image data of the head image
-     */
-    private fun getHeadImage(
-        originImageData: ByteArray,
-        width: Int,
-        height: Int,
-        orient: Int,
-        cropRect: Rect,
-        imageFormat: ArcSoftImageFormat = ArcSoftImageFormat.NV21
-    ): Bitmap {
-        val headImageData = ArcSoftImageUtil.createImageData(cropRect.width(), cropRect.height(), imageFormat)
-        val cropCode = ArcSoftImageUtil.cropImage(
-            originImageData,
-            headImageData,
-            width,
-            height,
-            cropRect,
-            imageFormat
-        )
-        if (cropCode != ArcSoftImageUtilError.CODE_SUCCESS) {
-            throw RuntimeException("crop image failed, code is $cropCode")
-        }
-
-        // Check the face orientation; if it is not 0 degrees, rotate the registration image
-        var rotateHeadImageData: ByteArray? = null
-        val rotateCode: Int
-        val cropImageWidth: Int
-        val cropImageHeight: Int
-        // For 90 or 270 degrees, width and height must be swapped
-        if (orient == FaceEngine.ASF_OC_90 || orient == FaceEngine.ASF_OC_270) {
-            cropImageWidth = cropRect.height()
-            cropImageHeight = cropRect.width()
-        } else {
-            cropImageWidth = cropRect.width()
-            cropImageHeight = cropRect.height()
-        }
-        var rotateDegree: ArcSoftRotateDegree? = null
-        when (orient) {
-            FaceEngine.ASF_OC_90 -> rotateDegree = ArcSoftRotateDegree.DEGREE_270
-            FaceEngine.ASF_OC_180 -> rotateDegree = ArcSoftRotateDegree.DEGREE_180
-            FaceEngine.ASF_OC_270 -> rotateDegree = ArcSoftRotateDegree.DEGREE_90
-            FaceEngine.ASF_OC_0 -> rotateHeadImageData = headImageData
-            else -> rotateHeadImageData = headImageData
-        }
-        // Rotate the image when the orientation is not 0 degrees
-        if (rotateDegree != null) {
-            rotateHeadImageData = ByteArray(headImageData.size)
-            rotateCode = ArcSoftImageUtil.rotateImage(
-                headImageData,
-                rotateHeadImageData,
-                cropRect.width(),
-                cropRect.height(),
-                rotateDegree,
-                imageFormat
-            )
-            if (rotateCode != ArcSoftImageUtilError.CODE_SUCCESS) {
-                throw RuntimeException("rotate image failed, code is $rotateCode")
-            }
-        }
-        // Create a Bitmap and copy the image data into it
-        val headBmp = Bitmap.createBitmap(cropImageWidth, cropImageHeight, Bitmap.Config.RGB_565)
-        if (ArcSoftImageUtil.imageDataToBitmap(
-                rotateHeadImageData,
-                headBmp,
-                imageFormat
-            ) != ArcSoftImageUtilError.CODE_SUCCESS
-        ) {
-            throw RuntimeException("failed to transform image data to bitmap")
-        }
-        return headBmp
-    }
-
-    /**
-     * Expand the crop Rect outward to twice its size; if expanding would overflow, expand only to the image boundary; if the Rect already overflows, shrink it to the boundary
-     *
-     * @param width   image width
-     * @param height  image height
-     * @param srcRect original Rect
-     * @return adjusted Rect
-     */
-    private fun getBestRect(
-        width: Int,
-        height: Int,
-        srcRect: Rect?
-    ): Rect? {
-        if (srcRect == null) {
-            return null
-        }
-        val rect = Rect(srcRect)
-        // Case: the original rect already overflows the image bounds
-        val maxOverFlow = Math.max(
-            -rect.left,
-            Math.max(
-                -rect.top,
-                Math.max(rect.right - width, rect.bottom - height)
-            )
-        )
-        if (maxOverFlow >= 0) {
-            rect.inset(maxOverFlow, maxOverFlow)
-            return rect
-        }
-        // Case: the original rect does not overflow the image bounds
-        var padding = rect.height() / 2
-        // If expanding the rect by this padding would overflow, cap padding at the smallest of the four margins
-        if (!(rect.left - padding > 0 && rect.right + padding < width && rect.top - padding > 0 && rect.bottom + padding < height)) {
-            padding = Math.min(
-                Math.min(
-                    Math.min(rect.left, width - rect.right),
-                    height - rect.bottom
-                ),
-                rect.top
-            )
-        }
-        rect.inset(-padding, -padding)
-        return rect
-    }
-
-    fun optimalOutputSize(cameraManager: CameraManager,cameraId: String, width: Int, height: Int) : Size {
-        return cameraManager.getCameraCharacteristics(cameraId).get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)
-            ?.getOutputSizes(ImageReader::class.java)
-            ?.minByOrNull {
-                (it.width - height) * (it.height - width)
-            }
-            ?: Size(width, height)
-    }
-
-    fun getDataByNV21(cropRect:Rect,format:Int,planes:Array<Image.Plane>) : ByteArray {
-        val width: Int = cropRect.width()
-        val height: Int = cropRect.height()
-        val data = ByteArray(width * height * ImageFormat.getBitsPerPixel(format) / 8)
-        val rowData = ByteArray(planes[0].rowStride)
-        var channelOffset = 0
-        var outputStride = 1
-        for (i in planes.indices) {
-            when (i) {
-                0 -> {
-                    channelOffset = 0
-                    outputStride = 1
-                }
-                1 -> {
-                    channelOffset = width * height + 1
-                    outputStride = 2
-                }
-                2 -> {
-                    channelOffset = width * height
-                    outputStride = 2
-                }
-            }
-            val buffer = planes[i].buffer
-            val rowStride = planes[i].rowStride
-            val pixelStride = planes[i].pixelStride
-            val shift = if (i == 0) 0 else 1
-            val w = width shr shift
-            val h = height shr shift
-            buffer.position(rowStride * (cropRect.top shr shift) + pixelStride * (cropRect.left shr shift))
-            for (row in 0 until h) {
-                var length: Int
-                if (pixelStride == 1 && outputStride == 1) {
-                    length = w
-                    buffer.get(data, channelOffset, length)
-                    channelOffset += length
-                } else {
-                    length = (w - 1) * pixelStride + 1
-                    buffer.get(rowData, 0, length)
-                    for (col in 0 until w) {
-                        data[channelOffset] = rowData[col * pixelStride]
-                        channelOffset += outputStride
-                    }
-                }
-                if (row < h - 1) {
-                    buffer.position(buffer.position() + rowStride - length)
-                }
-            }
-        }
-        return data
-    }
-
-}