@@ -0,0 +1,627 @@
+package com.grkj.data.hardware.face
+
+import android.content.Context
+import android.graphics.Bitmap
+import android.graphics.BitmapFactory
+import android.graphics.ImageFormat
+import android.graphics.Point
+import android.graphics.Rect
+import android.hardware.Camera
+import android.util.Base64
+import android.view.View
+import com.arcsoft.face.ActiveFileInfo
+import com.arcsoft.face.ErrorInfo
+import com.arcsoft.face.FaceEngine
+import com.arcsoft.face.FaceFeature
+import com.arcsoft.face.FaceFeatureInfo
+import com.arcsoft.face.FaceInfo
+import com.arcsoft.face.FaceSimilar
+import com.arcsoft.face.LivenessInfo
+import com.arcsoft.face.enums.DetectFaceOrientPriority
+import com.arcsoft.face.enums.DetectMode
+import com.arcsoft.face.enums.ExtractType
+import com.google.mlkit.vision.common.InputImage
+import com.google.mlkit.vision.face.FaceDetectorOptions
+import com.grkj.data.hardware.face.hlk.Hlk223Client
+import com.grkj.shared.config.Constants
+import com.grkj.shared.utils.DisplayUtils
+import com.grkj.shared.utils.ImageCompress
+import com.grkj.shared.utils.extension.isInCenterArea
+import com.grkj.shared.utils.face.arcsoft.CameraHelper
+import com.grkj.shared.utils.face.arcsoft.CameraListener
+import com.grkj.shared.widget.FaceOverlayView
+import com.sik.sikcore.thread.ThreadUtils
+import com.sik.sikimage.ImageConvertUtils
+import com.sik.sikimage.ImageUtils
+import kotlinx.coroutines.CoroutineScope
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.Job
+import kotlinx.coroutines.SupervisorJob
+import kotlinx.coroutines.delay
+import kotlinx.coroutines.launch
+import org.json.JSONObject
+import org.slf4j.Logger
+import org.slf4j.LoggerFactory
+import java.io.File
+import java.util.zip.CRC32
+
+/**
+ * FaceUtil: face pipeline with optional HLK module support; the public API is unchanged.
+ * - HLK mode: detection, liveness and recognition are handled by the module via NOTE/REPLY frames; the app only drives the preview and callbacks.
+ * - ARC mode: the original ArcSoft flow is kept as-is.
+ */
+object FaceUtil {
+    private val logger: Logger = LoggerFactory.getLogger(FaceUtil::class.java)
+
+    private enum class FaceBackend { ARC, HLK }
+
+    @Volatile
+    private var backend: FaceBackend = FaceBackend.ARC
+
+    @Volatile
+    private var hlkClient: Hlk223Client? = null
+
+    @Volatile
+    private var hlkVerifyJob: Job? = null
+
+    @JvmStatic
+    fun enableHlkBackend(client: Hlk223Client) {
+        hlkClient = client; backend = FaceBackend.HLK
+        checkLive()
+    }
+
+    private fun checkLive() {
+        if (hlkClient == null) return
+        hlkVerifyJob?.cancel()
+        hlkVerifyJob = ioScope.launch {
+            delay(200)
+            val version = hlkClient?.getVersion()
+            logger.info("hlk version: $version")
+        }
+    }
+
+    @JvmStatic
+    fun disableHlkBackend() {
+        backend = FaceBackend.ARC; hlkClient = null
+    }
+
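+    // A minimal usage sketch (assumed host code, not part of this class): a screen that owns an
+    // Hlk223Client can route FaceUtil to the module and fall back to ArcSoft when no module is present.
+    //
+    //     val client: Hlk223Client? = tryOpenHlkModule()   // tryOpenHlkModule is a hypothetical helper
+    //     if (client != null) FaceUtil.enableHlkBackend(client) else FaceUtil.disableHlkBackend()
+    //     FaceUtil.checkActiveStatus(context)   // becomes a no-op activation check in HLK mode
+    //     FaceUtil.initEngine(context)          // ArcSoft only; returns immediately in HLK mode
+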
+    // ArcSoft licensing configuration (ARC backend only)
+    private const val CONFIG_DIR = "arcsoft_config"
+    private const val CONFIG_FILE = "ArcSoftConfig.json"
+    private const val OFFLINE_DAT = "OfflineActive.dat"
+
+    private data class ArcSoftLicenseConfig(
+        val appId: String, val sdkKey: String, val activeKey: String,
+        val activeOnline: Boolean, val activeOfflineFilePath: String
+    )
+
+    private fun readOrInitConfig(context: Context): ArcSoftLicenseConfig {
+        val dir = File("/sdcard/iscs/", CONFIG_DIR).apply { if (!exists()) mkdirs() }
+        val cfg = File(dir, CONFIG_FILE)
+        val offlineDat = File(dir, OFFLINE_DAT)
+        if (!offlineDat.exists()) runCatching { offlineDat.createNewFile() }
+        if (!cfg.exists()) {
+            val def = ArcSoftLicenseConfig(
+                appId = Constants.APP_ID,
+                sdkKey = Constants.SDK_KEY,
+                activeKey = Constants.ACTIVE_KEY,
+                activeOnline = false,
+                activeOfflineFilePath = offlineDat.absolutePath
+            )
+            cfg.writeText(JSONObject().apply {
+                put("appId", def.appId); put("sdkKey", def.sdkKey); put("activeKey", def.activeKey)
+                put("activeOnline", def.activeOnline); put(
+                    "activeOfflineFilePath",
+                    def.activeOfflineFilePath
+                )
+            }.toString())
+            return def
+        }
+        val json = runCatching { JSONObject(cfg.readText()) }.getOrElse { JSONObject() }
+        fun j(k: String, d: String) = json.optString(k, d).ifBlank { d }
+        return ArcSoftLicenseConfig(
+            appId = j("appId", Constants.APP_ID),
+            sdkKey = j("sdkKey", Constants.SDK_KEY),
+            activeKey = j("activeKey", Constants.ACTIVE_KEY),
+            activeOnline = json.optBoolean("activeOnline", false),
+            activeOfflineFilePath = j("activeOfflineFilePath", offlineDat.absolutePath)
+        )
+    }
+
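+    // For reference, the default file written above lands at
+    // /sdcard/iscs/arcsoft_config/ArcSoftConfig.json and (modulo JSONObject's serialization and the
+    // actual Constants values) looks like:
+    //
+    //     {
+    //       "appId": "<Constants.APP_ID>",
+    //       "sdkKey": "<Constants.SDK_KEY>",
+    //       "activeKey": "<Constants.ACTIVE_KEY>",
+    //       "activeOnline": false,
+    //       "activeOfflineFilePath": "/sdcard/iscs/arcsoft_config/OfflineActive.dat"
+    //     }
+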
+    // Legacy fields (kept as-is)
+    private var cameraHelper: CameraHelper? = null
+    private var previewSize: Camera.Size? = null
+    private var rgbCameraId: Int? = null
+    private var faceEngine: FaceEngine? = null
+    private val cameraWidth: Int = 640
+    private val cameraHeight: Int = 480
+    private var afCode = -1
+    private val processMask: Int = FaceEngine.ASF_MASK_DETECT or FaceEngine.ASF_LIVENESS
+    private val registerFaceFeatureJob get() = CoroutineScope(Dispatchers.IO + SupervisorJob())
+    private const val ACTION_REQUEST_PERMISSIONS: Int = 0x001
+    var isActivated = false
+
+    @Volatile
+    var inDetecting = false
+
+    // Cache of the latest HLK NOTE data
+    @Volatile
+    private var lastAliveByHlk: Boolean = true
+
+    @Volatile
+    private var lastFaceRectByHlk: Rect? = null
+
+    @Volatile
+    private var lastUserIdByHlk: Long? = null
+
+    @Volatile
+    private var hlkVerifyRunning = false
+    // One long-lived IO scope; a `get()` accessor would create a fresh, never-cancellable scope on every access.
+    private val ioScope = CoroutineScope(Dispatchers.IO + SupervisorJob())
+
+    // ML Kit members
+    @Volatile
+    private var mlDetector: com.google.mlkit.vision.face.FaceDetector? = null
+
+    @Volatile
+    private var lastMlDetectTs = 0L
+
+    private var lastNoFaceTipTs = 0L
+    private fun maybeLogNoFaceTip() {
+        val now = System.currentTimeMillis()
+        if (now - lastNoFaceTipTs > 3000) {
+            logger.info("No face detected; move closer, center the face and check the lighting")
+            lastNoFaceTipTs = now
+        }
+    }
+
+    private fun findCameraIdByFacing(facing: Int): Int? {
+        val count = Camera.getNumberOfCameras()
+        val info = Camera.CameraInfo()
+        for (id in 0 until count) {
+            Camera.getCameraInfo(id, info)
+            if (info.facing == facing) return id
+        }
+        return null
+    }
+
+    private fun ensureMlDetector(): com.google.mlkit.vision.face.FaceDetector {
+        mlDetector?.let { return it }
+        val opts = FaceDetectorOptions.Builder()
+            .setPerformanceMode(FaceDetectorOptions.PERFORMANCE_MODE_FAST)
+            .setLandmarkMode(FaceDetectorOptions.LANDMARK_MODE_NONE)
+            .setClassificationMode(FaceDetectorOptions.CLASSIFICATION_MODE_NONE) // no smile/eyes-open classification needed
+            .enableTracking() // trackingId can be used for frame-to-frame stability
+            .build()
+        return com.google.mlkit.vision.face.FaceDetection.getClient(opts).also { mlDetector = it }
+    }
+
+    // ===== ARC activation / initialization =====
+    fun checkActiveStatus(context: Context) {
+        if (backend == FaceBackend.HLK) {
+            isActivated = true; return
+        }
+        val cfg = readOrInitConfig(context)
+        val code = try {
+            if (cfg.activeOnline) FaceEngine.activeOnline(
+                context,
+                cfg.activeKey,
+                cfg.appId,
+                cfg.sdkKey
+            )
+            else FaceEngine.activeOffline(context, cfg.activeOfflineFilePath)
+        } catch (e: Throwable) {
+            logger.error("Activation exception: ${e.message}", e); ErrorInfo.MERR_UNKNOWN
+        }
+        when (code) {
+            ErrorInfo.MOK, ErrorInfo.MERR_ASF_ALREADY_ACTIVATED -> isActivated = true
+            else -> {
+                isActivated = false; logger.error("checkActiveStatus : active failed $code")
+            }
+        }
+        val afi = ActiveFileInfo()
+        if (FaceEngine.getActiveFileInfo(
+                context,
+                afi
+            ) == ErrorInfo.MOK
+        ) logger.info("getActiveFileInfo: $afi")
+    }
+
+    fun initEngine(context: Context) {
+        if (backend == FaceBackend.HLK) return
+        faceEngine = FaceEngine()
+        afCode = faceEngine!!.init(
+            context,
+            DetectMode.ASF_DETECT_MODE_VIDEO, DetectFaceOrientPriority.ASF_OP_0_ONLY, 1,
+            FaceEngine.ASF_FACE_DETECT or FaceEngine.ASF_MASK_DETECT or FaceEngine.ASF_LIVENESS or FaceEngine.ASF_FACE_RECOGNITION
+        )
+        logger.info("initEngine: $afCode")
+    }
+
+    fun unInitEngine() {
+        if (backend == FaceBackend.HLK) return
+        if (afCode == 0) {
+            afCode = faceEngine!!.unInit(); logger.info("unInitEngine: $afCode")
+        }
+    }
+
+    private fun mirrorRect(src: Rect, width: Int): Rect {
+        // ML Kit coordinates are relative to the image buffer, origin at the top-left corner
+        // Mirroring: x' = width - (x + w)
+        val left = width - (src.left + src.width())
+        val right = width - src.left
+        return Rect(left, src.top, right, src.bottom)
+    }
+
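+    // Worked example: in a 640 px wide buffer, a detected box Rect(100, 50, 180, 200)
+    // (left = 100, width = 80) mirrors to left = 640 - (100 + 80) = 460 and right = 640 - 100 = 540,
+    // giving Rect(460, 50, 540, 200); top and bottom are unchanged.
+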
+    // ================= initCamera =================
+    @JvmOverloads
+    fun initCamera(
+        preview: View,
+        faceOverlayView: FaceOverlayView? = null,
+        needCheckCenter: Boolean = false,
+        callBack: (Bitmap?, Int, Boolean) -> Unit
+    ) {
+        if (rgbCameraId == null) {
+            rgbCameraId = findCameraIdByFacing(Camera.CameraInfo.CAMERA_FACING_FRONT)
+                ?: findCameraIdByFacing(Camera.CameraInfo.CAMERA_FACING_BACK)
+        }
+        val camId = rgbCameraId ?: run { logger.error("No usable camera id found"); return }
+
+        val listener = object : CameraListener {
+            override fun onCameraOpened(
+                camera: Camera,
+                cameraId: Int,
+                displayOrientation: Int,
+                isMirror: Boolean
+            ) {
+                previewSize = camera.parameters.previewSize
+                faceOverlayView?.setCameraPreviewSize(previewSize!!.width, previewSize!!.height)
+                if (backend == FaceBackend.HLK && !hlkVerifyRunning) {
+                    hlkVerifyRunning = true
+                    hlkVerifyJob?.cancel()
+                    hlkVerifyJob = ioScope.launch {
+                        ensureMlDetector()
+                        try {
+                            hlkClient?.startVerifyWithNotes(
+                                timeoutSec = 60, loop = true,
+                                onFaceState = { rect, state, yaw, pitch, roll ->
+                                    logger.info("onFaceState: $rect, $state, $yaw, $pitch, $roll")
+                                    lastFaceRectByHlk = rect
+                                    lastAliveByHlk = (state == 0)
+                                    faceOverlayView?.setFaceRect(rect?.let { listOf(it) }
+                                        ?: emptyList())
+                                },
+                                onLiveness = { alive -> lastAliveByHlk = alive },
+                                onResult = { userId -> lastUserIdByHlk = userId?.toLong() }
+                            )
+                        } finally {
+                            hlkVerifyRunning = false
+                        }
+                    }
+                }
+            }
+
+            override fun onPreview(nv21: ByteArray, camera: Camera?) {
+                val p = previewSize ?: return
+
+                if (backend == FaceBackend.HLK) {
+                    // HLK: keep delivering preview frames; face rects come from ML Kit, liveness from the module NOTE
+                    // Throttle ML Kit to roughly one detection every ~120 ms
+                    val now = System.currentTimeMillis()
+                    val doDetect = (now - lastMlDetectTs) > 120
+                    if (doDetect) {
+                        lastMlDetectTs = now
+                        // rotation: taken from the existing DisplayUtils helper
+                        val rotation = DisplayUtils.getRotation(preview.context) // 0/90/180/270
+                        val imageData = ImageUtils.rotateNV21(nv21, p.width, p.height, 270)
+                        val image = InputImage.fromByteArray(
+                            imageData, p.width, p.height, rotation, ImageFormat.NV21
+                        )
+                        val bmp = ImageConvertUtils.nv21ToBitmap(imageData, p.width, p.height)
+                        ensureMlDetector().process(image)
+                            .addOnSuccessListener { faces ->
+                                // Mirror correction: the preview uses isMirror(true), so flip along the X axis
+                                val rects = faces.map { f ->
+                                    val r = f.boundingBox
+                                    mirrorRect(r, p.width) // see mirrorRect above
+                                }
+                                faceOverlayView?.setFaceRect(rects)
+                                // Optional center-area check
+                                val inCenter =
+                                    rects.firstOrNull()?.isInCenterArea(p.width, p.height) ?: false
+                                if (!inCenter) maybeLogNoFaceTip()
+                                // Do not report liveness here; liveness is owned by the HLK NOTE
+                                // Callback: face count from the latest ML Kit result, liveness from the HLK NOTE
+                                // For simplicity, reuse the overlay's latest rect count (or keep your own lastFaceCountByMl)
+                                val faceCount = faceOverlayView?.lastRectsCount()
+                                    ?: (if (lastFaceRectByHlk != null) 1 else 0)
+
+                                // On the HLK path, liveness still comes from the HLK module
+                                callBack(bmp, faceCount, lastAliveByHlk)
+                            }
+                            .addOnFailureListener {
+                                // Non-fatal; just log it
+                                logger.warn("MLKit detect failed: ${it.message}")
+                            }
+                    }
+                    return
+                }
+
+                // ARC: original flow
+                if (inDetecting) return
+                inDetecting = true
+                val fe = faceEngine ?: run { inDetecting = false; return }
+                val faces = mutableListOf<FaceInfo>()
+                var code = fe.detectFaces(nv21, p.width, p.height, FaceEngine.CP_PAF_NV21, faces)
+                faceOverlayView?.setFaceRect(faces.map { it.rect })
+                if (code != ErrorInfo.MOK || faces.isEmpty()) {
+                    maybeLogNoFaceTip(); inDetecting = false; return
+                }
+                code =
+                    fe.process(nv21, p.width, p.height, FaceEngine.CP_PAF_NV21, faces, processMask)
+                if (code != ErrorInfo.MOK) {
+                    inDetecting = false; return
+                }
+                val liveList = mutableListOf<LivenessInfo>()
+                if (fe.getLiveness(liveList) != ErrorInfo.MOK) {
+                    inDetecting = false; return
+                }
+                if (liveList.none { it.liveness == LivenessInfo.ALIVE }) {
+                    callBack(null, faces.size, false); inDetecting = false; return
+                }
+                if (needCheckCenter && !faces[0].rect.isInCenterArea(p.width, p.height)) {
+                    inDetecting = false; return
+                }
+                val bmp = ImageConvertUtils.nv21ToBitmap(nv21, p.width, p.height)
+                callBack(bmp, faces.size, true)
+                inDetecting = false
+            }
+
+            override fun onCameraClosed() {}
+            override fun onCameraError(e: Exception) {
+                logger.info("onCameraError: ${e.message}")
+            }
+
+            override fun onCameraConfigurationChanged(cameraID: Int, displayOrientation: Int) {}
+        }
+
+        val rotation = DisplayUtils.getRotation(preview.context)
+        cameraHelper = CameraHelper.Builder()
+            .previewViewSize(Point(cameraWidth, cameraHeight))
+            .rotation(rotation)
+            .specificCameraId(camId)
+            .isMirror(true)
+            .previewOn(preview)
+            .cameraListener(listener)
+            .build()
+        cameraHelper!!.init()
+        cameraHelper!!.start()
+    }
+
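+    // A minimal call-site sketch (assumed host code): the callback receives the preview bitmap,
+    // the detected face count, and the liveness flag (ArcSoft in ARC mode, the HLK NOTE in HLK mode).
+    //
+    //     FaceUtil.initCamera(previewView, overlayView, needCheckCenter = true) { bmp, faceCount, alive ->
+    //         if (bmp != null && faceCount == 1 && alive) { /* proceed with capture / recognition */ }
+    //     }
+    //     // ... and later, when leaving the screen:
+    //     FaceUtil.stop()
+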
+    // ================= checkCamera =================
+    fun checkCamera(
+        preview: View,
+        callBack: (Bitmap?, Long?) -> Unit
+    ) {
+        if (rgbCameraId == null) {
+            rgbCameraId = findCameraIdByFacing(Camera.CameraInfo.CAMERA_FACING_FRONT)
+                ?: findCameraIdByFacing(Camera.CameraInfo.CAMERA_FACING_BACK)
+        }
+        val camId = rgbCameraId ?: run { logger.error("No usable camera id found"); return }
+
+        val listener = object : CameraListener {
+            override fun onCameraOpened(
+                camera: Camera,
+                cameraId: Int,
+                displayOrientation: Int,
+                isMirror: Boolean
+            ) {
+                previewSize = camera.parameters.previewSize
+                if (backend == FaceBackend.HLK && !hlkVerifyRunning) {
+                    hlkVerifyRunning = true
+                    hlkVerifyJob?.cancel()
+                    hlkVerifyJob = ioScope.launch {
+                        try {
+                            hlkClient?.startVerifyWithNotes(
+                                timeoutSec = 15, loop = false,
+                                onFaceState = { rect, state, yaw, pitch, roll ->
+                                    logger.info("onFaceState: $rect, $state, $yaw, $pitch, $roll")
+                                    lastFaceRectByHlk = rect; lastAliveByHlk = (state == 0)
+                                },
+                                onLiveness = { alive -> lastAliveByHlk = alive },
+                                onResult = { userId -> lastUserIdByHlk = userId?.toLong() }
+                            )
+                        } finally {
+                            hlkVerifyRunning = false
+                        }
+                    }
+                }
+            }
+
+            override fun onPreview(nv21: ByteArray?, camera: Camera?) {
+                val p = previewSize ?: return
+                val bmp = ImageConvertUtils.nv21ToBitmap(nv21, p.width, p.height)
+
+                if (backend == FaceBackend.HLK) {
+                    callBack(bmp, lastUserIdByHlk)
+                    return
+                }
+
+                // ARC: original flow
+                if (inDetecting) return
+                inDetecting = true
+                val fe = faceEngine ?: run { inDetecting = false; return }
+                val faces = mutableListOf<FaceInfo>()
+                var code = fe.detectFaces(nv21, p.width, p.height, FaceEngine.CP_PAF_NV21, faces)
+                if (code != ErrorInfo.MOK || faces.isEmpty()) {
+                    inDetecting = false; return
+                }
+                code =
+                    fe.process(nv21, p.width, p.height, FaceEngine.CP_PAF_NV21, faces, processMask)
+                if (code != ErrorInfo.MOK) {
+                    inDetecting = false; return
+                }
+                val liveList = mutableListOf<LivenessInfo>()
+                val lc = fe.getLiveness(liveList)
+                if (lc != ErrorInfo.MOK) {
+                    inDetecting = false; return
+                }
+                if (liveList.none { it.liveness == LivenessInfo.ALIVE }) {
+                    callBack(null, null); inDetecting = false; return
+                }
+                val ft = FaceFeature()
+                fe.extractFaceFeature(
+                    nv21,
+                    p.width,
+                    p.height,
+                    FaceEngine.CP_PAF_NV21,
+                    faces[0],
+                    ExtractType.RECOGNIZE,
+                    0,
+                    ft
+                )
+                val searchResult =
+                    runCatching { if (fe.faceCount > 0) fe.searchFaceFeature(ft) else null }.getOrNull()
+                callBack(bmp, searchResult?.faceFeatureInfo?.searchId?.toLong())
+                inDetecting = false
+            }
+
+            override fun onCameraClosed() {}
+            override fun onCameraError(e: Exception) {
+                logger.info("onCameraError: ${e.message}")
+            }
+
+            override fun onCameraConfigurationChanged(cameraID: Int, displayOrientation: Int) {}
+        }
+
+        val rotation = DisplayUtils.getRotation(preview.context)
+        cameraHelper = CameraHelper.Builder()
+            .previewViewSize(Point(cameraWidth, cameraHeight))
+            .rotation(rotation)
+            .specificCameraId(camId)
+            .isMirror(true)
+            .previewOn(preview)
+            .cameraListener(listener)
+            .build()
+        cameraHelper!!.init()
+        cameraHelper!!.start()
+    }
+
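+    // A minimal call-site sketch (assumed host code): in HLK mode the Long is the user id from the
+    // module's verify result; in ARC mode it is the searchId of the best match in the local feature DB.
+    //
+    //     FaceUtil.checkCamera(previewView) { bmp, matchedUserId ->
+    //         if (matchedUserId != null) { /* unlock / record attendance for matchedUserId */ }
+    //     }
+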
+    fun stop() {
+        cameraHelper?.release()
+        cameraHelper = null
+        hlkVerifyJob?.cancel()
+        hlkVerifyJob = null
+        hlkClient?.stopVerify() // Belt and braces: make the device-side verify loop exit immediately
+    }
+
+    // ----------------- Registration / comparison (local feature DB applies to ARC; HLK enrolls on the module) -----------------
+    fun registerFace(faceData: List<Pair<Long, String>>) {
+        if (backend == FaceBackend.HLK) {
+            // In HLK mode, enroll through the module (Hlk223PhotoEnroll / ENROLL_ITG)
+
+            faceData.forEach { (uid, b64) ->
+                ThreadUtils.runOnIO {
+                    val jpegBytes = ImageCompress.base64ToJpegUnder(b64, 1024 * 6)
+                    val crc32 = CRC32().apply { update(jpegBytes) }.value.toInt()
+                    logger.info("Photo size: {}", jpegBytes.size)
+                    val userId = hlkClient?.enrollWithPhoto(jpegBytes, crc32 = crc32)
+                    logger.info("Enrolled id: {}", userId)
+                }
+            }
+            return
+        }
+        faceEngine?.removeFaceFeature(-1)
+        faceData.forEach { (uid, b64) ->
+            val bmp = decodeBase64ToBitmap(b64)
+            val img = bitmapToBgr24(bmp)
+            val faces = mutableListOf<FaceInfo>()
+            val code =
+                faceEngine?.detectFaces(img, bmp.width, bmp.height, FaceEngine.CP_PAF_BGR24, faces)
+            if (faces.isNullOrEmpty()) return@forEach
+            val faceFeature = FaceFeature()
+            faceEngine?.extractFaceFeature(
+                img,
+                bmp.width,
+                bmp.height,
+                FaceEngine.CP_PAF_BGR24,
+                faces[0],
+                ExtractType.REGISTER,
+                0,
+                faceFeature
+            )
+            val info = FaceFeatureInfo(uid.toInt(), faceFeature.featureData)
+            try {
+                if ((faceEngine?.searchFaceFeature(faceFeature)?.maxSimilar ?: 0f) > 0.5) {
+                    faceEngine?.updateFaceFeature(info)
+                } else {
+                    faceEngine?.registerFaceFeature(info)
+                }
+            } catch (_: Exception) {
+                faceEngine?.registerFaceFeature(info)
+            }
+        }
+    }
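+
+    // A minimal call-site sketch (assumed data): each entry pairs a user id with a Base64-encoded face photo.
+    // In HLK mode the photo is enrolled on the module; in ARC mode a feature is extracted and stored locally.
+    //
+    //     val photos: List<Pair<Long, String>> = listOf(1001L to base64OfUserPhoto)   // base64OfUserPhoto is assumed
+    //     FaceUtil.registerFace(photos)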
+
+    fun verifyFaceArcSoft(b64a: String, b64b: String, threshold: Float = 0.7f): Boolean {
+        if (backend == FaceBackend.HLK) return false
+        if (b64a.isEmpty() || b64b.isEmpty()) return false
+        val bmpA = decodeBase64ToBitmap(b64a)
+        val bmpB = decodeBase64ToBitmap(b64b)
+        val facesA = mutableListOf<FaceInfo>()
+        val facesB = mutableListOf<FaceInfo>()
+        val imgA = bitmapToBgr24(bmpA)
+        val imgB = bitmapToBgr24(bmpB)
+        val codeA =
+            faceEngine?.detectFaces(imgA, bmpA.width, bmpA.height, FaceEngine.CP_PAF_BGR24, facesA)
+        val codeB =
+            faceEngine?.detectFaces(imgB, bmpB.width, bmpB.height, FaceEngine.CP_PAF_BGR24, facesB)
+        if (codeA != ErrorInfo.MOK || codeB != ErrorInfo.MOK || facesA.isEmpty() || facesB.isEmpty()) return false
+        val ftA = FaceFeature()
+        val ftB = FaceFeature()
+        faceEngine?.extractFaceFeature(
+            imgA,
+            bmpA.width,
+            bmpA.height,
+            FaceEngine.CP_PAF_BGR24,
+            facesA[0],
+            ExtractType.RECOGNIZE,
+            0,
+            ftA
+        )
+        faceEngine?.extractFaceFeature(
+            imgB,
+            bmpB.width,
+            bmpB.height,
+            FaceEngine.CP_PAF_BGR24,
+            facesB[0],
+            ExtractType.RECOGNIZE,
+            0,
+            ftB
+        )
+        val sim = FaceSimilar()
+        if (faceEngine?.compareFaceFeature(ftA, ftB, sim) != ErrorInfo.MOK) return false
+        return sim.score >= threshold
+    }
+
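+    // A minimal call-site sketch (assumed inputs): compares two Base64 photos with the ArcSoft engine and
+    // returns true when the similarity score reaches the threshold (default 0.7f). Always false in HLK mode.
+    //
+    //     val samePerson = FaceUtil.verifyFaceArcSoft(base64A, base64B, threshold = 0.8f)
+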
+    // ===== Utilities =====
+    private fun decodeBase64ToBitmap(b64: String): Bitmap {
+        val bytes = Base64.decode(b64, Base64.DEFAULT)
+        return BitmapFactory.decodeByteArray(bytes, 0, bytes.size)
+    }
+
+    private fun bitmapToBgr24(bitmap: Bitmap): ByteArray {
+        val w = bitmap.width
+        val h = bitmap.height
+        val pixels = IntArray(w * h); bitmap.getPixels(pixels, 0, w, 0, 0, w, h)
+        val bgr = ByteArray(w * h * 3)
+        var i = 0
+        for (p in pixels) {
+            bgr[i++] = (p and 0xFF).toByte()
+            bgr[i++] = ((p shr 8) and 0xFF).toByte()
+            bgr[i++] = ((p shr 16) and 0xFF).toByte()
+        }
+        return bgr
+    }
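+
+    // Worked example: an ARGB_8888 pixel 0xFF336699 (A = 0xFF, R = 0x33, G = 0x66, B = 0x99) is written
+    // to the BGR24 buffer as the three bytes 0x99, 0x66, 0x33: blue first, then green, then red.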
+}