@@ -30,27 +30,30 @@ import com.grkj.shared.utils.extension.isInCenterArea
 import com.grkj.shared.utils.face.arcsoft.CameraHelper
 import com.grkj.shared.utils.face.arcsoft.CameraListener
 import com.grkj.shared.widget.FaceOverlayView
-import com.sik.sikcore.extension.toJson
 import com.sik.sikcore.thread.ThreadUtils
 import com.sik.sikimage.ImageConvertUtils
 import com.sik.sikimage.ImageUtils
-import kotlinx.coroutines.CoroutineScope
-import kotlinx.coroutines.Dispatchers
-import kotlinx.coroutines.Job
-import kotlinx.coroutines.SupervisorJob
-import kotlinx.coroutines.delay
-import kotlinx.coroutines.launch
+import kotlinx.coroutines.*
+import kotlinx.coroutines.sync.Mutex
+import kotlinx.coroutines.sync.withLock
 import org.json.JSONObject
 import org.slf4j.Logger
 import org.slf4j.LoggerFactory
 import java.io.File
+import java.lang.ref.WeakReference
+import java.util.concurrent.Executors
+import java.util.concurrent.atomic.AtomicBoolean
+import java.util.concurrent.atomic.AtomicLong
 import java.util.zip.CRC32
-import kotlin.math.log
+import kotlin.coroutines.CoroutineContext
+import java.util.concurrent.locks.ReentrantLock
 
 /**
  * FaceUtil 人脸:兼容 HLK 模组;对外 API 不变
  * - HLK 模式:检测/活体/识别走模组 NOTE/REPLY;APP 只负责预览与回调
- * - ARC 模式:沿用原始 ArcSoft 流程
+ * - ARC 模式:沿用原始 ArcSoft 流程(现已串行化至单线程,避免并发踩崩)
+ * - 本版新增:多维度互斥锁,消除 onPreview/引擎生命周期/HLK 验证的并发竞态
+ * - checkCamera 命中即停:相似度>阈值立即 stop,并拦截后续 onPreview
 */
 object FaceUtil {
     private val logger: Logger = LoggerFactory.getLogger(FaceUtil::class.java)
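
// Illustrative sketch (editor's addition, not part of the patch): the "stop on hit" gating the KDoc
// above describes, reduced to its core — an AtomicBoolean latch set exactly once when a score crosses
// the threshold, after which every later frame is dropped. HitLatchDemo, onHit and the frame values
// are hypothetical stand-ins, not APIs from this file.
import java.util.concurrent.atomic.AtomicBoolean

class HitLatchDemo(private val threshold: Float, private val onHit: (Float) -> Unit) {
    private val stopAfterHit = AtomicBoolean(false)

    fun onFrame(score: Float) {
        if (stopAfterHit.get()) return                          // a hit was already delivered: drop the frame
        if (score > threshold && stopAfterHit.compareAndSet(false, true)) {
            onHit(score)                                        // delivered exactly once
        }
    }
}

fun main() {
    val demo = HitLatchDemo(0.5f) { println("hit, score=$it") }
    listOf(0.2f, 0.6f, 0.9f).forEach(demo::onFrame)             // prints a single "hit, score=0.6"
}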
@@ -69,9 +72,21 @@ object FaceUtil {
|
|
|
@Volatile
|
|
|
private var registerUserIdAndLocalUserId: HashMap<Int, Long> = hashMapOf()
|
|
|
|
|
|
+ // === 统一上下文持有:懒初始化 Arc 引擎 ===
|
|
|
+ @Volatile
|
|
|
+ private var appContextRef: WeakReference<Context>? = null
|
|
|
+ private fun stashAppContext(ctx: Context?) {
|
|
|
+ if (ctx == null) return
|
|
|
+ val app = ctx.applicationContext ?: ctx
|
|
|
+ if (appContextRef?.get() == null) appContextRef = WeakReference(app)
|
|
|
+ }
|
|
|
+
|
|
|
+ private fun appContext(): Context? = appContextRef?.get()
|
|
|
+
|
|
|
@JvmStatic
|
|
|
fun enableHlkBackend(client: Hlk223Client) {
|
|
|
- hlkClient = client; backend = FaceBackend.HLK
|
|
|
+ hlkClient = client
|
|
|
+ backend = FaceBackend.HLK
|
|
|
checkLive()
|
|
|
}
|
|
|
|
|
|
@@ -79,19 +94,22 @@ object FaceUtil {
|
|
|
if (hlkClient == null) return
|
|
|
hlkVerifyJob?.cancel()
|
|
|
hlkVerifyJob = ioScope.launch {
|
|
|
- delay(200)
|
|
|
- val version = hlkClient?.getVersion()
|
|
|
- logger.info("hlk version: $version")
|
|
|
- logger.info("配置参数")
|
|
|
- hlkClient?.setUvcParam(byteArrayOf(0x20.toByte(), 0b00000010.toByte()))
|
|
|
- logger.info("清除人脸")
|
|
|
- hlkClient?.deleteAllUsers()
|
|
|
+ hlkLock.withLock {
|
|
|
+ delay(200)
|
|
|
+ val version = hlkClient?.getVersion()
|
|
|
+ logger.info("hlk version: $version")
|
|
|
+ logger.info("配置参数")
|
|
|
+ hlkClient?.setUvcParam(byteArrayOf(0x20.toByte(), 0b00000010.toByte()))
|
|
|
+ logger.info("清除人脸")
|
|
|
+ hlkClient?.deleteAllUsers()
|
|
|
+ }
|
|
|
}
|
|
|
}
|
|
|
|
|
|
@JvmStatic
|
|
|
fun disableHlkBackend() {
|
|
|
- backend = FaceBackend.ARC; hlkClient = null
|
|
|
+ backend = FaceBackend.ARC
|
|
|
+ hlkClient = null
|
|
|
}
|
|
|
|
|
|
// ArcSoft 授权配置(仅 ARC 用)
|
|
|
@@ -118,11 +136,11 @@ object FaceUtil {
|
|
|
activeOfflineFilePath = offlineDat.absolutePath
|
|
|
)
|
|
|
cfg.writeText(JSONObject().apply {
|
|
|
- put("appId", def.appId); put("sdkKey", def.sdkKey); put("activeKey", def.activeKey)
|
|
|
- put("activeOnline", def.activeOnline); put(
|
|
|
- "activeOfflineFilePath",
|
|
|
- def.activeOfflineFilePath
|
|
|
- )
|
|
|
+ put("appId", def.appId)
|
|
|
+ put("sdkKey", def.sdkKey)
|
|
|
+ put("activeKey", def.activeKey)
|
|
|
+ put("activeOnline", def.activeOnline)
|
|
|
+ put("activeOfflineFilePath", def.activeOfflineFilePath)
|
|
|
}.toString())
|
|
|
return def
|
|
|
}
|
|
|
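
// Illustrative sketch (editor's addition, not part of the patch): the read-back counterpart of the
// JSON written by readOrInitConfig above. The key names come from the put(...) calls in the hunk;
// treating activeOnline as a Boolean and returning a plain Map are assumptions for illustration.
import org.json.JSONObject
import java.io.File

fun readArcConfig(cfgFile: File): Map<String, Any> {
    val json = JSONObject(cfgFile.readText())
    return mapOf(
        "appId" to json.getString("appId"),
        "sdkKey" to json.getString("sdkKey"),
        "activeKey" to json.getString("activeKey"),
        "activeOnline" to json.getBoolean("activeOnline"),
        "activeOfflineFilePath" to json.getString("activeOfflineFilePath")
    )
}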
@@ -137,6 +155,29 @@ object FaceUtil {
|
|
|
)
|
|
|
}
|
|
|
|
|
|
+    // ====== 线程域与调度器(固定,不再动态创建) ======
+    private val appJob = SupervisorJob()
+
+    private val ioScope = CoroutineScope(Dispatchers.IO + appJob)
+    private val defaultScope = CoroutineScope(Dispatchers.Default + appJob)
+
+    // FaceEngine 专用单线程调度器,串行化所有引擎调用,避免并发崩溃
+    private val feExecutor = Executors.newSingleThreadExecutor { r ->
+        Thread(r, "FaceEngine-Serial").apply { isDaemon = true }
+    }
+    private val feDispatcher: CoroutineContext = feExecutor.asCoroutineDispatcher()
+    private val feScope = CoroutineScope(feDispatcher + appJob)
+
+    // —— 并发门闸 —— //
+    // 预览阶段:防止 onPreview 并发进入 ARC 检测(丢帧式)
+    private val detectGate = Mutex()
+    // ARC 引擎生命周期 & 注册/比对串行
+    private val arcLock = Mutex()
+    // HLK 验证流程串行
+    private val hlkLock = Mutex()
+    // 相机生命周期原子化
+    private val cameraLock = ReentrantLock()
+
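
// Illustrative sketch (editor's addition, not part of the patch): how a single-thread dispatcher plus
// the tryLock "drop-frame" gate declared above serialize access to a non-thread-safe engine. FakeEngine
// and the frame loop are hypothetical stand-ins; only the pattern mirrors the fields above.
import kotlinx.coroutines.*
import kotlinx.coroutines.sync.Mutex
import java.util.concurrent.Executors

object SerialEngineDemo {
    private val job = SupervisorJob()
    private val engineDispatcher = Executors.newSingleThreadExecutor { r ->
        Thread(r, "Engine-Serial").apply { isDaemon = true }
    }.asCoroutineDispatcher()
    private val engineScope = CoroutineScope(engineDispatcher + job)
    private val gate = Mutex()

    class FakeEngine { fun detect(frame: Int) = "faces in frame $frame" }
    private val engine = FakeEngine()

    // Called from an arbitrary camera thread: frames that arrive while one is still being
    // processed are dropped by tryLock() instead of queueing up behind the engine.
    fun onFrame(frame: Int) {
        if (!gate.tryLock()) return
        engineScope.launch {
            try {
                println(engine.detect(frame))   // always executed on the "Engine-Serial" thread
            } finally {
                gate.unlock()
            }
        }
    }
}

fun main() = runBlocking {
    repeat(5) { SerialEngineDemo.onFrame(it) }  // most frames are dropped; only the first is processed
    delay(200)                                  // let the serialized work finish before the demo exits
}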
// 旧字段(保持)
|
|
|
private var cameraHelper: CameraHelper? = null
|
|
|
private var previewSize: Camera.Size? = null
|
|
|
@@ -146,12 +187,14 @@ object FaceUtil {
|
|
|
private val cameraHeight: Int = 480
|
|
|
private var afCode = -1
|
|
|
private val processMask: Int = FaceEngine.ASF_MASK_DETECT or FaceEngine.ASF_LIVENESS
|
|
|
- private val registerFaceFeatureJob get() = CoroutineScope(Dispatchers.IO + SupervisorJob())
|
|
|
private const val ACTION_REQUEST_PERMISSIONS: Int = 0x001
|
|
|
var isActivated = false
|
|
|
|
|
|
@Volatile
|
|
|
- var inDetecting = false
|
|
|
+ var inDetecting = false // 保留字段但不作为互斥依据
|
|
|
+
|
|
|
+ // —— 命中即停(仅 checkCamera 使用)——
|
|
|
+ private val stopAfterHit = AtomicBoolean(false)
|
|
|
|
|
|
// HLK NOTE 缓存
|
|
|
@Volatile
|
|
|
@@ -165,15 +208,31 @@ object FaceUtil {
|
|
|
|
|
|
@Volatile
|
|
|
private var hlkVerifyRunning = false
|
|
|
- private val ioScope get() = CoroutineScope(Dispatchers.IO + SupervisorJob())
|
|
|
|
|
|
- // 成员区
|
|
|
+ // MLKit & 节流
|
|
|
@Volatile
|
|
|
private var mlDetector: com.google.mlkit.vision.face.FaceDetector? = null
|
|
|
|
|
|
@Volatile
|
|
|
private var lastMlDetectTs = 0L
|
|
|
|
|
|
+    private val mlDetectRunning = AtomicBoolean(false)
+
+    // 回调节流(统一控制 UI 侧压力)
+    private const val CALLBACK_MIN_INTERVAL_MS = 1000L
+    private val lastInitCbTs = AtomicLong(0L)
+    private val lastCheckCbTs = AtomicLong(0L)
+
+    private fun shouldEmit(lastTs: AtomicLong, now: Long = System.currentTimeMillis()): Boolean {
+        val prev = lastTs.get()
+        if (now - prev < CALLBACK_MIN_INTERVAL_MS) return false
+        // compareAndSet so two racing threads cannot both pass the same throttle window
+        return lastTs.compareAndSet(prev, now)
+    }
+
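
// Illustrative sketch (editor's addition, not part of the patch): the behaviour expected from the
// shouldEmit() throttle above — within one CALLBACK_MIN_INTERVAL_MS window only the first caller gets
// through, and each AtomicLong tracks its own window, so the initCamera and checkCamera callbacks are
// throttled independently. The timestamps below are arbitrary example values.
import java.util.concurrent.atomic.AtomicLong

fun main() {
    val windowMs = 1000L
    val last = AtomicLong(0L)
    fun emit(now: Long): Boolean {
        val prev = last.get()
        return now - prev >= windowMs && last.compareAndSet(prev, now)
    }
    println(emit(10_000L)) // true  – first callback opens the window
    println(emit(10_400L)) // false – still inside the 1 s window
    println(emit(11_100L)) // true  – the window has elapsed
}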
private var lastNoFaceTipTs = 0L
|
|
|
private fun maybeLogNoFaceTip() {
|
|
|
val now = System.currentTimeMillis()
|
|
|
@@ -198,21 +257,19 @@ object FaceUtil {
|
|
|
val opts = FaceDetectorOptions.Builder()
|
|
|
.setPerformanceMode(FaceDetectorOptions.PERFORMANCE_MODE_FAST)
|
|
|
.setLandmarkMode(FaceDetectorOptions.LANDMARK_MODE_NONE)
|
|
|
- .setClassificationMode(FaceDetectorOptions.CLASSIFICATION_MODE_NONE) // 不用笑容/眨眼
|
|
|
- .enableTracking() // 拿 trackingId 可做稳定性
|
|
|
+ .setClassificationMode(FaceDetectorOptions.CLASSIFICATION_MODE_NONE)
|
|
|
+ .enableTracking()
|
|
|
.build()
|
|
|
return com.google.mlkit.vision.face.FaceDetection.getClient(opts).also { mlDetector = it }
|
|
|
}
|
|
|
|
|
|
-
|
|
|
// ===== ARC 激活/初始化 =====
|
|
|
fun checkActiveStatus(context: Context) {
|
|
|
+ stashAppContext(context)
|
|
|
if (backend == FaceBackend.HLK) {
|
|
|
isActivated = true; return
|
|
|
}
|
|
|
- if (isActivated) {
|
|
|
- return
|
|
|
- }
|
|
|
+ if (isActivated) return
|
|
|
val cfg = readOrInitConfig(context)
|
|
|
val code = try {
|
|
|
if (cfg.activeOnline) FaceEngine.activeOnline(
|
|
|
@@ -232,37 +289,62 @@ object FaceUtil {
|
|
|
}
|
|
|
}
|
|
|
val afi = ActiveFileInfo()
|
|
|
- if (FaceEngine.getActiveFileInfo(
|
|
|
- context,
|
|
|
- afi
|
|
|
- ) == ErrorInfo.MOK
|
|
|
- ) logger.info("getActiveFileInfo: $afi")
|
|
|
+ if (FaceEngine.getActiveFileInfo(context, afi) == ErrorInfo.MOK)
|
|
|
+ logger.info("getActiveFileInfo: $afi")
|
|
|
}
|
|
|
|
|
|
fun initEngine(context: Context) {
|
|
|
+ stashAppContext(context)
|
|
|
if (backend == FaceBackend.HLK) return
|
|
|
- if (faceEngine != null) {
|
|
|
- return
|
|
|
+ runBlocking(feDispatcher) {
|
|
|
+ arcLock.withLock {
|
|
|
+ if (faceEngine != null) return@withLock
|
|
|
+ faceEngine = FaceEngine()
|
|
|
+ afCode = faceEngine!!.init(
|
|
|
+ context,
|
|
|
+ DetectMode.ASF_DETECT_MODE_VIDEO,
|
|
|
+ DetectFaceOrientPriority.ASF_OP_0_ONLY, 1,
|
|
|
+ FaceEngine.ASF_FACE_DETECT or FaceEngine.ASF_MASK_DETECT or
|
|
|
+ FaceEngine.ASF_LIVENESS or FaceEngine.ASF_FACE_RECOGNITION
|
|
|
+ )
|
|
|
+ logger.info("initEngine: $afCode")
|
|
|
+ }
|
|
|
}
|
|
|
- faceEngine = FaceEngine()
|
|
|
- afCode = faceEngine!!.init(
|
|
|
- context,
|
|
|
- DetectMode.ASF_DETECT_MODE_VIDEO, DetectFaceOrientPriority.ASF_OP_0_ONLY, 1,
|
|
|
- FaceEngine.ASF_FACE_DETECT or FaceEngine.ASF_MASK_DETECT or FaceEngine.ASF_LIVENESS or FaceEngine.ASF_FACE_RECOGNITION
|
|
|
- )
|
|
|
- logger.info("initEngine: $afCode")
|
|
|
}
|
|
|
|
|
|
fun unInitEngine() {
|
|
|
if (backend == FaceBackend.HLK) return
|
|
|
- if (afCode == 0) {
|
|
|
- afCode = faceEngine!!.unInit(); logger.info("unInitEngine: $afCode")
|
|
|
+ runBlocking(feDispatcher) {
|
|
|
+ arcLock.withLock {
|
|
|
+ if (afCode == 0 && faceEngine != null) {
|
|
|
+ afCode = faceEngine!!.unInit()
|
|
|
+ logger.info("unInitEngine: $afCode")
|
|
|
+ }
|
|
|
+ faceEngine = null
|
|
|
+ }
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+    @Synchronized
+    private fun ensureArcReady(): Boolean {
+        if (backend == FaceBackend.HLK) return true
+        if (isActivated && faceEngine != null && afCode == 0) return true
+        val ctx = appContext()
+        if (ctx == null) {
+            logger.error("ensureArcReady: 缺少可用 Context")
+            return false
+        }
+        // Deliberately NOT wrapped in runBlocking(feDispatcher) + arcLock here:
+        // checkActiveStatus()/initEngine() already serialize themselves on the single
+        // FaceEngine thread, and nesting another runBlocking on that same single-thread
+        // dispatcher (while re-locking the non-reentrant arcLock) would deadlock it.
+        if (!isActivated) checkActiveStatus(ctx)
+        if (faceEngine == null || afCode != 0) initEngine(ctx)
+        return (isActivated && faceEngine != null && afCode == 0).also { ok ->
+            if (!ok) logger.error("ensureArcReady: 初始化失败 isActivated=$isActivated, faceEngine=${faceEngine != null}, afCode=$afCode")
         }
     }
 
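
// Illustrative sketch (editor's addition, not part of the patch): why ensureArcReady() must not nest
// another runBlocking(feDispatcher) — blocking the only thread of a single-thread dispatcher while
// queueing more work onto that same dispatcher can never make progress. The demo uses a throwaway
// dispatcher and a timeout so it terminates instead of hanging.
import kotlinx.coroutines.*
import java.util.concurrent.Executors

fun main() = runBlocking {
    val single = Executors.newSingleThreadExecutor { r ->
        Thread(r, "demo-serial").apply { isDaemon = true }     // daemon so the JVM can still exit
    }.asCoroutineDispatcher()

    // Occupy the only thread, then ask the same dispatcher for another runBlocking turn:
    // the inner coroutine is queued behind a thread that is blocked waiting for it.
    val stuck = CoroutineScope(single).launch {
        runBlocking(single) { println("never reached") }
    }

    val completed = withTimeoutOrNull(500) { stuck.join(); true }
    println("nested runBlocking completed: $completed")        // prints: nested runBlocking completed: null
    single.close()
}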
     private fun mirrorRect(src: Rect, width: Int): Rect {
-        // ML Kit 坐标:以图像 buffer 为参考,左上为原点
-        // 镜像:x' = width - (x + w)
         val left = width - (src.left + src.width())
         val right = width - src.left
         return Rect(left, src.top, right, src.bottom)
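
// Illustrative sketch (editor's addition, not part of the patch): a worked example of the mirroring used
// by mirrorRect above for the horizontally flipped front-camera preview, i.e. x' = width - (x + w).
import android.graphics.Rect

fun main() {
    val width = 640
    val src = Rect(100, 40, 150, 120)               // left = 100, width() = 50
    val left = width - (src.left + src.width())     // 640 - (100 + 50) = 490
    val right = width - src.left                    // 640 - 100       = 540
    println(Rect(left, src.top, right, src.bottom)) // Rect(490, 40 - 540, 120)
}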
@@ -276,6 +358,8 @@ object FaceUtil {
|
|
|
needCheckCenter: Boolean = false,
|
|
|
callBack: (Bitmap?, Int, Boolean) -> Unit
|
|
|
) {
|
|
|
+ stashAppContext(preview.context)
|
|
|
+
|
|
|
if (rgbCameraId == null) {
|
|
|
rgbCameraId = findCameraIdByFacing(Camera.CameraInfo.CAMERA_FACING_FRONT)
|
|
|
?: findCameraIdByFacing(Camera.CameraInfo.CAMERA_FACING_BACK)
|
|
|
@@ -298,18 +382,20 @@ object FaceUtil {
|
|
|
hlkVerifyJob = ioScope.launch {
|
|
|
ensureMlDetector()
|
|
|
try {
|
|
|
- hlkClient?.startVerifyWithNotes(
|
|
|
- timeoutSec = 60, loop = true,
|
|
|
- onFaceState = { rect, state, yaw, pitch, roll ->
|
|
|
- logger.info("onFaceState: $rect, $state, $yaw, $pitch, $roll")
|
|
|
- lastFaceRectByHlk = rect
|
|
|
- lastAliveByHlk = true
|
|
|
- faceOverlayView?.setFaceRect(rect?.let { listOf(it) }
|
|
|
- ?: emptyList())
|
|
|
- },
|
|
|
- onLiveness = { alive -> lastAliveByHlk = true },
|
|
|
- onResult = { userId -> lastUserIdByHlk = userId }
|
|
|
- )
|
|
|
+ hlkLock.withLock {
|
|
|
+ hlkClient?.startVerifyWithNotes(
|
|
|
+ timeoutSec = 60, loop = true,
|
|
|
+ onFaceState = { rect, state, yaw, pitch, roll ->
|
|
|
+ lastFaceRectByHlk = rect
|
|
|
+ lastAliveByHlk = true
|
|
|
+ ThreadUtils.runOnMain {
|
|
|
+ faceOverlayView?.setFaceRect(rect?.let { listOf(it) } ?: emptyList())
|
|
|
+ }
|
|
|
+ },
|
|
|
+ onLiveness = { _ -> lastAliveByHlk = true },
|
|
|
+ onResult = { userId -> lastUserIdByHlk = userId }
|
|
|
+ )
|
|
|
+ }
|
|
|
} finally {
|
|
|
hlkVerifyRunning = false
|
|
|
}
|
|
|
@@ -320,77 +406,82 @@ object FaceUtil {
|
|
|
override fun onPreview(nv21: ByteArray, camera: Camera?) {
|
|
|
val p = previewSize ?: return
|
|
|
|
|
|
+ // HLK 路径:仅做 MLKit 画框与节流回调
|
|
|
if (backend == FaceBackend.HLK) {
|
|
|
- // HLK:持续回预览;人脸数量固定 1;活体来自 NOTE
|
|
|
- // —— ML Kit 节流:~120ms 一次 ——
|
|
|
val now = System.currentTimeMillis()
|
|
|
val doDetect = (now - lastMlDetectTs) > 120
|
|
|
- if (doDetect) {
|
|
|
+ if (doDetect && mlDetectRunning.compareAndSet(false, true)) {
|
|
|
lastMlDetectTs = now
|
|
|
- // rotation:基于你已有的 DisplayUtils
|
|
|
- val rotation = DisplayUtils.getRotation(preview.context) // 0/90/180/270
|
|
|
+ val rotation = DisplayUtils.getRotation(preview.context)
|
|
|
val imageData = ImageUtils.rotateNV21(nv21, p.width, p.height, 270)
|
|
|
val image = InputImage.fromByteArray(
|
|
|
imageData, p.width, p.height, rotation, ImageFormat.NV21
|
|
|
)
|
|
|
- val bmp = ImageConvertUtils.nv21ToBitmap(imageData, p.width, p.height)
|
|
|
- ensureMlDetector().process(image)
|
|
|
+
|
|
|
+ ensureMlDetector()
|
|
|
+ .process(image)
|
|
|
.addOnSuccessListener { faces ->
|
|
|
- // 镜像修正:相机是 isMirror(true) → X 轴翻转
|
|
|
- val rects = faces.map { f ->
|
|
|
- val r = f.boundingBox
|
|
|
- mirrorRect(r, p.width) // 见下方函数
|
|
|
+ val rects = faces.map { f -> mirrorRect(f.boundingBox, p.width) }
|
|
|
+ defaultScope.launch(Dispatchers.Main) {
|
|
|
+ faceOverlayView?.setFaceRect(rects)
|
|
|
}
|
|
|
- faceOverlayView?.setFaceRect(rects)
|
|
|
- // 可做中心判定
|
|
|
- val inCenter =
|
|
|
- rects.firstOrNull()?.isInCenterArea(p.width, p.height) ?: false
|
|
|
+ val inCenter = rects.firstOrNull()?.isInCenterArea(p.width, p.height) ?: false
|
|
|
if (!inCenter) maybeLogNoFaceTip()
|
|
|
- // 不在此处回调活体;活体交给 HLK NOTE
|
|
|
- // —— 回调:人脸数量来自 MLKit(最近一次),活体来自 HLK NOTE ——
|
|
|
- // 为了简单起见,这里用 overlay 里最新的 rect 数量(或你自己存储 lastFaceCountByMl)
|
|
|
- val faceCount = faceOverlayView?.lastRectsCount()
|
|
|
- ?: (if (lastFaceRectByHlk != null) 1 else 0)
|
|
|
-
|
|
|
- // HLK 路径下仍然以 HLK 的活体为准
|
|
|
- callBack(bmp, faceCount, lastAliveByHlk)
|
|
|
+
|
|
|
+ if (shouldEmit(lastInitCbTs)) {
|
|
|
+ ioScope.launch {
|
|
|
+ val bmp = ImageConvertUtils.nv21ToBitmap(imageData, p.width, p.height)
|
|
|
+ ThreadUtils.runOnMain { callBack(bmp, rects.size, lastAliveByHlk) }
|
|
|
+ }
|
|
|
+ }
|
|
|
}
|
|
|
.addOnFailureListener {
|
|
|
- // 静默或日志
|
|
|
logger.warn("MLKit detect failed: ${it.message}")
|
|
|
}
|
|
|
+ .addOnCompleteListener {
|
|
|
+ mlDetectRunning.set(false)
|
|
|
+ }
|
|
|
}
|
|
|
return
|
|
|
}
|
|
|
|
|
|
- // ARC:原流程
|
|
|
- if (inDetecting) return
|
|
|
- inDetecting = true
|
|
|
- val fe = faceEngine ?: run { inDetecting = false; return }
|
|
|
- val faces = mutableListOf<FaceInfo>()
|
|
|
- var code = fe.detectFaces(nv21, p.width, p.height, FaceEngine.CP_PAF_NV21, faces)
|
|
|
- faceOverlayView?.setFaceRect(faces.map { it.rect })
|
|
|
- if (code != ErrorInfo.MOK || faces.isEmpty()) {
|
|
|
- maybeLogNoFaceTip(); inDetecting = false; return
|
|
|
- }
|
|
|
- code =
|
|
|
- fe.process(nv21, p.width, p.height, FaceEngine.CP_PAF_NV21, faces, processMask)
|
|
|
- if (code != ErrorInfo.MOK) {
|
|
|
- inDetecting = false; return
|
|
|
- }
|
|
|
- val liveList = mutableListOf<LivenessInfo>()
|
|
|
- if (fe.getLiveness(liveList) != ErrorInfo.MOK) {
|
|
|
- inDetecting = false; return
|
|
|
- }
|
|
|
- if (liveList.none { it.liveness == LivenessInfo.ALIVE }) {
|
|
|
- callBack(null, faces.size, false); inDetecting = false; return
|
|
|
- }
|
|
|
- if (needCheckCenter && !faces[0].rect.isInCenterArea(p.width, p.height)) {
|
|
|
- inDetecting = false; return
|
|
|
+ // ARC 路径(FaceEngine 串行执行 + 丢帧互斥)
|
|
|
+ if (!ensureArcReady()) return
|
|
|
+ if (!detectGate.tryLock()) return
|
|
|
+
|
|
|
+ feScope.launch {
|
|
|
+ try {
|
|
|
+ val fe = faceEngine ?: return@launch
|
|
|
+ val faces = mutableListOf<FaceInfo>()
|
|
|
+
|
|
|
+ var code = fe.detectFaces(nv21, p.width, p.height, FaceEngine.CP_PAF_NV21, faces)
|
|
|
+ withContext(Dispatchers.Main) { faceOverlayView?.setFaceRect(faces.map { it.rect }) }
|
|
|
+ if (code != ErrorInfo.MOK || faces.isEmpty()) {
|
|
|
+ maybeLogNoFaceTip(); return@launch
|
|
|
+ }
|
|
|
+
|
|
|
+ code = fe.process(nv21, p.width, p.height, FaceEngine.CP_PAF_NV21, faces, processMask)
|
|
|
+ if (code != ErrorInfo.MOK) return@launch
|
|
|
+
|
|
|
+ val liveList = mutableListOf<LivenessInfo>()
|
|
|
+ if (fe.getLiveness(liveList) != ErrorInfo.MOK) return@launch
|
|
|
+ val alive = liveList.any { it.liveness == LivenessInfo.ALIVE }
|
|
|
+ if (!alive) {
|
|
|
+ if (shouldEmit(lastInitCbTs)) ThreadUtils.runOnMain { callBack(null, faces.size, false) }
|
|
|
+ return@launch
|
|
|
+ }
|
|
|
+ if (needCheckCenter && !faces[0].rect.isInCenterArea(p.width, p.height)) return@launch
|
|
|
+
|
|
|
+ if (shouldEmit(lastInitCbTs)) {
|
|
|
+ val bmp = withContext(Dispatchers.IO) { ImageConvertUtils.nv21ToBitmap(nv21, p.width, p.height) }
|
|
|
+ ThreadUtils.runOnMain { callBack(bmp, faces.size, true) }
|
|
|
+ }
|
|
|
+ } catch (e: Throwable) {
|
|
|
+ logger.warn("ARC detect error: ${e.message}", e)
|
|
|
+ } finally {
|
|
|
+ if (detectGate.isLocked) detectGate.unlock()
|
|
|
+ }
|
|
|
}
|
|
|
- val bmp = ImageConvertUtils.nv21ToBitmap(nv21, p.width, p.height)
|
|
|
- callBack(bmp, faces.size, true)
|
|
|
- inDetecting = false
|
|
|
}
|
|
|
|
|
|
override fun onCameraClosed() {}
|
|
|
@@ -410,15 +501,26 @@ object FaceUtil {
|
|
|
.previewOn(preview)
|
|
|
.cameraListener(listener)
|
|
|
.build()
|
|
|
- cameraHelper!!.init()
|
|
|
- cameraHelper!!.start()
|
|
|
+
|
|
|
+ cameraLock.lock()
|
|
|
+ try {
|
|
|
+ cameraHelper!!.init()
|
|
|
+ cameraHelper!!.start()
|
|
|
+ } finally {
|
|
|
+ cameraLock.unlock()
|
|
|
+ }
|
|
|
}
|
|
|
|
|
|
- // ================= checkCamera =================
|
|
|
+ // ================= checkCamera:命中即停 =================
|
|
|
fun checkCamera(
|
|
|
preview: View,
|
|
|
- callBack: (Bitmap?, Long?) -> Unit
|
|
|
+ callBack: (Bitmap?, face: Rect?, Long?) -> Unit
|
|
|
) {
|
|
|
+ stashAppContext(preview.context)
|
|
|
+
|
|
|
+ // 进入 checkCamera 时重置“命中即停”标记
|
|
|
+ stopAfterHit.set(false)
|
|
|
+
|
|
|
if (rgbCameraId == null) {
|
|
|
rgbCameraId = findCameraIdByFacing(Camera.CameraInfo.CAMERA_FACING_FRONT)
|
|
|
?: findCameraIdByFacing(Camera.CameraInfo.CAMERA_FACING_BACK)
|
|
|
@@ -439,15 +541,16 @@ object FaceUtil {
|
|
|
hlkVerifyJob?.cancel()
|
|
|
hlkVerifyJob = ioScope.launch {
|
|
|
try {
|
|
|
- hlkClient?.startVerifyWithNotes(
|
|
|
- timeoutSec = 15, loop = true,
|
|
|
- onFaceState = { rect, state, yaw, pitch, roll ->
|
|
|
- logger.info("onFaceState: $rect, $state, $yaw, $pitch, $roll")
|
|
|
- lastFaceRectByHlk = rect; lastAliveByHlk = (state == 0)
|
|
|
- },
|
|
|
- onLiveness = { alive -> lastAliveByHlk = alive },
|
|
|
- onResult = { userId -> lastUserIdByHlk = userId }
|
|
|
- )
|
|
|
+ hlkLock.withLock {
|
|
|
+ hlkClient?.startVerifyWithNotes(
|
|
|
+ timeoutSec = 15, loop = true,
|
|
|
+ onFaceState = { rect, state, _, _, _ ->
|
|
|
+ lastFaceRectByHlk = rect; lastAliveByHlk = (state == 0)
|
|
|
+ },
|
|
|
+ onLiveness = { alive -> lastAliveByHlk = alive },
|
|
|
+ onResult = { userId -> lastUserIdByHlk = userId }
|
|
|
+ )
|
|
|
+ }
|
|
|
} finally {
|
|
|
hlkVerifyRunning = false
|
|
|
}
|
|
|
@@ -458,56 +561,85 @@ object FaceUtil {
|
|
|
override fun onPreview(nv21: ByteArray?, camera: Camera?) {
|
|
|
val p = previewSize ?: return
|
|
|
|
|
|
+ // 命中后拦截一切后续 onPreview
|
|
|
+ if (stopAfterHit.get()) return
|
|
|
+
|
|
|
if (backend == FaceBackend.HLK) {
|
|
|
- val bmp = ImageConvertUtils.nv21ToBitmap(nv21, p.width, p.height)
|
|
|
- callBack(bmp, registerUserIdAndLocalUserId[lastUserIdByHlk])
|
|
|
+ if (shouldEmit(lastCheckCbTs)) {
|
|
|
+ ioScope.launch {
|
|
|
+ val data = nv21 ?: return@launch
|
|
|
+ val bmp = ImageConvertUtils.nv21ToBitmap(data, p.width, p.height)
|
|
|
+ ThreadUtils.runOnMain {
|
|
|
+ callBack(bmp, lastFaceRectByHlk, registerUserIdAndLocalUserId[lastUserIdByHlk])
|
|
|
+ }
|
|
|
+ }
|
|
|
+ }
|
|
|
return
|
|
|
}
|
|
|
|
|
|
- // ARC:原流程
|
|
|
- if (inDetecting) return
|
|
|
- inDetecting = true
|
|
|
- val bmp = ImageConvertUtils.nv21ToBitmap(nv21, p.width, p.height)
|
|
|
- val fe = faceEngine ?: run { inDetecting = false; return }
|
|
|
- val faces = mutableListOf<FaceInfo>()
|
|
|
- var code = fe.detectFaces(nv21, p.width, p.height, FaceEngine.CP_PAF_NV21, faces)
|
|
|
- if (code != ErrorInfo.MOK || faces.isEmpty()) {
|
|
|
- inDetecting = false; return
|
|
|
- }
|
|
|
- code =
|
|
|
- fe.process(nv21, p.width, p.height, FaceEngine.CP_PAF_NV21, faces, processMask)
|
|
|
- if (code != ErrorInfo.MOK) {
|
|
|
- inDetecting = false; return
|
|
|
- }
|
|
|
- val liveList = mutableListOf<LivenessInfo>()
|
|
|
- val lc = fe.getLiveness(liveList)
|
|
|
- if (lc != ErrorInfo.MOK) {
|
|
|
- inDetecting = false; return
|
|
|
- }
|
|
|
- if (liveList.none { it.liveness == LivenessInfo.ALIVE }) {
|
|
|
- callBack(null, null); inDetecting = false; return
|
|
|
- }
|
|
|
- val ft = FaceFeature()
|
|
|
- fe.extractFaceFeature(
|
|
|
- nv21,
|
|
|
- p.width,
|
|
|
- p.height,
|
|
|
- FaceEngine.CP_PAF_NV21,
|
|
|
- faces[0],
|
|
|
- ExtractType.RECOGNIZE,
|
|
|
- 0,
|
|
|
- ft
|
|
|
- )
|
|
|
- val searchResult =
|
|
|
- runCatching { if (fe.faceCount > 0) fe.searchFaceFeature(ft) else null }.getOrNull()
|
|
|
- logger.info("人脸结果:${searchResult?.maxSimilar}")
|
|
|
- if ((searchResult?.maxSimilar ?: 0f) > 0.5f) {
|
|
|
- callBack(bmp, searchResult?.faceFeatureInfo?.searchId?.toLong())
|
|
|
- } else {
|
|
|
- callBack(null, null)
|
|
|
- }
|
|
|
- ThreadUtils.runOnMainDelayed(1000) {
|
|
|
- inDetecting = false
|
|
|
+ if (!ensureArcReady()) return
|
|
|
+ if (!detectGate.tryLock()) return
|
|
|
+
|
|
|
+ feScope.launch {
|
|
|
+ var requestStop = false
|
|
|
+ try {
|
|
|
+ val data = nv21 ?: return@launch
|
|
|
+ val fe = faceEngine ?: return@launch
|
|
|
+ val faces = mutableListOf<FaceInfo>()
|
|
|
+ var code = fe.detectFaces(data, p.width, p.height, FaceEngine.CP_PAF_NV21, faces)
|
|
|
+ if (code != ErrorInfo.MOK || faces.isEmpty()) return@launch
|
|
|
+ code = fe.process(data, p.width, p.height, FaceEngine.CP_PAF_NV21, faces, processMask)
|
|
|
+ if (code != ErrorInfo.MOK) return@launch
|
|
|
+
|
|
|
+ val liveList = mutableListOf<LivenessInfo>()
|
|
|
+ val lc = fe.getLiveness(liveList)
|
|
|
+ if (lc != ErrorInfo.MOK) return@launch
|
|
|
+ val alive = liveList.any { it.liveness == LivenessInfo.ALIVE }
|
|
|
+ if (!alive) {
|
|
|
+ if (shouldEmit(lastCheckCbTs)) ThreadUtils.runOnMain { callBack(null, null, null) }
|
|
|
+ return@launch
|
|
|
+ }
|
|
|
+
|
|
|
+ val ft = FaceFeature()
|
|
|
+ fe.extractFaceFeature(
|
|
|
+ data, p.width, p.height, FaceEngine.CP_PAF_NV21,
|
|
|
+ faces[0], ExtractType.RECOGNIZE, 0, ft
|
|
|
+ )
|
|
|
+
|
|
|
+ val searchResult = runCatching { if (fe.faceCount > 0) fe.searchFaceFeature(ft) else null }.getOrNull()
|
|
|
+ val score = searchResult?.maxSimilar ?: 0f
|
|
|
+ logger.info("人脸库数:${fe.faceCount},相似度:${score}")
|
|
|
+
|
|
|
+                        // 命中即停:>0.5 立即回调 + 停止相机 + 拦截后续帧
+                        if (score > 0.5f) {
+                            if (stopAfterHit.compareAndSet(false, true)) {
+                                // The hit is a one-shot, terminal event: deliver it unconditionally
+                                // instead of letting shouldEmit() throttle it away; just record the
+                                // timestamp so the throttle window stays consistent.
+                                lastCheckCbTs.set(System.currentTimeMillis())
+                                val bmp = withContext(Dispatchers.IO) { ImageConvertUtils.nv21ToBitmap(data, p.width, p.height) }
+                                ThreadUtils.runOnMain {
+                                    callBack(
+                                        bmp,
+                                        faces[0].rect,
+                                        searchResult?.faceFeatureInfo?.searchId?.toLong()
+                                    )
+                                }
+                                requestStop = true // 解锁之后再 stop()
+                            }
+                            return@launch
+                        }
|
|
|
+
|
|
|
+ // 未命中:按原逻辑回调空
|
|
|
+ if (shouldEmit(lastCheckCbTs)) ThreadUtils.runOnMain { callBack(null, null, null) }
|
|
|
+
|
|
|
+ } catch (e: Throwable) {
|
|
|
+ logger.warn("ARC check error: ${e.message}", e)
|
|
|
+ } finally {
|
|
|
+ if (detectGate.isLocked) detectGate.unlock()
|
|
|
+ // 命中后再真正停止,避免持锁调用 stop()
|
|
|
+ if (requestStop) {
|
|
|
+ ThreadUtils.runOnMain { stop() }
|
|
|
+ }
|
|
|
+ }
|
|
|
}
|
|
|
}
|
|
|
|
|
|
@@ -528,109 +660,120 @@ object FaceUtil {
|
|
|
.previewOn(preview)
|
|
|
.cameraListener(listener)
|
|
|
.build()
|
|
|
- cameraHelper!!.init()
|
|
|
- cameraHelper!!.start()
|
|
|
+
|
|
|
+ cameraLock.lock()
|
|
|
+ try {
|
|
|
+ cameraHelper!!.init()
|
|
|
+ cameraHelper!!.start()
|
|
|
+ } finally {
|
|
|
+ cameraLock.unlock()
|
|
|
+ }
|
|
|
}
|
|
|
|
|
|
fun stop() {
|
|
|
- cameraHelper?.release()
|
|
|
- cameraHelper = null
|
|
|
- hlkVerifyJob?.cancel()
|
|
|
- hlkVerifyJob = null
|
|
|
- hlkClient?.stopVerify() // 双保险:让设备侧监听循环马上跳出
|
|
|
+ cameraLock.lock()
|
|
|
+ try {
|
|
|
+ cameraHelper?.release()
|
|
|
+ cameraHelper = null
|
|
|
+ hlkVerifyJob?.cancel()
|
|
|
+ hlkVerifyJob = null
|
|
|
+            // stopVerify() exists to break the verify loop that is currently holding hlkLock,
+            // so it must not queue behind that same lock; fire it directly.
+            ioScope.launch { hlkClient?.stopVerify() }
|
|
|
+ } finally {
|
|
|
+ cameraLock.unlock()
|
|
|
+ }
|
|
|
+ // 注意:不 cancel appJob,便于 FaceUtil 复用生命周期
|
|
|
+ // 如需彻底销毁,可在外部生命周期结束时调用:
|
|
|
+ // appJob.cancel(); feExecutor.shutdownNow()
|
|
|
}
|
|
|
|
|
|
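
// Illustrative sketch (editor's addition, not part of the patch): the "thorough teardown" that the
// comment inside stop() alludes to, written out as a hypothetical FaceUtil member. The names match the
// fields introduced in this patch; whether the host app ever needs it (FaceUtil is a process-wide
// object) is left to the caller.
fun destroy() {
    stop()                     // release the camera and cancel the HLK verify job
    unInitEngine()             // serialized unInit of the ArcSoft engine
    appJob.cancel()            // stops the ioScope / defaultScope / feScope coroutines
    feExecutor.shutdownNow()   // tears down the "FaceEngine-Serial" thread
    mlDetector?.close()        // ML Kit detectors are Closeable
    mlDetector = null
}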
- // ----------------- 注册/比对(仅 ARC 有效) -----------------
|
|
|
+ // ----------------- 注册/比对(仅 ARC 有效,已串行化) -----------------
|
|
|
fun registerFace(faceData: List<Pair<Long, String>>) {
|
|
|
if (backend == FaceBackend.HLK) {
|
|
|
- // HLK 下请走 Hlk223PhotoEnroll / ENROLL_ITG
|
|
|
-
|
|
|
faceData.forEach { (uid, b64) ->
|
|
|
ThreadUtils.runOnIO {
|
|
|
- val jpegBytes = ImageCompress.base64ToJpegUnder(b64, 1000 * 2)
|
|
|
- val crc32 = CRC32().apply { update(jpegBytes) }.value.toInt()
|
|
|
+ val jpegBytes = ImageCompress.base64ToJpegUnder(b64, 1500)
|
|
|
+ val crc32 = CRC32().apply { update(jpegBytes) }.value
|
|
|
logger.info("图片大小:{}", jpegBytes.size)
|
|
|
val userId = hlkClient?.enrollWithPhoto(jpegBytes, crc32 = crc32)
|
|
|
- userId?.let {
|
|
|
- registerUserIdAndLocalUserId[it] = uid
|
|
|
- }
|
|
|
+ userId?.let { registerUserIdAndLocalUserId[it] = uid }
|
|
|
logger.info("注册Id:{}", userId)
|
|
|
}
|
|
|
}
|
|
|
return
|
|
|
}
|
|
|
- faceEngine?.removeFaceFeature(-1)
|
|
|
- faceData.forEach { (uid, b64) ->
|
|
|
- val bmp = decodeBase64ToBitmap(b64)
|
|
|
- val img = bitmapToBgr24(bmp)
|
|
|
- val faces = mutableListOf<FaceInfo>()
|
|
|
- val code =
|
|
|
- faceEngine?.detectFaces(img, bmp.width, bmp.height, FaceEngine.CP_PAF_BGR24, faces)
|
|
|
- if (faces.isNullOrEmpty()) return@forEach
|
|
|
- val faceFeature = FaceFeature()
|
|
|
- faceEngine?.extractFaceFeature(
|
|
|
- img,
|
|
|
- bmp.width,
|
|
|
- bmp.height,
|
|
|
- FaceEngine.CP_PAF_BGR24,
|
|
|
- faces[0],
|
|
|
- ExtractType.REGISTER,
|
|
|
- 0,
|
|
|
- faceFeature
|
|
|
- )
|
|
|
- val info = FaceFeatureInfo(uid.toInt(), faceFeature.featureData)
|
|
|
- faceEngine?.registerFaceFeature(info)
|
|
|
-// try {
|
|
|
-// if ((faceEngine?.searchFaceFeature(faceFeature)?.maxSimilar ?: 0f) > 0.5) {
|
|
|
-// faceEngine?.updateFaceFeature(info)
|
|
|
-// } else {
|
|
|
-// faceEngine?.registerFaceFeature(info)
|
|
|
-// }
|
|
|
-// } catch (_: Exception) {
|
|
|
-// faceEngine?.registerFaceFeature(info)
|
|
|
-// }
|
|
|
+ if (!ensureArcReady()) return
|
|
|
+
|
|
|
+ feScope.launch {
|
|
|
+ arcLock.withLock {
|
|
|
+ logger.info("注册人脸:${faceData.map { it.first }}")
|
|
|
+ faceData.forEach { (uid, b64) ->
|
|
|
+ val bmp = decodeBase64ToBitmap(b64)
|
|
|
+ val img = bitmapToBgr24(bmp)
|
|
|
+ val faces = mutableListOf<FaceInfo>()
|
|
|
+ val code = faceEngine?.detectFaces(
|
|
|
+ img, bmp.width, bmp.height, FaceEngine.CP_PAF_BGR24, faces
|
|
|
+ )
|
|
|
+ logger.info("人脸检查结果:${uid},${faces.size},${code}")
|
|
|
+ if (faces.isNullOrEmpty() || code != ErrorInfo.MOK) return@forEach
|
|
|
+ val faceFeature = FaceFeature()
|
|
|
+ faceEngine?.extractFaceFeature(
|
|
|
+ img, bmp.width, bmp.height, FaceEngine.CP_PAF_BGR24,
|
|
|
+ faces[0], ExtractType.REGISTER, 0, faceFeature
|
|
|
+ )
|
|
|
+ val info = FaceFeatureInfo(uid.toInt(), faceFeature.featureData)
|
|
|
+ if ((faceEngine?.faceCount ?: 0) > 0) {
|
|
|
+ if (faceEngine?.getFaceFeature(uid.toInt()) != null) {
|
|
|
+ faceEngine?.updateFaceFeature(info)
|
|
|
+ } else {
|
|
|
+ faceEngine?.registerFaceFeature(info)
|
|
|
+ }
|
|
|
+ } else {
|
|
|
+ faceEngine?.registerFaceFeature(info)
|
|
|
+ }
|
|
|
+ }
|
|
|
+ }
|
|
|
}
|
|
|
}
|
|
|
|
|
|
fun verifyFaceArcSoft(b64a: String, b64b: String, threshold: Float = 0.7f): Boolean {
|
|
|
if (backend == FaceBackend.HLK) return false
|
|
|
if (b64a.isEmpty() || b64b.isEmpty()) return false
|
|
|
- val bmpA = decodeBase64ToBitmap(b64a)
|
|
|
- val bmpB = decodeBase64ToBitmap(b64b)
|
|
|
- val facesA = mutableListOf<FaceInfo>()
|
|
|
- val facesB = mutableListOf<FaceInfo>()
|
|
|
- val imgA = bitmapToBgr24(bmpA)
|
|
|
- val imgB = bitmapToBgr24(bmpB)
|
|
|
- val codeA =
|
|
|
- faceEngine?.detectFaces(imgA, bmpA.width, bmpA.height, FaceEngine.CP_PAF_BGR24, facesA)
|
|
|
- val codeB =
|
|
|
- faceEngine?.detectFaces(imgB, bmpB.width, bmpB.height, FaceEngine.CP_PAF_BGR24, facesB)
|
|
|
- if (codeA != ErrorInfo.MOK || codeB != ErrorInfo.MOK || facesA.isEmpty() || facesB.isEmpty()) return false
|
|
|
- val ftA = FaceFeature();
|
|
|
- val ftB = FaceFeature()
|
|
|
- faceEngine?.extractFaceFeature(
|
|
|
- imgA,
|
|
|
- bmpA.width,
|
|
|
- bmpA.height,
|
|
|
- FaceEngine.CP_PAF_BGR24,
|
|
|
- facesA[0],
|
|
|
- ExtractType.RECOGNIZE,
|
|
|
- 0,
|
|
|
- ftA
|
|
|
- )
|
|
|
- faceEngine?.extractFaceFeature(
|
|
|
- imgB,
|
|
|
- bmpB.width,
|
|
|
- bmpB.height,
|
|
|
- FaceEngine.CP_PAF_BGR24,
|
|
|
- facesB[0],
|
|
|
- ExtractType.RECOGNIZE,
|
|
|
- 0,
|
|
|
- ftB
|
|
|
- )
|
|
|
- val sim = FaceSimilar()
|
|
|
- if (faceEngine?.compareFaceFeature(ftA, ftB, sim) != ErrorInfo.MOK) return false
|
|
|
- return sim.score >= threshold
|
|
|
+ if (!ensureArcReady()) return false
|
|
|
+
|
|
|
+ return runBlocking(feDispatcher) {
|
|
|
+ arcLock.withLock {
|
|
|
+ val bmpA = decodeBase64ToBitmap(b64a)
|
|
|
+ val bmpB = decodeBase64ToBitmap(b64b)
|
|
|
+ val facesA = mutableListOf<FaceInfo>()
|
|
|
+ val facesB = mutableListOf<FaceInfo>()
|
|
|
+ val imgA = bitmapToBgr24(bmpA)
|
|
|
+ val imgB = bitmapToBgr24(bmpB)
|
|
|
+ val codeA = faceEngine?.detectFaces(
|
|
|
+ imgA, bmpA.width, bmpA.height, FaceEngine.CP_PAF_BGR24, facesA
|
|
|
+ )
|
|
|
+ val codeB = faceEngine?.detectFaces(
|
|
|
+ imgB, bmpB.width, bmpB.height, FaceEngine.CP_PAF_BGR24, facesB
|
|
|
+ )
|
|
|
+ if (codeA != ErrorInfo.MOK || codeB != ErrorInfo.MOK || facesA.isEmpty() || facesB.isEmpty()) return@withLock false
|
|
|
+ val ftA = FaceFeature()
|
|
|
+ val ftB = FaceFeature()
|
|
|
+ faceEngine?.extractFaceFeature(
|
|
|
+ imgA, bmpA.width, bmpA.height, FaceEngine.CP_PAF_BGR24,
|
|
|
+ facesA[0], ExtractType.RECOGNIZE, 0, ftA
|
|
|
+ )
|
|
|
+ faceEngine?.extractFaceFeature(
|
|
|
+ imgB, bmpB.width, bmpB.height, FaceEngine.CP_PAF_BGR24,
|
|
|
+ facesB[0], ExtractType.RECOGNIZE, 0, ftB
|
|
|
+ )
|
|
|
+ val sim = FaceSimilar()
|
|
|
+ if (faceEngine?.compareFaceFeature(ftA, ftB, sim) != ErrorInfo.MOK) return@withLock false
|
|
|
+ sim.score >= threshold
|
|
|
+ }
|
|
|
+ }
|
|
|
}
|
|
|
|
|
|
// ===== 工具 =====
|
|
|
@@ -640,7 +783,7 @@ object FaceUtil {
|
|
|
}
|
|
|
|
|
|
private fun bitmapToBgr24(bitmap: Bitmap): ByteArray {
|
|
|
- val w = bitmap.width;
|
|
|
+ val w = bitmap.width
|
|
|
val h = bitmap.height
|
|
|
val pixels = IntArray(w * h); bitmap.getPixels(pixels, 0, w, 0, 0, w, h)
|
|
|
val bgr = ByteArray(w * h * 3)
|
|
|
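
// Illustrative sketch (editor's addition, not part of the patch): the per-pixel packing a
// bitmapToBgr24-style helper performs — each ARGB_8888 pixel becomes three bytes in B, G, R order.
// This is a standalone illustration of the byte layout, not a claim about the exact loop elided above.
fun argbToBgr24(pixels: IntArray): ByteArray {
    val bgr = ByteArray(pixels.size * 3)
    for (i in pixels.indices) {
        val c = pixels[i]
        bgr[i * 3] = (c and 0xFF).toByte()              // B = lowest byte of ARGB
        bgr[i * 3 + 1] = ((c shr 8) and 0xFF).toByte()  // G
        bgr[i * 3 + 2] = ((c shr 16) and 0xFF).toByte() // R
    }
    return bgr
}

fun main() {
    val red = 0xFFFF0000.toInt()                             // opaque red in ARGB_8888
    println(argbToBgr24(intArrayOf(red)).joinToString())     // 0, 0, -1  (B=0, G=0, R=255 as a signed byte)
}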
@@ -654,13 +797,16 @@ object FaceUtil {
|
|
|
}
|
|
|
|
|
|
     /**
-     * 删除人脸
+     * 删除人脸(HLK)
      */
     fun clearFace(userId: Long?) {
-        userId?.let { userId ->
-            registerUserIdAndLocalUserId.entries.find { it.value == userId }?.key?.let {
+        userId?.let { uid ->
+            registerUserIdAndLocalUserId.entries.find { it.value == uid }?.key?.let {
                 ThreadUtils.runOnIO {
-                    hlkClient?.deleteUser(it)
+                    hlkLock.withLock {
|
|
|
+                        // ARC features are registered under uid.toInt(), not the HLK module userId
+                        faceEngine?.removeFaceFeature(uid.toInt())
+                        hlkClient?.deleteUser(it)
+                    }
                 }
             }
         }