|
|
@@ -16,6 +16,7 @@ import com.arcsoft.face.ErrorInfo
|
|
|
import com.arcsoft.face.Face3DAngle
|
|
|
import com.arcsoft.face.FaceEngine
|
|
|
import com.arcsoft.face.FaceFeature
|
|
|
+import com.arcsoft.face.FaceFeatureInfo
|
|
|
import com.arcsoft.face.FaceInfo
|
|
|
import com.arcsoft.face.FaceSimilar
|
|
|
import com.arcsoft.face.GenderInfo
|
|
|
@@ -24,13 +25,16 @@ import com.arcsoft.face.enums.DetectFaceOrientPriority
|
|
|
import com.arcsoft.face.enums.DetectMode
|
|
|
import com.arcsoft.face.enums.ExtractType
|
|
|
import com.grkj.shared.config.Constants
|
|
|
-import com.grkj.shared.utils.extension.expandToPadCenter
|
|
|
import com.grkj.shared.utils.extension.isInCenterArea
|
|
|
import com.grkj.shared.utils.face.arcsoft.CameraHelper
|
|
|
import com.grkj.shared.utils.face.arcsoft.CameraListener
|
|
|
import com.grkj.shared.utils.face.arcsoft.NV21ToBitmap
|
|
|
import com.grkj.shared.widget.FaceOverlayView
|
|
|
-import com.sik.sikimage.CropImageUtils
|
|
|
+import com.sik.sikimage.ImageConvertUtils
|
|
|
+import kotlinx.coroutines.CoroutineScope
|
|
|
+import kotlinx.coroutines.Dispatchers
|
|
|
+import kotlinx.coroutines.SupervisorJob
|
|
|
+import kotlinx.coroutines.launch
|
|
|
import org.slf4j.Logger
|
|
|
import org.slf4j.LoggerFactory
|
|
|
|
|
|
@@ -43,11 +47,12 @@ object ArcSoftUtil {
|
|
|
private var previewSize: Camera.Size? = null
|
|
|
private val rgbCameraId = Camera.CameraInfo.CAMERA_FACING_BACK
|
|
|
private var faceEngine: FaceEngine? = null
|
|
|
+ private var checkFaceEngine: FaceEngine? = null
|
|
|
private val cameraWidth: Int = 640
|
|
|
private val cameraHeight: Int = 480
|
|
|
private var afCode = -1
|
|
|
- private val processMask: Int =
|
|
|
- FaceEngine.ASF_AGE or FaceEngine.ASF_MASK_DETECT or FaceEngine.ASF_GENDER or FaceEngine.ASF_LIVENESS
|
|
|
+ private val processMask: Int = FaceEngine.ASF_MASK_DETECT or FaceEngine.ASF_LIVENESS
|
|
|
+ private val registerFaceFeatureJob get() = CoroutineScope(Dispatchers.IO + SupervisorJob())
|
|
|
|
|
|
private const val ACTION_REQUEST_PERMISSIONS: Int = 0x001
|
|
|
var isActivated = false
|
|
|
@@ -63,10 +68,7 @@ object ArcSoftUtil {
|
|
|
|
|
|
fun checkActiveStatus(context: Context) {
|
|
|
val activeCode = FaceEngine.activeOnline(
|
|
|
- context,
|
|
|
- Constants.ACTIVE_KEY,
|
|
|
- Constants.APP_ID,
|
|
|
- Constants.SDK_KEY
|
|
|
+ context, Constants.ACTIVE_KEY, Constants.APP_ID, Constants.SDK_KEY
|
|
|
)
|
|
|
when (activeCode) {
|
|
|
ErrorInfo.MOK -> {
|
|
|
@@ -96,12 +98,24 @@ object ArcSoftUtil {
|
|
|
return
|
|
|
}
|
|
|
faceEngine = FaceEngine()
|
|
|
+ checkFaceEngine = FaceEngine()
|
|
|
afCode = faceEngine!!.init(
|
|
|
context,
|
|
|
DetectMode.ASF_DETECT_MODE_VIDEO,
|
|
|
DetectFaceOrientPriority.ASF_OP_0_ONLY,
|
|
|
1,
|
|
|
- FaceEngine.ASF_FACE_DETECT or FaceEngine.ASF_AGE or FaceEngine.ASF_MASK_DETECT or FaceEngine.ASF_GENDER or FaceEngine.ASF_LIVENESS or FaceEngine.ASF_FACE_RECOGNITION
|
|
|
+ FaceEngine.ASF_FACE_DETECT or FaceEngine.ASF_MASK_DETECT or FaceEngine.ASF_LIVENESS or FaceEngine.ASF_FACE_RECOGNITION
|
|
|
+ )
|
|
|
+ logger.info("initEngine: init: $afCode")
|
|
|
+ if (afCode != ErrorInfo.MOK) {
|
|
|
+ logger.info("初始化失败")
|
|
|
+ }
|
|
|
+ afCode = checkFaceEngine!!.init(
|
|
|
+ context,
|
|
|
+ DetectMode.ASF_DETECT_MODE_IMAGE,
|
|
|
+ DetectFaceOrientPriority.ASF_OP_0_ONLY,
|
|
|
+ 1,
|
|
|
+ FaceEngine.ASF_FACE_DETECT or FaceEngine.ASF_MASK_DETECT or FaceEngine.ASF_LIVENESS or FaceEngine.ASF_FACE_RECOGNITION
|
|
|
)
|
|
|
logger.info("initEngine: init: $afCode")
|
|
|
isInit = afCode == ErrorInfo.MOK
|
|
|
@@ -131,10 +145,7 @@ object ArcSoftUtil {
|
|
|
|
|
|
val cameraListener: CameraListener = object : CameraListener {
|
|
|
override fun onCameraOpened(
|
|
|
- camera: Camera,
|
|
|
- cameraId: Int,
|
|
|
- displayOrientation: Int,
|
|
|
- isMirror: Boolean
|
|
|
+ camera: Camera, cameraId: Int, displayOrientation: Int, isMirror: Boolean
|
|
|
) {
|
|
|
logger.info("onCameraOpened: $cameraId $displayOrientation $isMirror")
|
|
|
previewSize = camera.parameters.previewSize
|
|
|
@@ -196,14 +207,11 @@ object ArcSoftUtil {
|
|
|
return
|
|
|
}
|
|
|
if (!needCheckCenter || (faceInfoList[0].rect.isInCenterArea(
|
|
|
- previewSize!!.width,
|
|
|
- previewSize!!.height
|
|
|
+ previewSize!!.width, previewSize!!.height
|
|
|
))
|
|
|
) {
|
|
|
val bitmap = NV21ToBitmap(context).nv21ToBitmap(
|
|
|
- nv21,
|
|
|
- previewSize!!.width,
|
|
|
- previewSize!!.height
|
|
|
+ nv21, previewSize!!.width, previewSize!!.height
|
|
|
)
|
|
|
// val faceRect = faceInfoList[0].rect.expandToPadCenter()
|
|
|
logger.debug("人脸检测结果-识别结果 : ${bitmap == null} - $faceInfoList")
|
|
|
@@ -230,14 +238,160 @@ object ArcSoftUtil {
|
|
|
logger.info("onCameraConfigurationChanged: $cameraID $displayOrientation")
|
|
|
}
|
|
|
}
|
|
|
- cameraHelper = CameraHelper.Builder()
|
|
|
- .previewViewSize(Point(cameraWidth, cameraHeight))
|
|
|
+ cameraHelper = CameraHelper.Builder().previewViewSize(Point(cameraWidth, cameraHeight))
|
|
|
+ .rotation(windowManager.defaultDisplay.rotation)
|
|
|
+ .specificCameraId(rgbCameraId ?: Camera.CameraInfo.CAMERA_FACING_FRONT).isMirror(false)
|
|
|
+ .previewOn(preview).cameraListener(cameraListener).build()
|
|
|
+ cameraHelper!!.init()
|
|
|
+ cameraHelper!!.start()
|
|
|
+ }
|
|
|
+
|
|
|
+ /**
|
|
|
+ * Register face features for the given list of (user id, Base64 image) pairs.
|
|
|
+ */
|
|
|
+ fun registerFace(faceData: List<Pair<Long, String>>) {
|
|
|
+ faceData.forEachIndexed { index, userFace ->
|
|
|
+ registerFaceFeatureJob.launch {
|
|
|
+ val faceBitmap = decodeBase64ToBitmap(userFace.second)
|
|
|
+ val imageData =
|
|
|
+ ImageConvertUtils.bitmapToNv21(faceBitmap, faceBitmap.width, faceBitmap.height)
|
|
|
+ val faceInfoList = mutableListOf<FaceInfo>()
|
|
|
+ checkFaceEngine?.detectFaces(
|
|
|
+ imageData,
|
|
|
+ faceBitmap.width,
|
|
|
+ faceBitmap.height,
|
|
|
+ FaceEngine.CP_PAF_NV21,
|
|
|
+ faceInfoList
|
|
|
+ )
|
|
|
+ val faceFeature = FaceFeature()
|
|
|
+ checkFaceEngine?.extractFaceFeature(
|
|
|
+ imageData,
|
|
|
+ faceBitmap.width,
|
|
|
+ faceBitmap.height,
|
|
|
+ FaceEngine.CP_PAF_NV21,
|
|
|
+ faceInfoList[0],
|
|
|
+ ExtractType.REGISTER,
|
|
|
+ 0,
|
|
|
+ faceFeature
|
|
|
+ )
|
|
|
+ val faceFeatureInfo =
|
|
|
+ FaceFeatureInfo(userFace.first.toInt(), faceFeature.featureData)
|
|
|
+ if (checkFaceEngine?.getFaceFeature(userFace.first.toInt()) == null) {
|
|
|
+ checkFaceEngine?.registerFaceFeature(faceFeatureInfo)
|
|
|
+ } else {
|
|
|
+ checkFaceEngine?.updateFaceFeature(faceFeatureInfo)
|
|
|
+ }
|
|
|
+ }
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @JvmOverloads
|
|
|
+ fun checkCamera(
|
|
|
+ context: Context,
|
|
|
+ windowManager: WindowManager,
|
|
|
+ preview: View,
|
|
|
+ needCheckCenter: Boolean = false,
|
|
|
+ callBack: (Long?) -> Unit
|
|
|
+ ) {
|
|
|
+ val metrics = DisplayMetrics()
|
|
|
+ windowManager.defaultDisplay.getMetrics(metrics)
|
|
|
+
|
|
|
+ val cameraListener: CameraListener = object : CameraListener {
|
|
|
+ override fun onCameraOpened(
|
|
|
+ camera: Camera, cameraId: Int, displayOrientation: Int, isMirror: Boolean
|
|
|
+ ) {
|
|
|
+ logger.info("onCameraOpened: $cameraId $displayOrientation $isMirror")
|
|
|
+ previewSize = camera.parameters.previewSize
|
|
|
+ }
|
|
|
+
|
|
|
+
|
|
|
+ override fun onPreview(nv21: ByteArray?, camera: Camera?) {
|
|
|
+ if (inDetecting) {
|
|
|
+ return
|
|
|
+ }
|
|
|
+ inDetecting = true
|
|
|
+ val faceInfoList: List<FaceInfo> = ArrayList()
|
|
|
+ var code = checkFaceEngine!!.detectFaces(
|
|
|
+ nv21,
|
|
|
+ previewSize!!.width,
|
|
|
+ previewSize!!.height,
|
|
|
+ FaceEngine.CP_PAF_NV21,
|
|
|
+ faceInfoList
|
|
|
+ )
|
|
|
+
|
|
|
+ if (needCheckCenter && !faceInfoList[0].rect.isInCenterArea(
|
|
|
+ previewSize!!.width, previewSize!!.height
|
|
|
+ )
|
|
|
+ ) {
|
|
|
+ inDetecting = false
|
|
|
+ return
|
|
|
+ }
|
|
|
+ if (code == ErrorInfo.MOK && faceInfoList.isNotEmpty()) {
|
|
|
+ code = faceEngine!!.process(
|
|
|
+ nv21,
|
|
|
+ previewSize!!.width,
|
|
|
+ previewSize!!.height,
|
|
|
+ FaceEngine.CP_PAF_NV21,
|
|
|
+ faceInfoList,
|
|
|
+ processMask
|
|
|
+ )
|
|
|
+ if (code != ErrorInfo.MOK) {
|
|
|
+ inDetecting = false
|
|
|
+ return
|
|
|
+ }
|
|
|
+ } else {
|
|
|
+ inDetecting = false
|
|
|
+ return
|
|
|
+ }
|
|
|
+
|
|
|
+ val faceLivenessInfoList: List<LivenessInfo> = ArrayList()
|
|
|
+ val livenessCode = faceEngine!!.getLiveness(faceLivenessInfoList)
|
|
|
+
|
|
|
+ // Bail out if the liveness query did not return ErrorInfo.MOK
|
|
|
+ if ((livenessCode) != ErrorInfo.MOK) {
|
|
|
+ logger.debug("人脸检测结果:年龄、性别、角度、获取验证失败")
|
|
|
+ inDetecting = false
|
|
|
+ return
|
|
|
+ }
|
|
|
+
|
|
|
+ // Added locally: require at least one face to pass the liveness check before matching
|
|
|
+ if (faceLivenessInfoList.none { it.liveness == LivenessInfo.ALIVE }) {
|
|
|
+ callBack(null)
|
|
|
+ inDetecting = false
|
|
|
+ return
|
|
|
+ }
|
|
|
+ val faceFeature = FaceFeature()
|
|
|
+ checkFaceEngine?.extractFaceFeature(
|
|
|
+ nv21,
|
|
|
+ previewSize!!.width,
|
|
|
+ previewSize!!.height,
|
|
|
+ FaceEngine.CP_PAF_NV21,
|
|
|
+ faceInfoList[0],
|
|
|
+ ExtractType.RECOGNIZE,
|
|
|
+ 0,
|
|
|
+ faceFeature
|
|
|
+ )
|
|
|
+ val searchResult = checkFaceEngine?.searchFaceFeature(faceFeature)
|
|
|
+ logger.debug("人脸检测结果-识别结果 : ${searchResult?.faceFeatureInfo} - $faceInfoList")
|
|
|
+ callBack(searchResult?.faceFeatureInfo?.searchId?.toLong())
|
|
|
+ }
|
|
|
+
|
|
|
+ override fun onCameraClosed() {
|
|
|
+ logger.info("onCameraClosed: ")
|
|
|
+ }
|
|
|
+
|
|
|
+ override fun onCameraError(e: Exception) {
|
|
|
+ logger.info("onCameraError: " + e.message)
|
|
|
+ }
|
|
|
+
|
|
|
+ override fun onCameraConfigurationChanged(cameraID: Int, displayOrientation: Int) {
|
|
|
+ logger.info("onCameraConfigurationChanged: $cameraID $displayOrientation")
|
|
|
+ }
|
|
|
+ }
|
|
|
+ cameraHelper = CameraHelper.Builder().previewViewSize(Point(cameraWidth, cameraHeight))
|
|
|
.rotation(windowManager.defaultDisplay.rotation)
|
|
|
- .specificCameraId(rgbCameraId ?: Camera.CameraInfo.CAMERA_FACING_FRONT)
|
|
|
- .isMirror(false)
|
|
|
- .previewOn(preview)
|
|
|
- .cameraListener(cameraListener)
|
|
|
- .build()
|
|
|
+ .specificCameraId(rgbCameraId ?: Camera.CameraInfo.CAMERA_FACING_FRONT).isMirror(false)
|
|
|
+ .previewOn(preview).cameraListener(cameraListener).build()
|
|
|
cameraHelper!!.init()
|
|
|
cameraHelper!!.start()
|
|
|
}
|
|
|
@@ -265,9 +419,7 @@ object ArcSoftUtil {
|
|
|
* @param threshold 阈值,一般设置 0.7f 左右
|
|
|
*/
|
|
|
fun verifyFaceArcSoft(
|
|
|
- b64a: String,
|
|
|
- b64b: String,
|
|
|
- threshold: Float = 0.7f
|
|
|
+ b64a: String, b64b: String, threshold: Float = 0.7f
|
|
|
): Boolean {
|
|
|
// 1. 解码成 Bitmap
|
|
|
val bmpA = decodeBase64ToBitmap(b64a)
|
|
|
@@ -280,18 +432,10 @@ object ArcSoftUtil {
|
|
|
val imgB = bitmapToBgr24(bmpB)
|
|
|
|
|
|
val imgADetectResultCode = faceEngine?.detectFaces(
|
|
|
- imgA,
|
|
|
- bmpA.width,
|
|
|
- bmpA.height,
|
|
|
- FaceEngine.CP_PAF_BGR24,
|
|
|
- facesA
|
|
|
+ imgA, bmpA.width, bmpA.height, FaceEngine.CP_PAF_BGR24, facesA
|
|
|
)
|
|
|
val imgBDetectResultCode = faceEngine?.detectFaces(
|
|
|
- imgB,
|
|
|
- bmpB.width,
|
|
|
- bmpB.height,
|
|
|
- FaceEngine.CP_PAF_BGR24,
|
|
|
- facesB
|
|
|
+ imgB, bmpB.width, bmpB.height, FaceEngine.CP_PAF_BGR24, facesB
|
|
|
)
|
|
|
logger.info("人脸检测结果1:${facesA.size},${facesA.size}")
|
|
|
logger.info("人脸检测结果2:${imgADetectResultCode},${imgBDetectResultCode}")
|
|
|
@@ -303,12 +447,24 @@ object ArcSoftUtil {
|
|
|
val ftA = FaceFeature()
|
|
|
val ftB = FaceFeature()
|
|
|
faceEngine?.extractFaceFeature(
|
|
|
- imgA, bmpA.width, bmpA.height, FaceEngine.CP_PAF_BGR24,
|
|
|
- facesA[0], ExtractType.RECOGNIZE, 0, ftA
|
|
|
+ imgA,
|
|
|
+ bmpA.width,
|
|
|
+ bmpA.height,
|
|
|
+ FaceEngine.CP_PAF_BGR24,
|
|
|
+ facesA[0],
|
|
|
+ ExtractType.RECOGNIZE,
|
|
|
+ 0,
|
|
|
+ ftA
|
|
|
)
|
|
|
faceEngine?.extractFaceFeature(
|
|
|
- imgB, bmpB.width, bmpB.height, FaceEngine.CP_PAF_BGR24,
|
|
|
- facesB[0], ExtractType.RECOGNIZE, 0, ftB
|
|
|
+ imgB,
|
|
|
+ bmpB.width,
|
|
|
+ bmpB.height,
|
|
|
+ FaceEngine.CP_PAF_BGR24,
|
|
|
+ facesB[0],
|
|
|
+ ExtractType.RECOGNIZE,
|
|
|
+ 0,
|
|
|
+ ftB
|
|
|
)
|
|
|
|
|
|
// 4. 特征比对
|