From 73c46e8da00ec25d850ecea17397669ef11881df Mon Sep 17 00:00:00 2001
From: diljale
Date: Thu, 13 Jun 2024 20:39:06 -0700
Subject: [PATCH] Update face liveness samples for kotlin and swift (#98)

* update face liveness samples for kotlin and swift

* revert target sdk version

---------

Co-authored-by: Bhaven Dedhia
---
 .../kotlin/face/FaceAnalyzerSample/README.md      |   4 +-
 .../face/FaceAnalyzerSample/app/build.gradle      |  12 +-
 .../app/src/main/AndroidManifest.xml              |   2 +-
 .../FaceAnalyzerSample/AnalyzeActivity.kt         | 131 ++++++++++--------
 .../example/FaceAnalyzerSample/AppUtils.kt        |   4 +-
 .../FaceAnalyzerSample/AutoFitSurfaceView.kt      |   2 +-
 .../FaceAnalyzerSample/MainActivity.kt            |  26 +---
 .../FaceAnalyzerSample/ResultActivity.kt          |  24 ++--
 .../com/example/FaceAnalyzerSample/Utils.kt       |  14 +-
 .../main/res/layout/app_activity_result.xml       |  13 ++
 .../main/res/layout/app_activity_settings.xml     |  28 ++--
 .../src/main/res/values/string_feedback.xml       |   4 +-
 .../face/FaceAnalyzerSample/build.gradle          |   6 +-
 .../project.pbxproj                               |   4 +
 .../FaceAnalyzerSample/ClientStartView.swift      |   4 +-
 .../Core/CameraPreviewView.swift                  |  12 +-
 .../FaceAnalyzerSample/Core/CameraView.swift      |   9 +-
 .../Core/LivenessActor.swift                      |  86 ++++++------
 .../Core/LivenessModel.swift                      |  17 +++
 .../Core/LivenessView.swift                       |  80 ++++++-----
 .../Core/Localization.swift                       |   4 +-
 .../FaceAnalyzerSample/Core/Utility.swift         |   3 +-
 .../Core/en.lproj/Localizable.strings             |   1 +
 .../Core/zh-Hans.lproj/Localizable.strings        |   1 +
 .../FaceAnalyzerSample/MainView.swift             |   7 +-
 .../FaceAnalyzerSample/ResultView.swift           |  22 ++-
 .../FaceAnalyzerSample/SecureInputView.swift      |   1 +
 .../FaceAnalyzerSample/SettingsView.swift         |  19 +--
 .../swift/face/FaceAnalyzerSample/README.md       |  26 +---
 29 files changed, 331 insertions(+), 235 deletions(-)
 create mode 100644 samples/swift/face/FaceAnalyzerSample/FaceAnalyzerSample/Core/LivenessModel.swift

diff --git a/samples/kotlin/face/FaceAnalyzerSample/README.md b/samples/kotlin/face/FaceAnalyzerSample/README.md
index eff5b2e..c937160 100644
--- a/samples/kotlin/face/FaceAnalyzerSample/README.md
+++ b/samples/kotlin/face/FaceAnalyzerSample/README.md
@@ -144,8 +144,8 @@ plugins {
 ```
 * You need to add the following dependencies to the app's build.gradle `dependencies` section.
 ```
-  implementation "com.azure.ai:azure-ai-vision-common:0.17.0-beta.1"
-  implementation "com.azure.ai:azure-ai-vision-faceanalyzer:0.17.0-beta.1"
+  implementation "com.azure.ai:azure-ai-vision-common:0.17.1-beta.1"
+  implementation "com.azure.ai:azure-ai-vision-faceanalyzer:0.17.1-beta.1"
 ```
 * You need to add the repository in settings.gradle for dependencyResolutionManagement
 ```
diff --git a/samples/kotlin/face/FaceAnalyzerSample/app/build.gradle b/samples/kotlin/face/FaceAnalyzerSample/app/build.gradle
index b98c79b..7bc1f0e 100644
--- a/samples/kotlin/face/FaceAnalyzerSample/app/build.gradle
+++ b/samples/kotlin/face/FaceAnalyzerSample/app/build.gradle
@@ -38,10 +38,10 @@ android {
 }
 
 dependencies {
-    implementation group: 'net.sourceforge.streamsupport', name: 'android-retrofuture', version: '1.7.4'
-    implementation 'com.google.android.material:material:1.12.0'
+    implementation 'com.google.android.material:material:1.9.0'
     implementation 'androidx.constraintlayout:constraintlayout:2.1.4'
-    implementation "androidx.activity:activity-ktx:1.9.0"
-    implementation "com.azure.ai:azure-ai-vision-faceanalyzer:0.17.0-beta.1"
-    implementation "com.azure.android:azure-core-http-okhttp:1.0.0-beta.14"
-}
+    implementation 'com.azure.android:azure-core-http-okhttp:1.0.0-beta.14'
+    implementation "androidx.activity:activity-ktx:1.7.2"
+    implementation "com.azure.ai:azure-ai-vision-common:0.17.1-beta.1"
+    implementation "com.azure.ai:azure-ai-vision-faceanalyzer:0.17.1-beta.1"
+}
\ No newline at end of file
diff --git a/samples/kotlin/face/FaceAnalyzerSample/app/src/main/AndroidManifest.xml b/samples/kotlin/face/FaceAnalyzerSample/app/src/main/AndroidManifest.xml
index 891b6f0..e5681fd 100644
--- a/samples/kotlin/face/FaceAnalyzerSample/app/src/main/AndroidManifest.xml
+++ b/samples/kotlin/face/FaceAnalyzerSample/app/src/main/AndroidManifest.xml
@@ -50,4 +50,4 @@
 
     </application>
-</manifest>
+</manifest>
\ No newline at end of file
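The README hunk above also tells readers to add the SDK's Maven repository to settings.gradle under `dependencyResolutionManagement`, but that file is not part of this patch. A minimal sketch of such a block, written with the Gradle Kotlin DSL (`settings.gradle.kts`); the repository URL is a placeholder and should be taken from the sample's actual settings.gradle:

```kotlin
// settings.gradle.kts - sketch only; the sample itself uses the Groovy DSL and its own repository URL.
dependencyResolutionManagement {
    repositories {
        google()
        mavenCentral()
        maven {
            // Placeholder: substitute the feed that actually hosts com.azure.ai:azure-ai-vision-*.
            url = uri("https://example.com/azure-ai-vision/maven")
        }
    }
}
```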
diff --git a/samples/kotlin/face/FaceAnalyzerSample/app/src/main/java/com/example/FaceAnalyzerSample/AnalyzeActivity.kt b/samples/kotlin/face/FaceAnalyzerSample/app/src/main/java/com/example/FaceAnalyzerSample/AnalyzeActivity.kt
index e35305d..d72ae50 100644
--- a/samples/kotlin/face/FaceAnalyzerSample/app/src/main/java/com/example/FaceAnalyzerSample/AnalyzeActivity.kt
+++ b/samples/kotlin/face/FaceAnalyzerSample/app/src/main/java/com/example/FaceAnalyzerSample/AnalyzeActivity.kt
@@ -29,32 +29,32 @@ import com.azure.android.core.credential.AccessToken
 import com.azure.android.core.credential.TokenCredential
 import com.azure.android.core.credential.TokenRequestContext
 import org.threeten.bp.OffsetDateTime
-import java.net.URL
 import kotlin.math.sqrt
 
-/***
- * Sample class to fetch token for starting liveness session.
- * It is recommended to fetch this token from app server for production as part of init section.
- */
-class StringTokenCredential(token: String) : TokenCredential {
-    override fun getToken(
-        request: TokenRequestContext,
-        callback: TokenCredential.TokenCredentialCallback
-    ) {
-        callback.onSuccess(_token)
-    }
-
-    private var _token: AccessToken? = null
-
-    init {
-        _token = AccessToken(token, OffsetDateTime.MAX)
-    }
-}
 /***
  * Analyze activity performs one-time face analysis, using the default camera stream as input.
  * Launches the result activity once the analyzed event is triggered.
  */
 open class AnalyzeActivity : AppCompatActivity() {
+    /***
+     * Sample class to fetch token for starting liveness session.
+     * It is recommended to fetch this token from app server for production as part of init section.
+     */
+    class StringTokenCredential(token: String) : TokenCredential {
+        override fun getToken(
+            request: TokenRequestContext,
+            callback: TokenCredential.TokenCredentialCallback
+        ) {
+            callback.onSuccess(_token)
+        }
+
+        private var _token: AccessToken? = null
+
+        init {
+            _token = AccessToken(token, OffsetDateTime.MAX)
+        }
+    }
+
     private lateinit var mSurfaceView: SurfaceView
     private lateinit var mCameraPreviewLayout: FrameLayout
     private lateinit var mBackgroundLayout: ConstraintLayout
@@ -81,12 +81,12 @@ open class AnalyzeActivity : AppCompatActivity() {
         mCameraPreviewLayout.removeAllViews()
         mCameraPreviewLayout.addView(mSurfaceView)
         mCameraPreviewLayout.visibility = View.INVISIBLE
-        mInstructionsView = findViewById(R.id.instructionString);
+        mInstructionsView = findViewById(R.id.instructionString)
         mBackgroundLayout = findViewById(R.id.activity_main_layout)
-        var analyzeModel = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
+        val analyzeModel = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
             intent.getParcelableExtra("model", AnalyzeModel::class.java)
         } else {
-            @Suppress("DEPRECATION") intent.getParcelableExtra("model")
+            @Suppress("DEPRECATION") intent.getParcelableExtra("model")
         }
         mFaceApiEndpoint = analyzeModel?.endpoint
         mSessionToken = analyzeModel?.token
@@ -99,14 +99,16 @@
 
     override fun onResume() {
         super.onResume()
-        initializeConfig()
-        val visionSourceOptions = VisionSourceOptions(this, this as LifecycleOwner)
-        visionSourceOptions.setPreview(mSurfaceView)
-        mVisionSource = VisionSource.fromDefaultCamera(visionSourceOptions)
-        displayCameraOnLayout()
-
-        // Initialize faceAnalyzer with default camera as vision source
-        createFaceAnalyzer()
+        if (mFaceAnalyzer == null) {
+            initializeConfig()
+            val visionSourceOptions = VisionSourceOptions(this, this as LifecycleOwner)
+            visionSourceOptions.setPreview(mSurfaceView)
+            mVisionSource = VisionSource.fromDefaultCamera(visionSourceOptions)
+            displayCameraOnLayout()
+
+            // Initialize faceAnalyzer with default camera as vision source
+            createFaceAnalyzer()
+        }
         startAnalyzeOnce()
     }
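The StringTokenCredential comment above recommends fetching the session token from an app server in production instead of creating it on the device with the Face API key. A minimal sketch of that pattern, assuming a hypothetical backend route (`/api/face/liveness-session`) that creates the session server-side and returns the session's `authToken` as JSON:

```kotlin
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.withContext
import org.json.JSONObject
import java.net.URL
import javax.net.ssl.HttpsURLConnection

// Hypothetical app-server call: the server holds the Face API key, creates the
// liveness session, and hands back only the short-lived session token.
suspend fun fetchSessionTokenFromAppServer(appServerBaseUrl: String): String =
    withContext(Dispatchers.IO) {
        val connection = URL("$appServerBaseUrl/api/face/liveness-session")
            .openConnection() as HttpsURLConnection
        connection.requestMethod = "POST"
        try {
            val body = connection.inputStream.bufferedReader().use { it.readText() }
            // Field name assumed to mirror the Face API session-creation response.
            JSONObject(body).getString("authToken")
        } finally {
            connection.disconnect()
        }
    }
```

The returned string can then be handed to `AnalyzeActivity` the same way the sample passes the token it builds locally in `Utils.getFaceAPISessionToken`.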
@@ -156,39 +158,46 @@
 
         mFaceAnalyzer?.apply {
             this.analyzed.addEventListener(analyzedListener)
-            this.analyzing.addEventListener(analyzinglistener)
+            this.analyzing.addEventListener(analyzingListener)
+            this.stopped.addEventListener(stoppedListener)
         }
     }
 
     /**
      * Listener for Analyzing callback. Receives tracking and user feedback information
      */
-    protected var analyzinglistener =
+    @Suppress("MemberVisibilityCanBePrivate")
+    protected var analyzingListener =
         EventListener { _, e ->
             e.result.use { result ->
                 if (result.faces.isNotEmpty()) {
                     // Get the first face in result
-                    var face = result.faces.iterator().next()
+                    val face = result.faces.iterator().next()
 
                     // Lighten/darken the screen based on liveness feedback
-                    var requiredAction = face.actionRequiredFromApplicationTask?.action;
-                    if (requiredAction == ActionRequiredFromApplication.BRIGHTEN_DISPLAY) {
-                        mBackgroundLayout.setBackgroundColor(Color.WHITE)
-                        face.actionRequiredFromApplicationTask.setAsCompleted()
-                    } else if (requiredAction == ActionRequiredFromApplication.DARKEN_DISPLAY) {
-                        mBackgroundLayout.setBackgroundColor(Color.BLACK)
-                        face.actionRequiredFromApplicationTask.setAsCompleted()
-                    } else if (requiredAction == ActionRequiredFromApplication.STOP_CAMERA) {
-                        mCameraPreviewLayout.visibility = View.INVISIBLE
-                        face.actionRequiredFromApplicationTask.setAsCompleted()
+                    val requiredAction = face.actionRequiredFromApplicationTask?.action
+                    when (requiredAction) {
+                        ActionRequiredFromApplication.BRIGHTEN_DISPLAY -> {
+                            mBackgroundLayout.setBackgroundColor(Color.WHITE)
+                            face.actionRequiredFromApplicationTask.setAsCompleted()
+                        }
+                        ActionRequiredFromApplication.DARKEN_DISPLAY -> {
+                            mBackgroundLayout.setBackgroundColor(Color.BLACK)
+                            face.actionRequiredFromApplicationTask.setAsCompleted()
+                        }
+                        ActionRequiredFromApplication.STOP_CAMERA -> {
+                            mCameraPreviewLayout.visibility = View.INVISIBLE
+                            face.actionRequiredFromApplicationTask.setAsCompleted()
+                        }
+                        else -> {}
                     }
 
                     // Display user feedback and warnings on UI
                     if (!mDoneAnalyzing) {
-                        var feedbackMessage = MapFeedbackToMessage(FeedbackForFace.NONE)
+                        var feedbackMessage = mapFeedbackToMessage(FeedbackForFace.NONE)
                         if (face.feedbackForFace != null) {
-                            feedbackMessage = MapFeedbackToMessage(face.feedbackForFace)
+                            feedbackMessage = mapFeedbackToMessage(face.feedbackForFace)
                         }
                         val currentTime = System.currentTimeMillis()
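The analyzing listener above captures `System.currentTimeMillis()` before updating the instruction text; the rest of that logic sits outside the hunk's context. One common use for such a timestamp is to rate-limit UI updates so per-frame feedback changes do not make the label flicker; a purely illustrative sketch, not code from the sample:

```kotlin
// Illustrative only: allow at most one instruction-text update per interval.
class UiUpdateThrottle(private val minIntervalMs: Long = 250L) {
    private var lastUpdateMs = 0L

    fun shouldUpdate(nowMs: Long = System.currentTimeMillis()): Boolean {
        if (nowMs - lastUpdateMs < minIntervalMs) return false
        lastUpdateMs = nowMs
        return true
    }
}
```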
@@ -210,18 +219,19 @@
      * Receives recognition and liveness result.
      * Launches result activity.
      */
+    @Suppress("MemberVisibilityCanBePrivate")
     protected var analyzedListener =
         EventListener { _, e ->
             val bd = Bundle()
             e.result.use { result ->
                 if (result.faces.isNotEmpty()) {
                     // Get the first face in result
-                    var face = result.faces.iterator().next()
-                    var livenessStatus: LivenessStatus = face.livenessResult?.livenessStatus?: LivenessStatus.FAILED
-                    var livenessFailureReason = face.livenessResult?.livenessFailureReason?: LivenessFailureReason.NONE
-                    var verifyStatus = face.recognitionResult?.recognitionStatus?:RecognitionStatus.NOT_COMPUTED
-                    var verifyConfidence = face.recognitionResult?.confidence?:Float.NaN
-                    var resultIdsList: ArrayList<String> = ArrayList()
+                    val face = result.faces.iterator().next()
+                    val livenessStatus: LivenessStatus = face.livenessResult?.livenessStatus?: LivenessStatus.FAILED
+                    val livenessFailureReason = face.livenessResult?.livenessFailureReason?: LivenessFailureReason.NONE
+                    val verifyStatus = face.recognitionResult?.recognitionStatus?:RecognitionStatus.NOT_COMPUTED
+                    val verifyConfidence = face.recognitionResult?.confidence?:Float.NaN
+                    val resultIdsList: ArrayList<String> = ArrayList()
                     if (face.livenessResult.resultId != null) {
                         resultIdsList.add(face.livenessResult.resultId.toString())
                     }
@@ -242,6 +252,14 @@
             }
         }
 
+    @Suppress("MemberVisibilityCanBePrivate")
+    protected var stoppedListener =
+        EventListener { _, e ->
+            if (e.reason == FaceAnalysisStoppedReason.ERROR) {
+                mResultReceiver?.send(AnalyzedResultType.ERROR, null)
+            }
+        }
+
     /**
      * Sets faceAnalysisOptions and recognitionMode. Calls analyzeOnce.
      */
@@ -260,12 +278,12 @@
             return
         }
 
-        mFaceAnalysisOptions = FaceAnalysisOptions();
+        mFaceAnalysisOptions = FaceAnalysisOptions()
 
         mFaceAnalysisOptions?.setFaceSelectionMode(FaceSelectionMode.LARGEST)
 
         try {
-            mFaceAnalyzer?.analyzeOnceAsync(mFaceAnalysisOptions);
+            mFaceAnalyzer?.analyzeOnceAsync(mFaceAnalysisOptions)
         } catch (ex: Exception) {
             ex.printStackTrace()
         }
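The new `stoppedListener`, like the analyzed and back-press paths, reports back through `mResultReceiver`. The receiver itself lives in the calling activity and is not shown in this patch; a sketch of what that side could look like, assuming `AnalyzedResultType.ERROR` and `AnalyzedResultType.BACKPRESSED` are plain `Int` result codes (they are passed directly to `ResultReceiver.send` above):

```kotlin
import android.os.Bundle
import android.os.Handler
import android.os.Looper
import android.os.ResultReceiver

// Sketch of a receiver the launching activity could hand to AnalyzeActivity.
// AnalyzedResultType is defined elsewhere in the sample; only ERROR and
// BACKPRESSED appear in this patch.
class AnalyzeResultReceiver(
    private val onError: () -> Unit,
    private val onBackPressed: () -> Unit,
    private val onResult: (Bundle?) -> Unit
) : ResultReceiver(Handler(Looper.getMainLooper())) {

    override fun onReceiveResult(resultCode: Int, resultData: Bundle?) {
        when (resultCode) {
            AnalyzedResultType.ERROR -> onError()
            AnalyzedResultType.BACKPRESSED -> onBackPressed()
            else -> onResult(resultData) // liveness/verification extras arrive here
        }
    }
}
```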
@@ -282,7 +300,7 @@
      * Displays camera stream on UI in a circular shape.
      */
     private fun displayCameraOnLayout() {
-        val previewSize = mVisionSource?.getCameraPreviewFormat()
+        val previewSize = mVisionSource?.cameraPreviewFormat
         val params = mCameraPreviewLayout.layoutParams as ConstraintLayout.LayoutParams
         params.dimensionRatio = previewSize?.height.toString() + ":" + previewSize?.width
         params.width = ConstraintLayout.LayoutParams.MATCH_CONSTRAINT
@@ -295,17 +313,18 @@
     /**
      * Override back button to always return to main activity
      */
+    @Deprecated("Deprecated in Java")
     override fun onBackPressed() {
         synchronized(this) {
             mBackPressed = true
         }
         @Suppress("DEPRECATION") super.onBackPressed()
-        mFaceAnalyzer?.stopAnalyzeOnce();
+        mFaceAnalyzer?.stopAnalyzeOnce()
         val bd = Bundle()
         mResultReceiver?.send(AnalyzedResultType.BACKPRESSED, bd)
     }
 
-    private fun MapFeedbackToMessage(feedback : FeedbackForFace): String {
+    private fun mapFeedbackToMessage(feedback : FeedbackForFace): String {
         when(feedback) {
             FeedbackForFace.NONE -> return getString(R.string.feedback_none)
             FeedbackForFace.LOOK_AT_CAMERA -> return getString(R.string.feedback_look_at_camera)
diff --git a/samples/kotlin/face/FaceAnalyzerSample/app/src/main/java/com/example/FaceAnalyzerSample/AppUtils.kt b/samples/kotlin/face/FaceAnalyzerSample/app/src/main/java/com/example/FaceAnalyzerSample/AppUtils.kt
index 66103b6..3487140 100644
--- a/samples/kotlin/face/FaceAnalyzerSample/app/src/main/java/com/example/FaceAnalyzerSample/AppUtils.kt
+++ b/samples/kotlin/face/FaceAnalyzerSample/app/src/main/java/com/example/FaceAnalyzerSample/AppUtils.kt
@@ -8,7 +8,7 @@ import android.content.Intent
 import android.net.Uri
 import androidx.activity.result.PickVisualMediaRequest
 import androidx.activity.result.contract.ActivityResultContracts
-import com.example.FaceAnalyzerSample.app.Utils
+import com.example.FaceAnalyzerSample.Utils
 import java.io.IOException
 import java.io.ByteArrayOutputStream
 
@@ -27,7 +27,7 @@
         val faceApiKey = sharedPref.getString("key", "").toString()
         val sendResultsToClient = sharedPref.getBoolean("sendResultsToClient", false)
 
-        Utils.getFaceAPISessionToken(faceApiEndpoint, faceApiKey, verifyImage, sendResultsToClient)
+        Utils.getFaceAPISessionToken(faceApiEndpoint, faceApiKey, verifyImage, sendResultsToClient, context.contentResolver)
     }
     // Function to retrieve a string value from SharedPreferences
     fun getVerifyImage(context: Context, uri: Uri) : ByteArray {
diff --git a/samples/kotlin/face/FaceAnalyzerSample/app/src/main/java/com/example/FaceAnalyzerSample/AutoFitSurfaceView.kt b/samples/kotlin/face/FaceAnalyzerSample/app/src/main/java/com/example/FaceAnalyzerSample/AutoFitSurfaceView.kt
index 6254cdc..8519f31 100644
--- a/samples/kotlin/face/FaceAnalyzerSample/app/src/main/java/com/example/FaceAnalyzerSample/AutoFitSurfaceView.kt
+++ b/samples/kotlin/face/FaceAnalyzerSample/app/src/main/java/com/example/FaceAnalyzerSample/AutoFitSurfaceView.kt
@@ -108,4 +108,4 @@
             clipToOutline = true
         }
     }
-}
+}
\ No newline at end of file
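Both activities touched by this patch keep overriding `onBackPressed()` and annotate it with `@Deprecated`, suppressing the deprecated super call. If a reader prefers to avoid the deprecated override entirely, the `androidx.activity` dependency the sample already declares provides `OnBackPressedDispatcher`; a sketch of that alternative, not part of the patch:

```kotlin
import android.os.Bundle
import androidx.activity.OnBackPressedCallback
import androidx.appcompat.app.AppCompatActivity

// Sketch: handle back navigation without overriding the deprecated onBackPressed().
class BackAwareActivity : AppCompatActivity() {
    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        onBackPressedDispatcher.addCallback(this, object : OnBackPressedCallback(true) {
            override fun handleOnBackPressed() {
                // e.g. stop analysis and notify a result receiver here, then leave the screen
                finish()
            }
        })
    }
}
```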
diff --git a/samples/kotlin/face/FaceAnalyzerSample/app/src/main/java/com/example/FaceAnalyzerSample/MainActivity.kt b/samples/kotlin/face/FaceAnalyzerSample/app/src/main/java/com/example/FaceAnalyzerSample/MainActivity.kt
index 1ab2527..398522b 100644
--- a/samples/kotlin/face/FaceAnalyzerSample/app/src/main/java/com/example/FaceAnalyzerSample/MainActivity.kt
+++ b/samples/kotlin/face/FaceAnalyzerSample/app/src/main/java/com/example/FaceAnalyzerSample/MainActivity.kt
@@ -31,7 +31,7 @@ import androidx.activity.result.contract.ActivityResultContracts
 import androidx.appcompat.app.AppCompatActivity
 import androidx.core.app.ActivityCompat
 import androidx.core.content.ContextCompat
-import com.example.FaceAnalyzerSample.app.Utils
+import com.example.FaceAnalyzerSample.Utils
 import java.io.InputStream
 
 /***
@@ -62,26 +62,9 @@ open class MainActivity : AppCompatActivity() {
         mPickMedia = registerForActivityResult(PickImage()) { uri ->
             if (uri != null) {
                 mVerifyImage = AppUtils.getVerifyImage(this, uri)
-                val orientation = this.applicationContext.contentResolver.query(
-                    MediaStore.Images.Media.EXTERNAL_CONTENT_URI,
-                    arrayOf(MediaStore.Images.ImageColumns.ORIENTATION),
-                    MediaStore.Images.Media._ID + " = ?",
-                    arrayOf(DocumentsContract.getDocumentId(uri).split(":")[1]),
-                    null).use {
-                    return@use if (it == null || it.count != 1 || !it.moveToFirst()) null else
-                        when (it.getInt(0)) {
-                            -270 -> ExifInterface.ORIENTATION_ROTATE_90
-                            -180 -> ExifInterface.ORIENTATION_ROTATE_180
-                            -90 -> ExifInterface.ORIENTATION_ROTATE_270
-                            90 -> ExifInterface.ORIENTATION_ROTATE_90
-                            180 -> ExifInterface.ORIENTATION_ROTATE_180
-                            270 -> ExifInterface.ORIENTATION_ROTATE_270
-                            else -> null
-                        }
-                }
                 this.applicationContext.contentResolver.openInputStream(uri).use { inputStream ->
                     if (inputStream != null) {
-                        showImage(inputStream, orientation)
+                        showImage(inputStream)
                     }
                 }
             }
@@ -95,13 +78,14 @@ open class MainActivity : AppCompatActivity() {
         }
     }
 
     @SuppressLint("NewApi")
-    private fun showImage(inputStream: InputStream, knownOrientationExifEnum: Int?) {
+    private fun showImage(inputStream: InputStream) {
         var bitmapImage = BitmapFactory.decodeStream(inputStream)
         try {
             // rotate bitmap (best effort)
             val matrix = Matrix()
-            (knownOrientationExifEnum ?: ExifInterface(inputStream).getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL))
+            ExifInterface(inputStream)
+                .getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL)
                 .let { orientation ->
                     when (orientation) {
                         ExifInterface.ORIENTATION_ROTATE_90 -> matrix.postRotate(90F)
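The `showImage` change above reads the EXIF orientation straight from the picked image instead of querying MediaStore. A self-contained sketch of the same idea that opens the content `Uri` twice (one stream for decoding, one for EXIF) so the two readers do not share a single `InputStream`; `androidx.exifinterface` is assumed here:

```kotlin
import android.content.ContentResolver
import android.graphics.Bitmap
import android.graphics.BitmapFactory
import android.graphics.Matrix
import android.net.Uri
import androidx.exifinterface.media.ExifInterface

// Decode a picked image and rotate it upright according to its EXIF orientation.
fun decodeUpright(resolver: ContentResolver, uri: Uri): Bitmap? {
    val bitmap = resolver.openInputStream(uri)?.use { BitmapFactory.decodeStream(it) } ?: return null
    val orientation = resolver.openInputStream(uri)?.use { stream ->
        ExifInterface(stream).getAttributeInt(
            ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL
        )
    } ?: ExifInterface.ORIENTATION_NORMAL

    val matrix = Matrix()
    when (orientation) {
        ExifInterface.ORIENTATION_ROTATE_90 -> matrix.postRotate(90f)
        ExifInterface.ORIENTATION_ROTATE_180 -> matrix.postRotate(180f)
        ExifInterface.ORIENTATION_ROTATE_270 -> matrix.postRotate(270f)
    }
    return Bitmap.createBitmap(bitmap, 0, 0, bitmap.width, bitmap.height, matrix, true)
}
```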
diff --git a/samples/kotlin/face/FaceAnalyzerSample/app/src/main/java/com/example/FaceAnalyzerSample/ResultActivity.kt b/samples/kotlin/face/FaceAnalyzerSample/app/src/main/java/com/example/FaceAnalyzerSample/ResultActivity.kt
index 63a2a5a..3b83e4d 100644
--- a/samples/kotlin/face/FaceAnalyzerSample/app/src/main/java/com/example/FaceAnalyzerSample/ResultActivity.kt
+++ b/samples/kotlin/face/FaceAnalyzerSample/app/src/main/java/com/example/FaceAnalyzerSample/ResultActivity.kt
@@ -7,6 +7,7 @@ package com.example.FaceAnalyzerSample
 
 import android.content.Intent
 import android.os.Bundle
+import android.widget.Button
 import android.widget.TextView
 import androidx.appcompat.app.AppCompatActivity
 
@@ -16,6 +17,7 @@ class ResultActivity : AppCompatActivity() {
 
     private val viewMap: LinkedHashMap<TextView, TextView> = LinkedHashMap()
+    private lateinit var mRetryButton: Button
 
     override fun onCreate(savedInstanceState: Bundle?)
     {
         super.onCreate(savedInstanceState)
@@ -26,22 +28,22 @@ class ResultActivity : AppCompatActivity() {
         viewMap[findViewById(R.id.resultLabel3)] = findViewById(R.id.resultValue3)
         viewMap[findViewById(R.id.resultLabel4)] = findViewById(R.id.resultValue4)
 
-        var livenessStatus = intent.getStringExtra("livenessStatus")
-        var livenessFailureReason = intent.getStringExtra("livenessFailureReason")
-        var verificationStatus = intent.getStringExtra("verificationStatus")
-        var verificationConfidence = intent.getStringExtra("verificationConfidence")
-        var errorMessage = intent.getStringExtra("error")
+        val livenessStatus = intent.getStringExtra("livenessStatus")
+        val livenessFailureReason = intent.getStringExtra("livenessFailureReason")
+        val verificationStatus = intent.getStringExtra("verificationStatus")
+        val verificationConfidence = intent.getStringExtra("verificationConfidence")
+        val errorMessage = intent.getStringExtra("error")
 
-        var itr = viewMap.entries.iterator()
+        val itr = viewMap.entries.iterator()
         var mapEntry = itr.next()
 
-        if(errorMessage.isNullOrBlank() == false){
+        if(!errorMessage.isNullOrBlank()){
             mapEntry.key.text = "Error:"
             mapEntry.value.text = errorMessage
         }
         else {
             // Display liveness results
-            if (livenessStatus.isNullOrBlank() == false) {
+            if (!livenessStatus.isNullOrBlank()) {
                 mapEntry.key.text = "Liveness status:"
                 mapEntry.value.text = livenessStatus
                 mapEntry = itr.next()
@@ -50,7 +52,7 @@ class ResultActivity : AppCompatActivity() {
                 mapEntry = itr.next()
             }
 
-            if (verificationStatus.isNullOrBlank() == false) {
+            if (!verificationStatus.isNullOrBlank()) {
                 mapEntry.key.text = "Verification status:"
                 mapEntry.value.text = verificationStatus
                 mapEntry = itr.next()
@@ -58,11 +60,15 @@ class ResultActivity : AppCompatActivity() {
                 mapEntry.value.text = verificationConfidence
             }
         }
+
+        mRetryButton = findViewById(R.id.retryButton)
+        mRetryButton.setOnClickListener { @Suppress("DEPRECATION") super.onBackPressed() }
     }
 
     /**
      * Overrides back button to always return to main activity
      */
+    @Deprecated("Deprecated in Java")
     override fun onBackPressed() {
         @Suppress("DEPRECATION") super.onBackPressed()
         for (entry in viewMap.entries) {
diff --git a/samples/kotlin/face/FaceAnalyzerSample/app/src/main/java/com/example/FaceAnalyzerSample/Utils.kt b/samples/kotlin/face/FaceAnalyzerSample/app/src/main/java/com/example/FaceAnalyzerSample/Utils.kt
index 4b861f9..0513588 100644
--- a/samples/kotlin/face/FaceAnalyzerSample/app/src/main/java/com/example/FaceAnalyzerSample/Utils.kt
+++ b/samples/kotlin/face/FaceAnalyzerSample/app/src/main/java/com/example/FaceAnalyzerSample/Utils.kt
@@ -1,5 +1,7 @@
-package com.example.FaceAnalyzerSample.app
+package com.example.FaceAnalyzerSample
 
+import android.annotation.SuppressLint
+import android.content.ContentResolver
 import android.provider.Settings
 import android.util.Log
 import kotlinx.coroutines.Dispatchers
@@ -20,9 +22,11 @@ import java.util.UUID
 
 object Utils {
     var mSessionToken: String = ""
-    const val LINE_FEED = "\r\n"
+    private const val LINE_FEED = "\r\n"
 
-    fun getFaceAPISessionToken(faceApiEndpoint: String, faceApiKey: String, verifyImageArray: ByteArray?, sendResultsToClient: Boolean): String = runBlocking {
+    @OptIn(ExperimentalStdlibApi::class)
+    @SuppressLint("HardwareIds")
+    fun getFaceAPISessionToken(faceApiEndpoint: String, faceApiKey: String, verifyImageArray: ByteArray?, sendResultsToClient: Boolean, contentResolver: ContentResolver): String = runBlocking {
         withContext(Dispatchers.IO) {
             val url: URL?
             var urlConnection: HttpsURLConnection? = null
@@ -34,10 +38,12 @@
                 URL("$faceApiEndpoint/face/v1.1-preview.1/detectLiveness/singleModal/sessions")
             }
 
+            val deviceId = Settings.Secure.getString(contentResolver, Settings.Secure.ANDROID_ID).hexToLong()
+
             val tokenRequest = JSONObject(mapOf(
                 "livenessOperationMode" to "Passive",
                 "sendResultsToClient" to sendResultsToClient,
-                "deviceCorrelationId" to Settings.Secure.ANDROID_ID
+                "deviceCorrelationId" to UUID(deviceId, deviceId)
             )).toString()
             val charset: Charset = Charset.forName("UTF-8")
             urlConnection = url.openConnection() as HttpsURLConnection
diff --git a/samples/kotlin/face/FaceAnalyzerSample/app/src/main/res/layout/app_activity_result.xml b/samples/kotlin/face/FaceAnalyzerSample/app/src/main/res/layout/app_activity_result.xml
index 88d43b6..66fd326 100644
--- a/samples/kotlin/face/FaceAnalyzerSample/app/src/main/res/layout/app_activity_result.xml
+++ b/samples/kotlin/face/FaceAnalyzerSample/app/src/main/res/layout/app_activity_result.xml
@@ -110,4 +110,17 @@
         app:layout_constraintStart_toStartOf="parent"
         app:layout_constraintTop_toBottomOf="@+id/resultLabel3" />
 
+