From 2a712f8e5e3b2e2479ffdbfa5802b7490ca2bdf5 Mon Sep 17 00:00:00 2001 From: Tuan Mai A <62716934+st-tuanmai@users.noreply.github.com> Date: Tue, 26 Mar 2024 02:18:09 +0700 Subject: [PATCH] Add delegate to object detector example (#354) * add delegate to object detector example * remove development team --- .../ObjectDetector.xcodeproj/project.pbxproj | 12 ++-- .../ObjectDetector/Base.lproj/Main.storyboard | 47 ++++++++++++++- .../Configs/DefaultConstants.swift | 2 + ...ft => InferenceConfigurationManager.swift} | 20 ++++--- .../Services/ObjectDetectorService.swift | 27 ++++++--- .../BottomSheetViewController.swift | 59 +++++++++++++------ .../CameraViewController.swift | 13 ++-- .../MediaLibraryViewController.swift | 16 ++--- .../ViewControllers/RootViewController.swift | 2 +- examples/object_detection/ios/Podfile | 2 +- examples/object_detection/ios/Podfile.lock | 16 ++--- 11 files changed, 150 insertions(+), 66 deletions(-) rename examples/object_detection/ios/ObjectDetector/Configs/{InferenceConfigManager.swift => InferenceConfigurationManager.swift} (80%) diff --git a/examples/object_detection/ios/ObjectDetector.xcodeproj/project.pbxproj b/examples/object_detection/ios/ObjectDetector.xcodeproj/project.pbxproj index 1dd5d221..79a339ec 100644 --- a/examples/object_detection/ios/ObjectDetector.xcodeproj/project.pbxproj +++ b/examples/object_detection/ios/ObjectDetector.xcodeproj/project.pbxproj @@ -14,7 +14,7 @@ 9E41A7603AB8BE3B86C06DB9 /* Pods_ObjectDetector.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = C3FBCABF9DABD56A270978F4 /* Pods_ObjectDetector.framework */; }; AAF981DB2A80D27500C7121A /* CameraViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = AAF981DA2A80D27500C7121A /* CameraViewController.swift */; }; AAF981DD2A80D28B00C7121A /* MediaLibraryViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = AAF981DC2A80D28B00C7121A /* MediaLibraryViewController.swift */; }; - AAF981DF2A81330700C7121A /* InferenceConfigManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = AAF981DE2A81330700C7121A /* InferenceConfigManager.swift */; }; + AAF981DF2A81330700C7121A /* InferenceConfigurationManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = AAF981DE2A81330700C7121A /* InferenceConfigurationManager.swift */; }; AAF981E52A85156500C7121A /* DefaultConstants.swift in Sources */ = {isa = PBXBuildFile; fileRef = AAF981E42A85156500C7121A /* DefaultConstants.swift */; }; BF2B2D2F2A3C09E200589A11 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = BF2B2D2E2A3C09E200589A11 /* AppDelegate.swift */; }; BF2B2D332A3C09E200589A11 /* RootViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = BF2B2D322A3C09E200589A11 /* RootViewController.swift */; }; @@ -48,7 +48,7 @@ 7FEC31BE2AA97EF20074D3D3 /* SceneDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SceneDelegate.swift; sourceTree = "<group>"; }; AAF981DA2A80D27500C7121A /* CameraViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraViewController.swift; sourceTree = "<group>"; }; AAF981DC2A80D28B00C7121A /* MediaLibraryViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MediaLibraryViewController.swift; sourceTree = "<group>"; }; - AAF981DE2A81330700C7121A /* InferenceConfigManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = InferenceConfigManager.swift; sourceTree = "<group>"; }; + AAF981DE2A81330700C7121A /* 
InferenceConfigurationManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = InferenceConfigurationManager.swift; sourceTree = "<group>"; }; AAF981E42A85156500C7121A /* DefaultConstants.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DefaultConstants.swift; sourceTree = "<group>"; }; BF2B2D2B2A3C09E200589A11 /* ObjectDetector.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = ObjectDetector.app; sourceTree = BUILT_PRODUCTS_DIR; }; BF2B2D2E2A3C09E200589A11 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; }; @@ -146,7 +146,7 @@ isa = PBXGroup; children = ( AAF981E42A85156500C7121A /* DefaultConstants.swift */, - AAF981DE2A81330700C7121A /* InferenceConfigManager.swift */, + AAF981DE2A81330700C7121A /* InferenceConfigurationManager.swift */, ); path = Configs; sourceTree = "<group>"; @@ -399,7 +399,7 @@ BF2B2D332A3C09E200589A11 /* RootViewController.swift in Sources */, AAF981E52A85156500C7121A /* DefaultConstants.swift in Sources */, BF2B2D6B2A3C0C3F00589A11 /* BottomSheetViewController.swift in Sources */, - AAF981DF2A81330700C7121A /* InferenceConfigManager.swift in Sources */, + AAF981DF2A81330700C7121A /* InferenceConfigurationManager.swift in Sources */, BF2B2D642A3C0C2900589A11 /* CameraFeedService.swift in Sources */, BF2B2D2F2A3C09E200589A11 /* AppDelegate.swift in Sources */, 7FEC31BF2AA97EF20074D3D3 /* SceneDelegate.swift in Sources */, @@ -570,7 +570,7 @@ CODE_SIGN_IDENTITY = "Apple Development"; CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; - DEVELOPMENT_TEAM = H83UK2M7VU; + DEVELOPMENT_TEAM = ""; GENERATE_INFOPLIST_FILE = YES; INFOPLIST_FILE = ObjectDetector/Info.plist; INFOPLIST_KEY_NSCameraUsageDescription = "This app uses camera to detection the objects that appear in the camera feed."; @@ -603,7 +603,7 @@ CODE_SIGN_IDENTITY = "Apple Development"; CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; - DEVELOPMENT_TEAM = H83UK2M7VU; + DEVELOPMENT_TEAM = ""; GENERATE_INFOPLIST_FILE = YES; INFOPLIST_FILE = ObjectDetector/Info.plist; INFOPLIST_KEY_NSCameraUsageDescription = "This app uses camera to detection the objects that appear in the camera feed."; diff --git a/examples/object_detection/ios/ObjectDetector/Base.lproj/Main.storyboard b/examples/object_detection/ios/ObjectDetector/Base.lproj/Main.storyboard index 4d691723..7182c406 100644 --- a/examples/object_detection/ios/ObjectDetector/Base.lproj/Main.storyboard +++ b/examples/object_detection/ios/ObjectDetector/Base.lproj/Main.storyboard [Storyboard XML hunks @@ -1,8 +1,9 @@, @@ -125,6 +126,44 @@, @@ -150,9 +189,12 @@ and @@ -173,6 +215,7 @@ were lost in extraction; judging from the rest of the patch they add the delegate (CPU/GPU) selection row and its constraints to the bottom-sheet UI.] diff --git a/examples/object_detection/ios/ObjectDetector/Configs/DefaultConstants.swift b/examples/object_detection/ios/ObjectDetector/Configs/DefaultConstants.swift index 4a6a8c1a..24918a37 100644 --- a/examples/object_detection/ios/ObjectDetector/Configs/DefaultConstants.swift +++ b/examples/object_detection/ios/ObjectDetector/Configs/DefaultConstants.swift @@ -14,6 +14,7 @@ import Foundation import UIKit +import MediaPipeTasksVision // MARK: Define default constants struct DefaultConstants { @@ -33,6 +34,7 @@ ] static let displayFont = UIFont.systemFont(ofSize: 14.0, weight: .medium) static let model: Model = .efficientdetLite0 + static let delegate: Delegate = .CPU } // MARK: Model diff --git 
a/examples/object_detection/ios/ObjectDetector/Configs/InferenceConfigManager.swift b/examples/object_detection/ios/ObjectDetector/Configs/InferenceConfigurationManager.swift similarity index 80% rename from examples/object_detection/ios/ObjectDetector/Configs/InferenceConfigManager.swift rename to examples/object_detection/ios/ObjectDetector/Configs/InferenceConfigurationManager.swift index 2f223e6e..4ff028a3 100644 --- a/examples/object_detection/ios/ObjectDetector/Configs/InferenceConfigManager.swift +++ b/examples/object_detection/ios/ObjectDetector/Configs/InferenceConfigurationManager.swift @@ -13,31 +13,35 @@ // limitations under the License. import Foundation +import MediaPipeTasksVision /** * Singleton storing the configs needed to initialize an MediaPipe Tasks object and run inference. * Controllers can observe the `InferenceConfigManager.notificationName` for any changes made by the user. */ -class InferenceConfigManager: NSObject { +class InferenceConfigurationManager: NSObject { var model: Model = DefaultConstants.model { didSet { postConfigChangedNotification() } } - + var delegate: Delegate = DefaultConstants.delegate { + didSet { postConfigChangedNotification() } + } + var maxResults: Int = DefaultConstants.maxResults { didSet { postConfigChangedNotification() } } - + var scoreThreshold: Float = DefaultConstants.scoreThreshold { didSet { postConfigChangedNotification() } } - - static let sharedInstance = InferenceConfigManager() - + + static let sharedInstance = InferenceConfigurationManager() + static let notificationName = Notification.Name.init(rawValue: "com.google.mediapipe.inferenceConfigChanged") - + private func postConfigChangedNotification() { NotificationCenter.default - .post(name: InferenceConfigManager.notificationName, object: nil) + .post(name: InferenceConfigurationManager.notificationName, object: nil) } } diff --git a/examples/object_detection/ios/ObjectDetector/Services/ObjectDetectorService.swift b/examples/object_detection/ios/ObjectDetector/Services/ObjectDetectorService.swift index 3409417f..93d56e2f 100644 --- a/examples/object_detection/ios/ObjectDetector/Services/ObjectDetectorService.swift +++ b/examples/object_detection/ios/ObjectDetector/Services/ObjectDetectorService.swift @@ -46,10 +46,11 @@ class ObjectDetectorService: NSObject { private(set) var runningMode = RunningMode.image private var maxResults = 3 private var scoreThreshold: Float = 0.5 - var modelPath: String + private var modelPath: String + private var delegate: Delegate // MARK: - Custom Initializer - private init?(model: Model, maxResults: Int, scoreThreshold: Float, runningMode:RunningMode) { + private init?(model: Model, maxResults: Int, scoreThreshold: Float, runningMode:RunningMode, delegate: Delegate) { guard let modelPath = model.modelPath else { return nil } @@ -57,6 +58,7 @@ class ObjectDetectorService: NSObject { self.maxResults = maxResults self.scoreThreshold = scoreThreshold self.runningMode = runningMode + self.delegate = delegate super.init() createObjectDetector() @@ -68,6 +70,7 @@ class ObjectDetectorService: NSObject { objectDetectorOptions.maxResults = self.maxResults objectDetectorOptions.scoreThreshold = self.scoreThreshold objectDetectorOptions.baseOptions.modelAssetPath = modelPath + objectDetectorOptions.baseOptions.delegate = delegate if runningMode == .liveStream { objectDetectorOptions.objectDetectorLiveStreamDelegate = self } @@ -83,12 +86,14 @@ class ObjectDetectorService: NSObject { static func videoObjectDetectorService( model: Model, maxResults: 
Int, scoreThreshold: Float, - videoDelegate: ObjectDetectorServiceVideoDelegate?) -> ObjectDetectorService? { + videoDelegate: ObjectDetectorServiceVideoDelegate?, + delegate: Delegate) -> ObjectDetectorService? { let objectDetectorService = ObjectDetectorService( model: model, maxResults: maxResults, scoreThreshold: scoreThreshold, - runningMode: .video) + runningMode: .video, + delegate: delegate) objectDetectorService?.videoDelegate = videoDelegate return objectDetectorService @@ -97,12 +102,14 @@ class ObjectDetectorService: NSObject { static func liveStreamDetectorService( model: Model, maxResults: Int, scoreThreshold: Float, - liveStreamDelegate: ObjectDetectorServiceLiveStreamDelegate?) -> ObjectDetectorService? { + liveStreamDelegate: ObjectDetectorServiceLiveStreamDelegate?, + delegate: Delegate) -> ObjectDetectorService? { let objectDetectorService = ObjectDetectorService( model: model, maxResults: maxResults, scoreThreshold: scoreThreshold, - runningMode: .liveStream) + runningMode: .liveStream, + delegate: delegate) objectDetectorService?.liveStreamDelegate = liveStreamDelegate return objectDetectorService @@ -110,13 +117,15 @@ class ObjectDetectorService: NSObject { static func stillImageDetectorService( model: Model, maxResults: Int, - scoreThreshold: Float) -> ObjectDetectorService? { + scoreThreshold: Float, + delegate: Delegate) -> ObjectDetectorService? { let objectDetectorService = ObjectDetectorService( model: model, maxResults: maxResults, scoreThreshold: scoreThreshold, - runningMode: .image) - + runningMode: .image, + delegate: delegate) + return objectDetectorService } diff --git a/examples/object_detection/ios/ObjectDetector/ViewControllers/BottomSheetViewController.swift b/examples/object_detection/ios/ObjectDetector/ViewControllers/BottomSheetViewController.swift index 661cc32c..8e33abc6 100644 --- a/examples/object_detection/ios/ObjectDetector/ViewControllers/BottomSheetViewController.swift +++ b/examples/object_detection/ios/ObjectDetector/ViewControllers/BottomSheetViewController.swift @@ -13,11 +13,12 @@ // limitations under the License. import UIKit +import MediaPipeTasksVision protocol BottomSheetViewControllerDelegate: AnyObject { /** This method is called when the user opens or closes the bottom sheet. - **/ + **/ func viewController( _ viewController: BottomSheetViewController, didSwitchBottomSheetViewState isOpen: Bool) @@ -27,17 +28,17 @@ protocol BottomSheetViewControllerDelegate: AnyObject { * score threshold) and updating the singleton`` DetectorMetadata`` on user input. */ class BottomSheetViewController: UIViewController { - + enum Action { case changeScoreThreshold(Float) case changeMaxResults(Int) case changeModel(Model) case changeBottomSheetViewBottomSpace(Bool) } - + // MARK: Delegates weak var delegate: BottomSheetViewControllerDelegate? - + // MARK: Storyboards Connections @IBOutlet weak var choseModelButton: UIButton! @IBOutlet weak var inferenceTimeNameLabel: UILabel! @@ -48,6 +49,7 @@ class BottomSheetViewController: UIViewController { @IBOutlet weak var maxResultLabel: UILabel! @IBOutlet weak var toggleBottomSheetButton: UIButton! @IBOutlet weak var toggleBottomSheetButtonTopSpace: NSLayoutConstraint! + @IBOutlet weak var delegateButton: UIButton! 
// MARK: Instance Variables var isUIEnabled: Bool = false { @@ -67,16 +69,16 @@ class BottomSheetViewController: UIViewController { inferenceTimeLabel.text = inferenceTimeString inferenceTimeNameLabel.isHidden = false } - + // MARK: - Private function private func setupUI() { - - maxResultStepper.value = Double(InferenceConfigManager.sharedInstance.maxResults) - maxResultLabel.text = "\(InferenceConfigManager.sharedInstance.maxResults)" - - thresholdStepper.value = Double(InferenceConfigManager.sharedInstance.scoreThreshold) - thresholdValueLabel.text = "\(InferenceConfigManager.sharedInstance.scoreThreshold)" - + + maxResultStepper.value = Double(InferenceConfigurationManager.sharedInstance.maxResults) + maxResultLabel.text = "\(InferenceConfigurationManager.sharedInstance.maxResults)" + + thresholdStepper.value = Double(InferenceConfigurationManager.sharedInstance.scoreThreshold) + thresholdValueLabel.text = "\(InferenceConfigurationManager.sharedInstance.scoreThreshold)" + // Choose model option let selectedModelAction = {(action: UIAction) in self.updateModel(modelTitle: action.title) @@ -85,7 +87,7 @@ class BottomSheetViewController: UIViewController { let actions: [UIAction] = Model.allCases.compactMap { model in return UIAction( title: model.name, - state: (InferenceConfigManager.sharedInstance.model == model) ? .on : .off, + state: (InferenceConfigurationManager.sharedInstance.model == model) ? .on : .off, handler: selectedModelAction ) } @@ -93,13 +95,34 @@ class BottomSheetViewController: UIViewController { choseModelButton.menu = UIMenu(children: actions) choseModelButton.showsMenuAsPrimaryAction = true choseModelButton.changesSelectionAsPrimaryAction = true + + let selectedDelegateAction = {(action: UIAction) in + self.updateDelegate(title: action.title) + } + + let delegates: [Delegate] = [.CPU, .GPU] + let delegateActions: [UIAction] = delegates.compactMap { delegate in + return UIAction( + title: delegate == .CPU ? "CPU" : "GPU", + state: (InferenceConfigurationManager.sharedInstance.delegate == delegate) ? .on : .off, + handler: selectedDelegateAction + ) + } + + delegateButton.menu = UIMenu(children: delegateActions) + delegateButton.showsMenuAsPrimaryAction = true + delegateButton.changesSelectionAsPrimaryAction = true } private func updateModel(modelTitle: String) { guard let model = Model(name: modelTitle) else { return } - InferenceConfigManager.sharedInstance.model = model + InferenceConfigurationManager.sharedInstance.model = model + } + + private func updateDelegate(title: String) { + InferenceConfigurationManager.sharedInstance.delegate = title == "GPU" ? 
.GPU : .CPU } private func enableOrDisableClicks() { @@ -115,16 +138,16 @@ class BottomSheetViewController: UIViewController { inferenceTimeNameLabel.isHidden = !sender.isSelected delegate?.viewController(self, didSwitchBottomSheetViewState: sender.isSelected) } - + @IBAction func thresholdStepperValueChanged(_ sender: UIStepper) { let scoreThreshold = Float(sender.value) - InferenceConfigManager.sharedInstance.scoreThreshold = scoreThreshold + InferenceConfigurationManager.sharedInstance.scoreThreshold = scoreThreshold thresholdValueLabel.text = "\(scoreThreshold)" } - + @IBAction func maxResultStepperValueChanged(_ sender: UIStepper) { let maxResults = Int(sender.value) - InferenceConfigManager.sharedInstance.maxResults = maxResults + InferenceConfigurationManager.sharedInstance.maxResults = maxResults maxResultLabel.text = "\(maxResults)" } } diff --git a/examples/object_detection/ios/ObjectDetector/ViewControllers/CameraViewController.swift b/examples/object_detection/ios/ObjectDetector/ViewControllers/CameraViewController.swift index 6ea6d190..62d8eb98 100644 --- a/examples/object_detection/ios/ObjectDetector/ViewControllers/CameraViewController.swift +++ b/examples/object_detection/ios/ObjectDetector/ViewControllers/CameraViewController.swift @@ -157,10 +157,11 @@ class CameraViewController: UIViewController { objectDetectorService = nil objectDetectorService = ObjectDetectorService .liveStreamDetectorService( - model: InferenceConfigManager.sharedInstance.model, - maxResults: InferenceConfigManager.sharedInstance.maxResults, - scoreThreshold: InferenceConfigManager.sharedInstance.scoreThreshold, - liveStreamDelegate: self) + model: InferenceConfigurationManager.sharedInstance.model, + maxResults: InferenceConfigurationManager.sharedInstance.maxResults, + scoreThreshold: InferenceConfigurationManager.sharedInstance.scoreThreshold, + liveStreamDelegate: self, + delegate: InferenceConfigurationManager.sharedInstance.delegate) } private func clearObjectDetectorServiceOnSessionInterruption() { @@ -172,7 +173,7 @@ class CameraViewController: UIViewController { NotificationCenter.default .addObserver(self, selector: #selector(clearAndInitializeObjectDetectorService), - name: InferenceConfigManager.notificationName, + name: InferenceConfigurationManager.notificationName, object: nil) isObserving = true } @@ -181,7 +182,7 @@ class CameraViewController: UIViewController { if isObserving { NotificationCenter.default .removeObserver(self, - name:InferenceConfigManager.notificationName, + name: InferenceConfigurationManager.notificationName, object: nil) } isObserving = false diff --git a/examples/object_detection/ios/ObjectDetector/ViewControllers/MediaLibraryViewController.swift b/examples/object_detection/ios/ObjectDetector/ViewControllers/MediaLibraryViewController.swift index 9da01fb7..9635d1b4 100644 --- a/examples/object_detection/ios/ObjectDetector/ViewControllers/MediaLibraryViewController.swift +++ b/examples/object_detection/ios/ObjectDetector/ViewControllers/MediaLibraryViewController.swift @@ -295,16 +295,18 @@ extension MediaLibraryViewController: UIImagePickerControllerDelegate, UINavigat case .image: objectDetectorService = ObjectDetectorService .stillImageDetectorService( - model: InferenceConfigManager.sharedInstance.model, - maxResults: InferenceConfigManager.sharedInstance.maxResults, - scoreThreshold: InferenceConfigManager.sharedInstance.scoreThreshold) + model: InferenceConfigurationManager.sharedInstance.model, + maxResults: 
InferenceConfigurationManager.sharedInstance.maxResults, + scoreThreshold: InferenceConfigurationManager.sharedInstance.scoreThreshold, + delegate: InferenceConfigurationManager.sharedInstance.delegate) case .video: objectDetectorService = ObjectDetectorService .videoObjectDetectorService( - model: InferenceConfigManager.sharedInstance.model, - maxResults: InferenceConfigManager.sharedInstance.maxResults, - scoreThreshold: InferenceConfigManager.sharedInstance.scoreThreshold, - videoDelegate: self) + model: InferenceConfigurationManager.sharedInstance.model, + maxResults: InferenceConfigurationManager.sharedInstance.maxResults, + scoreThreshold: InferenceConfigurationManager.sharedInstance.scoreThreshold, + videoDelegate: self, + delegate: InferenceConfigurationManager.sharedInstance.delegate) default: break; } diff --git a/examples/object_detection/ios/ObjectDetector/ViewControllers/RootViewController.swift b/examples/object_detection/ios/ObjectDetector/ViewControllers/RootViewController.swift index 170e79ef..921c18e3 100644 --- a/examples/object_detection/ios/ObjectDetector/ViewControllers/RootViewController.swift +++ b/examples/object_detection/ios/ObjectDetector/ViewControllers/RootViewController.swift @@ -35,7 +35,7 @@ class RootViewController: UIViewController { // MARK: Constants private struct Constants { - static let inferenceBottomHeight = 260.0 + static let inferenceBottomHeight = 300.0 static let expandButtonHeight = 41.0 static let expandButtonTopSpace = 10.0 static let mediaLibraryViewControllerStoryBoardId = "MEDIA_LIBRARY_VIEW_CONTROLLER" diff --git a/examples/object_detection/ios/Podfile b/examples/object_detection/ios/Podfile index 66ec88da..bed78f55 100644 --- a/examples/object_detection/ios/Podfile +++ b/examples/object_detection/ios/Podfile @@ -3,7 +3,7 @@ platform :ios, '15.0' target 'ObjectDetector' do use_frameworks! - pod 'MediaPipeTasksVision', '0.10.5' + pod 'MediaPipeTasksVision', '0.10.12' # Pods for ObjectDetector end diff --git a/examples/object_detection/ios/Podfile.lock b/examples/object_detection/ios/Podfile.lock index 3e63f47b..1d32f2c8 100644 --- a/examples/object_detection/ios/Podfile.lock +++ b/examples/object_detection/ios/Podfile.lock @@ -1,10 +1,10 @@ PODS: - - MediaPipeTasksCommon (0.10.5) - - MediaPipeTasksVision (0.10.5): - - MediaPipeTasksCommon (= 0.10.5) + - MediaPipeTasksCommon (0.10.12) + - MediaPipeTasksVision (0.10.12): + - MediaPipeTasksCommon (= 0.10.12) DEPENDENCIES: - - MediaPipeTasksVision (= 0.10.5) + - MediaPipeTasksVision (= 0.10.12) SPEC REPOS: trunk: @@ -12,9 +12,9 @@ SPEC REPOS: - MediaPipeTasksVision SPEC CHECKSUMS: - MediaPipeTasksCommon: 235e81afa9f7bd5b5b39f00a858bfb2539d1a52f - MediaPipeTasksVision: d58f3a82c9fbda8c3be1bf2a1bf4d73e56a457e3 + MediaPipeTasksCommon: 254e6bff77804b262f6ecf180477142ea551e802 + MediaPipeTasksVision: 78d5c47cd7996b4d815bacba0a52dbf01458dfaf -PODFILE CHECKSUM: 2e1cd95933b8c68f98c3d3290332c555b26ed4e5 +PODFILE CHECKSUM: 29f14d152cc8d5ead847a890518b7de9bb9955fe -COCOAPODS: 1.12.1 +COCOAPODS: 1.14.3
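
Note (addendum, not part of the patch): the substance of this change is that a MediaPipeTasksVision Delegate value (CPU or GPU) now travels from the shared InferenceConfigurationManager singleton into ObjectDetectorOptions.baseOptions.delegate whenever an ObjectDetectorService creates its detector. A minimal Swift sketch of that wiring follows; the helper name makeObjectDetector and its default argument values are illustrative only and do not appear in the patch.

import MediaPipeTasksVision

// Sketch: build an ObjectDetector with the user-selected delegate.
// The option names mirror ObjectDetectorService.createObjectDetector() in this patch.
func makeObjectDetector(modelPath: String,
                        delegate: Delegate,
                        maxResults: Int = 3,
                        scoreThreshold: Float = 0.5) throws -> ObjectDetector {
  let options = ObjectDetectorOptions()
  options.baseOptions.modelAssetPath = modelPath
  options.baseOptions.delegate = delegate   // .CPU or .GPU, chosen in the bottom sheet
  options.runningMode = .image              // .video / .liveStream in the other factory methods
  options.maxResults = maxResults
  options.scoreThreshold = scoreThreshold
  return try ObjectDetector(options: options)
}

The patch keeps DefaultConstants.delegate = .CPU as the starting value, which is the conservative choice: creating a GPU-delegate detector can fail on devices or simulators without usable GPU support, while CPU always works.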
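A second sketch, likewise illustrative rather than part of the patch, shows how a delegate change made in the bottom sheet reaches a running detector: every setter on InferenceConfigurationManager posts notificationName, and the view controllers respond by tearing down and recreating their ObjectDetectorService, as CameraViewController does in the hunk above. The class name DetectorHostViewController and the nil live-stream delegate are placeholders.

import UIKit
import MediaPipeTasksVision

// Sketch of the observe-and-rebuild pattern used by the example's view controllers.
final class DetectorHostViewController: UIViewController {

  private var objectDetectorService: ObjectDetectorService?

  override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    // Rebuild the service whenever the user changes model, delegate, max results or threshold.
    // The real controllers also remove this observer in viewWillDisappear.
    NotificationCenter.default.addObserver(
      self,
      selector: #selector(rebuildDetectorService),
      name: InferenceConfigurationManager.notificationName,
      object: nil)
    rebuildDetectorService()
  }

  @objc private func rebuildDetectorService() {
    objectDetectorService = nil
    objectDetectorService = ObjectDetectorService.liveStreamDetectorService(
      model: InferenceConfigurationManager.sharedInstance.model,
      maxResults: InferenceConfigurationManager.sharedInstance.maxResults,
      scoreThreshold: InferenceConfigurationManager.sharedInstance.scoreThreshold,
      liveStreamDelegate: nil,  // the real controller passes `self` here
      delegate: InferenceConfigurationManager.sharedInstance.delegate)
  }
}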