Commit
Working app
Added live feed detection to predict hand signs from HandSigns.mlmodel trained using Custom Vision.
sayaleepote committed Jul 3, 2018
1 parent 76961ad commit 3779a71
Showing 6 changed files with 110 additions and 13 deletions.
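
Before the file-by-file diff, a sketch of the flow this commit introduces: each camera frame is handed to a Vision request backed by the Custom Vision-trained HandSigns.mlmodel, and the top classification drives the on-screen label. The helper below is illustrative only (not part of the commit) and assumes the Xcode-generated HandSigns model class added here.

import CoreVideo
import Vision

// Illustrative helper: classify one captured frame with the bundled HandSigns model
// and report the top label and its confidence.
func classifyHandSign(in pixelBuffer: CVPixelBuffer,
                      completion: @escaping (String, VNConfidence) -> Void) {
    guard let model = try? VNCoreMLModel(for: HandSigns().model) else { return }
    let request = VNCoreMLRequest(model: model) { request, _ in
        guard let top = (request.results as? [VNClassificationObservation])?.first else { return }
        completion(top.identifier, top.confidence) // e.g. ("VictoryHand", 0.93)
    }
    try? VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:]).perform([request])
}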
20 changes: 16 additions & 4 deletions CustomVisionMicrosoftToCoreML.xcodeproj/project.pbxproj
@@ -13,6 +13,7 @@
4829C79620E54A1800F77F34 /* CustomVisionMicrosoftToCoreML.xcdatamodeld in Sources */ = {isa = PBXBuildFile; fileRef = 4829C79420E54A1800F77F34 /* CustomVisionMicrosoftToCoreML.xcdatamodeld */; };
4829C79820E54A1900F77F34 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 4829C79720E54A1900F77F34 /* Assets.xcassets */; };
4829C79B20E54A1900F77F34 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 4829C79920E54A1900F77F34 /* LaunchScreen.storyboard */; };
4829C7A320E54AC100F77F34 /* HandSigns.mlmodel in Sources */ = {isa = PBXBuildFile; fileRef = 4829C7A220E54AC100F77F34 /* HandSigns.mlmodel */; };
/* End PBXBuildFile section */

/* Begin PBXFileReference section */
@@ -24,6 +25,7 @@
4829C79720E54A1900F77F34 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
4829C79A20E54A1900F77F34 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
4829C79C20E54A1900F77F34 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
4829C7A220E54AC100F77F34 /* HandSigns.mlmodel */ = {isa = PBXFileReference; lastKnownFileType = file.mlmodel; path = HandSigns.mlmodel; sourceTree = SOURCE_ROOT; };
/* End PBXFileReference section */

/* Begin PBXFrameworksBuildPhase section */
@@ -58,6 +60,7 @@
children = (
4829C78D20E54A1800F77F34 /* AppDelegate.swift */,
4829C78F20E54A1800F77F34 /* ViewController.swift */,
4829C7A220E54AC100F77F34 /* HandSigns.mlmodel */,
4829C79120E54A1800F77F34 /* Main.storyboard */,
4829C79720E54A1900F77F34 /* Assets.xcassets */,
4829C79920E54A1900F77F34 /* LaunchScreen.storyboard */,
@@ -138,6 +141,7 @@
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
4829C7A320E54AC100F77F34 /* HandSigns.mlmodel in Sources */,
4829C79020E54A1800F77F34 /* ViewController.swift in Sources */,
4829C78E20E54A1800F77F34 /* AppDelegate.swift in Sources */,
4829C79620E54A1800F77F34 /* CustomVisionMicrosoftToCoreML.xcdatamodeld in Sources */,
@@ -284,14 +288,18 @@
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CODE_SIGN_STYLE = Automatic;
CODE_SIGN_STYLE = Manual;
DEVELOPMENT_TEAM = 54NTRVDHZ4;
INFOPLIST_FILE = CustomVisionMicrosoftToCoreML/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 11.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
PRODUCT_BUNDLE_IDENTIFIER = com.assignment.app.CustomVisionMicrosoftToCoreML;
PRODUCT_BUNDLE_IDENTIFIER = com.app.CustomVisionMicrosoftToCoreML;
PRODUCT_NAME = "$(TARGET_NAME)";
PROVISIONING_PROFILE = "";
PROVISIONING_PROFILE_SPECIFIER = "";
SWIFT_VERSION = 4.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
@@ -301,14 +309,18 @@
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CODE_SIGN_STYLE = Automatic;
CODE_SIGN_STYLE = Manual;
DEVELOPMENT_TEAM = 54NTRVDHZ4;
INFOPLIST_FILE = CustomVisionMicrosoftToCoreML/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 11.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
PRODUCT_BUNDLE_IDENTIFIER = com.assignment.app.CustomVisionMicrosoftToCoreML;
PRODUCT_BUNDLE_IDENTIFIER = com.app.CustomVisionMicrosoftToCoreML;
PRODUCT_NAME = "$(TARGET_NAME)";
PROVISIONING_PROFILE = "";
PROVISIONING_PROFILE_SPECIFIER = "";
SWIFT_VERSION = 4.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
@@ -0,0 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<Bucket
type = "1"
version = "2.0">
</Bucket>
28 changes: 25 additions & 3 deletions CustomVisionMicrosoftToCoreML/Base.lproj/Main.storyboard
@@ -1,24 +1,46 @@
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="13122.16" systemVersion="17A277" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="BYZ-38-t0r">
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="14113" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="BYZ-38-t0r">
<device id="retina4_7" orientation="portrait">
<adaptation id="fullscreen"/>
</device>
<dependencies>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="13104.12"/>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="14088"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="tne-QT-ifu">
<objects>
<viewController id="BYZ-38-t0r" customClass="ViewController" customModuleProvider="target" sceneMemberID="viewController">
<viewController id="BYZ-38-t0r" customClass="ViewController" customModule="CustomVisionMicrosoftToCoreML" customModuleProvider="target" sceneMemberID="viewController">
<view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="" textAlignment="center" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="93L-3H-2pl">
<rect key="frame" x="16" y="623" width="343" height="0.0"/>
<color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<fontDescription key="fontDescription" type="system" pointSize="18"/>
<nil key="textColor"/>
<nil key="highlightedColor"/>
</label>
</subviews>
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstItem="6Tk-OE-BBY" firstAttribute="bottom" secondItem="93L-3H-2pl" secondAttribute="bottom" constant="44" id="Rmk-mA-8Gj"/>
<constraint firstItem="6Tk-OE-BBY" firstAttribute="trailing" secondItem="93L-3H-2pl" secondAttribute="trailing" constant="16" id="UeU-3s-Zmw"/>
<constraint firstItem="93L-3H-2pl" firstAttribute="leading" secondItem="6Tk-OE-BBY" secondAttribute="leading" constant="16" id="VFZ-pU-K2F"/>
</constraints>
<viewLayoutGuide key="safeArea" id="6Tk-OE-BBY"/>
</view>
<connections>
<outlet property="predictionLabel" destination="93L-3H-2pl" id="fpU-u6-xVw"/>
</connections>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="117.59999999999999" y="118.29085457271366"/>
</scene>
</scenes>
</document>
4 changes: 2 additions & 2 deletions CustomVisionMicrosoftToCoreML/Info.plist
@@ -20,6 +20,8 @@
<string>1</string>
<key>LSRequiresIPhoneOS</key>
<true/>
<key>NSCameraUsageDescription</key>
<string>App needs camera for detection</string>
<key>UILaunchStoryboardName</key>
<string>LaunchScreen</string>
<key>UIMainStoryboardFile</key>
@@ -31,8 +33,6 @@
<key>UISupportedInterfaceOrientations</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
<key>UISupportedInterfaceOrientations~ipad</key>
<array>
66 changes: 62 additions & 4 deletions CustomVisionMicrosoftToCoreML/ViewController.swift
@@ -7,19 +7,77 @@
//

import UIKit
import AVKit
import Vision

class ViewController: UIViewController {
enum HandSign: String {
case fiveHand = "FiveHand"
case fistHand = "FistHand"
case victoryHand = "VictoryHand"
case noHand = "NoHand"
}

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

@IBOutlet weak var predictionLabel: UILabel!

override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view, typically from a nib.
configureCamera()
}

override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}


func configureCamera() {
// Create the capture session and add the default video (back camera) input.
let captureSession = AVCaptureSession()
captureSession.sessionPreset = .photo
guard let captureDevice = AVCaptureDevice.default(for: .video) else { return }
guard let captureInput = try? AVCaptureDeviceInput(device: captureDevice) else { return }
captureSession.addInput(captureInput)

// Show the live camera feed full screen behind the prediction label.
let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
view.layer.addSublayer(previewLayer)
previewLayer.frame = view.frame

// Deliver frames to captureOutput(_:didOutput:from:) on a background queue.
let dataOutput = AVCaptureVideoDataOutput()
dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))
captureSession.addOutput(dataOutput)

// Start the session only after inputs and outputs are configured.
captureSession.startRunning()
}

// MARK: - AVCaptureVideoDataOutputSampleBufferDelegate

func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
// Get the pixel buffer for the current video frame.
guard let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }

// Wrap the Core ML model exported from Custom Vision in a Vision model.
guard let handSignsModel = try? VNCoreMLModel(for: HandSigns().model) else { return }

// Classify the frame and map the top result to a readable hand sign.
let request = VNCoreMLRequest(model: handSignsModel) { (finishedRequest, err) in

guard let results = finishedRequest.results as? [VNClassificationObservation] else { return }
guard let firstResult = results.first else { return }
var predictionString = ""
// Update the label on the main thread with the predicted sign and its confidence.
DispatchQueue.main.async {
switch firstResult.identifier {
case HandSign.fistHand.rawValue:
predictionString = "Fist👊🏽"
case HandSign.victoryHand.rawValue:
predictionString = "Victory✌🏽"
case HandSign.fiveHand.rawValue:
predictionString = "High Five🖐🏽"
case HandSign.noHand.rawValue:
predictionString = "No Hand ❎"
default:
break
}
self.predictionLabel.text = predictionString + " (\(firstResult.confidence))"
}
}

// Run the classification request on the current frame.
try? VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:]).perform([request])
}

}
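
One refinement worth considering: the delegate above builds a new VNCoreMLModel on every frame. A hedged sketch of a cached variant (the visionModel property and classifyFrame(_:) helper are illustrative, not part of this commit):

// Load the Vision model once instead of once per frame (illustrative sketch).
lazy var visionModel: VNCoreMLModel? = try? VNCoreMLModel(for: HandSigns().model)

func classifyFrame(_ pixelBuffer: CVPixelBuffer) {
    guard let visionModel = visionModel else { return }
    let request = VNCoreMLRequest(model: visionModel) { request, _ in
        guard let top = (request.results as? [VNClassificationObservation])?.first else { return }
        DispatchQueue.main.async {
            // Same label mapping as the switch above; confidence trimmed to two decimals.
            self.predictionLabel.text = "\(top.identifier) (\(String(format: "%.2f", top.confidence)))"
        }
    }
    try? VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:]).perform([request])
}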

Binary file added HandSigns.mlmodel
Binary file not shown.
