diff --git a/SobrCameraView-Example.xcodeproj/project.pbxproj b/SobrCameraView-Example.xcodeproj/project.pbxproj
index eeb9771..c6dd405 100644
--- a/SobrCameraView-Example.xcodeproj/project.pbxproj
+++ b/SobrCameraView-Example.xcodeproj/project.pbxproj
@@ -174,14 +174,17 @@
 			isa = PBXProject;
 			attributes = {
 				LastSwiftUpdateCheck = 0700;
-				LastUpgradeCheck = 0710;
+				LastUpgradeCheck = 0800;
 				ORGANIZATIONNAME = "Software Brauerei AG";
 				TargetAttributes = {
 					9458EB621B30A48D0000DA1F = {
 						CreatedOnToolsVersion = 6.3.2;
+						DevelopmentTeam = M5QETZED9L;
+						LastSwiftMigration = 0800;
 					};
 					9458EB771B30A48D0000DA1F = {
 						CreatedOnToolsVersion = 6.3.2;
+						LastSwiftMigration = 0800;
 						TestTargetID = 9458EB621B30A48D0000DA1F;
 					};
 				};
@@ -289,8 +292,10 @@
 				CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
 				CLANG_WARN_EMPTY_BODY = YES;
 				CLANG_WARN_ENUM_CONVERSION = YES;
+				CLANG_WARN_INFINITE_RECURSION = YES;
 				CLANG_WARN_INT_CONVERSION = YES;
 				CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+				CLANG_WARN_SUSPICIOUS_MOVE = YES;
 				CLANG_WARN_UNREACHABLE_CODE = YES;
 				CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
 				"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
@@ -334,8 +339,10 @@
 				CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
 				CLANG_WARN_EMPTY_BODY = YES;
 				CLANG_WARN_ENUM_CONVERSION = YES;
+				CLANG_WARN_INFINITE_RECURSION = YES;
 				CLANG_WARN_INT_CONVERSION = YES;
 				CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+				CLANG_WARN_SUSPICIOUS_MOVE = YES;
 				CLANG_WARN_UNREACHABLE_CODE = YES;
 				CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
 				"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
@@ -354,6 +361,7 @@
 				IPHONEOS_DEPLOYMENT_TARGET = 8.3;
 				MTL_ENABLE_DEBUG_INFO = NO;
 				SDKROOT = iphoneos;
+				SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule";
 				VALIDATE_PRODUCT = YES;
 			};
 			name = Release;
@@ -362,10 +370,13 @@
 			isa = XCBuildConfiguration;
 			buildSettings = {
 				ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
+				DEVELOPMENT_TEAM = M5QETZED9L;
 				INFOPLIST_FILE = "SobrCameraView-Example/Info.plist";
+				IPHONEOS_DEPLOYMENT_TARGET = 10.0;
 				LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
 				PRODUCT_BUNDLE_IDENTIFIER = "ch.sobr.$(PRODUCT_NAME:rfc1034identifier)";
 				PRODUCT_NAME = "$(TARGET_NAME)";
+				SWIFT_VERSION = 3.0;
 			};
 			name = Debug;
 		};
@@ -373,10 +384,13 @@
 			isa = XCBuildConfiguration;
 			buildSettings = {
 				ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
+				DEVELOPMENT_TEAM = M5QETZED9L;
 				INFOPLIST_FILE = "SobrCameraView-Example/Info.plist";
+				IPHONEOS_DEPLOYMENT_TARGET = 10.0;
 				LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
 				PRODUCT_BUNDLE_IDENTIFIER = "ch.sobr.$(PRODUCT_NAME:rfc1034identifier)";
 				PRODUCT_NAME = "$(TARGET_NAME)";
+				SWIFT_VERSION = 3.0;
 			};
 			name = Release;
 		};
@@ -396,6 +410,7 @@
 				LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
 				PRODUCT_BUNDLE_IDENTIFIER = "ch.sobr.$(PRODUCT_NAME:rfc1034identifier)";
 				PRODUCT_NAME = "$(TARGET_NAME)";
+				SWIFT_VERSION = 3.0;
 				TEST_HOST = "$(BUILT_PRODUCTS_DIR)/SobrCameraView-Example.app/SobrCameraView-Example";
 			};
 			name = Debug;
@@ -412,6 +427,7 @@
 				LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
 				PRODUCT_BUNDLE_IDENTIFIER = "ch.sobr.$(PRODUCT_NAME:rfc1034identifier)";
 				PRODUCT_NAME = "$(TARGET_NAME)";
+				SWIFT_VERSION = 3.0;
 				TEST_HOST = "$(BUILT_PRODUCTS_DIR)/SobrCameraView-Example.app/SobrCameraView-Example";
 			};
 			name = Release;
diff --git a/SobrCameraView-Example/AppDelegate.swift b/SobrCameraView-Example/AppDelegate.swift
index f28e247..43d8aca 100644
--- a/SobrCameraView-Example/AppDelegate.swift
+++ b/SobrCameraView-Example/AppDelegate.swift
@@ -14,30 +14,30 @@ class AppDelegate: UIResponder, UIApplicationDelegate {
 
     var window: UIWindow?
 
-    func application(application: UIApplication, didFinishLaunchingWithOptions launchOptions: [NSObject: AnyObject]?) -> Bool {
+    func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool {
         // Override point for customization after application launch.
         return true
     }
 
-    func applicationWillResignActive(application: UIApplication) {
+    func applicationWillResignActive(_ application: UIApplication) {
         // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
         // Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game.
     }
 
-    func applicationDidEnterBackground(application: UIApplication) {
+    func applicationDidEnterBackground(_ application: UIApplication) {
         // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
         // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
     }
 
-    func applicationWillEnterForeground(application: UIApplication) {
+    func applicationWillEnterForeground(_ application: UIApplication) {
         // Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background.
     }
 
-    func applicationDidBecomeActive(application: UIApplication) {
+    func applicationDidBecomeActive(_ application: UIApplication) {
         // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
     }
 
-    func applicationWillTerminate(application: UIApplication) {
+    func applicationWillTerminate(_ application: UIApplication) {
         // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
     }
 
diff --git a/SobrCameraView-Example/ImageViewController.swift b/SobrCameraView-Example/ImageViewController.swift
index 2f58dec..15c0796 100644
--- a/SobrCameraView-Example/ImageViewController.swift
+++ b/SobrCameraView-Example/ImageViewController.swift
@@ -20,16 +20,16 @@ class ImageViewController: UIViewController {
 
     override func viewDidLoad() {
         super.viewDidLoad()
         // Do any additional setup after loading the view.
-        self.sourceImageView.contentMode = .ScaleAspectFit
+        self.sourceImageView.contentMode = .scaleAspectFit
     }
 
-    override func viewWillAppear(animated: Bool) {
+    override func viewWillAppear(_ animated: Bool) {
         super.viewWillAppear(animated)
         self.sourceImageView.image = self.sourceImage
     }
 
-    @IBAction func back(sender: UIButton) {
-        self.navigationController?.popViewControllerAnimated(true)
+    @IBAction func back(_ sender: UIButton) {
+        _ = self.navigationController?.popViewController(animated: true)
     }
 }
diff --git a/SobrCameraView-Example/MainViewController.swift b/SobrCameraView-Example/MainViewController.swift
index e904045..e7f59a3 100644
--- a/SobrCameraView-Example/MainViewController.swift
+++ b/SobrCameraView-Example/MainViewController.swift
@@ -13,43 +13,61 @@ class MainViewController: UIViewController {
 
     //MARK: Outlets
     @IBOutlet weak var cameraView: SobrCameraView!
 
-    private var _image: UIImage?
-    private var _feature: CIRectangleFeature?
+    fileprivate var _image: UIImage?
+    fileprivate var _feature: CIRectangleFeature?
 
     override func viewDidLoad() {
+        super.viewDidLoad()
         self.cameraView.setupCameraView()
         self.cameraView.borderDetectionEnabled = true
        self.cameraView.borderDetectionFrameColor = UIColor(red:0.2, green:0.6, blue:0.86, alpha:0.5)
+
+        Timer.scheduledTimer(timeInterval: 0.5, target: self, selector: #selector(MainViewController.updateIfBorderIsDetected), userInfo: nil, repeats: true)
     }
 
-    override func viewWillAppear(animated: Bool) {
+    func updateIfBorderIsDetected() {
+        if self.cameraView.isBorderDetected {
+            print("border detected")
+        } else {
+            print("nothing")
+        }
+    }
+
+    override func viewWillAppear(_ animated: Bool) {
         super.viewWillAppear(animated)
-        self.navigationController?.navigationBarHidden = true
+        self.navigationController?.isNavigationBarHidden = true
         self.cameraView.start()
     }
 
-    override func viewWillDisappear(animated: Bool) {
+    override func viewWillDisappear(_ animated: Bool) {
         super.viewWillDisappear(animated)
         self.cameraView.stop()
     }
 
-    override func prepareForSegue(segue: UIStoryboardSegue, sender: AnyObject?) {
+    override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
         if segue.identifier == "showImage" {
-            (segue.destinationViewController as! ImageViewController).sourceImage = self._image
+            (segue.destination as! ImageViewController).sourceImage = self._image
         }
     }
 
     //MARK: Actions
-    @IBAction func captureImage(sender: AnyObject?) {
+    @IBAction func captureImage(_ sender: AnyObject?) {
         self.cameraView.captureImage { (image, feature) -> Void in
             self._image = image
             self._feature = feature
-            self.performSegueWithIdentifier("showImage", sender: nil)
+            self.performSegue(withIdentifier: "showImage", sender: nil)
         }
     }
 
-    @IBAction func toggleTorch(sender: AnyObject?) {
+    @IBAction func toggleTorch(_ sender: AnyObject?) {
         self.cameraView.torchEnabled = !self.cameraView.torchEnabled
     }
diff --git a/SobrCameraView-Example/UIKitExtensions.swift b/SobrCameraView-Example/UIKitExtensions.swift
index b5937cc..d763216 100644
--- a/SobrCameraView-Example/UIKitExtensions.swift
+++ b/SobrCameraView-Example/UIKitExtensions.swift
@@ -10,7 +10,7 @@ import UIKit
 
 extension UIImageView {
     func contentScale() -> CGFloat {
-        return CGFloat(fminf(Float(CGRectGetWidth(self.bounds)/self.image!.size.width), Float(CGRectGetHeight(self.bounds)/self.image!.size.height)))
+        return CGFloat(fminf(Float(self.bounds.width/self.image!.size.width), Float(self.bounds.height/self.image!.size.height)))
     }
 
     func contentSize() -> CGSize {
@@ -21,6 +21,6 @@ extension UIImageView {
 
     func contentFrame() -> CGRect {
         let scaledImageSize = self.contentSize()
-        return CGRect(x: 0.5*(CGRectGetWidth(self.bounds) - scaledImageSize.width), y: 0.5 * (CGRectGetHeight(self.bounds) - scaledImageSize.height), width: scaledImageSize.width, height: scaledImageSize.height)
+        return CGRect(x: 0.5*(self.bounds.width - scaledImageSize.width), y: 0.5 * (self.bounds.height - scaledImageSize.height), width: scaledImageSize.width, height: scaledImageSize.height)
     }
 }
diff --git a/SobrCameraView-ExampleTests/SobrCameraView_ExampleTests.swift b/SobrCameraView-ExampleTests/SobrCameraView_ExampleTests.swift
index 3d53fa0..a887bba 100644
--- a/SobrCameraView-ExampleTests/SobrCameraView_ExampleTests.swift
+++ b/SobrCameraView-ExampleTests/SobrCameraView_ExampleTests.swift
@@ -28,7 +28,7 @@ class SobrCameraView_ExampleTests: XCTestCase {
 
     func testPerformanceExample() {
         // This is an example of a performance test case.
-        self.measureBlock() {
+        self.measure() {
             // Put the code you want to measure the time of here.
         }
     }
diff --git a/SobrCameraView/SobrCameraView.swift b/SobrCameraView/SobrCameraView.swift
index 6a4bdb5..4f19f4e 100644
--- a/SobrCameraView/SobrCameraView.swift
+++ b/SobrCameraView/SobrCameraView.swift
@@ -21,27 +21,27 @@ Available Image Filters
 - `.Normal`: Increases the contrast on colored pictures.
 */
 public enum SobrCameraViewImageFilter: Int {
-    case BlackAndWhite = 0
-    case Normal = 1
+    case blackAndWhite = 0
+    case normal = 1
 }
 
 /**
 *  A simple UIView-Subclass which enables border detection of documents
 */
-public class SobrCameraView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
+open class SobrCameraView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
     //MARK: Properties
     /// Enables realtime border detection.
-    public var borderDetectionEnabled = true
+    open var borderDetectionEnabled = true
     /// The color of the detection frame.
-    public var borderDetectionFrameColor: UIColor = UIColor(red: 1, green: 0, blue: 0, alpha: 0.5)
+    open var borderDetectionFrameColor: UIColor = UIColor(red: 1, green: 0, blue: 0, alpha: 0.5)
     /// Sets the torch enabled or disabled.
-    public var torchEnabled = false {
+    open var torchEnabled = false {
         didSet {
             if let device = self.captureDevice {
                 if device.hasTorch && device.hasFlash {
                     try! device.lockForConfiguration()
-                    device.torchMode = self.torchEnabled ? .On : .Off
+                    device.torchMode = self.torchEnabled ? .on : .off
                     device.unlockForConfiguration()
                 }
             }
@@ -49,63 +49,77 @@ public class SobrCameraView: UIView, AVCaptureVideoDataOutputSampleBufferDelegat
     }
 
     /// Sets the imageFilter based on `SobrCameraViewImageFilter` Enum.
-    public var imageFilter: SobrCameraViewImageFilter = .Normal {
+    open var imageFilter: SobrCameraViewImageFilter = .normal {
         didSet {
             if let glkView = self.glkView {
-                let effect = UIBlurEffect(style: .Dark)
+                let effect = UIBlurEffect(style: .dark)
                 let effectView = UIVisualEffectView(effect: effect)
                 effectView.frame = self.bounds
                 self.insertSubview(effectView, aboveSubview: glkView)
 
-                dispatch_after(dispatch_time(DISPATCH_TIME_NOW, Int64(0.25 * Double(NSEC_PER_SEC))), dispatch_get_main_queue(), { () -> Void in
+                DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + Double(Int64(0.25 * Double(NSEC_PER_SEC))) / Double(NSEC_PER_SEC), execute: { () -> Void in
                     effectView.removeFromSuperview()
                 })
             }
         }
     }
 
+    /// Indicates whether a border is currently detected.
+    open var isBorderDetected: Bool = false
+
+    /// The last detected rectangle, or an empty `CIRectangleFeature` if none has been detected yet.
+    open var lastDetectedRectangle: CIRectangleFeature {
+        if borderDetectLastRectangleFeature != nil {
+            return borderDetectLastRectangleFeature!
+        } else {
+            return CIRectangleFeature()
+        }
+    }
+
     //MARK: Private Properties
-    private var captureSession = AVCaptureSession()
-    private var captureDevice: AVCaptureDevice?
-    private var context: EAGLContext?
-    private var stillImageOutput: AVCaptureStillImageOutput = AVCaptureStillImageOutput()
-    private var forceStop: Bool = false
-    private var coreImageContext: CIContext?
-    private var renderBuffer: GLuint = 0
-    private var glkView: GLKView?
-    private var stopped: Bool = false
-    private var imageDetectionConfidence = 0.0
-    private var borderDetectFrame: Bool = false
-    private var borderDetectLastRectangleFeature: CIRectangleFeature?
-    private var capturing: Bool = false
-    private var timeKeeper: NSTimer?
+    fileprivate var captureSession = AVCaptureSession()
+    fileprivate var captureDevice: AVCaptureDevice?
+    fileprivate var context: EAGLContext?
+    fileprivate var stillImageOutput: AVCaptureStillImageOutput = AVCaptureStillImageOutput()
+    fileprivate var forceStop: Bool = false
+    fileprivate var coreImageContext: CIContext?
+    fileprivate var renderBuffer: GLuint = 0
+    fileprivate var glkView: GLKView?
+    fileprivate var stopped: Bool = false
+    fileprivate var imageDetectionConfidence = 0.0
+    fileprivate var borderDetectFrame: Bool = false
+    fileprivate var borderDetectLastRectangleFeature: CIRectangleFeature?
+    fileprivate var capturing: Bool = false
+    fileprivate var timeKeeper: Timer?
 
-    private static let highAccuracyRectangleDetector = CIDetector(ofType: CIDetectorTypeRectangle, context: nil, options: [CIDetectorAccuracy: CIDetectorAccuracyHigh])
+    fileprivate static let highAccuracyRectangleDetector = CIDetector(ofType: CIDetectorTypeRectangle, context: nil, options: [CIDetectorAccuracy: CIDetectorAccuracyHigh])
 
     //MARK: Lifecycle
     /**
     Adds observers to the NSNotificationCenter.
     */
-    public override func awakeFromNib() {
+    open override func awakeFromNib() {
         super.awakeFromNib()
-        NSNotificationCenter.defaultCenter().addObserver(self, selector: Selector("_backgroundMode"), name: UIApplicationWillResignActiveNotification, object: nil)
-        NSNotificationCenter.defaultCenter().addObserver(self, selector: Selector("_foregroundMode"), name: UIApplicationDidBecomeActiveNotification, object: nil)
+        NotificationCenter.default.addObserver(self, selector: #selector(SobrCameraView._backgroundMode), name: NSNotification.Name.UIApplicationWillResignActive, object: nil)
+        NotificationCenter.default.addObserver(self, selector: #selector(SobrCameraView._foregroundMode), name: NSNotification.Name.UIApplicationDidBecomeActive, object: nil)
     }
 
     deinit {
-        NSNotificationCenter.defaultCenter().removeObserver(self)
+        NotificationCenter.default.removeObserver(self)
     }
 
     //MARK: Actions
     /**
     Set's up all needed Elements for Video and Border detection. Should be called in `viewDidLoad:` in the view controller.
     */
-    public func setupCameraView() {
+    open func setupCameraView() {
         self.setupGLKView()
 
-        let allDevices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
-        let aDevice: AnyObject? = allDevices.first
+        let allDevices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo)
+        let aDevice: AnyObject? = allDevices?.first as AnyObject?
 
         if aDevice == nil {
             return
@@ -121,23 +135,23 @@ public class SobrCameraView: UIView, AVCaptureVideoDataOutputSampleBufferDelegat
         let dataOutput = AVCaptureVideoDataOutput()
         dataOutput.alwaysDiscardsLateVideoFrames = true
         // dataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_32BGRA]
-        dataOutput.setSampleBufferDelegate(self, queue: dispatch_get_main_queue())
+        dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
         self.captureSession.addOutput(dataOutput)
 
         self.captureSession.addOutput(self.stillImageOutput)
 
         let connection = dataOutput.connections.first as! AVCaptureConnection
-        connection.videoOrientation = .Portrait
+        connection.videoOrientation = .portrait
 
-        if self.captureDevice!.flashAvailable {
+        if self.captureDevice!.isFlashAvailable {
             try! self.captureDevice?.lockForConfiguration()
-            self.captureDevice?.flashMode = .Off
+            self.captureDevice?.flashMode = .off
             self.captureDevice?.unlockForConfiguration()
         }
 
-        if self.captureDevice!.isFocusModeSupported(.ContinuousAutoFocus) {
+        if self.captureDevice!.isFocusModeSupported(.continuousAutoFocus) {
             try! self.captureDevice?.lockForConfiguration()
-            self.captureDevice?.focusMode = .ContinuousAutoFocus
+            self.captureDevice?.focusMode = .continuousAutoFocus
             self.captureDevice?.unlockForConfiguration()
         }
 
@@ -148,17 +162,17 @@ public class SobrCameraView: UIView, AVCaptureVideoDataOutputSampleBufferDelegat
     /**
     Starts the camera.
     */
-    public func start() {
+    open func start() {
         self.stopped = false
         self.captureSession.startRunning()
         self.hideGlkView(false, completion: nil)
-        self.timeKeeper = NSTimer.scheduledTimerWithTimeInterval(0.5, target: self, selector: Selector("_enableBorderDetection"), userInfo: nil, repeats: true)
+        self.timeKeeper = Timer.scheduledTimer(timeInterval: 0.5, target: self, selector: #selector(SobrCameraView._enableBorderDetection), userInfo: nil, repeats: true)
     }
 
     /**
     Stops the camera
     */
-    public func stop() {
+    open func stop() {
         self.stopped = true
         self.captureSession.stopRunning()
         self.hideGlkView(true, completion: nil)
@@ -171,19 +185,19 @@ public class SobrCameraView: UIView, AVCaptureVideoDataOutputSampleBufferDelegat
     :param: point The point to focus.
     :param: completion The completion handler will be called everytime. Even if the camera does not support focus.
     */
-    public func focusAt(point: CGPoint, completion:((Void)-> Void)?) {
+    open func focusAt(_ point: CGPoint, completion:((Void)-> Void)?) {
         if let device = self.captureDevice {
             let poi = CGPoint(x: point.y / self.bounds.height, y: 1.0 - (point.x / self.bounds.width))
-            if device.focusPointOfInterestSupported && device.isFocusModeSupported(.AutoFocus) {
+            if device.isFocusPointOfInterestSupported && device.isFocusModeSupported(.autoFocus) {
                 try! device.lockForConfiguration()
-                if device.isFocusModeSupported(.ContinuousAutoFocus) {
-                    device.focusMode = .ContinuousAutoFocus
+                if device.isFocusModeSupported(.continuousAutoFocus) {
+                    device.focusMode = .continuousAutoFocus
                     device.focusPointOfInterest = poi
                 }
 
-                if device.exposurePointOfInterestSupported && device.isExposureModeSupported(.ContinuousAutoExposure) {
+                if device.isExposurePointOfInterestSupported && device.isExposureModeSupported(.continuousAutoExposure) {
                     device.exposurePointOfInterest = poi
-                    device.exposureMode = .ContinuousAutoExposure
+                    device.exposureMode = .continuousAutoExposure
                 }
 
                 device.unlockForConfiguration()
@@ -201,7 +215,7 @@ public class SobrCameraView: UIView, AVCaptureVideoDataOutputSampleBufferDelegat
 
     :param: completion Returns the image as `UIImage`.
     */
-    public func captureImage(completion: (image: UIImage, feature: CIRectangleFeature?) -> Void) {
+    open func captureImage(_ completion: @escaping (_ image: UIImage, _ feature: CIRectangleFeature?) -> Void) {
         if self.capturing {
             return
         }
@@ -225,31 +239,32 @@ public class SobrCameraView: UIView, AVCaptureVideoDataOutputSampleBufferDelegat
             }
         }
 
-        self.stillImageOutput.captureStillImageAsynchronouslyFromConnection(videoConnection!, completionHandler: { (imageSampleBuffer, error) -> Void in
-            let jpg = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageSampleBuffer)
-            var enhancedImage: CIImage = CIImage(data: jpg)!
+        self.stillImageOutput.captureStillImageAsynchronously(from: videoConnection!, completionHandler: { (imageSampleBuffer, error) -> Void in
+            let jpg = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageSampleBuffer)
+            var enhancedImage: CIImage = CIImage(data: jpg!)!
 
             switch self.imageFilter {
-            case .BlackAndWhite:
+            case .blackAndWhite:
                 enhancedImage = self.contrastFilter(enhancedImage)
             default:
                 enhancedImage = self.enhanceFilter(enhancedImage)
             }
 
             if self.borderDetectionEnabled && self.detectionConfidenceValid() {
-                if let rectangleFeature = self.biggestRectangle(SobrCameraView.highAccuracyRectangleDetector.featuresInImage(enhancedImage) as! [CIRectangleFeature]) {
+                if let rectangleFeature = self.biggestRectangle(SobrCameraView.highAccuracyRectangleDetector?.features(in: enhancedImage) as! [CIRectangleFeature]) {
                     enhancedImage = self.perspectiveCorrectedImage(enhancedImage, feature: rectangleFeature)
                 }
             }
 
             UIGraphicsBeginImageContext(CGSize(width: enhancedImage.extent.size.height, height: enhancedImage.extent.size.width))
-            UIImage(CIImage: enhancedImage, scale: 1.0, orientation: UIImageOrientation.Right).drawInRect(CGRect(x: 0, y: 0, width: enhancedImage.extent.size.height, height: enhancedImage.extent.size.width))
+            UIImage(ciImage: enhancedImage, scale: 1.0, orientation: UIImageOrientation.right).draw(in: CGRect(x: 0, y: 0, width: enhancedImage.extent.size.height, height: enhancedImage.extent.size.width))
             let image = UIGraphicsGetImageFromCurrentImageContext()
             UIGraphicsEndImageContext()
 
-            completion(image: image, feature: self.biggestRectangle(SobrCameraView.highAccuracyRectangleDetector.featuresInImage(enhancedImage) as! [CIRectangleFeature]))
+            completion(image!, self.biggestRectangle(SobrCameraView.highAccuracyRectangleDetector?.features(in: enhancedImage) as! [CIRectangleFeature]))
         })
         self.capturing = false
     }
@@ -258,50 +273,50 @@
     /**
     This method is for internal use only. But it must be public to subscribe to `NSNotificationCenter` events.
     */
-    public func _backgroundMode() {
+    open func _backgroundMode() {
        self.forceStop = true
     }
 
     /**
     This method is for internal use only. But it must be public to subscribe to `NSNotificationCenter` events.
     */
-    public func _foregroundMode() {
+    open func _foregroundMode() {
         self.forceStop = false
     }
 
     /**
     This method is for internal use only. But it must be public to subscribe to `NSNotificationCenter` events.
     */
-    public func _enableBorderDetection() {
+    open func _enableBorderDetection() {
         self.borderDetectFrame = true
     }
 
-    private func setupGLKView() {
+    fileprivate func setupGLKView() {
         if let _ = self.context {
             return
         }
 
-        self.context = EAGLContext(API: .OpenGLES2)
+        self.context = EAGLContext(api: .openGLES2)
         self.glkView = GLKView(frame: self.bounds, context: self.context!)
-        self.glkView!.autoresizingMask = ([UIViewAutoresizing.FlexibleWidth, UIViewAutoresizing.FlexibleHeight])
+        self.glkView!.autoresizingMask = ([UIViewAutoresizing.flexibleWidth, UIViewAutoresizing.flexibleHeight])
         self.glkView!.translatesAutoresizingMaskIntoConstraints = true
         self.glkView!.contentScaleFactor = 1.0
-        self.glkView!.drawableDepthFormat = .Format24
-        self.insertSubview(self.glkView!, atIndex: 0)
+        self.glkView!.drawableDepthFormat = .format24
+        self.insertSubview(self.glkView!, at: 0)
         glGenRenderbuffers(1, &self.renderBuffer)
         glBindRenderbuffer(GLenum(GL_RENDERBUFFER), self.renderBuffer)
-        self.coreImageContext = CIContext(EAGLContext: self.context!, options: [kCIContextUseSoftwareRenderer: true])
-        EAGLContext.setCurrentContext(self.context!)
+        self.coreImageContext = CIContext(eaglContext: self.context!, options: [kCIContextUseSoftwareRenderer: true])
+        EAGLContext.setCurrent(self.context!)
     }
 
-    private func contrastFilter(image: CIImage) -> CIImage {
+    fileprivate func contrastFilter(_ image: CIImage) -> CIImage {
         return CIFilter(name: "CIColorControls", withInputParameters: ["inputContrast":1.1, kCIInputImageKey: image])!.outputImage!
     }
 
-    private func enhanceFilter(image: CIImage) -> CIImage {
+    fileprivate func enhanceFilter(_ image: CIImage) -> CIImage {
         return CIFilter(name: "CIColorControls", withInputParameters: ["inputBrightness":0.0, "inputContrast":1.14, "inputSaturation":0.0, kCIInputImageKey: image])!.outputImage!
     }
 
-    private func biggestRectangle(rectangles: [CIRectangleFeature]) -> CIRectangleFeature? {
+    fileprivate func biggestRectangle(_ rectangles: [CIRectangleFeature]) -> CIRectangleFeature? {
         if rectangles.count == 0 {
             return nil
         }
@@ -327,42 +342,42 @@ public class SobrCameraView: UIView, AVCaptureVideoDataOutputSampleBufferDelegat
         return biggestRectangle
     }
 
-    private func overlayImageForFeatureInImage(image: CIImage, feature: CIRectangleFeature) -> CIImage! {
+    fileprivate func overlayImageForFeatureInImage(_ image: CIImage, feature: CIRectangleFeature) -> CIImage! {
         var overlay = CIImage(color: CIColor(color: self.borderDetectionFrameColor))
-        overlay = overlay.imageByCroppingToRect(image.extent)
-        overlay = overlay.imageByApplyingFilter("CIPerspectiveTransformWithExtent", withInputParameters: ["inputExtent": CIVector(CGRect: image.extent),
-            "inputTopLeft": CIVector(CGPoint: feature.topLeft),
-            "inputTopRight": CIVector(CGPoint: feature.topRight),
-            "inputBottomLeft": CIVector(CGPoint: feature.bottomLeft),
-            "inputBottomRight": CIVector(CGPoint: feature.bottomRight)])
-        return overlay.imageByCompositingOverImage(image)
+        overlay = overlay.cropping(to: image.extent)
+        overlay = overlay.applyingFilter("CIPerspectiveTransformWithExtent", withInputParameters: ["inputExtent": CIVector(cgRect: image.extent),
+            "inputTopLeft": CIVector(cgPoint: feature.topLeft),
+            "inputTopRight": CIVector(cgPoint: feature.topRight),
+            "inputBottomLeft": CIVector(cgPoint: feature.bottomLeft),
+            "inputBottomRight": CIVector(cgPoint: feature.bottomRight)])
+        return overlay.compositingOverImage(image)
     }
 
-    private func hideGlkView(hide: Bool, completion:( () -> Void)?) {
-        UIView.animateWithDuration(0.1, animations: { () -> Void in
+    fileprivate func hideGlkView(_ hide: Bool, completion: (() -> Void)?) {
+        UIView.animate(withDuration: 0.1, animations: { () -> Void in
             self.glkView?.alpha = (hide) ? 0.0 : 1.0
-        }) { (finished) -> Void in
+        }, completion: { (finished) -> Void in
             completion?()
-        }
+        })
     }
 
-    private func detectionConfidenceValid() -> Bool {
+    fileprivate func detectionConfidenceValid() -> Bool {
         return (self.imageDetectionConfidence > 1.0)
     }
 
-    private func perspectiveCorrectedImage(image: CIImage, feature: CIRectangleFeature) -> CIImage {
-        return image.imageByApplyingFilter("CIPerspectiveCorrection", withInputParameters: [
-            "inputTopLeft": CIVector(CGPoint: feature.topLeft),
-            "inputTopRight": CIVector(CGPoint: feature.topRight),
-            "inputBottomLeft": CIVector(CGPoint: feature.bottomLeft),
-            "inputBottomRight":CIVector(CGPoint: feature.bottomRight)])
+    fileprivate func perspectiveCorrectedImage(_ image: CIImage, feature: CIRectangleFeature) -> CIImage {
+        return image.applyingFilter("CIPerspectiveCorrection", withInputParameters: [
+            "inputTopLeft": CIVector(cgPoint: feature.topLeft),
+            "inputTopRight": CIVector(cgPoint: feature.topRight),
+            "inputBottomLeft": CIVector(cgPoint: feature.bottomLeft),
+            "inputBottomRight": CIVector(cgPoint: feature.bottomRight)])
     }
 
     //MARK: AVCaptureVideoDataOutputSampleBufferDelegate
     /**
     This method is for internal use only. But must be declared public because it matches a requirement in public protocol `AVCaptureVideoDataOutputSampleBufferDelegate`.
     */
-    public func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
+    open func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
         if self.forceStop {
             return
         }
@@ -372,10 +387,10 @@ public class SobrCameraView: UIView, AVCaptureVideoDataOutputSampleBufferDelegat
         }
 
         let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
-        var image = CIImage(CVPixelBuffer: pixelBuffer!)
+        var image = CIImage(cvPixelBuffer: pixelBuffer!)
 
         switch self.imageFilter {
-        case .BlackAndWhite:
+        case .blackAndWhite:
             image = self.contrastFilter(image)
         default:
             image = self.enhanceFilter(image)
@@ -383,21 +398,23 @@ public class SobrCameraView: UIView, AVCaptureVideoDataOutputSampleBufferDelegat
 
         if self.borderDetectionEnabled {
             if self.borderDetectFrame {
-                self.borderDetectLastRectangleFeature = self.biggestRectangle(SobrCameraView.highAccuracyRectangleDetector.featuresInImage(image) as! [CIRectangleFeature])
+                self.borderDetectLastRectangleFeature = self.biggestRectangle(SobrCameraView.highAccuracyRectangleDetector?.features(in: image) as! [CIRectangleFeature])
                 self.borderDetectFrame = false
             }
 
             if let lastRectFeature = self.borderDetectLastRectangleFeature {
                 self.imageDetectionConfidence += 0.5
                 image = self.overlayImageForFeatureInImage(image, feature: lastRectFeature)
+                self.isBorderDetected = true
             } else {
                 self.imageDetectionConfidence = 0.0
+                self.isBorderDetected = false
             }
         }
 
         if let context = self.context, let ciContext = self.coreImageContext, let glkView = self.glkView {
-            ciContext.drawImage(image, inRect: self.bounds, fromRect: image.extent)
+            ciContext.draw(image, in: self.bounds, from: image.extent)
             context.presentRenderbuffer(Int(GL_RENDERBUFFER))
             glkView.setNeedsDisplay()
         }
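
For reference, a minimal sketch of how a host view controller might drive the migrated API. The `ScannerViewController` name and the 0.5-second polling interval are illustrative assumptions; the `SobrCameraView` calls mirror the Swift 3 signatures introduced in the diff above.

import UIKit
import CoreImage

class ScannerViewController: UIViewController {
    @IBOutlet weak var cameraView: SobrCameraView!
    fileprivate var pollTimer: Timer?

    override func viewDidLoad() {
        super.viewDidLoad()
        // Same setup sequence as MainViewController above.
        self.cameraView.setupCameraView()
        self.cameraView.borderDetectionEnabled = true
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        self.cameraView.start()
        // Poll the new `isBorderDetected` flag, as the example app does.
        self.pollTimer = Timer.scheduledTimer(timeInterval: 0.5, target: self, selector: #selector(ScannerViewController.checkBorder), userInfo: nil, repeats: true)
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        // Invalidate the timer so it does not keep this controller alive.
        self.pollTimer?.invalidate()
        self.cameraView.stop()
    }

    func checkBorder() {
        // `lastDetectedRectangle` falls back to an empty CIRectangleFeature
        // when nothing has been detected yet, so gate on the flag first.
        guard self.cameraView.isBorderDetected else { return }
        let rect = self.cameraView.lastDetectedRectangle
        print("corners: \(rect.topLeft) \(rect.topRight) \(rect.bottomLeft) \(rect.bottomRight)")
    }

    @IBAction func capture(_ sender: Any?) {
        // Swift 3 drops the argument labels from the completion closure.
        self.cameraView.captureImage { (image, feature) in
            print("captured image of size \(image.size); feature: \(String(describing: feature))")
        }
    }
}

Polling mirrors the Timer-based approach the example app itself uses; a delegate or closure callback on SobrCameraView would avoid the timer, but no such hook is part of this API.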