[iOS] Capturing Still Images with Focus & Exposure
import UIKit
import AVFoundation
import Photos

class ViewController: UIViewController {

    var focusMarker: UIImageView!
    var exposureMarker: UIImageView!
    var resetMarker: UIImageView!
    private var adjustingExposureContext: String = ""

    @IBOutlet weak var camPreview: UIView!
    @IBOutlet weak var thumbnail: UIButton!
    @IBOutlet weak var flashLabel: UILabel!

    let captureSession = AVCaptureSession()
    var previewLayer: AVCaptureVideoPreviewLayer!
    var activeInput: AVCaptureDeviceInput!
    let imageOutput = AVCaptureStillImageOutput()

    override func viewDidLoad() {
        super.viewDidLoad()
        setupSession()
        setupPreview()
        startSession()
    }

    override func prefersStatusBarHidden() -> Bool {
        return true
    }

    // MARK: - Setup session and preview
    func setupSession() {
        captureSession.sessionPreset = AVCaptureSessionPresetHigh

        let camera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
        do {
            let input = try AVCaptureDeviceInput(device: camera)
            if captureSession.canAddInput(input) {
                captureSession.addInput(input)
                activeInput = input
            }
        } catch {
            print("Error setting device input: \(error)")
        }

        // Still image output configured for JPEG (AVCaptureStillImageOutput,
        // the pre-iOS 10 capture API).
        imageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
        if captureSession.canAddOutput(imageOutput) {
            captureSession.addOutput(imageOutput)
        }
    }

    func setupPreview() {
        // Configure previewLayer
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = camPreview.bounds
        previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
        camPreview.layer.addSublayer(previewLayer)

        // Attach tap recognizers for focus & exposure:
        // single tap = focus, double tap = exposure, two-finger double tap = reset.
        let tapForFocus = UITapGestureRecognizer(target: self, action: "tapToFocus:")
        tapForFocus.numberOfTapsRequired = 1

        let tapForExposure = UITapGestureRecognizer(target: self, action: "tapToExpose:")
        tapForExposure.numberOfTapsRequired = 2

        let tapForReset = UITapGestureRecognizer(target: self, action: "resetFocusAndExposure")
        tapForReset.numberOfTapsRequired = 2
        tapForReset.numberOfTouchesRequired = 2

        camPreview.addGestureRecognizer(tapForFocus)
        camPreview.addGestureRecognizer(tapForExposure)
        camPreview.addGestureRecognizer(tapForReset)

        // The single-tap recognizer waits for the double tap to fail first.
        tapForFocus.requireGestureRecognizerToFail(tapForExposure)

        // Create marker views.
        focusMarker = imageViewWithImage("Focus_Point")
        exposureMarker = imageViewWithImage("Exposure_Point")
        resetMarker = imageViewWithImage("Reset_Point")
        camPreview.addSubview(focusMarker)
        camPreview.addSubview(exposureMarker)
        camPreview.addSubview(resetMarker)
    }

    func startSession() {
        if !captureSession.running {
            dispatch_async(videoQueue()) {
                self.captureSession.startRunning()
            }
        }
    }

    func stopSession() {
        if captureSession.running {
            dispatch_async(videoQueue()) {
                self.captureSession.stopRunning()
            }
        }
    }

    func videoQueue() -> dispatch_queue_t {
        return dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)
    }

    // MARK: - Configure
    @IBAction func switchCameras(sender: AnyObject) {
        if AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo).count > 1 {
            var newPosition: AVCaptureDevicePosition!
            if activeInput.device.position == AVCaptureDevicePosition.Back {
                newPosition = AVCaptureDevicePosition.Front
            } else {
                newPosition = AVCaptureDevicePosition.Back
            }

            var newCamera: AVCaptureDevice!
            let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
            for device in devices {
                if device.position == newPosition {
                    newCamera = device as! AVCaptureDevice
                }
            }

            do {
                let input = try AVCaptureDeviceInput(device: newCamera)
                captureSession.beginConfiguration()
                captureSession.removeInput(activeInput)
                if captureSession.canAddInput(input) {
                    captureSession.addInput(input)
                    activeInput = input
                } else {
                    // Roll back to the previous input if the new one can't be added.
                    captureSession.addInput(activeInput)
                }
                captureSession.commitConfiguration()
            } catch {
                print("Error switching cameras: \(error)")
            }
        }
    }

    // MARK: Focus Methods
    func focusAtPoint(point: CGPoint) {
        let device = activeInput.device
        if device.focusPointOfInterestSupported && device.isFocusModeSupported(AVCaptureFocusMode.AutoFocus) {
            do {
                try device.lockForConfiguration()
                device.focusPointOfInterest = point
                device.focusMode = AVCaptureFocusMode.AutoFocus
                device.unlockForConfiguration()
            } catch {
                print("Error focusing on POI: \(error)")
            }
        }
    }

    func tapToFocus(recognizer: UIGestureRecognizer) {
        if activeInput.device.focusPointOfInterestSupported {
            // Convert the tap location in the preview to device coordinates.
            let point = recognizer.locationInView(camPreview)
            let pointOfInterest = previewLayer.captureDevicePointOfInterestForPoint(point)
            showMarkerAtPoint(point, marker: focusMarker)
            focusAtPoint(pointOfInterest)
        }
    }

    // MARK: Exposure Methods
    func exposeAtPoint(point: CGPoint) {
        let device = activeInput.device
        if device.exposurePointOfInterestSupported && device.isExposureModeSupported(AVCaptureExposureMode.ContinuousAutoExposure) {
            do {
                try device.lockForConfiguration()
                device.exposurePointOfInterest = point
                device.exposureMode = AVCaptureExposureMode.ContinuousAutoExposure
                // Watch adjustingExposure so the exposure can be locked once it settles.
                if device.isExposureModeSupported(AVCaptureExposureMode.Locked) {
                    device.addObserver(self, forKeyPath: "adjustingExposure", options: NSKeyValueObservingOptions.New, context: &adjustingExposureContext)
                }
                // Always release the configuration lock, even when Locked is unsupported.
                device.unlockForConfiguration()
            } catch {
                print("Error exposing on POI: \(error)")
            }
        }
    }

    override func observeValueForKeyPath(keyPath: String?, ofObject object: AnyObject?, change: [String : AnyObject]?, context: UnsafeMutablePointer<Void>) {
        if context == &adjustingExposureContext {
            let device = object as! AVCaptureDevice
            // Once the device has finished adjusting, stop observing and lock the exposure.
            if !device.adjustingExposure && device.isExposureModeSupported(AVCaptureExposureMode.Locked) {
                device.removeObserver(self, forKeyPath: "adjustingExposure", context: &adjustingExposureContext)
                dispatch_async(dispatch_get_main_queue()) {
                    do {
                        try device.lockForConfiguration()
                        device.exposureMode = AVCaptureExposureMode.Locked
                        device.unlockForConfiguration()
                    } catch {
                        print("Error locking exposure: \(error)")
                    }
                }
            }
        } else {
            super.observeValueForKeyPath(keyPath, ofObject: object, change: change, context: context)
        }
    }

    func tapToExpose(recognizer: UIGestureRecognizer) {
        if activeInput.device.exposurePointOfInterestSupported {
            let point = recognizer.locationInView(camPreview)
            let pointOfInterest = previewLayer.captureDevicePointOfInterestForPoint(point)
            showMarkerAtPoint(point, marker: exposureMarker)
            exposeAtPoint(pointOfInterest)
        }
    }

    // MARK: Reset Focus and Exposure
    func resetFocusAndExposure() {
        let device = activeInput.device
        let focusMode = AVCaptureFocusMode.ContinuousAutoFocus
        let exposureMode = AVCaptureExposureMode.ContinuousAutoExposure

        let canResetFocus = device.focusPointOfInterestSupported && device.isFocusModeSupported(focusMode)
        let canResetExposure = device.exposurePointOfInterestSupported && device.isExposureModeSupported(exposureMode)

        // (0.5, 0.5) is the center of the frame in device coordinates.
        let center = CGPoint(x: 0.5, y: 0.5)

        if canResetFocus && canResetExposure {
            let markerCenter = previewLayer.pointForCaptureDevicePointOfInterest(center)
            showMarkerAtPoint(markerCenter, marker: resetMarker)
        }

        do {
            try device.lockForConfiguration()
            if canResetFocus {
                device.focusMode = focusMode
                device.focusPointOfInterest = center
            }
            if canResetExposure {
                device.exposureMode = exposureMode
                device.exposurePointOfInterest = center
            }
            device.unlockForConfiguration()
        } catch {
            print("Error resetting focus & exposure: \(error)")
        }
    }

    // MARK: Flash Modes
    @IBAction func setFlashMode(sender: AnyObject) {
    }
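
    // setFlashMode(_:) is left empty in the original gist. Below is a minimal,
    // hypothetical sketch of how it could be implemented with the pre-iOS 10
    // AVCaptureDevice flash API. The helper name, the Off -> On -> Auto cycling
    // order, and the label strings are assumptions, not part of the original code.
    func cycleFlashMode() {
        let device = activeInput.device
        guard device.hasFlash else { return }

        // Pick the next mode in the cycle and a matching label.
        let nextMode: AVCaptureFlashMode
        let nextTitle: String
        switch device.flashMode {
        case .Off:  nextMode = .On;   nextTitle = "On"
        case .On:   nextMode = .Auto; nextTitle = "Auto"
        case .Auto: nextMode = .Off;  nextTitle = "Off"
        }
        guard device.isFlashModeSupported(nextMode) else { return }

        do {
            try device.lockForConfiguration()
            device.flashMode = nextMode
            device.unlockForConfiguration()
            flashLabel.text = nextTitle
        } catch {
            print("Error setting flash mode: \(error)")
        }
    }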

    // MARK: - Capture photo
    @IBAction func capturePhoto(sender: AnyObject) {
        let connection = imageOutput.connectionWithMediaType(AVMediaTypeVideo)
        if connection.supportsVideoOrientation {
            connection.videoOrientation = currentVideoOrientation()
        }

        imageOutput.captureStillImageAsynchronouslyFromConnection(connection) { (sampleBuffer: CMSampleBuffer!, error: NSError!) in
            if sampleBuffer != nil {
                let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
                let image = UIImage(data: imageData)
                let photoBomb = self.penguinPhotoBomb(image!)
                self.savePhotoToLibrary(photoBomb)
            } else {
                print("Error capturing photo: \(error.localizedDescription)")
            }
        }
    }

    // MARK: - Helpers
    func savePhotoToLibrary(image: UIImage) {
        let photoLibrary = PHPhotoLibrary.sharedPhotoLibrary()
        photoLibrary.performChanges({
            PHAssetChangeRequest.creationRequestForAssetFromImage(image)
        }) { (success: Bool, error: NSError?) -> Void in
            if success {
                // Set thumbnail
                self.setPhotoThumbnail(image)
            } else {
                print("Error writing to photo library: \(error!.localizedDescription)")
            }
        }
    }

    func setPhotoThumbnail(image: UIImage) {
        dispatch_async(dispatch_get_main_queue()) { () -> Void in
            self.thumbnail.setBackgroundImage(image, forState: UIControlState.Normal)
            self.thumbnail.layer.borderColor = UIColor.whiteColor().CGColor
            self.thumbnail.layer.borderWidth = 1.0
        }
    }

    func penguinPhotoBomb(image: UIImage) -> UIImage {
        UIGraphicsBeginImageContextWithOptions(image.size, true, 0.0)
        image.drawAtPoint(CGPoint(x: 0, y: 0))

        // Composite Penguin
        let penguinImage = UIImage(named: "Penguin_\(randomInt(4))")

        var xFactor: CGFloat
        if randomFloat(from: 0.0, to: 1.0) >= 0.5 {
            xFactor = randomFloat(from: 0.0, to: 0.25)
        } else {
            xFactor = randomFloat(from: 0.75, to: 1.0)
        }

        var yFactor: CGFloat
        if image.size.width < image.size.height {
            yFactor = 0.0
        } else {
            yFactor = 0.35
        }

        let penguinX = (image.size.width * xFactor) - (penguinImage!.size.width / 2)
        let penguinY = (image.size.height * 0.5) - (penguinImage!.size.height * yFactor)
        let penguinOrigin = CGPoint(x: penguinX, y: penguinY)
        penguinImage?.drawAtPoint(penguinOrigin)

        let finalImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        return finalImage
    }

    func imageViewWithImage(name: String) -> UIImageView {
        let view = UIImageView()
        let image = UIImage(named: name)
        view.image = image
        view.sizeToFit()
        view.hidden = true
        return view
    }

    func showMarkerAtPoint(point: CGPoint, marker: UIImageView) {
        marker.center = point
        marker.hidden = false
        UIView.animateWithDuration(0.15,
            delay: 0.0,
            options: UIViewAnimationOptions.CurveEaseInOut,
            animations: { () -> Void in
                marker.layer.transform = CATransform3DMakeScale(0.5, 0.5, 1.0)
            }) { (_) -> Void in
                let delay = 0.5
                let popTime = dispatch_time(DISPATCH_TIME_NOW, Int64(delay * Double(NSEC_PER_SEC)))
                dispatch_after(popTime, dispatch_get_main_queue(), { () -> Void in
                    marker.hidden = true
                    marker.transform = CGAffineTransformIdentity
                })
        }
    }

    override func prepareForSegue(segue: UIStoryboardSegue, sender: AnyObject?) {
        if segue.identifier == "QuickLookSegue" {
            let quickLook = segue.destinationViewController as! QuickLookViewController

            if let image = thumbnail.backgroundImageForState(UIControlState.Normal) {
                quickLook.photoImage = image
            } else {
                quickLook.photoImage = UIImage(named: "Penguin")
            }
        }
    }

    func currentVideoOrientation() -> AVCaptureVideoOrientation {
        var orientation: AVCaptureVideoOrientation
        switch UIDevice.currentDevice().orientation {
        case .Portrait:
            orientation = AVCaptureVideoOrientation.Portrait
        case .LandscapeRight:
            orientation = AVCaptureVideoOrientation.LandscapeLeft
        case .PortraitUpsideDown:
            orientation = AVCaptureVideoOrientation.PortraitUpsideDown
        default:
            orientation = AVCaptureVideoOrientation.LandscapeRight
        }
        return orientation
    }

    func randomFloat(from from: CGFloat, to: CGFloat) -> CGFloat {
        let rand: CGFloat = CGFloat(Float(arc4random()) / 0xFFFFFFFF)
        return rand * (to - from) + from
    }

    func randomInt(n: Int) -> Int {
        return Int(arc4random_uniform(UInt32(n)))
    }
}
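
// The segue in prepareForSegue(_:sender:) expects a QuickLookViewController with
// a photoImage property, which is not part of this gist. The class below is a
// minimal, assumed stand-in (a full-screen image view) so the snippet compiles
// on its own; the real controller is presumably laid out in the storyboard
// behind the "QuickLookSegue" identifier.
class QuickLookViewController: UIViewController {
    var photoImage: UIImage?

    private let imageView = UIImageView()

    override func viewDidLoad() {
        super.viewDidLoad()
        imageView.frame = view.bounds
        imageView.autoresizingMask = [.FlexibleWidth, .FlexibleHeight]
        imageView.contentMode = .ScaleAspectFit
        imageView.image = photoImage
        view.addSubview(imageView)
    }
}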