iOS Swift Camera AVFoundation

Swift and AVFoundation

There are several ways to implement a camera on iOS; this sample uses AVFoundation directly.

Sample

Before starting, we need to link AVFoundation.framework
avfoundation

Sources

import UIKit
import AVFoundation

/// Minimal camera view controller: finds the back camera, wires it into an
/// AVCaptureSession, shows a full-screen preview, and offers a still-photo helper.
class ViewController: UIViewController {

    // Still-image output; created by initOutput() before the session is built.
    var imageOutput : AVCaptureStillImageOutput?

    // Capture session; nil until initSession(_:) succeeds.
    var session: AVCaptureSession?

    override func viewDidLoad() {
        super.viewDidLoad()

        self.view.backgroundColor = UIColor.whiteColor()

        // Build the capture pipeline: device -> input -> session -> preview layer.
        let captureDevice : AVCaptureDevice? = initCaptureDevice()
        // Prepare output before the session so initSession can attach it.
        initOutput()
        if (captureDevice != nil) {
            let deviceInput : AVCaptureInput? = initInputDevice(captureDevice!)
            if (deviceInput != nil) {
                initSession(deviceInput!)
                // Preview fills the whole view; resize here if a smaller preview is wanted.
                let previewLayer: AVCaptureVideoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.session)
                previewLayer.frame = self.view.bounds
                self.view.layer.addSublayer(previewLayer)
                self.session?.startRunning()
            }
            else {
                // Fix: this failure (e.g. camera permission denied) was previously silent.
                print("Could not create camera input")
            }
        }
        else {
            print("Missing Camera")
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

    override func viewWillLayoutSubviews() {
        // Fix: the super call was missing.
        super.viewWillLayoutSubviews()
        // layout button etc...
    }


    /// Returns the back-facing camera, or nil when the device has none
    /// (e.g. the simulator).
    private func initCaptureDevice() -> AVCaptureDevice? {
        var captureDevice: AVCaptureDevice?
        let devices: NSArray = AVCaptureDevice.devices()

        // Pick the back camera out of all capture devices.
        for device: AnyObject in devices {
            if device.position == AVCaptureDevicePosition.Back {
                captureDevice = device as? AVCaptureDevice
            }
        }
        return captureDevice
    }


    /// Wraps the capture device in a session input.
    /// Returns nil when input creation throws (e.g. access not authorized).
    private func initInputDevice(captureDevice : AVCaptureDevice) -> AVCaptureInput? {
        var deviceInput : AVCaptureInput?
        do {
            deviceInput = try AVCaptureDeviceInput(device: captureDevice)
        }
        catch _ {
            deviceInput = nil
        }
        return deviceInput
    }

    /// Creates the still-image output attached to the session later.
    private func initOutput() {
        self.imageOutput = AVCaptureStillImageOutput()
    }

    /// Builds the capture session with the photo preset.
    /// Fix: guard addInput/addOutput with canAdd checks (AVFoundation raises an
    /// exception otherwise) and avoid force-unwrapping imageOutput.
    private func initSession(deviceInput: AVCaptureInput) {

        let newSession = AVCaptureSession()
        newSession.sessionPreset = AVCaptureSessionPresetPhoto
        if newSession.canAddInput(deviceInput) {
            newSession.addInput(deviceInput)
        }
        if let output = self.imageOutput where newSession.canAddOutput(output) {
            newSession.addOutput(output)
        }
        self.session = newSession

        // session preset
        // https://developer.apple.com/library/prerelease/ios/documentation/AVFoundation/Reference/AVCaptureSession_Class/index.html#//apple_ref/doc/constant_group/Video_Input_Presets
    }


    /*
     *  Take Photo Helper
     */

    /// Captures a still frame from the video connection and decodes it
    /// into a UIImage (post-processing/saving still TODO).
    private func takePhoto() {
        let videoConnection : AVCaptureConnection? = self.imageOutput?.connectionWithMediaType(AVMediaTypeVideo)
        if (videoConnection != nil) {
            self.imageOutput?.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: { (imageDataSampleBuffer, error) -> Void in
                if (imageDataSampleBuffer != nil) {
                    // Capture data as jpeg format
                    let imageData : NSData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageDataSampleBuffer)

                    // Create UIImage from JPEG
                    let image = UIImage(data: imageData)

                    //UIImageWriteToSavedPhotosAlbum(image!, self, nil, nil)

                    // Fix: silence the unused-variable warning until resizing/saving is implemented.
                    _ = image

                    // TODO change size etc...
                }
            })
        }
    }
}