
ios - How to use AVCapturePhotoOutput




Here is my full implementation:

import UIKit
import AVFoundation

class ViewController: UIViewController, AVCapturePhotoCaptureDelegate {

    var captureSession: AVCaptureSession!
    var cameraOutput: AVCapturePhotoOutput!
    var previewLayer: AVCaptureVideoPreviewLayer!

    @IBOutlet weak var capturedImage: UIImageView!
    @IBOutlet weak var previewView: UIView!

    override func viewDidLoad() {
        super.viewDidLoad()
        captureSession = AVCaptureSession()
        captureSession.sessionPreset = AVCaptureSessionPresetPhoto
        cameraOutput = AVCapturePhotoOutput()

        let device = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
        if let input = try? AVCaptureDeviceInput(device: device) {
            if captureSession.canAddInput(input) {
                captureSession.addInput(input)
                if captureSession.canAddOutput(cameraOutput) {
                    captureSession.addOutput(cameraOutput)
                    previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                    previewLayer.frame = previewView.bounds
                    previewView.layer.addSublayer(previewLayer)
                    captureSession.startRunning()
                }
            } else {
                print("issue here : captureSession.canAddInput")
            }
        } else {
            print("some problem here")
        }
    }

    // Take picture button
    @IBAction func didPressTakePhoto(_ sender: UIButton) {
        let settings = AVCapturePhotoSettings()
        let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first!
        let previewFormat = [
            kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
            kCVPixelBufferWidthKey as String: 160,
            kCVPixelBufferHeightKey as String: 160
        ]
        settings.previewPhotoFormat = previewFormat
        cameraOutput.capturePhoto(with: settings, delegate: self)
    }

    // Callback from the take-picture call
    func capture(_ captureOutput: AVCapturePhotoOutput,
                 didFinishProcessingPhotoSampleBuffer photoSampleBuffer: CMSampleBuffer?,
                 previewPhotoSampleBuffer: CMSampleBuffer?,
                 resolvedSettings: AVCaptureResolvedPhotoSettings,
                 bracketSettings: AVCaptureBracketedStillImageSettings?,
                 error: Error?) {
        if let error = error {
            print("error occurred : \(error.localizedDescription)")
        }
        if let sampleBuffer = photoSampleBuffer,
           let previewBuffer = previewPhotoSampleBuffer,
           let dataImage = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) {
            print(UIImage(data: dataImage)?.size as Any)

            let dataProvider = CGDataProvider(data: dataImage as CFData)
            let cgImageRef: CGImage! = CGImage(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: .defaultIntent)
            let image = UIImage(cgImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.right)
            self.capturedImage.image = image
        } else {
            print("some error here")
        }
    }

    // Use this method wherever you need to check the camera permission state
    func askPermission() {
        print("here")
        let cameraPermissionStatus = AVCaptureDevice.authorizationStatus(forMediaType: AVMediaTypeVideo)

        switch cameraPermissionStatus {
        case .authorized:
            print("Already Authorized")
        case .denied:
            print("denied")
            let alert = UIAlertController(title: "Sorry :(",
                                          message: "But could you please grant permission for camera within device settings",
                                          preferredStyle: .alert)
            let action = UIAlertAction(title: "Ok", style: .cancel, handler: nil)
            alert.addAction(action)
            present(alert, animated: true, completion: nil)
        case .restricted:
            print("restricted")
        default:
            AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeVideo, completionHandler: { [weak self] (granted: Bool) -> Void in
                if granted {
                    // User granted
                    print("User granted")
                    DispatchQueue.main.async {
                        // Do whatever you need on the main thread
                    }
                } else {
                    // User rejected
                    print("User Rejected")
                    DispatchQueue.main.async {
                        let alert = UIAlertController(title: "WHY?",
                                                      message: "Camera it is the main feature of our application",
                                                      preferredStyle: .alert)
                        let action = UIAlertAction(title: "Ok", style: .cancel, handler: nil)
                        alert.addAction(action)
                        self?.present(alert, animated: true, completion: nil)
                    }
                }
            })
        }
    }
}
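One thing to watch in the implementation above: previewLayer.frame is assigned in viewDidLoad, before Auto Layout has resolved previewView's final bounds, so the preview can come out misplaced or mis-sized on some devices. A minimal sketch of one way to handle it, assuming the same previewLayer and previewView properties as above:

// Keep the preview layer sized to its container once layout has settled.
override func viewDidLayoutSubviews() {
    super.viewDidLayoutSubviews()
    previewLayer?.frame = previewView.bounds
}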

I have been working on using a custom camera, and I recently upgraded to Xcode 8 beta along with Swift 3. I originally had this:

var stillImageOutput: AVCaptureStillImageOutput?

However, I now receive the warning:

'AVCaptureStillImageOutput' was deprecated in iOS 10.0: Use AVCapturePhotoOutput instead

As this is fairly new, I have not seen much information on it. Here is my current code:

var captureSession: AVCaptureSession?
var stillImageOutput: AVCaptureStillImageOutput?
var previewLayer: AVCaptureVideoPreviewLayer?

func clickPicture() {
    if let videoConnection = stillImageOutput?.connection(withMediaType: AVMediaTypeVideo) {
        videoConnection.videoOrientation = .portrait

        stillImageOutput?.captureStillImageAsynchronously(from: videoConnection, completionHandler: { (sampleBuffer, error) -> Void in
            if sampleBuffer != nil {
                let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
                let dataProvider = CGDataProvider(data: imageData!)
                let cgImageRef = CGImage(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: .defaultIntent)
                let image = UIImage(cgImage: cgImageRef!, scale: 1, orientation: .right)
            }
        })
    }
}

I have tried looking at AVCapturePhotoCaptureDelegate, but I am not quite sure how to use it. Does anyone know how to use this? Thanks.


In iOS 11, "photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?)" is deprecated.

Use the following method instead:

func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
    let imageData = photo.fileDataRepresentation()
    if let data = imageData, let img = UIImage(data: data) {
        print(img)
    }
}
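If you want the data handed to this callback to be JPEG (rather than the device default, which can be HEVC/HEIF on newer hardware), you can ask for it when creating the settings. A minimal sketch, assuming an already configured AVCapturePhotoOutput named cameraOutput as in the other answers:

// Request a JPEG capture explicitly (iOS 11+); fall back to the defaults otherwise.
let settings: AVCapturePhotoSettings
if cameraOutput.availablePhotoCodecTypes.contains(.jpeg) {
    settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
} else {
    settings = AVCapturePhotoSettings()
}
cameraOutput.capturePhoto(with: settings, delegate: self)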



The capture delegate function has been changed to photoOutput. Here is the updated function for Swift 4:

func photoOutput(_ output: AVCapturePhotoOutput,
                 didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?,
                 previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?,
                 resolvedSettings: AVCaptureResolvedPhotoSettings,
                 bracketSettings: AVCaptureBracketedStillImageSettings?,
                 error: Error?) {
    if let error = error {
        print(error.localizedDescription)
    }

    if let sampleBuffer = photoSampleBuffer,
       let previewBuffer = previewPhotoSampleBuffer,
       let dataImage = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) {
        print("image: \(String(describing: UIImage(data: dataImage)?.size))") // Your Image
    }
}


Updated to Swift 4. Hi, it's really easy to use AVCapturePhotoOutput.

You need the AVCapturePhotoCaptureDelegate, which returns the CMSampleBuffer.

You can also get a preview image if you tell the AVCapturePhotoSettings the preview format:

class CameraCaptureOutput: NSObject, AVCapturePhotoCaptureDelegate {

    let cameraOutput = AVCapturePhotoOutput()

    func capturePhoto() {
        let settings = AVCapturePhotoSettings()
        let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first!
        let previewFormat = [kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
                             kCVPixelBufferWidthKey as String: 160,
                             kCVPixelBufferHeightKey as String: 160]
        settings.previewPhotoFormat = previewFormat
        self.cameraOutput.capturePhoto(with: settings, delegate: self)
    }

    func photoOutput(_ output: AVCapturePhotoOutput,
                     didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?,
                     previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?,
                     resolvedSettings: AVCaptureResolvedPhotoSettings,
                     bracketSettings: AVCaptureBracketedStillImageSettings?,
                     error: Error?) {
        if let error = error {
            print(error.localizedDescription)
        }

        if let sampleBuffer = photoSampleBuffer,
           let previewBuffer = previewPhotoSampleBuffer,
           let dataImage = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) {
            print("image: \(String(describing: UIImage(data: dataImage)?.size))") // Your Image
        }
    }
}
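The previewPhotoSampleBuffer that arrives in the delegate carries the small preview requested above as a pixel buffer rather than JPEG data, so turning it into a UIImage takes an extra step. A minimal sketch of one way to do that; the helper name is mine, not part of the API:

import AVFoundation
import CoreImage
import UIKit

// Convert the preview sample buffer delivered to the delegate into a UIImage.
func previewImage(from sampleBuffer: CMSampleBuffer) -> UIImage? {
    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return nil }
    let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
    let context = CIContext()
    guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent) else { return nil }
    return UIImage(cgImage: cgImage)
}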

For more information, see https://developer.apple.com/reference/AVFoundation/AVCapturePhotoOutput

Note: you must add the AVCapturePhotoOutput to the AVCaptureSession before taking the picture. So something like: session.addOutput(output), and then: output.capturePhoto(with: settings, delegate: self). Thanks @BigHeadCreations.
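A minimal sketch of that wiring, assuming self conforms to AVCapturePhotoCaptureDelegate and a camera device is available (session and output are just local names here):

// Minimal capture-session wiring: the output must be attached before capturePhoto is called.
let session = AVCaptureSession()
let output = AVCapturePhotoOutput()

if let device = AVCaptureDevice.default(for: .video),
   let input = try? AVCaptureDeviceInput(device: device),
   session.canAddInput(input), session.canAddOutput(output) {
    session.addInput(input)
    session.addOutput(output)   // attach the output first
    session.startRunning()
    output.capturePhoto(with: AVCapturePhotoSettings(), delegate: self)
}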


I took @Aleksey Timoshchenko's excellent answer and updated it to Swift 4.x.

Note that for my use case I allow the user to take multiple photos, which is why I save them in the images array.

Note that you need to hook up the takePhoto @IBAction method via your storyboard or in code. In my case, I use a storyboard.

As of iOS 11, the AVCapturePhotoOutput.jpegPhotoDataRepresentation used in @Aleksey Timoshchenko's answer is deprecated.

Swift 4.x

class CameraVC: UIViewController {

    @IBOutlet weak var cameraView: UIView!

    var images = [UIImage]()

    var captureSession: AVCaptureSession!
    var cameraOutput: AVCapturePhotoOutput!
    var previewLayer: AVCaptureVideoPreviewLayer!

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        startCamera()
    }

    func startCamera() {
        captureSession = AVCaptureSession()
        captureSession.sessionPreset = AVCaptureSession.Preset.photo
        cameraOutput = AVCapturePhotoOutput()

        if let device = AVCaptureDevice.default(for: .video),
           let input = try? AVCaptureDeviceInput(device: device) {
            if captureSession.canAddInput(input) {
                captureSession.addInput(input)
                if captureSession.canAddOutput(cameraOutput) {
                    captureSession.addOutput(cameraOutput)
                    previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                    previewLayer.frame = cameraView.bounds
                    cameraView.layer.addSublayer(previewLayer)
                    captureSession.startRunning()
                }
            } else {
                print("issue here : captureSession.canAddInput")
            }
        } else {
            print("some problem here")
        }
    }

    @IBAction func takePhoto(_ sender: UITapGestureRecognizer) {
        let settings = AVCapturePhotoSettings()
        let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first!
        let previewFormat = [
            kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
            kCVPixelBufferWidthKey as String: 160,
            kCVPixelBufferHeightKey as String: 160
        ]
        settings.previewPhotoFormat = previewFormat
        cameraOutput.capturePhoto(with: settings, delegate: self)
    }
}

extension CameraVC: AVCapturePhotoCaptureDelegate {
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        if let error = error {
            print("error occurred : \(error.localizedDescription)")
        }
        if let dataImage = photo.fileDataRepresentation() {
            print(UIImage(data: dataImage)?.size as Any)

            let dataProvider = CGDataProvider(data: dataImage as CFData)
            let cgImageRef: CGImage! = CGImage(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: .defaultIntent)
            let image = UIImage(cgImage: cgImageRef, scale: 1.0, orientation: UIImage.Orientation.right)

            // Save image in array / do whatever you want to do with the image here
            self.images.append(image)
        } else {
            print("some error here")
        }
    }
}
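Since startCamera() builds a fresh session every time the view appears, you may also want to stop it when the screen goes away. A minimal sketch of one way to do that inside CameraVC; this is my addition, not part of the original answer:

// Stop the running session when the screen is dismissed to release the camera.
override func viewWillDisappear(_ animated: Bool) {
    super.viewWillDisappear(animated)
    if captureSession?.isRunning == true {
        captureSession.stopRunning()
    }
}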