


Save video using AVFoundation Swift

You can record your video to a file by creating an AVCaptureMovieFileOutput, adding it to your capture session, and making your ViewController conform to AVCaptureFileOutputRecordingDelegate.

This example records 5 seconds of video to a file called "output.mov" in the app's Documents directory.

import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {

    var captureSession = AVCaptureSession()
    var sessionOutput = AVCaptureStillImageOutput()
    var movieOutput = AVCaptureMovieFileOutput()
    var previewLayer = AVCaptureVideoPreviewLayer()

    @IBOutlet var cameraView: UIView!

    override func viewWillAppear(animated: Bool) {
        self.cameraView = self.view

        let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
        for device in devices {
            if device.position == AVCaptureDevicePosition.Front {
                do {
                    let input = try AVCaptureDeviceInput(device: device as! AVCaptureDevice)
                    if captureSession.canAddInput(input) {
                        captureSession.addInput(input)
                        sessionOutput.outputSettings = [AVVideoCodecKey : AVVideoCodecJPEG]

                        if captureSession.canAddOutput(sessionOutput) {
                            captureSession.addOutput(sessionOutput)

                            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                            previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.Portrait
                            cameraView.layer.addSublayer(previewLayer)

                            previewLayer.position = CGPoint(x: self.cameraView.frame.width / 2, y: self.cameraView.frame.height / 2)
                            previewLayer.bounds = cameraView.frame
                        }

                        captureSession.addOutput(movieOutput)
                        captureSession.startRunning()

                        // Record to "output.mov" in the Documents directory,
                        // removing any leftover file from a previous run first.
                        let paths = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)
                        let fileUrl = paths[0].URLByAppendingPathComponent("output.mov")
                        try? NSFileManager.defaultManager().removeItemAtURL(fileUrl)
                        movieOutput.startRecordingToOutputFileURL(fileUrl, recordingDelegate: self)

                        // Stop recording after 5 seconds.
                        let delayTime = dispatch_time(DISPATCH_TIME_NOW, Int64(5 * Double(NSEC_PER_SEC)))
                        dispatch_after(delayTime, dispatch_get_main_queue()) {
                            print("stopping")
                            self.movieOutput.stopRecording()
                        }
                    }
                } catch {
                    print("Error")
                }
            }
        }
    }

    func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
        print("FINISHED \(error)")
        // save video to camera roll
        if error == nil {
            UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path!, nil, nil, nil)
        }
    }
}

Hi. I followed a course by Jared Davidson to create a custom camera view and save pictures using AVFoundation: https://www.youtube.com/watch?v=w0O3ZGUS3pk

However, I would like to record and save videos instead of images. Can someone help me here? I'm sure it's simple, but Apple's documentation is written in Objective-C and I can't figure it out.

This is my code. Thanks.

import UIKit
import AVFoundation

class ViewController: UIViewController {

    var captureSession = AVCaptureSession()
    var sessionOutput = AVCaptureStillImageOutput()
    var previewLayer = AVCaptureVideoPreviewLayer()

    @IBOutlet var cameraView: UIView!

    override func viewWillAppear(animated: Bool) {
        let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
        for device in devices {
            if device.position == AVCaptureDevicePosition.Front {
                do {
                    let input = try AVCaptureDeviceInput(device: device as! AVCaptureDevice)
                    if captureSession.canAddInput(input) {
                        captureSession.addInput(input)
                        sessionOutput.outputSettings = [AVVideoCodecKey : AVVideoCodecJPEG]

                        if captureSession.canAddOutput(sessionOutput) {
                            captureSession.addOutput(sessionOutput)
                            captureSession.startRunning()

                            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                            previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.Portrait
                            cameraView.layer.addSublayer(previewLayer)

                            previewLayer.position = CGPoint(x: self.cameraView.frame.width / 2, y: self.cameraView.frame.height / 2)
                            previewLayer.bounds = cameraView.frame
                        }
                    }
                } catch {
                    print("Error")
                }
            }
        }
    }

    @IBAction func TakePhoto(sender: AnyObject) {
        if let videoConnection = sessionOutput.connectionWithMediaType(AVMediaTypeVideo) {
            sessionOutput.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: { buffer, error in
                let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer)
                UIImageWriteToSavedPhotosAlbum(UIImage(data: imageData)!, nil, nil, nil)
            })
        }
    }
}


Thanks for this. It was very helpful to me. Here is a version of Rhythmic Fistman's answer ported to Swift 3, with the required import statements and delegate methods.

import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {

    var captureSession = AVCaptureSession()
    var sessionOutput = AVCaptureStillImageOutput()
    var movieOutput = AVCaptureMovieFileOutput()
    var previewLayer = AVCaptureVideoPreviewLayer()

    @IBOutlet var cameraView: UIView!

    override func viewWillAppear(_ animated: Bool) {
        self.cameraView = self.view

        let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo)
        for device in devices! {
            if (device as AnyObject).position == AVCaptureDevicePosition.front {
                do {
                    let input = try AVCaptureDeviceInput(device: device as! AVCaptureDevice)
                    if captureSession.canAddInput(input) {
                        captureSession.addInput(input)
                        sessionOutput.outputSettings = [AVVideoCodecKey : AVVideoCodecJPEG]

                        if captureSession.canAddOutput(sessionOutput) {
                            captureSession.addOutput(sessionOutput)

                            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                            previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait
                            cameraView.layer.addSublayer(previewLayer)

                            previewLayer.position = CGPoint(x: self.cameraView.frame.width / 2, y: self.cameraView.frame.height / 2)
                            previewLayer.bounds = cameraView.frame
                        }

                        captureSession.addOutput(movieOutput)
                        captureSession.startRunning()

                        // Record to "output.mov" in the Documents directory,
                        // removing any leftover file from a previous run first.
                        let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
                        let fileUrl = paths[0].appendingPathComponent("output.mov")
                        try? FileManager.default.removeItem(at: fileUrl)
                        movieOutput.startRecording(toOutputFileURL: fileUrl, recordingDelegate: self)

                        // Stop recording after 5 seconds.
                        let delayTime = DispatchTime.now() + 5
                        DispatchQueue.main.asyncAfter(deadline: delayTime) {
                            print("stopping")
                            self.movieOutput.stopRecording()
                        }
                    }
                } catch {
                    print("Error")
                }
            }
        }
    }

    //MARK: AVCaptureFileOutputRecordingDelegate Methods

    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
    }

    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
        print("FINISHED \(error)")
        // save video to camera roll
        if error == nil {
            UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path, nil, nil, nil)
        }
    }
}


After the line

if (device as AnyObject).position == AVCaptureDevicePosition.front {

add:

// Audio Input
let audioInputDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)

do {
    let audioInput = try AVCaptureDeviceInput(device: audioInputDevice)

    // Add Audio Input
    if captureSession.canAddInput(audioInput) {
        captureSession.addInput(audioInput)
    } else {
        NSLog("Can't Add Audio Input")
    }
} catch let error {
    NSLog("Error Getting Input Device: \(error)")
}

Thanks


For the sound recording problem,

add this code when creating the captureSession:

askMicroPhonePermission(completion: { (isMicrophonePermissionGiven) in
    if isMicrophonePermissionGiven {
        do {
            // Add the audio device input once permission is granted
            try self.captureSession.addInput(AVCaptureDeviceInput(device: self.captureAudio))
        } catch {
            print("Error adding the audio input")
        }
    }
})
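Note that self.captureAudio is not defined in this answer; presumably it holds the default audio capture device. A minimal sketch, assuming the same Swift 3 API used in the audio-input snippet above:

// Assumption: captureAudio is the default microphone device, fetched the
// same way as audioInputDevice in the earlier answer.
let captureAudio = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)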


The askMicroPhonePermission function looks like this:

func askMicroPhonePermission(completion: @escaping (_ success: Bool) -> Void) {
    switch AVAudioSession.sharedInstance().recordPermission() {
    case AVAudioSessionRecordPermission.granted:
        completion(true)
    case AVAudioSessionRecordPermission.denied:
        completion(false) // show alert if required
    case AVAudioSessionRecordPermission.undetermined:
        AVAudioSession.sharedInstance().requestRecordPermission({ (granted) in
            if granted {
                completion(true)
            } else {
                completion(false) // show alert if required
            }
        })
    default:
        completion(false)
    }
}

And you must add the NSMicrophoneUsageDescription key to the Info.plist file.
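For reference, the raw Info.plist entry would look something like this (the usage string is only an example; describe your own app's use of the microphone):

<key>NSMicrophoneUsageDescription</key>
<string>This app records audio while capturing video.</string>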