
How can video be recorded automatically in iOS without any user interaction with the camera controls? The requirement is to record video from the front camera as soon as a view opens, with the camera controls disabled, and to record and save the video automatically when navigating to and back from that view. Recording video with a custom camera automatically in Swift 3.0.

Answer


Swift 3.0

I finally solved this problem: just copy and paste the whole code below, connect the outlet, and it works fine.
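
One practical note before the code: because nothing is ever tapped, the app must already hold camera permission when the view appears, which means the Info.plist needs an NSCameraUsageDescription entry (and NSMicrophoneUsageDescription if audio is captured). Below is a minimal sketch of how access could be requested up front; requestCameraAccess is a hypothetical helper, not part of the answer, and it simply defers whatever starts the session until access is granted.

    import AVFoundation

    // Hypothetical helper (assumption, not part of the original answer):
    // ask for camera access once and only then start the capture session.
    func requestCameraAccess(onGranted: () -> Void) {
     switch AVCaptureDevice.authorizationStatusForMediaType(AVMediaTypeVideo) {
     case .Authorized:
      onGranted()
     case .NotDetermined:
      AVCaptureDevice.requestAccessForMediaType(AVMediaTypeVideo) { granted in
       dispatch_async(dispatch_get_main_queue()) {
        if granted { onGranted() }
       }
      }
     default:
      print("camera access denied or restricted")
     }
    }

With a helper like this, viewDidLoad() could call requestCameraAccess { self.createSession() } instead of calling createSession() directly.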

import UIKit
import AVFoundation
import SVProgressHUD

class TestViewController: UIViewController {

     @IBOutlet weak var myView: UIView! 

     var session: AVCaptureSession? 
     var userreponsevideoData = NSData() 
     var userreponsethumbimageData = NSData() 

     override func viewDidLoad() { 
      super.viewDidLoad() 
      createSession() 
     } 

     override func viewDidAppear(animated: Bool) { 
      super.viewDidAppear(animated) 
     } 

     func createSession() { 

      var input: AVCaptureDeviceInput?
      let movieFileOutput = AVCaptureMovieFileOutput()
      var prevLayer: AVCaptureVideoPreviewLayer?
      session = AVCaptureSession()
      // Wrap the front camera in a capture input; bail out if it cannot be created.
      do {
       input = try AVCaptureDeviceInput(device: self.cameraWithPosition(.Front)!)
      } catch {
       print("camera input error: \(error)")
       return
      }
      session?.addInput(input)
      prevLayer = AVCaptureVideoPreviewLayer(session: session) 
      prevLayer?.frame.size = myView.frame.size 
      prevLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill 
      prevLayer?.connection.videoOrientation = .Portrait 
      myView.layer.addSublayer(prevLayer!) 
      // Record into Documents/temp.mov.
      let documentsURL = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
      let filemainurl = documentsURL.URLByAppendingPathComponent("temp")!.URLByAppendingPathExtension("mov")


      // Stop automatically after 60 seconds (600/10) or when less than 1 MB of disk space is left.
      let maxDuration: CMTime = CMTimeMake(600, 10)
      movieFileOutput.maxRecordedDuration = maxDuration
      movieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024
      if self.session!.canAddOutput(movieFileOutput) {
       self.session!.addOutput(movieFileOutput)
      }
      // Start the session and begin recording immediately; no user interaction is required.
      session?.startRunning()
      movieFileOutput.startRecordingToOutputFileURL(filemainurl, recordingDelegate: self)

     } 
     // Return the capture device at the requested position (the front camera here).
     func cameraWithPosition(position: AVCaptureDevicePosition) -> AVCaptureDevice? {
      let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) 
      for device in devices { 
       if device.position == position { 
        return device as? AVCaptureDevice 
       } 
      } 
      return nil 
     } 
     @IBAction func pressbackbutton(sender: AnyObject) { 
      session?.stopRunning() 

     } 

    } 
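
The recording above ends on its own when maxRecordedDuration (60 seconds here) or the free-disk-space limit is reached, or when pressbackbutton stops the session. If it should end after a shorter, fixed interval without user interaction, one possible variant is sketched below; stopRecordingAfter is a hypothetical helper, not part of the original answer, and the 10-second delay is only an example.

    // Hypothetical helper (assumption): stop a running recording after a fixed delay.
    extension TestViewController {
     func stopRecordingAfter(seconds: Double, output: AVCaptureMovieFileOutput) {
      let delay = dispatch_time(DISPATCH_TIME_NOW, Int64(seconds * Double(NSEC_PER_SEC)))
      dispatch_after(delay, dispatch_get_main_queue()) { [weak self] in
       output.stopRecording()   // fires didFinishRecordingToOutputFileAtURL below
       self?.session?.stopRunning()
      }
     }
    }

createSession() could call stopRecordingAfter(10, output: movieFileOutput) right after startRecordingToOutputFileURL(_:recordingDelegate:).
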
    extension TestViewController: AVCaptureFileOutputRecordingDelegate 
    { 
     func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: NSURL!, fromConnections connections: [AnyObject]!) { 
      print(fileURL) 
     } 

     func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) { 
      print(outputFileURL) 
      let filemainurl = outputFileURL 

      do 
      { 
       let asset = AVURLAsset(URL: filemainurl, options: nil) 
       print(asset) 
       let imgGenerator = AVAssetImageGenerator(asset: asset) 
       imgGenerator.appliesPreferredTrackTransform = true 
       // Grab the first frame as a thumbnail and keep it as JPEG data.
       let cgImage = try imgGenerator.copyCGImageAtTime(CMTimeMake(0, 1), actualTime: nil)
       let uiImage = UIImage(CGImage: cgImage)
       userreponsethumbimageData = UIImageJPEGRepresentation(uiImage, 0.1)!
       print(userreponsethumbimageData.length)
       print(uiImage)
      } 
      catch let error as NSError 
      { 
       print(error) 
       return 
      } 

      SVProgressHUD.showWithMaskType(SVProgressHUDMaskType.Clear)
      // Re-export the recording as an .mp4 in the temporary directory, removing any leftover file first.
      let tempfilemainurl = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("mergeVideo\(arc4random() % 1000)")!.URLByAppendingPathExtension("mp4")!
      if NSFileManager.defaultManager().fileExistsAtPath(tempfilemainurl.path!) {
       do {
        try NSFileManager.defaultManager().removeItemAtURL(tempfilemainurl)
       } catch { }
      }
      let sourceAsset = AVURLAsset(URL: filemainurl!, options: nil)
      let assetExport: AVAssetExportSession = AVAssetExportSession(asset: sourceAsset, presetName: AVAssetExportPresetMediumQuality)!
      assetExport.outputFileType = AVFileTypeMPEG4
      assetExport.outputURL = tempfilemainurl
      assetExport.exportAsynchronouslyWithCompletionHandler {() -> Void in 
       switch assetExport.status 
       { 
       case AVAssetExportSessionStatus.Completed: 
        dispatch_async(dispatch_get_main_queue(), 
         { 
          do 
          { 
           SVProgressHUD.dismiss()
           self.userreponsevideoData = try NSData(contentsOfURL: tempfilemainurl, options: NSDataReadingOptions())
           print("exported video size: \(self.userreponsevideoData.length) bytes")


          } 
          catch 
          { 
           SVProgressHUD.dismiss()
           print(error) 
          } 
        }) 
       case AVAssetExportSessionStatus.Failed:
        print("failed \(assetExport.error)")
        SVProgressHUD.dismiss()
       case AVAssetExportSessionStatus.Cancelled:
        print("cancelled \(assetExport.error)")
        SVProgressHUD.dismiss()
       default:
        print("export ended with status \(assetExport.status.rawValue)")
        SVProgressHUD.dismiss()
       }

      } 
     } 

    }
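
The question also asks for the video to be saved automatically, and the export above only leaves an .mp4 in the temporary directory. One way to push it into the user's photo library is sketched below; it assumes the Photos framework is linked and that Info.plist contains an NSPhotoLibraryUsageDescription entry, and saveVideoToLibrary is a hypothetical helper, not part of the original answer.

    import Photos

    // Hypothetical helper (assumption): copy the exported file into the photo library.
    func saveVideoToLibrary(fileURL: NSURL) {
     PHPhotoLibrary.sharedPhotoLibrary().performChanges({
      _ = PHAssetChangeRequest.creationRequestForAssetFromVideoAtFileURL(fileURL)
     }, completionHandler: { success, error in
      if success {
       print("video saved to the photo library")
      } else {
       print("saving to the photo library failed: \(error)")
      }
     })
    }

It could be called from the Completed branch of the export, for example self.saveVideoToLibrary(tempfilemainurl), once the asset export has finished.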