2015-05-22 1 views
1

Мне интересно, почему мой сеанс захвата начинается медленно, когда приложение запускается. Это происходит не каждый раз, когда я запускаю приложение, поэтому я не уверен, что это просто другие переменные реального телефона или что-то еще. Я не очень хороший параллельный/параллельный программист, поэтому более чем вероятно мое дрянное кодирование :(Capture Session Starts Up Slow

Я бы ВЕЛИКОЕ оценил это, если бы кто-то мог определить, что мешает иногда. Я прочитал, что все звонки с захват сессии может быть заблокирован, поэтому я попытался изо всех сил отправить эти вызовы в другую очередь без каких-либо условий гонки. Я узнал о том, как сделать кодирование таким образом в быстрой форме. here

Вот мой код, где i инициализировать и запустить все: мои очереди - это очередные очереди

/************************************************************************** 
    VIEW DID LOAD 
    ***************************************************************************/ 
    override func viewDidLoad() { 

     super.viewDidLoad() 

     println("Initializing the cameraCaptureDevice with MediaTypeVideo") 

     //------INIT CAMERA CAPTURE DEVICE TO BEGIN WITH------ 
     self.cameraCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo) 

     println("Done initializing camera") 

     var error1: NSError? = nil 

     println("Getting array of available capture devices") 

     //------GRAB ALL OF THE DEVICES------ 
     let devices = AVCaptureDevice.devices() 

     //------FIND THE CAMERA MATCHING THE POSITION------ 
     for device in devices { 

      if device.position == self.cameraCapturePosition { 

       self.cameraCaptureDevice = device as? AVCaptureDevice 

       println("Back camera has been added") 

       self.usingBackCamera = true 
      } 
     } 

     //------ INIT MOVIE FILE OUTPUT ------ 
     self.movieFileOutput = AVCaptureMovieFileOutput() 

     //------SET UP PREVIEW LAYER----- 
     self.videoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.session) 

     if let preview = self.videoPreviewLayer { 

      println("Video Preview Layer set") 

      preview.videoGravity = AVLayerVideoGravityResizeAspectFill 
     } 
     else { 

      println("Video Preview Layer is nil!!! Could not set AVLayerVideoGravityResizeAspectFill") 
     } 

     println("Camera successully can display") 

     //------SET JPEG OUTPUT------ 
     println("Setting JPEG Output") 

     self.stillImageOutput = AVCaptureStillImageOutput() 

     let outputSettings = [ AVVideoCodecKey : AVVideoCodecJPEG ] 

     if let imageOutput = self.stillImageOutput { 

      imageOutput.outputSettings = outputSettings 
     } 
     else { 

      println("still image output is nil, could notset output settings") 
     } 

     println("Successfully configured JPEG Ouput") 

     //------SET MOVIE FILE OUPUT MAX DURATION AND MIN FREE DISK SPACE------ 
     println("Setting Movie File Max Duration") 

     let maxDuration:CMTime = CMTimeMakeWithSeconds(self.totalTime, self.preferredTimeScale) 

     if let movieOutput = self.movieFileOutput { 

      movieOutput.maxRecordedDuration = maxDuration 

      println("Successully set movie file max duration") 
      println("Setting movie file minimun byte space") 

      movieOutput.minFreeDiskSpaceLimit = self.minFreeSpace 

      println("Successfully added minium free space") 
     } 
     else { 

      println("Movie file output is nil, could not set maximum recording duration or minimum free space") 
     } 

     //------ GRAB THE DEVICE'S SUPPORTED FRAME RATE RANGES ------ 
     if let device = self.cameraCaptureDevice { 

      println("Setting frame rates") 

      let supportedFrameRateRanges = device.activeFormat.videoSupportedFrameRateRanges 

      for range in supportedFrameRateRanges { 

       // Workaround until finding a better way 
       // frame rate should be 1 - 30 

       if (range.minFrameRate >= 1 || range.minFrameRate <= 30) == true && (range.maxFrameRate <= 30 || range.maxFrameRate >= 1) == true { 

        println("Frame rate is supported") 

        self.frameRateSupported = true 
       } 
       else { 

        println("Frame rate is not supported") 

        self.frameRateSupported = false 
       } 
      } 

      var error: NSError? 

      if frameRateSupported && device.lockForConfiguration(&error) { 

       device.activeVideoMaxFrameDuration = self.frameDuration 
       device.activeVideoMinFrameDuration = self.frameDuration 
       device.unlockForConfiguration() 

       println("SUCCESS") 
      } 
      else { 

       println("frame rate is not supported or there was an error") 

       if let err = error { 

        println("There was an error setting framerate: \(err.description)") 
       } 
       else { 

        println("Frame rate is not supported") 
       } 
      } 
     } 
     else { 

      println("camera capture device is nil, could not set frame rate") 
     } 

     //------ INIT AUDIO CAPTURE DEVICE ------ 
     self.audioCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio) 

     var error2: NSError? = nil 
     let audioDeviceInput = AVCaptureDeviceInput(device: self.audioCaptureDevice, error: &error2) 

     //------ADD CAMERA CAPTURE DEVICE TO CAPTURE SESSION INPUT------ 
     if let captureDevice = self.cameraCaptureDevice { 

      if error1 == nil { 

       println("Trying to add video input") 

       self.videoDeviceInput = AVCaptureDeviceInput(device: captureDevice, error: &error1) 

      } 
      else { 

       println("Could not create video input") 
      } 
     } 
     else { 

      println("Could not create camera capture device") 
     } 

     //------ ADD INPUTS AND OUTPUTS AS WELL AS OTHER SESSION CONFIGURATIONS------ 
     dispatch_async(self.sessionQueue) { 

      println("Trying to add audio output") 

      if let input = audioDeviceInput { 

       self.session.addInput(audioDeviceInput) 

       println("Successfully added audio output") 
      } 
      else { 
       println("Could not create audio input") 
      } 

      if self.session.canAddInput(self.videoDeviceInput) { 

       self.session.addInput(self.videoDeviceInput) 

       println("Successfully added video input") 
      } 
      else { 

       println("Could not add video input") 
      } 


      println("initializing video capture session") 

      //----- SET THE IMAGE QUALITY/RESOLUTION ----- 
      //Options: 
      // AVCaptureSessionPresetHigh - Highest recording quality (varies per device) 
      // AVCaptureSessionPresetMedium - Suitable for WiFi sharing (actual values may change) 
      // AVCaptureSessionPresetLow - Suitable for 3G sharing (actual values may change) 
      // AVCaptureSessionPreset640x480 - 640x480 VGA (check its supported before setting it) 
      // AVCaptureSessionPreset1280x720 - 1280x720 720p HD (check its supported before setting it) 
      // AVCaptureSessionPresetPhoto - Full photo resolution (not supported for video output) 

      if self.session.canSetSessionPreset(AVCaptureSessionPresetHigh) { 

       println("Capture Session preset is set to High Quality") 

       self.session.sessionPreset = AVCaptureSessionPresetHigh 
      } 
      else { 

       println("Capture Session preset is set to Medium Quality") 

       self.session.sessionPreset = AVCaptureSessionPresetMedium 
      } 

      //------ADD JPEG OUTPUT AND MOVIE FILE OUTPUT TO SESSION OUTPUT------ 
      println("Adding still image and movie file output") 

      if self.session.canAddOutput(self.stillImageOutput) && self.session.canAddOutput(self.movieFileOutput) { 

       self.session.addOutput(self.stillImageOutput) 
       self.session.addOutput(self.movieFileOutput) 

       println("Successfully added outputs") 
      } 
      else { 

       //------ IF OUTPUTS COULD NOT BE ADDED, THEN APP SHOULD NOT RUN ON DEVICE!!!!! ------ 
       println("Could Not Add still image and movie file output") 
      } 

      //------WE CALL A METHOD AS IT ALSO HAS TO BE DONE AFTER CHANGING CAMERA------ 
      self.setCameraOutputProperties() 

      //------DISPLAY PREVIEW LAYER------ 
      if let videoLayer = self.videoPreviewLayer { 

       self.videoPreviewView.layer.addSublayer(self.videoPreviewLayer) 

       println("Video Preview Layer Added as sublayer") 

       self.videoPreviewLayer!.frame = self.videoPreviewView.layer.frame 

       println("Video Preview frame set") 
      } 
      else { 

       println("videoPreviewLayer is nil, could not add sublayer or set frame") 
      } 

      self.view.sendSubviewToBack(self.videoPreviewView) 
     } 
    } 






    /************************************************************************** 
    VIEW DID APPEAR 
    ***************************************************************************/ 
    override func viewDidAppear(animated: Bool) { 

     println("About to start the capture session") 

     //------INITIALIZE THE CAMERA------ 
     dispatch_async(self.startSessionQueue) { 

      if self.beenHereBefore == false { 

       println("Have not seen this view before.... starting the session") 

       //------ START THE PREVIEW SESSION ------ 
       self.startSession() 

       /* 
       CHECK TO MAKE SURE THAT THIS CODE IS REALLY NEEDED FOR AUTHORIZATION 
       */ 

       // ----- SET MEDIA TYPE ------ 
       var mediaTypeVideo = AVMediaTypeVideo 

       AVCaptureDevice.requestAccessForMediaType(mediaTypeVideo, completionHandler: { (granted) -> Void in 

        //------ GRANTED ACCESS TO MEDIATYPE ------ 
        if granted { 

         self.deviceAuthorized = AVAuthorizationStatus.Authorized 
        } 
         //------ NOT GRANTED ACCESS TO MEDIATYPE ------ 
        else { 

         dispatch_async(dispatch_get_main_queue()) { 

          UIAlertView(title: "CopWatch", message: "CopWatch does not have permission to use the camera, please change your privacy settings.", delegate: self, cancelButtonTitle: "OK") 

          self.deviceAuthorized = AVAuthorizationStatus.Denied 

          dispatch_resume(dispatch_get_main_queue()) 
         } 
        } 

       }) 

      } 
      else { 

       println("Been Here Before") 

       self.session.startRunning() 
      } 

      self.weAreRecording = false 
     } 
    } 

и он re - метод, который запускает предварительный просмотр видео

/************************************************************************** 
    START SESSION 
    **************************************************************************/ 
    func startSession() { 

     println("Checking to see if the session is already running before starting the session") 

     //------ START SESSION IF IT IS NOT ALREADY RUNNING------ 

     if !self.session.running { 

      //------START CAMERA------ 
      println("Session is not already running, starting the session now") 

      self.session.startRunning() 
      self.isSessionRunning = true 

      println("Capture Session initiated") 
     } 
     else { 

      println("Session is already running, no need to start it again") 
     } 
    } 

ответ

1

Кажется, что я нашел ответ.

Я добавлял videoPreviewLayer в качестве подзаголовка и отправлял его в обратную сторону представления в асинхронном диспетчерском вызове. По-видимому, это приложение не понравилось, и было очень сложно начать очень медленно.

переместить этот код

//------DISPLAY PREVIEW LAYER------ 
     if let videoLayer = self.videoPreviewLayer { 

      self.videoPreviewView.layer.addSublayer(self.videoPreviewLayer) 

      println("Video Preview Layer Added as sublayer") 

      self.videoPreviewLayer!.frame = self.videoPreviewView.layer.frame 

      println("Video Preview frame set") 
     } 
     else { 

      println("videoPreviewLayer is nil, could not add sublayer or set frame") 
     } 

     self.view.sendSubviewToBack(self.videoPreviewView) 

до вот так:

//------SET UP PREVIEW LAYER----- 
     self.videoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.session) 

     if let preview = self.videoPreviewLayer { 

      println("Video Preview Layer set") 

      preview.videoGravity = AVLayerVideoGravityResizeAspectFill 
     } 
     else { 

      println("Video Preview Layer is nil!!! Could not set AVLayerVideoGravityResizeAspectFill") 
     } 

     println("Camera successully can display") 

     //------DISPLAY PREVIEW LAYER------ 
     if let videoLayer = self.videoPreviewLayer { 

      self.videoPreviewView.layer.addSublayer(self.videoPreviewLayer) 

      println("Video Preview Layer Added as sublayer") 

      self.videoPreviewLayer!.frame = self.videoPreviewView.layer.frame 

      println("Video Preview frame set") 

      self.view.sendSubviewToBack(self.videoPreviewView) 
     } 
     else { 

      println("videoPreviewLayer is nil, could not add sublayer or set frame") 
     } 

я должен был быть в состоянии увидеть этот вопрос, но я думаю, это то, что происходит, когда вы оптимизируете на неправильные времена. Теперь он очень отзывчив.

Мораль истории, если вы программируете с помощью AVFoundation, не настраивайте и не добавляете свой уровень предварительного просмотра видео в качестве подвидности вашего представления в текущем контроллере представления в асинхронной очереди.