
I need to merge an audio file with a recorded voice. For example, the recorded voice is 47 seconds long, so I have to cut or trim a 4-minute music track down to 47 seconds and then merge it with the recording. iOS: how do I trim audio files with Swift?

var url: NSURL?
if self.audioRecorder != nil {
    url = self.audioRecorder!.url
} else {
    url = self.soundFileURL!
}
print("playing \(url)")

// AVPlayer(URL:) does not throw, so no do/try is needed here
self.newplayer = AVPlayer(URL: url!)
let avAsset = AVURLAsset(URL: url!, options: nil)
print("\(avAsset)")

// break the asset duration down into hours/minutes/seconds
let audioDuration = avAsset.duration
let totalSeconds = CMTimeGetSeconds(audioDuration)
let hours = floor(totalSeconds / 3600)
let minutes = floor(totalSeconds % 3600 / 60)
let seconds = floor(totalSeconds % 3600 % 60)
print("hours = \(hours), minutes = \(minutes), seconds = \(seconds)")

This is the output: hours = 0.0, minutes = 0.0, seconds = 42.0

For the trimming step I tried the method below. How do I set the exact duration, the start and end times, and a new output URL? (A parameterized variant is sketched after the function below.)

func exportAsset(asset: AVAsset, fileName: String) {
    let documentsDirectory = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
    let trimmedSoundFileURL = documentsDirectory.URLByAppendingPathComponent(fileName)
    print("saving to \(trimmedSoundFileURL!.absoluteString)")

    // fileExistsAtPath expects a file-system path, not a file:// URL string,
    // so use .path here. The export fails if the file already exists,
    // so remove any previous output first.
    let filemanager = NSFileManager.defaultManager()
    if filemanager.fileExistsAtPath(trimmedSoundFileURL!.path!) {
        print("sound exists, removing old file")
        _ = try? filemanager.removeItemAtURL(trimmedSoundFileURL!)
    }

    let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetAppleM4A)
    exporter!.outputFileType = AVFileTypeAppleM4A
    exporter!.outputURL = trimmedSoundFileURL

    let duration = CMTimeGetSeconds(asset.duration)
    if duration < 5.0 {
        print("sound is not long enough")
        return
    }

    // e.g. keep only the first 5 seconds
    let startTime = CMTimeMake(0, 1)
    let stopTime = CMTimeMake(5, 1)
    let exportTimeRange = CMTimeRangeFromTimeToTime(startTime, stopTime)
    exporter!.timeRange = exportTimeRange

    // run the export
    exporter!.exportAsynchronouslyWithCompletionHandler({
        switch exporter!.status {
        case AVAssetExportSessionStatus.Failed:
            print("export failed \(exporter!.error)")
        case AVAssetExportSessionStatus.Cancelled:
            print("export cancelled \(exporter!.error)")
        default:
            print("export complete")
        }
    })
}
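If you need an arbitrary window rather than a hard-coded 0 to 5 seconds, the same time range can be built from fractional seconds with CMTimeMakeWithSeconds. A sketch; trimRange, startSeconds, and endSeconds are illustrative names, not part of the original code:

// Hypothetical helper: build a trim range from fractional seconds.
// 600 is a common media timescale that represents most sample boundaries exactly.
func trimRange(startSeconds: Double, endSeconds: Double) -> CMTimeRange {
    let start = CMTimeMakeWithSeconds(startSeconds, 600)
    let end = CMTimeMakeWithSeconds(endSeconds, 600)
    return CMTimeRangeFromTimeToTime(start, end)
}

// e.g. trim to the first 47 seconds to match the recorded voice:
exporter!.timeRange = trimRange(0, endSeconds: 47)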

Answer

I finally found the answer to my question, and it works fine. I have attached the code below; I also added the audio-trimming code to it. It will be useful for anyone trying to merge and trim audio (Swift 2.3):

func mixAudio() {
    let currentTime = CFAbsoluteTimeGetCurrent()
    let composition = AVMutableComposition()

    // First track: the music file
    let compositionAudioTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
    compositionAudioTrack.preferredVolume = 0.8
    let avAsset = AVURLAsset(URL: soundFileURL, options: nil)
    print("\(avAsset)")
    let tracks = avAsset.tracksWithMediaType(AVMediaTypeAudio)
    let clipAudioTrack = tracks[0]
    do {
        try compositionAudioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, avAsset.duration), ofTrack: clipAudioTrack, atTime: kCMTimeZero)
    } catch {
        print("failed to insert first track: \(error)")
    }

    // Second track: the recorded voice
    let compositionAudioTrack1 = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
    compositionAudioTrack1.preferredVolume = 0.8

    let avAsset1 = AVURLAsset(URL: soundFileURL1)
    print(avAsset1)

    let tracks1 = avAsset1.tracksWithMediaType(AVMediaTypeAudio)
    let clipAudioTrack1 = tracks1[0]
    do {
        try compositionAudioTrack1.insertTimeRange(CMTimeRangeMake(kCMTimeZero, avAsset1.duration), ofTrack: clipAudioTrack1, atTime: kCMTimeZero)
    } catch {
        print("failed to insert second track: \(error)")
    }
    // Build the output path under Library/
    let paths = NSSearchPathForDirectoriesInDomains(.LibraryDirectory, .UserDomainMask, true)
    let libraryDirectory = paths[0]
    let strOutputFilePath = libraryDirectory.stringByAppendingString("/Fav")
    print("strOutputFilePath is \n \(strOutputFilePath)")

    let requiredOutputPath = libraryDirectory.stringByAppendingString("/Fav.m4a")
    print("requiredOutputPath is \n \(requiredOutputPath)")

    soundFile1 = NSURL.fileURLWithPath(requiredOutputPath)
    print("output path is \n \(soundFile1)")

    let audioDuration = avAsset.duration
    let totalSeconds = CMTimeGetSeconds(audioDuration)
    let hours = floor(totalSeconds / 3600)
    let minutes = floor(totalSeconds % 3600 / 60)
    let seconds = floor(totalSeconds % 3600 % 60)
    print("hours = \(hours), minutes = \(minutes), seconds = \(seconds)")

    // Trim the mix to the full length of the first asset (not just its
    // seconds component, which would be wrong for durations over a minute)
    let trimSeconds = Int64(totalSeconds)

    // Recorder settings for the mixed output (AAC, mono, low quality)
    let recordSettings: [String: AnyObject] = [
        AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
        AVSampleRateKey: 12000,
        AVNumberOfChannelsKey: 1,
        AVEncoderAudioQualityKey: AVAudioQuality.Low.rawValue
    ]
    do {
        audioRecorder = try AVAudioRecorder(URL: soundFile1, settings: recordSettings)
        audioRecorder!.delegate = self
        audioRecorder!.meteringEnabled = true
        audioRecorder!.prepareToRecord()
    } catch let error as NSError {
        audioRecorder = nil
        print(error.localizedDescription)
    }

    // prepareToRecord() creates a file at soundFile1; the exporter refuses to
    // write over an existing file, so delete it before exporting
    do {
        try NSFileManager.defaultManager().removeItemAtURL(soundFile1)
    } catch {
        print("could not remove old output file: \(error)")
    }
    let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
    exporter!.outputURL = soundFile1
    exporter!.outputFileType = AVFileTypeAppleM4A
    let duration = CMTimeGetSeconds(avAsset1.duration)
    print(duration)
    if duration < 5.0 {
        print("sound is not long enough")
        return
    }

    // trim the composition to the length of the first asset,
    // e.g. 47 seconds for a 47-second voice recording
    let startTime = CMTimeMake(0, 1)
    let stopTime = CMTimeMake(trimSeconds, 1)
    let exportTimeRange = CMTimeRangeFromTimeToTime(startTime, stopTime)
    print(exportTimeRange)
    exporter!.timeRange = exportTimeRange
    print(exporter!.timeRange)


    exporter!.exportAsynchronouslyWithCompletionHandler { () -> Void in
        print("output path is \n \(requiredOutputPath)")
        print("export complete: \(CFAbsoluteTimeGetCurrent() - currentTime)")

        // only play back / upload if the export actually succeeded
        guard exporter!.status == AVAssetExportSessionStatus.Completed else {
            print("export failed: \(exporter!.error)")
            return
        }

        var url: NSURL?
        if self.audioRecorder != nil {
            url = self.audioRecorder!.url
        } else {
            url = self.soundFile1!
            print(url)
        }
        print("playing \(url)")

        do {
            print(self.soundFile1)
            print("output path is \n \(requiredOutputPath)")
            self.setSessionPlayback()

            // read the mixed file, base64-encode it, and upload it
            self.optData = try NSData(contentsOfURL: self.soundFile1!, options: NSDataReadingOptions.DataReadingMappedIfSafe)
            print(self.optData)
            self.recordencryption = self.optData.base64EncodedStringWithOptions(NSDataBase64EncodingOptions())
            // print(self.recordencryption)
            self.myImageUploadRequest()

            // play the mixed file once
            self.wasteplayer = try AVAudioPlayer(contentsOfURL: self.soundFile1)
            self.wasteplayer.numberOfLoops = 0
            self.wasteplayer.play()
        } catch let error as NSError {
            print(error.localizedDescription)
        }
    }
}
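Note that both time ranges are inserted at kCMTimeZero, so the two tracks play simultaneously (voice over music). If you wanted the clips back to back instead of overlaid, you could insert the second range starting at the end of the first. A minimal sketch, reusing the objects from mixAudio():

// Sketch: append the second clip after the first instead of overlapping it
try compositionAudioTrack1.insertTimeRange(
    CMTimeRangeMake(kCMTimeZero, avAsset1.duration),
    ofTrack: clipAudioTrack1,
    atTime: avAsset.duration)   // start where the first asset ends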