Real-time AAC encoding with Audio Units

I'm trying to do live streaming using Audio Unit Services and Audio Converter Services, but I'm a bit stuck. My goal is to capture the microphone input (PCM), convert it on the fly to AAC, and send the packets over the network. It seems to work, but the sound is awful and the app crashes after about 4 seconds.

// Audio Unit initialization

OSStatus status; 
    AudioComponentDescription desc; 
    desc.componentType = kAudioUnitType_Output; 
    desc.componentSubType = kAudioUnitSubType_RemoteIO; 
    desc.componentFlags = 0; 
    desc.componentFlagsMask = 0; 
    desc.componentManufacturer = kAudioUnitManufacturer_Apple; 

    AudioComponent component = AudioComponentFindNext(NULL, &desc); 
    status = AudioComponentInstanceNew(component, &_audioUnit); 
    NSLog(@"status instance new: %lu",status); 

    UInt32 flag = 1; 
    status = AudioUnitSetProperty(_audioUnit, 
            kAudioOutputUnitProperty_EnableIO, 
            kAudioUnitScope_Input, 
            1, 
            &flag, 
            sizeof(flag)); 
    NSLog(@"status AudioUnitSetProperty input: %lu",status); 

    AudioStreamBasicDescription audioFormat; 
    memset(&audioFormat, 0, sizeof(AudioStreamBasicDescription)); 
    audioFormat.mSampleRate   = 44100; 
    audioFormat.mFormatID   = kAudioFormatLinearPCM; 
    audioFormat.mFormatFlags  = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked; 
    audioFormat.mFramesPerPacket = 1; 
    audioFormat.mChannelsPerFrame = 1; 
    audioFormat.mBitsPerChannel  = 16; 
    audioFormat.mBytesPerPacket  = 
    audioFormat.mBytesPerFrame  = audioFormat.mChannelsPerFrame * sizeof(SInt16); 

    status = AudioUnitSetProperty(_audioUnit, 
            kAudioUnitProperty_StreamFormat, 
            kAudioUnitScope_Input, 
            0, 
            &audioFormat, 
            sizeof(audioFormat)); 
    NSLog(@"status AudioUnitSetProperty audioFormat: %lu",status); 

    AURenderCallbackStruct renderCallbackInfo; 
    renderCallbackInfo.inputProc  = recordingCallback; 
    renderCallbackInfo.inputProcRefCon = NULL; 
    AudioUnitSetProperty(_audioUnit, 
         kAudioOutputUnitProperty_SetInputCallback, 
         kAudioUnitScope_Global, 
         1, 
         &renderCallbackInfo, 
         sizeof(renderCallbackInfo)); 

    float aBufferLength = 0.005; // In seconds 
    AudioSessionSetProperty(kAudioSessionProperty_PreferredHardwareIOBufferDuration, 
          sizeof(aBufferLength), &aBufferLength); 

    _converter = [[Converter alloc] initWithFormat]; 
    status = AudioUnitInitialize(_audioUnit); 
    NSLog(@"status AudioUnit initialize: %lu",status); 

// recording callback:

static OSStatus recordingCallback(void *inRefCon, 
            AudioUnitRenderActionFlags *ioActionFlags, 
            const AudioTimeStamp *inTimeStamp, 
            UInt32 inBusNumber, 
            UInt32 inNumberFrames, 
            AudioBufferList *ioData) { 

    AudioBufferList bufferList; 
    bufferList.mNumberBuffers = 1; 
    bufferList.mBuffers[0].mNumberChannels = 1; 
    bufferList.mBuffers[0].mData = NULL; 
    bufferList.mBuffers[0].mDataByteSize = inNumberFrames * sizeof(SInt16); 
    OSStatus status = AudioUnitRender(_audioUnit, ioActionFlags, inTimeStamp,1, inNumberFrames, &bufferList); 
    AudioBuffer aac; 
    AudioBuffer pcm; 
    pcm.mData = malloc(inNumberFrames * sizeof(SInt16)); 
    unsigned char * p = malloc(inNumberFrames * sizeof(SInt16)); 
    memcpy(p, bufferList.mBuffers[0].mData, inNumberFrames * sizeof(SInt16)); 
    memcpy(pcm.mData,p,bufferList.mBuffers[0].mDataByteSize); // dunno why i can't memcpy bufferlist data directly to pcm struct 
    pcm.mDataByteSize = inNumberFrames * sizeof(SInt16); 
    pcm.mNumberChannels = 1; 
    int osstatus = [_converter convertAudioBuffer:&pcm EncodedAudioBuffer:&aac]; 

    // send to the network 
    NSData* data = [[NSData alloc]initWithBytes:aac.mData length:aac.mDataByteSize]; 
    dispatch_async(myQueue, ^{_pts+=1024;[sender sendBuf2:data withTime:_pts];}); 
    return noErr; 
} 

// converter side:

-(int) convertAudioBuffer:(AudioBuffer*)inSamples EncodedAudioBuffer:(AudioBuffer*) outData{ 
    memset(_buffer, 0, _converterSettings.maxPacketSize); 

    _converterSettings.buffer = *inSamples; 
    //_converterSettings.bytesToEncode = inSamples->mDataByteSize; 

    UInt32 ioOutputDataPackets = 1; 
    AudioStreamPacketDescription outPacketDesc[1]; 


    AudioBufferList convertedData; 
    convertedData.mNumberBuffers = 1; 
    convertedData.mBuffers[0].mNumberChannels = 1; 
    convertedData.mBuffers[0].mDataByteSize = _converterSettings.maxPacketSize; 
    convertedData.mBuffers[0].mData = _buffer; 

    OSStatus error = AudioConverterFillComplexBuffer(_audioConverter, 
                MyAudioConverterCallback, 
                &_converterSettings, 
                &ioOutputDataPackets, 
                &convertedData, 
                outPacketDesc); 
    if (error != noErr) 
    { 
     NSError *err = [NSError errorWithDomain:NSOSStatusErrorDomain code:error userInfo:nil]; 
     NSLog(@"%ld",convertedData.mBuffers[0].mDataByteSize); 
    } 
    NSLog(@"%ld",convertedData.mBuffers[0].mDataByteSize); 
    /* Set the output data */ 
    outData->mNumberChannels = convertedData.mBuffers[0].mNumberChannels; 
    outData->mDataByteSize  = convertedData.mBuffers[0].mDataByteSize; 
    outData->mData    = convertedData.mBuffers[0].mData; 
    return 0; 
} 

the callback proc:

OSStatus MyAudioConverterCallback(AudioConverterRef inAudioConverter, 
            UInt32 *ioDataPacketCount, 
            AudioBufferList *ioData, 
            AudioStreamPacketDescription **outDataPacketDescription, 
            void *inUserData) 
{ 
    if (outDataPacketDescription) 
    { 
     *outDataPacketDescription = NULL; 
    } 
    Settings *audioConverterSettings = (Settings *)inUserData; 
    ioData->mBuffers[0].mData = audioConverterSettings->buffer.mData; 
    ioData->mBuffers[0].mDataByteSize = audioConverterSettings->buffer.mDataByteSize; 
    ioData->mBuffers[0].mNumberChannels = audioConverterSettings->buffer.mNumberChannels; 

    return noErr; 
} 
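
The Settings struct used as the converter's user data isn't shown either; judging from the fields referenced above, it would look roughly like this (a guess, not the original definition):

// Hypothetical reconstruction of the Settings user-data struct. 
typedef struct { 
    AudioBuffer buffer;   // the captured PCM buffer handed to the input callback 
    UInt32  maxPacketSize; // from kAudioConverterPropertyMaximumOutputPacketSize 
    UInt32  bytesToEncode; // see the commented-out line in convertAudioBuffer: 
} Settings; 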

and finally, the output:

2013-07-16 16:58:57.192 AudioUnitAAC[84656:c07] status instance new: 0 
2013-07-16 16:58:57.195 AudioUnitAAC[84656:c07] status AudioUnitSetProperty input: 0 
2013-07-16 16:58:57.197 AudioUnitAAC[84656:c07] status AudioUnitSetProperty audioFormat: 0 
2013-07-16 16:58:57.235 AudioUnitAAC[84656:c07] status AudioUnit initialize: 0 
2013-07-16 16:58:58.182 AudioUnitAAC[84656:c07] start : 0 
2013-07-16 16:58:58.200 AudioUnitAAC[84656:6e07] 4 bytes encoded 
2013-07-16 16:58:58.211 AudioUnitAAC[84656:6e07] 152 bytes encoded 
2013-07-16 16:58:58.223 AudioUnitAAC[84656:6e07] 169 bytes encoded 
2013-07-16 16:58:58.235 AudioUnitAAC[84656:6e07] 157 bytes encoded 
2013-07-16 16:58:58.246 AudioUnitAAC[84656:6e07] 160 bytes encoded 
2013-07-16 16:58:58.258 AudioUnitAAC[84656:6e07] 164 bytes encoded 
.... 

... until it crashes at some random point for no obvious reason (bad access in AudioConverterFillComplexBuffer, bad instruction on the NSLog(@"%ld", convertedData.mBuffers[0].mDataByteSize) line, ...).

I'm new to Apple's Core Audio and any help would be appreciated :)


Did you ever figure this out? I have the same problem on iOS 7.1 –


nope, I had to go back to Audio Queues – HaneTV

Answer


Thanks to this post I got it to work! I changed:

pcm.mData = malloc(inNumberFrames * sizeof(SInt16)); 
unsigned char * p = malloc(inNumberFrames * sizeof(SInt16)); 
memcpy(p, bufferList.mBuffers[0].mData, inNumberFrames * sizeof(SInt16)); 
memcpy(pcm.mData,p,bufferList.mBuffers[0].mDataByteSize); // dunno why i can't memcpy bufferlist data directly to pcm struct 
pcm.mDataByteSize = inNumberFrames * sizeof(SInt16); 
pcm.mNumberChannels = 1; 

to:

memset(&audioBuffer, 0, sizeof(AudioBufferList)); 
memset(&audioBuffer.mBuffers[0], 0, sizeof(AudioBuffer)); 
audioBuffer.mNumberBuffers = 1; 
audioBuffer.mBuffers[0].mNumberChannels = bufferList->mBuffers[0].mNumberChannels; 
audioBuffer.mBuffers[0].mDataByteSize = bufferList->mBuffers[0].mDataByteSize; 
audioBuffer.mBuffers[0].mData = bufferList->mBuffers[0].mData; 

And in the callback I do this:

ioData->mBuffers[0].mData = audioBuffer.mBuffers[0].mData; 
ioData->mBuffers[0].mDataByteSize = audioBuffer.mBuffers[0].mDataByteSize; 
ioData->mBuffers[0].mNumberChannels = audioBuffer.mBuffers[0].mNumberChannels; 
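
One assumption worth spelling out, since it is not visible in the code above: the input proc should also tell AudioConverterFillComplexBuffer when the captured buffer has been fully handed over, otherwise the converter may call back again and re-read stale data. A minimal sketch of such a guard, assuming the commented-out bytesToEncode line in convertAudioBuffer: is re-enabled so it is reset before every fill call:

// Sketch only: an input proc that reports "no more data" once the single 
// captured buffer has been consumed. 
OSStatus MyAudioConverterCallback(AudioConverterRef inAudioConverter, 
            UInt32 *ioDataPacketCount, 
            AudioBufferList *ioData, 
            AudioStreamPacketDescription **outDataPacketDescription, 
            void *inUserData) 
{ 
    Settings *settings = (Settings *)inUserData; 
    if (outDataPacketDescription) 
     *outDataPacketDescription = NULL; 

    if (settings->bytesToEncode == 0) { 
     // Nothing left: hand back 0 packets and a non-zero status so this fill call ends. 
     *ioDataPacketCount = 0; 
     return -1; // arbitrary "no more data" marker, returned to the caller of FillComplexBuffer 
    } 

    ioData->mBuffers[0].mData   = settings->buffer.mData; 
    ioData->mBuffers[0].mDataByteSize = settings->buffer.mDataByteSize; 
    ioData->mBuffers[0].mNumberChannels = settings->buffer.mNumberChannels; 
    *ioDataPacketCount = settings->buffer.mDataByteSize/sizeof(SInt16); // LPCM: one frame per packet 
    settings->bytesToEncode = 0; // everything handed over in one call 

    return noErr; 
} 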