久久久久久久av_日韩在线中文_看一级毛片视频_日本精品二区_成人深夜福利视频_武道仙尊动漫在线观看

誰(shuí)能幫我通過(guò)音頻單元錄制 iPhone 輸出的聲音

Can anybody help me in recording iPhone output sound through Audio Unit(誰(shuí)能幫我通過(guò)音頻單元錄制 iPhone 輸出的聲音)
本文介紹了誰(shuí)能幫我通過(guò)音頻單元錄制 iPhone 輸出的聲音的處理方法,對(duì)大家解決問(wèn)題具有一定的參考價(jià)值,需要的朋友們下面隨著小編來(lái)一起學(xué)習(xí)吧!

問(wèn)題描述

限時(shí)送ChatGPT賬號(hào)..

這是我的代碼:我使用此代碼通過(guò) Audio Unit 錄制 iPhone 輸出音頻然后將輸出保存在 output.caf 但 output.caf 文件為空任何人都知道我該怎么做?輸出音頻文件為空

這是初始化音頻單元

-(void) 初始化OutputUnit{OSStatus 狀態(tài);//描述音頻組件AudioComponentDescription 描述;desc.componentType = kAudioUnitType_Output;desc.componentSubType = kAudioUnitSubType_RemoteIO;desc.componentFlags = 0;desc.componentFlagsMask = 0;desc.componentManufacturer = kAudioUnitManufacturer_Apple;//獲取組件AudioComponent inputComponent = AudioComponentFindNext(NULL, &desc);//獲取音頻單元狀態(tài) = AudioComponentInstanceNew(inputComponent, &audioUnit);//啟用 IO 進(jìn)行錄制UInt32 標(biāo)志 = 1;狀態(tài) = AudioUnitSetProperty(audioUnit,kAudioOutputUnitProperty_EnableIO,kAudioUnitScope_Input,kInputBus,&國(guó)旗,大小(標(biāo)志));//啟用 IO 進(jìn)行播放狀態(tài) = AudioUnitSetProperty(audioUnit,kAudioOutputUnitProperty_EnableIO,kAudioUnitScope_Output,k輸出總線,&國(guó)旗,大小(標(biāo)志));//描述格式AudioStreamBasicDescription audioFormat={0};音頻格式.mSampleRate = 44100.00;audioFormat.mFormatID = kAudioFormatLinearPCM;audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger |kAudioFormatFlagIsPacked;audioFormat.mFramesPerPacket = 1;audioFormat.mChannelsPerFrame = 1;audioFormat.mBitsPerChannel = 16;audioFormat.mBytesPerPacket = 2;audioFormat.mBytesPerFrame = 2;//應(yīng)用格式狀態(tài) = AudioUnitSetProperty(audioUnit,kAudioUnitProperty_StreamFormat,kAudioUnitScope_Output,kInputBus,&音頻格式,大小(音頻格式));狀態(tài) = AudioUnitSetProperty(audioUnit,kAudioUnitProperty_StreamFormat,kAudioUnitScope_Input,k輸出總線,&音頻格式,大小(音頻格式));//設(shè)置輸入回調(diào)AURenderCallbackStruct 回調(diào)結(jié)構(gòu);callbackStruct.inputProc = 錄音回調(diào);callbackStruct.inputProcRefCon = self;狀態(tài) = AudioUnitSetProperty(audioUnit,kAudioOutputUnitProperty_SetInputCallback,kAudioUnitScope_Global,kInputBus,&回調(diào)結(jié)構(gòu),sizeof(callbackStruct));//設(shè)置輸出回調(diào)callbackStruct.inputProc = 播放回調(diào);callbackStruct.inputProcRefCon = self;狀態(tài) = AudioUnitSetProperty(audioUnit,kAudioUnitProperty_SetRenderCallback,kAudioUnitScope_Global,k輸出總線,&回調(diào)結(jié)構(gòu),sizeof(callbackStruct));//禁用記錄器的緩沖區(qū)分配(可選 - 如果我們想傳入我們自己的,請(qǐng)執(zhí)行此操作)標(biāo)志 = 0;狀態(tài) = AudioUnitSetProperty(audioUnit,kAudioUnitProperty_ShouldAllocateBuffer,kAudioUnitScope_Output,kInputBus,&國(guó)旗,大小(標(biāo)志));音頻單元初始化(音頻單元);音頻輸出單元開(kāi)始(音頻單元);//初始化 le fichier 音頻NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);NSString *documentsDirectory = [paths objectAtIndex:0];NSString *destinationFilePath = [[[NSString alloc] initWithFormat: @"%@/output.caf", documentsDirectory] ??autorelease];NSLog(@">>>%@",destinationFilePath);CFURLRef 目的地URL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, (CFStringRef)destinationFilePath, kCFURLPOSIXPathStyle, false);OSStatus setupErr = ExtAudioFileCreateWithURL(destinationURL, kAudioFileWAVEType, &audioFormat, NULL, kAudioFileFlags_EraseFile, &effectState.audioFileRef);CFRelease(destinationURL);NSAssert(setupErr == noErr, @"無(wú)法創(chuàng)建寫入文件");setupErr = ExtAudioFileSetProperty(effectState.audioFileRef, kExtAudioFileProperty_ClientDataFormat, sizeof(AudioStreamBasicDescription), &audioFormat);NSAssert(setupErr == noErr, @"無(wú)法為格式創(chuàng)建文件");setupErr = ExtAudioFileWriteAsync(effectState.audioFileRef, 0, NULL);NSAssert(setupErr == noErr, @"無(wú)法初始化音頻文件的寫入緩沖區(qū)");}

錄音回調(diào)

靜態(tài) OSStatus recordingCallback (void * inRefCon,AudioUnitRenderActionFlags * ioActionFlags,const AudioTimeStamp * inTimeStamp,UInt32 inBusNumber,UInt32 inNumberFrames,音頻緩沖區(qū)列表 * ioData) {NSLog(@"回調(diào)");if (*ioActionFlags == kAudioUnitRenderAction_PostRender&&inBusNumber==0){AudioBufferList *bufferList;//<- 用緩沖區(qū)填充它(你會(huì)想要 malloc 它,因?yàn)樗且粋€(gè)動(dòng)態(tài)長(zhǎng)度列表)EffectState *effectState = (EffectState *)inRefCon;AudioUnit rioUnit =[(MixerHostAudio*)inRefCon getAudioUnit];OSStatus 狀態(tài);NSLog(@"de5eal el 回調(diào)");//下面我得到了錯(cuò)誤狀態(tài) = AudioUnitRender(rioUnit,ioActionFlags,在時(shí)間戳,inBusNumber,inNumberFrames,緩沖區(qū)列表);if (noErr != status) { NSLog(@"AudioUnitRender error");返回?zé)o錯(cuò)誤;}//現(xiàn)在,我們剛剛讀取的樣本位于 bufferList 的緩沖區(qū)中ExtAudioFileWriteAsync(effectState->audioFileRef, inNumberFrames, bufferList);}返回?zé)o錯(cuò)誤;}//然后停止錄制-(無(wú)效)停止記錄{音頻輸出單元停止(音頻單元);AudioUnitUninitialize(audioUnit);}

解決方案

initializaeOutputUnit 你只創(chuàng)建了你的音頻文件:

OSStatus setupErr = ExtAudioFileCreateWithURL(destinationURL, kAudioFileWAVEType, &audioFormat, NULL, kAudioFileFlags_EraseFile, &effectState.audioFileRef);

通過(guò)傳遞 0(幀)和 NULL(音頻緩沖區(qū))僅用于初始化內(nèi)部緩沖區(qū):

setupErr = ExtAudioFileWriteAsync(effectState.audioFileRef, 0, NULL);

這就是 recordingCallback 的問(wèn)題所在:

1) ioActionFlags 始終為 0,inBusNumber 始終為 1,因?yàn)檫@就是您設(shè)置回調(diào)的方式 (kInputBus = 1):

if (*ioActionFlags == kAudioUnitRenderAction_PostRender&&inBusNumber==0)

所以只需刪除 if 語(yǔ)句.

2) 從 AudioUnitRender 您將收到 -50 錯(cuò)誤,它在 CoreAudioTypes.h 中定義為 kAudio_ParamError 錯(cuò)誤.發(fā)生這種情況的原因是 bufferList 未定義且為 NULL!

<塊引用>

 OSStatus 狀態(tài);狀態(tài) = AudioUnitRender(這個(gè)->mAudioUnit,ioActionFlags,在時(shí)間戳,kInputBus,inNumberFrames,&緩沖區(qū)列表);if (noErr != status) {printf("AudioUnitRender 錯(cuò)誤:%ld", status);返回?zé)o錯(cuò)誤;}

您只需要定義一個(gè)有效的 AudioBuffer 并將其傳遞給 AudioUnitRender,這是我的工作 RenderCallback:

<塊引用>

 靜態(tài) OSStatus recordingCallback (void * inRefCon,AudioUnitRenderActionFlags * ioActionFlags,const AudioTimeStamp * inTimeStamp,UInt32 inBusNumber,UInt32 inNumberFrames,音頻緩沖區(qū)列表 * ioData) {雙倍 timeInSeconds = inTimeStamp->mSampleTime/kSampleRate;printf("
%fs inBusNumber: %lu inNumberFrames: %lu ", timeInSeconds, inBusNumber, inNumberFrames);//printAudioUnitRenderActionFlags(ioActionFlags);AudioBufferList 緩沖區(qū)列表;SInt16 個(gè)樣本[inNumberFrames];//足夠大的大小,不必?fù)?dān)心緩沖區(qū)溢出memset (&samples, 0, sizeof (samples));bufferList.mNumberBuffers = 1;bufferList.mBuffers[0].mData = 樣本;bufferList.mBuffers[0].mNumberChannels = 1;bufferList.mBuffers[0].mDataByteSize = inNumberFrames*sizeof(SInt16);ViewController* THIS = THIS = (__bridge ViewController *)inRefCon;OSStatus 狀態(tài);狀態(tài) = AudioUnitRender(這個(gè)->mAudioUnit,ioActionFlags,在時(shí)間戳,kInputBus,inNumberFrames,&緩沖區(qū)列表);if (noErr != status) {printf("AudioUnitRender 錯(cuò)誤:%ld", status);返回?zé)o錯(cuò)誤;}//現(xiàn)在,我們剛剛讀取的樣本位于 bufferList 的緩沖區(qū)中ExtAudioFileWriteAsync(THIS->mAudioFileRef, inNumberFrames, &bufferList);返回?zé)o錯(cuò)誤;}

stopRecord 中,您應(yīng)該使用 ExtAudioFileDispose 關(guān)閉音頻文件:

<塊引用>

 - (void)stopRecording:(NSTimer*)theTimer {printf("
停止錄制
");AudioOutputUnitStop(mAudioUnit);AudioUnitUninitialize(mAudioUnit);OSStatus 狀態(tài) = ExtAudioFileDispose(mAudioFileRef);printf("OSStatus(ExtAudioFileDispose): %ld
", status);}

完整源代碼:http://pastebin.com/92Fyjaye

this is my code : i use this code to record the iPhone output audio by using Audio Unit then saving the output in output.caf but the output.caf file is empty any body have idea about what shall i do ? the output audio file is empty

this is intializing the audio unit

-(void) initializaeOutputUnit
{
    OSStatus status;

    // Describe audio component
    AudioComponentDescription desc;
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_RemoteIO;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;

    // Get component
    AudioComponent inputComponent = AudioComponentFindNext(NULL, &desc);

    // Get audio units
    status = AudioComponentInstanceNew(inputComponent, &audioUnit);

    // Enable IO for recording
    UInt32 flag = 1;
    status = AudioUnitSetProperty(audioUnit, 
                                  kAudioOutputUnitProperty_EnableIO, 
                                  kAudioUnitScope_Input, 
                                  kInputBus,
                                  &flag, 
                                  sizeof(flag));

    // Enable IO for playback
    status = AudioUnitSetProperty(audioUnit, 
                                  kAudioOutputUnitProperty_EnableIO, 
                                  kAudioUnitScope_Output, 
                                  kOutputBus,
                                  &flag, 
                                  sizeof(flag));

    // Describe format
    AudioStreamBasicDescription audioFormat={0};
    audioFormat.mSampleRate         = 44100.00;
    audioFormat.mFormatID           = kAudioFormatLinearPCM;
    audioFormat.mFormatFlags        = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
    audioFormat.mFramesPerPacket    = 1;
    audioFormat.mChannelsPerFrame   = 1;
    audioFormat.mBitsPerChannel     = 16;
    audioFormat.mBytesPerPacket     = 2;
    audioFormat.mBytesPerFrame      = 2;

    // Apply format
    status = AudioUnitSetProperty(audioUnit, 
                                  kAudioUnitProperty_StreamFormat, 
                                  kAudioUnitScope_Output, 
                                  kInputBus, 
                                  &audioFormat, 
                                  sizeof(audioFormat));
    status = AudioUnitSetProperty(audioUnit, 
                                  kAudioUnitProperty_StreamFormat, 
                                  kAudioUnitScope_Input, 
                                  kOutputBus, 
                                  &audioFormat, 
                                  sizeof(audioFormat));


    // Set input callback
    AURenderCallbackStruct callbackStruct;
    callbackStruct.inputProc = recordingCallback;
    callbackStruct.inputProcRefCon = self;
    status = AudioUnitSetProperty(audioUnit, 
                                  kAudioOutputUnitProperty_SetInputCallback, 
                                  kAudioUnitScope_Global, 
                                  kInputBus, 
                                  &callbackStruct, 
                                  sizeof(callbackStruct));

    // Set output callback
    callbackStruct.inputProc = playbackCallback;
    callbackStruct.inputProcRefCon = self;
    status = AudioUnitSetProperty(audioUnit, 
                                  kAudioUnitProperty_SetRenderCallback, 
                                  kAudioUnitScope_Global, 
                                  kOutputBus,
                                  &callbackStruct, 
                                  sizeof(callbackStruct));

    // Disable buffer allocation for the recorder (optional - do this if we want to pass in our own)
    flag = 0;
    status = AudioUnitSetProperty(audioUnit, 
                                  kAudioUnitProperty_ShouldAllocateBuffer,
                                  kAudioUnitScope_Output, 
                                  kInputBus,
                                  &flag, 
                                  sizeof(flag));


    AudioUnitInitialize(audioUnit);
    AudioOutputUnitStart(audioUnit);


    // On initialise le fichier audio
    NSArray  *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *destinationFilePath = [[[NSString alloc] initWithFormat: @"%@/output.caf", documentsDirectory] autorelease];
    NSLog(@">>> %@", destinationFilePath);
    CFURLRef destinationURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, (CFStringRef)destinationFilePath, kCFURLPOSIXPathStyle, false);

    OSStatus setupErr = ExtAudioFileCreateWithURL(destinationURL, kAudioFileWAVEType, &audioFormat, NULL, kAudioFileFlags_EraseFile, &effectState.audioFileRef);  
    CFRelease(destinationURL);
    NSAssert(setupErr == noErr, @"Couldn't create file for writing");

    setupErr = ExtAudioFileSetProperty(effectState.audioFileRef, kExtAudioFileProperty_ClientDataFormat, sizeof(AudioStreamBasicDescription), &audioFormat);
    NSAssert(setupErr == noErr, @"Couldn't create file for format");

    setupErr =  ExtAudioFileWriteAsync(effectState.audioFileRef, 0, NULL);
    NSAssert(setupErr == noErr, @"Couldn't initialize write buffers for audio file");

   }

the recording call back

static OSStatus recordingCallback       (void *                         inRefCon,
                                         AudioUnitRenderActionFlags *      ioActionFlags,
                                         const AudioTimeStamp *            inTimeStamp,
                                         UInt32                            inBusNumber,
                                         UInt32                            inNumberFrames,
                                         AudioBufferList *                 ioData) {
    NSLog(@"callback");
   if (*ioActionFlags == kAudioUnitRenderAction_PostRender&&inBusNumber==0) 
   {
        AudioBufferList *bufferList; // <- Fill this up with buffers (you will want to malloc it, as it's a dynamic-length list)

        EffectState *effectState = (EffectState *)inRefCon;
       AudioUnit rioUnit =[(MixerHostAudio*)inRefCon getAudioUnit];

        OSStatus status;
        NSLog(@"de5eal el call back ");
        // BELOW I GET THE ERROR
        status = AudioUnitRender( rioUnit,     
                                 ioActionFlags, 
                                 inTimeStamp, 
                                 inBusNumber, 
                                 inNumberFrames, 
                                 bufferList);

        if (noErr != status) { NSLog(@"AudioUnitRender error"); return noErr;}

        // Now, we have the samples we just read sitting in buffers in bufferList
        ExtAudioFileWriteAsync(effectState->audioFileRef, inNumberFrames, bufferList);

    }
    return noErr;     
}




// then stop Recording 
- (void) stopRecord
{

    AudioOutputUnitStop(audioUnit);
    AudioUnitUninitialize(audioUnit);
}

解決方案

In initializaeOutputUnit you only created your audio file:

OSStatus setupErr = ExtAudioFileCreateWithURL(destinationURL, kAudioFileWAVEType, &audioFormat, NULL, kAudioFileFlags_EraseFile, &effectState.audioFileRef);

by passing 0 (frames) and NULL (audiobuffer) is just for init internal buffers:

setupErr =  ExtAudioFileWriteAsync(effectState.audioFileRef, 0, NULL);

That's what's going wrong in recordingCallback:

1) ioActionFlags are always 0 and inBusNumber are always 1, because thats how you setup your callback (kInputBus = 1):

if (*ioActionFlags == kAudioUnitRenderAction_PostRender&&inBusNumber==0)

so just remove the if statement.

2) From AudioUnitRender you will receive -50 error, which is defined in CoreAudioTypes.h as an kAudio_ParamError error. This happens by bufferList is not defined and NULL!

 OSStatus status; 
 status = AudioUnitRender(THIS->mAudioUnit,     
                          ioActionFlags, 
                          inTimeStamp, 
                          kInputBus, 
                          inNumberFrames, 
                          &bufferList);

 if (noErr != status) {
      printf("AudioUnitRender error: %ld", status);   
      return noErr; 
 }

You just need to define an valid AudioBuffer and pass it to AudioUnitRender, this is my working RenderCallback:

  static OSStatus recordingCallback       (void *                      inRefCon,
                                           AudioUnitRenderActionFlags *      ioActionFlags,
                                           const AudioTimeStamp *            inTimeStamp,
                                           UInt32                            inBusNumber,
                                           UInt32                            inNumberFrames,
                                           AudioBufferList *                 ioData)  {
      double timeInSeconds = inTimeStamp->mSampleTime / kSampleRate;
      printf("
%fs inBusNumber: %lu inNumberFrames: %lu ", timeInSeconds, inBusNumber, inNumberFrames);
      //printAudioUnitRenderActionFlags(ioActionFlags);

      AudioBufferList bufferList;

      SInt16 samples[inNumberFrames]; // A large enough size to not have to worry about buffer overrun
      memset (&samples, 0, sizeof (samples));

      bufferList.mNumberBuffers = 1;
      bufferList.mBuffers[0].mData = samples;
      bufferList.mBuffers[0].mNumberChannels = 1;
      bufferList.mBuffers[0].mDataByteSize = inNumberFrames*sizeof(SInt16);

      ViewController* THIS = THIS = (__bridge ViewController *)inRefCon;

      OSStatus status;
      status = AudioUnitRender(THIS->mAudioUnit,     
                               ioActionFlags, 
                               inTimeStamp, 
                               kInputBus, 
                               inNumberFrames, 
                               &bufferList);

      if (noErr != status) {

          printf("AudioUnitRender error: %ld", status); 
          return noErr;
      }

      // Now, we have the samples we just read sitting in buffers in bufferList
      ExtAudioFileWriteAsync(THIS->mAudioFileRef, inNumberFrames, &bufferList);

      return noErr;      
 }

In stopRecord you should close the audio file with ExtAudioFileDispose:

  - (void)stopRecording:(NSTimer*)theTimer {
      printf("
stopRecording
");
      AudioOutputUnitStop(mAudioUnit);
      AudioUnitUninitialize(mAudioUnit);

      OSStatus status = ExtAudioFileDispose(mAudioFileRef);
      printf("OSStatus(ExtAudioFileDispose): %ld
", status); 
 }

Full source code: http://pastebin.com/92Fyjaye

這篇關(guān)于誰(shuí)能幫我通過(guò)音頻單元錄制 iPhone 輸出的聲音的文章就介紹到這了,希望我們推薦的答案對(duì)大家有所幫助,也希望大家多多支持html5模板網(wǎng)!

【網(wǎng)站聲明】本站部分內(nèi)容來(lái)源于互聯(lián)網(wǎng),旨在幫助大家更快的解決問(wèn)題,如果有圖片或者內(nèi)容侵犯了您的權(quán)益,請(qǐng)聯(lián)系我們刪除處理,感謝您的支持!

相關(guān)文檔推薦

Can#39;t change target membership visibility in Xcode 4.5(無(wú)法更改 Xcode 4.5 中的目標(biāo)成員身份可見(jiàn)性)
UITableView: Handle cell selection in a mixed cell table view static and dynamic cells(UITableView:在混合單元格表視圖靜態(tài)和動(dòng)態(tài)單元格中處理單元格選擇)
How to remove Address Bar in Safari in iOS?(如何在 iOS 中刪除 Safari 中的地址欄?)
iOS 5 SDK is gone after upgrade to Xcode 4.5(升級(jí)到 Xcode 4.5 后,iOS 5 SDK 消失了)
Having trouble creating UIImage from CIImage in iOS5(在 iOS5 中從 CIImage 創(chuàng)建 UIImage 時(shí)遇到問(wèn)題)
Open target=quot;_blankquot; links outside of UIWebView in Safari(打開(kāi)目標(biāo)=“_blank;Safari 中 UIWebView 之外的鏈接)
主站蜘蛛池模板: 中文字字幕一区二区三区四区五区 | gav成人免费播放视频 | 中文字幕第一页在线 | 欧美一区二区三区在线 | 日韩在线视频一区二区三区 | 日韩在线视频播放 | 一区二区三区视频在线观看 | 精品伦精品一区二区三区视频 | 日屁视频 | 国产视频久久久 | 日韩国产中文字幕 | 日韩日韩日韩日韩日韩日韩日韩 | 亚洲精品久久久久久一区二区 | 99久久久无码国产精品 | 青青草av | 欧美a在线看 | 在线观看深夜视频 | 中文字幕四虎 | 日韩三级在线观看 | 高清一区二区 | 国产精品成人一区二区三区 | 一二三区av | 中文字幕欧美一区二区 | 久久人爽爽人爽爽 | 欧美日韩福利 | 日韩欧美中文字幕在线观看 | 91欧美| 国产分类视频 | 成年免费大片黄在线观看一级 | 亚洲免费观看视频网站 | 91成人 | 久久久久久影院 | 国产乱码精品一区二区三区中文 | 99reav| 欧美美女爱爱视频 | 欧美日产国产成人免费图片 | 一区二区影视 | 欧美人妇做爰xxxⅹ性高电影 | 免费av手机在线观看 | 在线播放91 | 国产精品s色 |