iOS Audio Capture: Converting an AudioBufferList to a CMSampleBufferRef
2016-03-12 13:57
The capture class keeps the remote I/O audio component and its instance, and the render callback wraps each batch of captured PCM into a CMSampleBufferRef:

@property (nonatomic, assign) AudioComponent component;
@property (nonatomic, assign) AudioComponentInstance componetInstance;

// Input callback: pull the captured PCM out of the remote I/O unit and wrap it
// in a CMSampleBufferRef. Substitute your own capture class for <ClassName>.
static OSStatus handleInputBuffer(void *inRefCon,
                                  AudioUnitRenderActionFlags *ioActionFlags,
                                  const AudioTimeStamp *inTimeStamp,
                                  UInt32 inBusNumber,
                                  UInt32 inNumberFrames,
                                  AudioBufferList *ioData) {
    @autoreleasepool {
        <ClassName> *ref = (__bridge <ClassName> *)inRefCon;
        AudioStreamBasicDescription asbd = [ref asbd];

        CMSampleBufferRef buff = NULL;
        CMFormatDescriptionRef format = NULL;

        // Describe the PCM format so Core Media knows how to interpret the data.
        OSStatus status = CMAudioFormatDescriptionCreate(kCFAllocatorDefault, &asbd,
                                                         0, NULL, 0, NULL, NULL, &format);
        if (status) {
            return status;
        }

        // Each sample lasts 1/48000 s; the presentation time starts at zero here.
        CMSampleTimingInfo timing = { CMTimeMake(1, 48000), kCMTimeZero, kCMTimeInvalid };

        // Create an empty sample buffer; its data is attached after rendering.
        status = CMSampleBufferCreate(kCFAllocatorDefault,
                                      NULL, false, NULL, NULL,
                                      format, (CMItemCount)inNumberFrames,
                                      1, &timing, 0, NULL, &buff);
        CFRelease(format); // the sample buffer keeps its own reference to the format
        if (status) {
            // Failed to create the sample buffer.
            return status;
        }

        // Let AudioUnitRender allocate the data: mData = NULL, mDataByteSize = 0.
        AudioBuffer buffer;
        buffer.mData = NULL;
        buffer.mDataByteSize = 0;
        buffer.mNumberChannels = 2;

        AudioBufferList buffers;
        buffers.mNumberBuffers = 1;
        buffers.mBuffers[0] = buffer;

        // Pull this callback's captured audio from the remote I/O unit.
        status = AudioUnitRender(ref.componetInstance,
                                 ioActionFlags,
                                 inTimeStamp,
                                 inBusNumber,
                                 inNumberFrames,
                                 &buffers);
        if (status) {
            CFRelease(buff);
            return status;
        }

        // Copy the rendered AudioBufferList into the sample buffer.
        status = CMSampleBufferSetDataBufferFromAudioBufferList(buff,
                                                                kCFAllocatorDefault,
                                                                kCFAllocatorDefault,
                                                                0, &buffers);
        if (!status) {
            // buff is now a complete audio CMSampleBufferRef; hand it to a consumer
            // here. Whoever ends up with it must CFRelease it when done.
        }
        return status;
    }
}
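The empty if (!status) branch is where the finished sample buffer would normally be handed off. A minimal sketch of one possible hand-off, assuming the capture class also exposes a hypothetical AVAssetWriterInput property named audioInput (not part of the original code):

if (!status) {
    // Hypothetical consumer: append the buffer to an AVAssetWriterInput owned
    // by the capture class (audioInput is an assumed property, not in the post).
    if (ref.audioInput.isReadyForMoreMediaData) {
        [ref.audioInput appendSampleBuffer:buff];
    }
    CFRelease(buff); // the callback created buff, so it releases it once appended
}

Appending copies the data, so releasing the buffer right after the append is safe.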
- (AudioStreamBasicDescription)asbd
{
    AudioStreamBasicDescription desc = {0};
    desc.mSampleRate       = 48000;
    desc.mFormatID         = kAudioFormatLinearPCM;
    desc.mFormatFlags      = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
    desc.mChannelsPerFrame = 2;
    desc.mFramesPerPacket  = 1;
    desc.mBitsPerChannel   = 16;
    desc.mBytesPerFrame    = desc.mBitsPerChannel / 8 * desc.mChannelsPerFrame;
    desc.mBytesPerPacket   = desc.mBytesPerFrame * desc.mFramesPerPacket;
    return desc;
}
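This describes 48 kHz, 16-bit signed-integer, packed, interleaved stereo PCM. With 2 bytes per channel and 2 channels, each frame is 4 bytes, and with one frame per packet each packet is 4 bytes as well, i.e. 48000 * 4 = 192000 bytes of PCM per second. A quick sanity check of the derived fields (an illustrative assertion run inside the same class, not part of the original code):

AudioStreamBasicDescription d = [self asbd];
NSAssert(d.mBytesPerFrame == 4 && d.mBytesPerPacket == 4,
         @"16-bit stereo PCM should pack to 4 bytes per frame and per packet");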
- (void)initMicrophoneSource
{
    AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeAudio];
    if (status == AVAuthorizationStatusAuthorized) {
        // Configure and activate the audio session for recording.
        AVAudioSession *session = [AVAudioSession sharedInstance];
        NSError *error = nil;
        [session setCategory:AVAudioSessionCategoryPlayAndRecord
                 withOptions:AVAudioSessionCategoryOptionDefaultToSpeaker |
                             AVAudioSessionCategoryOptionMixWithOthers |
                             AVAudioSessionCategoryOptionAllowBluetooth
                       error:nil];
        if (![session setActive:YES error:&error]) {
            NSLog(@"Failed to set audio session active.");
            return;
        }

        // Find the remote I/O unit and create an instance of it.
        AudioComponentDescription acd;
        acd.componentType         = kAudioUnitType_Output;
        acd.componentSubType      = kAudioUnitSubType_RemoteIO;
        acd.componentManufacturer = kAudioUnitManufacturer_Apple;
        acd.componentFlags        = 0;
        acd.componentFlagsMask    = 0;
        self.component = AudioComponentFindNext(NULL, &acd);

        OSStatus result = AudioComponentInstanceNew(self.component, &_componetInstance);
        if (noErr != result) {
            NSLog(@"Failed to create an audio component instance.");
            return;
        }

        // Enable input (the microphone) on bus 1 of the remote I/O unit.
        UInt32 flagOne = 1;
        AudioUnitSetProperty(self.componetInstance,
                             kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input,
                             1, &flagOne, sizeof(flagOne));

        // Deliver the captured audio in the PCM format described by -asbd.
        AudioStreamBasicDescription desc = [self asbd];
        AudioUnitSetProperty(self.componetInstance,
                             kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output,
                             1, &desc, sizeof(desc));

        // Install the input callback that wraps each render into a CMSampleBufferRef.
        AURenderCallbackStruct cb;
        cb.inputProcRefCon = (__bridge void *)(self);
        cb.inputProc = handleInputBuffer;
        AudioUnitSetProperty(self.componetInstance,
                             kAudioOutputUnitProperty_SetInputCallback,
                             kAudioUnitScope_Global, 1, &cb, sizeof(cb));

        result = AudioUnitInitialize(self.componetInstance);
        if (noErr != result) {
            NSLog(@"Failed to initialize the audio unit.");
        }
        AudioOutputUnitStart(self.componetInstance);
    }
}
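The post stops after AudioOutputUnitStart. For completeness, a matching teardown could undo the setup in reverse order; this is only a sketch, and the method name stopMicrophoneSource is assumed, it does not appear in the original:

- (void)stopMicrophoneSource
{
    if (self.componetInstance) {
        AudioOutputUnitStop(self.componetInstance);            // stop input callbacks
        AudioUnitUninitialize(self.componetInstance);          // undo AudioUnitInitialize
        AudioComponentInstanceDispose(self.componetInstance);  // release the remote I/O unit
        self.componetInstance = NULL;
        self.component = NULL;
    }
    // Deactivate the session so other apps get their audio back.
    [[AVAudioSession sharedInstance] setActive:NO
                                   withOptions:AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation
                                         error:nil];
}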