CMSampleBufferRef与byte*互相转换(audio)
2017-01-28 02:47
1046 查看
byte*转CMSampleBufferRef
// Describes the PCM layout produced/consumed by this class:
// interleaved 16-bit signed-integer stereo at 44.1 kHz, packed
// (2 channels x 2 bytes = 4 bytes per frame, 1 frame per packet).
-(AudioStreamBasicDescription) getAudioFormat{
AudioStreamBasicDescription asbd = {0};
asbd.mSampleRate       = 44100;
asbd.mFormatID         = kAudioFormatLinearPCM;
asbd.mFormatFlags      = kLinearPCMFormatFlagIsPacked | kLinearPCMFormatFlagIsSignedInteger;
asbd.mChannelsPerFrame = 2;
asbd.mBitsPerChannel   = 16;
asbd.mFramesPerPacket  = 1;          // uncompressed PCM: one frame per packet
asbd.mBytesPerFrame    = 2 * 2;      // channels * bytes-per-sample
asbd.mBytesPerPacket   = 2 * 2;      // == mBytesPerFrame * mFramesPerPacket
asbd.mReserved         = 0;
return asbd;
}
/// Wraps raw interleaved 16-bit stereo PCM bytes in a CMSampleBufferRef.
///
/// @param audioData Pointer to the PCM bytes. The buffer list references the
///        memory directly, so it must remain valid until the returned sample
///        buffer has been consumed.
/// @param len Size of audioData in BYTES — despite the "frames:" keyword,
///        every use below divides by bytes-per-frame. TODO(review): confirm
///        with callers and consider renaming the keyword.
/// @return A +1-retained CMSampleBufferRef the caller must release, or NULL
///         on failure.
- (CMSampleBufferRef)createAudioSample:(void *)audioData frames:(UInt32)len
{
int channels = 2;

AudioBufferList audioBufferList;
audioBufferList.mNumberBuffers = 1;
audioBufferList.mBuffers[0].mNumberChannels = channels;
audioBufferList.mBuffers[0].mDataByteSize = len;
audioBufferList.mBuffers[0].mData = audioData;

AudioStreamBasicDescription asbd = [self getAudioFormat];
CMSampleBufferRef buff = NULL;
// The format never varies, so the description is created once and cached.
static CMFormatDescriptionRef format = NULL;

// NOTE(review): the PTS numerator len/2 counts 16-bit samples, while the
// numSamples argument below uses len/(2*channels) frames — these disagree
// for stereo. Confirm which is intended before changing.
CMTime time = CMTimeMake(len/2 , 44100);
CMSampleTimingInfo timing = {CMTimeMake(1,44100), time, kCMTimeInvalid };

OSStatus error = 0;
if (format == NULL) {
    error = CMAudioFormatDescriptionCreate(kCFAllocatorDefault, &asbd, 0, NULL, 0, NULL, NULL, &format);
    if (error) {
        // Fix: previously this error was ignored and CMSampleBufferCreate
        // was called with a NULL format description.
        CLS_LOG(@"CMAudioFormatDescriptionCreate returned error: %ld", (long)error);
        format = NULL;
        return NULL;
    }
}

// numSamples = bytes / (bytes-per-sample * channels) = frame count.
error = CMSampleBufferCreate(kCFAllocatorDefault, NULL, false, NULL, NULL, format, len/(2*channels), 1, &timing, 0, NULL, &buff);
if ( error ) {
    CLS_LOG(@"CMSampleBufferCreate returned error: %ld", (long)error);
    return NULL;
}

error = CMSampleBufferSetDataBufferFromAudioBufferList(buff, kCFAllocatorDefault, kCFAllocatorDefault, 0, &audioBufferList);
if( error )
{
    CLS_LOG(@"CMSampleBufferSetDataBufferFromAudioBufferList returned error: %ld", (long)error);
    CFRelease(buff);  // Fix: the sample buffer was leaked on this error path.
    return NULL;
}
return buff;
}
CMSampleBufferRef转byte*
/// Walks the audio buffers of a sample buffer to reach the raw PCM bytes.
/// The block buffer is returned retained (+1) by Core Media and released here.
-(void) getAudioData: (CMSampleBufferRef)sampleBuffer{
AudioBufferList audioBufferList;
// Fix: initialize to NULL so a failed call cannot leave garbage that the
// CFRelease below would then dereference.
CMBlockBufferRef blockBuffer = NULL;
OSStatus status = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer, NULL, &audioBufferList, sizeof(audioBufferList), NULL, NULL, 0, &blockBuffer);
if (status != noErr) {
    // Fix: the return status was previously ignored; on failure the buffer
    // list contents are undefined, so bail out instead of iterating.
    return;
}
// mNumberBuffers is UInt32 — use an unsigned index to avoid a
// signed/unsigned comparison warning.
for (UInt32 y = 0; y < audioBufferList.mNumberBuffers; y++)
{
    AudioBuffer audioBuffer = audioBufferList.mBuffers[y];
    void *audio = audioBuffer.mData;  // raw interleaved PCM bytes live here
    (void)audio;  // consume the data at this point; cast silences unused-var warning
}
if (blockBuffer) {
    CFRelease(blockBuffer);  // balance the +1 retain from the "Retained" API
}
}
// Describes the PCM layout used by this class: interleaved 16-bit
// signed-integer stereo at 44.1 kHz, packed.
-(AudioStreamBasicDescription) getAudioFormat{
AudioStreamBasicDescription format;
format.mSampleRate = 44100;
format.mFormatID = kAudioFormatLinearPCM;
format.mFormatFlags = kLinearPCMFormatFlagIsPacked | kLinearPCMFormatFlagIsSignedInteger;
format.mBytesPerPacket = 2*2;   // channels * bytes-per-sample, 1 frame/packet
format.mFramesPerPacket = 1;    // uncompressed PCM: one frame per packet
format.mBytesPerFrame = 2*2;    // 2 channels * 2 bytes (16-bit samples)
format.mChannelsPerFrame = 2;
format.mBitsPerChannel = 16;
format.mReserved = 0;
return format;
}
/// Wraps raw interleaved 16-bit stereo PCM bytes in a CMSampleBufferRef.
///
/// @param audioData Pointer to the PCM bytes. The buffer list references the
///        memory directly, so it must remain valid until the returned sample
///        buffer has been consumed.
/// @param len Size of audioData in BYTES — despite the "frames:" keyword,
///        every use below divides by bytes-per-frame. TODO(review): confirm
///        with callers and consider renaming the keyword.
/// @return A +1-retained CMSampleBufferRef the caller must release, or NULL
///         on failure.
- (CMSampleBufferRef)createAudioSample:(void *)audioData frames:(UInt32)len
{
int channels = 2;

AudioBufferList audioBufferList;
audioBufferList.mNumberBuffers = 1;
audioBufferList.mBuffers[0].mNumberChannels = channels;
audioBufferList.mBuffers[0].mDataByteSize = len;
audioBufferList.mBuffers[0].mData = audioData;

AudioStreamBasicDescription asbd = [self getAudioFormat];
CMSampleBufferRef buff = NULL;
// The format never varies, so the description is created once and cached.
static CMFormatDescriptionRef format = NULL;

// NOTE(review): the PTS numerator len/2 counts 16-bit samples, while the
// numSamples argument below uses len/(2*channels) frames — these disagree
// for stereo. Confirm which is intended before changing.
CMTime time = CMTimeMake(len/2 , 44100);
CMSampleTimingInfo timing = {CMTimeMake(1,44100), time, kCMTimeInvalid };

OSStatus error = 0;
if (format == NULL) {
    error = CMAudioFormatDescriptionCreate(kCFAllocatorDefault, &asbd, 0, NULL, 0, NULL, NULL, &format);
    if (error) {
        // Fix: previously this error was ignored and CMSampleBufferCreate
        // was called with a NULL format description.
        CLS_LOG(@"CMAudioFormatDescriptionCreate returned error: %ld", (long)error);
        format = NULL;
        return NULL;
    }
}

// numSamples = bytes / (bytes-per-sample * channels) = frame count.
error = CMSampleBufferCreate(kCFAllocatorDefault, NULL, false, NULL, NULL, format, len/(2*channels), 1, &timing, 0, NULL, &buff);
if ( error ) {
    CLS_LOG(@"CMSampleBufferCreate returned error: %ld", (long)error);
    return NULL;
}

error = CMSampleBufferSetDataBufferFromAudioBufferList(buff, kCFAllocatorDefault, kCFAllocatorDefault, 0, &audioBufferList);
if( error )
{
    CLS_LOG(@"CMSampleBufferSetDataBufferFromAudioBufferList returned error: %ld", (long)error);
    CFRelease(buff);  // Fix: the sample buffer was leaked on this error path.
    return NULL;
}
return buff;
}
CMSampleBufferRef转byte*
/// Walks the audio buffers of a sample buffer to reach the raw PCM bytes.
/// The block buffer is returned retained (+1) by Core Media and released here.
-(void) getAudioData: (CMSampleBufferRef)sampleBuffer{
AudioBufferList audioBufferList;
// Fix: initialize to NULL so a failed call cannot leave garbage that the
// CFRelease below would then dereference.
CMBlockBufferRef blockBuffer = NULL;
OSStatus status = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer, NULL, &audioBufferList, sizeof(audioBufferList), NULL, NULL, 0, &blockBuffer);
if (status != noErr) {
    // Fix: the return status was previously ignored; on failure the buffer
    // list contents are undefined, so bail out instead of iterating.
    return;
}
// mNumberBuffers is UInt32 — use an unsigned index to avoid a
// signed/unsigned comparison warning.
for (UInt32 y = 0; y < audioBufferList.mNumberBuffers; y++)
{
    AudioBuffer audioBuffer = audioBufferList.mBuffers[y];
    void *audio = audioBuffer.mData;  // raw interleaved PCM bytes live here
    (void)audio;  // consume the data at this point; cast silences unused-var warning
}
if (blockBuffer) {
    CFRelease(blockBuffer);  // balance the +1 retain from the "Retained" API
}
}
相关文章推荐
- CMSampleBufferRef与byte*互相转换(audio)
- CMSampleBufferRef与byte*互相转换
- CMSampleBufferRef 与 UIImage 的转换
- CMSampleBufferRef转换为nsdata对象
- CMSampleBufferRef 与 UIImage 的转换
- CVPixelBufferRef与UIImage的互相转换
- get buffer from CMSampleBufferRef
- CMSampleBufferRef获取h264 char*数据及sps/pps
- 【iOS初学笔记】CMSampleBufferRef 转 UIImage
- CMSampleBufferRef TO NSData
- CMSampleBufferRef转CIImage
- CMSampleBufferRef转UIImage
- ByteBuffer和String的互相转换
- Java中byte与(16进制)字符串的互相转换
- C# string byte[] Base64 常用互相转换
- int byte互相转换
- Java中byte与16进制字符串的互相转换
- QByteArray中data成员函数,可实现QByteArray与QString互相转换
- Flex中利用ByteArray与BitmapData互相转换实现图片的二进制保存与复原
- Object,byte[],ByteBuffer之间的转换。