Implementing Video Chat on iPhone
2013-06-25 18:15
In a video chat, each side grabs buffered frames from the camera, converts them to NSData, and sends them over the network; the receiver turns the NSData back into an image. With both sides continuously sending, receiving, and rendering frames, you get video chat. Enough talk, here is the code.
First, create the video input and output:
NSError *error = nil;

// Set up the video input: prefer the front camera for a chat scenario.
AVCaptureDevice *videoDevice = [self getFrontCamera]; // or [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
// Create a device input with the device and add it to the session.
AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];

// Set up the video output. BGRA frames are easy to turn into CGImages later.
_videoOutput = [[AVCaptureVideoDataOutput alloc] init];
_videoOutput.alwaysDiscardsLateVideoFrames = NO; // for live chat, YES (dropping late frames) is usually preferable
_videoOutput.videoSettings =
    [NSDictionary dictionaryWithObject:
        [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                forKey:(id)kCVPixelBufferPixelFormatTypeKey];

// Create the session and wire everything up.
_capSession = [[AVCaptureSession alloc] init];
[_capSession addInput:videoInput];
//[_capSession addInput:audioInput];
[_capSession addOutput:_videoOutput];
//[_capSession addOutput:_audioOutput];
_capSession.sessionPreset = AVCaptureSessionPresetLow; // low preset keeps frames small for the network

// Deliver sample buffers on a dedicated serial queue.
dispatch_queue_t queue = dispatch_queue_create("MyQueue", NULL);
[_videoOutput setSampleBufferDelegate:self queue:queue];
//[_audioOutput setSampleBufferDelegate:self queue:queue]; // the audio output is never created above, so keep this commented out
dispatch_release(queue);

[_capSession startRunning];
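The setup above calls [self getFrontCamera], a helper the post never shows. A minimal sketch of what it could look like (the fallback to the default device is an assumption, not part of the original code):

// Assumed implementation of the helper used above: return the front-facing
// camera if the device has one, otherwise fall back to the default camera.
- (AVCaptureDevice *)getFrontCamera {
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if (device.position == AVCaptureDevicePositionFront) {
            return device;
        }
    }
    return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}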
Camera frames are delivered through the AVCaptureVideoDataOutputSampleBufferDelegate callback:
#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    // Wrap the CMSampleBufferRef pointer itself in NSData. This only carries a
    // pointer, so it can only work while sender and receiver live in the same
    // process; see the serialization sketch below for a real network send.
    NSData *data = [NSData dataWithBytes:&sampleBuffer length:sizeof(sampleBuffer)];
    [self receiveVideoFromData:data];
}
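As the comment notes, the NSData above holds nothing but a pointer, so it cannot cross a device boundary. For an actual network send you would serialize the frame itself. The sketch below is an assumption rather than part of the original post; it compresses each frame to JPEG, and the helper name jpegDataFromSampleBuffer: is invented for illustration:

#import <CoreImage/CoreImage.h> // for CIImage / CIContext

// Hypothetical helper, not from the original post: compresses one captured
// frame to JPEG so the bytes can genuinely be sent over a socket.
- (NSData *)jpegDataFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:imageBuffer];
    CIContext *context = [CIContext contextWithOptions:nil];
    CGImageRef cgImage = [context createCGImage:ciImage fromRect:[ciImage extent]];
    UIImage *image = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);
    return UIImageJPEGRepresentation(image, 0.5); // trade quality for bandwidth
}

The JPEG bytes can then be written to whatever transport you use, ideally with a length prefix so the receiver knows where each frame ends.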
After receiving the data, convert it back into an image:
- (void)receiveVideoFromData:(NSData *)data {
    // Read the CMSampleBufferRef pointer back out of the NSData. As on the
    // sending side, this only works while both ends are in the same process.
    CMSampleBufferRef sampleBuffer;
    [data getBytes:&sampleBuffer length:sizeof(sampleBuffer)];

    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];

    // Lock the pixel buffer so its base address can be read directly.
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Wrap the BGRA pixels in a bitmap context and turn them into a CGImage.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress,
                                                    width, height, 8,
                                                    bytesPerRow, colorSpace,
                                                    kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);

    UIImage *image = [UIImage imageWithCGImage:newImage scale:1.0
                                   orientation:UIImageOrientationRight];
    CGImageRelease(newImage);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    // Hand the frame to the UI on the main thread (imageView is an assumed UIImageView property).
    dispatch_async(dispatch_get_main_queue(), ^{
        self.imageView.image = image;
    });

    [pool drain];
}
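If the sender uses the JPEG sketch shown earlier instead of passing a raw pointer, the receiving side becomes much simpler. This counterpart is likewise an assumption; remoteImageView is an invented UIImageView property:

// Hypothetical counterpart to the JPEG sender sketch above: decode the
// received JPEG bytes and display them.
- (void)receiveJPEGData:(NSData *)data {
    UIImage *image = [UIImage imageWithData:data];
    dispatch_async(dispatch_get_main_queue(), ^{
        self.remoteImageView.image = image; // assumed UIImageView showing the peer's video
    });
}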