您的位置:首页 > 其它

iPhone摄像头设备获取

2012-04-12 22:45 302 查看
又写了个新版的帖子,看不懂怎么用的请看本人这个新帖,再不懂,那我也没办法了。
iPhone摄像头设备获取(分离简化版)

目的:打开、关闭前置摄像头,绘制图像,并获取摄像头的二进制数据。
需要的库
AVFoundation.framework、CoreVideo.framework、CoreMedia.framework、QuartzCore.framework
该摄像头捕捉功能必须编译真机版本,模拟器下编译不了。
函数说明

-(void)createControl
{
//UI界面控件的创建
}

-(AVCaptureDevice*)getFrontCamera;
获取前置摄像头设备
-(void)startVideoCapture;
打开摄像头并开始捕捉图像
其中代码:
AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self->avCaptureSession];
previewLayer.frame = localView.bounds;
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self->localView.layer addSublayer:previewLayer];
为把图片画到UIView里面

-(void)stopVideoCapture:(id)arg;

关闭摄像头,停止捕抓图像
其中代码:

for (UIView *view in self->localView.subviews) {
    [view removeFromSuperview];
}

为移除摄像头图像的View
详情见代码,代码拷过去可以直接使用Over!!!!

代码:
头文件:

//
//AVCallController.h
//Pxlinstall
//
//CreatedbyLinCharlieC.on11-3-24.
//Copyright2011xxxx.Allrightsreserved.
//

#import<UIKit/UIKit.h>
#import<AVFoundation/AVFoundation.h>

/// View controller demonstrating front-camera capture with AVFoundation.
/// Shows a live preview in `localView` and receives raw frames through
/// AVCaptureVideoDataOutputSampleBufferDelegate.
/// NOTE(review): pre-ARC (MRC) code; properties use `retain` accordingly.
@interface AVCallController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate>
{
    // UI
    UILabel *labelState;        // status text shown to the user
    UIButton *btnStartVideo;    // "Star(t)" button
    UIView *localView;          // host view for the camera preview layer

    AVCaptureSession *avCaptureSession;  // non-nil only while capturing
    AVCaptureDevice *avCaptureDevice;    // selected camera (front preferred)
    BOOL firstFrame;            // YES until the first frame has been inspected
    int producerFps;            // requested capture frame rate
}
@property (nonatomic, retain) AVCaptureSession *avCaptureSession;
@property (nonatomic, retain) UILabel *labelState;

// Builds the UI controls programmatically (called from loadView).
- (void)createControl;
// Returns the front camera, or the default video device as a fallback.
- (AVCaptureDevice *)getFrontCamera;
// Opens the camera and starts delivering frames + on-screen preview.
- (void)startVideoCapture;
// Stops the session and tears down the preview (target/action signature).
- (void)stopVideoCapture:(id)arg;
@end
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
实现文件:
//
//AVCallController.m
//Pxlinstall
//
//CreatedbyLinCharlieC.on11-3-24.
//Copyright2011高鸿移通.Allrightsreserved.
//

#import"AVCallController.h"

@implementation AVCallController

@synthesize avCaptureSession;
@synthesize labelState;

// Designated initializer: sets capture defaults before any UI exists.
- (id)init
{
    if (self = [super init])
    {
        firstFrame = YES;
        producerFps = 50;   // requested fps; actual rate is device-limited
    }
    return self;
}

// Build the view hierarchy programmatically (no nib).
- (void)loadView
{
    [super loadView];
    [self createControl];
}

- (void)didReceiveMemoryWarning
{
    // Releases the view if it doesn't have a superview.
    [super didReceiveMemoryWarning];
}

- (void)viewDidUnload
{
    [super viewDidUnload];
    // Release any retained subviews of the main view here if added later.
}

- (void)dealloc
{
    // FIX: the original leaked the session when the controller was destroyed
    // while still capturing (MRC — the alloc in startVideoCapture was never
    // balanced). Releasing nil is a harmless no-op.
    [avCaptureSession release];
    [super dealloc];
}

#pragma mark -
#pragma mark createControl

// Creates the status label, Start/Stop buttons and the preview host view.
- (void)createControl
{
    self.view.backgroundColor = [UIColor grayColor];

    labelState = [[UILabel alloc] initWithFrame:CGRectMake(10, 20, 220, 30)];
    labelState.backgroundColor = [UIColor clearColor];
    [self.view addSubview:labelState];
    [labelState release];   // ownership transferred to the view hierarchy

    btnStartVideo = [[UIButton alloc] initWithFrame:CGRectMake(20, 350, 80, 50)];
    [btnStartVideo setTitle:@"Star" forState:UIControlStateNormal];
    [btnStartVideo setBackgroundImage:[UIImage imageNamed:@"Images/button.png"]
                             forState:UIControlStateNormal];
    [btnStartVideo addTarget:self
                      action:@selector(startVideoCapture)
            forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:btnStartVideo];
    [btnStartVideo release];

    UIButton *stop = [[UIButton alloc] initWithFrame:CGRectMake(120, 350, 80, 50)];
    [stop setTitle:@"Stop" forState:UIControlStateNormal];
    [stop setBackgroundImage:[UIImage imageNamed:@"Images/button.png"]
                    forState:UIControlStateNormal];
    [stop addTarget:self
             action:@selector(stopVideoCapture:)
   forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:stop];
    [stop release];

    localView = [[UIView alloc] initWithFrame:CGRectMake(40, 50, 200, 300)];
    [self.view addSubview:localView];
    [localView release];    // retained by self.view
}

#pragma mark -
#pragma mark VideoCapture

// Returns the front-facing camera if present, otherwise the default
// video capture device (e.g. on hardware with no front camera).
- (AVCaptureDevice *)getFrontCamera
{
    NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in cameras)
    {
        if (device.position == AVCaptureDevicePositionFront)
            return device;
    }
    return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}

// Opens the camera, wires up the data output + preview layer, and starts
// the session. No-op (with a status message) if capture is already running.
- (void)startVideoCapture
{
    [labelState setText:@"Starting Video stream"];
    if (self->avCaptureDevice || self->avCaptureSession)
    {
        [labelState setText:@"Already capturing"];
        return;
    }

    if ((self->avCaptureDevice = [self getFrontCamera]) == nil)
    {
        [labelState setText:@"Failed to get valide capture device"];
        return;
    }

    NSError *error = nil;
    AVCaptureDeviceInput *videoInput =
        [AVCaptureDeviceInput deviceInputWithDevice:self->avCaptureDevice error:&error];
    if (!videoInput)
    {
        [labelState setText:@"Failed to get video input"];
        self->avCaptureDevice = nil;
        return;
    }

    self->avCaptureSession = [[AVCaptureSession alloc] init];   // released in stopVideoCapture:/dealloc
    self->avCaptureSession.sessionPreset = AVCaptureSessionPresetLow;
    [self->avCaptureSession addInput:videoInput];

    // Currently, the only supported key is kCVPixelBufferPixelFormatTypeKey.
    // Recommended pixel format choices are
    // kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange or kCVPixelFormatType_32BGRA.
    // On iPhone 3G, the recommended choices are kCVPixelFormatType_422YpCbCr8
    // or kCVPixelFormatType_32BGRA.
    AVCaptureVideoDataOutput *avCaptureVideoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    NSDictionary *settings = [[NSDictionary alloc] initWithObjectsAndKeys:
        //[NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey,
        [NSNumber numberWithInt:240], (id)kCVPixelBufferWidthKey,
        [NSNumber numberWithInt:320], (id)kCVPixelBufferHeightKey,
        nil];
    avCaptureVideoDataOutput.videoSettings = settings;
    [settings release];
    // NOTE(review): minFrameDuration is era-appropriate (iOS 4) but deprecated
    // since iOS 5 in favor of AVCaptureConnection.videoMinFrameDuration.
    avCaptureVideoDataOutput.minFrameDuration = CMTimeMake(1, self->producerFps);

    /* Serial queue so sample buffers are delivered to us in order. */
    dispatch_queue_t queue = dispatch_queue_create("org.doubango.idoubs", NULL);
    [avCaptureVideoDataOutput setSampleBufferDelegate:self queue:queue];
    [self->avCaptureSession addOutput:avCaptureVideoDataOutput];
    [avCaptureVideoDataOutput release];   // retained by the session
    dispatch_release(queue);

    // Draw the live camera picture into localView.
    AVCaptureVideoPreviewLayer *previewLayer =
        [AVCaptureVideoPreviewLayer layerWithSession:self->avCaptureSession];
    previewLayer.frame = localView.bounds;
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self->localView.layer addSublayer:previewLayer];

    self->firstFrame = YES;
    [self->avCaptureSession startRunning];

    [labelState setText:@"Video capture started"];
}

// Stops the running session, releases it, and clears the preview views.
- (void)stopVideoCapture:(id)arg
{
    if (self->avCaptureSession) {
        [self->avCaptureSession stopRunning];
        // FIX: the original set the ivar to nil without releasing, leaking
        // the session allocated in startVideoCapture (MRC).
        [self->avCaptureSession release];
        self->avCaptureSession = nil;
        [labelState setText:@"Video capture stopped"];
    }
    self->avCaptureDevice = nil;   // not retained by us; just drop the reference
    // Remove everything the preview added inside localView.
    for (UIView *view in self->localView.subviews) {
        [view removeFromSuperview];
    }
}

#pragma mark -
#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

// Called on the capture queue for every frame. Process the raw pixel data
// however you like; here we only log the pixel format on the first frame.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    /* Lock the buffer before touching its base address. */
    if (CVPixelBufferLockBaseAddress(pixelBuffer, 0) == kCVReturnSuccess)
    {
        UInt8 *bufferPtr = (UInt8 *)CVPixelBufferGetBaseAddress(pixelBuffer);
        size_t buffeSize = CVPixelBufferGetDataSize(pixelBuffer);
        (void)bufferPtr;   // raw frame bytes — hand these to your consumer
        (void)buffeSize;

        if (self->firstFrame)
        {
            // First frame: record width/height and pixel format type.
            int width = (int)CVPixelBufferGetWidth(pixelBuffer);
            int height = (int)CVPixelBufferGetHeight(pixelBuffer);
            (void)width;
            (void)height;

            int pixelFormat = (int)CVPixelBufferGetPixelFormatType(pixelBuffer);
            switch (pixelFormat) {
                case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
                    //TMEDIA_PRODUCER(producer)->video.chroma = tmedia_nv12; // iPhone 3GS or 4
                    NSLog(@"Capture pixel format=NV12");
                    break;
                case kCVPixelFormatType_422YpCbCr8:
                    //TMEDIA_PRODUCER(producer)->video.chroma = tmedia_uyvy422; // iPhone 3
                    NSLog(@"Capture pixel format=UYUY422");
                    break;
                default:
                    //TMEDIA_PRODUCER(producer)->video.chroma = tmedia_rgb32;
                    NSLog(@"Capture pixel format=RGB32");
                    break;
            }

            self->firstFrame = NO;
        }
        /* Unlock once we are done reading. */
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    }
    // The original sample also carried a large commented-out example that
    // rendered each frame to a CGImage/UIImage on the main thread (via an
    // NSAutoreleasePool, CGBitmapContextCreate and
    // performSelectorOnMainThread:). It was dead code and is omitted here;
    // use an AVCaptureVideoPreviewLayer for on-screen display instead.
}
@end


简化版:

这次把视频捕获的彻底的分出来了。应该每个都看得懂怎么用吧。
贴上代码略说明下
需要的库
AVFoundation.framework、CoreVideo.framework、CoreMedia.framework
该摄像头捕捉功能必须编译真机版本且要 SDK 4.0 以上,模拟器下编译不了。
[font=]文件说明:
[font=]CameraHelp.h/.m为主要文件即摄像头捕获
[font=]VideoController.h/.m为调用示例

//
//CameraHelp.h
//
//
//CreatedbyZhuangChuanXian.on11-6-28.
//Copyright2011.Allrightsreserved.
//
#import<UIKit/UIKit.h>
#import<Foundation/Foundation.h>
#import<AVFoundation/AVFoundation.h>

// Video capture is only compiled for device builds (iOS 4.0+, embedded target);
// the simulator has no camera, so the AVFoundation paths are #if'd out.
#undef PRODUCER_HAS_VIDEO_CAPTURE
#define PRODUCER_HAS_VIDEO_CAPTURE (__IPHONE_OS_VERSION_MIN_REQUIRED >= 40000 && TARGET_OS_EMBEDDED)

/// Receives each captured frame as a raw byte buffer.
/// Implement this to consume camera data (the callback is required — the
/// helper calls it unconditionally once a delegate is set).
@protocol CameraHelpDelegate
- (void)VideoDataOutputBuffer:(char *)videoBuffer dataSize:(int)size;
@end

/// Singleton camera-capture helper: configure, start/stop capture, attach a
/// preview view, and stream raw frame bytes to a CameraHelpDelegate.
@interface CameraHelp : NSObject
#if PRODUCER_HAS_VIDEO_CAPTURE
<AVCaptureVideoDataOutputSampleBufferDelegate>
#endif
{
@private
    int mWidth;                 // requested frame width
    int mHeight;                // requested frame height
    int mFps;                   // requested frame rate
    BOOL mFrontCamera;          // YES = use the front-facing camera
    BOOL mFirstFrame;           // YES until the first frame arrives
    BOOL mStarted;              // capture currently running
    UIView *mPreview;           // host view for the preview layer
    id<CameraHelpDelegate> outDelegate;  // frame consumer (not retained)
#if PRODUCER_HAS_VIDEO_CAPTURE
    AVCaptureSession *mCaptureSession;
    AVCaptureDevice *mCaptureDevice;
#endif
}
// Shared singleton accessor.
+ (CameraHelp *)shareCameraHelp;
// Destroys the singleton; call on app shutdown to avoid leaking it.
+ (void)closeCamera;
// Switches to the front camera. Returns NO if unavailable.
- (BOOL)setFrontCamera;
// Switches to the back camera. Returns NO if unavailable.
- (BOOL)setBackCamera;
// Sets capture parameters; call before startVideoCapture.
- (void)prepareVideoCapture:(int)width andHeight:(int)height andFps:(int)fps andFrontCamera:(BOOL)bfront andPreview:(UIView *)view;
// Starts capturing frames.
- (void)startVideoCapture;
// Stops capturing frames.
- (void)stopVideoCapture;
// Sets the view that hosts the live preview.
- (void)setPreview:(UIView *)preview;
// Sets (or clears, with nil) the frame-data delegate.
- (void)setVideoDataOutputBuffer:(id<CameraHelpDelegate>)delegate;
@end

实现的自己下载例子,该例可以在编译运行,函数不懂的看这iPhone摄像头设备获取

下面是调用说明,很简单,就几句话:
//
//VideoController.m
//
//
//Createdbyzcx.on11-6-28.
//Copyright2011.Allrightsreserved.
//

#import"VideoController.h"
#import"CameraHelp.h"

/// Example client of CameraHelp: wires the singleton to a view and logs the
/// size of every frame it receives. NOTE(review): pre-ARC (MRC) code.
@implementation VideoController

@synthesize videoView;

// Designated initializer. Override if you create the controller
// programmatically and want customization not appropriate for viewDidLoad.
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
    self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
    if (self) {
        self.modalTransitionStyle = UIModalTransitionStyleFlipHorizontal;
        self.modalPresentationStyle = UIModalPresentationFullScreen;
    }
    return self;
}

// Capturing boils down to the three CameraHelp calls below.
- (void)viewDidLoad
{
    [super viewDidLoad];
    [self setTitle:@"VideoCapture"];
    // 1. Set the view the preview is rendered into.
    [[CameraHelp shareCameraHelp] setPreview:videoView];
    // 2. Route captured frame data to self (CameraHelpDelegate).
    [[CameraHelp shareCameraHelp] setVideoDataOutputBuffer:self];
    // 3. Start capturing.
    [[CameraHelp shareCameraHelp] startVideoCapture];
}

// CameraHelpDelegate: must be implemented once registered as the data
// delegate — CameraHelp calls it unconditionally, so omitting it crashes.
- (void)VideoDataOutputBuffer:(char *)videoBuffer dataSize:(int)size
{
    NSLog(@"RecvData size=%d", size);
}

- (void)didReceiveMemoryWarning
{
    // Releases the view if it doesn't have a superview.
    [super didReceiveMemoryWarning];
}

- (void)viewDidUnload
{
    [super viewDidUnload];
    // Release any retained subviews of the main view here if added later.
}

- (void)dealloc
{
    // Stop capturing and detach ourselves as the frame delegate so the
    // singleton does not call back into a deallocated object.
    [[CameraHelp shareCameraHelp] stopVideoCapture];
    [[CameraHelp shareCameraHelp] setVideoDataOutputBuffer:nil];
    [super dealloc];
}

// Dismisses this modally-presented example controller.
- (IBAction)onButtonEndClick:(id)sender
{
    [self dismissModalViewControllerAnimated:YES];
}
@end

最后程序关闭时记得调用
[CameraHelpcloseCamera];
不然会内存泄露的
最后觉得好的话顶下哈。。。。
  







                                            
内容来自用户分享和网络整理,不保证内容的准确性,如有侵权内容,可联系管理员处理 点击这里给我发消息
标签: