opencv在iOS中的使用[几个滤波]以及uiimage与iplimage互转
2016-03-31 10:13
661 查看
最近由于项目需要用到美颜,因此初步使用了opencv进行了研究,思路如下:
1 使用滤波,可以采用双边滤波或者高斯滤波;
2 肤色检测;
3 图像的融合,即1和2点融合;
4 锐化增强细节;
但是使用发现opencv不适合移动端的使用,尽量还是采用opengl去做图像处理,利用gpu可以降低cpu使用率;
下面贴出一些我在使用过程中的接口:(参考网上的资料比较多)
特别是uiimage和iplimage互转的时候注意 rotate(orientation:UIImageOrientationRight)
// ---------------------------------------------------------------------------
// OpenCV <-> UIKit conversion and simple filtering helpers.
// Uses the legacy OpenCV C API (IplImage / cv*). Results are stored in the
// m_image ivar.
// ---------------------------------------------------------------------------

/// Converts a UIImage into a 3-channel (BGR) IplImage.
/// NOTE: the caller owns the returned image and MUST cvReleaseImage() it.
- (IplImage *)CreateIplImageFromUIImage:(UIImage *)image {
    CGImageRef imageRef = image.CGImage;

    // Render the UIImage into an RGBA bitmap whose backing buffer is the
    // IplImage's pixel data.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    IplImage *iplimage = cvCreateImage(cvSize(image.size.width, image.size.height),
                                       IPL_DEPTH_8U, 4);
    CGContextRef contextRef =
        CGBitmapContextCreate(iplimage->imageData,
                              iplimage->width,
                              iplimage->height,
                              iplimage->depth,      // 8 bits per component
                              iplimage->widthStep,
                              colorSpace,
                              kCGImageAlphaPremultipliedLast | kCGBitmapByteOrderDefault);
    CGContextDrawImage(contextRef,
                       CGRectMake(0, 0, image.size.width, image.size.height),
                       imageRef);
    CGContextRelease(contextRef);
    CGColorSpaceRelease(colorSpace);

    // Drop the alpha channel and reorder to BGR, which is the layout the
    // OpenCV processing methods below expect.
    IplImage *ret = cvCreateImage(cvGetSize(iplimage), IPL_DEPTH_8U, 3);
    cvCvtColor(iplimage, ret, CV_RGBA2BGR);
    cvReleaseImage(&iplimage);
    return ret;
}

/// Converts an IplImage back into a UIImage.
/// NOTE: `image` must be a 3-channel BGR image (as produced by
/// CreateIplImageFromUIImage:); it is converted to RGB in place.
- (UIImage *)UIImageFromIplImage:(IplImage *)image {
    NSLog(@"IplImage (%d, %d) %d bits by %d channels, %d bytes/row %s",
          image->width, image->height, image->depth, image->nChannels,
          image->widthStep, image->channelSeq);

    cvCvtColor(image, image, CV_BGR2RGB);

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    // Copy the pixels into an NSData so the caller may release the IplImage
    // as soon as this method returns.
    NSData *data = [NSData dataWithBytes:image->imageData length:image->imageSize];
    // __bridge: hand the NSData to a CF API without transferring ownership (ARC).
    CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);
    CGImageRef imageRef = CGImageCreate(image->width,
                                        image->height,
                                        image->depth,
                                        image->depth * image->nChannels,
                                        image->widthStep,
                                        colorSpace,
                                        kCGImageAlphaNone | kCGBitmapByteOrderDefault,
                                        provider,
                                        NULL,
                                        false,
                                        kCGRenderingIntentDefault);
    // UIImageOrientationRight compensates for the rotation introduced by the
    // UIImage -> IplImage round trip (see the note at the top of this post).
    UIImage *ret = [UIImage imageWithCGImage:imageRef
                                       scale:1
                                 orientation:UIImageOrientationRight];
    CGImageRelease(imageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);
    return ret;
}

/// Per-channel histogram equalization; stores the result in m_image.
- (void)process_Histogram:(UIImage *)imageView {
    IplImage *img_color = [self CreateIplImageFromUIImage:imageView];
    IplImage *imgChannel[4] = { 0, 0, 0, 0 };
    IplImage *dst = cvCreateImage(cvGetSize(img_color), IPL_DEPTH_8U, 3);

    if (img_color) {
        // cvEqualizeHist only accepts single-channel images, so split first.
        for (int i = 0; i < img_color->nChannels; i++) {
            imgChannel[i] = cvCreateImage(cvGetSize(img_color), IPL_DEPTH_8U, 1);
        }
        // Unused trailing slots stay NULL, which cvSplit/cvMerge accept.
        cvSplit(img_color, imgChannel[0], imgChannel[1], imgChannel[2], imgChannel[3]); // BGR(A)

        for (int i = 0; i < dst->nChannels; i++) {
            cvEqualizeHist(imgChannel[i], imgChannel[i]);
        }

        // Recombine the equalized channels.
        cvMerge(imgChannel[0], imgChannel[1], imgChannel[2], imgChannel[3], dst);
        m_image = [self UIImageFromIplImage:dst];

        for (int i = 0; i < img_color->nChannels; i++) {
            if (imgChannel[i]) {
                cvReleaseImage(&imgChannel[i]);
            }
        }
    }
    cvReleaseImage(&dst);
    // BUGFIX: img_color was previously leaked.
    cvReleaseImage(&img_color);
}

/// Smoothing + sharpening pipeline (beauty-filter experiment);
/// stores the result in m_image.
- (void)process_Image:(UIImage *)imageview {
    IplImage *img_color = [self CreateIplImageFromUIImage:imageview];
    IplImage *gaussianImg = cvCreateImage(cvGetSize(img_color), IPL_DEPTH_8U,
                                          img_color->nChannels);
    IplImage *unsharimage = cvCreateImage(cvGetSize(img_color), IPL_DEPTH_8U,
                                          img_color->nChannels);

    // Smoothing. Alternatives tried during development:
    //   cvSmooth(img_color, dst, CV_BLUR, 7, img_color->nChannels);      // 7x7 mean filter
    //   cvSmooth(img_color, dst, CV_MEDIAN, 7, img_color->nChannels);    // 7x7 median filter
    //   cvSmooth(img_color, dst, CV_BILATERAL, 3, 3, 100.0, 100.0);      // bilateral filter
    cvSmooth(img_color, gaussianImg, CV_GAUSSIAN, 5, 5, 3, 3); // 5x5 Gaussian blur

    // TODO: skin detection, image blending (steps 2 and 3 of the pipeline).

    // Sharpening. A Laplacian was also tried:
    //   cvLaplace(gaussianImg, unsharimage);
    float low[9] = {
        1.0 / 16, 2.0 / 16, 1.0 / 16,
        2.0 / 16, 4.0 / 16, 2.0 / 16,
        1.0 / 16, 2.0 / 16, 1.0 / 16
    }; // low-pass kernel
    CvMat kernela = cvMat(3, 3, CV_32FC1, low);
    cvFilter2D(gaussianImg, unsharimage, &kernela);

    m_image = [self UIImageFromIplImage:unsharimage];

    cvReleaseImage(&img_color);
    cvReleaseImage(&gaussianImg);
    // BUGFIX: unsharimage was previously leaked.
    cvReleaseImage(&unsharimage);
}

/// Grayscale conversion; stores the result in m_image.
- (void)process_Gray:(UIImage *)imageview {
    IplImage *img_color = [self CreateIplImageFromUIImage:imageview];
    IplImage *pGrayImage = cvCreateImage(cvGetSize(img_color), IPL_DEPTH_8U, 1);
    IplImage *grayImagePlus = cvCreateImage(cvGetSize(img_color), IPL_DEPTH_8U, 3);

    // A single-channel image cannot be passed to UIImageFromIplImage:
    // (it expects 3-channel BGR), so convert the gray image back to BGR.
    cvCvtColor(img_color, pGrayImage, CV_BGR2GRAY);
    cvCvtColor(pGrayImage, grayImagePlus, CV_GRAY2BGR);
    m_image = [self UIImageFromIplImage:grayImagePlus];

    cvReleaseImage(&img_color);
    cvReleaseImage(&pGrayImage);
    cvReleaseImage(&grayImagePlus);
}
相关文章推荐
- ios UIScrollview UIImageView点击事件处理
- 使用Autolayout实现UITableView的Cell动态布局和高度动态改变
- UIAlertControllr的使用
- Druid基本配置及内置监控使用
- UITableView滚动到底部(用于评论回复)
- UIImage,CGImage和CGImageRef 区别
- continue 与break 的区别
- IOS学习笔记45--UITableView性能优化
- NSTimer 回调事件被UI交互阻塞的解决方法
- UISplitViewController
- 使用SoapUI 测试Web Service
- UIView的一个弹出动画
- iOS之搜索框UISearchController的使用(iOS8.0以后替代UISearchBar+display)
- 编译错误 --- does not name a type和field `XX' has incomplete type
- ios9中 UIStackView的使用
- CodeForces 632D Longest Subsequence(数论)
- UIAlertView利用runtime传值
- [转] How to change font settings for all UI elements (toolbar and context menus, property editors, etc.)
- 设置UINavigationBar的UI
- IOS开发-UI学习-根据URL显示图片,下载图片的练习(button,textfield,image view,url,data)