
[Part 2: Image Processing] Chapter 3: Advanced OpenCV Image Processing [4. Image Contours D]


3.4 Image Moments

Moment functions are widely used, for example in pattern recognition and object tracking. In this section, let's lift the veil on them together.

3.4.1 Image Moment APIs

3.4.1.1 Image Moment API Explanation

• Computing moments: the moments() function

C++: Moments moments( InputArray array,
bool binaryImage=false )


[Parameters]

First parameter, array – Raster image (single-channel, 8-bit or floating-point 2D array) or an array (1×N or N×1) of 2D points (Point or Point2f).

Second parameter, binaryImage – If it is true, all non-zero image pixels are treated as 1's. The parameter is used for images only.

[Return value]

moments – Output moments.
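As a quick illustration of how moments() is typically called, here is a minimal sketch; the synthetic rectangle image and the centroid computation are illustrative assumptions, not part of the original text.

#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include <iostream>

int main()
{
    // A small synthetic binary image: a filled rectangle on a black background.
    cv::Mat img = cv::Mat::zeros(100, 100, CV_8UC1);
    cv::rectangle(img, cv::Rect(20, 30, 40, 20), cv::Scalar(255), cv::FILLED);

    // binaryImage = true: every non-zero pixel is counted as 1.
    cv::Moments m = cv::moments(img, true);

    // The centroid follows directly from the spatial moments.
    double cx = m.m10 / m.m00;
    double cy = m.m01 / m.m00;
    std::cout << "area (m00) = " << m.m00
              << ", centroid = (" << cx << ", " << cy << ")" << std::endl;
    return 0;
}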

• Computing contour area: the contourArea() function

C++: double contourArea(InputArray contour,
bool oriented=false )


[Parameters]

First parameter, contour – Input vector of 2D points (contour vertices), stored in std::vector or Mat.

Second parameter, oriented – Oriented area flag. If it is true, the function returns a signed area value whose sign depends on the contour orientation (clockwise or counter-clockwise). Using this feature you can determine the orientation of a contour by taking the sign of the area. By default, the parameter is false, which means the absolute value is returned. A small usage sketch follows below.
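The following sketch calls contourArea() on a hand-made contour; the specific vertices are assumptions chosen so the result is easy to verify by hand.

#include <opencv2/imgproc.hpp>
#include <iostream>
#include <vector>

int main()
{
    // A 4x2 axis-aligned rectangle described by its four vertices.
    std::vector<cv::Point> contour = { {0, 0}, {4, 0}, {4, 2}, {0, 2} };

    double area       = cv::contourArea(contour);        // absolute area: 8
    double signedArea = cv::contourArea(contour, true);  // signed area; the sign flips if the vertex order is reversed

    std::cout << "area = " << area << ", signed area = " << signedArea << std::endl;
    return 0;
}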

• Computing contour perimeter: the arcLength() function

C++: double arcLength(InputArray curve,
bool closed)


[Parameters]

First parameter, curve – Input vector of 2D points, stored in std::vector or Mat.

Second parameter, closed – Flag indicating whether the curve is closed or not.
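A similar sketch for arcLength(), showing the effect of the closed flag; the 3-4-5 triangle is an assumption chosen so the result can be checked mentally.

#include <opencv2/imgproc.hpp>
#include <iostream>
#include <vector>

int main()
{
    // A 3-4-5 right triangle.
    std::vector<cv::Point> tri = { {0, 0}, {3, 0}, {3, 4} };

    double closedLen = cv::arcLength(tri, true);   // 3 + 4 + 5 = 12: the closing edge is counted
    double openLen   = cv::arcLength(tri, false);  // 3 + 4 = 7: the points are treated as an open polyline

    std::cout << "closed: " << closedLen << ", open: " << openLen << std::endl;
    return 0;
}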

3.4.1.2 Image Moment API Source Code

• Computing moments: the moments() function

/*【moments() source code】*************************************************************
* @Version: OpenCV 3.0.0 (OpenCV 2 and OpenCV 3 differ little here; for a given version the Linux and PC sources are identical, found under the corresponding installation directory)
* @Source path: …\opencv\sources\modules\imgproc\src\moments.cpp
* @Starting line: 556
********************************************************************************/
cv::Moments cv::moments( InputArray _src, bool binary )
{
    const int TILE_SIZE = 32;
    MomentsInTileFunc func = 0;
    uchar nzbuf[TILE_SIZE*TILE_SIZE];
    Moments m;
    int type = _src.type(), depth = CV_MAT_DEPTH(type), cn = CV_MAT_CN(type);
    Size size = _src.size();

    if( size.width <= 0 || size.height <= 0 )
        return m;

#ifdef HAVE_OPENCL
    if( !(ocl::useOpenCL() && type == CV_8UC1  &&
        _src.isUMat() && ocl_moments(_src, m, binary)) )
#endif
    {
        Mat mat = _src.getMat();
        if( mat.checkVector(2) >= 0 && (depth == CV_32F || depth == CV_32S))
            return contourMoments(mat);

        if( cn > 1 )
            CV_Error( CV_StsBadArg, "Invalid image type (must be single-channel)" );

#if IPP_VERSION_X100 >= 801 && 0
        CV_IPP_CHECK()
        {
            if (!binary)
            {
                IppiSize roi = { mat.cols, mat.rows };
                IppiMomentState_64f * moment = NULL;
                // ippiMomentInitAlloc_64f, ippiMomentFree_64f are deprecated in 8.1, but there are not another way
                // to initialize IppiMomentState_64f. When GetStateSize and Init functions will appear we have to
                // change our code.
                CV_SUPPRESS_DEPRECATED_START
                if (ippiMomentInitAlloc_64f(&moment, ippAlgHintAccurate) >= 0)
                {
                    typedef IppStatus (CV_STDCALL * ippiMoments)(const void * pSrc, int srcStep, IppiSize roiSize, IppiMomentState_64f* pCtx);
                    ippiMoments ippFunc =
                        type == CV_8UC1 ? (ippiMoments)ippiMoments64f_8u_C1R :
                        type == CV_16UC1 ? (ippiMoments)ippiMoments64f_16u_C1R :
                        type == CV_32FC1? (ippiMoments)ippiMoments64f_32f_C1R : 0;

                    if (ippFunc)
                    {
                        if (ippFunc(mat.data, (int)mat.step, roi, moment) >= 0)
                        {
                            IppiPoint point = { 0, 0 };
                            ippiGetSpatialMoment_64f(moment, 0, 0, 0, point, &m.m00);
                            ippiGetSpatialMoment_64f(moment, 1, 0, 0, point, &m.m10);
                            ippiGetSpatialMoment_64f(moment, 0, 1, 0, point, &m.m01);

                            ippiGetSpatialMoment_64f(moment, 2, 0, 0, point, &m.m20);
                            ippiGetSpatialMoment_64f(moment, 1, 1, 0, point, &m.m11);
                            ippiGetSpatialMoment_64f(moment, 0, 2, 0, point, &m.m02);

                            ippiGetSpatialMoment_64f(moment, 3, 0, 0, point, &m.m30);
                            ippiGetSpatialMoment_64f(moment, 2, 1, 0, point, &m.m21);
                            ippiGetSpatialMoment_64f(moment, 1, 2, 0, point, &m.m12);
                            ippiGetSpatialMoment_64f(moment, 0, 3, 0, point, &m.m03);
                            ippiGetCentralMoment_64f(moment, 2, 0, 0, &m.mu20);
                            ippiGetCentralMoment_64f(moment, 1, 1, 0, &m.mu11);
                            ippiGetCentralMoment_64f(moment, 0, 2, 0, &m.mu02);
                            ippiGetCentralMoment_64f(moment, 3, 0, 0, &m.mu30);
                            ippiGetCentralMoment_64f(moment, 2, 1, 0, &m.mu21);
                            ippiGetCentralMoment_64f(moment, 1, 2, 0, &m.mu12);
                            ippiGetCentralMoment_64f(moment, 0, 3, 0, &m.mu03);
                            ippiGetNormalizedCentralMoment_64f(moment, 2, 0, 0, &m.nu20);
                            ippiGetNormalizedCentralMoment_64f(moment, 1, 1, 0, &m.nu11);
                            ippiGetNormalizedCentralMoment_64f(moment, 0, 2, 0, &m.nu02);
                            ippiGetNormalizedCentralMoment_64f(moment, 3, 0, 0, &m.nu30);
                            ippiGetNormalizedCentralMoment_64f(moment, 2, 1, 0, &m.nu21);
                            ippiGetNormalizedCentralMoment_64f(moment, 1, 2, 0, &m.nu12);
                            ippiGetNormalizedCentralMoment_64f(moment, 0, 3, 0, &m.nu03);

                            ippiMomentFree_64f(moment);
                            CV_IMPL_ADD(CV_IMPL_IPP);
                            return m;
                        }
                        setIppErrorStatus();
                    }
                    ippiMomentFree_64f(moment);
                }
                else
                    setIppErrorStatus();
                CV_SUPPRESS_DEPRECATED_END
            }
        }
#endif

        if( binary || depth == CV_8U )
            func = momentsInTile<uchar, int, int>;
        else if( depth == CV_16U )
            func = momentsInTile<ushort, int, int64>;
        else if( depth == CV_16S )
            func = momentsInTile<short, int, int64>;
        else if( depth == CV_32F )
            func = momentsInTile<float, double, double>;
        else if( depth == CV_64F )
            func = momentsInTile<double, double, double>;
        else
            CV_Error( CV_StsUnsupportedFormat, "" );

        Mat src0(mat);

        for( int y = 0; y < size.height; y += TILE_SIZE )
        {
            Size tileSize;
            tileSize.height = std::min(TILE_SIZE, size.height - y);

            for( int x = 0; x < size.width; x += TILE_SIZE )
            {
                tileSize.width = std::min(TILE_SIZE, size.width - x);
                Mat src(src0, cv::Rect(x, y, tileSize.width, tileSize.height));

                if( binary )
                {
                    cv::Mat tmp(tileSize, CV_8U, nzbuf);
                    cv::compare( src, 0, tmp, CV_CMP_NE );
                    src = tmp;
                }

                double mom[10];
                func( src, mom );

                if(binary)
                {
                    double s = 1./255;
                    for( int k = 0; k < 10; k++ )
                        mom[k] *= s;
                }

                double xm = x * mom[0], ym = y * mom[0];

                // accumulate moments computed in each tile

                // + m00 ( = m00' )
                m.m00 += mom[0];

                // + m10 ( = m10' + x*m00' )
                m.m10 += mom[1] + xm;

                // + m01 ( = m01' + y*m00' )
                m.m01 += mom[2] + ym;

                // + m20 ( = m20' + 2*x*m10' + x*x*m00' )
                m.m20 += mom[3] + x * (mom[1] * 2 + xm);

                // + m11 ( = m11' + x*m01' + y*m10' + x*y*m00' )
                m.m11 += mom[4] + x * (mom[2] + ym) + y * mom[1];

                // + m02 ( = m02' + 2*y*m01' + y*y*m00' )
                m.m02 += mom[5] + y * (mom[2] * 2 + ym);

                // + m30 ( = m30' + 3*x*m20' + 3*x*x*m10' + x*x*x*m00' )
                m.m30 += mom[6] + x * (3. * mom[3] + x * (3. * mom[1] + xm));

                // + m21 ( = m21' + x*(2*m11' + 2*y*m10' + x*m01' + x*y*m00') + y*m20')
                m.m21 += mom[7] + x * (2 * (mom[4] + y * mom[1]) + x * (mom[2] + ym)) + y * mom[3];

                // + m12 ( = m12' + y*(2*m11' + 2*x*m01' + y*m10' + x*y*m00') + x*m02')
                m.m12 += mom[8] + y * (2 * (mom[4] + x * mom[2]) + y * (mom[1] + xm)) + x * mom[5];

                // + m03 ( = m03' + 3*y*m02' + 3*y*y*m01' + y*y*y*m00' )
                m.m03 += mom[9] + y * (3. * mom[5] + y * (3. * mom[2] + ym));
            }
        }
    }

    completeMomentState( &m );
    return m;
}
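The commented accumulation formulas above simply shift each tile's moments by the tile origin (x, y) before summing. The following toy check is my own sketch (not from the book): it uses the left and right halves of a small random image instead of 32×32 tiles and verifies the idea for m00 and m10.

#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include <iostream>

int main()
{
    // Moments of the full image should equal the per-tile moments shifted by the tile origin.
    cv::Mat img(8, 8, CV_8UC1);
    cv::randu(img, cv::Scalar(0), cv::Scalar(256));

    cv::Moments whole = cv::moments(img);
    cv::Moments left  = cv::moments(img(cv::Rect(0, 0, 4, 8)));
    cv::Moments right = cv::moments(img(cv::Rect(4, 0, 4, 8)));   // this tile starts at x = 4

    double m00 = left.m00 + right.m00;                      // m00 needs no shift
    double m10 = left.m10 + (right.m10 + 4.0 * right.m00);  // m10' + x*m00'

    std::cout << "m00: " << whole.m00 << " vs " << m00 << std::endl;
    std::cout << "m10: " << whole.m10 << " vs " << m10 << std::endl;
    return 0;
}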


• Computing contour area: the contourArea() function

/*【contourArea() source code】*********************************************************
* @Version: OpenCV 3.0.0 (OpenCV 2 and OpenCV 3 differ little here; for a given version the Linux and PC sources are identical, found under the corresponding installation directory)
* @Source path: …\opencv\sources\modules\imgproc\src\shapedescr.cpp
* @Starting line: 338
********************************************************************************/
double cv::contourArea( InputArray _contour, bool oriented )
{
    Mat contour = _contour.getMat();
    int npoints = contour.checkVector(2);
    int depth = contour.depth();
    CV_Assert(npoints >= 0 && (depth == CV_32F || depth == CV_32S));

    if( npoints == 0 )
        return 0.;

    double a00 = 0;
    bool is_float = depth == CV_32F;
    const Point* ptsi = contour.ptr<Point>();
    const Point2f* ptsf = contour.ptr<Point2f>();
    Point2f prev = is_float ? ptsf[npoints-1] : Point2f((float)ptsi[npoints-1].x, (float)ptsi[npoints-1].y);

    for( int i = 0; i < npoints; i++ )
    {
        Point2f p = is_float ? ptsf[i] : Point2f((float)ptsi[i].x, (float)ptsi[i].y);
        a00 += (double)prev.x * p.y - (double)prev.y * p.x;
        prev = p;
    }

    a00 *= 0.5;
    if( !oriented )
        a00 = fabs(a00);

    return a00;
}
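The loop above is the classic shoelace (Green's theorem) formula. To make that explicit, here is a minimal standalone re-implementation in plain C++; the Pt struct and signedArea() helper are my own illustrative names, not OpenCV API.

#include <cmath>
#include <cstdio>
#include <vector>

// Minimal shoelace-style signed area, mirroring the accumulation in contourArea().
struct Pt { double x, y; };

double signedArea(const std::vector<Pt>& pts)
{
    double a00 = 0;
    Pt prev = pts.back();                    // start from the last vertex, as the source does
    for (const Pt& p : pts)
    {
        a00 += prev.x * p.y - prev.y * p.x;  // cross product of consecutive vertices
        prev = p;
    }
    return 0.5 * a00;                        // the source applies fabs() unless 'oriented' is true
}

int main()
{
    std::vector<Pt> square = { {0, 0}, {2, 0}, {2, 2}, {0, 2} };
    std::printf("signed area = %g\n", signedArea(square));  // 4 for this vertex order
    return 0;
}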


• Computing contour perimeter: the arcLength() function

/*【arcLength() source code】***********************************************************
* @Version: OpenCV 3.0.0 (OpenCV 2 and OpenCV 3 differ little here; for a given version the Linux and PC sources are identical, found under the corresponding installation directory)
* @Source path: …\opencv\sources\modules\imgproc\src\shapedescr.cpp
* @Starting line: 296
********************************************************************************/
double cv::arcLength( InputArray _curve, bool is_closed )
{
    Mat curve = _curve.getMat();
    int count = curve.checkVector(2);
    int depth = curve.depth();
    CV_Assert( count >= 0 && (depth == CV_32F || depth == CV_32S));
    double perimeter = 0;

    int i, j = 0;
    const int N = 16;
    float buf[N];

    if( count <= 1 )
        return 0.;

    bool is_float = depth == CV_32F;
    int last = is_closed ? count-1 : 0;
    const Point* pti = curve.ptr<Point>();
    const Point2f* ptf = curve.ptr<Point2f>();

    Point2f prev = is_float ? ptf[last] : Point2f((float)pti[last].x,(float)pti[last].y);

    for( i = 0; i < count; i++ )
    {
        Point2f p = is_float ? ptf[i] : Point2f((float)pti[i].x,(float)pti[i].y);
        float dx = p.x - prev.x, dy = p.y - prev.y;
        buf[j] = dx*dx + dy*dy;

        if( ++j == N || i == count-1 )
        {
            Mat bufmat(1, j, CV_32F, buf);
            sqrt(bufmat, bufmat);
            for( ; j > 0; j-- )
                perimeter += buf[j-1];
        }
        prev = p;
    }

    return perimeter;
}


3.4.2 Image Moment Example

For the complete code, see the attached demo [demo1].
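The demo1 attachment itself is not reproduced here. The following is a hypothetical sketch of what such a demo typically looks like: binarize, extract contours, report moments(), contourArea() and arcLength() for each contour, and mark the centroids. The file name "test.jpg", the threshold value 100 and all variable names are my assumptions, not the original demo.

#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/imgcodecs.hpp>
#include <opencv2/highgui.hpp>
#include <iostream>
#include <vector>

int main()
{
    // "test.jpg" is a placeholder file name; the original demo1 attachment is not included here.
    cv::Mat src = cv::imread("test.jpg", cv::IMREAD_GRAYSCALE);
    if (src.empty())
    {
        std::cerr << "could not load image" << std::endl;
        return -1;
    }

    // Binarize and extract the external contours.
    cv::Mat bin;
    cv::threshold(src, bin, 100, 255, cv::THRESH_BINARY);
    std::vector<std::vector<cv::Point> > contours;
    cv::findContours(bin, contours, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_SIMPLE);

    cv::Mat drawing = cv::Mat::zeros(src.size(), CV_8UC3);
    for (size_t i = 0; i < contours.size(); i++)
    {
        cv::Moments mu = cv::moments(contours[i]);           // contour moments
        double area    = cv::contourArea(contours[i]);       // enclosed area
        double length  = cv::arcLength(contours[i], true);   // perimeter of the closed contour

        std::cout << "contour " << i << ": area = " << area
                  << ", perimeter = " << length << std::endl;

        cv::drawContours(drawing, contours, (int)i, cv::Scalar(0, 255, 0), 2);
        if (mu.m00 != 0)  // centroid from the spatial moments
        {
            cv::Point centroid((int)(mu.m10 / mu.m00), (int)(mu.m01 / mu.m00));
            cv::circle(drawing, centroid, 3, cv::Scalar(0, 0, 255), cv::FILLED);
        }
    }

    cv::imshow("contours", drawing);
    cv::waitKey(0);
    return 0;
}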



Figure 1

Figure 2
