
Using OpenCV to approximate the boundary points of a binary image and filter out unwanted boundaries for edge finding

2012-02-10 08:56
http://blog.csdn.net/c2716266/article/details/7191460

Binarize the image;

take the difference between neighbouring black and white pixel values to obtain edge points;

filter the edge points down to the suitable region;

fit a line through the remaining points with cvFitLine2D.

The implementation is fairly rough; one search takes roughly 10 ms. Corrections from anyone who has studied OpenCV are welcome.
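
Before the full listing, here is a minimal self-contained sketch of the same pipeline in isolation: threshold a grayscale image, scan each column for a black-to-white jump, and hand the collected points to cvFitLine2D. The function name FitTopEdge, the input image gray and the threshold value 128 are illustrative assumptions of this sketch, not part of the original code; depending on the OpenCV version, cvFitLine2D is declared in the legacy compatibility header pulled in by cv.h.

#include <opencv/cv.h>   // OpenCV C API (header path may differ between versions)
#include <vector>

// Minimal sketch: fit a line to the first black-to-white transition in every column.
// "gray" is assumed to be an 8-bit single-channel IplImage; 128 is an illustrative threshold.
void FitTopEdge(IplImage* gray, float line[4])
{
    IplImage* bin = cvCreateImage(cvGetSize(gray), IPL_DEPTH_8U, 1);
    cvThreshold(gray, bin, 128, 255, CV_THRESH_BINARY);       // 1. binarize

    std::vector<CvPoint2D32f> pts;                             // 2. collect edge points
    for (int x = 0; x < bin->width; ++x)
        for (int y = 0; y < bin->height - 1; ++y)
        {
            uchar a = CV_IMAGE_ELEM(bin, uchar, y,     x);
            uchar b = CV_IMAGE_ELEM(bin, uchar, y + 1, x);
            if (b - a > 200)                                   //    black-to-white jump
            {
                pts.push_back(cvPoint2D32f((float)x, (float)y));
                break;                                         //    one point per column
            }
        }

    if (pts.size() > 2)                                        // 3. fit the line
        cvFitLine2D(&pts[0], (int)pts.size(), CV_DIST_L1, NULL, 0.01f, 0.01f, line);

    cvReleaseImage(&bin);
}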

Result preview (screenshots are in the original post):





void CvProcess::FindLine(
    IplImage* orgImg,     // source image
    IplImage* runImg,     // image used for display
    CvRect rec,           // ROI
    int thredValue,       // binarization threshold
    int lineAccuracy,     // search accuracy (number of search lines)
    int SearchDirection,  // search direction
    int EdgePolarity)     // edge polarity: black-to-white or white-to-black
{
    cvCopy(orgImg, runImg); // copy the source image into the display image

    // create a single-channel binary image used for all further processing
    IplImage* thrdImg = cvCreateImage(
        cvSize(orgImg->width, orgImg->height),
        IPL_DEPTH_8U,
        1);

    // convert the source image to a single-channel grayscale image
    cvCvtColor(runImg, thrdImg, CV_BGR2GRAY);

    // binarize
    cvThreshold(
        thrdImg,
        thrdImg,
        thredValue,
        255,
        CV_THRESH_BINARY);
    // cvNamedWindow("");          // debug display
    // cvShowImage("", thrdImg);

    // check that the ROI is usable (the original post compared rec.width
    // against IMAGE_HEIGHT here; rec.height is what is meant)
    if (rec.width > 0 && rec.width < IMAGE_WIDTH && rec.height > 0 && rec.height < IMAGE_HEIGHT)
    {
        // set the ROI
        cvSetImageROI(runImg, rec);
        cvSetImageROI(thrdImg, rec);

        // buffers for the edge search
        CvPoint2D32f* EdgePoint2D =     // all candidate edge points found
            (CvPoint2D32f*)malloc((IMAGE_HEIGHT * IMAGE_WIDTH) * sizeof(CvPoint2D32f));
        CvPoint2D32f* RelEdgePoint2D =  // the points that are actually kept
            (CvPoint2D32f*)malloc((IMAGE_HEIGHT * IMAGE_WIDTH) * sizeof(CvPoint2D32f));
        int EdgePoint2DCount = 0;       // candidate point counter
        int RelEdgePoint2DCount = 0;    // kept point counter
        float* line = new float[4];     // parameters of the fitted line
        byte ftData = 0, secData = 0;   // pixel pair used to detect a transition

        // build the search lines inside the ROI
        std::vector<CLine> searchlines = GetRecLines(rec, lineAccuracy, SearchDirection);

        switch (SearchDirection) // search direction
        {
        case TB:
            // vertical search, top to bottom
            for (int i = 0; i < thrdImg->roi->width; i++)
            {
                for (int j = 0; j < thrdImg->roi->height - 1; j++)
                {
                    // compare each pixel with the one below it and keep the
                    // positions where the difference is greater than 200
                    ftData = CV_IMAGE_ELEM(thrdImg, uchar,
                        thrdImg->roi->yOffset + j, thrdImg->roi->xOffset + i); // read the pixel through the macro
                    // ftData = (thrdImg->imageData + i * thrdImg->widthStep)[j]; // note: the row stride is widthStep, not width
                    secData = CV_IMAGE_ELEM(thrdImg, uchar,
                        thrdImg->roi->yOffset + j + 1, thrdImg->roi->xOffset + i);
                    switch (EdgePolarity)
                    {
                    case B2W:
                        if (secData - ftData > 200) // black to white
                        {
                            for (int n = 0; n < searchlines.size(); n++) // keep only points lying on a search line
                            {
                                if (searchlines[n].PTS.x == i && searchlines[n].PTS.y < j
                                    && searchlines[n].PTE.y > j)
                                {
                                    EdgePoint2D[EdgePoint2DCount] = cvPoint2D32f(i, j);
                                }
                            }
                            if (EdgePoint2DCount > 0) // compare against the points found so far
                            {
                                bool realPoint = TRUE;
                                // drop vertical points that share the same x coordinate,
                                // to reduce the chance of skewing the fit
                                for (int m = 1; m <= EdgePoint2DCount; m++)
                                {
                                    if (EdgePoint2D[EdgePoint2DCount].x == EdgePoint2D[EdgePoint2DCount - m].x)
                                    {
                                        realPoint = FALSE;
                                    }
                                }
                                if (realPoint) // keep and draw the non-duplicate point
                                {
                                    RelEdgePoint2D[RelEdgePoint2DCount] = cvPoint2D32f(i, j);
                                    cvCircle(runImg, cvPoint(i, j),
                                        1, CV_RGB(255, 0, 0), 2, CV_AA, 0); // draw the point
                                    RelEdgePoint2DCount++;
                                }
                            }
                            EdgePoint2DCount++;
                        }
                        break;
                    case W2B:
                        if (ftData - secData > 200) // white to black
                        {
                            for (int n = 0; n < searchlines.size(); n++) // keep only points lying on a search line
                            {
                                if (searchlines[n].PTS.x == i && searchlines[n].PTS.y < j
                                    && searchlines[n].PTE.y > j)
                                {
                                    EdgePoint2D[EdgePoint2DCount] = cvPoint2D32f(i, j);
                                }
                            }
                            if (EdgePoint2DCount > 0) // compare against the points found so far
                            {
                                bool realPoint = TRUE;
                                // drop vertical points that share the same x coordinate,
                                // to reduce the chance of skewing the fit
                                for (int m = 1; m <= EdgePoint2DCount; m++)
                                {
                                    if (EdgePoint2D[EdgePoint2DCount].x == EdgePoint2D[EdgePoint2DCount - m].x)
                                    {
                                        realPoint = FALSE;
                                    }
                                }
                                if (realPoint) // keep and draw the non-duplicate point
                                {
                                    RelEdgePoint2D[RelEdgePoint2DCount] = cvPoint2D32f(i, j);
                                    cvCircle(runImg, cvPoint(i, j),
                                        1, CV_RGB(255, 0, 0), 2, CV_AA, 0); // draw the point
                                    RelEdgePoint2DCount++;
                                }
                            }
                            EdgePoint2DCount++;
                        }
                        break;
                    }
                }
            }
            if (RelEdgePoint2DCount > 2) // fit the line only when more than two points were kept
            {
                // fit the approximating line
                cvFitLine2D(RelEdgePoint2D, RelEdgePoint2DCount, CV_DIST_L1, NULL, 0.01, 0.01, line);
                CvPoint FirstPoint; // start point
                CvPoint LastPoint;  // end point
                FirstPoint.x = int(line[2] - 1000 * line[0]);
                FirstPoint.y = int(line[3] - 1000 * line[1]);
                LastPoint.x  = int(line[2] + 1000 * line[0]);
                LastPoint.y  = int(line[3] + 1000 * line[1]);
                cvLine(runImg, FirstPoint, LastPoint, CV_RGB(255, 0, 0), 1, CV_AA); // draw the fitted line
            }
            break;

        case LR:
            // horizontal search, left to right
            for (int j = 0; j < thrdImg->roi->height; j++)
            {
                for (int i = 0; i < thrdImg->roi->width - 1; i++)
                {
                    ftData = CV_IMAGE_ELEM(thrdImg, uchar,
                        thrdImg->roi->yOffset + j, thrdImg->roi->xOffset + i); // read the pixel through the macro
                    // ftData = (thrdImg->imageData + i * thrdImg->widthStep)[j]; // note: the row stride is widthStep, not width
                    secData = CV_IMAGE_ELEM(thrdImg, uchar,
                        thrdImg->roi->yOffset + j, thrdImg->roi->xOffset + i + 1);
                    switch (EdgePolarity)
                    {
                    case B2W:
                        if (secData - ftData > 200) // black to white
                        {
                            for (int n = 0; n < searchlines.size(); n++) // keep only points lying on a search line
                            {
                                if (searchlines[n].PTS.y == j && searchlines[n].PTS.x < i
                                    && searchlines[n].PTE.x > i)
                                {
                                    EdgePoint2D[EdgePoint2DCount] = cvPoint2D32f(i, j);
                                }
                            }
                            if (EdgePoint2DCount > 0) // compare against the points found so far
                            {
                                bool realPoint = TRUE;
                                for (int m = 1; m <= EdgePoint2DCount; m++) // drop horizontal points sharing the same y coordinate
                                {
                                    if (EdgePoint2D[EdgePoint2DCount].y == EdgePoint2D[EdgePoint2DCount - m].y)
                                    {
                                        realPoint = FALSE;
                                    }
                                }
                                if (realPoint) // keep and draw the non-duplicate point
                                {
                                    RelEdgePoint2D[RelEdgePoint2DCount] = cvPoint2D32f(i, j);
                                    cvCircle(runImg, cvPoint(i, j),
                                        1, CV_RGB(255, 0, 0), 2, CV_AA, 0); // draw the point
                                    RelEdgePoint2DCount++;
                                }
                            }
                            EdgePoint2DCount++;
                        }
                        break;
                    case W2B:
                        if (ftData - secData > 200) // white to black
                        {
                            for (int n = 0; n < searchlines.size(); n++) // keep only points lying on a search line
                            {
                                if (searchlines[n].PTS.y == j && searchlines[n].PTS.x < i
                                    && searchlines[n].PTE.x > i)
                                {
                                    EdgePoint2D[EdgePoint2DCount] = cvPoint2D32f(i, j);
                                }
                            }
                            if (EdgePoint2DCount > 0) // compare against the points found so far
                            {
                                bool realPoint = TRUE;
                                for (int m = 1; m <= EdgePoint2DCount; m++) // drop horizontal points sharing the same y coordinate
                                {
                                    if (EdgePoint2D[EdgePoint2DCount].y == EdgePoint2D[EdgePoint2DCount - m].y)
                                    {
                                        realPoint = FALSE;
                                    }
                                }
                                if (realPoint) // keep and draw the non-duplicate point
                                {
                                    RelEdgePoint2D[RelEdgePoint2DCount] = cvPoint2D32f(i, j);
                                    cvCircle(runImg, cvPoint(i, j),
                                        1, CV_RGB(255, 0, 0), 2, CV_AA, 0); // draw the point
                                    RelEdgePoint2DCount++;
                                }
                            }
                            EdgePoint2DCount++;
                        }
                        break;
                    }
                }
            }
            // fit the approximating line
            if (RelEdgePoint2DCount > 2)
            {
                cvFitLine2D(RelEdgePoint2D, RelEdgePoint2DCount, CV_DIST_L1, NULL, 0.01, 0.01, line);
                CvPoint FirstPoint; // start point
                CvPoint LastPoint;  // end point
                FirstPoint.x = int(line[2] - 1000 * line[0]);
                FirstPoint.y = int(line[3] - 1000 * line[1]);
                LastPoint.x  = int(line[2] + 1000 * line[0]);
                LastPoint.y  = int(line[3] + 1000 * line[1]);
                cvLine(runImg, FirstPoint, LastPoint, CV_RGB(255, 0, 0), 1, CV_AA); // draw the fitted line
            }
            break;
        }

        // release resources
        free(EdgePoint2D);
        free(RelEdgePoint2D);
        delete[] line;
        searchlines.clear();
        cvResetImageROI(runImg);
        cvResetImageROI(thrdImg);
        DrawRecLines(runImg, rec, lineAccuracy, SearchDirection);
    }

    // release resources
    cvReleaseImage(&thrdImg);
}
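
For reference, the four floats filled in by the fit are (vx, vy, x0, y0): a unit direction vector plus a point on the line, which is why the code above draws the result by stepping ±1000 pixels away from (line[2], line[3]). cvFitLine2D is a thin compatibility wrapper, and the same fit can be expressed through the documented cvFitLine entry point by wrapping the point array in a CvMat header. In the sketch below, pts and count stand in for RelEdgePoint2D and RelEdgePoint2DCount; the function name FitEdgeLine is illustrative.

#include <opencv/cv.h>

// Sketch: the same fit through the documented cvFitLine entry point.
// "pts"/"count" play the role of RelEdgePoint2D / RelEdgePoint2DCount above.
void FitEdgeLine(CvPoint2D32f* pts, int count, float fitted[4])
{
    CvMat ptMat = cvMat(1, count, CV_32FC2, pts);          // wrap the array, no copy
    cvFitLine(&ptMat, CV_DIST_L1, 0, 0.01, 0.01, fitted);  // fitted = (vx, vy, x0, y0)
}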

// draw the search lines together with the ROI rectangle
void CvProcess::DrawRecLines(IplImage* runImg, CvRect rec, int lineAccuracy, int SearchDirection)
{
    cvRectangleR(runImg, rec, CV_RGB(0, 255, 0), 1, CV_AA, 0);
    CvPoint RecPS = cvPoint(rec.x, rec.y),
            RecPE = cvPoint(rec.x + rec.width, rec.y + rec.height);
    switch (SearchDirection)
    {
    case TB:
        for (int i = 1; i < lineAccuracy; i++)
        {
            CvPoint Ps = cvPoint(((double)rec.width / lineAccuracy) * i + RecPS.x, RecPS.y);
            CvPoint Pe = cvPoint(((double)rec.width / lineAccuracy) * i + RecPS.x, RecPE.y);
            cvLine(runImg, Ps, Pe, CV_RGB(0, 255, 255), 1, CV_AA, 0);
        }
        break;
    case LR:
        for (int i = 1; i < lineAccuracy; i++)
        {
            CvPoint Ps = cvPoint(RecPS.x, ((double)rec.height / lineAccuracy) * i + RecPS.y);
            CvPoint Pe = cvPoint(RecPE.x, ((double)rec.height / lineAccuracy) * i + RecPS.y);
            cvLine(runImg, Ps, Pe, CV_RGB(0, 255, 255), 1, CV_AA, 0);
        }
        break;
    }
}

// build the search lines inside the ROI
std::vector<CLine> CvProcess::GetRecLines(CvRect rec, int lineAccuracy, int SearchDirection)
{
    std::vector<CLine> SearchLines;
    CLine line;
    rec.x = 0; // shift the coordinates into the ROI's own frame
    rec.y = 0;
    CvPoint RecPS = cvPoint(rec.x, rec.y),
            RecPE = cvPoint(rec.x + rec.width, rec.y + rec.height);
    switch (SearchDirection)
    {
    case TB:
        for (int i = 1; i < lineAccuracy; i++)
        {
            line.PTS = cvPoint(((double)rec.width / lineAccuracy) * i + RecPS.x, RecPS.y);
            line.PTE = cvPoint(((double)rec.width / lineAccuracy) * i + RecPS.x, RecPE.y);
            SearchLines.push_back(line);
        }
        break;
    case LR:
        for (int i = 1; i < lineAccuracy; i++)
        {
            line.PTS = cvPoint(RecPS.x, ((double)rec.height / lineAccuracy) * i + RecPS.y);
            line.PTE = cvPoint(RecPE.x, ((double)rec.height / lineAccuracy) * i + RecPS.y);
            SearchLines.push_back(line);
        }
        break;
    }
    return SearchLines;
}
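
To round things off, here is a hypothetical driver showing how FindLine might be called. The image path, ROI, threshold and line count are illustrative; the TB/B2W constants, IMAGE_WIDTH/IMAGE_HEIGHT and the CvProcess/CLine declarations come from the rest of the author's project and are not shown in this post.

#include <opencv/cv.h>
#include <opencv/highgui.h>
// #include "CvProcess.h"  // project header declaring CvProcess, CLine, TB, B2W (not shown here)

int main()
{
    IplImage* org = cvLoadImage("part.bmp", CV_LOAD_IMAGE_COLOR); // illustrative file name
    if (!org) return -1;
    IplImage* show = cvCloneImage(org);

    CvProcess proc;
    proc.FindLine(org, show,
                  cvRect(50, 50, 300, 200), // ROI (illustrative)
                  128,                      // binarization threshold
                  10,                       // number of search lines
                  TB,                       // search top to bottom
                  B2W);                     // black-to-white edges

    cvNamedWindow("result");
    cvShowImage("result", show);
    cvWaitKey(0);

    cvReleaseImage(&org);
    cvReleaseImage(&show);
    return 0;
}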