The six methods are: RGB-based segmentation, RG-channel-based segmentation, YCrCb + Otsu (for Otsu's method see http://blog.csdn.net/onezeros/article/details/6136770 and http://wenku.baidu.com/view/05c47e03bed5b9f3f90f1ce4.html), the YCrCb space, the YUV space, and the HSV space. The next step is to port these detectors to Android through JNI; the final goal is to let an Android smartphone be unlocked and locked with a palmprint.
The development environment is Qt; add the following to the .pro file:
~~~
INCLUDEPATH += /usr/include/opencv
LIBS += /usr/lib/libcv.so \
/usr/lib/libcvaux.so \
/usr/lib/libcxcore.so \
/usr/lib/libhighgui.so \
/usr/lib/libml.so
~~~
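The library names above follow the old OpenCV 1.x layout. If your system has a newer OpenCV build that ships a pkg-config file (an assumption about your setup), a roughly equivalent .pro sketch would be:
~~~
# Sketch for newer OpenCV installs that provide a pkg-config file
CONFIG += link_pkgconfig
PKGCONFIG += opencv    # may be "opencv4" depending on the version installed
~~~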
Here is the source code:
~~~
#include <iostream>
#include "cv.h"
#include "highgui.h"
void SkinRGB(IplImage* rgb,IplImage* _dst);
void cvSkinRG(IplImage* rgb,IplImage* gray);
void cvThresholdOtsu(IplImage* src, IplImage* dst);
void cvSkinOtsu(IplImage* src, IplImage* dst);
void cvSkinYCbCr(IplImage* img, IplImage* mask);
void cvSkinYUV(IplImage* src,IplImage* dst);
void cvSkinHSV(IplImage* src,IplImage* dst);
using namespace std;
int main()
{
    // Load the test image and allocate one destination image per method.
    IplImage* srcImg = cvLoadImage("/home/yan/download/testPalm4.jpg", 1);
    if (!srcImg) {
        cout << "failed to load the test image" << endl;
        return -1;
    }
    IplImage* dstRGB = cvCreateImage(cvGetSize(srcImg), 8, 3);
    IplImage* dstRG = cvCreateImage(cvGetSize(srcImg), 8, 1);
    IplImage* dst_crotsu = cvCreateImage(cvGetSize(srcImg), 8, 1);
    IplImage* dst_ycbcr = cvCreateImage(cvGetSize(srcImg), 8, 1);
    IplImage* dst_yuv = cvCreateImage(cvGetSize(srcImg), 8, 3);
    IplImage* dst_hsv = cvCreateImage(cvGetSize(srcImg), 8, 3);

    // Run the six skin-segmentation methods and save each result.
    SkinRGB(srcImg, dstRGB);
    cvSaveImage("/home/yan/download/1_dstRGB.jpg", dstRGB);
    cvSkinRG(srcImg, dstRG);
    cvSaveImage("/home/yan/download/2_dstRG.jpg", dstRG);
    cvSkinOtsu(srcImg, dst_crotsu);
    cvSaveImage("/home/yan/download/3_dst_crotsu.jpg", dst_crotsu);
    cvSkinYCbCr(srcImg, dst_ycbcr);
    cvSaveImage("/home/yan/download/4_dst_ycbcr.jpg", dst_ycbcr);
    cvSkinYUV(srcImg, dst_yuv);
    cvSaveImage("/home/yan/download/5_dst_yuv.jpg", dst_yuv);
    cvSkinHSV(srcImg, dst_hsv);
    cvSaveImage("/home/yan/download/6_dst_hsv.jpg", dst_hsv);

    // Display the source image and all six results.
    cvNamedWindow("srcImg", 1);
    cvShowImage("srcImg", srcImg);
    cvNamedWindow("dstRGB", 1);
    cvShowImage("dstRGB", dstRGB);
    cvNamedWindow("dstRG", 1);
    cvShowImage("dstRG", dstRG);
    cvNamedWindow("dstcrotsu", 1);
    cvShowImage("dstcrotsu", dst_crotsu);
    cvNamedWindow("dst_ycbcr", 1);
    cvShowImage("dst_ycbcr", dst_ycbcr);
    cvNamedWindow("dst_yuv", 1);
    cvShowImage("dst_yuv", dst_yuv);
    cvNamedWindow("dst_hsv", 1);
    cvShowImage("dst_hsv", dst_hsv);
    cvWaitKey(0);
    return 0;
}
// Skin region location using fixed RGB thresholds, with separate rules for
// uniform and lateral illumination; matching pixels keep their original color.
void SkinRGB(IplImage* rgb, IplImage* _dst)
{
    assert(rgb->nChannels == 3 && _dst->nChannels == 3);
    static const int R = 2;
    static const int G = 1;
    static const int B = 0;
    IplImage* dst = cvCreateImage(cvGetSize(_dst), 8, 3);
    cvZero(dst);
    for (int h = 0; h < rgb->height; h++) {
        unsigned char* prgb = (unsigned char*)rgb->imageData + h * rgb->widthStep;
        unsigned char* pdst = (unsigned char*)dst->imageData + h * dst->widthStep;
        for (int w = 0; w < rgb->width; w++) {
            if ((prgb[R] > 95 && prgb[G] > 40 && prgb[B] > 20 &&
                 prgb[R] - prgb[B] > 15 && prgb[R] - prgb[G] > 15
                 /* && !(prgb[R] > 170 && prgb[G] > 170 && prgb[B] > 170) */) ||  // uniform illumination
                (prgb[R] > 200 && prgb[G] > 210 && prgb[B] > 170 &&
                 abs(prgb[R] - prgb[B]) <= 15 && prgb[R] > prgb[B] && prgb[G] > prgb[B])  // lateral illumination
                ) {
                memcpy(pdst, prgb, 3);  // copy the matching pixel into the output
            }
            prgb += 3;
            pdst += 3;
        }
    }
    cvCopyImage(dst, _dst);
    cvReleaseImage(&dst);
}
// Skin segmentation in normalized r-g space: skin lies between two quadratic
// curves and away from the white point (r = g = 1/3).
void cvSkinRG(IplImage* rgb, IplImage* gray)
{
    assert(rgb->nChannels == 3 && gray->nChannels == 1);
    const int R = 2;
    const int G = 1;
    const int B = 0;
    // Coefficients of the upper and lower bounding parabolas g = A*r^2 + B*r + C
    double Aup = -1.8423;
    double Bup = 1.5294;
    double Cup = 0.0422;
    double Adown = -0.7279;
    double Bdown = 0.6066;
    double Cdown = 0.1766;
    for (int h = 0; h < rgb->height; h++)
    {
        unsigned char* pGray = (unsigned char*)gray->imageData + h * gray->widthStep;
        unsigned char* pRGB = (unsigned char*)rgb->imageData + h * rgb->widthStep;
        for (int w = 0; w < rgb->width; w++)
        {
            int s = pRGB[R] + pRGB[G] + pRGB[B];
            if (s == 0)  // guard against division by zero on pure black pixels
            {
                *pGray = 0;
                pGray++;
                pRGB += 3;
                continue;
            }
            double r = (double)pRGB[R] / s;
            double g = (double)pRGB[G] / s;
            double Gup = Aup * r * r + Bup * r + Cup;
            double Gdown = Adown * r * r + Bdown * r + Cdown;
            double Wr = (r - 0.33) * (r - 0.33) + (g - 0.33) * (g - 0.33);  // distance from the white point
            if (g < Gup && g > Gdown && Wr > 0.004)
            {
                *pGray = 255;
            }
            else
            {
                *pGray = 0;
            }
            pGray++;
            pRGB += 3;
        }
    }
}
// Global thresholding with Otsu's method: choose the threshold that maximizes
// the between-class variance of the normalized gray-level histogram.
void cvThresholdOtsu(IplImage* src, IplImage* dst)
{
    int height = src->height;
    int width = src->width;
    // histogram
    float histogram[256] = {0};
    for (int i = 0; i < height; i++)
    {
        unsigned char* p = (unsigned char*)src->imageData + src->widthStep * i;
        for (int j = 0; j < width; j++)
        {
            histogram[*p++]++;
        }
    }
    // normalize histogram
    int size = height * width;
    for (int i = 0; i < 256; i++)
    {
        histogram[i] = histogram[i] / size;
    }
    // average pixel value
    float avgValue = 0;
    for (int i = 0; i < 256; i++)
    {
        avgValue += i * histogram[i];
    }
    int threshold = 0;
    float maxVariance = 0;
    float w = 0, u = 0;  // cumulative class probability and class mean
    for (int i = 0; i < 256; i++)
    {
        w += histogram[i];
        u += i * histogram[i];
        if (w == 0 || w == 1)  // skip degenerate splits to avoid dividing by zero
            continue;
        float t = avgValue * w - u;
        float variance = t * t / (w * (1 - w));
        if (variance > maxVariance)
        {
            maxVariance = variance;
            threshold = i;
        }
    }
    cvThreshold(src, dst, threshold, 255, CV_THRESH_BINARY);
}
// Skin segmentation by Otsu-thresholding the Cr channel of the YCrCb image.
void cvSkinOtsu(IplImage* src, IplImage* dst)
{
    assert(dst->nChannels == 1 && src->nChannels == 3);
    IplImage* ycrcb = cvCreateImage(cvGetSize(src), 8, 3);
    IplImage* cr = cvCreateImage(cvGetSize(src), 8, 1);
    cvCvtColor(src, ycrcb, CV_BGR2YCrCb);
    cvSplit(ycrcb, 0, cr, 0, 0);
    cvThresholdOtsu(cr, cr);
    cvCopyImage(cr, dst);
    cvReleaseImage(&cr);
    cvReleaseImage(&ycrcb);
}
// Skin segmentation in YCrCb using an elliptical boundary in the (Cr, Cb) plane,
// with a slightly tighter ellipse for dark pixels (Y < 100).
void cvSkinYCbCr(IplImage* img, IplImage* mask)
{
    CvSize imageSize = cvSize(img->width, img->height);
    IplImage* imgY = cvCreateImage(imageSize, IPL_DEPTH_8U, 1);
    IplImage* imgCr = cvCreateImage(imageSize, IPL_DEPTH_8U, 1);
    IplImage* imgCb = cvCreateImage(imageSize, IPL_DEPTH_8U, 1);
    IplImage* imgYCrCb = cvCreateImage(imageSize, img->depth, img->nChannels);
    cvCvtColor(img, imgYCrCb, CV_BGR2YCrCb);
    cvSplit(imgYCrCb, imgY, imgCr, imgCb, 0);

    int y, cr, cb, l, x1, y1, value;
    unsigned char *pY, *pCr, *pCb, *pMask;
    pY = (unsigned char*)imgY->imageData;
    pCr = (unsigned char*)imgCr->imageData;
    pCb = (unsigned char*)imgCb->imageData;
    pMask = (unsigned char*)mask->imageData;
    cvSetZero(mask);
    // Note: the buffers are walked as if rows were contiguous, i.e. this assumes
    // widthStep == width (no row padding) for the single-channel images.
    l = img->height * img->width;
    for (int i = 0; i < l; i++) {
        y = *pY;
        cr = *pCr;
        cb = *pCb;
        // center the chroma values and map them onto the skin ellipse (fixed-point arithmetic)
        cb -= 109;
        cr -= 152;
        x1 = (819 * cr - 614 * cb) / 32 + 51;
        y1 = (819 * cr + 614 * cb) / 32 + 77;
        x1 = x1 * 41 / 1024;
        y1 = y1 * 73 / 1024;
        value = x1 * x1 + y1 * y1;
        if (y < 100) (*pMask) = (value < 700) ? 255 : 0;
        else         (*pMask) = (value < 850) ? 255 : 0;
        pY++;
        pCr++;
        pCb++;
        pMask++;
    }
    cvReleaseImage(&imgY);
    cvReleaseImage(&imgCr);
    cvReleaseImage(&imgCb);
    cvReleaseImage(&imgYCrCb);
}
// Despite the name, this works in YCrCb: a pixel is kept as skin if
// 133 <= Cr <= 173 and 77 <= Cb <= 127 (the classic fixed YCrCb range).
void cvSkinYUV(IplImage* src, IplImage* dst)
{
    IplImage* ycrcb = cvCreateImage(cvGetSize(src), 8, 3);
    cvCvtColor(src, ycrcb, CV_BGR2YCrCb);
    // channel order after CV_BGR2YCrCb is Y, Cr, Cb
    static const int Cb = 2;
    static const int Cr = 1;
    static const int Y = 0;
    cvZero(dst);
    for (int h = 0; h < src->height; h++)
    {
        unsigned char* pycrcb = (unsigned char*)ycrcb->imageData + h * ycrcb->widthStep;
        unsigned char* psrc = (unsigned char*)src->imageData + h * src->widthStep;
        unsigned char* pdst = (unsigned char*)dst->imageData + h * dst->widthStep;
        for (int w = 0; w < src->width; w++)
        {
            if (pycrcb[Cr] >= 133 && pycrcb[Cr] <= 173 && pycrcb[Cb] >= 77 && pycrcb[Cb] <= 127)
            {
                memcpy(pdst, psrc, 3);  // keep the original pixel where the rule matches
            }
            pycrcb += 3;
            psrc += 3;
            pdst += 3;
        }
    }
    cvReleaseImage(&ycrcb);
}
// Skin segmentation in HSV: keep pixels whose hue falls in [7, 29]
// (OpenCV's 8-bit hue is scaled to the range 0-180).
void cvSkinHSV(IplImage* src, IplImage* dst)
{
    IplImage* hsv = cvCreateImage(cvGetSize(src), 8, 3);
    cvCvtColor(src, hsv, CV_BGR2HSV);
    static const int V = 2;
    static const int S = 1;
    static const int H = 0;
    cvZero(dst);
    for (int h = 0; h < src->height; h++)
    {
        unsigned char* phsv = (unsigned char*)hsv->imageData + h * hsv->widthStep;
        unsigned char* psrc = (unsigned char*)src->imageData + h * src->widthStep;
        unsigned char* pdst = (unsigned char*)dst->imageData + h * dst->widthStep;
        for (int w = 0; w < src->width; w++)
        {
            if (phsv[H] >= 7 && phsv[H] <= 29)
            {
                memcpy(pdst, psrc, 3);  // keep the original pixel where the hue matches
            }
            phsv += 3;
            psrc += 3;
            pdst += 3;
        }
    }
    cvReleaseImage(&hsv);
}
~~~
Results are shown below.
Test image:
![](https://box.kancloud.cn/2016-01-19_569e21ac93ccb.jpg)
The images below correspond, in order, to the six methods above:
![](https://box.kancloud.cn/2016-01-19_569e21acc03a8.jpg)
![](https://box.kancloud.cn/2016-01-19_569e21acdd39c.jpg)
![](https://box.kancloud.cn/2016-01-19_569e21ad0652e.jpg)
![](https://box.kancloud.cn/2016-01-19_569e21ad18287.jpg)
![](https://box.kancloud.cn/2016-01-19_569e21ad38c05.jpg)
![](https://box.kancloud.cn/2016-01-19_569e21ad6923c.jpg)
The comparison above makes it clear that YCrCb + Otsu gives the best result, followed by the RGB and YUV methods. The results look this good mainly because the test image was shot against a white background. Unfortunately, when the background is not a solid color, for example when it contains both red and black, the results degrade badly. In these experiments, segmentation worked best when the background was a solid color, either white or black.
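As a side note, the same YCrCb + Otsu idea can be written much more compactly with OpenCV's C++ API, which has Otsu thresholding built in. The sketch below is an untested equivalent of cvSkinOtsu above, not the code used for the screenshots; the file names are placeholders:
~~~
#include <opencv2/opencv.hpp>

int main()
{
    // Placeholder path; replace with your own test image.
    cv::Mat src = cv::imread("testPalm.jpg");
    if (src.empty()) return -1;

    // Convert to YCrCb and pull out the Cr channel (index 1).
    cv::Mat ycrcb, channels[3];
    cv::cvtColor(src, ycrcb, cv::COLOR_BGR2YCrCb);
    cv::split(ycrcb, channels);

    // Let OpenCV pick the Otsu threshold on Cr; the result is the skin mask.
    cv::Mat mask;
    cv::threshold(channels[1], mask, 0, 255, cv::THRESH_BINARY | cv::THRESH_OTSU);

    cv::imwrite("skin_mask.jpg", mask);
    return 0;
}
~~~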
References:
http://blog.sina.com.cn/s/blog_9ce5a1b501017otq.html
http://blog.csdn.net/scyscyao/article/details/5468577
http://wenku.baidu.com/view/05c47e03bed5b9f3f90f1ce4.html
http://blog.csdn.net/onezeros/article/details/6136770
(The palmprint in the test image is the author's own; please credit the author yanzi1225627 when reposting.)