Key technical points for implementing skin-color-based skin detection on Android:
(1) Using the camera preview on Android, which involves the Camera API and SurfaceView.
(2) The color format delivered by the Android camera preview: NV21, the default preview format of the old Camera API.
(3) NV21 is a YCrCb format, so we need to understand the YCrCb color space and the conversion between YCrCb and YUV.
The YUV color model is derived from the RGB model; its defining property is that it separates luminance from chrominance, which makes it well suited to image processing. The YCbCr model is in turn derived from YUV.
(4) Human skin color occupies a characteristic region of the YCrCb space: 133 ≤ Cr ≤ 173 and 77 ≤ Cb ≤ 127. Experiments show that Cr in [140, 160] matches East Asian skin tones well; a per-pixel sketch of this test is shown right after this list.
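As a concrete illustration of points (2)–(4): in an NV21 frame the Y plane occupies the first width*height bytes and is followed by an interleaved V/U (i.e. Cr/Cb) plane at half resolution, so the skin test can be written per pixel roughly as below. This is only a minimal sketch; the method name isSkinPixel is illustrative and not part of the project code that follows.

// Sketch: test whether the pixel at (x, y) of an NV21 preview frame falls in the skin-color range.
// Assumes the NV21 layout: Y plane of width*height bytes, then interleaved V,U pairs at half resolution.
static boolean isSkinPixel(byte[] nv21, int width, int height, int x, int y) {
    int frameSize = width * height;
    // each 2x2 block of pixels shares one V (Cr) and one U (Cb) sample
    int uvIndex = frameSize + (y >> 1) * width + (x & ~1);
    int cr = 0xff & nv21[uvIndex];      // V (Cr) comes first in NV21
    int cb = 0xff & nv21[uvIndex + 1];  // then U (Cb)
    // skin range from point (4): 133 <= Cr <= 173, 77 <= Cb <= 127
    return cr >= 133 && cr <= 173 && cb >= 77 && cb <= 127;
}

Looping this test over every pixel and coloring the hits is essentially what detectface() does in the listing further down.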
Conversion between YUV and RGB:
(Strictly speaking, the formulas below are for YCbCr rather than YUV; the difference is discussed after them.)
Y = 0.299 R + 0.587 G + 0.114 B
U = -0.1687 R - 0.3313 G + 0.5 B + 128
V = 0.5 R - 0.4187 G - 0.0813 B + 128
R = Y + 1.402 (V-128)
G = Y - 0.34414 (U-128) - 0.71414 (V-128)
B = Y + 1.772 (U-128)
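These are the full-range (JPEG-style) YCbCr formulas. A minimal sketch applying them to a single pixel, just to make the 128 offsets concrete (the method names are illustrative only):

// Sketch: full-range RGB <-> YCbCr per the formulas above (all channels 0..255, offsets of 128 on Cb/Cr).
static int[] rgbToYCbCr(int r, int g, int b) {
    int y  = (int) Math.round( 0.299  * r + 0.587  * g + 0.114  * b);
    int cb = (int) Math.round(-0.1687 * r - 0.3313 * g + 0.5    * b + 128);
    int cr = (int) Math.round( 0.5    * r - 0.4187 * g - 0.0813 * b + 128);
    return new int[] { clamp(y), clamp(cb), clamp(cr) };
}

static int[] yCbCrToRgb(int y, int cb, int cr) {
    int r = (int) Math.round(y + 1.402   * (cr - 128));
    int g = (int) Math.round(y - 0.34414 * (cb - 128) - 0.71414 * (cr - 128));
    int b = (int) Math.round(y + 1.772   * (cb - 128));
    return new int[] { clamp(r), clamp(g), clamp(b) };
}

// keep values inside the 0..255 byte range
static int clamp(int v) { return v < 0 ? 0 : (v > 255 ? 255 : v); }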
For a long time I did not understand the difference between YUV and YCbCr, and I suspect some readers share the same confusion, so I am writing down what I learned.
1. Differences in the conversion formulas to and from RGB
YUV <--> RGB
Y'= 0.299*R' + 0.587*G' + 0.114*B'
U'= -0.147*R' - 0.289*G' + 0.436*B' = 0.492*(B'- Y')
V'= 0.615*R' - 0.515*G' - 0.100*B' = 0.877*(R'- Y')
R' = Y' + 1.140*V'
G' = Y' - 0.394*U' - 0.581*V'
B' = Y' + 2.032*U'
YCbCr <--> RGB
Y'  =  0.257*R' + 0.504*G' + 0.098*B' + 16
Cb' = -0.148*R' - 0.291*G' + 0.439*B' + 128
Cr' =  0.439*R' - 0.368*G' - 0.071*B' + 128
R' = 1.164*(Y'-16) + 1.596*(Cr'-128)
G' = 1.164*(Y'-16) - 0.813*(Cr'-128) - 0.392*(Cb'-128)
B' = 1.164*(Y'-16) + 2.017*(Cb'-128)
Note: every symbol above carries a prime ('), indicating that the value has been gamma-corrected.
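To make the difference tangible, here is the same conversion using the studio-range formulas above; again only a hedged sketch with illustrative names:

// Sketch: studio-range ("video range", BT.601) RGB <-> YCbCr per the formulas above.
// Y is scaled into roughly 16..235 and Cb/Cr into roughly 16..240.
static int[] rgbToYCbCrStudio(int r, int g, int b) {
    int y  = (int) Math.round( 0.257 * r + 0.504 * g + 0.098 * b + 16);
    int cb = (int) Math.round(-0.148 * r - 0.291 * g + 0.439 * b + 128);
    int cr = (int) Math.round( 0.439 * r - 0.368 * g - 0.071 * b + 128);
    return new int[] { y, cb, cr };
}

static int[] yCbCrStudioToRgb(int y, int cb, int cr) {
    int r = (int) Math.round(1.164 * (y - 16) + 1.596 * (cr - 128));
    int g = (int) Math.round(1.164 * (y - 16) - 0.813 * (cr - 128) - 0.392 * (cb - 128));
    int b = (int) Math.round(1.164 * (y - 16) + 2.017 * (cb - 128));
    // clamp the results back into 0..255
    return new int[] { Math.max(0, Math.min(255, r)),
                       Math.max(0, Math.min(255, g)),
                       Math.max(0, Math.min(255, b)) };
}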
The source code is as follows:
package com.example.hearrate;

import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.List;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PixelFormat;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.hardware.Camera;
import android.hardware.Camera.CameraInfo;
import android.hardware.Camera.Size;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.res.Configuration;
import android.util.Log;
import android.view.Menu;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.graphics.PorterDuff;
import android.graphics.PorterDuff.Mode;
public class MainActivity extends Activity implements SurfaceHolder.Callback, Camera.PreviewCallback {
    SurfaceHolder mHolder;
    SurfaceView mView;
    SurfaceView mLayer;
    SurfaceHolder mLayerHolder;
    private Camera mCamera = null;
    private boolean bIfPreview = false;
    private int mPreviewHeight;
    private int mPreviewWidth;
    private Canvas canvas;
    private Paint paint;
    private int facex = 0, facey = 0;
    private boolean bprocessing = false;
    private int[] RGBData;
    private byte[] mYUVData;
    private boolean bfront = false;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        mView = (SurfaceView) findViewById(R.id.layer0);
        paint = new Paint();
        paint.setColor(Color.RED);
        paint.setAntiAlias(true);

        mPreviewWidth = 320;
        mPreviewHeight = 400;
        mHolder = mView.getHolder();
        mHolder.addCallback(this);
        mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); // required on pre-3.0 devices, deprecated afterwards

        // layer1 is a transparent overlay drawn on top of the camera preview
        mLayer = (SurfaceView) findViewById(R.id.layer1);
        mLayer.setZOrderOnTop(true);
        //mLayer.setEGLConfigChooser(8, 8, 8, 8, 16, 0);

        mLayerHolder = mLayer.getHolder();
        mLayerHolder.setFormat(PixelFormat.TRANSPARENT);
        mLayerHolder.addCallback(this);
        mLayerHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }
    void drawlayer1() {
        canvas = mLayerHolder.lockCanvas();
        // canvas.drawRGB(0, 0, 0);
        canvas.save(); // must pair with the restore() below
        Bitmap bmp = BitmapFactory.decodeResource(getResources(), R.drawable.ic_launcher);
        // draw the marker bitmap at the detected face position
        // canvas.drawBitmap(bmp, null, paint);
        drawImage(canvas, bmp, facex, facey, 72, 72, 0, 0);
        canvas.restore();
        bmp = null;
        mLayerHolder.unlockCanvasAndPost(canvas);
    }
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.activity_main, menu);
        return true;
    }

    @Override
    public void surfaceChanged(SurfaceHolder arg0, int arg1, int width, int height) {
        mPreviewWidth = width;
        mPreviewHeight = height;
        if (arg0.equals(mLayerHolder)) {
            //drawlayer1();
            return;
        }

        // one int per pixel for the overlay mask, plus a YUV420SP buffer (Y plane + half-resolution V/U plane)
        RGBData = new int[mPreviewHeight * mPreviewWidth];
        mYUVData = new byte[mPreviewHeight * mPreviewWidth + (mPreviewHeight / 2) * (mPreviewWidth / 2) + (mPreviewHeight / 2) * (mPreviewWidth / 2)];
        initCamera();
    }
    @SuppressLint("NewApi")
    @Override
    public void surfaceCreated(SurfaceHolder arg0) {
        if (arg0.equals(mLayerHolder))
            return;
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
            // look for a front-facing camera (multiple cameras are supported from API 9 / Android 2.3)
            for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
                CameraInfo info = new CameraInfo();
                Camera.getCameraInfo(i, info);
                if (info.facing == CameraInfo.CAMERA_FACING_FRONT) {
                    //mCamera = Camera.open(i);
                    //bfront = true;
                }
            }
        }
        if (mCamera == null) {
            mCamera = Camera.open(); // open the default (back) camera; from 2.3 on an index can be passed
            bfront = false;
        }
        try {
            mCamera.setPreviewDisplay(mHolder); // set the surface to be used for the live preview
            Log("camera opened");
        } catch (Exception ex) {
            if (null != mCamera) {
                mCamera.release();
                mCamera = null;
            }
            // the camera failed to open: draw a fallback background instead of the preview
            canvas = mHolder.lockCanvas();
            canvas.drawRGB(0, 0, 0);
            canvas.save();
            Bitmap bmp = BitmapFactory.decodeResource(getResources(), R.drawable.bg);
            // canvas.drawBitmap(bmp, null, paint);
            drawImage(canvas, bmp, 0, 0, mPreviewWidth, mPreviewHeight, 0, 0);
            canvas.restore();
            bmp = null;
            mHolder.unlockCanvasAndPost(canvas);
            Log("failed to open camera: " + ex.getMessage());
        }
    }
    // GameView.drawImage(canvas, mBitDestTop, miDTX, mBitQQ.getHeight(), mBitDestTop.getWidth(), mBitDestTop.getHeight()/2, 0, 0);
    public static void drawImage(Canvas canvas, Bitmap blt, int x, int y, int w, int h, int bx, int by) {
        // (x, y): where the image is drawn on the canvas; (bx, by): top-left of the source region inside the bitmap
        Rect src = new Rect(); // source region of the bitmap
        Rect dst = new Rect(); // destination position and size on the screen
        src.left = bx;
        src.top = by;
        src.right = bx + w;   // width of the region to copy from the bitmap
        src.bottom = by + h;  // height of the region to copy from the bitmap
        dst.left = x;         // drawing origin, X
        dst.top = y;          // drawing origin, Y
        dst.right = x + w;    // right edge of the drawn image
        dst.bottom = y + h;   // bottom edge of the drawn image
        // drawBitmap(bitmap, src, dst, paint): src selects which part of the bitmap to draw,
        // dst says where on the canvas (and at what size) to draw it
        canvas.drawBitmap(blt, src, dst, null);

        src = null;
        dst = null;
    }
    @Override
    public void surfaceDestroyed(SurfaceHolder arg0) {
        if (arg0.equals(mLayerHolder))
            return;
        if (null != mCamera) {
            mCamera.setPreviewCallback(null); // !! must come before stopPreview(), otherwise the app crashes on exit
            mCamera.stopPreview();
            bIfPreview = false;
            mCamera.release();
            mCamera = null;
        }
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        Log("going into onPreviewFrame " + data.length);

        int imageWidth = camera.getParameters().getPreviewSize().width;
        int imageHeight = camera.getParameters().getPreviewSize().height;
        // int RGBData[] = new int[imageWidth * imageHeight];

        // copy the frame and hand it to a background task; drop frames while one is still being processed
        if (!bprocessing) {
            System.arraycopy(data, 0, mYUVData, 0, data.length);
            new ProcessTask().execute(mYUVData);
        }

        // Earlier experiments, kept here for reference:
        // decodeYUV420SP(RGBData, mYUVData, imageWidth, imageHeight);

        // Bitmap bitmap = Bitmap.createBitmap(imageWidth, imageHeight, Bitmap.Config.ARGB_8888);
        // bitmap.setPixels(RGBData, 0, imageWidth, 0, 0, imageWidth, imageHeight);
        // FileOutputStream outStream = null;
        // ByteArrayOutputStream baos = new ByteArrayOutputStream();

        // outStream = new FileOutputStream(String.format("/sdcard/%d.bmp", System.currentTimeMillis()));
        // outStream.write(bitmap.);
        // outStream.close();
        /*
        FileOutputStream out;
        try {
            String path = String.format("/mnt/sdcard/%d.png", System.currentTimeMillis());
            out = new FileOutputStream(path);
            bitmap.compress(Bitmap.CompressFormat.PNG, 90, out);
            out.close();
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
        */
        //mYUV420sp = data; // the raw YUV420SP frame
        //int mInitPos = mPreviewWidth * mPreviewHeight;

        //if (mYUV420sp.length <= mInitPos + 1)
        //    return;
        //byte cr = 0;
        //int framesize = mInitPos;
        //int uvp = 0;
        //int i, j, u = 0, v = 0, yp = 0;
        //int uvp = framesize + (i >> 1) * w + j;
        // canvas = mLayerHolder.lockCanvas();
        // canvas.drawRGB(0, 0, 0);
        // canvas.save();
        // Bitmap bmp = BitmapFactory.decodeResource(getResources(), R.drawable.ic_launcher);
        // draw
        // canvas.drawBitmap(bmp, null, paint);
        // drawImage(canvas, bmp, facex, facex, 72, 72, 0, 0);

        // bmp = null;
        // int RGBData[] = new int[mPreviewHeight * mPreviewWidth];
        // byte[] mYUVData = new byte[mYUV420sp.length];
        // System.arraycopy(mYUV420sp, 0, mYUVData, 0, mYUV420sp.length);
        /*
        // inline YUV420SP decode + skin test (superseded by detectface() below)
        for (i = 0, yp = 0; i < mPreviewHeight; i++) {
            uvp = framesize + (i >> 1) * mPreviewWidth;
            for (j = 0; j < mPreviewWidth; j++, yp++) {
                int y = (0xff & ((int) mYUVData[yp])) - 16;
                if (y < 0) y = 0;
                if ((j & 1) == 0) {
                    v = (0xff & mYUVData[uvp++]) - 128;
                    u = (0xff & mYUVData[uvp++]) - 128;
                }
                // skin range: 133 <= Cr <= 173, 77 <= Cb <= 127
                // NB: v here has already had 128 subtracted, so this check does not line up with that range
                if (v > 133 && v < 173)
                    canvas.drawPoint(j, i, paint);

                int y1192 = 1192 * y;
                int r = (y1192 + 1634 * v);
                int g = (y1192 - 833 * v - 400 * u);
                int b = (y1192 + 2066 * u);

                if (r < 0) r = 0; else if (r > 262143) r = 262143;
                if (g < 0) g = 0; else if (g > 262143) g = 262143;
                if (b < 0) b = 0; else if (b > 262143) b = 262143;
                // int rgb = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);

                //r = (rgb & 0x00ff0000) >> 4;
                // g = (rgb & 0x0000ff00) >> 2;
                // b = (rgb & 0x000000ff);
                // if (r > 200 && g > 200 && b > 200)
                //     canvas.drawPoint(j, i, paint);
                // rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
            }
        }

        canvas.restore();
        mLayerHolder.unlockCanvasAndPost(canvas);
        */
        /*
         * framesize = w * h;
         * yp = 0;
         * for (int i = 0; i < h; i++) {
         *     uvp = framesize + (i >> 1) * w;
         *     for (int j = 0; j < w; j++, yp++) {
         *         int y = (0xff & ((int) yuv420sp[yp])) - 16;
         *         if ((j & 1) == 0) {
         *             v = (0xff & yuv420sp[uvp++]) - 128;
         *             u = (0xff & yuv420sp[uvp++]) - 128;
         *         }
         *     }
         * }
         */
    }
    public void drawdetect() {
        canvas = mLayerHolder.lockCanvas();
        if (canvas == null)
            return;
        canvas.drawColor(Color.TRANSPARENT);
        Paint p = new Paint();
        // clear the overlay, then draw the detection mask held in RGBData
        p.setXfermode(new PorterDuffXfermode(Mode.CLEAR));
        canvas.drawPaint(p);
        p.setXfermode(new PorterDuffXfermode(Mode.SRC));
        canvas.save();
        canvas.drawBitmap(RGBData, 0, mPreviewWidth, 0, 0, mPreviewWidth, mPreviewHeight, true, p);
        canvas.restore();
        mLayerHolder.unlockCanvasAndPost(canvas);
    }
    // mark every very bright ("white") pixel: Y > 250
    public void detectwhite(byte[] yuv420sp, int width, int height) {
        final int frameSize = width * height;

        for (int j = 0, yp = 0; j < height; j++) {
            int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
            for (int i = 0; i < width; i++, yp++) {
                int y = (0xff & ((int) yuv420sp[yp]));
                if (y < 0) y = 0;
                if ((i & 1) == 0) {
                    v = (0xff & yuv420sp[uvp++]);
                    u = (0xff & yuv420sp[uvp++]);
                }
                // skin range for reference: 133 <= Cr <= 173, 77 <= Cb <= 127
                if (y > 250) {
                    RGBData[yp] = Color.RED;
                    // canvas.drawPoint(i, j, paint);
                } else {
                    RGBData[yp] = Color.TRANSPARENT;
                }
            }
        }
    }
    // mark pixels whose chroma falls in the skin range (thresholds from point (4) above)
    public void detectface(byte[] yuv420sp, int width, int height) {
        final int frameSize = width * height;

        for (int j = 0, yp = 0; j < height; j++) {
            int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
            for (int i = 0; i < width; i++, yp++) {
                if ((i & 1) == 0) {
                    v = (0xff & yuv420sp[uvp++]);
                    u = (0xff & yuv420sp[uvp++]);
                }
                // skin range: 133 <= Cr <= 173, 77 <= Cb <= 127 (a tighter Cr upper bound of 160 is used here)
                if (v > 133 && v < 160 && u > 77 && u < 127) {
                    RGBData[yp] = Color.RED;
                    // canvas.drawPoint(i, j, paint);
                } else {
                    RGBData[yp] = Color.TRANSPARENT;
                }
            }
        }
    }
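The published listing breaks off here, so initCamera(), the Log() helper and the ProcessTask launched in onPreviewFrame() are not shown. Purely as a hypothetical sketch of how those pieces could fit together (this is not the author's code), a ProcessTask defined as an inner class of MainActivity might look like this:

    // Hypothetical sketch only - not the original ProcessTask from the article.
    // Runs the skin detection off the UI thread and then redraws the overlay.
    class ProcessTask extends AsyncTask<byte[], Void, Void> {
        @Override
        protected Void doInBackground(byte[]... frames) {
            bprocessing = true;                                    // drop incoming frames while we work
            detectface(frames[0], mPreviewWidth, mPreviewHeight);  // fill RGBData with the skin mask
            return null;
        }

        @Override
        protected void onPostExecute(Void result) {
            drawdetect();        // paint RGBData onto the transparent overlay surface
            bprocessing = false; // allow the next preview frame to be processed
        }
    }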