Basic Information
Source name: Face detection example using the Android APIs
Source size: 0.67 MB
File format: .rar
Development language: Java
Last updated: 2015-06-01
Source Code Introduction
The main activity of the package, cliu.TutorialOnFaceDetect, is reproduced below. It loads a photo resource, runs android.media.FaceDetector over it in a background thread, and passes the estimated eye positions to a custom view for display.
package cliu.TutorialOnFaceDetect;
/*
* TutorialOnFaceDetect
* Download by http://www.codefans.net
* [AUTHOR]: Chunyen Liu
* [SDK ]: Android SDK 2.1 and up
* [NOTE ]: developer.com tutorial, "Face Detection with Android APIs"
*/
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.PointF;
import android.media.FaceDetector;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
import android.widget.LinearLayout.LayoutParams;
public class TutorialOnFaceDetect extends Activity {
    private MyImageView mIV;
    private Bitmap mFaceBitmap;
    private int mFaceWidth = 200;
    private int mFaceHeight = 200;
    private static final int MAX_FACES = 10;
    private static String TAG = "TutorialOnFaceDetect";
    private static boolean DEBUG = false;

    protected static final int GUIUPDATE_SETFACE = 999;
    protected Handler mHandler = new Handler() {
        // @Override
        public void handleMessage(Message msg) {
            mIV.invalidate();
            super.handleMessage(msg);
        }
    };
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mIV = new MyImageView(this);
        setContentView(mIV, new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT));

        // load the photo and copy it into a mutable RGB_565 bitmap,
        // the format FaceDetector.findFaces() expects
        Bitmap b = BitmapFactory.decodeResource(getResources(), R.drawable.face3);
        mFaceBitmap = b.copy(Bitmap.Config.RGB_565, true);
        b.recycle();

        mFaceWidth = mFaceBitmap.getWidth();
        mFaceHeight = mFaceBitmap.getHeight();
        mIV.setImageBitmap(mFaceBitmap);
        mIV.invalidate();

        // perform face detection in setFace() in a background thread
        doLengthyCalc();
    }
    public void setFace() {
        FaceDetector fd;
        FaceDetector.Face[] faces = new FaceDetector.Face[MAX_FACES];
        PointF eyescenter = new PointF();
        float eyesdist = 0.0f;
        int[] fpx = null;
        int[] fpy = null;
        int count = 0;

        try {
            fd = new FaceDetector(mFaceWidth, mFaceHeight, MAX_FACES);
            count = fd.findFaces(mFaceBitmap, faces);
        } catch (Exception e) {
            Log.e(TAG, "setFace(): " + e.toString());
            return;
        }

        // check if we detect any faces
        if (count > 0) {
            fpx = new int[count * 2];
            fpy = new int[count * 2];

            for (int i = 0; i < count; i++) {
                try {
                    faces[i].getMidPoint(eyescenter);
                    eyesdist = faces[i].eyesDistance();

                    // set up left eye location
                    fpx[2 * i] = (int) (eyescenter.x - eyesdist / 2);
                    fpy[2 * i] = (int) eyescenter.y;

                    // set up right eye location
                    fpx[2 * i + 1] = (int) (eyescenter.x + eyesdist / 2);
                    fpy[2 * i + 1] = (int) eyescenter.y;

                    if (DEBUG)
                        Log.e(TAG, "setFace(): face " + i + ": confidence = " + faces[i].confidence()
                                + ", eyes distance = " + faces[i].eyesDistance()
                                + ", pose = (" + faces[i].pose(FaceDetector.Face.EULER_X) + ","
                                + faces[i].pose(FaceDetector.Face.EULER_Y) + ","
                                + faces[i].pose(FaceDetector.Face.EULER_Z) + ")"
                                + ", eyes midpoint = (" + eyescenter.x + "," + eyescenter.y + ")");
                } catch (Exception e) {
                    Log.e(TAG, "setFace(): face " + i + ": " + e.toString());
                }
            }
        }
        mIV.setDisplayPoints(fpx, fpy, count * 2, 1);
    }

    private void doLengthyCalc() {
        // run face detection off the UI thread; the Handler redraws the view when it finishes
        Thread t = new Thread() {
            Message m = new Message();

            public void run() {
                try {
                    setFace();
                    m.what = TutorialOnFaceDetect.GUIUPDATE_SETFACE;
                    TutorialOnFaceDetect.this.mHandler.sendMessage(m);
                } catch (Exception e) {
                    Log.e(TAG, "doLengthyCalc(): " + e.toString());
                }
            }
        };
        t.start();
    }
}
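The activity relies on a custom view, MyImageView, which is part of the archive but not reproduced in this excerpt. As a rough guide to how the pieces fit together, here is a minimal sketch of such a view, assuming setDisplayPoints(int[] xx, int[] yy, int total, int style) only needs to remember the eye coordinates computed in setFace() and draw a marker for each one; the class shipped with the download may look different.

package cliu.TutorialOnFaceDetect;

import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.widget.ImageView;

// Hypothetical stand-in for the MyImageView class referenced by the activity above.
public class MyImageView extends ImageView {
    private int[] mPx = null;
    private int[] mPy = null;
    private int mCount = 0;
    private final Paint mPaint = new Paint(Paint.ANTI_ALIAS_FLAG);

    public MyImageView(Context context) {
        super(context);
        mPaint.setColor(Color.GREEN);
        mPaint.setStyle(Paint.Style.STROKE);
        mPaint.setStrokeWidth(3);
    }

    // Remember the point list computed in setFace(); the caller triggers the
    // redraw on the UI thread (via mHandler), so no invalidate() is issued here.
    public void setDisplayPoints(int[] xx, int[] yy, int total, int style) {
        mPx = xx;
        mPy = yy;
        mCount = total;
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas); // draws the bitmap supplied via setImageBitmap()
        if (mPx == null || mPy == null)
            return;
        // Assumes the bitmap is shown unscaled, so detector coordinates map 1:1 to view pixels.
        for (int i = 0; i < mCount; i++) {
            canvas.drawCircle(mPx[i], mPy[i], 10, mPaint);
        }
    }
}

Note that the sketch leaves invalidate() to the caller, matching the original flow in which the background thread calls setDisplayPoints() and the UI-thread Handler performs the redraw, and that it assumes the bitmap is drawn unscaled at the view origin so the detector's pixel coordinates can be used directly in onDraw().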