OpenCV in Android
Building a .so library with the Android NDK
Overview
Build an OpenCV face detection application on Linux using OpenCV 2.3 + NDK r6.
Prerequisites
Android NDK (r5 or later) https://developer.android.com/sdk/ndk/index.html
OpenCV Android package https://sourceforge.net/projects/opencvlibrary/files/opencv-android/2.3/
CMake (optional, an alternative to the NDK build) https://www.cmake.org/
Note: https://code.google.com/p/android-opencv/ says to use the CrystaX NDK r4 instead of the official NDK. This is probably only needed for older Android versions; if the build fails with the official NDK, try building with CrystaX NDK r4.
OpenCV setup
Download OpenCV 2.3.0 for Android from the site above and extract it to a directory, e.g. under ~/.
Set the OPENCV_PACKAGE_DIR environment variable:
$ export OPENCV_PACKAGE_DIR=~/OpenCV-2.3.0/
Create a new Android project
In Eclipse, create a new Android project, e.g. study.opencv, and create a directory named jni under the project root. Extract the downloaded android-ndk-r6 to a directory, e.g. ~/.
Copy Android.mk and Application.mk from one of the samples under ~/android-ndk-r6/samples into the study.opencv/jni directory.
Set up the build scripts
In Android.mk, after include $(CLEAR_VARS), add the following line:
include $(OPENCV_PACKAGE_DIR)/$(TARGET_ARCH_ABI)/share/opencv/OpenCV.mk
If the application is to use ARM NEON, you also need to add the following lines:
include $(OPENCV_PACKAGE_DIR)/armeabi-v7a-neon/share/opencv/OpenCV.mk
LOCAL_ARM_NEON := true
In Application.mk, add the following lines:
APP_STL := gnustl_static
APP_CPPFLAGS := -frtti -fexceptions
Note: for detailed documentation on Android.mk and Application.mk, see Android-mk.html and Application-mk.html under ndk/docs.
Define the native interface in the Java layer
Create a study.opencv.FaceRec class and declare a native face detection method:
/**
 * Detect frontal faces in an image.
 *
 * @param xml
 *            path of the OpenCV haarcascade xml file
 * @param infile
 *            input image file path
 * @param outfile
 *            output image file path
 */
public native void detect(String xml, String infile, String outfile);
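Note that the native library must be loaded before detect is first called, otherwise the call fails with an UnsatisfiedLinkError. A minimal sketch of what the complete class could look like, assuming the module name facedetect used in the Android.mk shown later:

package study.opencv;

public class FaceRec {

    static {
        // Loads libfacedetect.so built by ndk-build; the name must match
        // LOCAL_MODULE := facedetect in Android.mk.
        System.loadLibrary("facedetect");
    }

    // Native face detection entry point, implemented in jni/facedetect.cpp.
    public native void detect(String xml, String infile, String outfile);
}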
Generate the JNI header file
Use the javah command to generate the JNI header file:
$ cd ~/workspace/study.opencv/bin
$ javah study.opencv.FaceRec
This generates a study_opencv_FaceRec.h file in the bin directory. Copy it into the ../jni directory.
Note: if the interface changes, manually delete the generated .h file before re-running javah, to avoid unexpected errors.
Implement face detection in the C++ layer
In the jni directory, create a file named facedetect.cpp with a text editor and implement face detection on an image:
facedetect.cpp:
#include "cv.h"
#include "highgui.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <math.h>
#include <float.h>
#include <limits.h>
#include <time.h>
#include <ctype.h>
#include <android/log.h>
#include <study_opencv_FaceRec.h>
#include <jni.h>
#define LOG_TAG "opencv_face_detect"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
static CvMemStorage* storage = 0;
static CvHaarClassifierCascade* cascade = 0;
void detect_and_draw( IplImage* image );
const char* cascade_name =
"haarcascade_frontalface_alt.xml";
/* "haarcascade_profileface.xml";*/
/*int captureFromImage(char* xml, char* filename);*/
char* jstring2String(JNIEnv*, jstring);
int captureFromImage(char* xml, char* filename, char* outfile)
{
LOGI("begin: ");
// we just detect image
// CvCapture* capture = 0;
IplImage *frame, *frame_copy = 0;
const char* input_name = "lina.png";
if(xml != NULL)
{
cascade_name = xml;
}
if(filename != NULL)
{
input_name = filename;
}
LOGI("xml=%s,filename=%s", cascade_name, input_name);
// load xml
cascade = (CvHaarClassifierCascade*)cvLoad( cascade_name, 0, 0, 0 );
LOGI("load cascade ok ? %d", cascade != NULL ? 1 : 0);
if( !cascade )
{
LOGI("ERROR: Could not load classifier cascade\n" );
// Tried to use a relative path instead of the long full path, but that failed;
// create a file at input_name to check whether the path is accessible.
FILE * fp = fopen(input_name,"w");
if(fp == NULL){
LOGE("create failed");
}
return -1;
}
storage = cvCreateMemStorage(0);
// cvNamedWindow( "result", 1 );
IplImage* image = cvLoadImage( input_name, 1 );
if( image )
{
LOGI("load image successfully");
detect_and_draw( image );
// cvWaitKey(0);
if(outfile != NULL)
{
LOGI("after detected save image file");
cvSaveImage(outfile, image); // write the detected image to a file
}
cvReleaseImage( &image );
}
else
{
LOGE("can't load image from : %s ", input_name);
return -1;
}
return 0;
}
void detect_and_draw( IplImage* img )
{
static CvScalar colors[] =
{
{{0,0,255}},
{{0,128,255}},
{{0,255,255}},
{{0,255,0}},
{{255,128,0}},
{{255,255,0}},
{{255,0,0}},
{{255,0,255}}
};
double scale = 1.3;
IplImage* gray = cvCreateImage( cvSize(img->width,img->height), 8, 1 );
IplImage* small_img = cvCreateImage( cvSize( cvRound (img->width/scale),
cvRound (img->height/scale)),
8, 1 );
int i;
cvCvtColor( img, gray, CV_BGR2GRAY );
cvResize( gray, small_img, CV_INTER_LINEAR );
cvEqualizeHist( small_img, small_img );
cvClearMemStorage( storage );
if( cascade )
{
double t = (double)cvGetTickCount();
CvSeq* faces = cvHaarDetectObjects( small_img, cascade, storage,
1.1, 2, 0/*CV_HAAR_DO_CANNY_PRUNING*/,
cvSize(30, 30) );
t = (double)cvGetTickCount() - t;
LOGI( "detection time = %gms\n", t/((double)cvGetTickFrequency()*1000.) );
for( i = 0; i < (faces ? faces->total : 0); i++ )
{
CvRect* r = (CvRect*)cvGetSeqElem( faces, i );
CvPoint center;
int radius;
center.x = cvRound((r->x + r->width*0.5)*scale);
center.y = cvRound((r->y + r->height*0.5)*scale);
radius = cvRound((r->width + r->height)*0.25*scale);
cvCircle( img, center, radius, colors[i%8], 3, 8, 0 );
}
}
// cvShowImage( "result", img );
cvReleaseImage( &gray );
cvReleaseImage( &small_img );
}
JNIEXPORT void JNICALL Java_study_opencv_FaceRec_detect
(JNIEnv * env, jobject obj, jstring xml, jstring filename, jstring outfile)
{
LOGI("top method invoked! ");/*LOGI("1");
char * c_xml = (char *)env->GetStringUTFChars(xml, JNI_FALSE);
LOGI("char * = %s", c_xml);
if(c_xml == NULL)
{
LOGI("error in get char*");
return;
}
char * c_file = env->GetStringCritical(env, filename, 0);
if(c_xml == NULL)
{
LOGI("error in get char*");
return;
}
captureFromImage(c_xml, c_file);
env->ReleaseStringCritical(env, xml, c_xml);
env->ReleaseStringCritical(env, file_name, c_file);
*/
captureFromImage(jstring2String(env,xml), jstring2String(env,filename), jstring2String(env,outfile));
}
//jstring to char*
char* jstring2String(JNIEnv* env, jstring jstr)
{
if(jstr == NULL)
{
LOGI("NullPointerException!");
return NULL;
}
char* rtn = NULL;
jclass clsstring = env->FindClass("java/lang/String");
jstring strencode = env->NewStringUTF("utf-8");
jmethodID mid = env->GetMethodID(clsstring, "getBytes", "(Ljava/lang/String;)[B");
jbyteArray barr= (jbyteArray)env->CallObjectMethod(jstr, mid, strencode);
jsize alen = env->GetArrayLength(barr);
jbyte* ba = env->GetByteArrayElements(barr, JNI_FALSE);
if (alen > 0)
{
rtn = (char*)malloc(alen + 1);
memcpy(rtn, ba, alen);
rtn[alen] = 0;
}
env->ReleaseByteArrayElements(barr, ba, 0);
LOGI("char*=%s",rtn);
return rtn;
}
Android.mk:
LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
include $(OPENCV_PACKAGE_DIR)/$(TARGET_ARCH_ABI)/share/opencv/OpenCV.mk
LOCAL_MODULE := facedetect
LOCAL_CFLAGS := -Werror
LOCAL_SRC_FILES := \
    facedetect.cpp
LOCAL_LDLIBS := -llog
include $(BUILD_SHARED_LIBRARY)
Application.mk:
APP_ABI := armeabi armeabi-v7a
APP_PLATFORM := android-10
APP_STL := gnustl_static
APP_CPPFLAGS := -frtti -fexceptions
Build with the NDK
Run ndk-build in the project's jni directory:
$ cd ~/workspace/study.opencv/jni
$ ~/android-ndk-r6/ndk-build
If the build succeeds, the libs/armeabi/libfacedetect.so library is generated under the project.
If the build fails, fix the errors according to the compiler messages.
Call the JNI interface
Use DDMS to push the xml file needed for face detection (located under OpenCV-2.3.0/armeabi/share/opencv/haarcascades/) and the test image files to the /data/data/study.opencv/files directory.
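Pushing the files by hand works, but as an alternative (not part of the original steps) the cascade file could also be bundled under the project's assets/ directory and copied into the files directory at first launch. A rough sketch of such a helper, with hypothetical class and method names:

package study.opencv;

import android.content.Context;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;

// Hypothetical helper, not part of the original project: copies a file bundled
// under assets/ into /data/data/study.opencv/files/ so the native code can
// open it via an absolute path.
public class AssetCopier {

    public static File copyToFiles(Context ctx, String assetName) throws IOException {
        File out = new File(ctx.getFilesDir(), assetName);
        if (!out.exists()) {
            InputStream in = ctx.getAssets().open(assetName);
            FileOutputStream os = new FileOutputStream(out);
            byte[] buf = new byte[4096];
            int len;
            while ((len = in.read(buf)) > 0) {
                os.write(buf, 0, len);
            }
            os.close();
            in.close();
        }
        return out;
    }
}

For example, AssetCopier.copyToFiles(this, "haarcascade_frontalface_alt2.xml") called from onCreate (wrapped in a try/catch for the IOException) would produce the same file path used below.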
Create a new thread in the activity and call the FaceRec#detect method:
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.main);
    final FaceRec face = new FaceRec();
    new Thread() {
        @Override
        public void run() {
            face.detect(
                    "/data/data/study.opencv/files/haarcascade_frontalface_alt2.xml",
                    "/data/data/study.opencv/files/wqw1.jpg",
                    "/data/data/study.opencv/files/wqw1_detected.jpg");
        }
    }.start();
}
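Since detect runs off the UI thread and only writes an output file, the result is not displayed automatically. A minimal sketch of how the thread above could be extended to show the detected image, assuming main.xml contains an ImageView with the hypothetical id resultView (imports: android.graphics.Bitmap, android.graphics.BitmapFactory, android.widget.ImageView):

new Thread() {
    @Override
    public void run() {
        face.detect(
                "/data/data/study.opencv/files/haarcascade_frontalface_alt2.xml",
                "/data/data/study.opencv/files/wqw1.jpg",
                "/data/data/study.opencv/files/wqw1_detected.jpg");

        // Decode the image written by the native code...
        final Bitmap result = BitmapFactory.decodeFile(
                "/data/data/study.opencv/files/wqw1_detected.jpg");

        // ...and display it on the UI thread.
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                // R.id.resultView is a hypothetical ImageView declared in main.xml.
                ((ImageView) findViewById(R.id.resultView)).setImageBitmap(result);
            }
        });
    }
}.start();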
Results
In testing, faces were correctly detected in png, jpg, and bmp images, but detection was quite slow.