source code document - cc.gatech.edukihwan23/imagecv/final2005/sourcecode... · ... cvframe.cpp...

72
Source Code Document Computer Vision Final Project Face Detection Kihwan Kim (1) Source Flow Brief Source flow of V-J face detector CCVFrame class : CVFrame.cpp Training.cpp InitPossibleFeatures(); // Making each possible types of features MakeWeakClassifier(); //Initializing and allocating all possible weakclassifier WeakClassifier; //Making them LoadPrositiveExamples(); //Load all positive examples from certain directory LoadNegativeExamples(); //Load all Negative examples from certain directory InitPositiveImage(); //Initialize every Positive Images MakeIntegImage(BYTE* src, double* dst, int width, int height); // Make integral images of the examples InitNegativeImage(); //Initialize every Negative Images MakeIntegImage(BYTE* src, double* dst, int width, int height); // Make integral images of the examples BeginCascadeClassifier(); //Cascade Classifier routine begins AdaBoost(n[nNode], nNode -1); //Apply one step of Adaboost GetOneFeature(m_ImgPositive[j].IntegrailImage,0,0,&minsc); // Calculate all values through n-weakclass times with one type of features for each examples GetOneFeature(m_ImgNegative[j].IntegrailImage,0,0,&minsc); // Calculate all values through n-weakclass times with one type of features for each examples // Then get the minimuim values and update the error and weight and save for all possible weak learners // Then loop again. CImageProc class : ImageProc.cpp InitCamImageBuff(); //Initialize the image buffer will be used in frame grabber InitVJFaceDetector(); //Initialize the V-J train data set and allocate for original images(follow every others) CCVFrame class : CVFrame.cpp ProcFaceDetector.cpp OnFaceVjCam(); //Begin to access webcam device. nDetecMode = MODE_VIOLA_JONES; //Select detection module for thread. AfxBeginThread(ThreadFunc, this, THREAD_PRIORITY_NORMAL,0, 0); //Make the Thread function to see the camera frame. 
ThreadFunc (LPVOID pParam); VJFaceDetector(grayImg, image2, TRUE);//main detection process VJFaceDetector(CvArr* image1, CvArr* image2, BOOL nFlip); //parsing the result area and draw the box. Brief Source flow off Morph face detector CImageProc class : ImageProc.cpp InitCamImageBuff();//Initialize the image buffer will be used in frame grabber CCVFrame class : CVFrame.cpp ProcFaceDetector.cpp OnFaceMoCam(); //Begin to access webcam device. nDetecMode = MODE_MORPHOLOGY_VJ; //Select detection module for thread AfxBeginThread(ThreadFunc, this, THREAD_PRIORITY_NORMAL,0, 0); //Make the Thread function to see the camera frame ThreadFunc (LPVOID pParam); ImageProc.cpp EfDetection.cpp m_ImageProc->m_EfDetection.GetEyePos(imageBuffer, w, h, &lEyeX, &lEyeY, &rEyeX, &rEyeY); // Main morphology detection entry Scaling(imageBuffer, pSrcBuffer, .........); // resize input image to certain configure data ( smaller than original size) CLAHE(tempBuffer1,........);MeanFilter(pPreprocBuffer....);Erosion(smoothingImage,......); // Applying Contrast Limited Adaptive Histogram Equalization ClosingGrayImage(pPreprocBuffer......); // Applying Closing operation ( erosion & dilation )

Upload: vonhan

Post on 18-Mar-2018

240 views

Category:

Documents


5 downloads

TRANSCRIPT

Source Code Document Computer Vision Final Project

Face Detection Kihwan Kim

(1) Source Flow Brief Source flow of V-J face detector CCVFrame class : CVFrame.cpp Training.cpp InitPossibleFeatures(); // Making each possible types of features MakeWeakClassifier(); //Initializing and allocating all possible weakclassifier WeakClassifier; //Making them LoadPrositiveExamples(); //Load all positive examples from certain directory LoadNegativeExamples(); //Load all Negative examples from certain directory InitPositiveImage(); //Initialize every Positive Images MakeIntegImage(BYTE* src, double* dst, int width, int height);

// Make integral images of the examples InitNegativeImage(); //Initialize every Negative Images MakeIntegImage(BYTE* src, double* dst, int width, int height);

// Make integral images of the examples BeginCascadeClassifier(); //Cascade Classifier routine begins AdaBoost(n[nNode], nNode -1); //Apply one step of Adaboost GetOneFeature(m_ImgPositive[j].IntegrailImage,0,0,&minsc); // Calculate all values through n-weakclass times with one type of features for each examples GetOneFeature(m_ImgNegative[j].IntegrailImage,0,0,&minsc); // Calculate all values through n-weakclass times with one type of features for each examples // Then get the minimuim values and update the error and weight and save for all possible weak learners // Then loop again. CImageProc class : ImageProc.cpp InitCamImageBuff(); //Initialize the image buffer will be used in frame grabber InitVJFaceDetector(); //Initialize the V-J train data set and allocate for original images(follow every others) CCVFrame class : CVFrame.cpp ProcFaceDetector.cpp OnFaceVjCam(); //Begin to access webcam device. nDetecMode = MODE_VIOLA_JONES; //Select detection module for thread. AfxBeginThread(ThreadFunc, this, THREAD_PRIORITY_NORMAL,0, 0); //Make the Thread function to see the camera frame. ThreadFunc (LPVOID pParam); VJFaceDetector(grayImg, image2, TRUE);//main detection process VJFaceDetector(CvArr* image1, CvArr* image2, BOOL nFlip);

//parsing the result area and draw the box. Brief Source flow of Morph face detector CImageProc class : ImageProc.cpp InitCamImageBuff();//Initialize the image buffer that will be used in frame grabber CCVFrame class : CVFrame.cpp ProcFaceDetector.cpp OnFaceMoCam(); //Begin to access webcam device. nDetecMode = MODE_MORPHOLOGY_VJ; //Select detection module for thread AfxBeginThread(ThreadFunc, this, THREAD_PRIORITY_NORMAL,0, 0);

//Make the Thread function to see the camera frame ThreadFunc (LPVOID pParam); ImageProc.cpp EfDetection.cpp m_ImageProc->m_EfDetection.GetEyePos(imageBuffer, w, h, &lEyeX, &lEyeY, &rEyeX, &rEyeY); // Main morphology detection entry Scaling(imageBuffer, pSrcBuffer, .........); // resize input image to certain configure data ( smaller than original size) CLAHE(tempBuffer1,........);MeanFilter(pPreprocBuffer....);Erosion(smoothingImage,......); // Applying Contrast Limited Adaptive Histogram Equalization

ClosingGrayImage(pPreprocBuffer......); // Applying Closing operation ( erosion & dilation )

FilterImage(smoothingImage.........); // Applying second-derivative Gaussian filter AndOperation(pThresBuffer..........); // Applying mixing of the closing and Gaussian results ConnectedComponents(pPreprocBuffer, pLabelBuffer);

// Connecting operation for effective labeling and label it. SetFeatureWindow(pLabelBuffer....); // rearranging labeled feature windows( indexing ) SetEyePair(m_pFtrInfo....); // Set possible two pairs ( prospective eye set feature pair ) GroupingFeature(pSrcBuffer....); // Grouping features and apply face verification MoFaceDetector(CvArr* image1, CvArr* image2 ,BOOL nFlip, BOOL bEye); // Find eye pair

// (2) Sources: some miscellaneous sources are skipped and some formats could
// not be made to suit this document's format.
//
// Setting.h ( Configuration )
// Central configuration header: dialog/timer IDs, detector mode codes,
// training-window geometry, and all tuning thresholds for the morphology
// based face/eye verification.  Values are empirically tuned constants.

#define IDD_CAM                 1000        // timer id used by the (unused) timer-driven grab path
// detector mode selectors consumed by the camera thread
#define MODE_VIOLA_JONES        0x05
#define MODE_MORPHOLOGY         0x06
#define MODE_MORPHOLOGY_VJ      0x07

// trainig image size restriction (all train images must be 24x24)
#define WINDOW_WIDTH            24
#define WINDOW_HEIGHT           24
#define TR_FACE_TOTAL_CNT       10
#define MAXWIDTH_DLG_IMG        100
#define MAXHEIGHT_DLG_IMG       100
#define TOTAL_FEATURE           100000
#define MAX_WEAK_CLASSIFIER     100000
#define MIN_PIXEL_DIST          4
#define MIN_PIXEL_DIST_ROW      2
#define MIN_PIXEL_DIST_COL      4
#define MAX_POS_TRAIN_IMG       5000
#define MAX_NEG_TRAIN_IMG       5000

// cascade node define
#define NODE_CASCADE            1
// feature numbers that repeatedly appears
#define REFCOUNT                1
#define MAKE_FEATURE_PIXEL_STEP 1
//#define MAKE_FEATURE_PIXEL_STEP 2

// supported camera frame sizes
#define IMAGE_COL               320
#define IMAGE_ROW               240
#define IMAGE_COL1              352
#define IMAGE_ROW1              288

// Face Verification ( Geometric constraints )
#define RATIO_EYEW              0.2
#define RATIO_EYEH              0.2
#define RATIO_BTNEYEW           0.3
#define RATIO_BTNEYEH           0.4
#define RATIO_CHEEKDIST         0.25
#define RATIO_CHEEKW            0.3
#define RATIO_CHEEKH            0.3
#define RATIO_OFFSET            0.1
#define THRES_MEANDIFF          50.0

// Symmetry measurement
#define SYMM_STEPS              20
#define SYMM_LOCAL_STEPS        10      // half of the srch steps

// Mouth Gradient
#define GRAD_XSTEPS             15
#define GRAD_YSTEPS             15

// Iris Search
#define IRIS_SRCH_WIDTH         1.0     // * iris_diameter
#define IRIS_SRCH_HEIGHT        1.0     // * iris_diameter
#define NUM_CFPOINT             40

// eye circle
#define NUM_ECPOINT             16
#define NUM_ERPOINT             10
#define THRES_EYECRCL           0.3

#define MAX_FDTIMAGE_NUM        5000
//#define THRES_BINARY 157
#define THRES_BINARY            150
#define MAX_LABEL_NUM           1000
#define MIN_FTR_WIDTH           4
#define MAX_FTR_WIDTH           75
#define MIN_FTR_HEIGHT          2
#define MAX_FTR_HEIGHT          30
#define MAX_PAIR_NUM            300
//#define MAX_EYES_DIST 165
#define MAX_EYES_DIST           50
#define MIN_EYES_DIST           20
#define MAX_ANGLE               13.0
#define FACE_COL                50
#define FACE_ROW                60
#define THRES_MOUTH_MID         (0.25)
#define THRES_MOUTH_WIDTH_DN    (0.0)
#define THRES_FTR_SIZE_RATIO    (0.26)
#define THRES_MOUTH_WIDTH_UP    (1.7)
#define THRES_BROW_TO_EYE_RATIO (0.5)
#define IRIS_OUT_RATIO          1.3

// CHEEK Images
#define CHEEK_STDEV_GRAY_UP     30.0
#define CHEEK_STDEV_GRAY_DN     0.70
#define CHEEK_STDEV_GRAY_DIFF   37.0
#define CHEEK_MEAN_GRAY_DIFF    95.0
#define CHEEK_MEAN_GRAY_UP      220
#define CHEEK_MEAN_GRAY_DN      65
#define EYE_CHEEK_DIFF_UP       (10)
#define EYE_CHEEK_DIFF_DN       (-120)

// EYE Images
#define EYE_STDEV_GRAY_UP       68.5
#define EYE_STDEV_GRAY_DN       8.0
#define EYE_STDEV_GRAY_DIFF     27.0
#define EYE_MEAN_GRAY_DIFF      40.0
#define EYE_MEAN_GRAY_UP        170
#define EYE_MEAN_GRAY_DN        47

// CHEEK
#define CHEEK_WINDOW_WIDTH      10
#define CHEEK_WINDOW_HEIGHT     8
#define CHEEK_LEFT_X            6
#define CHEEK_LEFT_Y            16
#define CHEEK_RIGHT_X           33
#define CHEEK_RIGHT_Y           16

// search boundary within the camera frame (the first two reduce to 20 / 10)
#define BOUNDARY_MINX           (IMAGE_COL - IMAGE_COL + 20)
#define BOUNDARY_MINY           (IMAGE_ROW - IMAGE_ROW + 10)
#define BOUNDARY_MAXX           (IMAGE_COL - 20)
#define BOUNDARY_MAXY           (IMAGE_ROW - 3)

//#define THRESHOLD 25
#define THRESHOLD               21
// NOTE(review): the original header re-defined FACE_COL/FACE_ROW here with
// identical values (legal, but redundant); the duplicates were removed.

CVFrameDlg.cpp // CCVFrameDlg dialog CCVFrameDlg::CCVFrameDlg(CWnd* pParent /*=NULL*/) : CDialog(CCVFrameDlg::IDD, pParent) { // Dialog Constructor : Member Variable initialization //{{AFX_DATA_INIT(CCVFrameDlg) // NOTE: the ClassWizard will add member initialization here //}}AFX_DATA_INIT // Note that LoadIcon does not require a subsequent DestroyIcon in Win32

m_hIcon = AfxGetApp()->LoadIcon(IDR_MAINFRAME);

m_ImageProc = new CImageProc(); m_IntegImg = new CIntegralImage(); trainset = new IntImage[TR_FACE_TOTAL_CNT]; m_DlgImg = new BYTE[MAXWIDTH_DLG_IMG*MAXHEIGHT_DLG_IMG]; memset(m_DlgImg,0,MAXWIDTH_DLG_IMG*MAXHEIGHT_DLG_IMG); //train m_total_features = TOTAL_FEATURE; weakClass = new WeakClassifier[MAX_WEAK_CLASSIFIER]; m_szPositiveImgPath = new TrImageDataInfo[MAX_POS_TRAIN_IMG]; m_szNegativeImgPath = new TrImageDataInfo[MAX_NEG_TRAIN_IMG]; m_ImgPositive = new TrainingImg[MAX_POS_TRAIN_IMG]; m_ImgNegative = new TrainingImg[MAX_NEG_TRAIN_IMG]; m_total_weakclass = 0; m_nType0 = m_nType1 = m_nType2 = m_nType3 = m_nType4 = 0; m_nPositiveImg = 0; m_nNegativeImg = 0; m_fMaxFP = 0.5; m_bStopTheAdaboost = FALSE; bStopCam = FALSE; bCamInit = FALSE; m_featureType = 0; m_width = 100; m_height = 100; } void CCVFrameDlg::DoDataExchange(CDataExchange* pDX) { // DDX CDialog::DoDataExchange(pDX); //{{AFX_DATA_MAP(CCVFrameDlg) DDX_Control(pDX, IDC_HIST, m_Log); //}}AFX_DATA_MAP } BEGIN_MESSAGE_MAP(CCVFrameDlg, CDialog) //{{AFX_MSG_MAP(CCVFrameDlg) ON_WM_SYSCOMMAND() ON_WM_PAINT() ON_WM_QUERYDRAGICON() ON_BN_CLICKED(IDC_BTN_OPEN, OnBtnOpen) ON_BN_CLICKED(IDC_BTN_CAM, OnBtnCam) ON_WM_TIMER() ON_WM_DESTROY() ON_BN_CLICKED(IDC_BTN_CAMSTOP, OnBtnCamstop) ON_BN_CLICKED(IDC_TEST_BTN1, OnTestBtn1) ON_BN_CLICKED(IDC_INIT_INTEGRAL, OnInitIntegral) ON_BN_CLICKED(IDC_FACE_VJ_IM, OnFaceVjIm) ON_BN_CLICKED(IDC_FACE_MO_IM, OnFaceMoIm) ON_BN_CLICKED(IDC_FACE_VJ_CAM, OnFaceVjCam) ON_BN_CLICKED(IDC_FACE_MO_CAM, OnFaceMoCam) ON_BN_CLICKED(IDC_FACE_MO_VJCAM, OnFaceMoVjcam) ON_BN_CLICKED(IDC_INIT_TRAIN_IMG, OnInitTrainImg) ON_BN_CLICKED(IDC_RADIO1, OnFilter1) ON_BN_CLICKED(IDC_RADIO2, OnFilter2) ON_BN_CLICKED(IDC_RADIO3, OnFilter3) ON_BN_CLICKED(IDC_RADIO4, OnFilter4)

ON_BN_CLICKED(IDC_RADIO5, OnFilter5) ON_BN_CLICKED(IDC_BTN_TRAINSTOP, OnBtnTrainstop) //}}AFX_MSG_MAP END_MESSAGE_MAP() ///////////////////////////////////////////////////////////////////////////// // CCVFrameDlg message handlers BOOL CCVFrameDlg::OnInitDialog() { CDialog::OnInitDialog(); // Add "About..." menu item to system menu. // IDM_ABOUTBOX must be in the system command range. ASSERT((IDM_ABOUTBOX & 0xFFF0) == IDM_ABOUTBOX); ASSERT(IDM_ABOUTBOX < 0xF000); CMenu* pSysMenu = GetSystemMenu(FALSE); if (pSysMenu != NULL) { CString strAboutMenu; strAboutMenu.LoadString(IDS_ABOUTBOX); if (!strAboutMenu.IsEmpty()) { pSysMenu->AppendMenu(MF_SEPARATOR); pSysMenu->AppendMenu(MF_STRING, IDM_ABOUTBOX, strAboutMenu); } } // Set the icon for this dialog. The framework does this automatically // when the application's main window is not a dialog SetIcon(m_hIcon, TRUE); // Set big icon SetIcon(m_hIcon, FALSE); // Set small icon // TODO: Add extra initialization here m_ImageProc->m_pDlg = this; m_IntegImg->m_pDlg = this; m_ImageProc->InitVJFaceDetector(); return TRUE; // return TRUE unless you set the focus to a control } void CCVFrameDlg::OnSysCommand(UINT nID, LPARAM lParam) { if ((nID & 0xFFF0) == IDM_ABOUTBOX) { CAboutDlg dlgAbout; dlgAbout.DoModal(); } else { CDialog::OnSysCommand(nID, lParam); } } // If you add a minimize button to your dialog, you will need the code below // to draw the icon. For MFC applications using the document/view model, // this is automatically done for you by the framework. void CCVFrameDlg::OnPaint() { if (IsIconic())

{ CPaintDC dc(this); // device context for painting SendMessage(WM_ICONERASEBKGND, (WPARAM) dc.GetSafeHdc(), 0); // Center icon in client rectangle int cxIcon = GetSystemMetrics(SM_CXICON); int cyIcon = GetSystemMetrics(SM_CYICON); CRect rect; GetClientRect(&rect); int x = (rect.Width() - cxIcon + 1) / 2; int y = (rect.Height() - cyIcon + 1) / 2; // Draw the icon dc.DrawIcon(x, y, m_hIcon); } else { CDialog::OnPaint(); } } // The system calls this to obtain the cursor to display while the user drags // the minimized window. HCURSOR CCVFrameDlg::OnQueryDragIcon() { return (HCURSOR) m_hIcon; } // Open Image files for test void CCVFrameDlg::OnBtnOpen() { // TODO: Add your control notification handler code here // TODO: Add your control notification handler code here CFileDialog dlg(TRUE, _T("*.jpg"), "",OFN_FILEMUSTEXIST|OFN_PATHMUSTEXIST|OFN_HIDEREADONLY, "jpg files (*.jpg) |*.jpg|AVI files (*.avi) |*.avi| All Files (*.*)|*.*||",NULL); char title[]= {"Open Image"}; dlg.m_ofn.lpstrTitle= title; if (dlg.DoModal() == IDOK) { //CString path= dlg.GetPathName(); m_ImageProc->m_filename = dlg.GetPathName(); m_ImageProc->InitImageBuff(); CString temp; temp.Format("Opened File : %s",m_ImageProc->m_filename); m_Log.AppendString(temp); } //cvFlip( m_ImageProc->m_img, NULL, 0); cvvNamedWindow( "Original Image", 1 ); cvvShowImage( "Original Image", m_ImageProc->m_img ); // convert color to gray image. IplImage* image = cvCreateImage(cvSize( m_ImageProc->m_img->width,m_ImageProc->m_img->height), IPL_DEPTH_8U, 1 ); image->origin = m_ImageProc->m_img->origin; cvCvtColor( m_ImageProc->m_img, image, CV_BGR2GRAY ); cvCanny( image, image, 100, 200, 3 ); //cvFlip( image, NULL, 0); cvvNamedWindow( "Result Image1", 1 ); cvvShowImage( "Result Image1", image ); }

/ / on timer for alternative of using Thread : Not use at this moment void CCVFrameDlg::OnTimer(UINT nIDEvent) { // TODO: Add your message handler code here and/or call default switch(nIDEvent){ case IDD_CAM: //StartCam(); //alternative break; default: break; } CDialog::OnTimer(nIDEvent); } / / alternative of using Thread : Not use at this moment void CCVFrameDlg::StartCam() { // alternative function /* cvGrabFrame( m_ImageProc->m_cap ); m_ImageProc->m_frame = cvRetrieveFrame( m_ImageProc->m_cap ); //cvErode( frame, frame, 0, 2 ); cvShowImage( "Webcam", m_ImageProc->m_frame ); if( cvWaitKey(10) >= 0 ){ KillTimer(IDD_CAM); cvReleaseCapture( &m_ImageProc->m_cap ); cvReleaseImage( &m_ImageProc->m_frame); cvDestroyWindow( "Webcam" ); } */ } / / alternative of using Thread : Not use at this moment void CCVFrameDlg::StopCam() { /* // alternative function bStopCam = FALSE; //m_pThread->ResumeThread(); m_pThread->Delete(); //KillTimer(IDD_CAM); cvReleaseCapture( &m_ImageProc->m_cap ); cvReleaseImage( &m_ImageProc->m_frame); cvDestroyWindow( "Webcam" ); */ } void CCVFrameDlg::OnDestroy() { CDialog::OnDestroy(); // TODO: Add your message handler code here bStopCam = FALSE; cvDestroyWindow( "Webcam" ); delete m_DlgImg; } // Controling CAM thread for suspending threads void CCVFrameDlg::OnBtnCamstop() { // TODO: Add your control notification handler code here

bStopCam = FALSE; if(m_pThread) m_pThread->SuspendThread(); cvDestroyWindow( "Webcam" ); } // Temporal test button for one step training sample void CCVFrameDlg::OnTestBtn1() { // TODO: Add your control notification handler code here CFileDialog dlg(TRUE, _T("*.jpg"), "",OFN_FILEMUSTEXIST|OFN_PATHMUSTEXIST|OFN_HIDEREADONLY, "jpg files (*.jpg) |*.jpg|AVI files (*.avi) |*.avi| All Files (*.*)|*.*||",NULL); char title[]= {"Open Image"}; dlg.m_ofn.lpstrTitle= title; if (dlg.DoModal() == IDOK) { m_ImageProc->m_filename = dlg.GetPathName(); m_ImageProc->InitImageBuff(); CString temp2; temp2.Format("Opened File : %s",m_ImageProc->m_filename); m_Log.AppendString(temp2); } IplImage* image = reinterpret_cast<IplImage*>(m_ImageProc->m_img3); IplImage* image2 = reinterpret_cast<IplImage*>(m_ImageProc->m_img); IplImage* dst = cvCreateImage( cvGetSize(image), 8, 1 ); IplImage* reSizeimg = cvCreateImage( cvSize(200,200), 8, 1 ); cvResize(image, reSizeimg,CV_INTER_NN); cvvNamedWindow( "resize Image", 1 ); cvvShowImage( "resize Image", reSizeimg ); // training examples int width = image->width; int height = image->height; m_width = width; m_height = height; BYTE* tempBuffer; double* tempIntBuff; tempBuffer = new BYTE[width*height]; memset(tempBuffer,0,width*height); tempIntBuff = new double[(width+1)*(height+1)]; memset(tempIntBuff,0.0,(width+1)*(height+1)); for(int row=0; row < height; row++){ for(int col=0; col < width ; col++){ tempBuffer[width*(row) + (col)] = image->imageData[width*(row) + (col)]; m_DlgImg[width*(row) + (col)] = tempBuffer[width*(row) + (col)]; } } //Invalidate(TRUE); MakeIntegImage(tempBuffer,tempIntBuff,width,height); // calc variance double ex = 0; // sum double ex2 = 0; // sq sum for(int i=0;i<WINDOW_WIDTH*WINDOW_HEIGHT;i++) { ex += tempBuffer[i]; ex2 += (tempBuffer[i]*tempBuffer[i]); } int size = WINDOW_WIDTH*WINDOW_HEIGHT; double var = ex2/size - (ex/size)*(ex/size);

//double MaxNo = 0.0; double nmax = 0.0; for( row=0; row < height; row++){ for(int col=0; col < width ; col++){ if( tempIntBuff[width*(row) + (col)] > nmax ){ nmax = tempIntBuff[width*(row) + (col)]; } } } CString str; str.Format("max in integ img = %f",nmax); m_Log.AppendString(str); MakeWeakClassifier(); //MakeWeakClassifier2(); double maxFeat[5]; int max_index[5]; int temp = 0; for(int k=0;k<5;k++){ max_index[k] =0; maxFeat[k] = 0.0; } for(int j= 0; j < 5; j++){ for( i = 0; i< m_total_weakclass ; i++){ if( weakClass[i].type == j){ double feat = GetOneFeature(tempIntBuff,0,0, &weakClass[i]); //double feature = feat/var; double feature = feat; if(feature > maxFeat[j]){ maxFeat[j] = feature; max_index[j] = i; } } temp++; } } int indexnum = m_featureType; str.Format(" maxFeat = %f, maxindex: %d, maxType: %d, x1:%d, x2:%d, x3:%d, x4:%d, y1:%d, y2:%d, y3:%d, y4:%d" ,maxFeat[indexnum],max_index[indexnum],weakClass[max_index[indexnum]].type ,weakClass[max_index[indexnum]].x1 ,weakClass[max_index[indexnum]].x2 ,weakClass[max_index[indexnum]].x3 ,weakClass[max_index[indexnum]].x4 ,weakClass[max_index[indexnum]].y1 ,weakClass[max_index[indexnum]].y2 ,weakClass[max_index[indexnum]].y3 ,weakClass[max_index[indexnum]].y4); m_Log.AppendString(str); str.Format(" Total wc : %d, type0; %d, type1: %d, type2:%d, type3:%d, type4:%d " ,m_total_weakclass,m_nType0,m_nType1,m_nType2,m_nType3,m_nType4 ); m_Log.AppendString(str); CvPoint pt1; CvPoint pt2; if(weakClass[max_index[indexnum]].type == 3){

pt1.x = weakClass[max_index[indexnum]].y1; pt1.y = weakClass[max_index[indexnum]].x1; pt2.x = weakClass[max_index[indexnum]].y4; pt2.y = weakClass[max_index[indexnum]].x3; }else if(weakClass[max_index[indexnum]].type == 2){ pt1.x = weakClass[max_index[indexnum]].y1; pt1.y = weakClass[max_index[indexnum]].x1; pt2.x = weakClass[max_index[indexnum]].y3; pt2.y = weakClass[max_index[indexnum]].x4; }else if(weakClass[max_index[indexnum]].type == 0){ pt1.x = weakClass[max_index[indexnum]].y1; pt1.y = weakClass[max_index[indexnum]].x1; pt2.x = weakClass[max_index[indexnum]].y3; pt2.y = weakClass[max_index[indexnum]].x3; }else if(weakClass[max_index[indexnum]].type == 1){ pt1.x = weakClass[max_index[indexnum]].y1; pt1.y = weakClass[max_index[indexnum]].x1; pt2.x = weakClass[max_index[indexnum]].y3; pt2.y = weakClass[max_index[indexnum]].x3; }else if(weakClass[max_index[indexnum]].type == 4){ pt1.x = weakClass[max_index[indexnum]].y1; pt1.y = weakClass[max_index[indexnum]].x1; pt2.x = weakClass[max_index[indexnum]].y3; pt2.y = weakClass[max_index[indexnum]].x3; } cvRectangle( image, pt1, pt2, CV_RGB(255,0,0),1,8,0); IplImage* reSizeimg2 = cvCreateImage( cvSize(200,200), 8, 1 ); cvResize(image, reSizeimg2,CV_INTER_NN); //cvResize(image, reSizeimg2,CV_INTER_LINEAR); delete [] tempBuffer; delete [] tempIntBuff; cvvNamedWindow( "resize Image2", 1 ); cvvShowImage( "resize Image2", reSizeimg2 ); } // Define and Getting each type of features double CCVFrameDlg::GetOneFeature(double* integSrc, int row, int col , WeakClassifier* weak) { double f1=0; // x -> row y->col !!!!!!!!!!!!! cautious! 
int x1 =weak->x1;int x2=weak->x2;int x3=weak->x3;int x4=weak->x4; int y1 =weak->y1;int y2=weak->y2;int y3=weak->y3;int y4=weak->y4; double sizeFeature = 0.0; switch(weak->type) { case 0: f1 = integSrc[(row+x1)*WINDOW_WIDTH+col+y3] - integSrc[(row+x1)*WINDOW_WIDTH + col+y1] + integSrc[(row+x3)*WINDOW_WIDTH+col+y3] - integSrc[(row+x3)*WINDOW_WIDTH + col+y1] + 2*(integSrc[(row+x2)*WINDOW_WIDTH+col+y1] - integSrc[(row+x2)*WINDOW_WIDTH+col+y3]); sizeFeature = (x3 - x1)*(y3 - y1); break; case 1: f1 = integSrc[(row+x3)*WINDOW_WIDTH+col+y1] + integSrc[(row+x3)*WINDOW_WIDTH+col+y3] - integSrc[(row+x1)*WINDOW_WIDTH+col+y1] - integSrc[(row+x1)*WINDOW_WIDTH+col+y3] + 2*(integSrc[(row+x1)*WINDOW_WIDTH+col+y2] - integSrc[(row+x3)*WINDOW_WIDTH+col+y2]); sizeFeature = (x3 - x1)*(y3 - y1); break;

case 2: f1 = integSrc[(row+x1)*WINDOW_WIDTH+col+y1] - integSrc[(row+x1)*WINDOW_WIDTH+col+y3] + integSrc[(row+x4)*WINDOW_WIDTH+col+y3] - integSrc[(row+x4)*WINDOW_WIDTH+col+y1] + 3*(integSrc[(row+x2)*WINDOW_WIDTH+col+y3] - integSrc[(row+x2)*WINDOW_WIDTH+col+y1] + integSrc[(row+x3)*WINDOW_WIDTH+col+y1] - integSrc[(row+x3)*WINDOW_WIDTH+col+y3]); sizeFeature = (x4 - x1)*(y3 - y1); break; case 3: f1 = integSrc[(row+x1)*WINDOW_WIDTH+col+y1] - integSrc[(row+x1)*WINDOW_WIDTH+col+y4] + integSrc[(row+x3)*WINDOW_WIDTH+col+y4] - integSrc[(row+x3)*WINDOW_WIDTH+col+y1] + 3*(integSrc[(row+x3)*WINDOW_WIDTH+col+y2] - integSrc[(row+x3)*WINDOW_WIDTH+col+y3] + integSrc[(row+x1)*WINDOW_WIDTH+col+y3] - integSrc[(row+x1)*WINDOW_WIDTH+col+y2] ); sizeFeature = (x3 - x1)*(y4 - y1); break; case 4: f1 = integSrc[(row+x1)*WINDOW_WIDTH+col+y1] + integSrc[(row+x1)*WINDOW_WIDTH+col+y3] + integSrc[(row+x3)*WINDOW_WIDTH+col+y1] + integSrc[(row+x3)*WINDOW_WIDTH+col+y3] - 2*(integSrc[(row+x2)*WINDOW_WIDTH+col+y1] + integSrc[(row+x2)*WINDOW_WIDTH+col+y3] + integSrc[(row+x1)*WINDOW_WIDTH+col+y2] + integSrc[(row+x3)*WINDOW_WIDTH+col+y2]) + 4*integSrc[(row+x2)*WINDOW_WIDTH+col+y2]; sizeFeature = (x3 - x1)*(y3 - y1); break; default: break; } //temporary normalize f1 = f1/sizeFeature; return f1; } // Making Integral Images void CCVFrameDlg::MakeIntegImage(BYTE* src, double* dst, int width, int height) { int i,j; double partialsum; double* tmpdata = new double[(width+1)*(height+1)]; memset(tmpdata,0.0,(width+1)*(height+1)); for(i=0;i<height+1;i++) tmpdata[(width+1)*i+0] = 0; for(i=0;i<(width+1);i++) tmpdata[(width+1)*0+i] = 0; for(i=0;i<height;i++) { for(j=0;j<width;j++) { tmpdata[(width+1)*(i+1)+(j+1)] = src[width*i+j]; } } for(i=1;i<(width+1);i++) { tmpdata[(width+1)*0+i] = tmpdata[(width+1)*0+(i-1)] + tmpdata[(width+1)*0+i]; } for(i=1;i<height+1;i++) { partialsum = 0; for(j=0;j<(width+1);j++) {

partialsum += tmpdata[(width+1)*i+j]; tmpdata[(width+1)*i+j] = tmpdata[(width+1)*(i-1)+j] + partialsum; } } for(i=0;i<height;i++) { for(j=0;j<width;j++) { dst[width*i+j] = tmpdata[(width+1)*(i+1)+(j+1)]; } } delete tmpdata; } // Initialize Weakclassifier void CCVFrameDlg::InitPossibleFeatures() { MakeWeakClassifier(); } // Make Weakclassifier void CCVFrameDlg::MakeWeakClassifier() { m_nType0 = m_nType1 = m_nType2 = m_nType3 = m_nType4 = 0; int sx = WINDOW_HEIGHT; int sy = WINDOW_WIDTH; int minDist = MIN_PIXEL_DIST; int x1,x2,x3,x4,y1,y2,y3,y4; //SimpleClassifier sc; int index; //ofstream f; int pickup=9; int cntwc = 0; index = 0; for(x1=0;x1<sx-MIN_PIXEL_DIST;x1+=2) for(x3=x1+MIN_PIXEL_DIST;x3<sx;x3+=2) for(y1=0;y1<sy-MIN_PIXEL_DIST;y1+=2) for(y3=y1+MIN_PIXEL_DIST;y3<sy;y3+=2) { if( (y3-y1)>(x3-x1) ){ // restriction x2 = (x1+x3)/2; y2 = y4 = x4 = -1; weakClass[index].type = 0; weakClass[index].error = 0.0;

weakClass[index].x1 = x1; weakClass[index].x2 = x2; weakClass[index].x3 = x3; weakClass[index].x4 = x4; weakClass[index].y1 = y1; weakClass[index].y2 = y2; weakClass[index].y3 = y3; weakClass[index].y4 = y4;

weakClass[index].parity = 0; weakClass[index].thresh = 0.0; index++; m_nType0++; } } for(x1=0;x1<sx - MIN_PIXEL_DIST;x1+=2) for(x3=x1+MIN_PIXEL_DIST;x3<sx;x3+=2) for(y1=0;y1<sy-MIN_PIXEL_DIST;y1+=2) for(y3=y1+MIN_PIXEL_DIST;y3<sy;y3+=2) { if( (y3-y1) < (x3-x1) ){ // restriction y2 = (y1+y3)/2; x2 = x4 = y4 = -1;

weakClass[index].type = 1; weakClass[index].error = 0.0;

weakClass[index].x1 = x1; weakClass[index].x2 = x2; weakClass[index].x3 = x3; weakClass[index].x4 = x4; weakClass[index].y1 = y1; weakClass[index].y2 = y2; weakClass[index].y3 = y3; weakClass[index].y4 = y4;

weakClass[index].parity = 0; weakClass[index].thresh = 0.0; index++; m_nType1++; } } for(x1=0;x1<sx-MIN_PIXEL_DIST;x1+=2) for(x4=x1+MIN_PIXEL_DIST;x4<sx;x4+=2) for(y1=0;y1<sy-MIN_PIXEL_DIST;y1+=2) for(y3=y1+MIN_PIXEL_DIST;y3<sy;y3+=2) { if( (y3-y1)>(x4-x1) ){ // restriction x2 = x1 + (x4-x1)/3; x3 = x2 + (x4-x1)/3; y2 = y4 = -1; weakClass[index].type = 2; weakClass[index].error = 0.0;

weakClass[index].x1 = x1; weakClass[index].x2 = x2; weakClass[index].x3 = x3; weakClass[index].x4 = x4; weakClass[index].y1 = y1; weakClass[index].y2 = y2; weakClass[index].y3 = y3; weakClass[index].y4 = y4;

weakClass[index].parity = 0; weakClass[index].thresh = 0.0; index++; m_nType2++; } } for(x1=0;x1<sx-MIN_PIXEL_DIST;x1+=2) for(x3=x1+MIN_PIXEL_DIST;x3<sx;x3+=2) for(y1=0;y1<sy-MIN_PIXEL_DIST;y1+=2) for(y4=y1+MIN_PIXEL_DIST;y4<sy;y4+=3) { if( (y4-y1)<(x3-x1) ){ // restriction y2 = y1 + (y4-y1)/3; y3 = y2 + (y4-y1)/3; x2 = x4 = -1; weakClass[index].type = 3; weakClass[index].error = 0.0;

weakClass[index].x1 = x1; weakClass[index].x2 = x2; weakClass[index].x3 = x3; weakClass[index].x4 = x4; weakClass[index].y1 = y1; weakClass[index].y2 = y2; weakClass[index].y3 = y3; weakClass[index].y4 = y4;

weakClass[index].parity = 0; weakClass[index].thresh = 0.0; index++; m_nType3++; } } for(x1=0;x1<sx-MIN_PIXEL_DIST;x1+=2) for(x3=x1+MIN_PIXEL_DIST;x3<sx;x3+=2) for(y1=0;y1<sy-MIN_PIXEL_DIST;y1+=2) for(y3=y1+MIN_PIXEL_DIST;y3<sy;y3+=2) { x2 = (x1+x3)/2;

y2 = (y1+y3)/2; x4 = y4 = -1; weakClass[index].type = 4; weakClass[index].error = 0.0; weakClass[index].x1 = x1; weakClass[index].x2 = x2; weakClass[index].x3 = x3; weakClass[index].x4 = x4; weakClass[index].y1 = y1; weakClass[index].y2 = y2; weakClass[index].y3 = y3; weakClass[index].y4 = y4; weakClass[index].parity = 0; weakClass[index].thresh = 0.0; index++; m_nType4++; } m_total_weakclass = index; CString str; str.Format("Number of total weak classifier: %d",index); m_Log.AppendString(str); } // step-generalized version void CCVFrameDlg::MakeWeakClassifier2() { // Skip close to above } // Init Integral Images void CCVFrameDlg::OnInitIntegral() { // TODO: Add your control notification handler code here CFileDialog dlg(TRUE, _T("*.jpg"), "",OFN_FILEMUSTEXIST|OFN_PATHMUSTEXIST|OFN_HIDEREADONLY, "jpg files (*.jpg) |*.jpg|AVI files (*.avi) |*.avi| All Files (*.*)|*.*||",NULL); char title[]= {"Open Image"}; dlg.m_ofn.lpstrTitle= title; if (dlg.DoModal() == IDOK) { m_ImageProc->m_filename = dlg.GetPathName(); m_ImageProc->InitImageBuff(); } m_IntegImg->Load(m_ImageProc->m_filename); } void CCVFrameDlg::BoostingInputFiles() { //Skip psedo } // Message Handler ft for filter type test void CCVFrameDlg::OnFilter1() { // TODO: Add your control notification handler code here m_featureType = 0; } void CCVFrameDlg::OnFilter2() { // TODO: Add your control notification handler code here m_featureType = 1; } void CCVFrameDlg::OnFilter3() { // TODO: Add your control notification handler code here m_featureType = 2; } void CCVFrameDlg::OnFilter4() { // TODO: Add your control notification handler code here

m_featureType = 3; } void CCVFrameDlg::OnFilter5() { // TODO: Add your control notification handler code here m_featureType =4; } void CCVFrameDlg::OnBtnTrainstop() { // TODO: Add your control notification handler code here m_bStopTheAdaboost = TRUE; }

Train.cpp // Training.cpp #include "CVFrameDlg.h" #include "cv.h" #include "highgui.h" #include "cxcore.h" #include "stdafx.h" #include "CVFrame.h" #include "CVFrameDlg.h" #include <stdio.h> #include <stdlib.h> #include <string.h> #include <assert.h> #include <math.h> #include <float.h> #include <limits.h> #include <time.h> #include <ctype.h> //#include "CVAPP.h" #include "./api/HistoryEdit.h" #include "ImageProc.h" #include "IntegralImage.h" #include "Setting.h" #include "./api/FileInfo.h" #include <algorithm> #ifdef _DEBUG #define new DEBUG_NEW #undef THIS_FILE static char THIS_FILE[] = __FILE__; #endif // Loading positive examples (face) void CCVFrameDlg::LoadPrositiveExamples() { CString dirPath,pathName, filename; CFileDialog dlg(TRUE, NULL, (LPCTSTR)m_openedFileName); char title1[]= {"Open POSITIVE(face) Image Directory: Select one image within the directory"}; dlg.m_ofn.lpstrTitle= title1; if (dlg.DoModal() == IDOK) { m_bOpenFile = TRUE; pathName = dlg.GetPathName(); filename = dlg.GetFileName(); dirPath = pathName.Left( pathName.GetLength()-filename.GetLength() ); } CFileInfoArray fileArray;

fileArray.AddDir( dirPath, // Directory "*.*", // Filemask (all files) FALSE, // Recurse subdirs CFileInfoArray::AP_SORTBYNAME | CFileInfoArray::AP_SORTASCENDING, // Sort by name and ascending FALSE // Don't add entries for dirs ); //char buff[128]; m_cntTrainedImg = fileArray.GetSize() ; int tempSize = fileArray.GetSize() ; m_nPositiveImg = tempSize; for(int j=0; j < tempSize ; j++){ m_szPositiveImgPath[j].path = fileArray[j].GetFilePath(); m_szPositiveImgPath[j].filename = fileArray[j].GetFileTitle(); /* m_ImageProc->m_filename = m_szPositiveImgPath[j].path ; m_ImageProc->InitImageBuff(); // convert color to gray image. IplImage* image = cvCreateImage(cvSize( m_ImageProc->m_img->width,m_ImageProc->m_img->height), IPL_DEPTH_8U, 1 ); image->origin = m_ImageProc->m_img->origin; cvCvtColor( m_ImageProc->m_img, image, CV_BGR2GRAY ); sprintf(buff,"Image %s",m_szPositiveImgPath[j].filename); cvvNamedWindow( buff, 1 ); cvvShowImage( buff, image ); */ } } // Loading Negative examples (face) void CCVFrameDlg::LoadNegativeExamples() { CString dirPath,pathName, filename; CFileDialog dlg(TRUE, NULL, (LPCTSTR)m_openedFileName); char title1[]= {"Open NEGATIVE(Non-face)Image Directory: Select one image within the directory"}; dlg.m_ofn.lpstrTitle= title1; if (dlg.DoModal() == IDOK) { m_bOpenFile = TRUE; pathName = dlg.GetPathName(); filename = dlg.GetFileName(); dirPath = pathName.Left( pathName.GetLength()-filename.GetLength() ); } CFileInfoArray fileArray; fileArray.AddDir( dirPath, // Directory "*.*", // Filemask (all files)

FALSE, // Recurse subdirs CFileInfoArray::AP_SORTBYNAME | CFileInfoArray::AP_SORTASCENDING, // Sort by name and ascending FALSE // Don't add entries for dirs ); //char buff[128]; m_cntTrainedImg = fileArray.GetSize() ; int tempSize = fileArray.GetSize() ; m_nNegativeImg = tempSize; for(int j=0; j < tempSize ; j++){ m_szNegativeImgPath[j].path = fileArray[j].GetFilePath(); m_szNegativeImgPath[j].filename = fileArray[j].GetFileTitle(); } } // Init positive images void CCVFrameDlg::InitPositiveImage() { CString temp; for(int i=0; i < m_nPositiveImg ; i++){ IplImage* tempImg;IplImage* tempImage2; tempImg = cvLoadImage(m_szPositiveImgPath[i].path,1); //load gray image tempImage2 = cvCreateImage(cvSize( tempImg->width,tempImg->height), IPL_DEPTH_8U, 1 ); tempImage2->origin = tempImg->origin; cvCvtColor( tempImg, tempImage2, CV_BGR2GRAY ); memset(m_ImgPositive[i].InputImage,0,WINDOW_WIDTH*WINDOW_HEIGHT); memset(m_ImgPositive[i].IntegrailImage,0,(WINDOW_WIDTH+1)*(WINDOW_HEIGHT+1)); if( ((tempImage2->width)!=WINDOW_WIDTH)||((tempImage2->width)!=WINDOW_HEIGHT) ){ AfxMessageBox("Image size is not match!"); return; } int width = tempImage2->width; int height = tempImage2->height; m_width = width; m_height = height; for(int row=0; row < height; row++){ for(int col=0; col < width ; col++){ m_ImgPositive[i].InputImage[width*(row) + (col)] = tempImage2->imageData[width*(row) + (col)]; m_DlgImg[width*(row) + (col)] = m_ImgPositive[i].InputImage[width*(row) + (col)]; } } Invalidate(FALSE); // AfxMessageBox("push to next"); MakeIntegImage(m_ImgPositive[i].InputImage,m_ImgPositive[i].IntegrailImage,width,height);

temp.Format("Positive Trained File : %s is formulated integral image",m_szPositiveImgPath[i].filename); m_Log.AppendString(temp); cvReleaseImage( &tempImg ); } } // Init Negative images void CCVFrameDlg::InitNegativeImage() { CString temp; for(int i=0; i < m_nNegativeImg ; i++){ IplImage* tempImg;IplImage* tempImage2; tempImg = cvLoadImage(m_szNegativeImgPath[i].path,1); //load gray image tempImage2 = cvCreateImage(cvSize( tempImg->width,tempImg->height), IPL_DEPTH_8U, 1 ); tempImage2->origin = tempImg->origin; cvCvtColor( tempImg, tempImage2, CV_BGR2GRAY ); memset(m_ImgNegative[i].InputImage,0,WINDOW_WIDTH*WINDOW_HEIGHT); memset(m_ImgNegative[i].IntegrailImage,0,(WINDOW_WIDTH+1)*(WINDOW_HEIGHT+1)); if( ((tempImage2->width)!=WINDOW_WIDTH)||((tempImage2->width)!=WINDOW_HEIGHT) ){ AfxMessageBox("Image size is not match!"); return; } int width = tempImage2->width; int height = tempImage2->height; m_width = width; m_height = height; for(int row=0; row < height; row++){ for(int col=0; col < width ; col++){ m_ImgNegative[i].InputImage[width*(row) + (col)] = tempImage2->imageData[width*(row) + (col)]; m_DlgImg[width*(row) + (col)] = m_ImgNegative[i].InputImage[width*(row) + (col)]; } } Invalidate(FALSE); MakeIntegImage(m_ImgNegative[i].InputImage,m_ImgNegative[i].IntegrailImage,width,height); temp.Format("Negative Trained File : %s is formulated integral image",m_szPositiveImgPath[i].filename); m_Log.AppendString(temp); cvReleaseImage( &tempImg ); } } // Begin Cascading Classifier void CCVFrameDlg::BeginCascadeClassifier() { double* F = new double[NODE_CASCADE]; double* D = new double[NODE_CASCADE]; int* n = new int[NODE_CASCADE]; F[0] = 1.0; D[0] = 1.0; m_fMaxFP = 0.5f; for(int nNode = 0; nNode < NODE_CASCADE ; nNode++){ // initialize weight if(nNode == 0) { double wIniWeightPosi = 0.5/m_nPositiveImg;

double wIniWeightNega = 0.5/m_nNegativeImg; // Paper define 'i' for number of examples and 'j' for number of feature so i follow it. for(int i=0; i < m_nPositiveImg; i++ ){ m_ImgPositive[i].weight = wIniWeightPosi; } for(int ii=0; ii < m_nNegativeImg; ii++ ){ m_ImgNegative[ii].weight = wIniWeightNega; } } nNode++; n[nNode] = 0; F[nNode] = F[nNode-1]; F[nNode] = 1.0; while(F[nNode]>(m_fMaxFP)){ n[nNode]++; // AdaBoost //m_pThread = AfxBeginThread(ThreadAdaFunc, this, THREAD_PRIORITY_NORMAL,0, 0); AdaBoost(n[nNode], nNode -1); } } } // Adaboost void CCVFrameDlg::AdaBoost(int n, int nNode) { int* eachlabels = NULL; double* eachweight = NULL; double* errors1; double* errors2; double* value = new double[m_nPositiveImg+m_nNegativeImg]; double* weakFeatureValue = new double[m_nPositiveImg+m_nNegativeImg]; int* labels = new int[m_nPositiveImg+m_nNegativeImg]; double* weights = new double[m_nPositiveImg+m_nNegativeImg]; errors1 = new double[m_nPositiveImg+m_nNegativeImg]; errors2 = new double[m_nPositiveImg+m_nNegativeImg]; eachlabels = new int[m_nPositiveImg+m_nNegativeImg]; eachweight = new double[m_nPositiveImg+m_nNegativeImg]; CString strTemp; //int ntrainingPos = training.m_posSize; int cntLabel = 0; for(int i=0;i< m_nPositiveImg;i++) { weights[i] = m_ImgPositive[i].weight; labels[i] = 1;// positive cntLabel++; } for( i=0;i< m_nNegativeImg;i++) { weights[i] = m_ImgNegative[i].weight; labels[i] = 0;// negative cntLabel++; } double minerror; minerror = 1E100;

// CWeakClassifier minsc = allFeatures.m_wData[0]; ///////////////////////////////////////////////////////////////////////////// //... weakClass[0] -> minsc!!! WeakClassifier minsc = weakClass[0]; ///////////////////////////////////////////////////////////////////////////// int sindex = 0; // number of generated weakclass(feature) for(int ii=0;ii<m_total_weakclass;ii++) { double* min1; double* min2; double e; memcpy(eachlabels, labels, sizeof(int)*(m_nPositiveImg+m_nNegativeImg)); memcpy(eachweight, weights, sizeof(double)*(m_nPositiveImg+m_nNegativeImg)); int cntAllexample = 0; for(int j=0;j<m_nPositiveImg;j++){ weakFeatureValue[j] = GetOneFeature(m_ImgPositive[j].IntegrailImage,0,0,&weakClass[ii]); cntAllexample++; } for( j=cntAllexample;j<m_nPositiveImg + cntAllexample;j++){ weakFeatureValue[j] = GetOneFeature(m_ImgNegative[j].IntegrailImage,0,0,&weakClass[ii]); } memcpy(value,weakFeatureValue,sizeof(double)*(m_nPositiveImg+m_nNegativeImg)); QuickSort(weakFeatureValue, eachlabels, eachweight,0,(m_nPositiveImg+m_nNegativeImg)-1 ); // compute errors1, suppose parity is 1, that is f(x)<thresh ==> h(x) = 1 e = 0.0; for(int i=0; i<(m_nPositiveImg+m_nNegativeImg) ; i++ ){ if( eachlabels[i] == 1 ) // positive ex e += eachweight[i]; } errors1[0] = e; for(i=1;i<(m_nPositiveImg+m_nNegativeImg);i++){ errors1[i] = errors1[i-1]; if(eachlabels[i-1] == 1) errors1[i] -= eachweight[i-1]; //errors1[i] -= w_s[i]; else errors1[i] += eachweight[i-1]; //errors1[i] += w_s[i]; if(errors1[i] < 0.000000000001){ // skip this one errors1[i] = errors1[i-1]; } } // compute errors2, suppose parity is 0, that is f(x)>thresh ==> h(x) = 1 e = 0.0; for(i=0;i<(m_nPositiveImg+m_nNegativeImg);i++){ if(eachlabels[i] == 0) //negative ex e += eachweight[i]; } errors2[0] = e; for(i=1;i<(m_nPositiveImg+m_nNegativeImg);i++){ errors2[i] = errors2[i-1]; if(eachlabels[i-1] == 0)

errors2[i] -= eachweight[i-1]; //errors2[i] -= w_s[i]; else errors2[i] += eachweight[i-1]; //errors2[i] += w_s[i]; if(errors2[i] < 0.000000000001){ // skip this one errors2[i] = errors2[i-1]; } } min1 = std::min_element(errors1,errors1+(m_nPositiveImg+m_nNegativeImg)); min2 = std::min_element(errors2,errors2+(m_nPositiveImg+m_nNegativeImg)); int pos1 = min1 - errors1, pos2 = min2 - errors2; if( (*min1)<(*min2) ){ weakClass[ii].parity = 1; weakClass[ii].error = (*min1); if(pos1==(m_nPositiveImg+m_nNegativeImg)) weakClass[ii].thresh = weakFeatureValue[pos1] + (weakFeatureValue[pos1]-weakFeatureValue[pos1-1])/2.0; else if(pos1==0) weakClass[ii].thresh = weakFeatureValue[pos1] - (weakFeatureValue[pos1+1]-weakFeatureValue[pos1])/2.0; else weakClass[ii].thresh = weakFeatureValue[pos1] - (weakFeatureValue[pos1]-weakFeatureValue[pos1-1])/2.0; int cntTemp1=0; //c for(int a=0;a<m_nPositiveImg;a++){ if(value[a]<weakClass[ii].thresh){ cntTemp1++; } } weakClass[ii].detection_rate = (double)cntTemp1/(double)(m_nPositiveImg); cntTemp1=0; for(a=m_nPositiveImg;a<(m_nPositiveImg+m_nNegativeImg);a++){ if(value[a]<weakClass[ii].thresh){ cntTemp1++; } } weakClass[ii].false_positive_rate = (double)cntTemp1/(double)(m_nNegativeImg); }else{ weakClass[ii].parity = -1; weakClass[ii].error = (*min2); if(pos2==(m_nPositiveImg+m_nNegativeImg)) weakClass[ii].thresh = weakFeatureValue[pos2] + (weakFeatureValue[pos2]-weakFeatureValue[pos2-1])/2.0; else if(pos1==0) weakClass[ii].thresh = weakFeatureValue[pos2] - (weakFeatureValue[pos2+1]-weakFeatureValue[pos2])/2.0; else weakClass[ii].thresh = weakFeatureValue[pos2] - (weakFeatureValue[pos2]-weakFeatureValue[pos2-1])/2.0; int c=0; for(int a=0;a<m_nPositiveImg;a++){ if(value[a]>=weakClass[ii].thresh){ c++; } } weakClass[ii].detection_rate = (double)c/(double)(m_nPositiveImg); c=0;

for(a=m_nPositiveImg;a<(m_nPositiveImg+m_nNegativeImg);a++){ if(value[a]>=weakClass[ii].thresh){ c++; } } weakClass[ii].false_positive_rate = (double)c/(double)(m_nNegativeImg); } if((weakClass[ii].error < minerror)&&(weakClass[ii].reference_count < REFCOUNT)){ minerror = weakClass[ii].error; ///////////////////////////////////////////////////////////////////////////////// minsc = weakClass[ii]; ///////////////////////////////////////////////////////////////////////////////// sindex = ii; } strTemp.Format("%d/%d(total) Weak Classifier has been processed for all examples", ii,m_total_weakclass ); m_Log.AppendString(strTemp); if(m_bStopTheAdaboost){ return; } int temporaryvalue = 0; } weakClass[sindex].reference_count++; double beta; beta = minsc.error / (1.0-minsc.error); // update the weights int cntAllexample = 0; int tempValue = 0; for(int j=0;j<m_nPositiveImg;j++){ double v; v = GetOneFeature(m_ImgPositive[j].IntegrailImage,0,0,&minsc); if(minsc.parity*v<minsc.parity*minsc.thresh){ tempValue = 1; }else{ tempValue = 0; } if(tempValue == m_ImgPositive[j].Label) m_ImgPositive[j].weight *= beta; cntAllexample++; } for( j=cntAllexample;j<m_nPositiveImg + cntAllexample;j++){ double v; v = GetOneFeature(m_ImgNegative[j].IntegrailImage,0,0,&minsc); if(minsc.parity*v<minsc.parity*minsc.thresh){ tempValue = 1; }else{ tempValue = 0; } if(tempValue == m_ImgNegative[j].Label) m_ImgNegative[j].weight *= beta; } minsc.alpha = -log(beta);

delete[] errors1; errors1 = NULL; delete[] errors2; errors2 = NULL; delete[] eachlabels; delete[] eachweight; delete[] labels; delete[] weights; delete[] weakFeatureValue; //return minsc; SingleMinStrongClass = minsc; } // Init and begin Train images void CCVFrameDlg::OnInitTrainImg() { m_bStopTheAdaboost = FALSE; InitPossibleFeatures(); LoadPrositiveExamples(); LoadNegativeExamples(); InitPositiveImage(); InitNegativeImage(); BeginCascadeClassifier(); return; }

ProcFaceDetection.cpp // ProcFaceDetector.cpp #include "CVFrameDlg.h" #include "cv.h" #include "highgui.h" #include "cxcore.h" #include "stdafx.h" #include "CVFrame.h" #include "CVFrameDlg.h" #include <stdio.h> #include <stdlib.h> #include <string.h> #include <assert.h> #include <math.h> #include <float.h> #include <limits.h> #include <time.h> #include <ctype.h> //#include "CVAPP.h" #include "./api/HistoryEdit.h" #include "ImageProc.h" #include "IntegralImage.h" #include "Setting.h" #include "./api/FileInfo.h" #ifdef _DEBUG #define new DEBUG_NEW #undef THIS_FILE static char THIS_FILE[] = __FILE__; #endif // Thread for realtime detectiion UINT ThreadFunc (LPVOID pParam) { CCVFrameDlg* pDlg = (CCVFrameDlg*)pParam; switch(pDlg->nDetecMode){ // case for V-J detector case MODE_VIOLA_JONES: while((pDlg->bStopCam)==TRUE) {

cvGrabFrame( pDlg->m_ImageProc->m_cap ); pDlg->m_ImageProc->m_frame = cvRetrieveFrame( pDlg->m_ImageProc->m_cap ); //cvErode( pDlg->m_ImageProc->m_frame, pDlg->m_ImageProc->m_frame, 0, 2 ); IplImage* grayImg = cvCreateImage(cvGetSize(pDlg->m_ImageProc->m_frame), IPL_DEPTH_8U, 1 ); grayImg->origin = pDlg->m_ImageProc->m_frame->origin; //cvCvtColor( pDlg->m_ImageProc->m_frame, grayImg, CV_RGB2GRAY ); cvCvtColor( pDlg->m_ImageProc->m_frame, grayImg, CV_BGR2GRAY ); IplImage* image2 = reinterpret_cast<IplImage*>(pDlg->m_ImageProc->m_frame); // color output pDlg->m_ImageProc->VJFaceDetector(grayImg, image2, TRUE); cvShowImage( "Webcam", image2 ); //cvReleaseImage(&grayImg); //cvReleaseImage(&image2); if( cvWaitKey(10) >= 0 ){ //pDlg->m_pThread->ResumeThread(); break; } } break; // case Morpho detection case MODE_MORPHOLOGY: while((pDlg->bStopCam)==TRUE) { cvGrabFrame( pDlg->m_ImageProc->m_cap ); pDlg->m_ImageProc->m_frame = cvRetrieveFrame( pDlg->m_ImageProc->m_cap ); IplImage* grayImg = cvCreateImage(cvGetSize(pDlg->m_ImageProc->m_frame), IPL_DEPTH_8U, 1 ); grayImg->origin = pDlg->m_ImageProc->m_frame->origin; cvCvtColor( pDlg->m_ImageProc->m_frame, grayImg, CV_BGR2GRAY ); IplImage* image2 = reinterpret_cast<IplImage*>(pDlg->m_ImageProc->m_frame); cvFlip( grayImg, NULL, 0); int w = grayImg->width; int h = grayImg->height; BYTE* imageBuffer = new BYTE[w*h]; memcpy(imageBuffer,grayImg->imageData,w*h); BOOL bEyeFound = FALSE; int lEyeX, lEyeY, rEyeX, rEyeY; bEyeFound = pDlg->m_ImageProc->m_EfDetection.GetEyePos(imageBuffer, w, h, &lEyeX, &lEyeY, &rEyeX, &rEyeY); if (bEyeFound) { pDlg->m_ImageProc->m_lEyeX = lEyeX; pDlg->m_ImageProc->m_lEyeY = lEyeY; pDlg->m_ImageProc->m_rEyeX = rEyeX; pDlg->m_ImageProc->m_rEyeY = rEyeY; } else { pDlg->m_ImageProc->m_lEyeX = 0; pDlg->m_ImageProc->m_lEyeY = 0; pDlg->m_ImageProc->m_rEyeX = 0; pDlg->m_ImageProc->m_rEyeY = 0; } delete[] imageBuffer; // color output pDlg->m_ImageProc->MoFaceDetector(grayImg, image2,TRUE, bEyeFound); cvShowImage( 
"Webcam", image2 ); if( cvWaitKey(10) >= 0 ){ //pDlg->m_pThread->ResumeThread(); break; } } break; case MODE_MORPHOLOGY_VJ: while((pDlg->bStopCam)==TRUE) {

cvGrabFrame( pDlg->m_ImageProc->m_cap ); pDlg->m_ImageProc->m_frame = cvRetrieveFrame( pDlg->m_ImageProc->m_cap ); IplImage* grayImg = cvCreateImage(cvGetSize(pDlg->m_ImageProc->m_frame), IPL_DEPTH_8U, 1 ); grayImg->origin = pDlg->m_ImageProc->m_frame->origin; cvCvtColor( pDlg->m_ImageProc->m_frame, grayImg, CV_BGR2GRAY ); IplImage* image2 = reinterpret_cast<IplImage*>(pDlg->m_ImageProc->m_frame); //if(nFlip) cvFlip( grayImg, NULL, 0); /*Temprary test mix two algorithm*/ pDlg->m_ImageProc->faces = cvHaarDetectObjects(grayImg, pDlg->m_ImageProc->cascade, pDlg->m_ImageProc->storage, 1.2, // scale the cascade by 20% after each pass 2, // groups of 3 (2+1) or more neigbor face rectangles are joined into a single face, smaller groups are rejected CV_HAAR_DO_CANNY_PRUNING, // use Canny to reduce number of false alarms cvSize(0,0) // start from the minimum face size allowed by the particular classifier ); if( pDlg->m_ImageProc->faces->total > 0 ){ CvRect* r = (CvRect*) cvGetSeqElem( pDlg->m_ImageProc->faces, 0); // CvPoint pt1; // CvPoint pt2; for(int row=0; row < grayImg->height; row++ ){ for(int col=0; col < grayImg->width ; col++ ){ if( (row < r->y)||(row > r->y + r->height)||(col < r->x)||(col > r->x + r->width)) grayImg->imageData[row*grayImg->width + col] = 0; } } }else{ cvSetZero(grayImg); } //cvFlip( grayImg, NULL, 0); int w = grayImg->width; int h = grayImg->height; BYTE* imageBuffer = new BYTE[w*h]; memcpy(imageBuffer,grayImg->imageData,w*h); BOOL bEyeFound = FALSE; int lEyeX, lEyeY, rEyeX, rEyeY; bEyeFound = pDlg->m_ImageProc->m_EfDetection.GetEyePos(imageBuffer, w, h, &lEyeX, &lEyeY, &rEyeX, &rEyeY); if (bEyeFound) { pDlg->m_ImageProc->m_lEyeX = lEyeX; pDlg->m_ImageProc->m_lEyeY = lEyeY; pDlg->m_ImageProc->m_rEyeX = rEyeX; pDlg->m_ImageProc->m_rEyeY = rEyeY; } else { pDlg->m_ImageProc->m_lEyeX = 0; pDlg->m_ImageProc->m_lEyeY = 0; pDlg->m_ImageProc->m_rEyeX = 0; pDlg->m_ImageProc->m_rEyeY = 0; } delete[] imageBuffer; //CString temp; //temp.Format("m_lEyeX : %d 
m_rEyeX : %d",lEyeX,rEyeX); //pDlg->m_Log.AppendString(temp); // color output pDlg->m_ImageProc->MoFaceDetector(grayImg, image2,TRUE, bEyeFound); cvShowImage( "Webcam", image2 );

if( cvWaitKey(10) >= 0 ){ //pDlg->m_pThread->ResumeThread(); break; } } break; default: break; } return 0; } .// CAM Button message handler void CCVFrameDlg::OnBtnCam() { // TODO: Add your control notification handler code here if(bCamInit==FALSE) m_ImageProc->m_cap = cvCaptureFromCAM(0); bCamInit = TRUE; cvvNamedWindow("Webcam",5); m_ImageProc->m_frame = cvQueryFrame(m_ImageProc->m_cap); cvShowImage("Webcam",m_ImageProc->m_frame); bStopCam = TRUE; m_pThread = AfxBeginThread(ThreadFunc, this, THREAD_PRIORITY_NORMAL,0, 0); //m_pThread = AfxBeginThread(ThreadFunc, this, THREAD_PRIORITY_HIGHEST,0, CREATE_SUSPENDED); } // detec from image button message handler void CCVFrameDlg::OnFaceVjIm() { // TODO: Add your control notification handler code here CFileDialog dlg(TRUE, _T("*.jpg"), "",OFN_FILEMUSTEXIST|OFN_PATHMUSTEXIST|OFN_HIDEREADONLY, "jpg files (*.jpg) |*.jpg|AVI files (*.avi) |*.avi| All Files (*.*)|*.*||",NULL); char title[]= {"Open Image"}; dlg.m_ofn.lpstrTitle= title; if (dlg.DoModal() == IDOK) {

m_ImageProc->m_filename = dlg.GetPathName(); m_ImageProc->InitImageBuff(); CString temp; temp.Format("Opened File : %s",m_ImageProc->m_filename); m_Log.AppendString(temp); } cvvNamedWindow( "Original Image", 1 ); cvvShowImage( "Original Image", m_ImageProc->m_img ); IplImage* image = reinterpret_cast<IplImage*>(m_ImageProc->m_img3); IplImage* image2 = reinterpret_cast<IplImage*>(m_ImageProc->m_img); IplImage* dst = cvCreateImage( cvGetSize(image), 8, 1 ); //m_ImageProc->InitFaceDetector(); m_ImageProc->VJFaceDetector(image, image2, FALSE); cvvNamedWindow( "Result Image1", 1 ); cvvShowImage( "Result Image1", image2 ); CString result = "c:\\result.jpg"; //AfxMessageBox(result); //cvSave(result,) //cvSaveImage(result,image2) ; } // detec from image button message handler void CCVFrameDlg::OnFaceMoIm() { // TODO: Add your control notification handler code here CFileDialog dlg(TRUE, _T("*.jpg"), "",OFN_FILEMUSTEXIST|OFN_PATHMUSTEXIST|OFN_HIDEREADONLY,

"jpg files (*.jpg) |*.jpg|AVI files (*.avi) |*.avi| All Files (*.*)|*.*||",NULL); char title[]= {"Open Image"}; dlg.m_ofn.lpstrTitle= title; if (dlg.DoModal() == IDOK) { m_ImageProc->m_filename = dlg.GetPathName(); m_ImageProc->InitImageBuff(); CString temp; temp.Format("Opened File : %s",m_ImageProc->m_filename); m_Log.AppendString(temp); } cvvNamedWindow( "Original Image", 1 );

cvvShowImage( "Original Image", m_ImageProc->m_img ); IplImage* image = reinterpret_cast<IplImage*>(m_ImageProc->m_img3); IplImage* image2 = reinterpret_cast<IplImage*>(m_ImageProc->m_img); IplImage* dst = cvCreateImage( cvGetSize(image), 8, 1 ); //m_ImageProc->InitFaceDetector(); int w = image->width; int h = image->height; BYTE* imageBuffer = new BYTE[w*h]; //image->imageData memcpy(imageBuffer,image->imageData,w*h); BOOL bEyeFound = FALSE; int lEyeX, lEyeY, rEyeX, rEyeY; bEyeFound = m_ImageProc->m_EfDetection.GetEyePos(imageBuffer, w, h, &lEyeX, &lEyeY, &rEyeX, &rEyeY); if (bEyeFound) { m_ImageProc->m_lEyeX = lEyeX; m_ImageProc->m_lEyeY = lEyeY; m_ImageProc->m_rEyeX = rEyeX; m_ImageProc->m_rEyeY = rEyeY; } else { m_ImageProc->m_lEyeX = 0; m_ImageProc->m_lEyeY = 0; m_ImageProc->m_rEyeX = 0; m_ImageProc->m_rEyeY = 0; } delete[] imageBuffer; m_ImageProc->MoFaceDetector(image, image2 ,FALSE, bEyeFound); // image2 : color image : gray //m_ImageProc->VJFaceDetector(image, image2, FALSE); cvvNamedWindow( "Result Image1", 1 ); cvvShowImage( "Result Image1", image2 ); } // VJ CAM messageHandler void CCVFrameDlg::OnFaceVjCam() { // TODO: Add your control notification handler code here if(bCamInit==FALSE) m_ImageProc->m_cap = cvCaptureFromCAM(0); bCamInit = TRUE; nDetecMode = MODE_VIOLA_JONES; cvvNamedWindow("Webcam",5); m_ImageProc->m_frame = cvQueryFrame(m_ImageProc->m_cap); cvShowImage("Webcam",m_ImageProc->m_frame); bStopCam = TRUE; m_pThread = AfxBeginThread(ThreadFunc, this, THREAD_PRIORITY_NORMAL,0, 0); }

// Morphology CAM messagee Handler void CCVFrameDlg::OnFaceMoCam() { // TODO: Add your control notification handler code here if(bCamInit==FALSE) m_ImageProc->m_cap = cvCaptureFromCAM(0); bCamInit = TRUE; cvvNamedWindow("Webcam",5); m_ImageProc->m_frame = cvQueryFrame(m_ImageProc->m_cap); cvShowImage("Webcam",m_ImageProc->m_frame); bStopCam = TRUE; nDetecMode = MODE_MORPHOLOGY; m_pThread = AfxBeginThread(ThreadFunc, this, THREAD_PRIORITY_NORMAL,0, 0); } // VJ+Morph CAM message Handler void CCVFrameDlg::OnFaceMoVjcam() { // TODO: Add your control notification handler code here if(bCamInit==FALSE) m_ImageProc->m_cap = cvCaptureFromCAM(0); bCamInit = TRUE; cvvNamedWindow("Webcam",5); m_ImageProc->m_frame = cvQueryFrame(m_ImageProc->m_cap); cvShowImage("Webcam",m_ImageProc->m_frame); bStopCam = TRUE; nDetecMode = MODE_MORPHOLOGY_VJ; m_pThread = AfxBeginThread(ThreadFunc, this, THREAD_PRIORITY_NORMAL,0, 0); }

ImageProc.cpp // ImageProc.cpp : implementation file // #include "stdafx.h" #include "CVFrame.h" #include "ImageProc.h" #include "CVFrameDlg.h" #ifdef _DEBUG #define new DEBUG_NEW #undef THIS_FILE static char THIS_FILE[] = __FILE__; #endif ///////////////////////////////////////////////////////////////////////////// // CImageProc CImageProc::CImageProc() { } CImageProc::~CImageProc() { cvReleaseImage( &m_img ); cvReleaseImage( &m_img2 ); cvReleaseImage( &m_img3 ); cvReleaseImage( &m_img4 ); cvReleaseImage( &m_frame); cvReleaseCapture( &m_cap ); //cvReleaseImage( &m_Camimg2); } BEGIN_MESSAGE_MAP(CImageProc, CWnd) //{{AFX_MSG_MAP(CImageProc) // NOTE - the ClassWizard will add and remove mapping macros here. //}}AFX_MSG_MAP END_MESSAGE_MAP() /////////////////////////////////////////////////////////////////////////////

// CImageProc message handlers void CImageProc::InitCamImageBuff() { } // Init image buffers void CImageProc::InitImageBuff() { //m_img = cvvLoadImage( m_filename ); // load image m_img = cvLoadImage( m_filename , 1); // load image // make image buffer m_img2 = cvCloneImage(m_img); m_img3 = cvLoadImage(m_filename,0); //load gray image m_img4 = cvCloneImage(m_img); } // Initialize VJFace Detector (loading data) void CImageProc::InitVJFaceDetector() { storage=cvCreateMemStorage(0); // CvSeq* faces; //CString trData_path = "C:\\Program Files\\OpenCV\\data\\haarcascades\\haarcascade_frontalface_default.xml"; CString trData_path = "haarcascade_frontalface_alt_tree.xml"; //AfxMessageBox(trData_path); cascade = (CvHaarClassifierCascade*)cvLoad(trData_path); } // VJFace detector void CImageProc::VJFaceDetector(CvArr* image1, CvArr* image2, BOOL nFlip) { if(nFlip) cvFlip( image1, NULL, 0); faces = cvHaarDetectObjects(image1,

cascade, storage, 1.2, // scale the cascade by 20% after each pass 2, // groups of 3 (2+1) or more neigbor face rectangles are joined into a single face, smaller groups are rejected CV_HAAR_DO_CANNY_PRUNING, // use Canny to reduce number of false alarms cvSize(0,0) // start from the minimum face size allowed by the particular classifier ); //test //CvPoint pt3 = { 10, 10 }; //CvPoint pt4 = { 20 , 20 }; //cvRectangle( image2, pt3, pt4, CV_RGB(255,0,0),3,8,0); for(int i=0; i<(faces ? faces->total:0); i++){ CvRect* r = (CvRect*) cvGetSeqElem( faces, i); // CvPoint* e = (CvPoint*) cvGetSeqElem( faces, i); CvPoint pt1; CvPoint pt2; CvPoint pt3;// l-e CvPoint pt4;// l-e CvPoint pt5;// r-e CvPoint pt6;// r-e if(nFlip){ pt1.x = r->x ; pt1.y = abs((r->y) - m_frame->height); pt2.x = r->x + r->width ; pt2.y = abs((r->y + r->height) - m_frame->height ) ; pt3.x = r->x + (r->width)/5 ; pt3.y = abs((r->y + (r->height/3)) - m_frame->height); pt4.x = r->x + (r->width)/5 + (r->width)/5;

pt4.y = abs((r->y + (r->height/4) + (r->width)/4 ) - m_frame->height ) ; pt5.x = r->x + 3*(r->width)/5; pt5.y = abs((r->y + (r->height/3)) - m_frame->height); pt6.x = r->x + 3*(r->width)/5 + (r->width)/5; pt6.y = abs((r->y + (r->height/4) + (r->width)/4 ) - m_frame->height ) ; }else{ pt1.x = r->x ; pt1.y = r->y ; pt2.x = r->x + r->width ; pt2.y = r->y + r->height; }

//CvPoint pt1 = { r->x, r->y }; //CvPoint pt2 = { r->x + r->width, r->y + r->height }; cvRectangle( image2, pt1, pt2, CV_RGB(255,0,0),3,8,0); //cvRectangle( image2, pt3, pt4, CV_RGB(255,0,0),3,8,0); //cvRectangle( image2, pt5, pt6, CV_RGB(255,0,0),3,8,0); cvRectangle( image2, pt3, pt6, CV_RGB(255,0,0),3,8,0); } } // Morphology detector void CImageProc::MoFaceDetector(CvArr* image1, CvArr* image2 ,BOOL nFlip, BOOL bEye) { // if(nFlip) // cvFlip( image1, NULL, 0); if( (bEye == TRUE)&&(nFlip==TRUE ) ){ CvPoint pt1 = { m_lEyeX-10, abs(m_lEyeY-10 - m_frame->height) }; CvPoint pt2 = { m_lEyeX+10, abs(m_lEyeY+10 - m_frame->height)}; CvPoint pt3 = { m_rEyeX-10, abs(m_rEyeY-10 - m_frame->height)}; CvPoint pt4 = { m_rEyeX+10, abs(m_rEyeY+10 - m_frame->height)}; cvRectangle( image2, pt1, pt2, CV_RGB(255,0,0),3,8,0); cvRectangle( image2, pt3, pt4, CV_RGB(255,0,0),3,8,0); }else{ CvPoint pt1 = { m_lEyeX-10, m_lEyeY-10 }; CvPoint pt2 = { m_lEyeX+10, m_lEyeY+10 }; CvPoint pt3 = { m_rEyeX-10, m_rEyeY-10 }; CvPoint pt4 = { m_rEyeX+10, m_rEyeY+10 }; cvRectangle( image2, pt1, pt2, CV_RGB(255,0,0),3,8,0); cvRectangle( image2, pt3, pt4, CV_RGB(255,0,0),3,8,0); } }

// EfDetection.cpp
// EfDetection.cpp: implementation of the EfDetection class.
//
//////////////////////////////////////////////////////////////////////

#include "stdafx.h"
#include "EfDetection.h"
#include <math.h>

#define R2A (180.0/3.141592653589793)
#define PI (3.141592653589793)
#define ROUND(a) ((int)(a + 0.5))

#ifdef _DEBUG
#undef THIS_FILE
static char THIS_FILE[]=__FILE__;
#define new DEBUG_NEW
#endif

//////////////////////////////////////////////////////////////////////
// Construction/Destruction
//////////////////////////////////////////////////////////////////////

// qsort-style comparator: orders eye-pair candidates by DESCENDING
// structural confidence.
int CompareVfEyePairListEyeStructConf(const void *arg1, const void *arg2 )
{
    VfEyePair r1, r2;
    r1=*((VfEyePair*)arg1);
    r2=*((VfEyePair*)arg2);
    if( r1.eyeStructConf > r2.eyeStructConf )
        return -1;
    else if (r1.eyeStructConf == r2.eyeStructConf )
        return 0;
    else
        return 1;
}

// qsort-style comparator: orders eye-pair candidates by DESCENDING
// circle confidence.
int CompareVfEyePairListEyeCrclConf(const void *arg1, const void *arg2 )
{
    VfEyePair r1, r2;
    r1=*((VfEyePair*)arg1);
    r2=*((VfEyePair*)arg2);
    if( r1.eyeCrclConf > r2.eyeCrclConf )
        return -1;
    else if (r1.eyeCrclConf == r2.eyeCrclConf )
        return 0;
    else
        return 1;
}

// Initialise detector state: face window size and the filter/confidence
// trackers start at zero.
EfDetection::EfDetection()
{
    m_FaceWidth = FACE_COL;
    m_FaceHeight = FACE_ROW;
    m_filX1 = 0;
    m_filY1 = 0;
    m_filX2 = 0;
    m_filY2 = 0;
    m_cnt1Conf = 0.0;
    m_cnt1Dist = 0.0;
}

EfDetection::~EfDetection()
{
}

/************************** main function CLAHE ******************/
int EfDetection::CLAHE (BYTE* pImage, unsigned int uiXRes, unsigned int uiYRes,
                        BYTE Min, BYTE Max, unsigned int uiNrX, unsigned int uiNrY,
                        unsigned int uiNrBins, float fCliplimit)
/* pImage - Pointer to the input/output image
 * uiXRes - Image resolution in the X direction
 * uiYRes - Image resolution in the Y direction
 * Min - Minimum greyvalue of input image (also becomes minimum of output image)
 * Max - Maximum greyvalue of input image (also becomes maximum of output image)
 * uiNrX - Number of contextial regions in the X direction (min 2, max 16)
 * uiNrY - Number of contextial regions in the Y direction (min 2, max 16)
 * uiNrBins - Number of greybins for histogram ("dynamic range")
 * float fCliplimit - Normalized cliplimit (higher values give more contrast)
 * The number of "effective" greylevels in the output image is set by uiNrBins; selecting
 * a small value (eg. 128) speeds up processing and still produce an output image of
 * good quality. The output image will have the same minimum and maximum value as the input
 * image. A clip limit smaller than 1 results in standard (non-contrast limited) AHE.
 * Returns 0 on success, a negative code on invalid arguments / out of memory.
 */
{
    unsigned int uiX, uiY;                        /* counters */
    unsigned int uiXSize, uiYSize, uiSubX, uiSubY; /* size of context. reg. and subimages */
    unsigned int uiXL, uiXR, uiYU, uiYB;          /* auxiliary variables interpolation routine */
    unsigned long ulClipLimit, ulNrPixels;        /* clip limit and region pixel count */
    BYTE* pImPointer;                             /* pointer to image */
    BYTE aLUT[uiNR_OF_GREY];                      /* lookup table used for scaling of input image */
    unsigned long* pulHist, *pulMapArray;         /* pointer to histogram and mappings*/
    unsigned long* pulLU, *pulLB, *pulRU, *pulRB; /* auxiliary pointers interpolation */

    if (uiNrX > 16) return -1;                    /* # of regions x-direction too large */
    if (uiNrY > 16) return -2;                    /* # of regions y-direction too large */
    int t = uiXRes % uiNrX;                       // (unused debugging leftovers)
    int tt = uiYRes % uiNrY;
    if (uiXRes % uiNrX) return -3;                /* x-resolution no multiple of uiNrX */
    // if (uiYRes & uiNrY) return -4;             /* y-resolution no multiple of uiNrY */
    if (uiYRes % uiNrY) return -4;                /* y-resolution no multiple of uiNrY */
    if (Max >= uiNR_OF_GREY) return -5;           /* maximum too large */
    if (Min >= Max) return -6;                    /* minimum equal or larger than maximum */
    if (uiNrX < 2 || uiNrY < 2) return -7;        /* at least 4 contextual regions required */
    if (fCliplimit == 1.0) return 0;              /* is OK, immediately returns original image. */
    if (uiNrBins == 0) uiNrBins = 128;            /* default value when not specified */

    pulMapArray=(unsigned long *)malloc(sizeof(unsigned long)*uiNrX*uiNrY*uiNrBins);
    if (pulMapArray == 0) return -8;              /* Not enough memory! (try reducing uiNrBins) */

    uiXSize = uiXRes/uiNrX; uiYSize = uiYRes/uiNrY; /* Actual size of contextual regions */
    ulNrPixels = (unsigned long)uiXSize * (unsigned long)uiYSize;

    if(fCliplimit > 0.0) {                        /* Calculate actual cliplimit */
        ulClipLimit = (unsigned long) (fCliplimit * (uiXSize * uiYSize) / uiNrBins);
        ulClipLimit = (ulClipLimit < 1UL) ? 1UL : ulClipLimit;
    }
    else ulClipLimit = 1UL<<14;                   /* Large value, do not clip (AHE) */

    MakeLut(aLUT, Min, Max, uiNrBins);            /* Make lookup table for mapping of greyvalues */

    /* Calculate greylevel mappings for each contextual region */
    for (uiY = 0, pImPointer = pImage; uiY < uiNrY; uiY++) {
        for (uiX = 0; uiX < uiNrX; uiX++, pImPointer += uiXSize) {
            pulHist = &pulMapArray[uiNrBins * (uiY * uiNrX + uiX)];
            MakeHistogram(pImPointer,uiXRes,uiXSize,uiYSize,pulHist,uiNrBins,aLUT);
            ClipHistogram(pulHist, uiNrBins, ulClipLimit);
            MapHistogram(pulHist, Min, Max, uiNrBins, ulNrPixels);
        }
        pImPointer += (uiYSize - 1) * uiXRes;     /* skip lines, set pointer */
    }

    /* Interpolate greylevel mappings to get CLAHE image */
    for (pImPointer = pImage, uiY = 0; uiY <= uiNrY; uiY++) {
        if (uiY == 0) {                           /* special case: top row */
            uiSubY = uiYSize >> 1;  uiYU = 0; uiYB = 0;
        }
        else {
            if (uiY == uiNrY) {                   /* special case: bottom row */
                uiSubY = uiYSize >> 1; uiYU = uiNrY-1; uiYB = uiYU;
            }
            else {                                /* default values */
                uiSubY = uiYSize; uiYU = uiY - 1; uiYB = uiYU + 1;
            }
        }
        for (uiX = 0; uiX <= uiNrX; uiX++) {
            if (uiX == 0) {                       /* special case: left column */
                uiSubX = uiXSize >> 1; uiXL = 0; uiXR = 0;
            }
            else {
                if (uiX == uiNrX) {               /* special case: right column */
                    uiSubX = uiXSize >> 1; uiXL = uiNrX - 1; uiXR = uiXL;
                }
                else {                            /* default values */
                    uiSubX = uiXSize; uiXL = uiX - 1; uiXR = uiXL + 1;
                }
            }
            pulLU = &pulMapArray[uiNrBins * (uiYU * uiNrX + uiXL)];
            pulRU = &pulMapArray[uiNrBins * (uiYU * uiNrX + uiXR)];
            pulLB = &pulMapArray[uiNrBins * (uiYB * uiNrX + uiXL)];
            pulRB = &pulMapArray[uiNrBins * (uiYB * uiNrX + uiXR)];
            Interpolate(pImPointer,uiXRes,pulLU,pulRU,pulLB,pulRB,uiSubX,uiSubY,aLUT);
            pImPointer += uiSubX;                 /* set pointer on next matrix */
        }
        pImPointer += (uiSubY - 1) * uiXRes;
    }
    free(pulMapArray);                            /* free space for histograms */
    return 0;                                     /* return status OK */
}

void EfDetection::ClipHistogram (unsigned long* pulHistogram, unsigned int uiNrGreylevels,
                                 unsigned long ulClipLimit)
/* This function performs clipping of the histogram and redistribution of bins.
 * The histogram is clipped and the number of excess pixels is counted. Afterwards
 * the excess pixels are equally redistributed across the whole histogram (providing
 * the bin count is smaller than the cliplimit). */
{
    unsigned long* pulBinPointer, *pulEndPointer, *pulHisto;
    unsigned long ulNrExcess, ulUpper, ulBinIncr, ulStepSize, i;
    long lBinExcess;

    ulNrExcess = 0;  pulBinPointer = pulHistogram;
    for (i = 0; i < uiNrGreylevels; i++) { /* calculate total number of excess pixels */
        lBinExcess = (long) pulBinPointer[i] - (long) ulClipLimit;
        if (lBinExcess > 0) ulNrExcess += lBinExcess;     /* excess in current bin */
    };

    /* Second part: clip histogram and redistribute excess pixels in each bin */
    ulBinIncr = ulNrExcess / uiNrGreylevels;              /* average binincrement */
    ulUpper = ulClipLimit - ulBinIncr;   /* Bins larger than ulUpper set to cliplimit */

    for (i = 0; i < uiNrGreylevels; i++) {
        if (pulHistogram[i] > ulClipLimit) pulHistogram[i] = ulClipLimit; /* clip bin */
        else {
            if (pulHistogram[i] > ulUpper) {              /* high bin count */
                ulNrExcess -= pulHistogram[i] - ulUpper; pulHistogram[i]=ulClipLimit;
            }
            else {                                        /* low bin count */
                ulNrExcess -= ulBinIncr; pulHistogram[i] += ulBinIncr;
            }
        }
    }

    while (ulNrExcess) {                                  /* Redistribute remaining excess */
        pulEndPointer = &pulHistogram[uiNrGreylevels]; pulHisto = pulHistogram;
        while (ulNrExcess && pulHisto < pulEndPointer) {
            ulStepSize = uiNrGreylevels / ulNrExcess;
            if (ulStepSize < 1) ulStepSize = 1;           /* stepsize at least 1 */
            for (pulBinPointer=pulHisto; pulBinPointer < pulEndPointer && ulNrExcess;
                 pulBinPointer += ulStepSize) {
                if (*pulBinPointer < ulClipLimit) {
                    (*pulBinPointer)++;  ulNrExcess--;    /* reduce excess */
                }
            }
            pulHisto++;                 /* restart redistributing on other bin location */
        }
    }
}

void EfDetection::MakeHistogram (BYTE* pImage, unsigned int uiXRes,
                                 unsigned int uiSizeX, unsigned int uiSizeY,
                                 unsigned long* pulHistogram,
                                 unsigned int uiNrGreylevels, BYTE* pLookupTable)
/* This function classifies the greylevels present in the array image into
 * a greylevel histogram. The pLookupTable specifies the relationship
 * between the greyvalue of the pixel (typically between 0 and 4095) and
 * the corresponding bin in the histogram (usually containing only 128 bins). */
{
    BYTE* pImagePointer;
    unsigned int i;

    for (i = 0; i < uiNrGreylevels; i++) pulHistogram[i] = 0L; /* clear histogram */

    for (i = 0; i < uiSizeY; i++) {
        pImagePointer = &pImage[uiSizeX];
        while (pImage < pImagePointer) pulHistogram[pLookupTable[*pImage++]]++;
        pImagePointer += uiXRes;
        pImage = &pImagePointer[-uiSizeX];   // jump to start of next region row
    }
}

void EfDetection::MapHistogram (unsigned long* pulHistogram, BYTE Min, BYTE Max,
                                unsigned int uiNrGreylevels, unsigned long ulNrOfPixels)
/* This function calculates the equalized lookup table (mapping) by
 * cumulating the input histogram. Note: lookup table is rescaled in range [Min..Max]. */
{
    unsigned int i;  unsigned long ulSum = 0;
    const float fScale = ((float)(Max - Min)) / ulNrOfPixels;
    const unsigned long ulMin = (unsigned long) Min;

    for (i = 0; i < uiNrGreylevels; i++) {
        ulSum += pulHistogram[i]; pulHistogram[i]=(unsigned long)(ulMin+ulSum*fScale);
        if (pulHistogram[i] > Max) pulHistogram[i] = Max;
    }
}

void EfDetection::MakeLut (BYTE * pLUT, BYTE Min, BYTE Max, unsigned int uiNrBins)
/* To speed up histogram clipping, the input image [Min,Max] is scaled down to
 * [0,uiNrBins-1]. This function calculates the LUT. */
{
    int i;
    const BYTE BinSize = (BYTE) (1 + (Max - Min) / uiNrBins);

    for (i = Min; i <= Max; i++)  pLUT[i] = (i - Min) / BinSize;
}

void EfDetection::Interpolate (BYTE * pImage, int uiXRes, unsigned long * pulMapLU,
                               unsigned long * pulMapRU, unsigned long * pulMapLB,
                               unsigned long * pulMapRB,
                               unsigned int uiXSize, unsigned int uiYSize, BYTE * pLUT)
/* pImage - pointer to input/output image
 * uiXRes - resolution of image in x-direction
 * pulMap* - mappings of greylevels from histograms
 * uiXSize - uiXSize of image submatrix
 * uiYSize - uiYSize of image submatrix
 * pLUT - lookup table containing mapping greyvalues to bins
 * This function calculates the new greylevel assignments of pixels within a submatrix
 * of the image with size uiXSize and uiYSize. This is done by a bilinear interpolation
 * between four different mappings in order to eliminate boundary artifacts.
 * It uses a division; since division is often an expensive operation, I added code to
 * perform a logical shift instead when feasible. */
{
    const unsigned int uiIncr = uiXRes-uiXSize; /* Pointer increment after processing row */
    BYTE GreyValue; unsigned int uiNum = uiXSize*uiYSize; /* Normalization factor */
    unsigned int uiXCoef, uiYCoef, uiXInvCoef, uiYInvCoef, uiShift = 0;

    if (uiNum & (uiNum - 1))   /* If uiNum is not a power of two, use division */
        for (uiYCoef = 0, uiYInvCoef = uiYSize; uiYCoef < uiYSize;
             uiYCoef++, uiYInvCoef--,pImage+=uiIncr) {
            for (uiXCoef = 0, uiXInvCoef = uiXSize; uiXCoef < uiXSize;
                 uiXCoef++, uiXInvCoef--) {
                GreyValue = pLUT[*pImage];           /* get histogram bin value */
                *pImage++ = (BYTE ) ((uiYInvCoef * (uiXInvCoef*pulMapLU[GreyValue]
                                  + uiXCoef * pulMapRU[GreyValue])
                                  + uiYCoef * (uiXInvCoef * pulMapLB[GreyValue]
                                  + uiXCoef * pulMapRB[GreyValue])) / uiNum);
            }
        }
    else {               /* avoid the division and use a right shift instead */
        while (uiNum >>= 1) uiShift++;               /* Calculate 2log of uiNum */
        for (uiYCoef = 0, uiYInvCoef = uiYSize; uiYCoef < uiYSize;
             uiYCoef++, uiYInvCoef--,pImage+=uiIncr) {
            for (uiXCoef = 0, uiXInvCoef = uiXSize; uiXCoef < uiXSize;
                 uiXCoef++, uiXInvCoef--) {
                GreyValue = pLUT[*pImage];           /* get histogram bin value */
                *pImage++ = (BYTE)((uiYInvCoef* (uiXInvCoef * pulMapLU[GreyValue]
                                  + uiXCoef * pulMapRU[GreyValue])
                                  + uiYCoef * (uiXInvCoef * pulMapLB[GreyValue]
                                  + uiXCoef * pulMapRB[GreyValue])) >> uiShift);
            }
        }
    }
}

// Meanfilter: box filter of windowSize x width restricted to the member
// boundary rectangle (m_Boundary*). Out-of-image taps are skipped but the
// divisor stays the full window area, so border pixels come out slightly
// darker -- NOTE(review): confirm that is intentional.
void EfDetection::MeanFilter(BYTE *inputImage, BYTE *outputImage, int windowSize,
                             int width, int imageHeight, int imageWidth)
{
    int n2 = windowSize/2;
    int k2 = width/2;
    int sum = 0;

    for (int i = m_BoundaryMinY ; i < m_BoundaryMaxY-1 ; i++)
        for (int j = m_BoundaryMinX ; j < m_BoundaryMaxX-1 ; j++) {
            sum = 0;
            for (int ii = -n2 ; ii <= n2 ; ii++) {
                for (int jj = -k2 ; jj <= k2 ; jj++) {
                    if (!(i+ii < 0 || i+ii >= imageHeight || j+jj < 0 || j+jj >= imageWidth))
                        sum += inputImage[(i+ii)*imageWidth+j+jj];
                }
            }
            outputImage[i*imageWidth+j] = (BYTE)(sum/(windowSize*width));
        }
}

// overloaded meanfilter: same box filter, but over an explicit sub-rectangle
// passed by the caller instead of the member boundary fields.
void EfDetection::MeanFilter(BYTE *inputImage, BYTE *outputImage, int windowSize,
                             int width, int imageHeight, int imageWidth,
                             int imageHeightMin, int imageHeightMax,
                             int imageWidthMin, int imageWidthMax)
{
    int n2 = windowSize/2;
    int k2 = width/2;
    int sum = 0;

    for (int i = imageHeightMin ; i < imageHeightMax-1 ; i++)
        for (int j = imageWidthMin ; j < imageWidthMax-1 ; j++) {
            sum = 0;
            for (int ii = -n2 ; ii <= n2 ; ii++) {
                for (int jj = -k2 ; jj <= k2 ; jj++) {
                    if (!(i+ii < 0 || i+ii >= imageHeight || j+jj < 0 || j+jj >= imageWidth))
                        sum += (int)inputImage[(i+ii)*imageWidth+j+jj];
                }
            }
            outputImage[i*imageWidth+j] = (BYTE)(sum/(windowSize*width));
        }
}

// Scaling images: bilinear resize of 'input' (oldWidth x oldHeight) into
// 'output' (newWidth x newHeight), working in centre-origin coordinates.
// (Definition continues beyond this chunk of the file.)
void EfDetection::Scaling(BYTE *input, BYTE *output, UINT oldWidth, UINT oldHeight,
                          UINT newWidth, UINT newHeight)
{
    int i, j, m, n;
    double x, y, p, q;
    int xs, ys, nx, ny;
    BYTE *temp;
    int width, height;

    nx = oldWidth/2;
    ny = oldHeight/2;
    if (oldWidth > newWidth) {
        xs = oldWidth/2;
        ys = oldHeight/2;
        temp = new BYTE[oldHeight*oldWidth];
        width = oldWidth;
        height = oldHeight;
        memset(temp, 125, oldHeight*oldWidth);
    }
    else {
        xs = newWidth/2;
        ys = newHeight/2;
        temp = new BYTE[newHeight*newWidth];
        width = newWidth;
        height = newHeight;
        memset(temp, 125, newHeight*newWidth);
    }

    int d;
    double zx = (double)newWidth/(double)oldWidth;
    double zy = (double)newHeight/(double)oldHeight;
    for (i = -ys; i < ys; i++) {
        for (j = -xs; j < xs; j++) {
            y = i / zy;
            x = j / zx;
            if (y > 0) m = (int)y; else m = (int)y - 1;
            if (x > 0) n = (int)x; else n = (int)x - 1;
            q = y - m;
            p = x - n;
            if ((m >= -ny) && (m+1 < ny) && (n >= -nx) && (n+1 < nx))
                d = (int)((1-q)*((1-p)*input[(m+ny)*oldWidth+n+nx] + p*input[(m+ny)*oldWidth+n+1+nx])

+ q*((1-p)*input[(m+1+ny)*oldWidth+(n+nx)] + p*input[(m+1+ny)*oldWidth+(n+1+nx)])); else d = 0; if (d < 0) d = 0; if (d > 255) d = 255; temp[(i+ys)*width+j+xs] = d; } } int count = 0; int row = 0, col = 0; for (i = 0; i < height; i++) for (j = 0; j < width; j++) { if ((i >= (int)(ys - (double)ys*zy + 0.5)) && (j >= (int)(xs - (double)xs*zx + 0.5)) && (i < (int)(ys + (double)ys*zy + 0.5)) && (j < (int)(xs + (double)xs*zx + 0.5))) { output[row*newWidth+col] = temp[i*width + j]; col++; if (col == newWidth) { col = 0; row++; } count++; } } delete temp; } // scaling with factor void EfDetection::ImageScaling(BYTE *inputImage, BYTE *outputImage, int factor, int scaleFactor) { int i, j, ii, jj, sum, temp; int count = 0; if ( scaleFactor == 1) { for (i = 0 ; i <= m_ScaleHeight-factor ; i = i+factor) for (j = 0 ; j <= m_ScaleWidth-factor ; j = j+factor) { sum = 0; for (ii = 0 ; ii < factor ; ii++) for (jj = 0 ; jj < factor ; jj++) { sum += inputImage[(i+ii)*m_ScaleWidth + (j+jj)]; } outputImage[count++] = sum/(factor*factor); } } else if (scaleFactor == 0) { for (i = 0 ; i < m_ScaleHeight/2 ; i++) for (j = 0 ; j < m_ScaleWidth/2 ; j++) { sum = 0; for (ii = i*factor ; ii < i*factor+factor ; ii++) for (jj = j*factor ; jj < j*factor+factor ; jj++) { temp = inputImage[i*m_ScaleWidth/2 + j]; outputImage[ii*m_ScaleWidth + jj] = temp; } } } } // overloaded image scaling void EfDetection::ImageScaling(BYTE *inputImage, BYTE *outputImage, int factor, int scaleFactor, int height, int width) { int i, j, ii, jj, sum, temp; int count = 0; if ( scaleFactor == 1) { for (i = 0 ; i <= height-factor ; i = i+factor) for (j = 0 ; j <= width-factor ; j = j+factor) { sum = 0; for (ii = 0 ; ii < factor ; ii++)

for (jj = 0 ; jj < factor ; jj++) { sum += inputImage[(i+ii)*width + (j+jj)]; } outputImage[count++] = sum/(factor*factor); } } else if (scaleFactor == 0) { for (i = 0 ; i < height/2 ; i++) for (j = 0 ; j < width/2 ; j++) { sum = 0; for (ii = i*factor ; ii < i*factor+factor ; ii++) for (jj = j*factor ; jj < j*factor+factor ; jj++) { temp = inputImage[i*width/2 + j]; outputImage[ii*width + jj] = temp; } } } } // rotation ft void EfDetection::Rotation(BYTE *input, BYTE *output, UINT width, UINT height, double radian) { int i, j, m, n; double ax, ay, ap, aq; double c, s; int xs, ys, d1; xs = (width)/2; ys = (height)/2; c = cos(radian); s = sin(radian); for (i = -ys; i < ys; i++) { for (j = -xs; j < xs; j++) { ay = j*s + i*c; ax = j*c - i*s; if (ay > 0) m = (int)ay; else m = (int)ay-1; if (ax > 0) n = (int)ax; else n = (int)ax-1; if (m == ys - 1) m = m - 1; if (n == xs - 1) n = n - 1; aq = ay - m; ap = ax - n; if ((m >= -ys) && (m < ys) && (n >= -xs) && (n < xs)) d1 = (int)((1-aq)*((1-ap)*input[(m+ys)*width+(n+xs)] + ap*input[(m+ys)*width+(n+1+xs)]) + aq*((1-ap)*input[(m+1+ys)*width+(n+xs)] + ap*input[(m+1+ys)*width+(n+1+xs)])); else d1 = 0; if (d1 < 0) d1 = 0; if (d1 > 255) d1 = 255; output[(i+ys)*width+(j+xs)] = d1; } } } // crop certain area void EfDetection::CropImage(BYTE *input, BYTE *output, UINT width, UINT height, CPoint lEye, CPoint rEye) { int i, j; int mx, my; int distance; int sx, sy, ex, ey; mx = (rEye.x + lEye.x)/2; my = rEye.y; distance = rEye.x - lEye.x; sx = mx - distance; ex = mx + distance; sy = my - (double)distance*0.5;

ey = my + (double)distance*1.9; int row = 0, col = 0; int tempWidth, tempHeight; tempWidth = ex - sx; tempHeight = ey - sy; BYTE *tempImage = new BYTE[tempWidth*tempHeight]; memset(tempImage, 0, sizeof(BYTE)*(tempWidth*tempHeight)); int temp = sy; for (i = 0; i < height; i++) { for (j = 0; j < width; j++) { if ((i >= sy) && (i < ey) && (j >= sx) && (j < ex)) { if (temp < 0) { tempImage[row*tempWidth+col] = 125; } else tempImage[row*tempWidth+col] = input[i*width+j]; col++; } } if ((i >= sy) && (i < ey)) { if (temp < 0) i--; row++; temp++; } col = 0; } Scaling(tempImage, output, tempWidth, tempHeight, m_FaceWidth, m_FaceHeight); delete [] tempImage; } void EfDetection::CropImage(BYTE *input, BYTE *output, UINT width, UINT height, int lEyeX, int lEyeY, int rEyeX, int rEyeY) { int i, j; double mx, my; double distance; int sx, sy, ex, ey; mx = (rEyeX + lEyeX)/2.0; my = (rEyeY + lEyeY)/2.0; distance = sqrt((rEyeX - lEyeX)*(rEyeX - lEyeX) + (rEyeY - lEyeY)*(rEyeY - lEyeY)); sx = (int)(mx - distance + 0.5); ex = (int)(mx + distance + 0.5); sy = (int)(my - distance*0.5 + 0.5); ey = (int)(my + distance*1.9 + 0.5); int row = 0, col = 0; int tempWidth, tempHeight; tempWidth = ex - sx; tempHeight = ey - sy; BYTE *tempImage = new BYTE[tempWidth*tempHeight]; memset(tempImage, 0, sizeof(BYTE)*(tempWidth*tempHeight)); int temp = sy; for (i = 0; i < height; i++) { for (j = 0; j < width; j++) { if ((i >= sy) && (i < ey) && (j >= sx) && (j < ex)) { if (temp < 0) {

tempImage[row*tempWidth+col] = 125; } else tempImage[row*tempWidth+col] = input[i*width+j]; col++; } } if ((i >= sy) && (i < ey)) { row++; if (temp < 0) i--; temp++; } col = 0; } Scaling(tempImage, output, tempWidth, tempHeight, m_FaceWidth, m_FaceHeight); delete [] tempImage; } // Erosion for morphology void EfDetection::Erosion(BYTE *inputImage, BYTE *outputImage, int windowSize, int width, int imageHeight, int imageWidth, int imageHeightMin, int imageHeightMax, int imageWidthMin, int imageWidthMax) { int n2 = windowSize/2; int k2 = width/2; int maximum; for (int i = imageHeightMin ; i < imageHeightMax-1 ; i++) for (int j = imageWidthMin ; j < imageWidthMax-1 ; j++) { maximum = INT_MAX; for (int ii = -n2 ; ii <= n2 ; ii++) for (int jj = -k2 ; jj <= k2 ; jj++) { if (!(i+ii < 0 || i+ii >= imageHeight || j+jj < 0 || j+jj >= imageWidth) && inputImage[(i+ii)*imageWidth + j+jj] < maximum) maximum = inputImage[(i+ii)*imageWidth+j+jj]; } outputImage[i*imageWidth+j] = (BYTE)maximum; } } // Dilation for morphology void EfDetection::Dilation(BYTE *inputImage, BYTE *outputImage, int windowSize, int width, int imageHeight, int imageWidth, int imageHeightMin, int imageHeightMax, int imageWidthMin, int imageWidthMax) { int n2 = windowSize/2; int k2 = width/2; int minimum; for (int i = imageHeightMin ; i < imageHeightMax-1 ; i++) for (int j = imageWidthMin ; j < imageWidthMax-1 ; j++) { minimum = INT_MIN; for (int ii = -n2 ; ii <= n2 ; ii++) { for (int jj = -k2 ; jj <= k2 ; jj++) if (!(i+ii < 0 || i+ii >= imageHeight || j+jj < 0 || j+jj >= imageWidth) && inputImage[(i+ii)*imageWidth + j+jj] > minimum) minimum = inputImage[(i+ii)*imageWidth+j+jj]; } outputImage[i*imageWidth+j] = (BYTE)minimum; } } // Substraction void EfDetection::Subtraction(BYTE *inputImage, BYTE *processedImage, BYTE *outputImage, int windowSize, int imageHeight, int imageWidth, int imageHeightMin, int imageHeightMax, int imageWidthMin, int imageWidthMax) {

int temp, temp1, temp2, temp3; for (int i = imageHeightMin ; i < imageHeightMax-1 ; i++) for (int j = imageWidthMin ; j < imageWidthMax-1 ; j++) { temp1 = (int)inputImage[i*imageWidth + j]; temp2 = (int)processedImage[i*imageWidth + j]; temp3 = (int)abs(temp1 - temp2); if (temp3 < windowSize) temp = 0; else temp = temp3; outputImage[i*imageWidth + j] = (int)temp; } } // Closing operation ( morphology) void EfDetection::ClosingGrayImage(BYTE *inputImage, BYTE *outputImage, int height, int width, int thr, int imageHeight, int imageWidth, int imageHeightMin, int imageHeightMax, int imageWidthMin, int imageWidthMax) { BYTE *dilationImage = new BYTE[imageHeight*imageWidth]; BYTE *erosionImage = new BYTE[imageHeight*imageWidth]; memset(dilationImage, 255, sizeof(BYTE)*(imageHeight*imageWidth)); memset(erosionImage, 255, sizeof(BYTE)*(imageHeight*imageWidth)); Dilation(inputImage, dilationImage, height, width, imageHeight, imageWidth, imageHeightMin, imageHeightMax, imageWidthMin, imageWidthMax); Erosion(dilationImage, erosionImage, height, width, imageHeight, imageWidth, imageHeightMin, imageHeightMax, imageWidthMin, imageWidthMax); delete[] dilationImage; Subtraction(inputImage, erosionImage, outputImage, thr, imageHeight, imageWidth, imageHeightMin, imageHeightMax, imageWidthMin, imageWidthMax); delete[] erosionImage; } // manual filtering void EfDetection::FilterImage(BYTE* srcImageBuffer, BYTE* dstImageBuffer) { int filter_width = 1; int filter_height = 9; int filter[] = { 1, 13, 12, -24, -55, -24, 12, 13, 1 }; double sum; int row, col, in_row, in_col; int *tempImage = new int[m_ScaleWidth*m_ScaleHeight]; memset(tempImage, 255, sizeof(int)*m_ScaleWidth*m_ScaleHeight); for (row = 0; row < m_ScaleHeight - filter_height; row++) { for (col = 0; col < m_ScaleWidth - filter_width; col++) { sum = 0.0; for (in_row = 0; in_row < filter_height; in_row++) { for (in_col = 0; in_col < filter_width; in_col++) { sum += filter[in_row*filter_width + in_col]*srcImageBuffer[(row + 
in_row)*m_ScaleWidth + (col + in_col)]; } }

if (sum != 0.0) { tempImage[(row + filter_height/2)*m_ScaleWidth + (col + filter_width/2)] = (int)(sum/(filter_width*filter_height)); } else { tempImage[(row + filter_height/2)*m_ScaleWidth + (col + filter_width/2)] = 0; } } } BYTE *tempImage1 = new BYTE[m_ScaleWidth*m_ScaleHeight]; memset(tempImage1, 0, m_ScaleWidth*m_ScaleHeight); NormalizeImage(tempImage, tempImage1, m_ScaleWidth*m_ScaleHeight); delete[] tempImage; memset(dstImageBuffer, 0, sizeof(BYTE)*m_ScaleWidth*m_ScaleHeight); for (row = m_BoundaryMinY; row < m_BoundaryMaxY - filter_height; row++) { for (col = m_BoundaryMinX; col < m_BoundaryMaxX - filter_width; col++) { if(tempImage1[row*m_ScaleWidth + col] < THRES_BINARY) dstImageBuffer[row*m_ScaleWidth + col] = 0; else dstImageBuffer[row*m_ScaleWidth + col] = 255; } } delete[] tempImage1; } // Manual filtering void EfDetection::FilterImage(BYTE* srcImageBuffer, BYTE* dstImageBuffer, int height, int width, int imageHeightMin, int imageHeightMax, int imageWidthMin, int imageWidthMax) { int filter_width = 1; int filter_height = 9; int filter[] = { 1, 13, 12, -24, -55, -24, 12, 13, 1 }; double sum; int row, col, in_row, in_col; int *tempImage = new int[width*height]; memset(tempImage, 255, sizeof(int)*(width*height)); for (row = 0; row < height - filter_height; row++) { for (col = 0; col < width - filter_width; col++) { sum = 0.0; for (in_row = 0; in_row < filter_height; in_row++) { for (in_col = 0; in_col < filter_width; in_col++) { sum += (double)(filter[in_row*filter_width + in_col]*srcImageBuffer[(row + in_row)*width + (col + in_col)]); } }

tempImage[(row + filter_height/2)*width + (col + filter_width/2)] = (int)(sum/(filter_width*filter_height)); } } BYTE *tempImage1 = new BYTE[width*height]; memset(tempImage1, 0, sizeof(BYTE)*(width*height)); NormalizeImage(tempImage, tempImage1, width*height); delete[] tempImage; memset(dstImageBuffer, 0, sizeof(BYTE)*(width*height)); for (row = imageHeightMin; row < imageHeightMax - filter_height; row++) { for (col = imageWidthMin; col < imageWidthMax - filter_width; col++) { if(tempImage1[row*width + col] < 165) dstImageBuffer[row*width + col] = 0; else dstImageBuffer[row*width + col] = 255; } } delete[] tempImage1; } // normalization void EfDetection::NormalizeImage(int* srcImageBuffer, BYTE* dstImageBuffer, int imageSize) { int i, grayVal; int srcMin = srcImageBuffer[0]; int srcMax = srcImageBuffer[0]; for( i = 0; i < imageSize; i++) { if (srcImageBuffer[i] <= srcMin) srcMin = srcImageBuffer[i]; if (srcImageBuffer[i] >= srcMax) srcMax = srcImageBuffer[i]; } for ( i = 0 ; i < imageSize ; i++) { grayVal = 255*(srcImageBuffer[i] - srcMin)/(srcMax - srcMin); if (grayVal < 0) grayVal = 0; if (grayVal > 255) grayVal = 255; dstImageBuffer[i] = grayVal; } } // And operation void EfDetection::AndOperation(BYTE *inputImage1, BYTE *inputImage2, BYTE *outputImage) { int i, j, temp; for (i = 0 ; i < m_ScaleHeight; i++) { for (j = 0 ; j < m_ScaleWidth ; j++) { temp = inputImage1[i*m_ScaleWidth + j] + inputImage2[i*m_ScaleWidth + j]; if ( temp == 510) outputImage[i*m_ScaleWidth + j] = 255; else outputImage[i*m_ScaleWidth + j] = 0; } } } // overloaded And operation void EfDetection::AndOperation(BYTE *inputImage1, BYTE *inputImage2, BYTE *outputImage, int height, int width) {

int i, j, temp; for (i = 0 ; i < height; i++) { for (j = 0 ; j < width ; j++) { temp = inputImage1[i*width + j] + inputImage2[i*width + j]; if ( temp == 510) outputImage[i*width + j] = 255; else outputImage[i*width + j] = 0; } } } // noise removal ( heuristics ) void EfDetection::NoiseRemoving(BYTE *inputImage, BYTE *outputImage, int height, int width) { BYTE *tempImage1 = new BYTE[m_ScaleWidth*m_ScaleHeight]; memset(tempImage1, 0, sizeof(BYTE)*(m_ScaleWidth*m_ScaleHeight)); /* ClosingBinary(inputImage, tempImage1, 1, 3); OpeningBinary(tempImage1, outputImage, 1, 5); */ // ClosingBinary(inputImage, tempImage1, 1, 5); OpeningBinary(inputImage, outputImage, 1, 5); /*ClosingBinary(inputImage, tempImage1, 1, 3); */ delete [] tempImage1; } void EfDetection::NoiseRemoving(BYTE *inputImage, BYTE *outputImage, int height, int width, int imageHeightMin, int imageHeightMax, int imageWidthMin, int imageWidthMax) { BYTE *tempImage1 = new BYTE[width*height]; memset(tempImage1, 0, sizeof(BYTE)*(width*height)); // ClosingBinary(inputImage, tempImage1, 1, 5); OpeningBinary(inputImage, outputImage, 1, 3, height, width, imageHeightMin, imageHeightMax, imageWidthMin, imageWidthMax); /*ClosingBinary(inputImage, tempImage1, 1, 3); */ delete [] tempImage1; } // Binary void EfDetection::DilationBinary(BYTE *input, BYTE *output, int windowSize, int windowWidth) { int n2 = windowSize/2; int k2 = windowWidth/2; memcpy(output, input, m_ScaleHeight*m_ScaleWidth); for (int i = m_BoundaryMinY; i < m_BoundaryMaxY; ++i) for (int j = m_BoundaryMinX; j < m_BoundaryMaxX; ++j) { for (int k = -n2; k <= n2; ++k) for (int l = -k2; l <= k2; ++l) { if (!(i+k < 0 || i+k >= m_ScaleHeight || j+l < 0 || j+l >= m_ScaleWidth) && input[(i+k)*m_ScaleWidth+(j+l)] == 255) { output[i*m_ScaleWidth+j] = 255; break; } } } } void EfDetection::ErosionBinary(BYTE *input, BYTE *output, int windowSize, int windowWidth) {

int n2 = windowSize/2; int k2 = windowWidth/2; memcpy(output, input, m_ScaleWidth*m_ScaleHeight); for (int i = m_BoundaryMinY; i < m_BoundaryMaxY; ++i) for (int j = m_BoundaryMinX; j < m_BoundaryMaxX; ++j) { for (int k = -n2; k <= n2; ++k) for (int l = -k2; l <= k2; ++l) { if (!(i+k < 0 || i+k >= m_ScaleHeight || j+l < 0 || j+l >= m_ScaleWidth) && input[(i+k)*m_ScaleWidth+(j+l)] != 255) { output[i*m_ScaleWidth+j] = 0; break; } } } } // closing morphology binary void EfDetection::ClosingBinary(BYTE *inputImage, BYTE *outputImage, int height, int width) { BYTE *dilationImage = new BYTE[m_ScaleHeight*m_ScaleWidth]; BYTE *erosionImage = new BYTE[m_ScaleHeight*m_ScaleWidth]; memset(dilationImage, 255, sizeof(BYTE)*(m_ScaleHeight*m_ScaleWidth)); memset(erosionImage, 255, sizeof(BYTE)*(m_ScaleHeight*m_ScaleWidth)); DilationBinary(inputImage, dilationImage, height, width); ErosionBinary(dilationImage, outputImage, height, width); delete[] dilationImage; delete[] erosionImage; } // opening morphology binary void EfDetection::OpeningBinary(BYTE *inputImage, BYTE *outputImage, int height, int width) { BYTE *dilationImage = new BYTE[m_ScaleHeight*m_ScaleWidth]; BYTE *erosionImage = new BYTE[m_ScaleHeight*m_ScaleWidth]; memset(dilationImage, 255, sizeof(BYTE)*(m_ScaleHeight*m_ScaleWidth)); memset(erosionImage, 255, sizeof(BYTE)*(m_ScaleHeight*m_ScaleWidth)); ErosionBinary(inputImage, erosionImage, height, width); DilationBinary(erosionImage, outputImage, height, width); delete[] erosionImage; delete[] dilationImage; } void EfDetection::DilationBinary(BYTE *input, BYTE *output, int windowSize, int windowWidth, int imageHeight, int imageWidth, int imageHeightMin, int imageHeightMax, int imageWidthMin, int imageWidthMax) { int n2 = windowSize/2; int k2 = windowWidth/2; memcpy(output, input, imageHeight*imageWidth); for (int i = imageHeightMin; i < imageHeightMax; ++i) for (int j = imageWidthMin; j < imageWidthMax; ++j) { for (int k = -n2; k <= n2; ++k) for (int l = -k2; l <= k2; 
++l) { if (!(i+k < 0 || i+k >= imageHeight || j+l < 0 || j+l >= imageWidth) && input[(i+k)*imageWidth+(j+l)] == 255) { output[i*imageWidth+j] = 255; break; } } }

} void EfDetection::ErosionBinary(BYTE *input, BYTE *output, int windowSize, int windowWidth, int imageHeight, int imageWidth, int imageHeightMin, int imageHeightMax, int imageWidthMin, int imageWidthMax) { int n2 = windowSize/2; int k2 = windowWidth/2; memcpy(output, input, imageWidth*imageHeight); for (int i = imageHeightMin; i < imageHeightMax; ++i) for (int j = imageWidthMin; j < imageWidthMax; ++j) { for (int k = -n2; k <= n2; ++k) for (int l = -k2; l <= k2; ++l) { if (!(i+k < 0 || i+k >= imageHeight || j+l < 0 || j+l >= imageWidth) && input[(i+k)*imageWidth+(j+l)] != 255) { output[i*imageWidth+j] = 0; break; } } } } // Labeling1 int EfDetection::ConnectedComponents (BYTE *inputImage, int *outputImage) { // label connected components in 1D array, be sure to split across rows int *labeling_image = new int[m_ScaleWidth*m_ScaleHeight]; int component = 0; int i; memset(labeling_image, 0, sizeof(int)*(m_ScaleHeight*m_ScaleWidth)); for (i = 0; i < m_ScaleWidth*m_ScaleHeight; i++) { if (inputImage[i] ) { component++; while ( inputImage[i] ) { labeling_image[i] = component; if ( i % m_ScaleWidth == m_ScaleWidth-1 ) break; i++; } } } // associative memory for merging int* assoc = new int[component+1]; for (i = 0; i < component+1; i++) assoc[i] = i; int si, sj; for (int y = 1; y < m_ScaleHeight; y++) { // x = 0 has previous neighbors (0,y-1), (1,y-1) si = labeling_image[y*m_ScaleWidth]; sj = labeling_image[m_ScaleWidth*(y-1)]; AddToAssociative(si,sj,assoc); sj = labeling_image[1+m_ScaleWidth*(y-1)]; AddToAssociative(si,sj,assoc); for (int x = 1; x < m_ScaleWidth-1; x++) { si = labeling_image[x+m_ScaleWidth*y]; sj = labeling_image[x-1+m_ScaleWidth*(y-1)]; AddToAssociative(si,sj,assoc); sj = labeling_image[x+m_ScaleWidth*(y-1)]; AddToAssociative(si,sj,assoc); sj = labeling_image[x+1+m_ScaleWidth*(y-1)]; AddToAssociative(si,sj,assoc);

sj = labeling_image[x-1+m_ScaleWidth*y]; AddToAssociative(si,sj,assoc); } // x = xdim-1 has previous neighbors(x-1,y-1), (x,y-1), (x-1,y) x = m_ScaleWidth-1; si = labeling_image[x+m_ScaleWidth*y]; sj = labeling_image[x-1+m_ScaleWidth*(y-1)]; AddToAssociative(si,sj,assoc); sj = labeling_image[x+m_ScaleWidth*(y-1)]; AddToAssociative(si,sj,assoc); sj = labeling_image[x-1+m_ScaleWidth*y]; AddToAssociative(si,sj,assoc); } // replace each cycle by single label int compact_count = 0; for (i = 1; i <= component; i++) { if ( i <= assoc[i] ) { compact_count++; int current = i; while ( assoc[current] != i ) { int next = assoc[current]; assoc[current] = compact_count; current = next; } assoc[current] = compact_count; } } // relabel image for (i = 0; i < m_ScaleHeight*m_ScaleWidth; i++) { if ( labeling_image[i] ) outputImage[i] = assoc[labeling_image[i]]; else outputImage[i] = 0; } delete assoc; delete [] labeling_image; return compact_count; } // Labeling2 int EfDetection::ConnectedComponents (BYTE *inputImage, int *outputImage, int height, int width) { // label connected components in 1D array, be sure to split across rows int *labeling_image = new int[width*height]; int component = 0; int i; memset(labeling_image, 0, sizeof(int)*width*height); for (i = 0; i < width*height; i++) { if (inputImage[i] ) { component++; while ( inputImage[i] ) { labeling_image[i] = component; if ( i % width == width-1 ) break; i++;

} } } int* assoc = new int[component+1]; for (i = 0; i < component+1; i++) assoc[i] = i; int si, sj; for (int y = 1; y < height; y++) { // x = 0 has previous neighbors (0,y-1), (1,y-1) si = labeling_image[y*width]; sj = labeling_image[width*(y-1)]; AddToAssociative(si,sj,assoc); sj = labeling_image[1+width*(y-1)]; AddToAssociative(si,sj,assoc); // 0 < x < xdim-1 has previous neighbors // (x-1,y-1), (x,y-1), (x+1,y-1), (x-1,y) for (int x = 1; x < width-1; x++) { si = labeling_image[x+width*y]; sj = labeling_image[x-1+width*(y-1)]; AddToAssociative(si,sj,assoc); sj = labeling_image[x+width*(y-1)]; AddToAssociative(si,sj,assoc); sj = labeling_image[x+1+width*(y-1)]; AddToAssociative(si,sj,assoc); sj = labeling_image[x-1+width*y]; AddToAssociative(si,sj,assoc); } // x = xdim-1 has previous neighbors(x-1,y-1), (x,y-1), (x-1,y) x = width-1; si = labeling_image[x+width*y]; sj = labeling_image[x-1+width*(y-1)]; AddToAssociative(si,sj,assoc); sj = labeling_image[x+width*(y-1)]; AddToAssociative(si,sj,assoc); sj = labeling_image[x-1+width*y]; AddToAssociative(si,sj,assoc); } // replace each cycle by single label int compact_count = 0; for (i = 1; i <= component; i++) { if ( i <= assoc[i] ) { compact_count++; int current = i; while ( assoc[current] != i ) { int next = assoc[current]; assoc[current] = compact_count; current = next; } assoc[current] = compact_count; } } // relabel image for (i = 0; i < height*width; i++) { if ( labeling_image[i] ) outputImage[i] = assoc[labeling_image[i]]; else outputImage[i] = 0; } delete assoc;

delete [] labeling_image; return compact_count; } void EfDetection::AddToAssociative (int si, int sj, int *assoc) { if ( si == 0 || sj == 0 || sj == si ) return; int search = sj; do { search = assoc[search]; }while ( search != sj && search != si ); if ( search == sj ) { int temp = assoc[si]; assoc[si] = assoc[sj]; assoc[sj] = temp; } } // Set feature windows void EfDetection::SetFeatureWindow(int* labelBuffer, VfFtrInfo* pVfFtrInfo, int labelCnt) { int i, row, col; int labelIdx; for (i = 0; i < labelCnt; i++) { pVfFtrInfo[i].ftrRect.left = INT_MAX; pVfFtrInfo[i].ftrRect.top = INT_MAX; pVfFtrInfo[i].ftrRect.right = INT_MIN; pVfFtrInfo[i].ftrRect.bottom = INT_MIN; pVfFtrInfo[i].size = 0; } for (row = m_BoundaryMinY; row < m_BoundaryMaxY; row++) { for (col = m_BoundaryMinX; col < m_BoundaryMaxX; col++) { if (labelBuffer[row*m_ScaleWidth + col] != 0) { labelIdx = labelBuffer[row*m_ScaleWidth + col] - 1; if (col < pVfFtrInfo[labelIdx].ftrRect.left) { pVfFtrInfo[labelIdx].ftrRect.left = col-1; } else if (col > pVfFtrInfo[labelIdx].ftrRect.right) { pVfFtrInfo[labelIdx].ftrRect.right = col+1; } if (row < pVfFtrInfo[labelIdx].ftrRect.top) { pVfFtrInfo[labelIdx].ftrRect.top = row-1; } else if (row > pVfFtrInfo[labelIdx].ftrRect.bottom) { pVfFtrInfo[labelIdx].ftrRect.bottom = row+1; } pVfFtrInfo[labelIdx].size++; } } } } void EfDetection::SetFeatureWindow(int* labelBuffer, VfFtrInfo* pEfFtrInfo, int labelCnt, int height, int width, int imageHeightMin, int imageHeightMax, int imageWidthMin, int imageWidthMax) { int i, row, col; int labelIdx; for (i = 0; i < labelCnt; i++) {

pEfFtrInfo[i].ftrRect.left = INT_MAX; pEfFtrInfo[i].ftrRect.top = INT_MAX; pEfFtrInfo[i].ftrRect.right = INT_MIN; pEfFtrInfo[i].ftrRect.bottom = INT_MIN; pEfFtrInfo[i].size = 0; } for (row = imageHeightMin; row < imageHeightMax; row++) { for (col = imageWidthMin; col < imageWidthMax; col++) { if (labelBuffer[row*width + col] != 0) { labelIdx = labelBuffer[row*width + col] - 1; if (col < pEfFtrInfo[labelIdx].ftrRect.left) { pEfFtrInfo[labelIdx].ftrRect.left = col-1; } else if (col > pEfFtrInfo[labelIdx].ftrRect.right) { pEfFtrInfo[labelIdx].ftrRect.right = col+1; } if (row < pEfFtrInfo[labelIdx].ftrRect.top) { pEfFtrInfo[labelIdx].ftrRect.top = row-1; } else if (row > pEfFtrInfo[labelIdx].ftrRect.bottom) { pEfFtrInfo[labelIdx].ftrRect.bottom = row+1; } pEfFtrInfo[labelIdx].size++; } } } } int CompareVfFtrInfoArray(const void *arg1, const void *arg2 ) { VfFtrInfo r1, r2; r1=*((VfFtrInfo*)arg1); r2=*((VfFtrInfo*)arg2); if( r1.eyeConf > r2.eyeConf ) return -1; else if (r1.eyeConf == r2.eyeConf ) return 0; else return 1; } void EfDetection::SetEyePair(VfFtrInfo* pFtrInfo, int labelCnt, int* ftrCnt) { // check feature window size. // select approaprite ones and set eyeConf = 1.0. otherwise set eyeConf = -1.0 int i; int count = 0; for (i = 0; i < labelCnt; i++) { if (pFtrInfo[i].ftrRect.Width() >= MIN_FTR_WIDTH && pFtrInfo[i].ftrRect.Width() <= MAX_FTR_WIDTH && pFtrInfo[i].ftrRect.Height() >= MIN_FTR_HEIGHT && pFtrInfo[i].ftrRect.Height() <= MAX_FTR_HEIGHT && pFtrInfo[i].ftrRect.Width() >= pFtrInfo[i].ftrRect.Height()) { pFtrInfo[i].eyeConf = 1.0; count++; } else { pFtrInfo[i].eyeConf = -1.0; } } *ftrCnt = count; // sort pFtrInfo according to eyeConf qsort( (void *)pFtrInfo, (size_t)labelCnt, sizeof(VfFtrInfo), CompareVfFtrInfoArray );

} // Grouping Features and Face Verification by geo-info structure check int EfDetection::GroupingFeature(BYTE *srcImgBuffer, BYTE* eyeStructImgBuffer, BYTE* eyeCrclImgBuffer, VfFtrInfo* pFtrInfo, VfEyePair *pEyePair, int featureCount) { int i, j; CPoint lEye, rEye, rotateLEye, rotateREye; int lsize, rsize; int mx, my, sx, ex, ey; VfEyePair tempPair[MAX_PAIR_NUM]; double distance; int dx, dy; double radian; BOOL found = false; int cnt = 0; int index = 1; double maxConf = 0; double conf; m_bFound = false; // Pairling for (i = 0; i < featureCount; i++) { lEye = pFtrInfo[i].ftrRect.CenterPoint(); if ((lEye.y < m_BoundaryMinY) || (lEye.y > m_BoundaryMaxY)) continue; if ((lEye.x < m_BoundaryMinX) || (lEye.x > m_BoundaryMaxX)) continue; lsize = pFtrInfo[i].size; for (j = 0; j < featureCount; j++) { if (i == j) continue; rEye = pFtrInfo[j].ftrRect.CenterPoint(); if (rEye.x <= lEye.x) continue; rsize = pFtrInfo[j].size; if ((((double)lsize/(double)rsize) < 0.125) || (((double)lsize/(double)rsize) > 8.0)) continue; if ((rEye.y < m_BoundaryMinY) || (rEye.y > m_BoundaryMaxY)) continue; if ((rEye.x < m_BoundaryMinX) || (rEye.x > m_BoundaryMaxX)) continue; distance = sqrt( (lEye.x - rEye.x)*(lEye.x - rEye.x) + (lEye.y - rEye.y)*(lEye.y - rEye.y) ); dx = rEye.x - lEye.x; dy = rEye.y - lEye.y; radian = atan2(dy, dx); mx = (rEye.x+lEye.x)/2; my = (rEye.y+lEye.y)/2; sx = mx - distance; ex = mx + distance; ey = my + distance*1.9; if (ey > m_ScaleHeight+m_ScaleHeight*0.2) continue; if (sx < 0) continue; if (ex > m_ScaleWidth) continue; // 1st - Geometric Check if ((distance < MAX_EYES_DIST) && (distance > MIN_EYES_DIST) && (fabs(radian*R2A) <= MAX_ANGLE)) { // 1st structure check process tempPair[cnt].lEye = lEye; tempPair[cnt].rEye = rEye; tempPair[cnt].distance = distance; tempPair[cnt].eyeL = i; tempPair[cnt].eyeR = j; tempPair[cnt].mouthMidDist = 9999999.0; tempPair[cnt].browToEyeRatio = -999999.0; tempPair[cnt].eyeStructConf = -1.0; tempPair[cnt].cfVal = 0; 
tempPair[cnt].mouthGradX = 0; tempPair[cnt].mouthGradY = 0; int srchWidth, srchHeight; int lex, ley, rex, rey; DynamicGetIrisPoints(srcImgBuffer, m_ScaleWidth, m_ScaleHeight, lEye.x, lEye.y, rEye.x, rEye.y, &lex, &ley, &rex, &rey, &srchWidth, &srchHeight);

lEye.x = lex; lEye.y = ley; rEye.x = rex; rEye.y = rey; tempPair[cnt].lEye = lEye; tempPair[cnt].rEye = rEye; JazzCheckStructure(pFtrInfo, featureCount, lEye, rEye, &tempPair[cnt]); if (tempPair[cnt].confidence == 0.0) continue; FaceVerify(eyeStructImgBuffer, m_ScaleWidth, m_ScaleHeight, lEye.x, lEye.y, rEye.x, rEye.y, &tempPair[cnt]); if (tempPair[cnt].eyeStructConf == -1.0) continue; cnt++; } } } int cnt1 = 0; if (cnt > 0) { int lEyeX, lEyeY, newLEyeX, newLEyeY; int rEyeX, rEyeY, newREyeX, newREyeY; bool bIsLEye, bIsREye; double lEyeCrclConf, rEyeCrclConf; double eyeDist = 0.0; for (i = 0; i < cnt; i++) { lEyeX = tempPair[i].lEye.x; lEyeY = tempPair[i].lEye.y; rEyeX = tempPair[i].rEye.x; rEyeY = tempPair[i].rEye.y; eyeDist = tempPair[i].distance; bIsLEye = SetEyeCircle(eyeCrclImgBuffer, m_ScaleWidth, m_ScaleHeight, lEyeX, lEyeY, eyeDist, &newLEyeX, &newLEyeY, &lEyeCrclConf); if (!bIsLEye) continue; bIsREye = SetEyeCircle(eyeCrclImgBuffer, m_ScaleWidth, m_ScaleHeight, rEyeX, rEyeY, eyeDist, &newREyeX, &newREyeY, &rEyeCrclConf); if (!bIsREye) continue; tempPair[i].eyeCrclConf = (lEyeCrclConf + rEyeCrclConf) / 2.0; tempPair[i].lEye.x = newLEyeX; tempPair[i].lEye.y = newLEyeY; tempPair[i].rEye.x = newREyeX; tempPair[i].rEye.y = newREyeY; pEyePair[cnt1] = tempPair[i]; cnt1++; } qsort( (void *)pEyePair, (size_t)cnt1, sizeof(VfEyePair), CompareVfEyePairListEyeCrclConf ); //qsort( (void *)pEyePair, (size_t)cnt1, sizeof(VfEyePair), CompareVfEyePairListEyeStructConf ); // kihwan 20050427 m_cnt1Conf = pEyePair[cnt1].confidence; m_cnt1Dist = pEyePair[cnt1].distance; } /* // for save the cropped image.. BYTE *rotateImage = new BYTE[IMAGE_ROW*IMAGE_COL]; BYTE *cropImage = new BYTE[FACE_ROW*FACE_COL]; memset(rotateImage, 0, sizeof(BYTE)*(IMAGE_COL*IMAGE_ROW)); memset(cropImage, 0, sizeof(BYTE)*(FACE_ROW*FACE_COL)); for (i = 0; i < MAX_PAIR_NUM; i++) { memset(m_pFaceBuffer[i], 0, sizeof(BYTE)*(FACE_ROW*FACE_COL)); } */ if (cnt1 > 0) {

m_bFound = true; m_filX1 = (((double)m_ImageWidth/(double)m_ScaleWidth) * (double)pEyePair[0].lEye.x + 0.5); m_filY1 = (((double)m_ImageHeight/(double)m_ScaleHeight) * (double)pEyePair[0].lEye.y + 0.5); m_filX2 = (((double)m_ImageWidth/(double)m_ScaleWidth) * (double)pEyePair[0].rEye.x + 0.5); m_filY2 = (((double)m_ImageHeight/(double)m_ScaleHeight) * (double)pEyePair[0].rEye.y + 0.5); } else { m_bFound = false; m_filX1 = -1; m_filY1 = -1; m_filX2 = -1; m_filY2 = -1; } return cnt1; } // eye variance filter double EfDetection::GetEyeVariance(BYTE *input, UINT width, UINT height, double *stdDev, double *mean) { int x1 = EYE_LEFT_X, y1 = EYE_LEFT_Y; int x2 = EYE_RIGHT_X, y2 = EYE_RIGHT_Y; double ViLeft, ViRight, result; int i, j, cnt = 0; double sum1 = 0, sum2 = 0; for (i = y1; i < y1 + EYE_WINDOW_HEIGHT; i++) for (j = x1; j < x1 + EYE_WINDOW_WIDTH; j++) { sum1 = sum1 + (input[i*width + j])*(input[i*width + j]); sum2 = sum2 + (input[i*width + j]); cnt++; } ViLeft = sqrt(fabs((sum2/(double)cnt)*(sum2/(double)cnt) - sum1/(double)cnt)); stdDev[0] = ViLeft; mean[0] = sum2 / (double)cnt; sum1 = 0; sum2 = 0; cnt = 0; for (i = y2; i < y2 + EYE_WINDOW_HEIGHT; i++) for (j = x2; j < x2 + EYE_WINDOW_WIDTH; j++) { sum1 = sum1 + (input[i*width + j])*(input[i*width + j]); sum2 = sum2 + (input[i*width + j]); cnt++; } ViRight = sqrt(fabs((sum2/(double)cnt)*(sum2/(double)cnt) - sum1/(double)cnt)); stdDev[1] = ViRight; mean[1] = sum2 / (double)cnt; result = fabs(ViLeft - ViRight)/ (ViLeft + ViRight); return result; } // Cheek Variance double EfDetection::GetCheekVariance(BYTE *input, UINT width, UINT height, double *stdDev, double *mean) { int x1 = CHEEK_LEFT_X, y1 = CHEEK_LEFT_Y; int x2 = CHEEK_RIGHT_X, y2 = CHEEK_RIGHT_Y; double ViLeft, ViRight, result; int i, j, cnt = 0; double sum1 = 0, sum2 = 0; for (i = y1; i < y1 + CHEEK_WINDOW_HEIGHT; i++)

for (j = x1; j < x1 + CHEEK_WINDOW_WIDTH; j++) { sum1 = sum1 + (input[i*width + j])*(input[i*width + j]); sum2 = sum2 + (input[i*width + j]); cnt++; } ViLeft = sqrt(fabs((sum2/(double)cnt)*(sum2/(double)cnt) - sum1/(double)cnt)); stdDev[0] = ViLeft; mean[0] = sum2 / (double)cnt; sum1 = 0; sum2 = 0; cnt = 0; for (i = y2; i < y2 + CHEEK_WINDOW_HEIGHT; i++) for (j = x2; j < x2 + CHEEK_WINDOW_WIDTH; j++) { sum1 = sum1 + (input[i*width + j])*(input[i*width + j]); sum2 = sum2 + (input[i*width + j]); cnt++; } ViRight = sqrt(fabs((sum2/(double)cnt)*(sum2/(double)cnt) - sum1/(double)cnt)); stdDev[1] = ViRight; mean[1] = sum2 / (double)cnt; result = fabs(ViLeft - ViRight)/ (ViLeft + ViRight); return result; } // Check face structure double EfDetection::CheckStructure(VfFtrInfo *pFtrInfor, int ftrCnt, CPoint lEye, CPoint rEye, VfEyePair *fEyePair) { double distance; int mx, my; int dx, dy; double x[4], y[4], x1[4], y1[4], x2[4], y2[4]; double steps; double radian; double unitx1, unity1, unitx2, unity2; double tempx1, tempy1, tempx2, tempy2; double tempxx1, tempyy1, tempxx2, tempyy2; double tempxxx1, tempyyy1, tempxxx2, tempyyy2; BOOL mouth, eyebrowL, eyebrowR; CPoint point; mouth = FALSE; eyebrowL = FALSE; eyebrowR = FALSE; // eye pair and it’s angles dx = (rEye.x - lEye.x); dy = (rEye.y - lEye.y); mx = (lEye.x + rEye.x) / 2 +0.5; my = (lEye.y + rEye.y) / 2 +0.5; distance = sqrt(dx*dx + dy*dy); steps = distance*2.0; radian = atan2(dy, dx); // units unitx1 = (double)dx/steps; unity1 = (double)dy/steps; unitx2 = (double)-dy/steps; unity2 = (double)dx/steps; // ey- nose area tempx1 = (mx - 0.4*steps*unitx1); tempy1 = (my - 0.4*steps*unity1); tempx2 = (mx + 0.4*steps*unitx1); tempy2 = (my + 0.4*steps*unity1);

x[0] = (tempx1 + 0.6*steps*unitx2 + 0.5); y[0] = (tempy1 + 0.6*steps*unity2 + 0.5); x[1] = (tempx1 + 1.45*steps*unitx2 + 0.5); y[1] = (tempy1 + 1.45*steps*unity2 + 0.5); x[2] = (tempx2 + 1.45*steps*unitx2 + 0.5); y[2] = (tempy2 + 1.45*steps*unity2 + 0.5); x[3] = (tempx2 + 0.6*steps*unitx2 + 0.5); y[3] = (tempy2 + 0.6*steps*unity2 + 0.5); if (x[0] < 0) x[0] = 0; if (x[1] < 0) x[1] = 0; if (x[2] >= m_ScaleWidth) x[2] = m_ScaleWidth - 1; if (x[3] >= m_ScaleWidth) x[3] = m_ScaleWidth - 1; if (y[0] < 0) y[0] = 0; if (y[1] >= m_ScaleHeight) y[1] = m_ScaleHeight - 1; if (y[2] >= m_ScaleHeight) y[2] = m_ScaleHeight - 1; if (y[3] < 0) y[3] = 0; int mcount = CheckPoint(pFtrInfor, ftrCnt, x, y, fEyePair, 3); if (mcount > 6) return 0.0; if (mcount) mouth = true; fEyePair->mouthCNT = mcount; // left eye brow tempxx1 = (mx - 0.65*steps*unitx1); tempyy1 = (my - 0.65*steps*unity1); tempxx2 = (mx - 0.1*steps*unitx1); tempyy2 = (my - 0.1*steps*unity1); x1[0] = (tempxx1 - 0.5*steps*unitx2 + 0.5); y1[0] = (tempyy1 - 0.5*steps*unity2 + 0.5); x1[1] = (tempxx1 - 0.05*steps*unitx2 + 0.5); y1[1] = (tempyy1 - 0.05*steps*unity2 + 0.5); x1[2] = (tempxx2 - 0.05*steps*unitx2 + 0.5); y1[2] = (tempyy2 - 0.05*steps*unity2 + 0.5); x1[3] = (tempxx2 - 0.5*steps*unitx2 + 0.5); y1[3] = (tempyy2 - 0.5*steps*unity2 + 0.5); if (x1[0] < 0) x1[0] = 0; if (x1[1] < 0) x1[1] = 0; if (x1[2] >= m_ScaleWidth) x1[2] = m_ScaleWidth - 1; if (x1[3] >= m_ScaleWidth) x1[3] = m_ScaleWidth - 1; if (y1[0] < 0) y1[0] = 0; if (y1[1] >= m_ScaleHeight) y1[1] = m_ScaleHeight - 1; if (y1[2] >= m_ScaleHeight) y1[2] = m_ScaleHeight - 1; if (y1[3] < 0) y1[3] = 0; int ecount1 = CheckPoint(pFtrInfor, ftrCnt, x1, y1, fEyePair, 1); if (ecount1 > 2) return 0.0; if (ecount1) eyebrowL = true; fEyePair->eyebrowLCNT = ecount1; // right eye brow tempxxx1 = (mx + 0.1*steps*unitx1); tempyyy1 = (my + 0.1*steps*unity1); tempxxx2 = (mx + 0.65*steps*unitx1); tempyyy2 = (my + 0.65*steps*unity1); x2[0] = (tempxxx1 - 0.5*steps*unitx2 + 0.5); y2[0] = 
(tempyyy1 - 0.5*steps*unity2 + 0.5); x2[1] = (tempxxx1 - 0.05*steps*unitx2 + 0.5); y2[1] = (tempyyy1 - 0.05*steps*unity2 + 0.5); x2[2] = (tempxxx2 - 0.05*steps*unitx2 + 0.5); y2[2] = (tempyyy2 - 0.05*steps*unity2 + 0.5); x2[3] = (tempxxx2 - 0.5*steps*unitx2 + 0.5); y2[3] = (tempyyy2 - 0.5*steps*unity2 + 0.5); if (x2[0] < 0) x2[0] = 0; if (x2[1] < 0) x2[1] = 0; if (x2[2] >= m_ScaleWidth) x2[2] = m_ScaleWidth - 1; if (x2[3] >= m_ScaleWidth) x2[3] = m_ScaleWidth - 1; if (y2[0] < 0) y2[0] = 0;

if (y2[1] >= m_ScaleHeight) y2[1] = m_ScaleHeight - 1; if (y2[2] >= m_ScaleHeight) y2[2] = m_ScaleHeight - 1; if (y2[3] < 0) y2[3] = 0; int ecount2 = CheckPoint(pFtrInfor, ftrCnt, x2, y2, fEyePair, 2); if (ecount2 > 2) return 0.0; if (ecount2) eyebrowR = true; fEyePair->eyebrowRCNT = ecount2; if ((mouth) && (eyebrowL) && (eyebrowR)) { if (mcount >= 2) return 10.0; if (mcount == 1) return 8.0; } if ((mouth) && ((eyebrowL) || (eyebrowR))) { if (mcount >= 2) return 9.0; if (mcount == 1) return 7.0; } if (mouth) { if (mcount >= 2) return 6.0; if (mcount == 1) return 5.0; } if ((eyebrowL) && (eyebrowR)) return 4.0; // if ((eyebrowL) || (eyebrowR)) return 3.0; return 1.0; } int EfDetection::CheckPoint(VfFtrInfo *pFtrInfor, int ftrCnt, double *rectX, double *rectY, VfEyePair* fEyePair, int index) { int count = 0, i; double slopeL, slopeDn, slopeR, slopeUp; double crossL, crossR, crossUp, crossDn; double tempL = 0, tempUp = 0, tempR = 0, tempDn = 0, xx, yy; CPoint point; if (rectY[0] == rectY[3]) { for (i = 0; i < ftrCnt; i++) { point = pFtrInfor[i].ftrRect.CenterPoint(); if ((rectX[0] < point.x) && (rectX[3] > point.x) && (rectY[0] < point.y) && (rectY[1] > point.y)) { switch(index) { case 1:fEyePair->eyebrowL[count] = i; break; case 2:fEyePair->eyebrowR[count] = i; break; case 3:fEyePair->mouth[count] = i; break; } count++; } } return count; } else if (rectY[0] < rectY[3]) { slopeL = (rectY[0] - rectY[1])/(rectX[0] - rectX[1]); slopeUp = (rectY[1] - rectY[2])/(rectX[1] - rectX[2]); slopeR = (rectY[2] - rectY[3])/(rectX[2] - rectX[3]); slopeDn = (rectY[3] - rectY[0])/(rectX[3] - rectX[0]); crossL = rectY[1] - slopeL*rectX[1]; crossR = rectY[3] - slopeR*rectX[3]; crossUp = rectY[1] - slopeUp*rectX[1]; crossDn = rectY[3] - slopeDn*rectX[3]; for (i = 0; i < ftrCnt; i++) {

point = pFtrInfor[i].ftrRect.CenterPoint(); xx = point.x; yy = point.y; if ((rectX[1] < point.x) && (rectX[3] > point.x) && (rectY[0] < point.y) && (rectY[2] > point.y)) { tempL = slopeL*(double)(point.x) + crossL - (double)(point.y); tempUp = slopeUp*(double)(point.x) + crossUp - (double)(point.y); tempR = slopeR*(double)(point.x) + crossR - (double)(point.y); tempDn = slopeDn*(double)(point.x) + crossDn - (double)(point.y); if ((tempL < 0) && (tempR > 0) && (tempUp > 0) && (tempDn < 0)) { switch(index) { case 1:fEyePair->eyebrowL[count] = i; break; case 2:fEyePair->eyebrowR[count] = i; break; case 3:fEyePair->mouth[count] = i; break; } count++; } } } return count; } else if(rectY[0] > rectY[3]) { slopeL = (rectY[0] - rectY[1])/(rectX[0] - rectX[1]); slopeUp = (rectY[1] - rectY[2])/(rectX[1] - rectX[2]); slopeR = (rectY[2] - rectY[3])/(rectX[2] - rectX[3]); slopeDn = (rectY[3] - rectY[0])/(rectX[3] - rectX[0]); crossL = rectY[1] - slopeL*rectX[1]; crossR = rectY[3] - slopeR*rectX[3]; crossUp = rectY[1] - slopeUp*rectX[1]; crossDn = rectY[3] - slopeDn*rectX[3]; for (i = 0; i < ftrCnt; i++) { point = pFtrInfor[i].ftrRect.CenterPoint(); xx = point.x; yy = point.y; if ((rectX[0] < point.x) && (rectX[2] > point.x) && (rectY[1] > point.y) && (rectY[3] < point.y)) { tempL = slopeL*(double)(point.x) + crossL - (double)(point.y); tempUp = slopeUp*(double)(point.x) + crossUp - (double)(point.y); tempR = slopeR*(double)(point.x) + crossR - (double)(point.y); tempDn = slopeDn*(double)(point.x) + crossDn - (double)(point.y); if ((tempL > 0) && (tempR < 0) && (tempUp > 0) && (tempDn < 0)) { switch(index) { case 1:fEyePair->eyebrowL[count] = i; break; case 2:fEyePair->eyebrowR[count] = i; break; case 3:fEyePair->mouth[count] = i; break; } count++; } } } return count; } return 0; } double EfDetection::DistanceEye_EyeBrow(int x1, int y1, int x2, int y2) { double dEye_Eyebrow; dEye_Eyebrow = sqrt((x1-x2)*(x1-x2) + (y1-y2)*(y1-y2)); return dEye_Eyebrow; }

void EfDetection::DynamicRotateImage(BYTE *input, BYTE *output, int x1, int y1, int x2, int y2, int *newx1, int *newy1, int *newx2, int *newy2, double *radian) { double distance; double dx, dy; double rad; distance = sqrt( (x2 - x1)*(x2 - x1) + (y2 - y1)*(y2 - y1) ); dx = x2 - x1; dy = y2 - y1; rad = atan2(dy, dx); double eyex1 = x2 - (double)m_ScaleWidth/2; double eyey1 = y2 - (double)m_ScaleHeight/2; double eyex2 = x1 - (double)m_ScaleWidth/2; double eyey2 = y1 - (double)m_ScaleHeight/2; *newx2 = (cos(-rad)*eyex1 - sin(-rad)*eyey1) + (double)m_ScaleWidth/2 + 0.5; *newy2 = (sin(-rad)*eyex1 + cos(-rad)*eyey1) + (double)m_ScaleHeight/2 + 0.5; *newx1 = (cos(-rad)*eyex2 - sin(-rad)*eyey2) + (double)m_ScaleWidth/2 + 0.5; *newy1 = (sin(-rad)*eyex2 + cos(-rad)*eyey2) + (double)m_ScaleHeight/2 + 0.5; Rotation(input, output, m_ScaleWidth, m_ScaleHeight, rad); *radian = rad; } void EfDetection::DynamicRotatePoint(double radian, int x, int y, int *newx, int *newy) { double eyex = x - (double)m_ScaleWidth/2; double eyey = y - (double)m_ScaleHeight/2; *newx = (cos(-radian)*eyex - sin(-radian)*eyey) + (double)m_ScaleWidth/2 + 0.5; *newy = (sin(-radian)*eyex + cos(-radian)*eyey) + (double)m_ScaleHeight/2 + 0.5; } void EfDetection::DynamicGetIrisPoints(BYTE* srcImageBuffer, int x1, int y1, int x2, int y2, int* newx1, int* newy1, int* newx2, int* newy2, int* srchW, int* srchH) { double sigma = sqrt(2.0); double distance = sqrt( (x2 - x1)*(x2 - x1) + (y2 - y1)*(y2 - y1) ); int filterSize = (int)(distance*0.2); if (filterSize % 2 == 0) { filterSize += 1; } double* filter = new double[filterSize*filterSize]; memset(filter, 0, sizeof(double)*(filterSize*filterSize)); int i, j; int index = 0; for (j = (-1)*filterSize/2; j <= filterSize/2; j++) { for (i = (-1)*filterSize/2; i <= filterSize/2; i++) { filter[index++] = exp((-1.0)*(i*i + j*j)/(2*sigma*sigma)); } } double k = 1.0 / filter[0]; double filterSum = 0.0; for (i = 0; i < filterSize*filterSize; i++) { filter[i] *= k; filterSum += 
filter[i]; } int srchWidth = (int)(IRIS_SRCH_WIDTH*(double)filterSize); int srchHeight = (int)(IRIS_SRCH_HEIGHT*(double)filterSize); BYTE* closedImage = new BYTE[m_ScaleWidth*m_ScaleHeight]; memset(closedImage, 0, sizeof(BYTE)*(m_ScaleWidth*m_ScaleHeight)); int minX, minY, maxX, maxY;

minX = x1 - (srchWidth+filterSize)/2; maxX = x1 + (srchWidth+filterSize)/2; minY = y1 - (srchHeight+filterSize)/2; maxY = y1 + (srchHeight+filterSize)/2; if (minX < 0) minX = 0; if (minY < 0) minY = 0; if (maxX > m_ScaleWidth) maxX = m_ScaleWidth - 1; if (maxY > m_ScaleHeight) maxY = m_ScaleHeight - 1; Erosion(srcImageBuffer, closedImage, 3, 3, m_ScaleHeight, m_ScaleWidth, minY, maxY, minX, maxX); Dilation(closedImage, srcImageBuffer, 3, 3, m_ScaleHeight, m_ScaleWidth, minY, maxY, minX, maxX); minX = x2 - (srchWidth+filterSize)/2; maxX = x2 + (srchWidth+filterSize)/2; minY = y2 - (srchHeight+filterSize)/2; maxY = y2 + (srchHeight+filterSize)/2; if (minX < 0) minX = 0; if (minY < 0) minY = 0; if (maxX > m_ScaleWidth) maxX = m_ScaleWidth - 1; if (maxY > m_ScaleHeight) maxY = m_ScaleHeight - 1; Erosion(srcImageBuffer, closedImage, 3, 3, m_ScaleHeight, m_ScaleWidth, minY, maxY, minX, maxX); Dilation(closedImage, srcImageBuffer, 3, 3, m_ScaleHeight, m_ScaleWidth, minY, maxY, minX, maxX); delete[] closedImage; int row, col, in_row, in_col; int x, y; double sum; double* filteredBuffer = new double[srchWidth*srchHeight]; memset(filteredBuffer, 0, sizeof(double)*(srchWidth*srchHeight)); // left eye region for (row = 0; row < srchHeight; row++) { for (col = 0; col < srchWidth; col++) { sum = 0.0; for (in_row = 0; in_row < filterSize; in_row++) { for (in_col = 0; in_col < filterSize; in_col++) { x = x1 - srchWidth/2 + col - filterSize/2 + in_col; y = y1 - srchHeight/2 + row - filterSize/2 + in_row; sum += filter[in_row*filterSize + in_col]*srcImageBuffer[y*m_ScaleWidth + x]; } } filteredBuffer[row*srchWidth + col] = sum; } } int locX1, locY1; DynamicGetMinCoord(filteredBuffer, srchWidth, srchHeight, &locX1, &locY1); *newx1 = x1 - srchWidth/2 + locX1; *newy1 = y1 - srchHeight/2 + locY1; // right eye region for (row = 0; row < srchHeight; row++) { for (col = 0; col < srchWidth; col++) { sum = 0.0; for (in_row = 0; in_row < filterSize; in_row++) { for (in_col = 0; in_col < 
filterSize; in_col++) { x = x2 - srchWidth/2 + col - filterSize/2 + in_col; y = y2 - srchHeight/2 + row - filterSize/2 + in_row; sum += filter[in_row*filterSize + in_col]*srcImageBuffer[y*m_ScaleWidth + x]; } }

filteredBuffer[row*srchWidth + col] = sum; } } int locX2, locY2; DynamicGetMinCoord(filteredBuffer, srchWidth, srchHeight, &locX2, &locY2); *newx2 = x2 - srchWidth/2 + locX2; *newy2 = y2 - srchHeight/2 + locY2; delete[] filter; delete[] filteredBuffer; *srchW = srchWidth; *srchH = srchHeight; } void EfDetection::DynamicGetIrisPoints(BYTE* srcImageBuffer, int srcW, int srcH, int x1, int y1, int x2, int y2, int* newx1, int* newy1, int* newx2, int* newy2, int* srchW, int* srchH) { double sigma = sqrt(2.0); double distance = sqrt( (x2 - x1)*(x2 - x1) + (y2 - y1)*(y2 - y1) ); int filterSize = (int)(distance*0.2); if (filterSize % 2 == 0) { filterSize += 1; } double* filter = new double[filterSize*filterSize]; memset(filter, 0, sizeof(double)*(filterSize*filterSize)); int i, j; int index = 0; for (j = (-1)*filterSize/2; j <= filterSize/2; j++) { for (i = (-1)*filterSize/2; i <= filterSize/2; i++) { filter[index++] = exp((-1.0)*(i*i + j*j)/(2*sigma*sigma)); //filter1[index++] = (BYTE)(exp((-1.0)*(i*i + j*j)/(2*sigma*sigma))); } } double k = 1.0 / filter[0]; double filterSum = 0.0; for (i = 0; i < filterSize*filterSize; i++) { filter[i] *= k; filterSum += filter[i]; } // Normalization double min, max; min = INT_MAX; max = INT_MIN; for( i = 0 ; i < filterSize*filterSize ; i++) { if (filter[i] <= min) min = filter[i]; if (filter[i] >= max) max = filter[i]; } double LowBoundary = 27.0 ; double temp, temp1, temp2, temp3, temp4; for ( i = 0 ; i < filterSize*filterSize ; i++) { temp1 = filter[i]; temp2= fabs(filter[i] - min); temp3 = fabs(max - min); temp = 255.0*(temp2/temp3); temp4 = temp2/temp3; filter[i] = temp - LowBoundary;

} //filter[filterSize*filterSize/2] = 255.0/2.0; for ( i = 0 ; i < filterSize*filterSize ; i++) { if(filter[i]<0) { filter[i]= 0.0; } } //Normalization int srchWidth = (int)(IRIS_SRCH_WIDTH*(double)filterSize); int srchHeight = (int)(IRIS_SRCH_HEIGHT*(double)filterSize); unsigned char* erosedBuffer = new unsigned char[srcW*srcH]; unsigned char* closedBuffer = new unsigned char[srcW*srcH]; memcpy(erosedBuffer, srcImageBuffer, sizeof(unsigned char)*(srcW*srcH)); memcpy(closedBuffer, srcImageBuffer, sizeof(unsigned char)*(srcW*srcH)); int minX, minY, maxX, maxY; minX = x1 - (srchWidth+filterSize)/2; maxX = x1 + (srchWidth+filterSize)/2; minY = y1 - (srchHeight+filterSize)/2; maxY = y1 + (srchHeight+filterSize)/2; if (minX < 0) minX = 0; if (minY < 0) minY = 0; if (maxX > srcW) maxX = srcW - 1; if (maxY > srcH) maxY = srcH - 1; Erosion(srcImageBuffer, erosedBuffer, 3, 3, srcH, srcW, minY, maxY, minX, maxX); Dilation(erosedBuffer, closedBuffer, 3, 3, srcH, srcW, minY, maxY, minX, maxX); minX = x2 - (srchWidth+filterSize)/2; maxX = x2 + (srchWidth+filterSize)/2; minY = y2 - (srchHeight+filterSize)/2; maxY = y2 + (srchHeight+filterSize)/2; if (minX < 0) minX = 0; if (minY < 0) minY = 0; if (maxX > srcW) maxX = srcW - 1; if (maxY > srcH) maxY = srcH - 1; Erosion(srcImageBuffer, erosedBuffer, 3, 3, srcH, srcW, minY, maxY, minX, maxX); Dilation(erosedBuffer, closedBuffer, 3, 3, srcH, srcW, minY, maxY, minX, maxX); int row, col, in_row, in_col; int x, y; double sum; double* filteredBuffer = new double[srchWidth*srchHeight]; memset(filteredBuffer, 0, sizeof(double)*(srchWidth*srchHeight)); // left eye region for (row = 0; row < srchHeight; row++) { for (col = 0; col < srchWidth; col++) { sum = 0.0; for (in_row = 0; in_row < filterSize; in_row++) { for (in_col = 0; in_col < filterSize; in_col++) { x = x1 - srchWidth/2 + col - filterSize/2 + in_col; y = y1 - srchHeight/2 + row - filterSize/2 + in_row; if (x < 0 || x > srcW-1 || y < 0 || y > srcH-1) { sum += 
filter[in_row*filterSize + in_col]*255; } else {

sum += filter[in_row*filterSize + in_col]*closedBuffer[y*srcW + x]; } } } filteredBuffer[row*srchWidth + col] = sum; } } int locX1, locY1; DynamicGetMinCoord(filteredBuffer, srchWidth, srchHeight, &locX1, &locY1); *newx1 = x1 - srchWidth/2 + locX1; *newy1 = y1 - srchHeight/2 + locY1; // right eye region for (row = 0; row < srchHeight; row++) { for (col = 0; col < srchWidth; col++) { sum = 0.0; for (in_row = 0; in_row < filterSize; in_row++) { for (in_col = 0; in_col < filterSize; in_col++) { x = x2 - srchWidth/2 + col - filterSize/2 + in_col; y = y2 - srchHeight/2 + row - filterSize/2 + in_row; if (x < 0 || x > srcW-1 || y < 0 || y > srcH-1) { sum += filter[in_row*filterSize + in_col]*255; } else { sum += filter[in_row*filterSize + in_col]*closedBuffer[y*srcW + x]; } } } filteredBuffer[row*srchWidth + col] = sum; } } int locX2, locY2; DynamicGetMinCoord(filteredBuffer, srchWidth, srchHeight, &locX2, &locY2); *newx2 = x2 - srchWidth/2 + locX2; *newy2 = y2 - srchHeight/2 + locY2; delete[] erosedBuffer; delete[] closedBuffer; delete[] filter; delete[] filteredBuffer; *srchW = srchWidth; *srchH = srchHeight; } void EfDetection::DynamicGetMinCoord(double* srcImageBuffer, int width, int height, int* mx, int* my) { double srcMin = srcImageBuffer[0]; int tx, ty; for (ty = 0; ty < height; ty++) { for (tx = 0; tx < width; tx++) { if (srcImageBuffer[ty*width + tx] <= srcMin) { srcMin = srcImageBuffer[ty*width + tx];

*mx = tx; *my = ty; } } } } ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// // Main Eye detecttion ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// #define MIN_EYE_DISTANCE 5 BOOL EfDetection::GetEyePos(unsigned char* imageBuffer, int imageWidth, int imageHeight, int* lEyeX, int* lEyeY, int* rEyeX, int* rEyeY) { // Memory Allocation m_bFound = FALSE; m_ImageWidth = imageWidth; m_ImageHeight = imageHeight; int s1, s2; bool up1=true, up2=true; s1 = (int)((imageWidth*0.73) / 16.0 + 0.5); s2 = (int)((imageHeight*0.73) / 16.0 + 0.5); if (s1 - (int)((imageWidth*0.73) / 16.0) == 0) up1 = false; if (s2 - (int)((imageHeight*0.73) / 16.0) == 0) up2 = false; if (up1 && !up2) s2++; if (!up1 && up2) s1++; m_ScaleWidth = s1 * 16; m_ScaleHeight = s2 * 16; m_BoundaryMinX = (m_ScaleWidth - m_ScaleWidth + 20); m_BoundaryMinY = (m_ScaleHeight - m_ScaleHeight + 10); m_BoundaryMaxX = (m_ScaleWidth - 20); m_BoundaryMaxY = (m_ScaleHeight - 3); BYTE *pSrcBuffer = new BYTE[m_ScaleHeight*m_ScaleWidth]; BYTE *pPreprocBuffer = new BYTE[m_ScaleHeight*m_ScaleWidth]; BYTE *smoothingImage = new BYTE[m_ScaleHeight*m_ScaleWidth]; BYTE *closedImage_V = new BYTE[m_ScaleHeight*m_ScaleWidth]; BYTE *closedImage_V_1_2 = new BYTE[m_ScaleHeight*m_ScaleWidth]; BYTE *tempImage1 = new BYTE[m_ScaleHeight*m_ScaleWidth/4]; BYTE *tempImage2 = new BYTE[m_ScaleHeight*m_ScaleWidth/4]; BYTE *pAndImage = new BYTE[m_ScaleHeight*m_ScaleWidth]; BYTE *pThresBuffer = new BYTE[m_ScaleHeight*m_ScaleWidth]; int *pLabelBuffer = new int[m_ScaleHeight*m_ScaleWidth]; memset(pSrcBuffer, 0, m_ScaleHeight*m_ScaleWidth); memset(pPreprocBuffer, 0, m_ScaleHeight*m_ScaleWidth); memset(smoothingImage, 0, m_ScaleHeight*m_ScaleWidth); memset(closedImage_V, 0, m_ScaleHeight*m_ScaleWidth); memset(closedImage_V_1_2, 0, 
m_ScaleHeight*m_ScaleWidth); memset(tempImage1, 0, m_ScaleHeight*m_ScaleWidth/4); memset(tempImage2, 0, m_ScaleHeight*m_ScaleWidth/4); memset(pAndImage, 0, m_ScaleHeight*m_ScaleWidth); memset(pThresBuffer, 0, m_ScaleHeight*m_ScaleWidth); memset(pLabelBuffer, 0, sizeof(int)*m_ScaleHeight*m_ScaleWidth); // Scaling Original Image Scaling(imageBuffer, pSrcBuffer, m_ImageWidth, m_ImageHeight, m_ScaleWidth, m_ScaleHeight); memcpy(pPreprocBuffer, pSrcBuffer, m_ScaleHeight*m_ScaleWidth); // cfFilter Image unsigned char *tempBuffer1 = new unsigned char[m_ScaleWidth*m_ScaleHeight]; unsigned char *tempBuffer2 = new unsigned char[m_ScaleWidth*m_ScaleHeight]; memset(tempBuffer1, 0, sizeof(BYTE)*(m_ScaleWidth*m_ScaleHeight)); memset(tempBuffer2, 0, sizeof(BYTE)*(m_ScaleWidth*m_ScaleHeight)); memcpy(tempBuffer1, pSrcBuffer, m_ScaleWidth*m_ScaleHeight); CLAHE(tempBuffer1, m_ScaleWidth, m_ScaleHeight, 0, 255, 2, 2, 128, 5.0);

FilterImage1(tempBuffer1, tempBuffer2, m_ScaleHeight, m_ScaleWidth, 1, m_ScaleHeight-1, 1, m_ScaleWidth-1); // cfFilter Image // Histogram Equalization CLAHE(pPreprocBuffer, m_ScaleWidth, m_ScaleHeight, 0, 255, 4, 16, 128, 8.0); // Smoothing : MeanFilter & Erosion MeanFilter(pPreprocBuffer, smoothingImage, 3, 5, m_ScaleHeight, m_ScaleWidth); memset(pPreprocBuffer, 0, m_ScaleHeight*m_ScaleWidth); Erosion(smoothingImage, pPreprocBuffer, 1, 3, m_ScaleHeight, m_ScaleWidth, m_BoundaryMinY, m_BoundaryMaxY, m_BoundaryMinX, m_BoundaryMaxX); // Morphology ClosingGrayImage(pPreprocBuffer, closedImage_V, 7, 3, 0, m_ScaleHeight, m_ScaleWidth, m_BoundaryMinY, m_BoundaryMaxY, m_BoundaryMinX, m_BoundaryMaxX); ImageScaling(pPreprocBuffer, tempImage1, 2, 1); ClosingGrayImage(tempImage1, tempImage2, 7, 3, 0, m_ScaleHeight/2, m_ScaleWidth/2, m_BoundaryMinY/2, m_BoundaryMaxY/2, m_BoundaryMinX/2, m_BoundaryMaxX/2); delete [] tempImage1; ImageScaling(tempImage2, closedImage_V_1_2, 2, 0); delete [] tempImage2; int temp; for (int i = 0 ; i < m_ScaleHeight*m_ScaleWidth ; i++) { temp = closedImage_V[i] + closedImage_V_1_2[i]; if (temp/2 < THRESHOLD) pThresBuffer[i] = 0; else pThresBuffer[i] = 255; } delete[] closedImage_V; delete[] closedImage_V_1_2; // 2nd Derivative Gaussian Filter memset(pPreprocBuffer, 0, m_ScaleHeight*m_ScaleWidth); FilterImage(smoothingImage, pPreprocBuffer); // Morphology & Gaussian AndOperation(pThresBuffer, pPreprocBuffer, pAndImage); delete[] smoothingImage; delete[] pThresBuffer; // Noise Remove memset(pPreprocBuffer, 0, m_ScaleHeight*m_ScaleWidth); NoiseRemoving(pAndImage, pPreprocBuffer, 1, 9); delete [] pAndImage; // Labeling m_labelCnt = ConnectedComponents(pPreprocBuffer, pLabelBuffer); // Set feature window SetFeatureWindow(pLabelBuffer, m_pFtrInfo, m_labelCnt); delete [] pLabelBuffer; // SetEyePair SetEyePair(m_pFtrInfo, m_labelCnt, &m_ftrCnt); // Groups Feature points m_PairCnt = GroupingFeature(pSrcBuffer, tempBuffer2, tempBuffer1, m_pFtrInfo, 
m_pEyePair, m_ftrCnt); *lEyeX = m_filX1; *lEyeY = m_filY1; *rEyeX = m_filX2; *rEyeY = m_filY2;

delete [] pPreprocBuffer; delete [] pSrcBuffer; delete [] tempBuffer1; delete [] tempBuffer2; return m_bFound; } BOOL EfDetection::GetFaceImageTemp(unsigned char* imageBuffer) { /* BYTE *rotateImage = new BYTE[m_ScaleHeight*m_ScaleWidth]; BYTE *cropImage = new BYTE[FACE_ROW*FACE_COL]; memset(rotateImage, 0, sizeof(BYTE)*(m_ScaleHeight*m_ScaleWidth)); memset(cropImage, 0, sizeof(BYTE)*(FACE_ROW*FACE_COL)); double distance; int dx, dy; double radian; CPoint rotateLEye, rotateREye; distance = sqrt( (m_filX1 - m_filX2)*(m_filX1 - m_filX2) + (m_filY1 - m_filY2)*(m_filY1 - m_filY2)); if(distance < 1) return FALSE; dx = m_filX2 - m_filX1; dy = m_filY2 - m_filY1; radian = atan2(dy, dx); int eyex1 = m_filX2 - m_ScaleWidth/2; int eyey1 = m_filY2 - m_ScaleHeight/2; int eyex2 = m_filX1 - m_ScaleWidth/2; int eyey2 = m_filY1 - m_ScaleHeight/2; rotateREye.x = (cos(-radian)*eyex1 - sin(-radian)*eyey1) + m_ScaleWidth/2; rotateREye.y = (sin(-radian)*eyex1 + cos(-radian)*eyey1) + m_ScaleHeight/2; rotateLEye.x = (cos(-radian)*eyex2 - sin(-radian)*eyey2) + m_ScaleWidth/2; rotateLEye.y = (sin(-radian)*eyex2 + cos(-radian)*eyey2) + m_ScaleHeight/2; Rotation(imageBuffer, rotateImage, m_ScaleWidth, m_ScaleWidth, radian); CropImage(rotateImage, cropImage, m_ScaleWidth, m_ScaleWidth, rotateLEye, rotateREye); delete [] rotateImage; memcpy(m_pFaceBuffer, cropImage, FACE_ROW*FACE_COL); delete [] cropImage; */ return TRUE; } BOOL EfDetection::GetFaceImage(unsigned char* imageBuffer, int imageWidth, int imageHeight, int lEyeX, int lEyeY, int rEyeX, int rEyeY, unsigned char* faceImageBuffer, int faceImageWidth, int faceImageHeight, double sideDist, double upDist, double downDist) { double distance; double dx, dy; double rad; BYTE *output = new BYTE[imageWidth*imageHeight]; memset(output, 0, sizeof(BYTE)*(imageWidth*imageHeight)); distance = sqrt( (rEyeX - lEyeX)*(rEyeX - lEyeX) + (rEyeY - lEyeY)*(rEyeY - lEyeY) ); dx = rEyeX - lEyeX; dy = rEyeY - lEyeY; rad = atan2(dy, dx); double eyex1 = rEyeX - 
(double)imageWidth/2; double eyey1 = rEyeY - (double)imageHeight/2; double eyex2 = lEyeX - (double)imageWidth/2; double eyey2 = lEyeY - (double)imageHeight/2; int newx2 = (cos(-rad)*eyex1 - sin(-rad)*eyey1) + (double)imageWidth/2 + 0.5; int newy2 = (sin(-rad)*eyex1 + cos(-rad)*eyey1) + (double)imageHeight/2 + 0.5;

int newx1 = (cos(-rad)*eyex2 - sin(-rad)*eyey2) + (double)imageWidth/2 + 0.5; int newy1 = (sin(-rad)*eyex2 + cos(-rad)*eyey2) + (double)imageHeight/2 + 0.5; Rotation(imageBuffer, output, imageWidth, imageHeight, rad); int i, j; double mx, my; int sx, sy, ex, ey; mx = (newx2 + newx1)/2.0; my = (newy2 + newy1)/2.0; distance = sqrt((newx2 - newx1)*(newx2 - newx1) + (newy2 - newy1)*(newy2 - newy1)); sx = (int)(mx - distance + 0.5); ex = (int)(mx + distance + 0.5); sy = (int)(my - distance*0.5 + 0.5); ey = (int)(my + distance*1.9 + 0.5); int row = 0, col = 0; int tempWidth, tempHeight; tempWidth = ex - sx; tempHeight = ey - sy; BYTE *tempImage = new BYTE[tempWidth*tempHeight]; memset(tempImage, 0, sizeof(BYTE)*(tempWidth*tempHeight)); int temp = sy; for (i = 0; i < imageHeight; i++) { for (j = 0; j < imageWidth; j++) { if ((i >= sy) && (i < ey) && (j >= sx) && (j < ex)) { if (temp < 0) { tempImage[row*tempWidth+col] = 125; } else tempImage[row*tempWidth+col] = output[i*imageWidth+j]; col++; } } if ((i >= sy) && (i < ey)) { row++; if (temp < 0) i--; temp++; } col = 0; } Scaling(tempImage, faceImageBuffer, tempWidth, tempHeight, faceImageWidth, faceImageHeight); delete [] tempImage; delete [] output; return TRUE; } BOOL EfDetection::CheckCandidate(BYTE *input, int width, int height) { int labelCnt, ftrCnt; VfFtrInfo pFtrInfo[100]; BYTE *pPreprocBuffer = new BYTE[height*width]; BYTE *smoothingImage = new BYTE[height*width]; BYTE *closedImage_V = new BYTE[height*width]; BYTE *closedImage_V_1_2 = new BYTE[height*width]; BYTE *tempImage1 = new BYTE[height*width/4]; BYTE *tempImage2 = new BYTE[height*width/4]; BYTE *pAndImage = new BYTE[height*width]; BYTE *pThresBuffer = new BYTE[height*width]; int *pLabelBuffer = new int[height*width]; memset(pPreprocBuffer, 0, height*width); memset(smoothingImage, 0, height*width); memset(closedImage_V, 0, height*width);

// ---------------------------------------------------------------------------
// NOTE(review): this excerpt begins mid-function. The signature and the
// declarations of `input`, `width`, `height`, the working buffers
// (`pPreprocBuffer`, `smoothingImage`, `closedImage_V`, `closedImage_V_1_2`,
// `tempImage1`, `tempImage2`, `pAndImage`, `pThresBuffer`, `pLabelBuffer`),
// `pFtrInfo`, `labelCnt`, `ftrCnt` and the reused loop index `i` all live
// above this chunk -- presumably EfDetection::GetEyePos per the project
// overview; confirm against the full file.
// ---------------------------------------------------------------------------
// Clear all working buffers for this frame.
memset(closedImage_V_1_2, 0, height*width);
memset(tempImage1, 0, height*width/4);        // half-resolution buffers (w/2 x h/2)
memset(tempImage2, 0, height*width/4);
memset(pAndImage, 0, height*width);
memset(pThresBuffer, 0, height*width);
memset(pLabelBuffer, 0, sizeof(int)*height*width);
memcpy(pPreprocBuffer, input, height*width);
// Histogram Equalization
// Contrast Limited Adaptive Histogram Equalization over a 10x10 tile grid.
CLAHE(pPreprocBuffer, width, height, 0, 255, 10, 10, 128, 3.0);
// Smoothing : MeanFilter & Erosion
// 3x3 mean filter followed by grayscale erosion; borders of 4 px are skipped.
MeanFilter(pPreprocBuffer, smoothingImage, 3, 3, height, width, 4, height-4, 4, width-4);
Erosion(smoothingImage, pPreprocBuffer, 1, 3, height, width, 4, height-4, 4, width-4);
// Morphology
// Grayscale closing at full resolution ...
ClosingGrayImage(pPreprocBuffer, closedImage_V, 5, 3, 3, height, width, 4, height-4, 4, width-4);
// ... and again at half resolution (downscale, close, upscale back).
ImageScaling(pPreprocBuffer, tempImage1, 2, 1, height, width);
ClosingGrayImage(tempImage1, tempImage2, 3, 3, 7, height/2, width/2, 2, (height-4)/2, 2, (width-4)/2);
delete [] tempImage1;
ImageScaling(tempImage2, closedImage_V_1_2, 2, 0, height, width);
delete [] tempImage2;
// Threshold the average of the two closing results (fixed threshold 17).
int temp;
for (int i = 0 ; i < height*width ; i++) {
    temp = closedImage_V[i] + closedImage_V_1_2[i];
    if (temp/2 < 17) pThresBuffer[i] = 0;
    else pThresBuffer[i] = 255;
}
delete[] closedImage_V;
delete[] closedImage_V_1_2;
// 2nd Derivative Gaussian Filter
FilterImage(smoothingImage, pPreprocBuffer, height, width, 4, height-4, 4, width-4);
// Morphology & Gaussian
// Keep only pixels flagged by BOTH the morphology mask and the filter output.
AndOperation(pThresBuffer, pPreprocBuffer, pAndImage, height, width);
delete[] smoothingImage;
delete[] pThresBuffer;
// Noise Remove
NoiseRemoving(pAndImage, pPreprocBuffer, height, width, 4, height-4, 4, width-4);
delete [] pAndImage;
// Labeling
labelCnt = ConnectedComponents(pPreprocBuffer, pLabelBuffer, height, width);
delete [] pPreprocBuffer;
// Set feature window
// Compute the bounding rect of each labeled component into pFtrInfo.
SetFeatureWindow(pLabelBuffer, pFtrInfo, labelCnt, height, width, 4, height-4, 4, width-4);
//delete [] pLabelBuffer;
// SetEyePair
// Search the candidate components for a left/right eye pair inside fixed
// search windows, then sanity-check the remaining components.
ftrCnt = labelCnt;
bool lfind = false, rfind = false;
CRect lEye(EYE_LEFT_X, EYE_LEFT_Y, EYE_LEFT_X+EYE_WINDOW_WIDTH+5,
           EYE_LEFT_Y+EYE_WINDOW_HEIGHT);
CRect rEye(EYE_RIGHT_X, EYE_RIGHT_Y, EYE_RIGHT_X+EYE_WINDOW_WIDTH+5, EYE_RIGHT_Y+EYE_WINDOW_HEIGHT);
// Too many candidate components -> scene too cluttered to be a face.
if (ftrCnt > 17) return FALSE;
int maxIdx = -1, lIdx, rIdx;
int maxLength = 0;
int length;

int lEyeY, rEyeY;
// Pick, per search window, a component whose center lies inside it and whose
// box is small and wider than tall (an eye-shaped blob). Last match wins.
for (i = 0; i < ftrCnt; i++) {
    int Height = pFtrInfo[i].ftrRect.Height();
    int Width = pFtrInfo[i].ftrRect.Width();
    if (lEye.PtInRect(pFtrInfo[i].ftrRect.CenterPoint()) && (Width < 30) && (Height < 15) && (Width >= Height)) {
        lfind = true;
        lIdx = i;
        lEyeY = pFtrInfo[i].ftrRect.CenterPoint().y;
    }
    if (rEye.PtInRect(pFtrInfo[i].ftrRect.CenterPoint()) && (Width < 30) && (Height < 15) && (Width >= Height)) {
        rfind = true;
        rIdx = i;
        rEyeY = pFtrInfo[i].ftrRect.CenterPoint().y;
    }
}
// Both eyes are required.
if ((!lfind) || (!rfind)) return FALSE;
// Find the widest non-eye component below y=13 (mouth candidate); any
// component spanning more than 80% of the image width rejects the frame.
for (i = 0; i < ftrCnt; i++) {
    length = pFtrInfo[i].ftrRect.Width();
    int y = pFtrInfo[i].ftrRect.CenterPoint().y;
    if ((length > maxLength) && (i != lIdx) && (i != rIdx) && (y > 13)) {
        maxLength = length;
        maxIdx = i;
    }
    if (length > 0.8*width) return FALSE;
}
// Sum the area of leftover components at or below the left eye; too much
// residual clutter means this is probably not a face.
double sizeSum = 0.0;
for (i = 0; i < ftrCnt; i++) {
    if (i == lIdx || i == rIdx || i == maxIdx) continue;
    if (pFtrInfo[i].ftrRect.CenterPoint().y < pFtrInfo[lIdx].ftrRect.CenterPoint().y) continue;
    sizeSum += (double)(pFtrInfo[i].ftrRect.Width() * pFtrInfo[i].ftrRect.Height());
}
double sizeRatio = sizeSum / (width*height);
if (sizeRatio > THRES_FTR_SIZE_RATIO) {
    return FALSE;
}
return TRUE;
}

// ---------------------------------------------------------------------------
// FaceVerify
// Verifies that a candidate eye pair sits on a plausible face by comparing
// sampled gray-level statistics of the eye regions against the between-eye
// and cheek regions (eyes are expected to be darker than skin), and fills
// pEyePair with position, distance, and confidence values.
//
//   srcBuffer      : 8-bit grayscale image, srcW x srcH
//   lEyeX/lEyeY    : left eye center (pixels)
//   rEyeX/rEyeY    : right eye center (pixels)
//   pEyePair [out] : result; eyeStructConf is set to -1.0 on rejection
//
// NOTE(review): `maxConf` and `bGrad` are never used; `btnEyeStd`, `upDiff`
// and `dnDiff` are computed but unused; the first `eyeStructConf` assignment
// is immediately overwritten by the second. Also, the final expression
// divides by (lCheekStd + rCheekStd), which is zero for perfectly flat cheek
// patches -- confirm inputs make that impossible.
// ---------------------------------------------------------------------------
void EfDetection::FaceVerify(unsigned char* srcBuffer, int srcW, int srcH, int lEyeX, int lEyeY, int rEyeX, int rEyeY, VfEyePair* pEyePair)
{
    double maxConf = -1.0;
    double dEyeDist;
    int btnEyeX, btnEyeY, lCheekX, lCheekY, rCheekX, rCheekY;
    int eyeW, eyeH, btnEyeW, btnEyeH, cheekDist, cheekW, cheekH;
    double btnEyeMean, lEyeMean, rEyeMean, lCheekMean, rCheekMean;
    double btnEyeStd, lEyeStd, rEyeStd, lCheekStd, rCheekStd;
    bool bGrad = false;
    int lex, ley, rex, rey;
    lex = ley = rex = rey = 0;
    lex = lEyeX;
    ley = lEyeY;
    rex = rEyeX;
    rey = rEyeY;
    // Euclidean distance between the eyes; all face-region sizes scale off it.
    dEyeDist = sqrt( (lEyeX- rEyeX)*(lEyeX - rEyeX) + (lEyeY - rEyeY)*(lEyeY - rEyeY) );
    // Between-eye reference point, nudged up by 10% of the eye distance.
    btnEyeX = (rex + lex)/2;
    btnEyeY = (rey + ley)/2 - dEyeDist*0.1;
    // set face structure parameter
    eyeW = (int)(dEyeDist * RATIO_EYEW + 0.5);
    eyeH = (int)(dEyeDist * RATIO_EYEH + 0.5);
    btnEyeW = (int)(dEyeDist * RATIO_BTNEYEW + 0.5);
    btnEyeH = (int)(dEyeDist * RATIO_BTNEYEH + 0.5);
    cheekDist = (int)(dEyeDist * RATIO_CHEEKDIST + 0.5);
    cheekW = (int)(dEyeDist * RATIO_CHEEKW + 0.5);
    cheekH = (int)(dEyeDist * RATIO_CHEEKH + 0.5);

    // Sample mean / std-dev of gray levels in each face region.
    btnEyeMean = GetDiscreteMean(srcBuffer, srcW, srcH, btnEyeX, btnEyeY, btnEyeW, btnEyeH, 20, 20);
    lCheekX = lex;
    lCheekY = ley + cheekDist;    // cheeks sit cheekDist below each eye
    rCheekX = rex;
    rCheekY = rey + cheekDist;
    lEyeMean = GetDiscreteMean(srcBuffer, srcW, srcH, lex, ley, eyeW, eyeH, 20, 20);
    lCheekMean = GetDiscreteMean(srcBuffer, srcW, srcH, lCheekX, lCheekY, cheekW, cheekH, 20, 20);
    rEyeMean = GetDiscreteMean(srcBuffer, srcW, srcH, rex, rey, eyeW, eyeH, 20, 20);
    rCheekMean = GetDiscreteMean(srcBuffer, srcW, srcH, rCheekX, rCheekY, cheekW, cheekH, 20, 20);
    lEyeStd = GetDiscreteStd(srcBuffer, srcW, srcH, lex, ley, eyeW, eyeH, 10, 10);
    lCheekStd = GetDiscreteStd(srcBuffer, srcW, srcH, lCheekX, lCheekY, cheekW, cheekH, 10, 10);
    rEyeStd = GetDiscreteStd(srcBuffer, srcW, srcH, rex, rey, eyeW, eyeH, 10, 10);
    rCheekStd = GetDiscreteStd(srcBuffer, srcW, srcH, rCheekX, rCheekY, cheekW, cheekH, 10, 10);
    btnEyeStd = GetDiscreteStd(srcBuffer, srcW, srcH, btnEyeX, btnEyeY, btnEyeW, btnEyeH, 20, 20);
    double gradXSum = 0, gradYSum = 0;
    // Accept only if both eyes are markedly darker than their cheek and the
    // between-eye region, and the left/right sides are roughly symmetric.
    if (lCheekMean - lEyeMean > 30.0 && btnEyeMean - lEyeMean > 20.0 && rCheekMean - rEyeMean > 30.0 && btnEyeMean - rEyeMean > 20.0 && fabs(lEyeMean - rEyeMean) < 70.0 && fabs(lCheekMean - rCheekMean) < 70.0) {
        double cfVal = SetCFF(srcBuffer, srcW, srcH, btnEyeX, btnEyeY, dEyeDist/2.0, dEyeDist/4.0);
        SetMouthGradient(srcBuffer, srcW, srcH, lex, ley, rex, rey, &gradXSum, &gradYSum);
        {
            pEyePair->lEye.x = lex;
            pEyePair->lEye.y = ley;
            pEyePair->rEye.x = rex;
            pEyePair->rEye.y = rey;
            pEyePair->distance = dEyeDist;
            pEyePair->cfVal = cfVal;
            pEyePair->mouthGradX = gradXSum;
            pEyePair->mouthGradY = gradYSum;
            // Fraction of total brightness contributed by the (brighter) skin
            // regions vs. the (darker) eye regions.
            double upMean = (btnEyeMean + lCheekMean + rCheekMean) / (btnEyeMean + lCheekMean + rCheekMean + lEyeMean + rEyeMean);
            double dnMean = (lEyeMean + rEyeMean) / (btnEyeMean + lCheekMean + rCheekMean + lEyeMean + rEyeMean);
            // NOTE(review): upDiff / dnDiff are computed but never used.
            double upDiff = ( fabs(lEyeMean - lCheekMean) / (lEyeMean + lCheekMean) + fabs(rEyeMean - rCheekMean) / (rEyeMean + rCheekMean) + fabs(lEyeMean - btnEyeMean) / (lEyeMean + btnEyeMean) + fabs(rEyeMean - btnEyeMean) / (rEyeMean + btnEyeMean) ) / 4.0;
            double dnDiff = ( fabs(lEyeMean - rEyeMean) / (lEyeMean + rEyeMean) + fabs(lCheekMean - rCheekMean) / (lCheekMean + rCheekMean) + fabs(btnEyeMean - lCheekMean) / (btnEyeMean + lCheekMean) + fabs(btnEyeMean - rCheekMean) / (btnEyeMean + rCheekMean) ) / 4.0;
            // NOTE(review): dead store -- overwritten on the next line.
            pEyePair->eyeStructConf = upMean - dnMean;
            pEyePair->eyeStructConf = upMean - dnMean +(lEyeStd+rEyeStd)/(lCheekStd+rCheekStd);
        }
    } else {
        // Structure test failed: mark the pair as rejected.
        pEyePair->eyeStructConf = -1.0;
    }
}

// ---------------------------------------------------------------------------
// GetDiscreteStd
// Standard deviation of gray levels sampled on a sparse (wStep+1)x(hStep+1)
// grid over a width x height window centered at (mx, my). Consecutive
// duplicate sample coordinates are skipped, and the single largest and
// smallest samples are excluded before computing the statistic (a crude
// outlier trim).
//
//   srcBuffer     : 8-bit grayscale image, srcW x srcH
//   mx, my        : window center (pixels)
//   width, height : window size (pixels)
//   wStep, hStep  : number of grid steps per axis
//   returns       : trimmed standard deviation of the sampled values
//
// NOTE(review): `meansqr` subtracts `srcMin` (the raw minimum) where
// `srcMinStd` (its square) is almost certainly intended -- the symmetric
// line for the mean uses srcMax/srcMin and this one uses srcMaxStd, so the
// pattern breaks here. Also no bounds clamping: a window near the image edge
// reads outside srcBuffer -- TODO confirm callers keep windows inside.
// ---------------------------------------------------------------------------
double EfDetection::GetDiscreteStd(unsigned char* srcBuffer, int srcW, int srcH, int mx, int my, int width, int height, int wStep, int hStep)
{
    double xInc = (double)(width) / (double)(wStep);
    double yInc = (double)(height) / (double)(hStep);
    double x, y;
    int ix, iy;
    int i, j;
    double sum = 0, mean=0.0;
    double sum2 = 0, meansqr=0.0;
    double variance =0.0;
    double std = 0.0;
    int beforeX, beforeY;

    beforeX = beforeY = 0;
    int cnt = 0;
    double srcMax = 0.0;          // running max sample and its square
    double srcMaxStd = 0.0;
    double srcMin = 1000000.0;    // running min sample and its square
    double srcMinStd = 0.0;
    y = my - height/2;
    for (j = 0; j <= hStep; j++) {
        x = mx - width/2;
        for (i = 0; i <= wStep; i++) {
            ix = (int)(x + 0.5);
            iy = (int)(y + 0.5);
            // Skip when rounding lands on the same pixel as the previous sample.
            if (ix != beforeX || iy != beforeY) {
                beforeX = ix;
                beforeY = iy;
                sum += srcBuffer[iy*srcW + ix];
                sum2 += srcBuffer[iy*srcW + ix]*srcBuffer[iy*srcW + ix];
                cnt++;
                //sort
                // Track the extreme samples so they can be trimmed below.
                if (srcBuffer[iy*srcW + ix] >= srcMax) {
                    srcMax = srcBuffer[iy*srcW + ix];
                    srcMaxStd = srcBuffer[iy*srcW + ix]*srcBuffer[iy*srcW + ix];
                }
                if (srcBuffer[iy*srcW + ix] <= srcMin) {
                    srcMin = srcBuffer[iy*srcW + ix];
                    srcMinStd = srcBuffer[iy*srcW + ix]*srcBuffer[iy*srcW + ix];
                }
                //
            }
            x += xInc;
        }
        y += yInc;
    }
    // mean = sum / ((wStep+1)*(hStep+1));
    // Trimmed moments: drop the one max and one min sample.
    mean = (sum-srcMax-srcMin)/ (cnt-2);
    // NOTE(review): suspect `srcMin` here should be `srcMinStd` -- see header.
    meansqr = (sum2-srcMaxStd-srcMin)/ (cnt-2);
    variance = (double)( meansqr-(mean*mean) );
    std = sqrt(variance);
    return std;
}

// ---------------------------------------------------------------------------
// GetDiscreteMean
// Mean gray level sampled on a sparse (wStep+1)x(hStep+1) grid over a
// width x height window centered at (mx, my). Consecutive duplicate sample
// coordinates (after rounding) are counted once.
//
//   srcBuffer     : 8-bit grayscale image, srcW x srcH
//   mx, my        : window center (pixels)
//   width, height : window size (pixels)
//   wStep, hStep  : number of grid steps per axis
//   returns       : mean of the distinct sampled values
//
// NOTE(review): like GetDiscreteStd, there is no bounds clamping -- windows
// near the image border read outside srcBuffer; TODO confirm callers.
// ---------------------------------------------------------------------------
double EfDetection::GetDiscreteMean(unsigned char* srcBuffer, int srcW, int srcH, int mx, int my, int width, int height, int wStep, int hStep)
{
    double xInc = (double)(width) / (double)(wStep);
    double yInc = (double)(height) / (double)(hStep);
    double x, y;
    int ix, iy;
    int i, j;
    double sum = 0, mean;
    int beforeX, beforeY;
    beforeX = beforeY = 0;
    int cnt = 0;
    y = my - height/2;
    for (j = 0; j <= hStep; j++) {
        x = mx - width/2;
        for (i = 0; i <= wStep; i++) {
            ix = (int)(x + 0.5);
            iy = (int)(y + 0.5);
            // Count each rounded pixel position only once in a row of samples.
            if (ix != beforeX || iy != beforeY) {
                beforeX = ix;
                beforeY = iy;
                sum += srcBuffer[iy*srcW + ix];
                cnt++;
            }
            x += xInc;
        }
        y += yInc;
    }
    // mean = sum / ((wStep+1)*(hStep+1));
    mean = sum / cnt;
    return mean;
}

// ---------------------------------------------------------------------------
// SetEyeCircle
// Refines an eye position by scoring concentric circular samplings of a
// morphologically closed neighborhood: a dark iris surrounded by brighter
// sclera/skin yields a high (outer mean - inner mean) contrast. The best-
// scoring center within a wStep x hStep search grid wins.
//
//   srcBuffer       : 8-bit grayscale image, imageW x imageH
//   eyeX, eyeY      : initial eye center estimate (pixels)
//   eyeDist         : inter-eye distance; iris radius is ~0.2 * eyeDist
//   newEyeX/newEyeY : [out] refined center, or -1/-1 on failure
//   eyeCrclConf     : [out] best contrast score, or -1.0 on failure
//   returns         : true iff best score exceeds THRES_EYECRCL
//
// NOTE(review): `closeSize` is not declared in this excerpt -- presumably a
// class member; confirm. The early `return false` inside the sampling loop
// leaks erosedBuffer/closedBuffer/confBuffer/grayVal/grayRMean (five new[]
// allocations). The bounds test uses `x > imageW` / `y > imageH`; indexing
// closedBuffer[y*imageW + x] needs `>=` to exclude the last row/column.
// `grayVal` is filled but never read.
// ---------------------------------------------------------------------------
bool EfDetection::SetEyeCircle(unsigned char* srcBuffer, int imageW, int imageH, int eyeX, int eyeY, int eyeDist, int* newEyeX, int* newEyeY, double* eyeCrclConf)
{
    // Iris diameter ~20% of the eye distance, forced odd for a symmetric mask.
    int irisSize = (int)(eyeDist*0.2 + 0.5);
    if (irisSize % 2 == 0) {
        irisSize += 1;
    }
    int srchWidth = (int)(1.0*(double)irisSize);
    int srchHeight = (int)(1.0*(double)irisSize);
    int wStep = 10;
    int hStep = 10;
    unsigned char* erosedBuffer = new unsigned char[imageW*imageH];
    unsigned char* closedBuffer = new unsigned char[imageW*imageH];
    memcpy(erosedBuffer, srcBuffer, sizeof(unsigned char)*(imageW*imageH));
    memcpy(closedBuffer, srcBuffer, sizeof(unsigned char)*(imageW*imageH));
    // Clamp the region of interest (search window plus iris margin) to the image.
    int minX, minY, maxX, maxY;
    minX = eyeX - (srchWidth+irisSize)/2;
    maxX = eyeX + (srchWidth+irisSize)/2;
    minY = eyeY - (srchHeight+irisSize)/2;
    maxY = eyeY + (srchHeight+irisSize)/2;
    if (minX < 0) minX = 0;
    if (minY < 0) minY = 0;
    if (maxX > imageW) maxX = imageW - 1;
    if (maxY > imageH) maxY = imageH - 1;

    // close eye region
    // Grayscale closing (erosion then dilation) restricted to the ROI.
    Erosion(srcBuffer, erosedBuffer, closeSize, closeSize, imageH, imageW, minY, maxY, minX, maxX);

    Dilation(erosedBuffer, closedBuffer, closeSize, closeSize, imageH, imageW, minY, maxY, minX, maxX);
    double* confBuffer = new double[wStep*hStep];
    memset(confBuffer, 0, sizeof(double)*(wStep*hStep));
    int* grayVal = new int[NUM_ECPOINT*NUM_ERPOINT];
    double* grayRMean = new double[NUM_ERPOINT];
    double angle;
    double angleStep = 2.0 * PI / NUM_ECPOINT;    // NUM_ECPOINT samples per circle
    double radiusMax = (double)eyeDist * 0.2;
    double radius;
    double radiusStep = radiusMax / NUM_ERPOINT;  // NUM_ERPOINT concentric radii
    double inMean, outMean;
    double xInc = (double)(srchWidth) / (double)(wStep);
    double yInc = (double)(srchHeight) / (double)(hStep);
    double col, row;
    int icol, irow;
    int i, j;

    int x, y, a, r;
    row = eyeY - srchHeight/2;
    // Slide a candidate circle center over the search grid.
    for (j = 0; j < hStep; j++) {
        col = eyeX - srchWidth/2;
        for (i = 0; i < wStep; i++) {
            icol = (int)(col + 0.5);
            irow = (int)(row + 0.5);
            memset(grayRMean, 0, sizeof(double)*NUM_ERPOINT);
            // get eye circle confidence
            // Mean gray level on each concentric circle around the candidate.
            radius = 0.0;
            for (r = 0; r < NUM_ERPOINT; r++) {
                angle = 0.0;
                for (a = 0; a < NUM_ECPOINT; a++) {
                    x = icol + ROUND((-1.0)*radius*sin(angle));
                    y = irow + ROUND((-1.0)*radius*cos(angle));
                    // NOTE(review): bails out without freeing the five buffers
                    // above, and should use >= for the upper bounds.
                    if (x < 0 || x > imageW || y < 0 || y > imageH) return false;
                    grayVal[r*NUM_ECPOINT + a] = closedBuffer[y*imageW + x];
                    grayRMean[r] += closedBuffer[y*imageW + x];
                    // grayVal[r*NUM_ECPOINT + a] = srcBuffer[y*imageW + x];
                    // grayRMean[r] += srcBuffer[y*imageW + x];
                    angle += angleStep;
                }
                grayRMean[r] /= NUM_ECPOINT;
                radius += radiusStep;
            }
            // Contrast of inner (iris) vs outer (surround) rings, normalized.
            inMean = outMean = 0.0;
            for (r = 0; r < NUM_ERPOINT; r++) {
                if (r < NUM_ERPOINT/2) {
                    inMean += grayRMean[r];
                } else {
                    outMean += grayRMean[r];
                }
            }
            inMean /= NUM_ERPOINT/2;
            outMean /= NUM_ERPOINT/2;
            confBuffer[j*wStep + i] = (outMean - inMean) / (inMean + outMean);
            col += xInc;
        }
        row += yInc;
    }
    // Pick the grid cell with the highest contrast score.
    bool bIsEye = false;
    int locIdxX, locIdxY;
    double maxConf = DynamicGetMaxCoord(confBuffer, wStep, hStep, &locIdxX, &locIdxY);
    if (maxConf > THRES_EYECRCL) {
        bIsEye = true;
        *eyeCrclConf = maxConf;
        // Map the winning grid cell back to image coordinates.
        *newEyeX = eyeX - srchWidth/2 + locIdxX*xInc;
        *newEyeY = eyeY - srchHeight/2 + locIdxY*yInc;
    } else {
        bIsEye = false;
        *eyeCrclConf = -1.0;
        *newEyeX = -1;
        *newEyeY = -1;
    }

    delete[] erosedBuffer;
    delete[] closedBuffer;
    delete[] confBuffer;
    delete[] grayVal;
    delete[] grayRMean;
    return bIsEye;
}