source: foam/trunk/camera-opencv-test/face_detect.c @ 42

Revision 42, 8.7 KB checked in by dave, 11 years ago

added

/*
** face_detect.c
**
** Made by (Arne Caspari)
** Login   <arne@localhost>
**
** Started on  Fri Oct 12 11:09:49 2007 Arne Caspari
*/

#include <cv.h>
#include <highgui.h>  /* cvNamedWindow, cvShowImage, cvWaitKey, cvDestroyAllWindows */
#include <unicap.h>
#include <ucil.h>
#include <stdio.h>
#include <stdlib.h>   /* malloc, exit */
#include <string.h>   /* strcpy */
#include <unistd.h>   /* usleep */
#include <glib.h>

static volatile int quit = 0;

struct caminfo
{
      unicap_handle_t    handle;
      char              *device_identifier;
      unsigned int       fourcc;
      unicap_rect_t      format_size;
      char              *window;
      unicap_data_buffer_t buffer;
      IplImage          *image;
      int                frame_count;
      unicap_property_t *properties;
      int                property_count;
      CvHaarClassifierCascade *cascade;
      CvMemStorage            *storage;
};


//
// Define arrays of properties which should be set during
// initialisation here. The information can be obtained from the
// "device_info" example.
//
static unicap_property_t camera0_properties[] =
{
/*    { */
/*       identifier:      "Brightness",  */
/*       relations_count: 0, */
/*       { value:         16000.0 },  */
/*       flags:           UNICAP_FLAGS_MANUAL, */
/*       flags_mask:      UNICAP_FLAGS_MANUAL, */
/*    }, */
};

static unicap_property_t camera1_properties[] =
{
   {
      identifier:      "focus",
      relations_count: 0,
      { value:         600.0 },
      flags:           UNICAP_FLAGS_MANUAL,
      flags_mask:      UNICAP_FLAGS_MANUAL,
   },
};

//
// Define the cameras that should be opened here. The information
// could be obtained from the "device_info" example
//
static struct caminfo cameras[] =
{
   {
      handle:            NULL,
      device_identifier: "Imaging Source DFx 31AF03-Z 1940800048",
      fourcc:            UCIL_FOURCC( 'Y', '8', '0', '0' ),
      format_size:       { 0, 0, 1024, 768 },
      window:            "Camera 1",
      image:             NULL,
      properties:        camera0_properties,
      property_count:    sizeof( camera0_properties ) / sizeof( unicap_property_t ),

      cascade: NULL,
      storage: NULL,
   },
};
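
//
// A second camera entry could be appended to the array above; the
// sketch below is hypothetical (the device identifier is a
// placeholder that would come from the "device_info" example) and
// would use the otherwise unused camera1_properties array:
//
/*    { */
/*       handle:            NULL, */
/*       device_identifier: "<identifier reported by device_info>", */
/*       fourcc:            UCIL_FOURCC( 'Y', '8', '0', '0' ), */
/*       format_size:       { 0, 0, 1024, 768 }, */
/*       window:            "Camera 2", */
/*       image:             NULL, */
/*       properties:        camera1_properties, */
/*       property_count:    sizeof( camera1_properties ) / sizeof( unicap_property_t ), */
/*       cascade:           NULL, */
/*       storage:           NULL, */
/*    }, */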

//
// Define the FourCC for the target buffer here.
// For example:
// Y800 for monochrome 8 bit images
// RGB3 for 24 bit RGB images
//
#define TARGET_FOURCC ( UCIL_FOURCC( 'Y', '8', '0', '0' ) )

//
// Define the BitsPerPixel for the target buffer here
// For example:
// 8 for 8 bit monochrome images
// 24 for 24 bit RGB images
//
#define TARGET_BPP ( 8 )
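
//
// For a 24 bit RGB target buffer the two defines above would instead
// look like this (alternative sketch, not used by this program; note
// that detect_and_draw() below assumes a single channel image):
//
// #define TARGET_FOURCC ( UCIL_FOURCC( 'R', 'G', 'B', '3' ) )
// #define TARGET_BPP ( 24 )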


#define CASCADE_NAME "../haarcascade_frontalface_default.xml"
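//
// The cascade file ships with OpenCV (usually under data/haarcascades
// in the OpenCV sources); the relative path above assumes a copy has
// been placed one directory above the working directory.
//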

void detect_and_draw( IplImage* img, CvHaarClassifierCascade *cascade, CvMemStorage *storage )
{
    static CvScalar colors[] =
    {
        {{255,255,255}},
        {{0,128,255}},
        {{0,255,255}},
        {{0,255,0}},
        {{255,128,0}},
        {{255,255,0}},
        {{255,0,0}},
        {{255,0,255}}
    };

    double scale = 2;
    IplImage* small_img = cvCreateImage( cvSize( cvRound (img->width/scale),
                                                 cvRound (img->height/scale)),
                                         8, 1 );
    int i;

    double t = (double)cvGetTickCount();

    cvResize( img, small_img, CV_INTER_LINEAR );
    // Equalize the downscaled copy that is fed to the detector
    cvEqualizeHist( small_img, small_img );
/*     cvPyrDown( img, small_img, CV_GAUSSIAN_5x5 ); */
    cvClearMemStorage( storage );

    if( cascade )
    {
        CvSeq* faces = cvHaarDetectObjects( small_img, cascade, storage,
                                            1.2, 2, CV_HAAR_DO_CANNY_PRUNING,
                                            cvSize(30, 30) );
        t = (double)cvGetTickCount() - t;
        printf( "detection time = %gms\n", t/((double)cvGetTickFrequency()*1000.) );
        for( i = 0; i < (faces ? faces->total : 0); i++ )
        {
            CvRect* r = (CvRect*)cvGetSeqElem( faces, i );
            CvPoint center;
            int radius;
            center.x = cvRound((r->x + r->width*0.5)*scale);
            center.y = cvRound((r->y + r->height*0.5)*scale);
            radius = cvRound((r->width + r->height)*0.25*scale);
            cvCircle( img, center, radius, colors[i%8], 3, 8, 0 );
        }
    }

    cvReleaseImage( &small_img );
}

//
// Implement your image processing function in this callback.
//
// Currently this callback runs face detection on the frame and
// displays the result through OpenCV. cvWaitKey( 5 ) is required to
// get the window updated. The program terminates once the callbacks
// for all cameras have set their 'quit' condition.
//
static void new_frame_cb( unicap_event_t event, unicap_handle_t handle, unicap_data_buffer_t *buffer, struct caminfo *camera )
{
   ucil_convert_buffer( &camera->buffer, buffer );

   detect_and_draw( camera->image, camera->cascade, camera->storage );
   cvShowImage( camera->window, camera->image );

   if( cvWaitKey( 5 ) == 'q' )
   {
      quit++;
   }
}

//
//
//
int main( int argc, char **argv )
{
   int dev_count = sizeof( cameras ) / sizeof( struct caminfo );
   int res = 0;
   int i;

   //
   // Important: You need to call g_thread_init since OpenCV's HighGUI
   // uses GTK/GLib and we are calling it from a threaded environment.
   //
   g_thread_init( NULL );

   for( i = 0; i < dev_count; i++ )
   {
      unicap_device_t device, device_spec;
      unicap_format_t format, format_spec;
      int j;

      unicap_void_device( &device_spec );
      strcpy( device_spec.identifier, cameras[i].device_identifier );
      if( !SUCCESS( unicap_enumerate_devices( &device_spec, &device, 0 ) ) )
      {
         fprintf( stderr, "Could not find device: %s\n", device_spec.identifier );
         exit( 1 );
      }

      if( !SUCCESS( unicap_open( &cameras[i].handle, &device ) ) )
      {
         fprintf( stderr, "Failed to open device: %s\n", device.identifier );
         exit( 1 );
      }

      unicap_void_format( &format_spec );
      format_spec.fourcc = cameras[i].fourcc;
      format_spec.size.width = cameras[i].format_size.width;
      format_spec.size.height = cameras[i].format_size.height;
      if( !SUCCESS( unicap_enumerate_formats( cameras[i].handle, &format_spec, &format, 0 ) ) )
      {
         fprintf( stderr, "Could not find format!\n" );
         exit( 1 );
      }

      format.buffer_type = UNICAP_BUFFER_TYPE_SYSTEM;
      format.size.width = cameras[i].format_size.width;
      format.size.height = cameras[i].format_size.height;

      if( !SUCCESS( unicap_set_format( cameras[i].handle, &format ) ) )
      {
         fprintf( stderr, "Failed to set format: %s\n", format.identifier );
         exit( 1 );
      }

      // Read back format
      if( !SUCCESS( unicap_get_format( cameras[i].handle, &format ) ) )
      {
         fprintf( stderr, "Failed to get format\n" );
         exit( 1 );
      }

      // Set up the conversion target buffer and point the IplImage at
      // the same memory, so converted frames appear in the image
      // without an extra copy
      unicap_copy_format( &cameras[i].buffer.format, &format );
      cameras[i].buffer.format.fourcc = TARGET_FOURCC;
      cameras[i].buffer.format.bpp = TARGET_BPP;
      cameras[i].buffer.format.buffer_size = cameras[i].buffer.buffer_size =
         cameras[i].buffer.format.size.width * cameras[i].buffer.format.size.height * cameras[i].buffer.format.bpp / 8;
      cameras[i].buffer.data = malloc( cameras[i].buffer.format.buffer_size );
      cameras[i].image = cvCreateImage( cvSize( format.size.width, format.size.height ), 8, cameras[i].buffer.format.bpp / 8 );
      cameras[i].image->imageData = cameras[i].buffer.data;
      unicap_register_callback( cameras[i].handle, UNICAP_EVENT_NEW_FRAME, (unicap_callback_t) new_frame_cb, (void*)&cameras[i] );

      for( j = 0; j < cameras[i].property_count; j++ )
      {
         unicap_property_t property, property_spec;
         unicap_void_property( &property_spec );
         strcpy( property_spec.identifier, cameras[i].properties[j].identifier );
         unicap_enumerate_properties( cameras[i].handle, &property_spec, &property, 0 );

         property.flags = cameras[i].properties[j].flags;
         property.flags_mask = cameras[i].properties[j].flags_mask;

         switch( property.type )
         {
            case UNICAP_PROPERTY_TYPE_RANGE:
            case UNICAP_PROPERTY_TYPE_VALUE_LIST:
               property.value = cameras[i].properties[j].value;
               break;

            case UNICAP_PROPERTY_TYPE_MENU:
               strcpy( property.menu_item, cameras[i].properties[j].menu_item );
               break;
         }

         unicap_set_property( cameras[i].handle, &property );
      }

      cameras[i].cascade = (CvHaarClassifierCascade*)cvLoad( CASCADE_NAME, 0, 0, 0 );
      if( !cameras[i].cascade )
      {
         fprintf( stderr, "Failed to load cascade!\n" );
         exit( 1 );
      }
      cameras[i].storage = cvCreateMemStorage(0);
      cvNamedWindow( cameras[i].window, 1 );

      if( !SUCCESS( unicap_start_capture( cameras[i].handle ) ) )
      {
         fprintf( stderr, "Failed to start capture!\n" );
         exit( 1 );
      }
   }

   // While this loop runs, the callback for each camera will be
   // called in its own thread.
   while( quit < dev_count )
   {
      usleep( 10000 );
   }

   for( i = 0; i < dev_count; i++ )
   {
      unicap_stop_capture( cameras[i].handle );
      unicap_close( cameras[i].handle );
   }

   cvDestroyAllWindows();

   return( 0 );
}
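
/*
** Build sketch (the pkg-config module names are assumptions; adjust
** to the installed OpenCV 1.x, unicap/ucil and GLib development
** packages):
**
**   gcc face_detect.c -o face_detect \
**       `pkg-config --cflags --libs opencv libunicap libucil glib-2.0 gthread-2.0`
*/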