The-Artvertiser.git / artvertiser / artvertiser.cpp
1 /*
2 * Copyright 2008, 2009, 2010 Julian Oliver <julian@julianoliver.com> and
3 * Damian Stewart <damian@frey.co.nz>.
5 * This program is free software: you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License as published by the Free
7 * Software Foundation, either version 3 of the License, or (at your option)
8 * any later version.
10 * This program is distributed in the hope that it will be useful, but WITHOUT
11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
13 * more details.
15 * You should have received a copy of the GNU General Public License along with
16 * this program. If not, see <http://www.gnu.org/licenses/>.
18 * This code builds upon BazAR, in particular 'multigl.cpp'. It has been
19 * modified to support texture and video-texture mapping to an OpenGL plane over
20 * the ROI. The ROI in the model image is now read in from a file generated by
21 * the training process. Pose estimation stabilisation, augmentation fades,
22 * fonts, mouse input hooks, augmentation over archival video and other bits
23 * and pieces have also been added.
25 * I've fixed a bug in BazAR's planar_object_recognizer::build_with_cache where
26 * corner values for the ROI were only being set immediately after training, not
27 * on plain init.
29 * Usage:
31 * There are three ways to use Artvertiser.
33 * With video substitution of the ROI:
35 * ./artvertiser -m <model file> -a <avi file>
37 * With video substitution of the ROI and capture from an AVI file
39 * ./artvertiser -m <model file> -a <avi file> -b <avi file>
41 * With image substitution of the ROI and capture from a v4l device
43 * ./artvertiser -m <model file>
45 * See defines below for setting capture window size and V4L device index
49 #include "multigrab.h"
51 // read from arduino
52 #include <stdio.h> /* Standard input/output definitions */
53 #include <stdlib.h>
54 #include <stdint.h> /* Standard types */
55 #include <string.h> /* String function definitions */
56 #include <unistd.h> /* UNIX standard function definitions */
57 #include <errno.h> /* Error number definitions */
58 #include <fcntl.h> /* File control definitions */
61 #include <termios.h> /* POSIX terminal control definitions */
63 #include <iostream>
64 #include <sstream> // for conv int->str
65 #include <vector>
66 #include <opencv/cv.h>
67 #include <opencv/highgui.h> // same <opencv/...> prefix as cv.h above
68 #include <map>
71 #include <time.h>
73 #ifdef HAVE_CONFIG_H
74 #include <config.h>
75 #endif
77 #include <calib/camera.h>
79 #ifdef __APPLE__
80 #define HAVE_APPLE_OPENGL_FRAMEWORK
81 #endif
82 #ifdef HAVE_APPLE_OPENGL_FRAMEWORK
83 #include <GLUT/glut.h>
84 #else
85 #include <GL/glut.h>
86 #endif
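// NOTE: hard-coded absolute FreeType path; normally the freetype2 include
// directory would be added to the compiler's include path instead.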
88 #include "/usr/include/freetype2/freetype/config/ftconfig.h"
89 #include <FTGL/ftgl.h>
91 #include "FProfiler/FProfiler.h"
93 // framerate counter
94 #include "framerate.h"
96 #include <list>
97 using namespace std;
99 // matrix tracker
100 #include "MatrixTracker/MatrixTracker.h"
102 #define IsRGB(s) ((s[0] == 'R') && (s[1] == 'G') && (s[2] == 'B'))
103 #define IsBGR(s) ((s[0] == 'B') && (s[1] == 'G') && (s[2] == 'R'))
105 #ifndef GL_CLAMP_TO_BORDER
106 #define GL_CLAMP_TO_BORDER 0x812D
107 #endif
108 #define GL_MIRROR_CLAMP_EXT 0x8742
110 #define DEFAULT_WIDTH 640
111 #define DEFAULT_HEIGHT 480
112 #define DEFAULT_V4LDEVICE 0
114 #define NUMARTVERTS 5
116 // buttons via arduino
117 // button_state is bitmapped so as to handle multiple button presses at once
118 char button_state = 0;
119 bool button_state_changed = false;
120 const char BUTTON_RED = 0x01;
121 const char BUTTON_GREEN = 0x02;
122 const char BUTTON_BLUE = 0x04;
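// e.g. button_state == (BUTTON_RED|BUTTON_GREEN) means red and green are held
// down together; individual buttons are tested with (button_state & BUTTON_x).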
123 // serial comms
124 int serialport_init(const char* serialport, int baud);
125 int serialport_read_until(int fd, char* buf, char until);
126 bool serial_thread_should_exit = false;
127 bool serial_thread_is_running = false;
128 pthread_t serial_thread;
129 void startSerialThread();
130 void shutdownSerialThread();
131 void* serialThreadFunc( void* );
132 // running on binoculars?
133 bool running_on_binoculars = false;
134 bool no_fullscreen = false;
137 // if tracking is lost we hold the last pose for SECONDS_LOST_TRACK seconds, then fade the augmentation out over SECONDS_LOST_FADE seconds
138 static const float SECONDS_LOST_TRACK = 0.5f;
139 static const float SECONDS_LOST_FADE = 1.0f;
140 static const float MAX_FADE_SHOW = 0.9f;
141 static const float MAX_FADE_NORMAL = 1.0f;
143 static const int DEFAULT_CAPTURE_FPS = 20;
145 //#define WIDTH 320
146 //#define HEIGHT 240
148 MultiGrab *multi=0;
149 CamCalibration *calib=0;
150 CvPoint projPts[4];
151 IplTexture *raw_frame_texture=0;
152 FTime raw_frame_timestamp;
153 IplTexture *tex=0;
154 IplImage *image = 0;
155 CvCapture *capture = 0;
156 CvCapture *avi_capture = 0;
157 IplImage *avi_image = 0;
158 IplImage *avi_frame = 0;
159 //IplImage *model_image = 0;
160 IplImage *this_frame = 0;
161 IplImage *last_frame = 0;
162 IplImage *diff= 0;
163 IplImage *bit_frame= 0;
165 int v4l_device = DEFAULT_V4LDEVICE;
166 int video_width = DEFAULT_WIDTH;
167 int video_height = DEFAULT_HEIGHT;
168 int detect_width = DEFAULT_WIDTH;
169 int detect_height = DEFAULT_HEIGHT;
170 int desired_capture_fps = DEFAULT_CAPTURE_FPS;
172 // load some images. hard-coded for now until the path parsing is put together.
173 IplImage *image1 = cvLoadImage("artvert1.png");
174 IplImage *image2 = cvLoadImage("artvert2.png");
175 IplImage *image3 = cvLoadImage("artvert3.png");
176 IplImage *image4 = cvLoadImage("artvert4.png");
177 IplImage *image5 = cvLoadImage("artvert5.png");
178 IplImage *fallback_artvert_image = cvLoadImage("artvert1.png");
180 // matrix tracker
181 MatrixTracker matrix_tracker;
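// matrix_tracker accumulates the detected object-to-world poses (addPose /
// addPoseKalman) and lets the draw code query a smoothed, interpolated pose
// for any frame timestamp (getInterpolatedPose / getInterpolatedPoseKalman).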
183 // define a container struct for each artvert
184 struct artvert_struct
186 const char *artvert;
187 IplImage *image;
188 const char *date;
189 const char *author;
190 const char *advert;
191 const char *street;
194 typedef vector<artvert_struct> artverts_list;
195 artverts_list artverts(5);
197 // container typedef for images (currently unused)
198 typedef vector<IplImage> imgVec;
200 bool frame_ok=false;
201 bool cache_light=false;
202 bool dynamic_light=false;
203 bool sphere_object=false;
204 bool avi_play=false;
205 bool avi_play_init=false;
206 bool lbutton_down = false;
207 bool label = false;
209 bool track_kalman = false;
210 bool delay_video = true;
211 // how many frames to delay the video
212 static const int VIDEO_DELAY_FRAMES=7;
214 double a_proj[3][4];
215 double old_a_proj[3][4];
216 float fade = 0.0;
217 FTime last_frame_caught_time;
218 FTime frame_timer;
219 float draw_fps;
220 int difference = 0;
221 int have_proj = 0;
222 int nb_light_measures=0;
223 int geom_calib_nb_homography;
224 int current_cam = 0;
225 int avi_init = 0;
226 int augment = 1;
227 int cnt=0;
229 vector<int> roi_vec;
230 CvPoint2D32f *c1 = new CvPoint2D32f[4];
231 vector<int> artvert_roi_vec;
233 char *image_path;
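// An Artvert bundles one model (advert) image with its replacement content --
// either a still image or a movie -- plus some descriptive metadata.
// artvert_list (below) holds every artvert the program knows about.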
235 class Artvert
237 public:
238 Artvert() {
239 artvert_image=0;
240 model_file="model.bmp";
241 artvert_image_file="artvert1.png";
242 artvert_is_movie= false;
243 artist = "unknown artist";
244 advert = "unknown advert";
245 name = "unnamed artvert";
246 avi_capture = NULL;
247 avi_image = NULL;
248 avi_play_init = false;
250 ~Artvert()
252 if ( artvert_image )
253 cvReleaseImage( &artvert_image );
254 if ( avi_capture )
255 cvReleaseCapture( &avi_capture );
256 if ( avi_image )
257 cvReleaseImage( &avi_image );
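// returns the image to composite over the ROI: for movie artverts this is the
// next frame of the AVI (rewinding when the movie ends), otherwise the lazily
// loaded still image; fallback_artvert_image is returned if loading fails.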
260 IplImage* getArtvertImage()
262 if ( artvert_is_movie )
265 if ( !avi_capture )
267 avi_capture = cvCaptureFromAVI( artvert_movie_file.c_str() );
268 avi_play_init = false;
271 IplImage *avi_frame = 0;
272 avi_frame = cvQueryFrame( avi_capture );
273 if ( avi_frame == 0 )
275 if ( avi_play_init )
277 // we know the avi is good, so: rewind!
278 cvSetCaptureProperty( avi_capture, CV_CAP_PROP_POS_FRAMES, 0 );
279 // try again
280 avi_frame = cvQueryFrame( avi_capture );
281 if ( avi_frame == 0 )
282 return fallback_artvert_image;
284 else
285 return fallback_artvert_image;
287 if ( avi_image == 0 )
288 avi_image = cvCreateImage( cvGetSize(avi_frame), avi_frame->depth, avi_frame->nChannels );
289 cvCopy( avi_frame, avi_image );
290 avi_image->origin = avi_frame->origin;
291 GLenum format = IsBGR(avi_image->channelSeq) ? GL_BGR_EXT : GL_RGB; // 3-channel frames: GL_RGB rather than GL_RGBA
293 if (!avi_play_init)
295 glGenTextures(1, &imageID);
296 glBindTexture(GL_TEXTURE_2D, imageID);
297 glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
298 avi_play_init=true;
300 return avi_image;
302 else
304 if ( !artvert_image )
306 printf("loading artvert image '%s'\n", artvert_image_file.c_str() );
307 artvert_image = cvLoadImage( artvert_image_file.c_str() );
309 if ( !artvert_image )
311 fprintf(stderr, "couldn't load artvert image '%s'\n", artvert_image_file.c_str() );
312 artvert_image = fallback_artvert_image;
314 return artvert_image;
318 string model_file;
319 string artvert_image_file;
320 string artist;
321 string advert;
322 string name;
323 bool artvert_is_movie;
324 string artvert_movie_file;
325 private:
326 CvCapture* avi_capture;
327 IplImage* avi_image;
328 bool avi_play_init;
329 GLuint imageID;
331 IplImage* artvert_image;
334 vector< Artvert > artvert_list;
335 bool new_artvert_switching_in_progress = false;
336 int current_artvert_index=-1;
337 bool new_artvert_requested = false;
338 int new_artvert_requested_index = 0;
339 vector< bool > model_file_needs_training;
340 #include "ofxXmlSettings/ofxXmlSettings.h"
342 // detection thread
343 pthread_t detection_thread;
344 double detection_fps = 0.0;
345 static void shutdownDetectionThread();
346 static void startDetectionThread( int thread_priority = 0 /* only takes effect if root */ );
347 static void* detectionThreadFunc( void* _data );
348 bool detection_thread_should_exit = false;
349 bool detection_thread_running = false;
352 static void start();
353 static void geomCalibStart(bool cache);
355 // initialise a couple of fonts.
356 CvFont font, fontbold;
358 // Gl format for texturing
359 GLenum format;
360 GLuint imageID;
362 // ftgl font setup
363 static FTFont *ftglFont;
365 // interface
366 bool show_status = false;
367 bool show_profile_results = false;
368 string status_string = "";
370 // menu
371 // hide after 5s
372 #define MENU_HIDE_TIME 5.0f
373 bool menu_show = false;
374 FTime menu_timer;
375 bool menu_is_showing = false;
376 void updateMenu();
377 void drawMenu();
379 /* use this to read paths from the file system
381 string getExtension(const string &file)
383 string::size_type dot = file.rfind('.');
384 string lcpath = file;
385 string suffix;
386 transform(lcpath.begin(), lcpath.end(), lcpath.begin(), tolower);
387 if(dot != string::npos)
389 suffix = lcpath.substr(dot + 1);
391 return suffix;
397 string getSettingsString()
399 planar_object_recognizer &detector(multi->cams[current_cam]->detector);
400 static char detector_settings_string[2048];
401 sprintf( detector_settings_string, "1.ransac dist %4.2f 2.iter %i detected points %i match count %i,\n"
402 "3.refine %6.4f 4.score %6.4f 5.best_support thresh %2i 6.tau %2i\n"
403 "smoothing: 7.position %5.3f 8.position_z %5.3f \n frames back: 9.raw %2i 0.returned %2i",
404 detector.ransac_dist_threshold_ui,
405 detector.max_ransac_iterations_ui,
406 detector.detected_point_number,
407 detector.match_number,
408 detector.non_linear_refine_threshold_ui,
409 detector.match_score_threshold_ui,
410 detector.best_support_thresh_ui,
411 detector.point_detector_tau_ui,
412 matrix_tracker.getPositionSmoothing(),
413 matrix_tracker.getPositionZSmoothing(),
414 matrix_tracker.getFramesBackRaw(),
415 matrix_tracker.getFramesBackReturned() );
417 return detector_settings_string;
421 std::string date(int now)
423 time_t rawtime;
424 struct tm *timeinfo;
425 char tBuffer[80];
426 time ( &rawtime );
427 timeinfo = localtime ( &rawtime );
428 strftime (tBuffer,80,"%I:%M:%S:%p, %d %b %Y",timeinfo);
429 string timeStr;
430 stringstream _timeStr;
431 _timeStr << tBuffer;
432 timeStr = _timeStr.str();
433 return timeStr;
437 // mouse input using GLUT.
438 void entry(int state)
440 if (state == GLUT_ENTERED)
441 cout << "Mouse Entered" << endl;
442 else
443 cout << "Mouse Left" << endl;
446 void mouse(int button, int state, int x, int y)
448 if (button == GLUT_RIGHT_BUTTON)
450 if (state == GLUT_DOWN)
452 label = true;
455 else
457 label = false;
460 else if (button == GLUT_LEFT_BUTTON)
462 if (state == GLUT_DOWN)
464 lbutton_down = true;
465 if (cnt >= NUMARTVERTS-1)
467 cnt = 0;
469 else
471 cnt ++;
474 else
475 lbutton_down = false;
477 cout << "we are on image " << cnt << endl;
480 // text drawing function
481 static void drawText(IplImage *img, const char *text, CvPoint point, CvFont *font, CvScalar colour, double size)
483 cvInitFont( font, CV_FONT_HERSHEY_DUPLEX, size, size, 0, 1, CV_AA);
484 //cvInitFont( font, CV_FONT_HERSHEY_PLAIN, size, size, 0, 1, CV_AA);
485 cvPutText(img, text, point, font, colour);
488 // read in ROI coords from txt file into vector.
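// A .roi file is a plain text list of whitespace- or comma-separated integers
// giving the four ROI corners, e.g. (illustrative values):
//   100 120   400 120   400 300   100 300
// loadOrTrain() below expects eight values: x1 y1 x2 y2 x3 y3 x4 y4.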
489 static vector<int> readROI(const char *filename)
491 cout << filename << endl;
492 string l;
493 ifstream roi(filename);
494 vector<int> v;
495 int coord;
496 char s[256]; // large enough for a full line of coordinates
497 char *s1;
498 int lines = 0;
500 if (roi.is_open())
502 while (!roi.eof())
504 getline(roi, l);
505 strncpy(s, l.c_str(), sizeof(s)-1); s[sizeof(s)-1] = '\0';
506 s1 = strtok(s, " ,"); // accept space- and comma-separated values, like the strtok call below
508 while (s1 != NULL)
510 //roi_vec.push_back(atoi(s1));
511 v.push_back(atoi(s1));
512 s1 = strtok(NULL, " ,");
515 roi.close();
517 else
519 cout << "roi file not found" << endl;
521 return v;
524 //! GLUT callback on window size change
525 static void reshape(int width, int height)
527 //GLfloat h = (GLfloat) height / (GLfloat) width;
528 int winWidth = video_width;
529 int winHeight = video_height;
530 glViewport(0,0,winWidth, winHeight);
531 glutPostRedisplay();
534 //! Print a command line help and exit.
535 static void usage(const char *s)
537 cerr << "usage:\n" << s
538 << " [-m <model image>] [-m <model image>] ... \n "
539 " [-ml <model images file .xml>] [-r] [-t] [-g] [-a <path>] [-l] [-vd <num>] [-vs <width> <height>]\n"
540 " [-ds <width> <height>] [-fps <fps>] [-binoc [-nofullscreen]]\n\n"
541 //" -a <path> specify path to AVI (instead of v4l device)\n"
542 " -b <path> specify path to AVI (instead of v4l device), ignores -vs\n"
543 " -m specify model image (may be used multiple times)\n"
544 " -ml <path> load model images from <path> (xml) (respects additional -m paths)\n"
545 " -r do not load any data\n"
546 " -t train a new classifier\n"
547 " -g recompute geometric calibration\n"
548 " -a <path> load an AVI movie as an artvert\n"
549 " -i <path> load an image as an artvert\n"
550 " -l rebuild irradiance map from scratch\n"
551 " -vd <num> V4L video device number (0-n)\n"
552 " -vs <width> <height> video width and height (default 640x480)\n"
553 " -ds <width> <height> frame size at which to run the detector (default to video width/height)\n"
554 " -fps <fps> desired fps at which to run the image capture\n"
555 " -binoc run as if operating on binoculars (necessary for osx training)\n"
556 " -nofullscreen don't try to run fullscreen in -binoc mode\n\n";
557 exit(1);
562 void exit_handler()
564 printf("in exit_handler\n");
565 // shutdown detection thread
566 if ( detection_thread_running )
568 printf("stopping detection\n");
569 shutdownDetectionThread();
571 // shutdown serial
572 if ( serial_thread_is_running )
574 printf("stopping serial\n");
575 shutdownSerialThread();
577 // shutdown binocular training
578 if ( multi && multi->model.isInteractiveTrainBinocularsRunning() )
580 printf("stopping interactive train binoculars\n");
581 multi->model.abortInteractiveTrainBinoculars();
584 // shutdown capture
585 if ( multi )
587 printf("stopping multithread capture\n");
590 multi->cams[0]->shutdownMultiThreadCapture();
592 // delete cameras
593 if ( multi )
595 printf("deleteing multi\n");
596 delete multi;
600 int serialport_init(const char* serialport, int baud)
602 struct termios toptions;
603 int fd;
605 //fprintf(stderr,"init_serialport: opening port %s @ %d bps\n",
606 // serialport,baud);
608 fd = open(serialport, O_RDWR | O_NOCTTY | O_NDELAY);
609 if (fd == -1) {
610 perror("init_serialport: Unable to open port ");
611 return -1;
614 if (tcgetattr(fd, &toptions) < 0) {
615 perror("init_serialport: Couldn't get term attributes");
616 return -1;
618 speed_t brate = baud; // lets you override the switch below if needed
619 switch(baud) {
620 case 4800: brate=B4800; break;
621 case 9600: brate=B9600; break;
622 #ifdef B14400
623 case 14400: brate=B14400; break;
624 #endif
625 case 19200: brate=B19200; break;
626 #ifdef B28800
627 case 28800: brate=B28800; break;
628 #endif
629 case 38400: brate=B38400; break;
630 case 57600: brate=B57600; break;
631 case 115200: brate=B115200; break;
633 cfsetispeed(&toptions, brate);
634 cfsetospeed(&toptions, brate);
636 // 8N1
637 toptions.c_cflag &= ~PARENB;
638 toptions.c_cflag &= ~CSTOPB;
639 toptions.c_cflag &= ~CSIZE;
640 toptions.c_cflag |= CS8;
641 // no flow control
642 toptions.c_cflag &= ~CRTSCTS;
644 toptions.c_cflag |= CREAD | CLOCAL; // turn on READ & ignore ctrl lines
645 toptions.c_iflag &= ~(IXON | IXOFF | IXANY); // turn off s/w flow ctrl
647 toptions.c_lflag &= ~(ICANON | ECHO | ECHOE | ISIG); // make raw
648 toptions.c_oflag &= ~OPOST; // make raw
650 // see: http://unixwiz.net/techtips/termios-vmin-vtime.html
651 toptions.c_cc[VMIN] = 0;
652 toptions.c_cc[VTIME] = 20;
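// VMIN=0 with VTIME=20: read() returns as soon as any byte is available,
// otherwise it waits up to 2 seconds and returns 0 (VTIME is in tenths of a second).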
654 if( tcsetattr(fd, TCSANOW, &toptions) < 0) {
655 perror("init_serialport: Couldn't set term attributes");
656 return -1;
659 return fd;
663 // arduino serial port read
664 int serialport_read_until(int fd, char* buf, char until)
666 char b[1];
667 int i=0;
668 // ~1000 ms timeout, counted down in microsecond steps to match the usleep below
669 int timeout = 1000*1000;
670 do {
671 int n = read(fd, b, 1); // read a char at a time
672 if( n==0||n==-1 ) {
673 timeout -= 100;
674 usleep( 100 ); // wait 100 usec try again
675 continue;
677 buf[i] = b[0]; i++;
678 } while( b[0] != until && timeout > 0 );
680 if ( timeout<=0 )
681 fprintf(stderr, "serialport_read_until timed out\n");
683 buf[i] = 0; // null terminate the string
684 return 0;
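// loadOrTrain: switch the active artvert. (Re)loads or trains the detector
// cache for the requested model image if it differs from the current one, then
// reads the matching <model>.roi and <model>.artvertroi corner files, falling
// back to the .roi corners when no .artvertroi exists.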
688 bool loadOrTrain( int new_index )
690 // fetch data
691 if ( new_index < 0 || new_index >= artvert_list.size() )
693 fprintf(stderr,"loadOrTrain: invalid index %i (artvert_list has %i members)\n", new_index, (int)artvert_list.size() );
695 return false;
698 // switching model..
699 new_artvert_switching_in_progress = true;
700 bool wants_training = model_file_needs_training[new_index];
701 string model_file = artvert_list[new_index].model_file;
702 // do we need to switch, or can we use the same model file?
703 if ( ( current_artvert_index < 0 || current_artvert_index >= artvert_list.size() ) ||
704 model_file != artvert_list[current_artvert_index].model_file )
706 // load
707 bool trained = multi->loadOrTrainCache( wants_training, model_file.c_str(), running_on_binoculars );
708 if ( !trained )
710 // fail
711 current_artvert_index = -1;
712 new_artvert_switching_in_progress = false;
713 return false;
717 // copy char model_file before munging with strcat
718 char s[1024];
719 strcpy (s, model_file.c_str());
720 strcat(s, ".roi");
721 roi_vec = readROI(s);
723 strcpy( s, model_file.c_str() );
724 strcat(s, ".artvertroi");
725 artvert_roi_vec = readROI(s);
726 if ( artvert_roi_vec.empty() )
728 // use roi_vec
729 artvert_roi_vec.insert( artvert_roi_vec.begin(), roi_vec.begin(), roi_vec.end() );
732 // load model_image for use with diffing, later
733 // model_image = cvLoadImage(model_file.c_str());
735 c1[0].x = roi_vec[0];
736 c1[0].y = roi_vec[1];
737 c1[1].x = roi_vec[2];
738 c1[1].y = roi_vec[3];
739 c1[2].x = roi_vec[4];
740 c1[2].y = roi_vec[5];
741 c1[3].x = roi_vec[6];
742 c1[3].y = roi_vec[7];
745 // update current index
746 current_artvert_index = new_index;
748 // no longer switching
749 new_artvert_switching_in_progress = false;
751 return true;
757 /*!\brief Initialize everything
759 * - Parse the command line
760 * - Initialize all the cameras
761 * - Load or interactively build a model, with its classifier.
762 * - Set the GLUT callbacks for geometric calibration or, if already done, for photometric calibration.
765 static bool init( int argc, char** argv )
767 // register exit function
768 atexit( &exit_handler );
770 // dump opencv version
771 printf("built with opencv version %s\n", CV_VERSION );
773 // more from before init should be moved here
774 bool redo_lighting=false;
775 bool redo_training = false;
776 bool redo_geom = false;
777 char *avi_bg_path=(char*)"";
778 bool got_ds = false;
779 bool got_fps = false;
780 bool video_source_is_avi = false;
781 char *model_file_list_file = NULL;
783 // parse command line
784 for (int i=1; i<argc; i++)
786 if (strcmp(argv[i], "-m") ==0)
788 if (i==argc-1)
789 usage(argv[0]);
790 Artvert a;
791 a.model_file = argv[i+1];
792 a.advert = "cmdline "+a.model_file;
793 // store
794 artvert_list.push_back( a );
795 printf(" -m: adding model image '%s'\n", argv[i+1] );
796 i++;
798 else if ( strcmp(argv[i], "-binoc")==0 )
800 running_on_binoculars = true;
801 printf(" -binoc: running on binoculars\n");
803 else if ( strcmp(argv[i], "-nofullscreen")==0 )
805 no_fullscreen = true;
806 printf(" -nofullscreen: won't go fullscreen\n");
808 else if ( strcmp(argv[i], "-ml" )== 0 )
810 if ( i==argc-1)
811 usage(argv[0]);
812 model_file_list_file = argv[i+1];
813 printf(" -ml: loading model image list from '%s'\n", argv[i+1] );
814 i++;
816 else if (strcmp(argv[i], "-r")==0)
818 redo_geom=redo_training=redo_lighting=true;
820 else if (strcmp(argv[i], "-g")==0)
822 redo_geom=true;
824 else if (strcmp(argv[i], "-l")==0)
826 redo_lighting=true;
828 else if (strcmp(argv[i], "-t")==0)
830 redo_training=true;
831 printf( "-t: redoing training\n");
833 else if (strcmp(argv[i], "-a")==0)
835 avi_capture=cvCaptureFromAVI(argv[i+1]);
836 avi_play=true;
842 else if (strcmp(argv[i], "-b")==0)
844 video_source_is_avi = true;
845 avi_bg_path=argv[i+1];
846 printf(" -b: loading from avi '%s'\n", avi_bg_path );
848 else if (strcmp(argv[i], "-i")==0)
850 image_path=argv[i+1];
852 else if ( strcmp(argv[i], "-fps")==0 )
854 desired_capture_fps=atoi(argv[i+1]);
855 got_fps = true;
856 i++;
858 else if (strcmp(argv[i], "-vd")==0)
860 v4l_device=atoi(argv[i+1]);
861 printf(" -vd: using v4l device %i\n", v4l_device);
862 i++;
864 else if (strcmp(argv[i], "-vs")==0)
866 video_width=atoi(argv[i+1]);
867 video_height=atoi(argv[i+2]);
868 if ( !got_ds )
870 // also set detect size (if not already set)
871 detect_width = video_width;
872 detect_height = video_height;
874 printf(" -vs: video size is %ix%i\n", video_width, video_height );
875 if ( video_width == 0 || video_height == 0 )
877 usage(argv[0]);
878 exit(1);
880 i+=2;
882 else if ( strcmp(argv[i], "-ds")==0 )
884 detect_width = atoi(argv[i+1]);
885 detect_height = atoi(argv[i+2]);
886 printf(" -ds: detection frame size is %ix%i\n", detect_width, detect_height );
887 if ( detect_width == 0 || detect_height == 0 )
889 usage(argv[0]);
890 exit(1);
892 got_ds = true;
893 i+=2;
895 else if (argv[i][0]=='-')
897 usage(argv[0]);
903 // read model files from model_file_list_file
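// The -ml file is parsed with ofxXmlSettings and is expected to look roughly
// like this (illustrative):
//   <artverts>
//     <advert>
//       <model_filename>model.bmp</model_filename>
//       <advert>some advert</advert>
//       <artvert>
//         <name>unnamed</name>
//         <artist>unknown artist</artist>
//         <image_filename>artvert1.png</image_filename>
//         <!-- or <movie_filename>artvertmovie1.mp4</movie_filename> -->
//       </artvert>
//     </advert>
//   </artverts>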
904 if ( model_file_list_file != NULL )
906 // try to open
907 ofxXmlSettings data;
908 data.loadFile( model_file_list_file );
910 if ( data.getNumTags( "artverts" ) == 1 )
912 data.pushTag( "artverts" );
913 int num_filenames = data.getNumTags( "advert" );
914 printf(" -ml: opened %s, %i adverts\n", model_file_list_file, num_filenames );
915 for ( int i=0; i<num_filenames; i++ )
917 data.pushTag("advert", i);
918 Artvert a;
919 a.model_file = data.getValue( "model_filename", "model.bmp" );
920 a.advert = data.getValue( "advert", "unknown advert" );
921 int num_artverts = data.getNumTags( "artvert" );
922 printf(" -ml: got advert, model file '%s', advert '%s', %i artverts\n", a.model_file.c_str(), a.advert.c_str(), num_artverts );
923 for ( int j=0; j<num_artverts; j++ )
925 data.pushTag("artvert", j );
926 a.name = data.getValue( "name", "unnamed" );
927 a.artist = data.getValue( "artist", "unknown artist" );
928 if ( data.getNumTags("movie_filename") != 0 )
930 // load a movie
931 a.artvert_is_movie = true;
932 a.artvert_movie_file = data.getValue("movie_filename", "artvertmovie1.mp4" );
934 else
936 // load an image
937 a.artvert_image_file = data.getValue( "image_filename", "artvert1.png" );
939 printf(" %i: %s:%s:%s\n", j, a.name.c_str(), a.artist.c_str(),
940 a.artvert_is_movie ? (a.artvert_movie_file+" (movie)").c_str() : a.artvert_image_file.c_str() );
942 artvert_list.push_back( a );
943 data.popTag();
945 data.popTag();
947 data.popTag();
949 else
951 printf(" -ml: error reading '%s': couldn't find 'artverts' tag\n", model_file_list_file );
955 // check if model file list is empty
956 if ( artvert_list.empty() )
958 // add default
959 Artvert a;
960 artvert_list.push_back( a );
963 // set up training flags
964 for ( int i=0; i<artvert_list.size(); i++ )
965 model_file_needs_training.push_back( redo_training );
967 // check for video size arg if necessary
968 if ( video_source_is_avi )
970 // try to read from video
971 CvCapture* temp_cap = cvCaptureFromAVI(avi_bg_path);
972 video_width = (int)cvGetCaptureProperty( temp_cap, CV_CAP_PROP_FRAME_WIDTH );
973 video_height = (int)cvGetCaptureProperty( temp_cap, CV_CAP_PROP_FRAME_HEIGHT );
974 int video_fps = (int)cvGetCaptureProperty( temp_cap, CV_CAP_PROP_FPS );
975 printf(" -b: read video width/height %i/%i from avi (ignoring -vs)\n", video_width, video_height );
976 if ( !got_ds )
978 detect_width = video_width;
979 detect_height = video_height;
981 if ( !got_fps )
983 desired_capture_fps = video_fps;
985 cvReleaseCapture(&temp_cap);
988 //cout << avi_bg_path << endl;
989 cache_light = !redo_lighting;
991 glutReshapeWindow( video_width, video_height );
993 multi = new MultiGrab();
995 if( multi->init(avi_bg_path, video_width, video_height, v4l_device,
996 detect_width, detect_height, desired_capture_fps ) ==0)
998 cerr <<"Initialization error.\n";
999 return false;
1003 artvert_struct artvert1 = {"Arrebato, 1980", image1, "Feb, 2009", "Iván Zulueta", "Polo", "Madrid, Spain"};
1004 artvert_struct artvert2 = {"name2", image2, "2008", "simon innings", "Helmut Lang", "Parlance Avenue"};
1005 artvert_struct artvert3 = {"name3", image3, "2008", "simon innings", "Loreal", "Parlance Avenue"};
1006 artvert_struct artvert4 = {"name4", image4, "2008", "simon innings", "Hugo Boss", "Parlance Avenue"};
1007 artvert_struct artvert5 = {"name5", image5, "2008", "simon innings", "Burger King", "Parlance Avenue"};
1009 artverts[0] = artvert1;
1010 artverts[1] = artvert2;
1011 artverts[2] = artvert3;
1012 artverts[3] = artvert4;
1013 artverts[4] = artvert5;
1016 // load geometry
1017 loadOrTrain(0);
1019 // try to load geom cache + start the run loop
1020 geomCalibStart(!redo_geom);
1022 // start detection
1023 startDetectionThread( 1 /* priority, only if running as root */ );
1025 last_frame_caught_time.SetNow();
1026 frame_timer.SetNow();
1028 // start serial
1029 startSerialThread();
1031 printf("init() finished\n");
1032 return true;
1035 /*! The keyboard callback: reacts to '+' and '-' to change the viewed cam, 'q' exits.
1036 * 'd' turns on/off the dynamic lightmap update.
1037 * 'f' goes fullscreen.
1039 static void keyboard(unsigned char c, int x, int y)
1041 char old_button_state = button_state;
1042 const char* filename;
1043 switch (c)
1045 case 'n' :
1046 if (augment == 1)
1047 augment = 0;
1048 else
1049 augment = 1; break;
1050 case '+' :
1051 if (current_cam < multi->cams.size()-1)
1052 current_cam++;
1053 break;
1054 case '-':
1055 if (current_cam >= 1)
1056 current_cam--;
1057 break;
1058 case 'q':
1059 case 27 /*esc*/:
1060 exit(0);
1061 break;
1062 case 'l':
1063 dynamic_light = !dynamic_light;
1064 break;
1065 case 'd':
1066 delay_video = !delay_video;
1067 break;
1068 case 'a':
1069 if (avi_play == true)
1070 avi_play = false;
1071 else
1072 avi_play = true; break;
1073 case 'f':
1074 glutFullScreen();
1075 break;
1076 case 'k':
1077 track_kalman = !track_kalman;
1078 break;
1079 case 'S':
1080 show_status = !show_status;
1081 break;
1082 case 'p':
1083 show_profile_results = true;
1084 break;
1085 case 'P':
1086 FProfiler::Clear();
1087 break;
1088 case 'i':
1089 if (cnt >= NUMARTVERTS-1)
1090 cnt = 0;
1091 else
1092 cnt ++;
1093 cout << "we are on image " << cnt << endl;
1094 break;
1095 case '[':
1096 button_state |= BUTTON_RED;
1097 break;
1098 case ']':
1099 button_state |= BUTTON_GREEN;
1100 break;
1101 case '\\':
1102 button_state |= BUTTON_BLUE;
1103 break;
1105 default:
1106 break;
1109 if ( multi && show_status )
1111 planar_object_recognizer &detector(multi->cams[current_cam]->detector);
1112 bool something = true;
1113 switch (c)
1115 // detector settings
1116 case '1':
1117 detector.ransac_dist_threshold_ui*=1.02f;
1118 break;
1119 case '!':
1120 detector.ransac_dist_threshold_ui/=1.02f;
1121 break;
1122 case '2':
1123 detector.max_ransac_iterations_ui+=10;
1124 break;
1125 case '@':
1126 detector.max_ransac_iterations_ui-=10;
1127 break;
1128 case '3':
1129 detector.non_linear_refine_threshold_ui*=1.02f;
1130 break;
1131 case '#':
1132 detector.non_linear_refine_threshold_ui/=1.02f;
1133 break;
1134 case '4':
1135 detector.match_score_threshold_ui*=1.02f;
1136 break;
1137 case '$':
1138 detector.match_score_threshold_ui/=1.02f;
1139 break;
1140 case '5':
1141 detector.best_support_thresh_ui++;
1142 break;
1143 case '%':
1144 detector.best_support_thresh_ui--;
1145 break;
1146 case '6':
1147 detector.point_detector_tau_ui++;
1148 break;
1149 case '^':
1150 detector.point_detector_tau_ui--;
1151 break;
1152 case '7':
1153 matrix_tracker.increasePositionSmoothing();
1154 break;
1155 case '&':
1156 matrix_tracker.decreasePositionSmoothing();
1157 break;
1158 case '8':
1159 matrix_tracker.increasePositionZSmoothing();
1160 break;
1161 case '*':
1162 matrix_tracker.decreasePositionZSmoothing();
1163 break;
1164 case '9':
1165 matrix_tracker.increaseFramesBackRaw();
1166 break;
1167 case '(':
1168 matrix_tracker.decreaseFramesBackRaw();
1169 break;
1170 case '0':
1171 matrix_tracker.increaseFramesBackReturned();
1172 break;
1173 case ')':
1174 matrix_tracker.decreaseFramesBackReturned();
1175 break;
1178 default:
1179 something = false;
1180 break;
1182 if ( something )
1184 printf("%s\n", getSettingsString().c_str());
1189 glutPostRedisplay();
1191 if ( old_button_state != button_state )
1192 button_state_changed = true;
1195 static void keyboardReleased(unsigned char c, int x, int y)
1197 char old_button_state = button_state;
1198 switch ( c )
1200 case '[':
1201 button_state &= (BUTTON_GREEN | BUTTON_BLUE);
1202 break;
1203 case ']':
1204 button_state &= (BUTTON_RED | BUTTON_BLUE);
1205 break;
1206 case '\\':
1207 button_state &= (BUTTON_GREEN | BUTTON_RED);
1208 break;
1209 default:
1210 break;
1212 if ( old_button_state != button_state )
1213 button_state_changed = true;
1215 static void emptyWindow()
1217 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
1220 int main(int argc, char *argv[])
1222 glutInit(&argc, argv);
1223 glutInitDisplayMode(GLUT_RGB | GLUT_DEPTH | GLUT_DOUBLE);
1224 //glutInitWindowSize(video_width,video_height); // hard set the init window size
1225 //glutInitWindowSize(800,450); // hard set the init window size
1226 glutDisplayFunc(emptyWindow);
1228 bool start_fullscreen = false;
1229 #ifdef __APPLE__
1230 // if on apple, don't ever go fullscreen
1231 #else
1232 // look for -binoc flag on commandline
1233 // also look for -nofullscreen flag
1234 for ( int i=0; i<argc; i++ )
1236 if ( strcmp( argv[i], "-binoc" )== 0 )
1238 // if found, start_fullscreen is true
1239 start_fullscreen = true;
1241 // if found, -nofullscreen cancels fullscreen
1242 if ( strcmp( argv[i], "-nofullscreen" )==0 )
1244 start_fullscreen = false;
1247 #endif
1249 if ( !start_fullscreen )
1251 glutReshapeFunc(reshape);
1252 glutCreateWindow("The Artvertiser 0.4");
1254 glutMouseFunc(mouse);
1255 glutEntryFunc(entry);
1257 if ( start_fullscreen )
1259 glutGameModeString("1024x768:16@60");
1260 glutEnterGameMode();
1261 glutSetCursor(GLUT_CURSOR_NONE);
1264 if (!init(argc,argv)) return -1;
1267 //ftglFont = new FTBufferFont("/usr/share/fonts/truetype/freefont/FreeMono.ttf");
1268 ftglFont = new FTBufferFont("fonts/FreeSans.ttf");
1269 ftglFont->FaceSize(12);
1270 ftglFont->CharMap(ft_encoding_unicode);
1272 //cvDestroyAllWindows();
1273 //cvWaitKey(0);
1275 glutKeyboardFunc(keyboard);
1276 glutKeyboardUpFunc(keyboardReleased);
1277 glutMainLoop();
1278 glutLeaveGameMode();
1279 return 0; /* ANSI C requires main to return int. */
1282 //!\brief Draw a frame contained in an IplTexture object on an OpenGL viewport.
1283 static bool drawBackground(IplTexture *tex)
1285 //printf("draw background\n");
1286 if (!tex || !tex->getIm()) return false;
1287 //printf("drawBackground: drawing frame with timestamp %f\n", raw_frame_timestamp.ToSeconds() );
1289 IplImage *im = tex->getIm();
1290 int w = im->width-1;
1291 int h = im->height-1;
1293 glMatrixMode(GL_PROJECTION);
1294 glLoadIdentity();
1295 glMatrixMode(GL_MODELVIEW);
1296 glLoadIdentity();
1298 glDisable(GL_BLEND);
1299 glDisable(GL_DEPTH_TEST);
1301 tex->loadTexture();
1303 glBegin(GL_QUADS);
1304 glColor4f(1,1,1,1);
1306 glTexCoord2f(tex->u(0), tex->v(0));
1307 glVertex2f(-1, 1);
1308 glTexCoord2f(tex->u(w), tex->v(0));
1309 glVertex2f(1, 1);
1310 glTexCoord2f(tex->u(w), tex->v(h));
1311 glVertex2f(1, -1);
1312 glTexCoord2f(tex->u(0), tex->v(h));
1313 glVertex2f(-1, -1);
1314 glEnd();
1316 tex->disableTexture();
1318 return true;
1321 /*! \brief Draw all the points
1324 static void drawDetectedPoints(int frame_width, int frame_height)
1326 if (!multi) return;
1328 planar_object_recognizer &detector(multi->cams[current_cam]->detector);
1330 glMatrixMode(GL_PROJECTION);
1331 glLoadIdentity();
1332 glOrtho(0, frame_width-1, frame_height-1, 0, -1, 1);
1334 glMatrixMode(GL_MODELVIEW);
1335 glLoadIdentity();
1337 glDisable(GL_BLEND);
1338 glDisable(GL_LIGHTING);
1339 glDisable(GL_DEPTH_TEST);
1342 glPointSize(2);
1343 glBegin(GL_POINTS);
1344 // draw all detected points
1345 glColor4f(0,1,0,1);
1346 for ( int i=0; detector.isReady() && i<detector.detected_point_number; ++i)
1348 keypoint& kp = detector.detected_points[i];
1349 int s = kp.scale;
1350 float x =PyrImage::convCoordf(kp.u, s, 0);
1351 float y =PyrImage::convCoordf(kp.v, s, 0);
1352 glVertex2f(x,y);
1354 // draw matching points red
1355 if ( detector.object_is_detected )
1357 glColor4f( 1, 0, 0, 1 );
1358 for (int i=0; i<detector.match_number; ++i)
1360 image_object_point_match * match = detector.matches+i;
1361 if (match->inlier)
1363 int s = (int)(match->image_point->scale);
1364 float x=PyrImage::convCoordf(match->image_point->u, s, 0);
1365 float y=PyrImage::convCoordf(match->image_point->v, s, 0);
1366 glVertex2f(x,y);
1370 glEnd();
1376 /*! \brief A draw callback during camera calibration
1378 * GLUT calls that function during camera calibration when repainting the
1379 * window is required.
1380 * During geometric calibration, no 3D is known: we just plot 2d points
1381 * where some feature points have been recognized.
1383 static void geomCalibDraw(void)
1385 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
1387 glDisable(GL_LIGHTING);
1388 drawBackground(raw_frame_texture);
1390 if (!multi) return;
1392 IplImage *im = multi->cams[current_cam]->getLastProcessedFrame();
1393 planar_object_recognizer &detector(multi->cams[current_cam]->detector);
1394 if (!im) return;
1396 if (!detector.isReady()) return;
1397 detector.lock();
1399 glMatrixMode(GL_PROJECTION);
1400 glLoadIdentity();
1401 glOrtho(0, im->width-1, im->height-1, 0, -1, 1);
1403 glMatrixMode(GL_MODELVIEW);
1404 glLoadIdentity();
1406 glDisable(GL_BLEND);
1407 glDisable(GL_LIGHTING);
1408 glDisable(GL_DEPTH_TEST);
1410 if (detector.object_is_detected)
1412 glPointSize(2);
1413 glBegin(GL_POINTS);
1414 glColor4f(0,1,0,1);
1415 for (int i=0; i<detector.match_number; ++i)
1417 image_object_point_match * match = detector.matches+i;
1418 if (match->inlier)
1420 int s = (int)(match->image_point->scale);
1421 float x=PyrImage::convCoordf(match->image_point->u, s, 0);
1422 float y=PyrImage::convCoordf(match->image_point->v, s, 0);
1423 glVertex2f(x,y);
1426 glEnd();
1429 detector.unlock();
1431 glutSwapBuffers();
1434 /*!\brief Called when geometric calibration ends. It makes
1435 * sure that the CamAugmentation object is ready to work.
1437 static void geomCalibEnd()
1440 if (!multi->model.augm.LoadOptimalStructureFromFile((char*)"camera_c.txt", (char*)"camera_r_t.txt"))
1442 cout << "failed to load camera calibration.\n";
1443 exit(-1);
1445 glutIdleFunc(0);
1446 //glutDisplayFunc(0);
1447 delete calib;
1448 calib=0;
1451 /*! Called by GLUT during geometric calibration when there's nothing else to do.
1452 * This function grab frames from camera(s), run the 2D detection on every image,
1453 * and keep the result in memory for calibration. When enough homographies have
1454 * been detected, it tries to actually calibrate the cameras.
1456 static void geomCalibIdle(void)
1458 // detect the calibration object in every image
1459 // (this loop could be parallelised)
1460 int nbdet=0;
1461 for (int i=0; i<multi->cams.size(); ++i)
1463 bool dummy = false;
1464 if (multi->cams[i]->detect(dummy, dummy)) nbdet++;
1468 if(!raw_frame_texture) raw_frame_texture = new IplTexture;
1469 IplImage* raw_frame = raw_frame_texture->getImage();
1470 multi->cams[current_cam]->getLastDrawFrame( &raw_frame );
1471 raw_frame_texture->setImage(raw_frame);
1472 //raw_frame_texture->setImage(multi->cams[current_cam]->frame);
1474 if (nbdet>0)
1476 for (int i=0; i<multi->cams.size(); ++i)
1478 if (multi->cams[i]->detector.object_is_detected)
1480 add_detected_homography(i, multi->cams[i]->detector, *calib);
1482 else
1484 calib->AddHomography(i);
1487 geom_calib_nb_homography++;
1490 printf("geom calib: %.2f%%\n", 100.0f*geom_calib_nb_homography/150.0f );
1492 if (geom_calib_nb_homography>=150)
1494 if (calib->Calibrate(
1495 50, // max hom
1496 (multi->cams.size() > 1 ? 1:2), // padding or random
1497 (multi->cams.size() > 1 ? 0:3),
1498 1, // padding ratio 1/2
1501 0.0078125, //alpha
1502 0.9, //beta
1503 0.001953125,//gamma
1504 10, // iter
1505 0.05, //eps
1506 3 //postfilter eps
1509 calib->PrintOptimizedResultsToFile1();
1510 geomCalibEnd();
1511 start();
1512 return;
1515 glutPostRedisplay();
1518 /*!\brief Start geometric calibration. If the calibration can be loaded from disk,
1519 * continue directly with photometric calibration.
1521 static void geomCalibStart(bool cache)
1523 if (cache && multi->model.augm.LoadOptimalStructureFromFile((char*)"camera_c.txt", (char*)"camera_r_t.txt"))
1525 start();
1526 return;
1529 // construct a CamCalibration object and register all the cameras
1530 calib = new CamCalibration();
1532 for (int i=0; i<multi->cams.size(); ++i)
1534 calib->AddCamera(multi->cams[i]->width, multi->cams[i]->height);
1537 geom_calib_nb_homography=0;
1538 glutDisplayFunc(geomCalibDraw);
1539 glutIdleFunc(geomCalibIdle);
1544 static void drawAugmentation()
1547 // we know that im is not NULL already
1548 // IplImage *im = multi->model.image;
1550 //for ( int tracked_or_raw=0; tracked_or_raw<2; tracked_or_raw++ )
1553 // Fetch object -> image, world->image and world -> object matrices
1555 CvMat *world;
1556 /*if ( tracked_or_raw == 1 )
1558 // fetch from model:
1559 world = multi->model.augm.GetObjectToWorld();
1561 else*/
1564 // or fetch interpolated position
1565 world = cvCreateMat( 3, 4, CV_64FC1 );
1567 //printf(". now we want interpolated pose for %f\n", raw_frame_timestamp.ToSeconds() );
1568 if ( track_kalman )
1569 matrix_tracker.getInterpolatedPoseKalman( world,
1570 multi->cams[0]->getFrameIndexForTime( raw_frame_timestamp ) );
1571 else
1572 matrix_tracker.getInterpolatedPose( world, raw_frame_timestamp );
1575 /*// apply a scale factor
1576 float scalef = 1.0f;
1577 for ( int i=0; i<3; i++ )
1578 cvmSet(world, i, i, scalef*cvmGet( world, i, i ));*/
1581 // instead of this:
1582 /*CvMat *proj = multi->model.augm.GetProjectionMatrix(current_cam);
1583 CvMat *old_proj = multi->model.augm.GetProjectionMatrix(current_cam);*/
1584 // we make our own project matrix:
1585 // fetch the pre-projection matrix from model.augm
1586 CvMat* proj = multi->model.augm.GetPreProjectionMatrix(current_cam);
1587 // multiply by the object-to-world matrix
1588 CamCalibration::Mat3x4Mul( proj, world, proj );
1590 Mat3x4 moveObject, rot, obj2World, movedRT_;
1591 moveObject.setTranslate( multi->model.getImageWidth()/2, multi->model.getImageHeight()/2,
1592 -120*3/4);
1593 // apply rotation
1594 rot.setRotate(Vec3(1,0,0),2*M_PI*180.0/360.0);
1595 //rot.setIdentity();
1596 moveObject.mul(rot);
1597 //moveObject.scale
1599 CvMat cvMoveObject = cvMat(3,4,CV_64FC1, moveObject.m);
1600 CvMat movedRT=cvMat(3,4,CV_64FC1,movedRT_.m);
1603 // pose only during movement
1604 //if (pixel_shift >= 200 || !have_proj)
1605 if ( true )
1607 // memcpy or vectorisation speedup?
1608 for( int i = 0; i < 3; i++ )
1610 for( int j = 0; j < 4; j++ )
1612 a_proj[i][j] = cvmGet( proj, i, j );
1613 obj2World.m[i][j] = cvmGet(world, i, j);
1617 have_proj = 1;
1618 memcpy(old_a_proj, a_proj, sizeof(a_proj));
1620 else // copy last known good projection over current
1622 memcpy(a_proj, old_a_proj, sizeof(old_a_proj));
1624 // dump the matrix
1626 printf("found matrix: %8.4f %8.4f %8.4f %8.4f\n"
1627 " %8.4f %8.4f %8.4f %8.4f\n"
1628 " %8.4f %8.4f %8.4f %8.4f\n",
1629 a_proj[0][0], a_proj[0][1], a_proj[0][2], a_proj[0][3],
1630 a_proj[1][0], a_proj[1][1], a_proj[1][2], a_proj[1][3],
1631 a_proj[2][0], a_proj[2][1], a_proj[2][2], a_proj[2][3]
1632 );*/
1635 CamCalibration::Mat3x4Mul( world, &cvMoveObject, &movedRT);
1636 // translate into OpenGL PROJECTION and MODELVIEW matrices
1637 PerspectiveCamera c;
1638 //c.loadTdir(a_proj, multi->cams[0]->frame->width, multi->cams[0]->frame->height);
1639 c.loadTdir(a_proj, multi->cams[0]->detect_width, multi->cams[0]->detect_height );
1640 c.flip();
1641 c.setPlanes(100,1000000); // near/far clip planes
1642 cvReleaseMat(&proj);
1644 // must set the model view after drawing the background.
1645 c.setGlProjection();
1646 c.setGlModelView();
1648 /*! this is the beginning of prototype code for an occlusion mask built
1649 * by comparison of the tracked plane with that of the model image
1651 // create a copy of frame texture and warp to model image perspective
1652 CvPoint2D32f *c2 = new CvPoint2D32f[4];
1653 // update corner points of ROI in pixel space
1654 c2[0].x = cvRound(multi->cams[0]->detector.detected_u_corner1);
1655 c2[0].y = cvRound(multi->cams[0]->detector.detected_v_corner1);
1656 c2[1].x = cvRound(multi->cams[0]->detector.detected_u_corner2);
1657 c2[1].y = cvRound(multi->cams[0]->detector.detected_v_corner2);
1658 c2[2].x = cvRound(multi->cams[0]->detector.detected_u_corner3);
1659 c2[2].y = cvRound(multi->cams[0]->detector.detected_v_corner3);
1660 c2[3].x = cvRound(multi->cams[0]->detector.detected_u_corner4);
1661 c2[3].y = cvRound(multi->cams[0]->detector.detected_v_corner4);
1663 CvMat* mmat = cvCreateMat(3,3, CV_32FC1);
1664 IplImage *warped = cvCreateImage(cvSize(model_image->width, model_image->height), 8, 3);
1665 mmat = cvGetPerspectiveTransform(c2, c1, mmat);
1666 cvWarpPerspective(raw_frame_texture->getIm(), warped, mmat);
1667 cvReleaseMat(&mmat);
1669 // find difference between model image and frame
1670 IplImage *i1=cvCreateImage(cvSize(im->width,im->height),im->depth,1);
1671 IplImage *i2=cvCreateImage(cvSize(im->width,im->height),im->depth,1);
1672 IplImage *diff=cvCreateImage(cvSize(im->width,im->height),im->depth,1);
1673 IplImage *display=cvCreateImage(cvSize(im->width,im->height),im->depth,1);
1675 cvCvtColor(im, i1,CV_BGR2GRAY);
1676 cvCvtColor(warped, i2,CV_BGR2GRAY);
1677 cvAbsDiff(i2,i1,diff);
1678 cvThreshold(diff, display, 35, 255, CV_THRESH_BINARY);
1680 cvReleaseImage(&warped);
1681 cvSaveImage("checkdiff.png", display);
1684 /* circles at corners of ROI. useful for debugging.
1685 cvCircle(raw_frame_texture->getIm(), c1, 10, CV_RGB(255, 0, 0), 2);
1686 cvCircle(raw_frame_texture->getIm(), c2, 10, CV_RGB(255, 0, 0), 2);
1687 cvCircle(raw_frame_texture->getIm(), c3, 10, CV_RGB(255, 0, 0), 2);
1688 cvCircle(raw_frame_texture->getIm(), c4, 10, CV_RGB(255, 0, 0), 2);
1690 tex = new IplTexture;
1691 tex->setImage(raw_frame_texture->getIm());
1692 drawBackground(tex);
1697 #ifndef DEBUG_SHADER
1699 glEnable(GL_DEPTH_TEST);
1700 glEnable(GL_BLEND);
1701 glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
1702 glDisable(GL_CULL_FACE);
1703 // multiply texture colour by surface colour of poly
1704 glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
1705 glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST);
1707 if (avi_play == true)
1709 IplImage *avi_frame = 0;
1710 IplImage *avi_image = 0;
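// note: these locals shadow the file-scope avi_frame/avi_image; the
// cvReleaseImage calls at the end of this function operate on the globals,
// so the avi_image created here is never released.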
1711 avi_frame = cvQueryFrame( avi_capture );
1712 avi_image = cvCreateImage(cvSize(video_width/2, video_height/2), 8, 3);
1713 cvResize(avi_frame, avi_image, 0);
1714 avi_image->origin = avi_frame->origin;
1715 GLenum format = IsBGR(avi_image->channelSeq) ? GL_BGR_EXT : GL_RGB; // 3-channel frames: GL_RGB rather than GL_RGBA
1717 if (!avi_play_init)
1719 glGenTextures(1, &imageID);
1720 glBindTexture(GL_TEXTURE_2D, imageID);
1721 glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
1722 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
1723 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
1724 glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, avi_image->width, avi_image->height, 0, format, GL_UNSIGNED_BYTE, avi_image->imageData);
1725 avi_play_init=true;
1727 else
1729 glBindTexture(GL_TEXTURE_2D, imageID);
1730 glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, avi_image->width, avi_image->height, format, GL_UNSIGNED_BYTE, avi_image->imageData);
1733 else
1735 if ( current_artvert_index >= 0 &&
1736 current_artvert_index < artvert_list.size() )
1738 glBindTexture(GL_TEXTURE_2D, imageID);
1739 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
1740 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
1741 IplImage* image = artvert_list.at(current_artvert_index).getArtvertImage();
1742 GLenum format = IsBGR(image->channelSeq) ? GL_BGR_EXT : GL_RGB; // 3-channel images: GL_RGB rather than GL_RGBA
1743 glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, image->width, image->height, 0, format, GL_UNSIGNED_BYTE, image->imageData);
1747 glEnable(GL_TEXTURE_2D);
1749 glHint(GL_POLYGON_SMOOTH, GL_NICEST);
1750 glEnable(GL_POLYGON_SMOOTH);
1753 #ifndef DEBUG_SHADER
1754 // apply the object transformation matrix
1755 Mat3x4 w2e(c.getWorldToEyeMat());
1756 w2e.mul(moveObject);
1757 c.setWorldToEyeMat(w2e);
1758 c.setGlModelView();
1759 #endif
1761 if (multi->model.map.isReady())
1763 glDisable(GL_LIGHTING);
1764 #ifdef DEBUG_SHADER
1765 multi->model.map.enableShader(current_cam, world);
1766 #else
1767 multi->model.map.enableShader(current_cam, &movedRT);
1768 #endif
1772 glColor4f(1.0, 1.0, 1.0, fade);
1774 glBegin(GL_QUADS);
1775 glTexCoord2f(0, 0);
1776 glVertex3f(artvert_roi_vec[0], artvert_roi_vec[1], 0);
1777 glTexCoord2f(1, 0);
1778 glVertex3f(artvert_roi_vec[2], artvert_roi_vec[3], 0);
1779 glTexCoord2f(1, 1);
1780 glVertex3f(artvert_roi_vec[4], artvert_roi_vec[5], 0);
1781 glTexCoord2f(0, 1);
1782 glVertex3f(artvert_roi_vec[6], artvert_roi_vec[7], 0);
1783 glEnd();
1785 glDisable(GL_TEXTURE_2D);
1787 /*! 'label' is a boolean set by the right mouse button and toggles the
1788 * in-scene artvert label.
1791 if (label)
1793 glBegin(GL_LINE_LOOP);
1794 glColor3f(0.0, 1.0, 0.0);
1795 glVertex3f(roi_vec[0]-10, roi_vec[1]-10, 0);
1796 glVertex3f(roi_vec[2]+10, roi_vec[3]-10, 0);
1797 glVertex3f(roi_vec[4]+10, roi_vec[5]+10, 0);
1798 glVertex3f(roi_vec[6]-10, roi_vec[7]+10, 0);
1799 glVertex3f(roi_vec[0]-10, roi_vec[1]-10, 0);
1800 glEnd();
1802 glTranslatef(roi_vec[2]+12, roi_vec[3], 0);
1803 glRotatef(180, 1.0, 0.0, 0.0);
1804 glRotatef(-45, 0.0, 1.0, 0.0);
1805 glColor4f(0.0, 1.0, 0.0, 1);
1807 glBegin(GL_LINE_LOOP);
1808 glVertex3f(0, 10, -.2);
1809 glVertex3f(150, 10, -.2);
1810 glVertex3f(150, -60, -.2);
1811 glVertex3f(0, -60, -.2);
1812 glEnd();
1814 glColor4f(0.0, 1.0, 0.0, .5);
1816 glBegin(GL_QUADS);
1817 glVertex3f(0, 10, -.2);
1818 glVertex3f(150, 10, -.2);
1819 glVertex3f(150, -60, -.2);
1820 glVertex3f(0, -60, -.2);
1821 glEnd();
1823 // render the text in the label
1824 glColor4f(1.0, 1.0, 1.0, 1);
1825 ftglFont->Render(artverts[cnt].artvert);
1826 glTranslatef(0, -12, 0);
1827 ftglFont->Render(artverts[cnt].date);
1828 glTranslatef(0, -12, 0);
1829 ftglFont->Render(artverts[cnt].author);
1830 glTranslatef(0, -12, 0);
1831 ftglFont->Render(artverts[cnt].advert);
1832 glTranslatef(0, -12, 0);
1833 ftglFont->Render(artverts[cnt].street);
1836 #else
1837 #endif
1838 //glEnd();
1840 /*cvReleaseMat(&world);
1842 CvScalar c =cvGet2D(multi->model.image, multi->model.image->height/2, multi->model.image->width/2);
1843 glColor3d(c.val[2], c.val[1], c.val[0]);
1845 if (multi->model.map.isReady())
1846 multi->model.map.disableShader();
1847 else
1848 glDisable(GL_LIGHTING);*/
1850 if ( avi_play == true )
1852 cvReleaseImage(&avi_image);
1853 cvReleaseImage(&avi_frame);
1859 //#define DEBUG_SHADER
1860 /*! The paint callback during photometric calibration and augmentation. In this
1861 * case, we have access to 3D data. Thus, we can augment the calibration target
1862 * with cool stuff.
1864 static void draw()
1866 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
1867 glDisable(GL_LIGHTING);
1869 if ( multi->model.isInteractiveTrainBinocularsRunning() )
1870 multi->model.interactiveTrainBinocularsDraw();
1871 else
1874 drawBackground(raw_frame_texture);
1876 string cnt_str;
1877 stringstream cnt_out;
1878 cnt_out << cnt;
1879 cnt_str = cnt_out.str();
1881 //IplImage *pre_mask = cvCreateImage(cvSize(WIDTH, HEIGHT), 8, 3);
1883 if (!multi)
1884 return;
1886 int now = glutGet(GLUT_ELAPSED_TIME);
1887 /* elapsed time
1888 cout << now/1000.0 << endl;
1892 // fade
1893 double elapsed = frame_timer.Update();
1894 if ( frame_ok )
1896 last_frame_caught_time.SetNow();
1897 // increase fade
1898 if ( fade < (show_status?MAX_FADE_SHOW:MAX_FADE_NORMAL) )
1900 fade += (1.0f/SECONDS_LOST_FADE)*elapsed;
1902 else
1903 fade = show_status?MAX_FADE_SHOW:MAX_FADE_NORMAL;
1904 //printf("frame_ok: fade %f\n", fade );
1906 else
1908 FTime now;
1909 now.SetNow();
1910 double elapsed_since_last_caught = (now-last_frame_caught_time).ToSeconds();
1911 if ( elapsed_since_last_caught > SECONDS_LOST_TRACK )
1913 if ( fade > 0.0f )
1914 fade -= (1.0f/SECONDS_LOST_FADE)*elapsed;
1915 else
1916 fade = 0.0f;
1918 //printf("frame_ok: fade %f, elapsed since last caught %f\n", fade, elapsed_since_last_caught );
1920 //printf("frame %s, lost_count %f -> fade pct %4.2f, fade %4.2f\n", frame_ok?"y":"n", frame_lost_count, fade_pct, fade );
1922 // draw augmentation
1923 if ( fade > 0 && augment == 1)
1925 drawAugmentation();
1928 // calculate fps
1929 draw_fps = (draw_fps*7.0+1.0/elapsed)/8.0f;
1931 glLoadIdentity();
1932 // we need to set up a new projection matrix for the title font.
1933 glMatrixMode(GL_PROJECTION);
1934 glLoadIdentity();
1935 glMatrixMode(GL_MODELVIEW);
1936 glLoadIdentity();
1937 glTranslatef(-.98, 0.9, 0.0);
1938 glScalef(.003, .003, .003);
1939 ftglFont->FaceSize(32);
1940 glColor4f(1.0, 1.0, 1.0, 1);
1941 ftglFont->Render("the artvertiser 0.4");
1942 glTranslatef(0, -(video_height+30), 0);
1943 glColor4f(1.0, 1.0, 1.0, .6);
1944 //ftglFont->FaceSize(16);
1945 //ftglFont->Render(date(now).c_str());
1946 if (frame_ok == 1 and (now/1000)%2== 0)
1948 glTranslatef(video_width-295, video_height+35, 0);
1949 glColor4f(0.0, 1.0, 0.0, .8);
1950 glBegin(GL_TRIANGLES);
1951 glVertex3f(140, 0, 0);
1952 glVertex3f(150, 10, 0);
1953 glVertex3f(140, 20, 0);
1954 glEnd();
1955 glTranslatef(70, 5, 0);
1956 ftglFont->FaceSize(16);
1957 ftglFont->Render("tracking");
1960 // reset the ftgl font size for next pass
1961 ftglFont->FaceSize(12);
1964 if ( show_status )
1966 //printf("draw status\n");
1968 drawDetectedPoints( raw_frame_texture->getIm()->width, raw_frame_texture->getIm()->height );
1970 char detect_fps_string[256];
1971 sprintf(detect_fps_string, "draw fps: %4.2f\ndetection fps: %4.2f", draw_fps, detection_fps );
1972 drawGlutString( detect_fps_string, 1.0f, 0.2f, 0.2f, 0.7, 0.94 );
1974 // now status string
1975 string draw_string = status_string;
1976 if ( multi )
1978 // show detector settings
1979 draw_string += "\n" + getSettingsString();
1981 drawGlutString( draw_string.c_str(), 1.0f, 0.2f, 0.2f, 0.01f, 0.2f );
1984 drawMenu();
1987 glutSwapBuffers();
1988 //cvReleaseImage(&image); // cleanup used image
1989 glFlush();
1994 void startSerialThread()
1996 pthread_attr_t thread_attr;
1997 pthread_attr_init(&thread_attr);
1998 pthread_attr_setdetachstate(&thread_attr, PTHREAD_CREATE_JOINABLE);
1999 // launch the thread
2000 pthread_create( &serial_thread, &thread_attr, serialThreadFunc, NULL );
2001 serial_thread_is_running = true;
2002 pthread_attr_destroy( &thread_attr );
2005 void shutdownSerialThread()
2007 // kill the thread
2008 serial_thread_should_exit = true;
2009 void* ret;
2010 pthread_join( serial_thread, &ret );
2011 serial_thread_is_running = false;
2015 void* serialThreadFunc( void* data )
2017 // arduino vars
2018 int fd = 0;
2019 char serialport[256];
2020 int baudrate = B9600; // default
2021 char buf[256];
2022 char buf2[256];
2023 int rc,n;
2025 fd = serialport_init( "/dev/ttyUSB0", 9600 );
2026 printf("fd said %i, errno %i\n", fd, errno );
2028 while ( !serial_thread_should_exit )
2030 int read = serialport_read_until(fd, buf, '\n');
2031 //printf("read: %s then %s\n",buf2, buf);
2032 if ( (read==0) && strlen( buf ) >= 4 /*includes final \n*/ )
2034 bool button_red = (buf[0]=='1');
2035 bool button_green = (buf[1]=='1');
2036 bool button_blue = (buf[2]=='1');
2037 // printf("buttons: %s %s %s", button_red?"red":" ", button_green?"green":" ", button_blue?"blue":" ");
2038 // bitmapped, to access all 7 press combinations
2039 char new_button_state =
2040 ( button_green ? BUTTON_GREEN : 0 ) |
2041 ( button_blue ? BUTTON_BLUE : 0 ) |
2042 ( button_red ? BUTTON_RED : 0 );
2043 // deal with debounce
2044 if ( new_button_state != button_state )
2046 printf( "serialport read %s -> 0x%x (old was 0x%x)\n",
2047 buf, new_button_state, button_state );
2048 button_state_changed = true;
2049 button_state = new_button_state;
2052 usleep(3*1000);
2055 close(fd);
2058 static void startDetectionThread( int thread_priority )
2060 pthread_attr_t thread_attr;
2061 pthread_attr_init(&thread_attr);
2062 pthread_attr_setdetachstate(&thread_attr, PTHREAD_CREATE_JOINABLE);
2063 // launch the thread
2064 pthread_create( &detection_thread, &thread_attr, detectionThreadFunc, NULL );
2065 if ( thread_priority > 0 )
2067 printf("attempting to set detection thread priority to %i\n", thread_priority );
2068 struct sched_param param;
2069 param.sched_priority = thread_priority;
2071 int res = pthread_setschedparam( detection_thread, SCHED_RR, &param );
2072 if ( res != 0 )
2074 fprintf(stderr,"pthread_setschedparam failed: %s\n",
2075 (res == ENOSYS) ? "ENOSYS" :
2076 (res == EINVAL) ? "EINVAL" :
2077 (res == ENOTSUP) ? "ENOTSUP" :
2078 (res == EPERM) ? "EPERM" :
2079 (res == ESRCH) ? "ESRCH" :
2080 "???"
2084 detection_thread_running = true;
2085 pthread_attr_destroy( &thread_attr );
2088 static void shutdownDetectionThread()
2090 // kill the thread
2091 detection_thread_should_exit = true;
2092 void* ret;
2093 pthread_join( detection_thread, &ret );
2094 detection_thread_running = false;
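// detection thread main loop: repeatedly runs detection on cam 0, pushes each
// successfully detected pose into matrix_tracker (plain or Kalman path), and
// carries out any artvert switch requested from the menu via loadOrTrain,
// presumably so the draw/GL thread never blocks on training or file IO.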
2097 static void* detectionThreadFunc( void* _data )
2100 FTime detection_thread_timer;
2101 detection_thread_timer.SetNow();
2103 while ( !detection_thread_should_exit )
2105 PROFILE_THIS_BLOCK("detection_thread");
2107 if ( new_artvert_requested )
2109 // no longer draw
2110 frame_ok = false;
2111 // go with the loading
2112 loadOrTrain(new_artvert_requested_index);
2113 new_artvert_requested = false;
2116 bool frame_retrieved = false;
2117 bool frame_retrieved_and_ok = multi->cams[0]->detect( frame_retrieved, frame_ok );
2118 if( frame_retrieved )
2120 double elapsed = detection_thread_timer.Update();
2121 detection_fps = 1.0/elapsed; // instantaneous, unsmoothed
2123 if ( !frame_retrieved_and_ok )
2125 PROFILE_THIS_BLOCK("sleep till next");
2126 usleep( 10000 );
2127 continue;
2130 if ( detection_thread_should_exit )
2131 break;
2133 multi->model.augm.Clear();
2134 if (multi->cams[0]->detector.object_is_detected)
2136 add_detected_homography(0, multi->cams[0]->detector, multi->model.augm);
2138 else
2140 multi->model.augm.AddHomography();
2143 frame_ok = multi->model.augm.Accomodate(4, 1e-4);
2145 if (frame_ok)
2147 // fetch surface normal in world coordinates
2148 CvMat *mat = multi->model.augm.GetObjectToWorld();
2149 float normal[3];
2150 for (int j=0; j<3; j++)
2151 normal[j] = cvGet2D(mat, j, 2).val[0];
2153 // continue to track
2154 if ( track_kalman )
2155 matrix_tracker.addPoseKalman( mat, multi->cams[0]->getFrameIndexForTime(
2156 multi->cams[0]->getLastProcessedFrameTimestamp() ) );
2157 else
2158 matrix_tracker.addPose( mat, multi->cams[0]->getLastProcessedFrameTimestamp() );
2160 cvReleaseMat(&mat);
2165 printf("detection thread exiting\n");
2167 pthread_exit(0);
2171 /*! GLUT calls this during photometric calibration or augmentation phase when
2172 * there's nothing else to do. This function does the 2D detection and bundle
2173 * adjusts the 3D pose of the calibration pattern.
2175 static void idle()
2177 if ( running_on_binoculars && !no_fullscreen )
2179 static int fullscreen_timer = 30;
2180 if ( fullscreen_timer > 0 )
2182 fullscreen_timer --;
2183 if( fullscreen_timer <= 0 )
2184 glutFullScreen();
2188 // detection itself now runs in the detection thread (see detectionThreadFunc);
2189 // here we just fetch the most recent frame for drawing and update the UI
2192 if(!raw_frame_texture) raw_frame_texture = new IplTexture;
2194 PROFILE_SECTION_PUSH("getting last frame");
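// when delay_video is set, drawing lags capture by roughly VIDEO_DELAY_FRAMES
// frames: each captured frame is copied into a small ring buffer and the oldest
// entry is displayed, presumably so the (slower) detection results line up
// better with the frame being shown.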
2196 if ( delay_video )
2198 IplImage* captured_frame;
2199 multi->cams[current_cam]->getLastDrawFrame( &captured_frame, &raw_frame_timestamp );
2201 static list< pair<IplImage*, FTime> > frameRingBuffer;
2202 while ( frameRingBuffer.size()<VIDEO_DELAY_FRAMES )
2204 IplImage* first_frame = cvCreateImage( cvGetSize( captured_frame ), captured_frame->depth, captured_frame->nChannels );
2205 cvCopy( captured_frame, first_frame );
2206 frameRingBuffer.push_back( make_pair( first_frame, raw_frame_timestamp ) );
2209 IplImage* ringbuffered = frameRingBuffer.front().first;
2210 cvCopy( captured_frame, ringbuffered );
2211 frameRingBuffer.push_back( make_pair( ringbuffered, raw_frame_timestamp ) );
2213 frameRingBuffer.pop_front();
2215 IplImage* raw_frame = frameRingBuffer.front().first;
2216 raw_frame_timestamp = frameRingBuffer.front().second;
2218 raw_frame_texture->setImage(raw_frame);
2220 else
2222 IplImage* raw_frame = raw_frame_texture->getImage();
2223 multi->cams[current_cam]->getLastDrawFrame( &raw_frame, &raw_frame_timestamp );
2224 raw_frame_texture->setImage(raw_frame);
2227 PROFILE_SECTION_POP();
2230 if ( multi->model.isInteractiveTrainBinocularsRunning() )
2232 bool button_red = button_state & BUTTON_RED;
2233 bool button_green = button_state & BUTTON_GREEN;
2234 bool button_blue = button_state & BUTTON_BLUE;
2235 multi->model.interactiveTrainBinocularsUpdate( raw_frame_texture->getImage(),
2236 button_red, button_green, button_blue );
2238 else
2240 updateMenu();
2241 //doDetection();
2243 glutPostRedisplay();
2245 PROFILE_SECTION_POP();
2247 if ( show_profile_results )
2249 // show profiler output
2250 printf("showing results\n");
2251 FProfiler::Display( FProfiler::SORT_TIME/*SORT_EXECUTION*/ );
2252 show_profile_results = false;
2256 //! Starts photometric calibration.
2257 static void start()
2259 glutIdleFunc(idle);
2260 glutDisplayFunc(draw);
2266 // menu state -- menu_show, menu_timer and menu_is_showing are already defined near the top of the file
2270 bool menu_up = false;
2271 bool menu_down = false;
2275 int menu_index = 0;
2277 void updateMenu()
2279 // update timer
2280 if ( menu_is_showing )
2282 FTime now;
2283 now.SetNow();
2284 if ( (now-menu_timer).ToSeconds() > MENU_HIDE_TIME )
2286 menu_is_showing = false;
2290 if ( !button_state_changed || new_artvert_switching_in_progress )
2291 return;
2293 printf("menu sees new button state: %s %s %s\n",
2294 button_state&BUTTON_RED?"red":" ",
2295 button_state&BUTTON_GREEN?"green":" ",
2296 button_state&BUTTON_BLUE?"blue":" ");
2298 // clear changed flag
2299 button_state_changed = false;
2301 if ( ( button_state == BUTTON_GREEN ) && !menu_is_showing )
2303 menu_is_showing = true;
2304 menu_timer.SetNow();
2305 if ( menu_index >= artvert_list.size() )
2306 menu_index = artvert_list.size()-1;
2307 // done
2308 return;
2311 // only process rest of buttons if menu is showing
2312 if ( !menu_is_showing )
2313 return;
2315 // navigation
2316 if( button_state == BUTTON_BLUE )
2318 menu_index++;
2319 if ( menu_index >= artvert_list.size() )
2320 menu_index = 0;
2321 menu_timer.SetNow();
2323 if ( button_state == BUTTON_RED )
2325 menu_index--;
2326 if ( menu_index < 0 )
2327 menu_index = artvert_list.size()-1;
2328 menu_timer.SetNow();
2331 // accept?
2332 if ( button_state == BUTTON_GREEN )
2335 new_artvert_requested_index = menu_index;
2336 new_artvert_requested = true;
2338 menu_is_showing = false;
2348 void drawMenu()
2350 if ( !menu_is_showing )
2352 // draw switching text?
2353 if ( new_artvert_switching_in_progress )
2355 glMatrixMode(GL_PROJECTION);
2356 glLoadIdentity();
2357 glMatrixMode(GL_MODELVIEW);
2358 glLoadIdentity();
2359 glTranslatef(-.8, 0.65, 0.0);
2360 glScalef(.003, .003, .003);
2361 ftglFont->FaceSize(24);
2362 glColor4f(0.0, 1.0, 0.0, 1);
2364 if ( multi->model.isLearnInProgress() )
2366 // must manually tokenize
2367 char message[2048];
2368 strncpy( message, multi->model.getLearnProgressMessage(), 2048 );
2369 char* ptr = strtok( message,"\n");
2370 while( ptr != NULL)
2372 ftglFont->Render(ptr);
2373 glTranslatef(0, -26, 0 );
2374 ptr = strtok( NULL, "\n" );
2377 else
2378 ftglFont->Render("changing artvert...");
2383 return;
2386 // draw menu header
2388 // draw loop
2389 glMatrixMode(GL_PROJECTION);
2390 glLoadIdentity();
2391 glMatrixMode(GL_MODELVIEW);
2392 glLoadIdentity();
2393 glEnable(GL_BLEND);
2394 glTranslatef(-.85, 0.65, 0.0);
2395 glScalef(.003, .003, .003);
2396 ftglFont->FaceSize(24);
2397 glColor4f(.25, 1.0, 0.0, 1);
2399 ftglFont->Render("Select artvert:");
2400 glTranslatef( 0, -26, 0 );
2402 for ( int i=0; i<artvert_list.size(); i++ )
2404 string advert = artvert_list[i].advert;
2405 string name = artvert_list[i].name;
2406 string artist = artvert_list[i].artist;
2407 string line = string(" ") + advert + " : '" + name + "' by " + artist;
2409 if ( i == menu_index )
2411 glColor4f( 1,0.37,0,1 );
2413 else
2415 glColor4f( .25f, 1.f, 0.0f, 1 );
2417 ftglFont->Render(line.c_str());
2418 glTranslatef(0, -26, 0 );
2427 //EOF