#include "libcam_ffmpeg.h" #include #include //#include #include #include #include #include #include #include #include #include #include #include #include #include SDL_Surface* screen; double get_current_time(void); double get_current_time(void){ struct timespec buf; clock_gettime(CLOCK_REALTIME, &buf); return (((double)buf.tv_sec)*1.0) + (((double)buf.tv_nsec)/1.0e9); } void displayFrame(SDL_Overlay* bmp, AVFrame* pFrame, int h, int w, AVPicture pict, struct SwsContext* encoderSwsContext,SDL_Rect rect); void displayFrame(SDL_Overlay* bmp, AVFrame* pFrame, int h, int w, AVPicture pict, struct SwsContext* encoderSwsContext,SDL_Rect rect){ SDL_LockYUVOverlay(bmp); sws_scale(encoderSwsContext, pFrame->data, pFrame->linesize, 0, h, pict.data, pict.linesize); SDL_UnlockYUVOverlay(bmp); SDL_DisplayYUVOverlay(bmp, &rect); } void RestoreStartingValues(cameraHandler* ch, camera_attributes** ca_vector,int* ca_number,int* ca_initial_values, int enabled_attributes); void RestoreStartingValues(cameraHandler* ch, camera_attributes** ca_vector,int* ca_number,int* ca_initial_values, int enabled_attributes){ int i; for(i = 0; i < enabled_attributes; i++){ ca_vector[i]->currentValue = ca_initial_values[i]; cameraHandlerSetProperty(ch,ca_number[i],ca_vector[i]->currentValue); fprintf(stderr,"Attribute: %s Value: %d\n",ca_vector[i]->attribute_name,ca_vector[i]->currentValue); } } void BrightnessContrastTest( cameraHandler* ch, int bNumSteps, int bMin, int bMax, int cNumSteps, int cMin, int cMax, ffmpegFrame* tempBuffer ); void BrightnessContrastTest( cameraHandler* ch, int bNumSteps, int bMin, int bMax, int cNumSteps, int cMin, int cMax, ffmpegFrame* tempBuffer ){ // camera_attributes* ba,*ca; int bStep,cStep; if(bNumSteps > 1){ bStep = (bMax - bMin)/(bNumSteps -1); } else{ bStep = 0; } if(cNumSteps > 1){ cStep = (cMax - cMin)/(cNumSteps -1); }else{ cStep = 0; } //fprintf(stderr,"BSTEPS %d CSTEPS %d\n",bStep,cStep); int ib,ic; int bv,cv; bv = bMin; for(ib = 0; ib < bNumSteps; ib++){ cv = cMin; if( (bNumSteps != 1) || (cNumSteps != 1) ){ cameraHandlerSetProperty(ch, CH_BRIGHTNESS,bv); } for(ic = 0; ic < cNumSteps; ic++){ if( (bNumSteps != 1) || (cNumSteps != 1) ){ cameraHandlerSetProperty(ch, CH_CONTRAST,cv); } // SDL_Delay(500); //This is to give time to camera refresh all sensors cameraHandlerCapture(ch,tempBuffer); //this is the desired one cameraHandlerCapture(ch,NULL); cv = cv+ cStep; } bv = bv + bStep; } } SDL_Surface* InitScreen(void); SDL_Surface* InitScreen(void){ SDL_Surface* screen; int W[] = {3840,1600,1280,1024,800,640}; int H[] = {1080,1200,1024,768,600,480}; /* Initialize SDL */ if (SDL_Init (SDL_INIT_VIDEO) < 0) { fprintf (stderr, "Couldn't initialize SDL: %s\n", SDL_GetError ()); exit (1); } atexit (SDL_Quit); int i; int depth; int video_found = 0; for(depth = 16; depth <=32; depth+=8){ for(i = 0; i < 5; i++){ /* Set video mode */ screen = SDL_SetVideoMode (W[i], H[i], depth, SDL_SWSURFACE | SDL_DOUBLEBUF|SDL_FULLSCREEN); if (screen == NULL) { fprintf (stderr, "Couldn't set %dx%dx%d video mode: %s\n",W[i],H[i],depth,SDL_GetError ()); } else{ fprintf(stderr,"Video mode set: %dx%dx%d\n",W[i],H[i],depth); video_found = 1; break; } } if(video_found) break; } if(screen != NULL) SDL_WM_SetCaption ("SDL MultiMedia Application", NULL); return screen; } static void draw_light (SDL_Surface* screen,int num_cols, int num_rows, int col, int row) { // static int direction = 0; // static int value = 0; // static int which = 0; SDL_Rect rect; Uint32 color; /* Create a "black" background */ color = SDL_MapRGB 
static void draw_light (SDL_Surface* screen, int num_cols, int num_rows, int col, int row)
{
    // static int direction = 0;
    // static int value = 0;
    // static int which = 0;
    SDL_Rect rect;
    Uint32 color;

    /* Create a "black" background */
    color = SDL_MapRGB(screen->format, 0, 0, 0);
    SDL_FillRect(screen, NULL, color);

    /* Draw a layer with white color */
    color = SDL_MapRGB(screen->format, 255, 255, 255);
    if(num_cols > 1){
        rect.w = (screen->w/num_cols)*1.5;
        rect.x = col*(screen->w - rect.w)/(num_cols - 1);
    } else {
        rect.w = screen->w;
        rect.x = 0;
    }
    if(num_rows > 1){
        rect.h = (screen->h/num_rows)*1.5;
        rect.y = row*(screen->h - rect.h)/(num_rows - 1);
    } else {
        rect.h = screen->h;
        rect.y = 0;
    }
    SDL_FillRect(screen, &rect, color);

    /* Make sure everything is displayed on screen */
    SDL_Flip(screen);
    /* Don't run too fast */
    SDL_Delay(1);
}

/* Parse an integer command-line parameter; returns 0 on failure, testParam otherwise. */
int TestIntParam(char* param, char* name, int* n, int testParam);
int TestIntParam(char* param, char* name, int* n, int testParam)
{
    if(sscanf(param, "%d", n) != 1){
        fprintf(stderr, "Invalid value for %s - %s\n", name, param);
        return 0;
    }
    return testParam;
}

int main(int argc, char *argv[])
{
    int totalFrames;
    int enableVideo;
    char *dest_dir;
    SDL_Surface* screen;
    int ScreenCols;
    int ScreenRows;
    int bMin, bMax, bNumSteps;
    int cMin, cMax, cNumSteps;

    if(argc != 12){
        fprintf(stderr, "Program Usage: cam_record <device> <dest_dir> <cols> <rows> "
                        "<bMin> <bMax> <bNumSteps> <cMin> <cMax> <cNumSteps> <enableVideo>\n");
        return 0;
    }

    int testParams = 1;
    char* deviceName = argv[1];
    dest_dir = argv[2];
    fprintf(stderr, "Checking params\n");
    testParams = TestIntParam(argv[3], "Cols", &ScreenCols, testParams);
    testParams = TestIntParam(argv[4], "Rows", &ScreenRows, testParams);
    testParams = TestIntParam(argv[5], "bMin", &bMin, testParams);
    testParams = TestIntParam(argv[6], "bMax", &bMax, testParams);
    testParams = TestIntParam(argv[7], "bNumSteps", &bNumSteps, testParams);
    testParams = TestIntParam(argv[8], "cMin", &cMin, testParams);
    testParams = TestIntParam(argv[9], "cMax", &cMax, testParams);
    testParams = TestIntParam(argv[10], "cNumSteps", &cNumSteps, testParams);
    testParams = TestIntParam(argv[11], "enableVideo", &enableVideo, testParams);
    if(testParams == 0) return 1;

    fprintf(stderr, "***************Program Configuration************************\n");
    if(!enableVideo) {
        fprintf(stderr, "Video Lighting Disabled\n");
        ScreenCols = ScreenRows = 1;
    }
    totalFrames = ScreenCols*ScreenRows*bNumSteps*cNumSteps;
    fprintf(stderr, "Input Device: %s\n", deviceName);
    fprintf(stderr, "Output Dir: %s\n", dest_dir);
    fprintf(stderr, "Cols:%d Rows:%d\n", ScreenCols, ScreenRows);
    fprintf(stderr, "Brightness Min:%d Max:%d Number of Steps:%d\n", bMin, bMax, bNumSteps);
    fprintf(stderr, "Contrast Min:%d Max:%d Number of Steps:%d\n", cMin, cMax, cNumSteps);
    fprintf(stderr, "Total Frames:%d\n", totalFrames);
    fprintf(stderr, "************************************************************\n");

    cameraHandler* ch;
    initFFMPEG();  //Do all those damn things needed to start a video capture
    fprintf(stderr, "Initializing Capture Device\n");
    //if(InitCaptureDevice("/dev/video0",&pFormatCtx,&pCodecCtx,&pCodec,&iformat,&formatParams,&videoStream) < 0){
    ch = createCameraHandler(deviceName, totalFrames);
    if(ch == NULL){
        fprintf(stderr, "Capture Device Setup failure!\n");
        return -1;
    }
    fprintf(stderr, "Capture Device Setup successful!\n");

    if(enableVideo){
        fprintf(stderr, "Warming capture device...\n");
        int h, w;
        h = cameraHandlerGetProperty(ch, CH_FRAME_HEIGHT);
        w = cameraHandlerGetProperty(ch, CH_FRAME_WIDTH);
        /* SDL_SetVideoMode needs the video subsystem; SDL_Init is a no-op if it is already up. */
        if (SDL_Init(SDL_INIT_VIDEO) < 0)
            fprintf(stderr, "Couldn't initialize SDL: %s\n", SDL_GetError());
        atexit(SDL_Quit);
        screen = SDL_SetVideoMode(w, h, 0, 0);
        if(screen == NULL){
            fprintf(stderr, "Video Show test failed!\n");
        } else {
            SDL_Overlay* bmp;
            AVPicture pict;
            AVFrame* pFrame;
            struct SwsContext* encoderSwsContext;
            SDL_Rect rect;
            int fmt;
            ffmpegFrame* frm = createFrame(ch);
            fmt = cameraHandlerGetProperty(ch, CH_FRAME_FMT);
            bmp = SDL_CreateYUVOverlay(w, h, SDL_YV12_OVERLAY, screen);
            rect.x = 0;
            rect.y = 0;
            rect.w = w;
            rect.h = h;
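            /*
             * Build a software scaler from the camera's native pixel format to planar
             * YUV420 and point the AVPicture planes at the SDL overlay. SDL's YV12
             * overlay stores the V plane before the U plane, which is why the [1] and
             * [2] indices are swapped below.
             */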
            encoderSwsContext = sws_getContext(w, h, fmt, w, h, PIX_FMT_YUV420P,
                                               SWS_BICUBIC, NULL, NULL, NULL);
            fprintf(stderr, "Video mode %dx%d\n", w, h);
            pict.data[0] = bmp->pixels[0];
            pict.data[1] = bmp->pixels[2];
            pict.data[2] = bmp->pixels[1];
            pict.linesize[0] = bmp->pitches[0];
            pict.linesize[1] = bmp->pitches[2];
            pict.linesize[2] = bmp->pitches[1];
            pFrame = (AVFrame*) getFramePointer(frm);

            /* Live preview until a key is pressed or the window is closed. */
            int done = 0;
            while((cameraHandlerCapture(ch, frm) != 0) && !done){
                displayFrame(bmp, pFrame, h, w, pict, encoderSwsContext, rect);
                SDL_Event event;
                while (SDL_PollEvent(&event)){
                    switch (event.type) {
                        case SDL_QUIT:
                            done = 1;
                            break;
                        case SDL_KEYDOWN:
                            done = 1;
                            break;
                        default:
                            break;
                    }
                }
            }
        }
    }

    if(enableVideo){
        screen = InitScreen();
        SDL_Delay(50);
        fprintf(stderr, "Initializing video mode\n");
        if(screen == NULL){
            fprintf(stderr, "No valid display modes found!\n");
            exit(1);
        }
    }

    // Allocate video frame buffer packets got from video stream
    ffmpegFrame* tempBuffer = createFrame(ch);

    fprintf(stderr, "Calibrating\n");
    int warm_frames = ScreenCols*ScreenRows;
    int i;
    for (i = 0; i < warm_frames; i++){
        if(enableVideo){
            int row, col;
            row = i/ScreenCols;
            col = i%ScreenCols;
            draw_light(screen, ScreenCols, ScreenRows, col, row);  /* draw_light takes (col,row) */
            cameraHandlerCapture(ch, tempBuffer);
        }
    }
    SDL_Delay(500);

    fprintf(stderr, "Starting Capture\n");
    double startCaptureTime = get_current_time();
    for(i = 0; i < ScreenCols*ScreenRows; i++){
        // Get one frame
        int row, col;
        row = i/ScreenCols;
        col = i%ScreenCols;
        //IplImage* frame = cvQueryFrame( capture );
        if(enableVideo){
            draw_light(screen, ScreenCols, ScreenRows, col, row);
        }
        // SDL_Delay(1000);
        BrightnessContrastTest(ch, bNumSteps, bMin, bMax, cNumSteps, cMin, cMax, tempBuffer);
    }

    //Show useful(?) data
    double stopCaptureTime = get_current_time();
    double fps = totalFrames/(stopCaptureTime - startCaptureTime);
    fprintf(stderr, "\nTEST Capture - %lf FPS\n", fps);
    fprintf(stderr, "\nFinished Capture\n");
    //fprintf(stderr,"%d corrupted packets discarded\n",failed);
    SDL_Quit();

    //Save all captured frames, reproducing the light/brightness/contrast order used during capture
    int bv, cv, ib, ic, bStep, cStep;
    if(bNumSteps > 1){
        bStep = (bMax - bMin)/(bNumSteps - 1);
    } else {
        bStep = 0;
    }
    if(cNumSteps > 1){
        cStep = (cMax - cMin)/(cNumSteps - 1);
    } else {
        cStep = 0;
    }

    i = 0;
    int nlight;
    fprintf(stderr, "Saving Frames, please wait!\n");
    for(nlight = 0; nlight < ScreenCols*ScreenRows; nlight++){
        bv = bMin;  /* brightness restarts from bMin for every lighting position */
        for(ib = 0; ib < bNumSteps; ib++){
            cv = cMin;
            for(ic = 0; ic < cNumSteps; ic++){
                char tempName[500];
                ffmpegFrame* f = cameraHandlerGetCapturedFrame(ch, i);
                sprintf(tempName, "%sL%03d_B%03d_C%03d_", dest_dir, nlight, bv, cv);
                cameraHandlerSaveFrame(ch, f, tempName, i);
                cameraHandlerSaveYUVFrame(ch, f, tempName, i);
                //fprintf(stderr,"Saving frame %d with B%03d C%03d\n",nlight,bv,cv);
                i++;
                cv = cv + cStep;
            }
            bv = bv + bStep;
        }
        if(nlight == 0) fprintf(stderr, "Light ");
        fprintf(stderr, "%d..", nlight);
    }
    fprintf(stderr, "OK\n");

    //Free things
    cameraHandlerRelease(ch);  //free the remaining structures
    fprintf(stderr, "OK\n");

    //Byeeee
    return 0;
}
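/*
 * Build and usage sketch. These are assumptions, not taken from the project:
 * the source file names, library list and the device/output paths below are
 * examples only and depend on the project layout and FFmpeg/SDL versions.
 *
 *   gcc -o cam_record cam_record.c libcam_ffmpeg.c $(sdl-config --cflags --libs) \
 *       -lavcodec -lavformat -lavutil -lswscale -lrt
 *
 *   ./cam_record /dev/video0 ./captures/ 3 3 0 255 4 0 255 4 1
 *
 * The example invocation uses a 3x3 lighting grid, sweeps brightness and contrast
 * from 0 to 255 in 4 steps each, and enables the on-screen lighting pattern (last
 * argument). Note that <dest_dir> is used as a raw filename prefix, so it should
 * end with a slash.
 */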