/*
 * Air Harp:
 * Capture video frames and perform background subtraction to
 * isolate moving objects (people).  Monitor various regions
 * of the camera's visual field for activity, playing notes or
 * modifying MIDI channels based on active areas.
 */

/*
 * The original header names were lost in formatting; the ten includes
 * below are a reconstruction of what this code needs on IRIX (GL, the
 * Video Library, and the digital-media MIDI/UST libraries).
 */
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <sys/types.h>
#include <sys/resource.h>
#include <gl/gl.h>
#include <gl/device.h>
#include <dmedia/dmedia.h>
#include <dmedia/vl.h>
#include <dmedia/midi.h>

/* difference image, indexed [row][column] */
struct vPixel {
    char r;
    char g;
    char b;
} vFrame[320][240];

struct bound {
    int t;
    int b;
    int l;      /* note: holds the *larger* x bound; scans run from r up to l */
    int r;
    int count;
} region[4];

class camaraframe {
public:
    VLServer       svr;
    VLPath         path;
    VLNode         src, drn;
    VLControlValue val;
    VLBuffer       buffer;
    VLInfoPtr      info;
    char          *dataPtr;
    int            xsize;
    int            ysize;
    int            c, have_frame;
    long           win;

    camaraframe() {
        printf("initing cameraframe\n");
        // GLXconfig t1 = {GLX_NORMAL, GLX_RGB, NULL};
#if 0
        if (!GLXlink(d.display, GLXgetconfig(d.display, d.screen_num, &t1)))
            exit(0);
#endif
        // if (printf("f%df\n", GLXwinset(d.display, d.win))) printf("couldn't\n");
        printf("passed winset\n");
        foreground();

        /* Connect to the daemon */
        if (!(svr = vlOpenVideo("")))
            exit(0);

        /* Set up a drain node in memory */
        drn = vlGetNode(svr, VL_DRN, VL_MEM, VL_ANY);

        /* Set up a source node on any video source */
        src = vlGetNode(svr, VL_SRC, VL_VIDEO, VL_ANY);

        /* Create a path using the first device that will support it */
        path = vlCreatePath(svr, VL_ANY, src, drn);

        /* Set up the hardware for and define the usage of the path */
        if ((vlSetupPaths(svr, (VLPathList)&path, 1, VL_SHARE, VL_SHARE)) < 0)
            exit(0);

        /* Set the packing to RGB */
        val.intVal = VL_PACKING_RGB_8;
        vlSetControl(svr, path, drn, VL_PACKING, &val);

        /* Set screen size to a fraction of normal */
        val.fractVal.numerator = 1;
        val.fractVal.denominator = 4;
        vlSetControl(svr, path, drn, VL_ZOOM, &val);

        /* Get the video size */
        vlGetControl(svr, path, drn, VL_SIZE, &val);
        xsize = val.xyVal.x;
        ysize = val.xyVal.y;

        /* Set up and open a GL window to display the data */
        prefsize(xsize, ysize);
        win = winopen("Filter Window");
        RGBmode();
        pixmode(PM_TTOB, 1);
        gconfig();

        /* Create and register a buffer for 4 frames */
        buffer = vlCreateBuffer(svr, path, drn, 4);
        if (buffer == NULL)
            exit(0);
        vlRegisterBuffer(svr, path, drn, buffer);

        /* Begin the data transfer */
        if (vlBeginTransfer(svr, path, 0, NULL))
            exit(0);

        have_frame = 0;
        printf("done initing cameraframe\n");
    }

    void display() {
        if (have_frame == 0)
            return;
        lrectwrite(0, 0, xsize - 1, ysize - 1, (ulong *)dataPtr);
    }

    void getFrame() {
        if (have_frame == 1)
            putFrame();
        do {
            info = vlGetNextValid(svr, buffer);
        } while (!info);
        dataPtr = (char *)vlGetActiveRegion(svr, buffer, info);
        have_frame = 1;
    }

    void putFrame() {
        if (have_frame)
            vlPutFree(svr, buffer);
        have_frame = 0;
    }

    ~camaraframe() {
        vlEndTransfer(svr, path);

        /* Cleanup before exiting */
        vlDeregisterBuffer(svr, path, drn, buffer);
        vlDestroyBuffer(svr, buffer);
        vlDestroyPath(svr, path);
        vlCloseVideo(svr);
    }
};
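/*
 * Typical use of camaraframe (a sketch, not part of the original
 * source; names match the class above):
 *
 *     camaraframe cam;   // constructor opens the video path, starts the
 *                        // transfer, and opens a GL window sized to match
 *     cam.getFrame();    // spin until VL delivers a frame; dataPtr then
 *                        // points at xsize*ysize 4-byte ARGB pixels
 *     cam.display();     // blit dataPtr to the window with lrectwrite()
 *     cam.putFrame();    // hand the ring-buffer entry back to VL
 *
 * getFrame() recycles any frame still held, so a caller can simply call
 * getFrame() once per iteration, as main() does below.
 */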
int main(int argc, char **argv)
{
    char *temp;
    char *display, *displayPtr, *displayEnd;
    char *currentPtr;
    char *background, *backgroundPtr;
    size_t cam1datasize;
    camaraframe cam1;        /* this opens its own window */
    float test;
    int i, j, k;             /* loop counters */
    char *intfcname;         /* Name of the interface to send to */
    MDport port;             /* Port connected to named interface */
    int num_intfcs;          /* Number of configured interfaces */
    MDevent event;           /* A MIDI event structure */
    int channel = 0;         /* MIDI channel to play on */
    unsigned long long now;  /* UST of current time */
    long long stamp = 0;     /* The timestamp for the next event */
    int threshold = 30;      /* pixel value must be larger than 30 to "count" */
    int vol[16];             /* array to hold volumes */
    int mango[16];           /* channel/region mapping */
    int change;

    /* setup video window size */
    cam1datasize = cam1.xsize * cam1.ysize * 4;
    setpriority(PRIO_PROCESS, 0, -20);
    printf("XY size %d %d\n", cam1.xsize, cam1.ysize);

    /* setup video memory; cam1datasize is already a byte count
     * (4 bytes per ARGB pixel), so allocate it directly */
    display = (char *)calloc(cam1datasize, sizeof(char));
    if (display == NULL) {
        fprintf(stderr, "Unable to alloc display\n");
        exit(0);
    }
    displayEnd = display + cam1datasize;

    /* Initialize the MIDI library. */
    num_intfcs = mdInit();
    if (num_intfcs == 0) {
        fprintf(stderr, "No MIDI interfaces configured.\n");
        exit(1);
    }

    /* Open the default (internal) MIDI interface */
    intfcname = NULL;
    if ((port = mdOpenOutPort(intfcname)) == NULL) {
        fprintf(stderr, "Cannot open default MIDI interface for output.\n");
        exit(1);
    }

    /* Reckon time in ticks relative to the current time. */
    mdSetStampMode(port, MD_RELATIVETICKS);

    /* We now establish the correspondence between real time (measured
     * in UST) and ticks.  We do this by figuring out what time it is
     * now and telling the system that the tick origin is now. */
    dmGetUST(&now);
    mdSetStartPoint(port, (long long)now, 0);

    /* Make the duration of one tick 30 milliseconds: with one tick per
     * division, a tempo of 30,000 microseconds per division gives 30 ms
     * per tick (microseconds are the unit taken by mdSetTempo). */
    mdSetDivision(port, 1);
    mdSetTempo(port, 30000);

    /* send instrument type */
    event.stamp = stamp;
    stamp++;
    event.msg[0] = MD_PROGRAMCHANGE | channel;
    event.msg[1] = 9;   /* glockenspiel - instrument #10 (numbering starts at 0) */
    mdSend(port, &event, 1);

    /* initialize all starting volumes to 0 */
    for (i = 0; i < 16; i++) {
        vol[i] = 0;
    }

    /* setup mapping for mango song: mango[channel] names the screen
     * region (0-3) that drives that MIDI channel's volume */
    mango[0]  = 0;   /* sax */
    mango[1]  = 1;   /* bass */
    mango[2]  = 0;
    mango[3]  = 0;
    mango[4]  = 0;   /* steel drums */
    mango[5]  = 3;   /* marimba */
    mango[6]  = 2;   /* Nylon Guitar */
    mango[7]  = 2;   /* drawbar organ */
    mango[8]  = 0;
    mango[9]  = 3;   /* percussion */
    mango[10] = 0;   /* seashore */
    mango[11] = 0;
    mango[12] = 0;
    mango[13] = 0;
    mango[14] = 0;
    mango[15] = 0;

    /* setup region bounds for use later: four windows centered on the
     * horizontal midline, evenly spaced across the frame (the four
     * blocks of the original collapsed into one loop; remember that
     * 'l' gets the larger x bound and 'r' the smaller) */
    int ycenter = cam1.ysize / 2;
    int xpos = cam1.xsize / 4;
    int xcenter = xpos / 2;
    for (i = 0; i < 4; i++) {
        region[i].t = ycenter + (cam1.ysize / 10);
        region[i].b = ycenter - (cam1.ysize / 10);
        region[i].l = xcenter + (cam1.xsize / 10);
        region[i].r = xcenter - (cam1.xsize / 10);
        xcenter += xpos;
    }

    /* prepare to enter backsub loop: keep the first frame as the
     * background; clearing have_frame stops getFrame() from
     * releasing its buffer entry on the next call */
    cam1.getFrame();
    background = cam1.dataPtr;
    cam1.have_frame = 0;
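    /*
     * Loop arithmetic (derived from the settings above): stamp advances
     * by one tick per iteration and a tick lasts 30 ms, so as scheduled
     * the volume steps of +10 per tick take a channel from 0 to 100 in
     * 10 ticks (~0.3 s of "attack"), while steps of -4 decay from 100
     * to 0 in 25 ticks (~0.75 s of "release").  A region counts as
     * active when more than 10 of its pixels changed by more than
     * `threshold` averaged across R, G, and B.
     */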
    while (1) {
        cam1.getFrame();
        currentPtr = cam1.dataPtr;
        backgroundPtr = background;
        displayPtr = display;
        i = 0;
        j = 0;

        currentPtr++;   /* skip alpha channel --> ARGB data packing */
        backgroundPtr++;
        displayPtr++;

        while (displayPtr < displayEnd) {
            /* do the RGB background subtraction */
            *(displayPtr) = (char)abs(*(backgroundPtr) - *(currentPtr));
            vFrame[i][j].r = *(displayPtr);
            currentPtr++;
            backgroundPtr++;
            displayPtr++;

            *(displayPtr) = (char)abs(*(backgroundPtr) - *(currentPtr));
            vFrame[i][j].g = *(displayPtr);
            currentPtr++;
            backgroundPtr++;
            displayPtr++;

            *(displayPtr) = (char)abs(*(backgroundPtr) - *(currentPtr));
            vFrame[i][j].b = *(displayPtr);
            currentPtr += 2;   /* skip alpha channel */
            backgroundPtr += 2;
            displayPtr += 2;

            j++;
            if (j == cam1.xsize) {
                j = 0;
                i++;
            }
        } /* end while */

        /* Dump current video frame to screen */
        lrectwrite(0, 0, cam1.xsize - 1, cam1.ysize - 1, (ulong *)display);

        /* Analyze video for motion:
         * split the screen into four discrete square regions and count
         * the number of "active" pixels in each region. */
        for (i = 0; i < 4; i++) {
            region[i].count = 0;
            for (j = region[i].b; j <= region[i].t; j++) {
                for (k = region[i].r; k <= region[i].l; k++) {
                    test = (vFrame[j][k].r + vFrame[j][k].g + vFrame[j][k].b) / 3.0;
                    if (test > threshold) {
                        region[i].count += 1;
                    }
                } /* end for */
            } /* end for */
        } /* end for */

        /* decrement all channel volumes (by a small amount);
         * increment channel volumes according to the region mapping
         * (by a larger amount) */
        for (i = 0; i < 16; i++) {
            change = 0;
            if (region[(mango[i])].count > 10) {
                if (vol[i] < 100) {
                    vol[i] = vol[i] + 10;
                    change = 1;
                    if (vol[i] > 100)
                        vol[i] = 100;
                }
            } else {
                if (vol[i] > 0) {
                    vol[i] = vol[i] - 4;
                    change = 1;
                    if (vol[i] < 0)
                        vol[i] = 0;
                }
            }
            if (change == 1) {
                event.stamp = stamp;
                event.msg[0] = MD_CONTROLCHANGE | i;
                event.msg[1] = MD_CHANNELVOLUME;
                event.msg[2] = vol[i];
                mdSend(port, &event, 1);
            }
        } /* end for */
        stamp++;

        /* update video frames - swap the pointers so the frame just
         * captured becomes the background for the next iteration */
        temp = cam1.dataPtr;
        cam1.dataPtr = background;
        background = temp;
    } /* end while */

    return 0;
} /* end main */
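/*
 * Build note (an assumption, not from the original source; the file
 * name is hypothetical): as C++ using GL, VL, and the digital-media
 * libraries, an IRIX build would look something like:
 *
 *     CC airharp.c++ -o airharp -lgl -lvl -lmd -ldmedia
 */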