Hello, I am working on the following code to detect whether the palm is open or closed.
It's working quite nicely. What I want to do is: on a wave, enable hand tracking (this part works); on the next wave, stop tracking, which is not achieved. It seems that once a gesture has been captured, the onCompletedGesture event is never triggered again.
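The relevant part, boiled down (a minimal sketch of the intent; the names mirror the full sketch below, where a is the toggle flag and hh holds the id of the hand being tracked):

boolean a = false; // flips on every completed wave
int hh = -1;       // id of the hand currently being tracked

void onCompletedGesture(SimpleOpenNI curContext, int gestureType, PVector pos)
{
  a = !a;
  if (a)
    hh = curContext.startTrackingHand(pos);  // first wave: start tracking
  else
    curContext.stopTrackingHand(hh);         // second wave: should stop tracking, but this branch is never reached
}

The full sketch: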
/* --------------------------------------------------------------------------
* SimpleOpenNI Hands3d Test
* --------------------------------------------------------------------------
* Processing Wrapper for the OpenNI/Kinect 2 library
* http://code.google.com/p/simple-openni
* --------------------------------------------------------------------------
* prog: Max Rheiner / Interaction Design / Zhdk / http://iad.zhdk.ch/
* date: 12/12/2012 (m/d/y)
* ----------------------------------------------------------------------------
* This demo shows how to use the gesture/hand generator.
* It's not the most reliable yet; a two-hands example will follow.
* ----------------------------------------------------------------------------
*/
import java.util.Map;
import java.util.Iterator;
import SimpleOpenNI.*;
SimpleOpenNI context;
//the hand tracking part
boolean handsTrackFlag = false;//if kinect is tracking hand or not
PVector handVec = new PVector();//the latest/most up to date hand point
//ArrayList handVecList = new ArrayList();//the previous points in a list
//int handVecListSize = 30;//the number of previous points to be remembered
int lastGesture = 0;//used to keep track of gestures
PVector handMin = new PVector();
PVector handMax = new PVector();
float handThresh = 100;//max distance from the hand point for a depth point to count as part of the hand (real-world units)
float openThresh = 160;//if the hand's vertical extent exceeds this, the palm is considered open
Map<Integer,ArrayList<PVector>> handPathList = new HashMap<Integer,ArrayList<PVector>>();
void setup()
{
  frameRate(30);
  size(640, 480);
  context = new SimpleOpenNI(this);
  if (context.isInit() == false)
  {
    println("Can't init SimpleOpenNI, maybe the camera is not connected!");
    exit();
    return;
  }
  // enable depthMap generation
  context.enableDepth();
  // mirror the image
  context.setMirror(true);
  // enable hands + gesture generation
  //context.enableGesture();
  context.enableHand();
  context.startGesture(SimpleOpenNI.GESTURE_WAVE);
  //context.startGesture(SimpleOpenNI.GESTURE_CLICK);
  // set how smooth the hand capturing should be
  //context.setSmoothingHands(.5);
}
void draw()
{
  // update the cam
  context.update();
  background(0);
  // set the scene pos
  //translate(width/2, height/2, 0);
  //rotateX(rotX);
  //rotateY(rotY);
  open_close_palm();
}
// -----------------------------------------------------------------
// palm open / close detection
void open_close_palm() {
  if (handsTrackFlag)
  {
    //update hand bounds from the point cloud
    handMin = handVec.copy();
    handMax = handVec.copy();
    // scan the 3d point depth map
    int[] depthMap = context.depthMap();
    int steps = 3; // to speed up the drawing, draw every third point
    int index;
    PVector realWorldPoint;
    for (int y = 0; y < context.depthHeight(); y += steps)
    {
      for (int x = 0; x < context.depthWidth(); x += steps)
      {
        index = x + y * context.depthWidth();
        if (depthMap[index] > 0)
        {
          // draw the projected point
          realWorldPoint = context.depthMapRealWorld()[index];
          if (realWorldPoint.dist(handVec) < handThresh) {
            stroke(255, 0, 0);
            noFill();
            point(realWorldPoint.x, realWorldPoint.y);
            // grow the bounding box of the points that belong to the hand
            if (realWorldPoint.x < handMin.x) handMin.x = realWorldPoint.x;
            if (realWorldPoint.y < handMin.y) handMin.y = realWorldPoint.y;
            //if (realWorldPoint.z < handMin.z) handMin.z = realWorldPoint.z;
            if (realWorldPoint.x > handMax.x) handMax.x = realWorldPoint.x;
            if (realWorldPoint.y > handMax.y) handMax.y = realWorldPoint.y;
            //if (realWorldPoint.z > handMax.z) handMax.z = realWorldPoint.z;
          }
        }
      }
    }
    textSize(30);
    //line(handMin.x, handMin.y, handMax.x, handMax.y);
    float hDist = abs(handMax.y - handMin.y); // vertical extent of the hand //handMin.dist(handMax);
    if (hDist > openThresh) { //println("palm open, dist: " + hDist);
      text("OPEN PALM " + hDist, 10, 50);
    }
    else {
      //println("palm close, dist: " + hDist);
      text("CLOSED PALM " + hDist, 10, 50);
    }
  }
}
int hh = -1;       // id of the hand currently being tracked
boolean a = false; // flipped on every completed wave: true = tracking, false = not tracking
void onNewHand(SimpleOpenNI curContext, int handId, PVector pos)
{
  println("onNewHand - handId: " + handId + ", pos: " + pos);
  //ArrayList<PVector> vecList = new ArrayList<PVector>();
  //vecList.add(pos);
  //handPathList.put(handId,vecList);
println("onCreateHands - handId: " + handId + ", pos: " + pos );
  handsTrackFlag = true;  // from now on, draw() runs the palm open/close check
  handVec = pos;
  hh = handId;            // remember this hand's id so we can stop tracking it later
  //handVecList.clear();
  //handVecList.add(pos);
}
void onTrackedHand(SimpleOpenNI curContext, int handId, PVector pos)
{
  //println("onTrackedHand - handId: " + handId + ", pos: " + pos );
  // from the old version:
  //ArrayList<PVector> vecList = handPathList.get(handId);
  //if(vecList != null)
  //{
  //  vecList.add(0,pos);
  //  if(vecList.size() >= handVecListSize)
  //    // remove the last point
  //    vecList.remove(vecList.size()-1);
  //}
  handVec = pos;
  //handVecList.add(0, pos);
  //if (handVecList.size() >= handVecListSize)
  //{ // remove the last point
  //  handVecList.remove(handVecList.size()-1);
  //}
}
void onLostHand(SimpleOpenNI curContext, int handId)
{
  println("onLostHand - handId: " + handId);
  handPathList.remove(handId);
  handsTrackFlag = false;
}
// -----------------------------------------------------------------
// gesture events
void onCompletedGesture(SimpleOpenNI curContext, int gestureType, PVector pos)
{ // gestureType == 0, 1 or 2: wave, click, raise hand
  println("onCompletedGesture - gestureType: " + gestureType + ", pos: " + pos);
  //context.startGesture(SimpleOpenNI.GESTURE_CLICK);
  a = !a;  // toggle on every completed wave
  if (a) {
    int handId = context.startTrackingHand(pos);  // first wave: start tracking at the gesture position
  }
  else {
    context.stopTrackingHand(hh);  // next wave: should stop tracking, but onCompletedGesture never fires a second time
  }
  //int handId = context.startTrackingHand(pos);
  //println("hand stracked: " + handId);
  //lastGesture = gestureType;
  //context.removeGesture();
  //context.startTrackingHands(pos);
}
// -----------------------------------------------------------------
// Keyboard event
void keyPressed()
{
  switch(key)
  {
  case ' ':
    context.setMirror(!context.mirror());
    break;
  case '1':
    context.setMirror(true);
    break;
  case '2':
    context.setMirror(false);
    break;
  }
}
If you try to enable and disable hand tracking with gestures in the SimpleOpenNI Hands example, it works fine; waving toggles tracking on and off as expected:
/* --------------------------------------------------------------------------
* SimpleOpenNI Hands3d Test
* --------------------------------------------------------------------------
* Processing Wrapper for the OpenNI/Kinect 2 library
* http://code.google.com/p/simple-openni
* --------------------------------------------------------------------------
* prog: Max Rheiner / Interaction Design / Zhdk / http://iad.zhdk.ch/
* date: 12/12/2012 (m/d/y)
* ----------------------------------------------------------------------------
* This demo shows how to use the gesture/hand generator.
* It's not the most reliable yet; a two-hands example will follow.
* ----------------------------------------------------------------------------
*/
import java.util.Map;
import java.util.Iterator;
import SimpleOpenNI.*;
SimpleOpenNI context;
int handVecListSize = 20; // number of positions remembered per hand (path length)
Map<Integer,ArrayList<PVector>> handPathList = new HashMap<Integer,ArrayList<PVector>>();
color[] userClr = new color[]{ color(255, 0, 0),
                               color(0, 255, 0),
                               color(0, 0, 255),
                               color(255, 255, 0),
                               color(255, 0, 255),
                               color(0, 255, 255)
                             };
void setup()
{
  frameRate(30);
  size(640, 480);
  context = new SimpleOpenNI(this);
  if (context.isInit() == false)
  {
    println("Can't init SimpleOpenNI, maybe the camera is not connected!");
    exit();
    return;
  }
  // enable depthMap generation
  context.enableDepth();
  // mirror the image
  context.setMirror(true);
  // enable hands + gesture generation
  //context.enableGesture();
  context.enableHand();
  context.startGesture(SimpleOpenNI.GESTURE_WAVE);
  // set how smooth the hand capturing should be
  //context.setSmoothingHands(.5);
}
void draw()
{
  // update the cam
  context.update();
  image(context.depthImage(), 0, 0);
  // draw the tracked hands
  if (handPathList.size() > 0)
  {
    Iterator itr = handPathList.entrySet().iterator();
    while (itr.hasNext())
    {
      Map.Entry mapEntry = (Map.Entry)itr.next();
      int handId = (Integer)mapEntry.getKey();
      ArrayList<PVector> vecList = (ArrayList<PVector>)mapEntry.getValue();
      PVector p;
      PVector p2d = new PVector();
      // draw the hand path
      stroke(userClr[ (handId - 1) % userClr.length ]);
      noFill();
      strokeWeight(1);
      Iterator itrVec = vecList.iterator();
      beginShape();
      while (itrVec.hasNext())
      {
        p = (PVector) itrVec.next();
        context.convertRealWorldToProjective(p, p2d);
        vertex(p2d.x, p2d.y);
      }
      endShape();
      // mark the current hand position
      stroke(userClr[ (handId - 1) % userClr.length ]);
      strokeWeight(4);
      p = vecList.get(0);
      context.convertRealWorldToProjective(p, p2d);
      point(p2d.x, p2d.y);
    }
  }
}
// -----------------------------------------------------------------
// hand events
void onNewHand(SimpleOpenNI curContext, int handId, PVector pos)
{
  println("onNewHand - handId: " + handId + ", pos: " + pos);
  ArrayList<PVector> vecList = new ArrayList<PVector>();
  vecList.add(pos);
  handPathList.put(handId, vecList);
}
int hh = -1;       // id of the hand currently being tracked
boolean a = false; // flipped on every completed wave: true = tracking, false = not tracking
void onTrackedHand(SimpleOpenNI curContext, int handId, PVector pos)
{
  // println("onTrackedHand - handId: " + handId + ", pos: " + pos );
  ArrayList<PVector> vecList = handPathList.get(handId);
  if (vecList != null)
  {
    vecList.add(0, pos);
    if (vecList.size() >= handVecListSize)
      // remove the oldest point
      vecList.remove(vecList.size()-1);
  }
  hh = handId;
}
void onLostHand(SimpleOpenNI curContext, int handId)
{
  println("onLostHand - handId: " + handId);
  handPathList.remove(handId);
}
// -----------------------------------------------------------------
// gesture events
void onCompletedGesture(SimpleOpenNI curContext, int gestureType, PVector pos)
{
  println("onCompletedGesture - gestureType: " + gestureType + ", pos: " + pos);
  //context.startGesture(SimpleOpenNI.GESTURE_CLICK);
  //int handId = context.startTrackingHand(pos);
  a = !a;  // toggle on every completed wave
  if (a) {
    int handId = context.startTrackingHand(pos);  // wave: start tracking at the gesture position
  }
  else {
    context.stopTrackingHand(hh);  // wave again: stop tracking the current hand
  }
}
// -----------------------------------------------------------------
// Keyboard event
void keyPressed()
{
  switch(key)
  {
  case ' ':
    context.setMirror(!context.mirror());
    break;
  case '1':
    context.setMirror(true);
    break;
  case '2':
    context.setMirror(false);
    break;
  }
}