Quantcast
Channel: Kinect - Processing 2.x and 3.x Forum
Viewing all articles
Browse latest Browse all 530

move blob with hand tracking HELP!

$
0
0

The blob is moving according to the coordinates of the mouse. But I want to move the blob using the coordinates of my hand. So far, what I've done, if you connect Kinect now the blob and the red ball move along the individual variable (blob = mouse X and Y, red ball = hand tracking), but I hope that the blob tracks the hand instead of the red ball.

Please don't mind the title of the movie. was just puppy footage. i used processing v2.2.1 and simpleopenNI 1.96 thank you :)


import SimpleOpenNI.*; import processing.video.*;

Movie movie; SimpleOpenNI context;

color[] userClr = new color[] { color(255, 0, 0), color(0, 255, 0), color(0, 0, 255), color(255, 255, 0), color(255, 0, 255), color(0, 255, 255) };

void setup() { size(854, 480); movie = new Movie(this, "dog2.mp4"); movie.loop();

context = new SimpleOpenNI(this); if (context.isInit() == false) { println("Can't init SimpleOpenNI, maybe the camera is not connected!"); exit(); return; }

context.setMirror(true); context.enableDepth(); context.enableUser();

background(255, 255, 255);

stroke(0, 255, 0); strokeWeight(3); smooth(); }

void movieEvent(Movie movie) { movie.read(); }

void draw() {

context.update(); image(movie, 0, 0); loadPixels(); movie.loadPixels();

for (int x = 0; x < movie.width; x++ ) { for (int y = 0; y < movie.height; y++ ) {

  int loc = x + y*movie.width;

  float r = red  (movie.pixels[loc]);
  float g = green(movie.pixels[loc]);
  float b = blue (movie.pixels[loc]);

  float distance = dist(x, y, mouseX, mouseY);

  float adjustBrightness = map(distance, 0, 150, 2, 0);
  r *= adjustBrightness;
  g *= adjustBrightness;
  b *= adjustBrightness;

  // Constrain RGB to between 0-255
  r = constrain(r, 0, 255);
  g = constrain(g, 0, 255);
  b = constrain(b, 0, 255);

  // Make a new color and set pixel in the window
  color c = color(r, g, b);
  pixels[loc] = c;
}

}

updatePixels();

int[] userList = context.getUsers(); for (int i=0; i<userList.length; i++) { if (context.isTrackingSkeleton(userList[i])) { stroke(userClr[ (userList[i] - 1) % userClr.length ] ); drawSkeleton(userList[i]); } } }

// draw the skeleton with the selected joints void drawSkeleton(int userId) { // to get the 3d joint data

PVector jointPos = new PVector(); context.getJointPositionSkeleton(userId,SimpleOpenNI.SKEL_LEFT_HAND,jointPos); println(jointPos);

fill(255, 0, 0, 100); noStroke();

PVector rightHand = new PVector(); context.getJointPositionSkeleton(userId, SimpleOpenNI.SKEL_LEFT_HAND, rightHand); PVector convertedRightHand = new PVector(); context.convertRealWorldToProjective(rightHand, convertedRightHand); ellipse(convertedRightHand.x, convertedRightHand.y, 50, 50);

PVector leftHand = new PVector(); context.getJointPositionSkeleton(userId, SimpleOpenNI.SKEL_RIGHT_HAND, leftHand); PVector convertedLeftHand = new PVector(); context.convertRealWorldToProjective(leftHand, convertedLeftHand); ellipse(convertedLeftHand.x, convertedLeftHand.y, 50, 50); }

void onNewUser(SimpleOpenNI curContext, int userId) { println("onNewUser - userId: " + userId); println("\tstart tracking skeleton");

curContext.startTrackingSkeleton(userId); }

void onLostUser(SimpleOpenNI curContext, int userId) { println("onLostUser - userId: " + userId); }

void onVisibleUser(SimpleOpenNI curContext, int userId) { //println("onVisibleUser - userId: " + userId); }


Viewing all articles
Browse latest Browse all 530

Trending Articles