Where can I find documentation for the Kinect PV2 library?
I'm looking for a javadoc or some kind of reference that I can use to figure out how to use this library. The "reference" folder in the library just seems to contain info about "FaceData". Thanks.
Installing SimpleOpenNI on El Capitan
I'm trying to install SimpleOpenNI on El Capitan, but all the help pages are inactive. I finally found the library, but I can't figure out how to install it. I've used plenty of libraries before and it's never been this hard. I need the library to run a Processing sketch in the background to connect with Isadora. Help?
SimpleOpenNI not compatible?
I copied a simple example called "slug monster" to use with my Kinect (model 1414), and Processing reports that the library is not compatible. Sketch > Import Library shows SimpleOpenNI. Any ideas?
Creating an array of audio in Beads
Hi, I'm trying to create an array of audio files in Beads so that they play independently when each SMT zone is touched. The problem I'm having is how to initialize and link a different audio file to each zone.
The hope is to be able to use a Kinect to play audio files independently within zones. Please let me know if you have any ideas on improving the code, or on how to use arrays to reduce the footwork (see the array sketch after the code below).
Thanks
//Set for 6' height from ground 15'6" distance
import vialab.SMT.*;
import beads.*;
//Array of Zones
int arrayNum = 56;
TouchtouchZone[] touchZone = new TouchtouchZone[arrayNum];
boolean isEven;
boolean isOdd;
//Array of Sounds
int soundsNum = 23;
Sound[] sounds = new Sound[soundsNum];
AudioContext ac;
//Playback Variables
int playDelay = 0;
boolean canPlay = true;
//Grid Line Variables
int x, y;
float areaW, areaH;
int num, num2;
float spacing, spacing2;
float xVal, yVal;
//Aspect Ratio Variables
float aspectX, aspectY; // primitive float; the boxed Float wrapper isn't needed
int buttonSize;
float edgeSpace;
int gridStartX, gridStartY;
//Setup Display INFO
boolean window_fullscreen = false;
int window_width = 1200;
int window_height = 800;
int window_halfWidth;
int window_halfHeight;
int fps_limit = 60;
void setup() {
//Display setup
if ( window_fullscreen) {
window_width = displayWidth;
window_height = displayHeight;
}
window_halfWidth = window_width / 2;
window_halfHeight = window_height / 2;
//processing window setup
frameRate( fps_limit);
size( window_width, window_height, SMT.RENDERER);
SMT.init( this, TouchSource.AUTOMATIC);
//Audio Setup
ac = new AudioContext();
//Aspect Ratio Variables
edgeSpace = 20.0;
// aspectX = 640.0;
// aspectY = 480.0;
aspectX = (float)width;
aspectY = (float)height - edgeSpace*2;
// THIS IS NOT PERFECT YET
gridStartX = (int)(aspectX-aspectY)/2;
gridStartY = (int)edgeSpace;
//Grid Line Variables
// X
num = 8;
areaW=aspectY;
// Y
num2 = 7;
areaH=aspectY;
buttonSize = (int)(aspectY/num2);
// ARRAY MAKES BUTTONS IN CHECKERBOARD STYLE
for (int i=0; i<arrayNum; i++) {
int row = i / 8;
int col = i % 8;
// advance only on alternating (checkerboard) cells; on the other cells x and y
// keep their previous values, so the new zone stacks on the last position
if (row % 2 == col % 2) {
x = gridStartX+(col*buttonSize);
y = gridStartY+(row*buttonSize);
}
touchZone[i] = new TouchtouchZone(x, y, buttonSize, buttonSize, 100, 100, 150, 200);
SMT.add(touchZone[i]);
}
// ARRAY INITIALIZES AUDIO SETUP
for (int i=0; i<soundsNum; i++) {
sounds[i] = new Sound(0, 0);
sounds[i].audioSetup();
}
ac.start();
}
void draw() {
background(0);
fill(30);
spacing = buttonSize;
// fill(255);
playDelay++;
if (playDelay >= 15) {
canPlay = true;
} else {
canPlay = false;
}
text("Play Delay: "+playDelay, width-100, height-20);
//FOR GRID DEBUGGING
// rect(0, gridStartY, aspectX, aspectY);
for (int m = 0; m < num; m++) {
for (int n = 0; n < num2; n++) {
stroke(125);
strokeWeight(3);
x = gridStartX+(m*buttonSize);
y = gridStartY+(n*buttonSize);
rect(x, y, buttonSize, buttonSize);
}
}
}
public void drawFrameRate() {
float fps = this.frameRate;
String fps_text = String.format( "fps: %.0f", fps);
pushStyle();
fill( 240, 240, 240, 180);
textAlign( RIGHT, TOP);
textMode( MODEL);
textSize( 32);
text( fps_text, window_width - 10, 10);
popStyle();
}
private class touchZone extends Zone {
protected int colour_red;
protected int colour_green;
protected int colour_blue;
protected int colour_alpha;
public touchZone( int x, int y, int width, int height,
int colour_red, int colour_green, int colour_blue, int colour_alpha) {
super( x, y, width, height);
this.colour_red = colour_red;
this.colour_green = colour_green;
this.colour_blue = colour_blue;
this.colour_alpha = colour_alpha;
this.setCaptureTouches( false);
}
//draw method
public void draw() {
pushStyle();
noStroke();
fill( colour_red, colour_green, colour_blue, colour_alpha);
rect( 0, 0, this.getWidth(), this.getHeight(), 5);
popStyle();
}
public void touch() {
}
//we define the press method so that touches will be unassigned when they 'exit' the zone.
public void press( Touch touch) {
}
}
private class TouchtouchZone extends touchZone {
public TouchtouchZone( int x, int y, int width, int height,
int colour_red, int colour_green, int colour_blue, int colour_alpha) {
super( x, y, width, height,
colour_red, colour_green, colour_blue, colour_alpha);
}
//touch method
public void touch() {
Touch touch = getActiveTouch( 0);
touch.setTint(
colour_red, colour_green, colour_blue, colour_alpha);
for (int i=0; i<soundsNum; i++) {
if (canPlay) {
sounds[i].audioPlay();
}
playDelay=0;
}
}
}
static final boolean isEven(int n) {
return (n & 1) == 0;
}
static final boolean isOdd(int n) {
return !isEven(n);
}
class Sound {
//object variables
float xPos, yPos;
String sourceFile1, sourceFile2, sourceFile3, sourceFile4, sourceFile5, sourceFile6,
sourceFile7, sourceFile8, sourceFile9, sourceFile10, sourceFile11, sourceFile12, sourceFile13,
sourceFile14, sourceFile15, sourceFile16; // this will hold the path to our audio file
SamplePlayer sp1;
SamplePlayer sp2;
SamplePlayer sp3;
SamplePlayer sp4;
SamplePlayer sp5;
SamplePlayer sp6;
SamplePlayer sp7;
SamplePlayer sp8;
SamplePlayer sp9;
SamplePlayer sp10;
SamplePlayer sp11;
SamplePlayer sp12;
SamplePlayer sp13;
SamplePlayer sp14;
SamplePlayer sp15;
SamplePlayer sp16;
//Gain
Gain g;
Glide gainValue;
//Reverb
Reverb r; // our Reverberation unit generator
Sound (float _Xpos, float _Ypos) {
xPos = _Xpos;
yPos = _Ypos;
}
void audioSetup() {
// sourceFile = dataPath("0.mp3");
// sourceFile = dataPath("1.mp3");
sourceFile1 = dataPath("clap-1.mp3");
sourceFile2 = dataPath("snare-1.mp3");
sourceFile3 = dataPath("clap-3.mp3");
sourceFile4 = dataPath("clap-4.mp3");
sourceFile5 = dataPath("crash-1.mp3");
sourceFile6 = dataPath("crash-2.mp3");
sourceFile7 = dataPath("crash-3.mp3");
sourceFile8 = dataPath("crash-4.mp3");
sourceFile9 = dataPath("mid-1.mp3");
sourceFile10 = dataPath("mid-2.mp3");
sourceFile11 = dataPath("mid-3.mp3");
sourceFile12 = dataPath("mid-4.mp3");
sourceFile13 = dataPath("clap-2.mp3");
sourceFile14 = dataPath("snare-2.mp3");
sourceFile15 = dataPath("snare-3.mp3");
sourceFile16 = dataPath("snare-4.mp3");
try {
sp1 = new SamplePlayer(ac, new Sample(sourceFile1));
sp2 = new SamplePlayer(ac, new Sample(sourceFile2));
// sp3 = new SamplePlayer(ac, new Sample(sourceFile3));
// sp4 = new SamplePlayer(ac, new Sample(sourceFile4));
// sp5 = new SamplePlayer(ac, new Sample(sourceFile5));
// sp6 = new SamplePlayer(ac, new Sample(sourceFile6));
// sp7 = new SamplePlayer(ac, new Sample(sourceFile7));
// sp8 = new SamplePlayer(ac, new Sample(sourceFile8));
// sp9 = new SamplePlayer(ac, new Sample(sourceFile9));
// sp10 = new SamplePlayer(ac, new Sample(sourceFile10));
// sp12 = new SamplePlayer(ac, new Sample(sourceFile11));
// sp12 = new SamplePlayer(ac, new Sample(sourceFile12));
// sp13 = new SamplePlayer(ac, new Sample(sourceFile13));
// sp14 = new SamplePlayer(ac, new Sample(sourceFile14));
// sp15 = new SamplePlayer(ac, new Sample(sourceFile15));
// sp16 = new SamplePlayer(ac, new Sample(sourceFile16));
}
catch(Exception e)
{
println("Exception while at_ting to load sample!");
e.printStackTrace();
exit();
}
sp1.setKillOnEnd(false);
sp2.setKillOnEnd(false);
// sp3.setKillOnEnd(false);
// sp4.setKillOnEnd(false);
// sp5.setKillOnEnd(false);
// sp6.setKillOnEnd(false);
// sp7.setKillOnEnd(false);
// sp8.setKillOnEnd(false);
// sp9.setKillOnEnd(false);
// sp10.setKillOnEnd(false);
// sp11.setKillOnEnd(false);
// sp12.setKillOnEnd(false);
// sp13.setKillOnEnd(false);
// sp14.setKillOnEnd(false);
// sp15.setKillOnEnd(false);
// sp16.setKillOnEnd(false);
g = new Gain(ac, 2, 0.2); // assign the field; don't shadow it with a local
g.addInput(sp1);
g.addInput(sp2);
// g.addInput(sp3);
// g.addInput(sp4);
// g.addInput(sp5);
// g.addInput(sp6);
// g.addInput(sp7);
// g.addInput(sp8);
// g.addInput(sp9);
// g.addInput(sp10);
// g.addInput(sp11);
// g.addInput(sp12);
// g.addInput(sp13);
// g.addInput(sp14);
// g.addInput(sp15);
// g.addInput(sp16);
ac.out.addInput(g);
}
void audioPlay() {
// only sp1 and sp2 are ever loaded above, so play those two directly
sp1.start(); // play the audio file
sp1.setToLoopStart();
sp2.start();
sp2.setToLoopStart();
}
void textDisplay() {
}
}
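Since each zone already has an index, the numbered SamplePlayer fields can collapse into arrays. Here is a minimal sketch of the idea (assuming the same Beads AudioContext ac; the file list and the playZone() helper are illustrative), loading one SamplePlayer per file and letting a zone trigger its own sample by index:
String[] files = { "clap-1.mp3", "snare-1.mp3", "clap-3.mp3", "clap-4.mp3" };
SamplePlayer[] players = new SamplePlayer[files.length];
Gain masterGain;
void audioSetupArrays() {
masterGain = new Gain(ac, 2, 0.2);
try {
for (int i=0; i<files.length; i++) {
players[i] = new SamplePlayer(ac, new Sample(dataPath(files[i])));
players[i].setKillOnEnd(false); // keep players alive so they can be re-triggered
masterGain.addInput(players[i]);
}
}
catch(Exception e) {
println("Exception while attempting to load sample!");
e.printStackTrace();
exit();
}
ac.out.addInput(masterGain);
}
// a zone with index i then plays only its own sample:
void playZone(int i) {
players[i].setToLoopStart();
players[i].start();
}
With this, TouchtouchZone.touch() can call playZone() with the zone's index instead of looping over every Sound object.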
Change colors of point data from Kinect using shaders
Sorry, I am just starting with shaders. I have read a little, but not enough to understand how to change this code.
The code below builds a 3D scene from Kinect point-cloud data and sets all points to white. I just want to understand how I could set a color for each point dynamically (basically, I want points closer to the Kinect to be brighter; see the shader sketch after the code below).
I have tried to implement this, but I don't yet understand how to work with textures.
Thank you!
import java.nio.*;
import org.openkinect.freenect.*;
import org.openkinect.processing.*;
// Kinect Library object
Kinect kinect;
// Angle for rotation
float a = PI;
//openGL
PGL pgl;
PShader sh;
int vertLoc;
int vertLocId;
//int colorLoc;
void setup() {
// Rendering in P3D
size(800, 600, P3D);
kinect = new Kinect(this);
kinect.initDepth();
kinect.enableMirror(true); //added
//load shaders
sh = loadShader("frag.glsl", "vert.glsl");
PGL pgl = beginPGL();
IntBuffer intBuffer = IntBuffer.allocate(1);
pgl.genBuffers(1, intBuffer);
//memory location of the VBO
vertLocId = intBuffer.get(0);
endPGL();
}
void draw() {
background(0);
image(kinect.getDepthImage(), 0, 0, 320, 240); // why is this not mirrored?
pushMatrix();
translate(width/2, height/2, 600);
scale(150);
rotateY(a);
int vertData = kinect.width * kinect.height;
FloatBuffer depthPositions = kinect.getDephToWorldPositions();
pgl = beginPGL();
sh.bind();
vertLoc = pgl.getAttribLocation(sh.glProgram, "vertex");
//color for ALL POINTS of the point cloud
sh.set("fragColor", 1.0f, 1.0f, 1.0f, 1.0f); //<-----
pgl.enableVertexAttribArray(vertLoc);
pgl.bindBuffer(PGL.ARRAY_BUFFER, vertLocId);
pgl.bufferData(PGL.ARRAY_BUFFER, Float.BYTES * vertData *3, depthPositions, PGL.DYNAMIC_DRAW);
pgl.vertexAttribPointer(vertLoc, 3, PGL.FLOAT, false, Float.BYTES * 3, 0);
pgl.bindBuffer(PGL.ARRAY_BUFFER, 0);
//draw the XYZ depth camera points
pgl.drawArrays(PGL.POINTS, 0, vertData);
//clean up the vertex buffers
pgl.disableVertexAttribArray(vertLoc);
sh.unbind();
endPGL();
popMatrix();
fill(255, 0, 0);
text(frameRate, 50, 50);
}
frag.glsl:
#ifdef GL_ES
precision mediump float;
precision mediump int;
#endif
#define PROCESSING_TEXTURE_SHADER
varying vec4 vertColor;
//varying vec4 vertTexCoord;
//input color
uniform vec4 fragColor;
//uniform sampler2D texture;
void main() {
//vec4 col = texture2D(texture, vertTexCoord.st);
//outputColor
gl_FragColor = fragColor;
//gl_FragColor = vec4(col.rgb, 1.0);
}
vert.glsl:
uniform mat4 transform;
attribute vec4 vertex;
attribute vec4 color;
varying vec4 vertColor;
void main() {
gl_Position = transform * vertex;
vertColor = color;
}
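One way to make nearer points brighter, as a sketch rather than a tested solution: compute a brightness in the vertex shader from each point's untransformed z (this assumes the buffer from getDephToWorldPositions() stores the metric distance from the camera in the z component; the 0.5 m near and 4.5 m far limits are guesses to tune), and pass it to the fragment shader through the existing vertColor varying. The fragColor uniform is then no longer needed.
vert.glsl:
uniform mat4 transform;
attribute vec4 vertex;
varying vec4 vertColor;
void main() {
gl_Position = transform * vertex;
// map depth (~0.5 m near .. 4.5 m far) to bright .. dark
float b = clamp(1.0 - (vertex.z - 0.5) / 4.0, 0.0, 1.0);
vertColor = vec4(b, b, b, 1.0);
}
frag.glsl:
#ifdef GL_ES
precision mediump float;
#endif
varying vec4 vertColor;
void main() {
gl_FragColor = vertColor;
}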
Still having issues with the SimpleOpenNI library
I'm using Processing 2 on OS X 10.11.5. I finally seem to have all my libraries installed (and in the correct place), but I'm getting the error UnsatisfiedLinkError: SimpleOpenNI. It IS in the Processing libraries folder with all my other libraries, but it just won't run. I've seen that several people have posted this error, but I haven't seen a fix. Has anyone fixed this successfully?
Kinect depth image real-time guided filter
I found that the Kinect depth image works well with a guided filter in MATLAB, but I don't know how to write the code in Processing...
Multi-user detection and visualisation
I am looking to create a visualisation which represents the presence of users and the length of time they stand in front of the camera. First, I wrote code that writes 'Hello' when a user is present. This text grows over time. When the user vacates the room, the text decreases in size until it is no longer visible. The code for this is below.
import SimpleOpenNI.*;
SimpleOpenNI kinect;
int s = 0;
void setup(){
size(1000,700);
kinect = new SimpleOpenNI(this);
kinect.enableDepth();
//turn on user tracking
kinect.enableUser();
}
void draw(){
background(255);
kinect.update();
PImage depth = kinect.depthImage();
//image(depth, 0, 0);
IntVector userList = new IntVector();
kinect.getUsers(userList);
if(userList.size()>0){
println("There is somebody here");
s = s + 1;
textSize(s);
fill(0);
textAlign(CENTER);
text("hello", 500, 350);
}
if(userList.size()<1){
println("There is nobody here");
//s = 0;
if (s >= 1){
s = s - 1;
textSize(s);
fill(0);
textAlign(CENTER);
text("hello", 500, 350);
}
}
}
It's relatively simple in that it basically asks whether there is anybody in the room and returns a yes/no answer. What I want to do now is make it so that when there are several people in the room, they each get their own 'Hello' which grows and shrinks depending on how long they individually stay in the space. I know that I need to identify each user separately in order to do this, but I am unsure how, and there is little reference information on the SimpleOpenNI library. I'm wondering whether I could alter/add to the above code, or whether I would need to completely rethink the draw section in order to do what I want?
Any help would be appreciated.
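For reference, here is a minimal sketch of one per-user approach (assuming SimpleOpenNI's getCoM() and convertRealWorldToProjective(); the HashMap of text sizes is illustrative): keep a counter per user ID, grow it while that ID is visible, shrink it when the ID disappears, and draw each 'hello' at the user's centre of mass.
import SimpleOpenNI.*;
import java.util.*;
SimpleOpenNI kinect;
HashMap<Integer, Integer> sizes = new HashMap<Integer, Integer>();
void setup() {
size(1000, 700);
kinect = new SimpleOpenNI(this);
kinect.enableDepth();
kinect.enableUser();
}
void draw() {
background(255);
kinect.update();
IntVector userList = new IntVector();
kinect.getUsers(userList);
// grow the counter for every user currently visible
for (int i=0; i<userList.size(); i++) {
int id = (int)userList.get(i);
int s = sizes.containsKey(id) ? sizes.get(id) : 0;
sizes.put(id, s + 1);
}
// draw a 'hello' per known user, shrinking it once the user is gone
for (Integer id : new ArrayList<Integer>(sizes.keySet())) {
boolean present = false;
for (int i=0; i<userList.size(); i++) {
if ((int)userList.get(i) == id) present = true;
}
int s = sizes.get(id);
if (!present) sizes.put(id, --s);
if (s <= 0) { sizes.remove(id); continue; }
PVector com = new PVector();
PVector proj = new PVector();
kinect.getCoM(id, com); // 3D centre of mass of this user
kinect.convertRealWorldToProjective(com, proj);
textSize(s);
fill(0);
textAlign(CENTER);
text("hello", proj.x, proj.y);
}
}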
Improving a sketch used for Kinect detection and audio playback (SMT and Beads libraries)
Hey, so this links to a previous post. I have been building this code and finally have it in a spot where it is usable, but I know people out there can help me improve it, whether through speeding it up, cutting it down, or making it more efficient or more obvious what is going on.
Here's an outline of the project. A Kinect and a projector are mounted above the ground, pointing down. A screensaver plays until someone enters the sensor's space, at which point they trigger sounds by entering zones on the sketch. As more people enter, the zones adapt a little to make it more interesting.
Any input anyone has on improving this and making it more manageable would be hugely helpful.
//Set for 6' height from ground 15'6" distance
import vialab.SMT.*;
import beads.*;
// Grid Line Variables
int numCols = 8;
int numRows = 7;
// ZONES
// Array of Zones
int zonesNum = 28;
int soundsNum = 29;
Zone[] z = new Zone[zonesNum];
Zone[] z2 = new Zone[zonesNum];
Zone[] z3 = new Zone[zonesNum];
Custom custom;
ArrayList<Custom> list;
// Zone Grid Variables
int x, y, w, h;
int[] colX = new int[numCols];
int[] rowY = new int[numRows];
int[] colW = new int[numCols];
int[] rowH = new int[numRows];
int[] colX2 = new int[numCols];
int[] rowY2 = new int[numRows];
int[] colW2 = new int[numCols];
int[] rowH2 = new int[numRows];
int[] colX3 = new int[numCols];
int[] rowY3 = new int[numRows];
int[] colW3 = new int[numCols];
int[] rowH3 = new int[numRows];
// Zone Booleans
boolean [] isTouched = new boolean[zonesNum];
boolean [] canPlay = new boolean[zonesNum];
boolean [] play = new boolean[zonesNum];
// ZONES AUDIO
// Array of Sounds
AudioContext ac;
// Audio Files
int numSamples = 0;
int sampleWith = 0;
String [] sourceFile;
Gain [] gain;
Glide [] gainAmt;
SamplePlayer [] samplePlayer;
// Playback Variables
int [] playDelay = new int[zonesNum];
//int playDelay = 0;
int waitTime = 10;
int count = 0;
// DISPLAY
// Setup SMT Display INFO
boolean window_fullscreen = false;
int window_width = 1200;
int window_height = 800;
int window_halfWidth;
int window_halfHeight;
int fps_limit = 60;
// Defines for dialing in
boolean staticState = false;
// Screensaver Variables
float topStrokeX, botStrokeX, gridX, gridY;
int alpha;
int r;
int unitSize = 50;
PVector[] unit = new PVector[56];
// note: width and height are still 0 while fields are initialized, so these
// are computed in setup() once the window size is known
int xUnit;
int yUnit;
// To Define if people are in space
int people;
boolean peopleIn = false;
// Playhead for screensaver
int playX;
int playY;
int grow;
// Controlling Playhead Speed
float rate = 5;
int passCount=1;
// Removes frame from window
//void init()
//{
// frame.removeNotify();
// frame.setUndecorated(true);
// frame.addNotify();
// super.init();
//}
void setup() {
// // Sets Window to Location
// frame.setLocation(0, 0);
smtSetup();
xUnit = int(width/numCols);
yUnit = int(height/numRows);
screenSaverSetup();
audioSetup();
functionGrids();
dialInGrid();
function1();
function2();
ac.start();
}
void draw() {
background(0);
fill(30);
peopleOn();
staticStates ();
if (staticState) {
textDisplay();
}
delay();
//Switch off SMT cursors
// SMT.setTouchDraw(TouchDraw.NONE);
}
// Private class loads variables for each zone from functions tab depending on situation (num of people in space)
private class Custom extends Zone {
public Custom(String name, int x, int y, int width, int height, String audio) {
super(name, x, y, width, height, audio);
this.setCaptureTouches( false);
this.x = x;
this.y = y;
this.width = width;
this.height = height;
// the zone's name is overwritten with its audio id ("audio0".."audio27");
// draw() and touch() below parse this name to find the matching sample
this.name = audio;
}
// Zone Draw method
public void draw() {
int number = this.name.length();
int numTouches = getNumTouches();
stroke(255);
strokeWeight(2);
// Creates different color fill if touch occurs in zone
if (numTouches==0) {
fill( #F0E15E);
rect( 0, 0, this.getWidth(), this.getHeight());
}
if (numTouches>0) {
fill( #5AC16A);
rect( 0, 0, this.getWidth(), this.getHeight());
}
// Takes the length of the zone name and parses the numeric value (i) to stop the audio track related to the zone once the touch ends.
if ( number == 6 ) {
String element = this.name;
int x = Character.getNumericValue(element.charAt(5));
int audio = x;
if (numTouches==0) {
isTouched[audio] = false;
}
}
if ( number == 7 ) {
String element = this.name;
int x = Character.getNumericValue(element.charAt(5));
// System.out.println("x=" + x);
int y = Character.getNumericValue(element.charAt(6));
// System.out.println("y=" + y );
int audio = (x*10)+y;
if (numTouches==0) {
isTouched[audio] = false;
}
}
}
// Zone Touch method
public void touch() {
int number = this.name.length();
// Takes the length of the zone name and parses the numeric value (i) to play the audio track related to the zone.
if ( number == 6 ) {
String element = this.name;
int x = Character.getNumericValue(element.charAt(5));
int audio = x;
if (playDelay[audio] >= waitTime) {
samplePlayer[audio].start();
samplePlayer[audio].setToLoopStart();
isTouched[audio] = true;
playDelay[audio]=0;
}
}
if ( number == 7 ) {
String element = this.name;
int x = Character.getNumericValue(element.charAt(5));
int y = Character.getNumericValue(element.charAt(6));
int audio = (x*10)+y;
if (playDelay[audio] >= waitTime) {
samplePlayer[audio].start();
samplePlayer[audio].setToLoopStart();
isTouched[audio] = true;
playDelay[audio]=0;
}
}
}
public void press( Touch touch) {
}
}
void audioSetup() {
//Audio Setup
ac = new AudioContext();
File folder = new File(sketchPath("") + "samples");
File [] fileList = folder.listFiles();
// listFiles() order is not guaranteed, so sort for a stable zone-to-sample mapping
java.util.Arrays.sort(fileList);
for (int i = 0; i < fileList.length; i ++) {
if (fileList[i].isFile()) {
if (fileList[i].getName().endsWith(".mp3")) {
numSamples ++;
}
}
}
if (numSamples <= 0) {
println("No samples found in " + sketchPath("") + "samples/");
println("Exiting...");
exit();
}
sampleWith = 20;
sourceFile = new String[numSamples];
int count = 0;
for (int i = 0; i < fileList.length; i ++) {
if (fileList[i].isFile()) {
if (fileList[i].getName().endsWith(".mp3")) {
sourceFile[count] = fileList[i].getName();
count ++;
}
}
}
gain = new Gain[numSamples];
gainAmt = new Glide[numSamples];
samplePlayer = new SamplePlayer[numSamples];
try {
for (count = 0; count < numSamples; count ++) {
println("loading" + sketchPath("") + "samples/" + sourceFile[count]);
samplePlayer[count] = new SamplePlayer(ac, new Sample(sketchPath("") + "samples/" + sourceFile[count]));
samplePlayer[count].setKillOnEnd(false);
gainAmt[count] = new Glide(ac, 0.0);
gain[count] = new Gain(ac, 2, 0.2);
gain[count].addInput(samplePlayer[count]);
ac.out.addInput(gain[count]);
}
}
catch (Exception e) {
println("Error loading samples");
e.printStackTrace();
exit();
}
}
void delay () {
for (int i=0; i< zonesNum; i++) {
if (!isTouched[i]) {
playDelay[i]++;
}
}
}
void functionGrids() {
// for dial-in grid: column widths (symmetric about the centre)
colW[0] = colW[7] = (int)(width*0.03);
colW[1] = colW[6] = (int)(width*0.07);
colW[2] = colW[5] = (int)(width*0.10);
colW[3] = colW[4] = (int)(width*0.3);
// column x positions are running sums of the widths
colX[0] = 0;
for (int i=1; i<numCols; i++) {
colX[i] = colX[i-1] + colW[i-1];
}
// row heights (symmetric about the centre)
rowH[0] = rowH[6] = (int)(height*0.05);
rowH[1] = rowH[5] = (int)(height*0.10);
rowH[2] = rowH[4] = (int)(height*0.15);
rowH[3] = (int)(height*0.4);
rowY[0] = 0;
for (int i=1; i<numRows; i++) {
rowY[i] = rowY[i-1] + rowH[i-1];
}
// for function1 grid: uniform square cells, centred horizontally
for (int i=0; i<numCols; i++) {
colW2[i] = height/numCols;
}
colX2[0] = (width-height)/2;
for (int i=1; i<numCols; i++) {
colX2[i] = colX2[i-1] + colW2[i-1];
}
for (int i=0; i<numRows; i++) {
rowH2[i] = height/numCols;
}
rowY2[0] = rowH2[0]/2;
for (int i=1; i<numRows; i++) {
rowY2[i] = rowY2[i-1] + rowH2[i-1];
}
// for function2 grid: same layout as function1
for (int i=0; i<numCols; i++) {
colW3[i] = height/numCols;
}
colX3[0] = (width-height)/2;
for (int i=1; i<numCols; i++) {
colX3[i] = colX3[i-1] + colW3[i-1];
}
for (int i=0; i<numRows; i++) {
rowH3[i] = height/numCols;
}
rowY3[0] = rowH3[0]/2;
for (int i=1; i<numRows; i++) {
rowY3[i] = rowY3[i-1] + rowH3[i-1];
}
}
void dialInGrid() {
for (int i=0; i< zonesNum; i++) {
// four zones per row; columns alternate 0,2,4,6 on one row and 1,3,5,7 on the next
int row = i / 4;
int m = i % 8;
int col = (m < 4) ? 2*m : 2*(m-4)+1;
x = colX[col];
w = colW[col];
y = rowY[row];
h = rowH[row];
z[i] = new Custom("TouchZone"+i, x, y, w, h, "audio"+i);
SMT.add(z[i]);
}
}
void function1() {
for (int i=0; i< zonesNum; i++) {
if (i < 16) {
// two active rows of eight square zones
y = (i < 8) ? rowY2[2] : rowY2[4];
h = rowH2[0];
x = colX2[i % 8];
w = colW2[i % 8];
} else {
// park the unused zones off-screen
x = -200;
y = -200;
w = 0;
h = 0;
}
z2[i] = new Custom("TouchZone"+i, x, y, w, h, "audio"+i);
SMT.add(z2[i]);
}
}
void function2() {
for (int i=0; i< zonesNum; i++) {
y = rowY3[i / 4]; // four zones per row
x = colX3[i % 8]; // eight columns
w = rowH3[0]; // square cells
h = rowH3[0];
z3[i] = new Custom("TouchZone"+i, x, y, w, h, "audio"+i);
SMT.add(z3[i]);
}
}
// plays tuning up audio
void keyReleased() {
if (key == 'a') {
samplePlayer[28].start();
samplePlayer[28].setToLoopStart();
} else {
background(0);
}
}
// sets sketch in debugging mode
void staticStates () {
if (keyPressed) {
// to get to dial-in
if (key == 'm') {
staticState=true;
peopleIn = true;
for (int i=0; i< zonesNum; i++) {
z[i].setTouchable(true);
z[i].setVisible( true );
SMT.add(z[i]);
SMT.remove(z2[i]);
SMT.remove(z3[i]);
}
}
if (key == 'n') {
staticState=true;
peopleIn = true;
for (int i=0; i< zonesNum; i++) {
z2[i].setTouchable(true);
z2[i].setVisible( true );
SMT.add(z2[i]);
SMT.remove(z3[i]);
SMT.remove(z[i]);
}
}
if (key == 'b') {
staticState=true;
peopleIn = true;
for (int i=0; i< zonesNum; i++) {
z3[i].setTouchable(true);
z3[i].setVisible( true );
SMT.add(z3[i]);
SMT.remove(z[i]);
SMT.remove(z2[i]);
}
}
if (key == 'p') {
staticState=false;
for (int i=0; i< zonesNum; i++) {
SMT.add(z2[i]);
SMT.add(z3[i]);
}
}
}
}
more to follow in separate post...
OpenKinect: disabling console messages?
So I've been using Shiffman's OpenKinect-for-Processing for a while now. It keeps spitting out errors in the console, which I assume are dropped frames or something, and which I've safely ignored so far. But they are seriously hampering my debugging efforts: they keep flooding the console, obscuring my println() messages. Is there a way of disabling these console messages that I'm just unaware of?
(if anyone knows more about the errors themselves, pitch in as well :) )
Sample:
[DepthPacketStreamParser::onDataReceived] not all subsequences received 1020
[DepthPacketStreamParser::onDataReceived] not all subsequences received 511
[TurboJpegRgbPacketProcessor::doProcess] Failed to decompress rgb image! TurboJPEG error: 'Corrupt JPEG data: premature end of data segment'
[RgbPacketStreamParser::onDataReceived] packetsize or sequence doesn't match!
[TurboJpegRgbPacketProcessor::doProcess] Failed to decompress rgb image! TurboJPEG error: 'Corrupt JPEG data: 1672 extraneous bytes before marker 0xd5'
[RgbPacketStreamParser::onDataReceived] packetsize or sequence doesn't match!
Developing Joint Movement/Displacement Detector using Processing 3 and Kinect v2
Hi guys! I am now working with the Kinect, and pretty new to it. I want to detect the displacement of one specific joint live. For example, my right hand is at the top right corner now. Then, when I move my right hand to the center of the screen, I want the program to calculate the distance travelled by my right-hand joint. If possible, I'd also like to save the data in an Excel file later.
Right now I am stuck getting the coordinates of my new position. I draw the skeleton based on the example in Processing: SkeletonColor. I was thinking of adding a new function that calculates displacement, but when I put the function inside the for loop in void draw(), the original joint coordinates keep updating. That makes the displacement value 0 (because the new and old joint locations are exactly the same). I also tried to use an if statement to separate the two coordinates across two different frameCount values, but it doesn't seem to work at all.
Does anybody have a solution for this? Have you heard of a project like this before? Answers are very welcome. Thanks in advance!
Best, GH
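A minimal sketch of the fix for a single tracked user (based on the KinectPV2 SkeletonColor example; the prevHand and totalDistance fields are illustrative): store the previous frame's joint position in a field so it survives between draw() calls, and only overwrite it after measuring the per-frame displacement. The running total could later be written out with Processing's Table and saveTable(), which Excel opens as CSV.
import KinectPV2.*;
KinectPV2 kinect;
PVector prevHand = null; // right-hand position from the previous frame
float totalDistance = 0; // accumulated displacement in pixels
void setup() {
size(1920, 1080, P3D);
kinect = new KinectPV2(this);
kinect.enableSkeletonColorMap(true);
kinect.enableColorImg(true);
kinect.init();
}
void draw() {
image(kinect.getColorImage(), 0, 0, width, height);
ArrayList<KSkeleton> skeletons = kinect.getSkeletonColorMap();
for (KSkeleton skeleton : skeletons) {
if (!skeleton.isTracked()) continue;
KJoint[] joints = skeleton.getJoints();
KJoint hand = joints[KinectPV2.JointType_HandRight];
PVector now = new PVector(hand.getX(), hand.getY());
if (prevHand != null) {
totalDistance += PVector.dist(prevHand, now); // per-frame displacement
}
prevHand = now; // update only after measuring, so old and new positions differ
}
fill(255, 0, 0);
text("right hand travelled: " + nf(totalDistance, 0, 1) + " px", 50, 50);
}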
make a Halloween projection mapping project
Hi,
I'm planning to have a projection mapping installation at my house for the upcoming Halloween.
I'm probably going to use the nice Keystone library to adjust the projection surfaces to my house shape.
I also would like to use my Kinect to allow some interaction for (young) visitors.
Step #1: use video taken from the Kinect to show visitors' figures in my house windows. Maybe the video can be augmented with some sort of sinister filter.
Step #2: have some "scary" video filter applied to video of passing visitors and projected on a free surface of my house. I found that the "Time Displacement" video example could be a good starting point for a "scary" visualization of the user.
Any other recommendation would be more than welcome.
Thanks, Marco
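For reference, the basic Keystone setup looks like this (a minimal sketch following the library's example; the surface size and resolution are arbitrary): draw the scene into an offscreen PGraphics, then render it through a corner-pinned surface that you drag to fit the projection target.
import deadpixel.keystone.*;
Keystone ks;
CornerPinSurface surface;
PGraphics offscreen;
void setup() {
size(800, 600, P3D);
ks = new Keystone(this);
surface = ks.createCornerPinSurface(400, 300, 20);
offscreen = createGraphics(400, 300, P3D);
}
void draw() {
offscreen.beginDraw();
offscreen.background(0);
offscreen.fill(255, 120, 0);
offscreen.ellipse(200, 150, 100, 100); // stand-in for the Kinect video
offscreen.endDraw();
background(0);
surface.render(offscreen);
}
void keyPressed() {
if (key == 'c') ks.toggleCalibration(); // drag the corners to fit a window of the house
if (key == 's') ks.save(); // writes keystone.xml to the data folder
if (key == 'l') ks.load();
}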
** (java.exe:9372): WARNING **: gstvideo: failed to get caps of pad nat:sink
Hello there!
I have borrowed this code for a school project. It creates a bouncing ball which you can control with your webcam by movement. Sometimes it works just fine, but there seems to be a bug saying
** (java.exe:9372): WARNING **: gstvideo: failed to get caps of pad nat:sink
And then the webcam won't stream. I stumbled upon a discussion of a similar problem that suggested using loadPixels() and updatePixels(), yet that did not remove the warning.
Below is the code
BackgroundBouncer.pde:
import processing.video.*;
import gab.opencv.*;
import java.awt.Rectangle;
Capture video;
OpenCV opencv;
Ball b;
void setup() {
size(640,480, P3D); //setup screen size and uses OpenGL gfx driver
frameRate(30);
ellipseMode(RADIUS); //set ellipse mode to radius (center point)
video = new Capture(this, width/2, height/2);
opencv = new OpenCV(this, width/2, height/2);
b = new Ball();
video.start();
// note: don't call video.read() right after start(); the first frame may not
// have arrived yet, and captureEvent() below reads frames as they become available
opencv.startBackgroundSubtraction(50,3,0.5); //detects moving objects
}
void draw() {
clear();
scale(2);
loadPixels();
opencv.loadImage(video); //
opencv.flip(OpenCV.HORIZONTAL);
opencv.updateBackground();
opencv.calculateOpticalFlow(); //apparent motion between two frames caused by moving object or camera
opencv.dilate(); //makes the image wider and look at neighbour pixels shape over which minimum is taken
opencv.erode(); //erodes image and look at neighbour pixels shape over which minimum is taken
noFill();
stroke(255,0,0);
strokeWeight(1);
image(opencv.getOutput(), 0, 0);
for (Contour c : opencv.findContours()) {
// the convex hull and bounding box are recomputed inside strike(), so only strike here
b.strike(c, opencv);
}
b.move();
reflect(b);
drawBall(b);
updatePixels();
}
void keyPressed() {
b.position = new PVector(120, 120);
b.momentum = new PVector(0, 0);
}
void captureEvent(Capture c) {
c.read();
}
And the Ball.pde:
float decayRate = 0.9; //how much decay
float scalingFactor = 7; //how big a ball
PVector gravity = new PVector(0, 0.5); //how heavy
class Ball {
public Ball() {
size = 25;
shade = #FF0000; //red ball
position = new PVector(120, 120); //start position
momentum = new PVector(0, 0); //a moment of stillness
}
public PVector position;
public PVector momentum;
public float size;
public color shade;
public void move() {
position.add(momentum);
momentum.add(gravity);
momentum.mult(decayRate);
}
public void strike(Contour c, OpenCV opencv) {
for (PVector p : c.getPoints()) {
if (p.dist(position) <= size) {
Rectangle box = c.getBoundingBox();
PVector flow = opencv.getAverageFlowInRegion(box.x, box.y, box.width, box.height);
flow.mult(scalingFactor);
momentum.add(flow);
return;
}
}
}
}
void drawBall(Ball b) {
fill(b.shade);
ellipse(b.position.x, b.position.y,
b.size, b.size);
}
void reflect(Ball b) {
if (b.position.x - b.size <= 0) {
b.position.x = b.size;
b.momentum.x *= -1;
} else if (b.position.x + b.size > width/2) {
b.position.x = width/2 - b.size;
b.momentum.x *= -1;
}
if (b.position.y - b.size <= 0) {
b.position.y = b.size;
b.momentum.y *= -1;
} else if (b.position.y + b.size > height/2) {
b.position.y = height/2 - b.size;
b.momentum.y *= -1;
}
}
Any help or tips is welcomed! Thanks in advance.
Is it possible to adjust the length of the bones? (Kinect v1)
Is it possible to adjust the length of the bones?
The shape should be usable with another puppet, like this:
I use SimpleOpenNI but did not find the relevant methods.
Issues with Kinect initializing on first run of application on startup
Anybody experiencing this issue?
For some reason, when I start my computer and run one of the Kinect sketches, or one of my own, the Kinect doesn't initialize. The sketch runs but the Kinect image doesn't, even though I can see the device is on and running. If I quit and restart the sketch, it runs fine. Any ideas?
I'm having to use one of the legacy releases (3.0 beta 4) to leverage an old library. I'm on a Mac running 10.11 and am using Shiffman's v1 Kinect code for Average Point Tracking as a base for the Kinect stuff.
Thanks for any help.
I should also say I do not get any errors in the console either.
SimpleOpenNI fails with Processing 3.0
I'm getting a NoSuchMethodError when trying to run SimpleOpenNI in Processing 3.0a9. This happens when I call
context = new SimpleOpenNI(this);
I assume this is because PApplet has been jettisoned? Setting OPENGL as my renderer gave me a bit more verbose information. I'm not sure exactly where to file this issue, since the SimpleOpenNI Google Code page is no longer active.
java.lang.RuntimeException: java.lang.NoSuchMethodError: processing.core.PApplet.registerDispose(Ljava/lang/Object;)V
at processing.opengl.PSurfaceJOGL$2.run(PSurfaceJOGL.java:312)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.NoSuchMethodError: processing.core.PApplet.registerDispose(Ljava/lang/Object;)V
at SimpleOpenNI.SimpleOpenNI.initEnv(SimpleOpenNI.java:383)
at SimpleOpenNI.SimpleOpenNI.<init>(SimpleOpenNI.java:255)
at kinectExample.setup(kinectExample.java:25)
at processing.core.PApplet.handleDraw(PApplet.java:1958)
at processing.opengl.PSurfaceJOGL$DrawListener.display(PSurfaceJOGL.java:566)
at jogamp.opengl.GLDrawableHelper.displayImpl(GLDrawableHelper.java:691)
at jogamp.opengl.GLDrawableHelper.display(GLDrawableHelper.java:673)
at jogamp.opengl.GLAutoDrawableBase$2.run(GLAutoDrawableBase.java:442)
at jogamp.opengl.GLDrawableHelper.invokeGLImpl(GLDrawableHelper.java:1277)
at jogamp.opengl.GLDrawableHelper.invokeGL(GLDrawableHelper.java:1131)
at com.jogamp.newt.opengl.GLWindow.display(GLWindow.java:680)
at com.jogamp.opengl.util.AWTAnimatorImpl.display(AWTAnimatorImpl.java:77)
at com.jogamp.opengl.util.AnimatorBase.display(AnimatorBase.java:451)
at com.jogamp.opengl.util.FPSAnimator$MainTask.run(FPSAnimator.java:178)
at java.util.TimerThread.mainLoop(Timer.java:555)
at java.util.TimerThread.run(Timer.java:505)
Exception in thread "Animation Thread" java.lang.NullPointerException
Hello everyone, I'm trying to get some code working on a Raspberry Pi using the freenect libraries and a Kinect v1 (from an Xbox 360). I'm trying to get the example programs in OpenKinect-for-Processing working, but they all die with something like the output below. I have a freshly compiled version of freenect (libfreenect v0.5.3 Voyager) on the Pi, and Processing 3.2 (3.1.2 has the exact same problems). The freenect libraries themselves work fine: all the test programs like freenect-glview run, so this is some problem getting Processing to work with the freenect libs. I'm doing this work for pay for a client (I have my own business as a Pi and Arduino consultant), so at this point I just need to get something working. I'm considering junking freenect and trying to get OpenNI working, but I understand that stuff is more complex and could be even more time-consuming. Anyone have any ideas? Here's the debug output for RGBDepthTest.pde. All the other example programs die the same way:
Edit: forgot to add yesterday that all this happens on attempting:
kinect = new Kinect(this);
for all the test programs. It's not even getting as far as trying kinect.initDevice();
When I use the debugger I can see that any code up to this point is working fine. Processing just doesn't seem to want to create a Kinect object. After that we see the Animation Thread exception...
Aug 19, 2016 9:54:10 PM processing.mode.java.debug.LineBreakpoint attach
WARNING: no location found for line RGBDepthTest.pde:19 -> RGBDepthTest.java:38
32 linux
Found path: /home/pi/sketchbook/libraries/openkinect_processing/library/v1/linux/
Loaded freenect from /home/pi/kinect/processing-3.2/libfreenect.so
Exception in thread "Animation Thread" java.lang.NullPointerException
at org.openkinect.freenect.Freenect.createContext(Unknown Source)
at org.openkinect.processing.Kinect.<init>(Unknown Source)
at RGBDepthTest.setup(RGBDepthTest.java:39)
at processing.core.PApplet.handleDraw(PApplet.java:2384)
at processing.awt.PSurfaceAWT$12.callDraw(PSurfaceAWT.java:1540)
at processing.core.PSurfaceNone$AnimationThread.run(PSurfaceNone.java:316)
Aug 19, 2016 9:55:46 PM processing.mode.java.Debugger log
INFO: closing runtime
Aug 19, 2016 9:55:46 PM processing.mode.java.Debugger log
INFO: *** VM Event: VMDisconnectEvent: 0
Aug 19, 2016 9:55:46 PM processing.mode.java.Debugger$VMEventReader run
INFO: VMEventReader quit on VM disconnect
How to reset the Kinect at the end of an interaction
The Kinect is used for a simple interaction, but it stops working when other people block the Kinect, or when someone comes back. What do I need to do to regain control?
SimpleOpenNI Export Application stops sketch from working
I have a working sketch with SimpleOpenNI. Everything works just as I wanted, but when I export the application it doesn't run. I imported the sketch into Mother and I get the error:
Can't load SimpleOpenNI library (libSimpleOpenNI.jnilib) : java.lang.UnsatisfiedLinkError: Can't load library: /SimpleOpenNI/library/libSimpleOpenNI.jnilib
I already tried copying the SimpleOpenNI library everywhere and it still doesn't work.
Can anyone give me a tip? Thanks
I'm using Processing 2.0.3 64bit
simpleOpenNI 1.96
on a Mac with Mavericks
Processing's “Export” functionality does not work with SimpleOpenNI Kinect application
I'm running Processing 2.2.1 along with SimpleOpenNI 1.96 (based on instructions at http://shiffman.net/p5/kinect/). I modified the DepthImage example sketch and added file writing (code below).
I'm trying to output the depth data from Kinect to .txt files in a folder of my choice. In the Processing IDE, the sketch runs fine; the depth is written to the files correctly.
However, I would like this functionality in an .exe file so that another program can run the .exe and read from those files at run time. The Export functionality in the Processing IDE runs without errors, and I get both win32 and win64 application folders. But if I execute the .exe present in either one of them, nothing happens, and I cannot see any errors anywhere. Even if I select "Present Mode" while exporting, only a gray screen appears, and I cannot see any files being written to the path I supply. Toggling various selections (Present Mode / Export Java) in the Export options window hasn't helped.
I also tried to use savePath() because someone else (not using a Kinect application) was able to write data into folders using that, but it did not work for me.
Following is my sketch that works correctly in the IDE:
import SimpleOpenNI.*;
SimpleOpenNI context;
int[] dmap;
int dsize;
float[] dmapf;
PrintWriter output;
int fitr;
String path;
void setup()
{
size(640*2, 480);
fitr=1;
context = new SimpleOpenNI(this);
if (context.isInit() == false)
{
println("Can't init SimpleOpenNI, maybe the camera is not connected!");
exit();
return;
}
// mirror is by default enabled
context.setMirror(false);
// enable depthMap generation
context.enableDepth();
// enable RGB image generation
context.enableRGB();
path = savePath("E:\\SYMMBOT\\DepthReading");
}
void draw()
{
// update the cam
context.update();
dmap = context.depthMap();
//dmapf = dmap.array();
output = createWriter(path+"\\depth"+fitr+".txt");
fitr++;
int itr = 0;
for(int i=0; i<480; i++){
for(int j=0;j<640;j++){
output.print(dmap[itr]+" ");
itr++;
}
output.println();
}
output.flush();
output.close();
//dsize = context.depthMapSize();
background(200, 0, 0);
// draw depthImageMap
image(context.depthImage(), 0, 0);
// draw rgbImageMap
image(context.rgbImage(), context.depthWidth() + 10, 0);
}