
MIRROR OF VIRTUAL TRUTH

An installation reflecting on internet trackers in a closed environment. It consists of a projection of a virtual reality that shows how data about one's activity online is tracked. A display acting as a mirror visualises how a person's data is spread around the web, showing a conversation between the trackers as data about the person's location is pulled.
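As a rough illustration of the mirror display, the snippet below turns a tracked position vector into one line of "tracker dialogue". The tracker names and message format are invented for this sketch and are not part of the installation code.

// Hypothetical sketch: turn a tracked position into "tracker chatter".
// The tracker names and phrasing are illustrative assumptions only.
String[] trackers = { "ad-network", "analytics", "social-widget" };

String trackerLine(PVector pos) {
  String who = trackers[(int) random(trackers.length)];
  return who + ": subject at x=" + pos.x + " z=" + pos.z + ", logging location";
}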


[Image: tracking]


Code:

import SimpleOpenNI.*;
import java.awt.Color;
import java.util.Iterator;
import gifAnimation.*;

SingleUserKinect kinect;

PVector userPosRealWorld = new PVector(); // 3d user position
PVector userPosProjective = new PVector(); // user screen position
float comX; // Center of Mass X
float maxX =  500; // 1/2 meter  (0 is in the middle)
float minX =  -500; // -1/2 meter (0 is in the middle)

/* Leftover from the course template: numImages, images and agent2
   are declared/loaded below but never drawn in this sketch. */
int numImages = 17; // total number of images
PImage[] images = new PImage[numImages]; // the images would be stored in this list
PImage agent2;


PImage[] animation;
PImage[] animation2;
PImage[] animation3;

PVector prevUserPosRealWorld = new PVector();
float moveDist = 4;

PFont font;
float[] kordinati; // "coordinates"; only referenced by a commented-out line in draw()

void setup() {

  // size of the window
  //size(400,400);// use size "(displayWidth, displayHeight)" for fullscreen  
  size(displayWidth, displayHeight, P3D);
  // set frame background to black (this is the background you'll see in Present Mode)
  frame.setBackground(new Color(0, 0, 0));

  // use SingleUserKinect for tracking.
  kinect = new SingleUserKinect(this);
  kinect.context.enableRGB();
  agent2 = loadImage("data/agent_half.gif");
  animation = Gif.getPImages(this, "data/binaryflow2.gif"); 
  animation2 = Gif.getPImages(this, "data/agent_half2.gif");
  animation3 = Gif.getPImages(this, "data/agent_half3.gif");
  
  font = createFont("Arial", 14);
  textFont(font);
}


void draw() {

  // update user position
  kinect.update();
  background(255);
  // start from the maximum value; overwritten below when a user is tracked
  comX = maxX; // assign the global comX rather than shadowing it with a local

  // if we are tracking a user, get the position of his Center of Mass  
  if (kinect.trackedUserId != 0) { 
    kinect.getCoM(userPosRealWorld);    
    
    comX = userPosRealWorld.x;
    if (comX > maxX) {
      comX = maxX;
    }
    else if (comX < minX) {
      comX = minX;
    }
  }


  float maxRange = maxX - minX; // total range
  float x = map(comX, minX, maxX, 0, maxRange); // map center of mass to a positive range
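  // worked example: with minX = -500 and maxX = 500, a user standing
  // 200 mm left of centre (comX = -200) maps to x = map(-200, -500, 500, 0, 1000) = 300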


  float d = dist(userPosRealWorld.x, userPosRealWorld.y, userPosRealWorld.z,
                 prevUserPosRealWorld.x, prevUserPosRealWorld.y, prevUserPosRealWorld.z);
  // "==" compares object references, so testing against "new PVector()" never matches;
  // detect the not-yet-tracked zero vector by its magnitude instead
  if (userPosRealWorld.mag() == 0 || prevUserPosRealWorld.mag() == 0) {
    d = 0.0;
  }
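  // worked example: a frame-to-frame move of 3 mm in x and 4 mm in y (none in z)
  // gives d = 5, which exceeds moveDist = 4 and triggers the text/animation overlay below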

  kinect.context.convertRealWorldToProjective(userPosRealWorld, userPosProjective);
  // the Kinect image is 640x480, so map x to the window width and y to the height
  float screenX = map(userPosProjective.x, 0, 640, 0, width);
  float screenY = map(userPosProjective.y, 0, 480, 0, height);
  // projective z stays in millimetres; it is scaled the same way as x and used
  // below to shrink the sprites as the user steps back from the sensor
  float screenZ = map(userPosProjective.z, 0, 640, 0, width);
  //println(userPosRealWorld.x + " --- " + userPosProjective.x + " --- " + screenX);
  //kordinati = new float[] { screenX, screenY, screenZ };
  // draw the image
  image(kinect.context.rgbImage(), 0, 0, width, height);
  //image(kinect.context.rgbImage(), kinect.context.depthWidth(), 0, displayWidth, displayHeight);


  // pick an animation frame from the horizontal position; clamp the index so
  // screenX == width cannot step one past the end of the frame array
  int frame2 = constrain((int) (animation2.length / (float) width * screenX), 0, animation2.length - 1);
  int frame3 = constrain((int) (animation3.length / (float) width * screenX), 0, animation3.length - 1);
  image(animation2[frame2], screenX - 150, screenY - 500, 200 - screenZ/60, 240 - screenZ/60);
  image(animation3[frame3], screenX + 100, screenY - 500, 200 - screenZ/60, 240 - screenZ/60);


  println("distance moved " + d);

  if (d > moveDist) {
    // the user moved: print their raw real-world coordinates next to the reflection
    text(userPosRealWorld.x, screenX - 10, screenY - 450);
    text(userPosRealWorld.y, screenX - 10, screenY - 440);
    text(userPosRealWorld.z, screenX - 10, screenY - 430);
    int frame1 = constrain((int) (animation.length / (float) width * screenX), 0, animation.length - 1);
    image(animation[frame1], screenX + 120, screenY - 400, 160 - screenZ/60, 130 - screenZ/60);
    image(animation[frame1], screenX - 130, screenY - 400, 160 - screenZ/60, 130 - screenZ/60);
  }

  
  prevUserPosRealWorld.set(userPosRealWorld); // remember this frame's position for the movement check
} // end draw




// -----------------------------------------------------------------
// SimpleOpenNI user events
// -----------------------------------------------------------------
// onNewUser is triggered when the kinect registers a new user
void onNewUser(SimpleOpenNI curContext, int userId)
{
  // let our SingleUserKinect Class take care of this
  kinect.registerNewUser(curContext, userId);
}

// onLostUser is triggered when the kinect deregisters a user
void onLostUser(SimpleOpenNI curContext, int userId)
{
  // let our SingleUserKinect Class take care of this
  kinect.deRegisterUser(curContext, userId);
}
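
The sketch depends on a SingleUserKinect helper class from the course materials that is not reproduced on this page. Below is a minimal reconstruction of what that class could look like, assuming the standard SimpleOpenNI user-tracking calls (enableDepth(), enableUser(), getCoM()); the method bodies are an illustration, not the original class.

// Minimal reconstruction of the SingleUserKinect helper (assumption:
// the original course class uses the same method names and fields).
class SingleUserKinect {
  SimpleOpenNI context;
  int trackedUserId = 0; // 0 means "nobody tracked"

  SingleUserKinect(PApplet parent) {
    context = new SimpleOpenNI(parent);
    context.enableDepth(); // depth data is needed for user tracking
    context.enableUser();  // (older SimpleOpenNI versions take a skeleton-profile flag here)
  }

  void update() {
    context.update(); // fetch a new frame from the sensor
  }

  // adopt the first user the sensor reports
  void registerNewUser(SimpleOpenNI curContext, int userId) {
    if (trackedUserId == 0) {
      trackedUserId = userId;
    }
  }

  // forget the user when the sensor loses them
  void deRegisterUser(SimpleOpenNI curContext, int userId) {
    if (trackedUserId == userId) {
      trackedUserId = 0;
    }
  }

  // write the tracked user's centre of mass into pos (real-world millimetres)
  void getCoM(PVector pos) {
    if (trackedUserId != 0) {
      context.getCoM(trackedUserId, pos);
    }
  }
}

With a class along these lines, the event handlers above simply hand user IDs to registerNewUser() and deRegisterUser(), and draw() reads the centre of mass through getCoM().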