Commit e7438c65 authored by Jeremy Laviole

Many small updates for compatibility with PapARt 1.5

parent 20f2183f
@@ -3,7 +3,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>tech.lity.rea</groupId>
<artifactId>natar-multi-camera-server</artifactId>
<version>0.1</version>
<version>0.1.2b</version>
<packaging>jar</packaging>
<properties>
@@ -64,11 +64,10 @@
<scope>compile</scope>
</dependency>
<!--It should use nectar-apps-->
<dependency>
<groupId>tech.lity.rea</groupId>
<artifactId>natar-apps</artifactId>
<version>0.1</version>
<artifactId>natar-core</artifactId>
<version>0.1.2a-SNAPSHOT</version>
</dependency>
<dependency>
@@ -152,7 +151,7 @@
</plugin>
<plugin>
<!-- <plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<executions>
@@ -175,7 +174,7 @@
</configuration>
</execution>
</executions>
</plugin>
</plugin>-->
</plugins>
</build>
<profiles>
......
@@ -59,56 +59,59 @@ public class CameraFactory {
boolean isIR = format.equalsIgnoreCase("ir");
boolean isDepth = format.equalsIgnoreCase("depth");
try{
switch (type) {
case FFMPEG:
camera = new CameraFFMPEG(description, format);
break;
case PROCESSING:
camera = new CameraProcessing(description);
break;
// Depth Cameras
case OPEN_KINECT_2:
cameraMulti = new CameraOpenKinect2(cameraNo);
break;
case REALSENSE:
cameraMulti = new CameraRealSense(cameraNo);
break;
case OPEN_KINECT:
cameraMulti = new CameraOpenKinect(cameraNo);
break;
case OPENCV:
camera = new CameraOpenCV(cameraNo);
break;
case FLY_CAPTURE:
camera = new CameraFlyCapture(cameraNo);
break;
case OPENNI2:
cameraMulti = new CameraOpenNI2(cameraNo);
break;
default:
throw new RuntimeException("ProCam, Camera: Unsupported camera type");
}
if (cameraMulti != null) {
if (isRGB) {
cameraMulti.setUseColor(true);
cameraMulti.actAsColorCamera();
try {
switch (type) {
case NECTAR:
cameraMulti = new CameraNectar(description);
break;
case FFMPEG:
camera = new CameraFFMPEG(description, format);
break;
case PROCESSING:
camera = new CameraProcessing(description);
break;
// Depth Cameras
case OPEN_KINECT_2:
cameraMulti = new CameraOpenKinect2(cameraNo);
break;
case REALSENSE:
cameraMulti = new CameraRealSense(cameraNo);
break;
case OPEN_KINECT:
cameraMulti = new CameraOpenKinect(cameraNo);
break;
case OPENCV:
camera = new CameraOpenCV(cameraNo);
break;
case FLY_CAPTURE:
camera = new CameraFlyCapture(cameraNo);
break;
case OPENNI2:
cameraMulti = new CameraOpenNI2(cameraNo);
break;
default:
throw new RuntimeException("ProCam, Camera: Unsupported camera type");
}
if (isIR) {
cameraMulti.setUseIR(true);
cameraMulti.actAsIRCamera();
if (cameraMulti != null) {
if (isRGB) {
cameraMulti.setUseColor(true);
cameraMulti.actAsColorCamera();
}
if (isIR) {
cameraMulti.setUseIR(true);
cameraMulti.actAsIRCamera();
}
if (isDepth) {
cameraMulti.setUseDepth(true);
cameraMulti.actAsDepthCamera();
}
return cameraMulti;
}
if (isDepth) {
cameraMulti.setUseDepth(true);
cameraMulti.actAsDepthCamera();
if (camera != null) {
return camera;
}
return cameraMulti;
}
if (camera != null) {
return camera;
}
} catch(NullPointerException e){
} catch (NullPointerException e) {
throw new CannotCreateCameraException("Cannot create the camera type " + type.toString() + " " + description);
}
throw new RuntimeException("ProCam, Camera: Unsupported camera type");
......
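For readers of this hunk, a minimal sketch of how the new NECTAR branch might be driven. It assumes the surrounding method is the usual CameraFactory.createCamera(Camera.Type, String, String) entry point, that "camera0" stands in for a real Nectar camera description, and that Camera exposes a start() method; none of those details are shown in this diff.

// Hypothetical usage sketch, not part of this commit.
import tech.lity.rea.nectar.camera.Camera;
import tech.lity.rea.nectar.camera.CameraFactory;
import tech.lity.rea.nectar.camera.CannotCreateCameraException;

public class NectarCameraExample {
    public static void main(String[] args) {
        try {
            // "camera0" is a placeholder description; "rgb" selects the color
            // sub-camera, matching the isRGB handling in the factory above.
            Camera camera = CameraFactory.createCamera(Camera.Type.NECTAR, "camera0", "rgb");
            camera.start();
        } catch (CannotCreateCameraException e) {
            e.printStackTrace();
        }
    }
}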
@@ -14,7 +14,7 @@ import org.apache.commons.cli.ParseException;
import redis.clients.jedis.Jedis;
import processing.data.JSONObject;
import tech.lity.rea.nectar.apps.NectarApplication;
import tech.lity.rea.nectar.utils.NectarApplication;
/**
*
@@ -242,6 +242,11 @@ public class CameraServerImpl extends NectarApplication implements CameraServer,
private void sendColorImage() {
ByteBuffer byteBuffer;
// byteBuffer = dcamera.getColorCamera().getIplImage();
// IplImage smaller = dcamera.getColorCamera().getIplImage().sc
if (useDepth) {
if (dcamera.getColorCamera().getIplImage() == null) {
log("null color Image -d", "");
@@ -319,7 +324,9 @@ public class CameraServerImpl extends NectarApplication implements CameraServer,
redisDepth.publish(id, imageInfo.toString().getBytes());
log("Sending (PUBLISH) image to: " + name, "");
}
}
}
public long time() {
......
/*
* Part of the PapARt project - https://project.inria.fr/papart/
*
* Copyright (C) 2016 Jérémy Laviole
* Copyright (C) 2014-2016 Inria
* Copyright (C) 2011-2013 Bordeaux University
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation, version 2.1.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General
* Public License along with this library; If not, see
* <http://www.gnu.org/licenses/>.
*/
package tech.lity.rea.nectar.camera;
import processing.core.PApplet;
import tech.lity.rea.nectar.calibration.HomographyCalibration;
import tech.lity.rea.nectar.camera.Camera;
import tech.lity.rea.nectar.camera.CameraFactory;
import tech.lity.rea.nectar.camera.CameraNectar;
import tech.lity.rea.nectar.camera.CameraRGBIRDepth;
import tech.lity.rea.nectar.camera.CannotCreateCameraException;
import tech.lity.rea.nectar.depthcam.DepthCameraDevice;
import tech.lity.rea.nectar.depthcam.DepthComputation;
import tech.lity.rea.nectar.depthcam.OpenNIDepth;
/**
*
* @author Jeremy Laviole
*/
public final class NectarOpenNI extends DepthCameraDevice {
private final CameraNectar cameraNectar;
public NectarOpenNI(PApplet parent, Camera anotherCam) throws CannotCreateCameraException {
super(parent);
if (anotherCam instanceof CameraNectar) {
this.camera = (CameraNectar) anotherCam;
this.camera.setUseDepth(true);
} else {
this.anotherCamera = anotherCam;
}
if (this.anotherCamera == null) {
this.anotherCamera = getColorCamera();
}
cameraNectar = (CameraNectar) camera;
}
@Override
public CameraNectar getMainCamera() {
return cameraNectar;
}
@Override
public int rawDepthSize() {
return getDepthCamera().width() * getDepthCamera().height() * 2;
}
@Override
public Camera.Type type() {
return Camera.Type.NECTAR;
}
@Override
public void loadDataFromDevice() {
// setStereoCalibration(cameraNI.getHardwareExtrinsics());
}
@Override
public DepthComputation createDepthComputation() {
return new OpenNIDepth();
}
}
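A hedged sketch of how the new NectarOpenNI device could be wired up from a Processing sketch. The CameraNectar constructor taking a description string is taken from the CameraFactory hunk above; the sketch class, the "camera0" description, and the PApplet wiring are illustrative assumptions rather than code from this commit.

// Hypothetical wiring of NectarOpenNI, not part of this commit.
import processing.core.PApplet;
import tech.lity.rea.nectar.camera.CameraNectar;
import tech.lity.rea.nectar.camera.CannotCreateCameraException;
import tech.lity.rea.nectar.camera.NectarOpenNI;

public class NectarOpenNISketch extends PApplet {

    NectarOpenNI depthDevice;

    @Override
    public void setup() {
        try {
            // Placeholder Nectar description; passing a CameraNectar makes it
            // the main camera and enables its depth stream (see the constructor above).
            CameraNectar cam = new CameraNectar("camera0");
            depthDevice = new NectarOpenNI(this, cam);
        } catch (CannotCreateCameraException e) {
            e.printStackTrace();
        }
    }

    public static void main(String[] args) {
        PApplet.main(NectarOpenNISketch.class.getName());
    }
}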
@@ -27,6 +27,7 @@ import tech.lity.rea.nectar.camera.Camera;
import tech.lity.rea.nectar.camera.CameraRGBIRDepth;
import tech.lity.rea.nectar.camera.SubCamera;
import tech.lity.rea.nectar.camera.SubDepthCamera;
import toxi.geom.Vec3D;
/**
*
@@ -52,7 +53,8 @@ public abstract class DepthCameraDevice {
}
public abstract void loadDataFromDevice();
// public abstract DepthAnalysis.DepthComputation createDepthComputation();
public abstract DepthComputation createDepthComputation();
public CameraRGBIRDepth getMainCamera() {
return camera;
@@ -95,9 +97,9 @@ public abstract class DepthCameraDevice {
camera.close();
}
// public void setTouch(DepthTouchInput kinectTouchInput) {
// camera.getDepthCamera().setTouchInput(kinectTouchInput);
// }
public void setTouch(Object kinectTouchInput) {
camera.getDepthCamera().setTouchInput(kinectTouchInput);
}
// public void setStereoCalibration(String fileName) {
// HomographyCalibration calib = new HomographyCalibration();
@@ -124,29 +126,31 @@ public abstract class DepthCameraDevice {
public PMatrix3D getStereoCalibrationInv() {
return KinectRGBIRCalibrationInv;
}
// public int findMainImageOffset(Vec3D v) {
// return findMainImageOffset(v.x, v.y, v.z);
// }
//
// public int findMainImageOffset(PVector v) {
// return findMainImageOffset(v.x, v.y, v.z);
// }
//
// public int findColorOffset(Vec3D v) {
// return findColorOffset(v.x, v.y, v.z);
// }
//
// public int findColorOffset(PVector v) {
// return findColorOffset(v.x, v.y, v.z);
// }
//
// public int findDepthOffset(PVector v) {
// return findDepthOffset(v.x, v.y, v.z);
// }
// public int findDepthOffset(Vec3D v) {
// return findDepthOffset(v.x, v.y, v.z);
// }
public int findMainImageOffset(Vec3D v) {
return findMainImageOffset(v.x, v.y, v.z);
}
public int findMainImageOffset(PVector v) {
return findMainImageOffset(v.x, v.y, v.z);
}
public int findColorOffset(Vec3D v) {
return findColorOffset(v.x, v.y, v.z);
}
public int findColorOffset(PVector v) {
return findColorOffset(v.x, v.y, v.z);
}
public int findDepthOffset(PVector v) {
return findDepthOffset(v.x, v.y, v.z);
}
public int findDepthOffset(Vec3D v) {
return findDepthOffset(v.x, v.y, v.z);
}
private PVector vt = new PVector();
private PVector vt2 = new PVector();
@@ -160,37 +164,37 @@ public abstract class DepthCameraDevice {
* @param z
* @return
*/
// public int findColorOffset(float x, float y, float z) {
// vt.set(x, y, z);
// vt2.set(0, 0, 0);
// // Ideally use a calibration...
//// kinectCalibRGB.getExtrinsics().mult(vt, vt2);
// getStereoCalibration().mult(vt, vt2);
// return getColorCamera().getProjectiveDevice().worldToPixel(vt2.x, vt2.y, vt2.z);
// }
// /**
// * Warning not thread safe.
// *
// * @param x
// * @param y
// * @param z
// * @return
// */
// public int findMainImageOffset(float x, float y, float z) {
// vt.set(x, y, z);
// vt2.set(0, 0, 0);
// // Ideally use a calibration...
//// kinectCalibRGB.getExtrinsics().mult(vt, vt2);
// getStereoCalibration().mult(vt, vt2);
//
// // TODO: find a solution for this...
// return getMainCamera().getProjectiveDevice().worldToPixel(vt2.x, vt2.y, vt2.z);
//// return getColorCamera.getProjectiveDevice().worldToPixel(vt2.x, vt2.y, vt2.z);
// }
//
// private int findDepthOffset(float x, float y, float z) {
// return getDepthCamera().getProjectiveDevice().worldToPixel(x,y,z);
// }
public int findColorOffset(float x, float y, float z) {
vt.set(x, y, z);
vt2.set(0, 0, 0);
// Ideally use a calibration...
// kinectCalibRGB.getExtrinsics().mult(vt, vt2);
getStereoCalibration().mult(vt, vt2);
return getColorCamera().getProjectiveDevice().worldToPixel(vt2.x, vt2.y, vt2.z);
}
/**
* Warning not thread safe.
*
* @param x
* @param y
* @param z
* @return
*/
public int findMainImageOffset(float x, float y, float z) {
vt.set(x, y, z);
vt2.set(0, 0, 0);
// Ideally use a calibration...
// kinectCalibRGB.getExtrinsics().mult(vt, vt2);
getStereoCalibration().mult(vt, vt2);
// TODO: find a solution for this...
return getMainCamera().getProjectiveDevice().worldToPixel(vt2.x, vt2.y, vt2.z);
// return getColorCamera.getProjectiveDevice().worldToPixel(vt2.x, vt2.y, vt2.z);
}
private int findDepthOffset(float x, float y, float z) {
return getDepthCamera().getProjectiveDevice().worldToPixel(x, y, z);
}
}
@@ -20,11 +20,19 @@
*/
package tech.lity.rea.nectar.depthcam;
import org.bytedeco.javacpp.opencv_core;
/**
* Abstraction for finding depth for each device.
*
* @author Jeremy Laviole
*/
public interface DepthComputation {
public float findDepth(int offset, Object buffer);
public static final float INVALID_DEPTH = -1;
// public float findDepth(int offset, Object buffer);
public float findDepth(int offset);
public void updateDepth(opencv_core.IplImage buffer);
}
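To make the revised DepthComputation contract concrete, here is a minimal sketch of an implementation, modeled on the OpenNIDepth class updated further down in this commit. The class name and the 16-bit, zero-means-missing depth layout are assumptions for illustration, not something this hunk prescribes.

// Minimal sketch of the new contract, not part of this commit:
// updateDepth() caches the latest frame, findDepth() maps a pixel offset to a
// depth value, and INVALID_DEPTH (-1) flags pixels without a reading.
package tech.lity.rea.nectar.depthcam;

import java.nio.ShortBuffer;
import org.bytedeco.javacpp.opencv_core;

public class Raw16BitDepth implements DepthComputation {

    private ShortBuffer frameData;

    @Override
    public void updateDepth(opencv_core.IplImage depthImage) {
        // Keep a view on the most recent 16-bit depth frame.
        frameData = depthImage.getShortBuffer();
    }

    @Override
    public float findDepth(int offset) {
        int depth = frameData.get(offset) & 0xFFFF;
        // Assumption: a raw value of 0 means "no measurement" on this device.
        return depth == 0 ? INVALID_DEPTH : depth;
    }
}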
@@ -72,30 +72,32 @@ public class Kinect360 extends DepthCameraDevice {
// Nothing here yet. maybe get calibrations ?
}
// @Override
// public DepthAnalysis.DepthComputation createDepthComputation() {
// return new Kinect360Depth();
// }
//
// public class Kinect360Depth implements DepthAnalysis.DepthComputation {
//
// byte[] depthRaw;
//
// public Kinect360Depth() {
// depthRaw = new byte[rawDepthSize()];
// }
//
// @Override
// public float findDepth(int offset) {
// float d = (depthRaw[offset * 2] & 0xFF) << 8
// | (depthRaw[offset * 2 + 1] & 0xFF);
//
// return d;
// }
//
// @Override
// public void updateDepth(IplImage depthImage) {
// depthImage.getByteBuffer().get(depthRaw);
// }
// }
@Override
public DepthComputation createDepthComputation() {
return new Kinect360Depth();
}
class Kinect360Depth implements DepthComputation {
byte[] depthRaw;
public Kinect360Depth() {
depthRaw = new byte[rawDepthSize()];
}
@Override
public float findDepth(int offset) {
float d = (depthRaw[offset * 2] & 0xFF) << 8
| (depthRaw[offset * 2 + 1] & 0xFF);
return d;
}
@Override
public void updateDepth(IplImage depthImage) {
depthImage.getByteBuffer().get(depthRaw);
}
}
}
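As a quick sanity check of the big-endian unpacking that Kinect360Depth.findDepth() performs, two consecutive raw bytes 0x07 and 0xD0 combine to 0x07D0, i.e. a raw depth value of 2000. The tiny program below (illustrative only, not part of the commit) reproduces that arithmetic.

// Worked example of (hi & 0xFF) << 8 | (lo & 0xFF), as used above.
public class Kinect360DepthExample {
    public static void main(String[] args) {
        byte hi = (byte) 0x07;
        byte lo = (byte) 0xD0;
        int combined = ((hi & 0xFF) << 8) | (lo & 0xFF);
        System.out.println(combined); // prints 2000 (0x07D0), in raw sensor units
    }
}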
/*
* Part of the PapARt project - https://project.inria.fr/papart/
*
* Copyright (C) 2017 RealityTech
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation, version 2.1.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General
* Public License along with this library; If not, see
* <http://www.gnu.org/licenses/>.
*/
package tech.lity.rea.nectar.depthcam;
/**
*
* @author Jeremy Laviole
*/
public class Kinect360Depth implements DepthComputation {
@Override
public float findDepth(int offset, Object buffer) {
float d = (((byte[]) buffer)[offset * 2] & 0xFF) << 8
| (((byte[]) buffer)[offset * 2 + 1] & 0xFF);
return d;
}
}
@@ -67,33 +67,32 @@ public final class KinectOne extends DepthCameraDevice {
// Nothing here yet. maybe get calibrations ?
}
// @Override
// public DepthAnalysis.DepthComputation createDepthComputation() {
// return new KinectOneDepth();
// }
//
// public class KinectOneDepth implements DepthAnalysis.DepthComputation {
//
// public static final float KINECT_ONE_DEPTH_RATIO = 10f;
//
// byte[] depthRaw;
//
// public KinectOneDepth() {
// depthRaw = new byte[rawDepthSize()];
// }
//
// @Override
// public float findDepth(int offset) {
// float d = (depthRaw[offset * 3 + 1] & 0xFF) * 256
// + (depthRaw[offset * 3] & 0xFF);
//
// return d / KINECT_ONE_DEPTH_RATIO; // / 65535f * 10000f;
// }
//
// @Override
// public void updateDepth(opencv_core.IplImage depthImage) {
// depthImage.getByteBuffer().get(depthRaw);
// }
// }
@Override
public DepthComputation createDepthComputation() {
return new KinectOneDepth();
}
public class KinectOneDepth implements DepthComputation {
public static final float KINECT_ONE_DEPTH_RATIO = 10f;
byte[] depthRaw;
public KinectOneDepth() {
depthRaw = new byte[rawDepthSize()];
}
@Override
public float findDepth(int offset) {
float d = (depthRaw[offset * 3 + 1] & 0xFF) * 256
+ (depthRaw[offset * 3] & 0xFF);
return d / KINECT_ONE_DEPTH_RATIO; // / 65535f * 10000f;
}
@Override
public void updateDepth(opencv_core.IplImage depthImage) {
depthImage.getByteBuffer().get(depthRaw);
}
}
}
/*
* Part of the PapARt project - https://project.inria.fr/papart/
*
* Copyright (C) 2017 RealityTech
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation, version 2.1.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General
* Public License along with this library; If not, see
* <http://www.gnu.org/licenses/>.
*/
package tech.lity.rea.nectar.depthcam;
public class KinectOneDepth implements DepthComputation {
public static final float KINECT_ONE_DEPTH_RATIO = 10f;
@Override
public float findDepth(int offset, Object buffer) {
float d = (((byte[]) buffer)[offset * 3 + 1] & 0xFF) * 256
+ (((byte[]) buffer)[offset * 3] & 0xFF);
return d / KINECT_ONE_DEPTH_RATIO; // / 65535f * 10000f;
}
}
@@ -80,8 +80,8 @@ public final class OpenNI2 extends DepthCameraDevice {
// setStereoCalibration(cameraNI.getHardwareExtrinsics());
}
// @Override
// public DepthAnalysis.DepthComputation createDepthComputation() {
// return new OpenNIDepth();
// }
@Override
public DepthComputation createDepthComputation() {
return new OpenNIDepth();
}
}
@@ -7,12 +7,13 @@ package tech.lity.rea.nectar.depthcam;
import java.nio.ShortBuffer;
import org.bytedeco.javacpp.opencv_core;
import static tech.lity.rea.nectar.depthcam.DepthComputation.INVALID_DEPTH;
/**
*
* @author realitytech
*/
public class OpenNIDepth {
public class OpenNIDepth implements DepthComputation {
private ShortBuffer frameData;
float[] histogram;
@@ -20,17 +21,15 @@
public OpenNIDepth() {
}
// @Override
// public float findDepth(int offset) {
// int depth = (int) (frameData.get(offset) & 0xFFFF);
// if (depth == 0) {
// return INVALID_DEPTH;
// }
// return depth;
// }
//
// @Override
// public void updateDepth(opencv_core.IplImage depthImage) {
// frameData = depthImage.getShortBuffer();
// }
public float findDepth(int offset) {
int depth = (int) (frameData.get(offset) & 0xFFFF);
if (depth == 0) {
return INVALID_DEPTH;
}
return depth;
}
public void updateDepth(opencv_core.IplImage depthImage) {
frameData = depthImage.getShortBuffer();
}
}
@@ -78,31 +78,30 @@ public final class RealSense extends DepthCameraDevice {
setStereoCalibration(cameraRS.getHardwareExtrinsics());
}
// @Override
// public DepthAnalysis.DepthComputation createDepthComputation() {
// return new RealSenseDepth();
// }
// protected class RealSenseDepth implements DepthAnalysis.DepthComputation {
//
// private final float depthRatio;
// private ShortBuffer depthRawShortBuffer;
//
// public RealSenseDepth() {
// this.depthRatio = cameraRS.getDepthScale();
// }
//
// @Override
// public float findDepth(int offset) {
// float d = depthRawShortBuffer.get(offset) * depthRatio * 1000f;
// return d;
// }
//
// @Override
// public void updateDepth(opencv_core.IplImage depthImage) {
// ByteBuffer depthRawBuffer = depthImage.getByteBuffer();
// depthRawShortBuffer = depthRawBuffer.asShortBuffer();
// }