Jean-Yves Didier

added a new set of components

import ARCS from '../engine/arcs.js';
/**
* @class OrientationSensor
* @classdesc Uses browser facilities to recover the absolute orientation of the device.
*
* Two APIs can be used to recover absolute orientation. One, the
* deviceorientation event, is officially deprecated but is still the one
* supported by Firefox. The other, the AbsoluteOrientationSensor from the
* Generic Sensor API, runs on Chromium-based platforms.
*
* @param config {Object} configuration object to initialize sensor
* @param [config.reference="device"] {string} type of reference frame, could be "device" or "screen"
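* @example
* // Minimal sketch (assumes the component is instantiated directly,
* // outside a complete ARCS application description):
* let sensor = new OrientationSensor({ reference: "screen" });
* sensor.start();
* // The component then emits "sendOrientation" ([alpha, beta, gamma] in
* // degrees), "sendQuaternion" ([x, y, z, w]) and "sendMatrix" (16 values).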
*/
let OrientationSensor = ARCS.Component.create(
/* @lends OrientationSensor.prototype */
function(config) {
const self = this;
let sensor = null;
let matrix = new Float64Array(16);
config = config || {};
let reference = config.reference || "device";
const toEuler = function(q) {
// yaw / alpha (Z), pitch / beta (Y), roll / gamma (X)
// https://en.wikipedia.org/wiki/Conversion_between_quaternions_and_Euler_angles
const x = q[0];
const y = q[1];
const z = q[2];
const w = q[3];
const gamma = Math.atan2(2*(w*x+y*z), 1-2*(x*x+y*y));
const sinp = 2*(w*y-z*x);
const beta = (Math.abs(sinp)>=1)?Math.sign(sinp)*Math.PI/2:Math.asin(sinp);
const alpha = Math.atan2(2*(w*z+x*y), 1-2*(y*y+z*z));
// convert to degrees so that both orientation APIs emit the same unit
const rad2deg = 180/Math.PI;
return [alpha*rad2deg, beta*rad2deg, gamma*rad2deg];
};
const toQuaternion = function(e) {
// deviceorientation angles are given in degrees: convert to radians first
const deg2rad = Math.PI/180;
const a = e.alpha * deg2rad;
const b = e.beta * deg2rad;
const g = e.gamma * deg2rad;
const cy = Math.cos(a*0.5);
const sy = Math.sin(a*0.5);
const cp = Math.cos(b*0.5);
const sp = Math.sin(b*0.5);
const cr = Math.cos(g*0.5);
const sr = Math.sin(g*0.5);
return [
sr*cp*cy-cr*sp*sy,
cr*sp*cy+sr*cp*sy,
cr*cp*sy-sr*sp*cy,
cr*cp*cy+sr*sp*sy
];
};
const toMatrix = function(e) {
// deviceorientation angles are given in degrees: convert to radians first
const deg2rad = Math.PI/180;
const a = e.alpha * deg2rad;
const b = e.beta * deg2rad;
const g = e.gamma * deg2rad;
const c = Math.cos;
const s = Math.sin;
return [
c(a)*c(g)-s(a)*s(b)*s(g), -c(b)*s(a), c(g)*s(a)*s(b)+c(a)*s(g), 0,
c(g)*s(a)+c(a)*s(b)*s(g), c(a)*c(b), s(a)*s(g)-c(a)*c(g)*s(b), 0,
-c(b)*s(g), s(b), c(b)*c(g), 0,
0, 0, 0, 1
];
};
// correct the orientation against the screen rotation when requested
const handleOrientation = function (event) {
if (reference === "screen") {
const orientation = screen.orientation || screen.msOrientation || screen.mozOrientation;
event.alpha -= (orientation && orientation.angle) ? orientation.angle : 0;
}
self.emit("sendOrientation", [event.alpha, event.beta, event.gamma]);
self.emit("sendQuaternion", toQuaternion(event));
self.emit("sendMatrix", toMatrix(event));
};
const handleReading = function() {
self.emit("sendOrientation", toEuler(sensor.quaternion));
self.emit("sendQuaternion",sensor.quaternion);
sensor.populateMatrix(matrix);
self.emit("sendMatrix", matrix);
};
/**
* initialises and starts the sensor
* @slot
* @function OrientationSensor#start
*/
this.start = function() {
if (window.AbsoluteOrientationSensor) {
sensor = new AbsoluteOrientationSensor({ referenceFrame: reference });
Promise.all([navigator.permissions.query({ name: "accelerometer" }),
navigator.permissions.query({ name: "magnetometer" }),
navigator.permissions.query({ name: "gyroscope" })])
.then(results => {
if (results.every(result => result.state === "granted")) {
// attach the listener before starting so no reading is missed
sensor.addEventListener("reading", handleReading);
sensor.start();
} else {
console.log("[AbsoluteOrientation]", "no permission to use sensor");
}
});
} else if (window.DeviceOrientationEvent) {
window.addEventListener("deviceorientation", handleOrientation, false);
} else {
console.log("[AbsoluteOrientation]", "no device orientation API");
}
};
/**
* sets the reference frame used to correct orientation
* @slot
* @function OrientationSensor#setReference
* @param ref {string} either "device" or "screen"
*/
this.setReference = function(ref) {
reference = ref;
};
},
["start","setReference"],
["sendOrientation", "sendMatrix", "sendQuaternion"]
);
/**
* emitted by the sensor to give the orientation as Euler angles
* @function OrientationSensor#sendOrientation
* @signal
* @param euler {number[]} Euler angle values in degrees
*/
/**
* emitted by the sensor to give the rotation matrix
* @function OrientationSensor#sendMatrix
* @signal
* @param matrix {number[]} a 16-component array representing the matrix
*/
/**
* emitted by the sensor to give a quaternion representing absolute orientation
* @function OrientationSensor#sendQuaternion
* @signal
* @param quat {number[]} a quaternion given as an array [x,y,z,w]
*/
export default { OrientationSensor : OrientationSensor };
import ARCS from '../engine/arcs.js';
import * as THREE from '../deps/three.js/index.js';
/**
* @class TransformComposer
* @classdesc Composes 4x4 transformation matrices from a set of named poses
* and a set of named operations combining them. Each pose becomes a slot,
* each operation becomes both a slot (triggering its computation) and a
* signal (carrying its result).
* @param config {Object} configuration object describing poses and operations
* @param [config.poses] {Object} named poses, each of the form
* { type: "mat"|"arr"|"pos_quat"|"pos"|"quat", values: [opt] }
* @param [config.operations] {Object} named operations, each mapping a single
* operator ("-1", "T", "*>", "<*", "t", "R", "|") to an array of operands,
* an operand being a pose name or a nested operation object
*/
let TransformComposer = ARCS.Component.create(
function(config) {
const self = this;
config ??= {};
config.poses ??= {};
config.operations ??= {};
let poses = {};
// supported operators: multiply ("*>"), premultiply ("<*"), invert ("-1"),
// transpose ("T"), copy translation ("t"), extract rotation ("R"), level ("|")
// expected configuration structure:
/*
* {
* poses : {
* pose_name : { type : "", values: [opt] }
* },
* operations : {
* op_name : { "<operator>" : [ operands... ] }
* }
* }
*/
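// Example configuration (sketch with hypothetical pose and operation names):
// the operation "markerInWorld" below computes world^-1 * marker and emits
// the result on a signal also named "markerInWorld".
/*
* {
* poses : {
* world : { type : "arr", values : [[1,0,0,0, 0,1,0,0, 0,0,1,0, 0,0,0,1]] },
* marker : { type : "pos_quat" }
* },
* operations : {
* markerInWorld : { "*>" : [ { "-1" : ["world"] }, "marker" ] }
* }
* }
*/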
let fTypes = {
'mat' : function(m) {
return m.clone();
},
'arr' : function(arr) {
return (new THREE.Matrix4()).fromArray(arr);
},
'pos_quat' : function(pos, quat) {
let m = new THREE.Matrix4();
// set the rotation first: makeRotationFromQuaternion overwrites the
// whole matrix, so the position must be set afterwards
m.makeRotationFromQuaternion((new THREE.Quaternion()).fromArray(quat));
m.setPosition((new THREE.Vector3()).fromArray(pos));
return m;
},
'pos': function(pos) {
return (new THREE.Matrix4()).setPosition((new THREE.Vector3()).fromArray(pos));
},
'quat': function(quat) {
return (new THREE.Matrix4()).makeRotationFromQuaternion((new THREE.Quaternion()).fromArray(quat));
},
};
let fOperators = {
'-1' : function(m) {
return m.clone().invert();
},
'T': function(m) {
return m.clone().transpose();
},
'*>' : function() {
if( arguments.length === 0)
return new THREE.Matrix4();
let mat = arguments[0].clone();
if (arguments.length > 1) {
for (let i=1; i< arguments.length; i++) {
mat.multiply(arguments[i]);
}
}
return mat;
},
'<*' : function() {
if( arguments.length === 0)
return new THREE.Matrix4();
let mat = arguments[0].clone();
if (arguments.length > 1) {
for (let i=1; i< arguments.length; i++) {
mat.premultiply(arguments[i]);
}
}
return mat;
},
't': function(a,b) {
return a.clone().copyPosition(b);
},
'R': function(a, b) {
return a.clone().extractRotation(b);
},
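// "|" levels a transform: it projects the X and Z axes of the matrix onto
// the horizontal plane, rebuilds an orthogonal frame around the world-up Y
// axis, and rotates the translation to compensate for the tilt between the
// matrix Y axis and world-up.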
'|': function(m) {
let mat = m.elements;
const mX = new THREE.Vector3(mat[0],0,mat[2]);
const mY = new THREE.Vector3(0,1,0);
const mZ = new THREE.Vector3(mat[8],0,mat[10]);
const lm = new THREE.Vector3();
lm.addVectors(mX,mZ);
const lX = lm.clone().normalize().applyAxisAngle(mY,Math.PI/4);
const lZ = lm.clone().normalize().applyAxisAngle(mY,-Math.PI/4);
// potentially, the acquired point is a little bit lower
// so, a correction should also be applied to this one.
const rY = new THREE.Vector3(mat[4],mat[5],mat[6]);
const rZ = mY.clone().cross(rY);
const aZ = rY.angleTo(mY);
const t = new THREE.Vector3(mat[12], mat[13], mat[14]);
t.applyAxisAngle(rZ,-aZ);
return (new THREE.Matrix4()).fromArray([
lX.x, lX.y, lX.z, 0,
mY.x, mY.y, mY.z, 0,
lZ.x, lZ.y, lZ.z, 0,
t.x, t.y, t.z, 1
]);
}
};
let computeMatrix = function(obj) {
if (typeof obj === "string") {
// a string operand refers to a registered pose
return poses[obj];
} else {
// otherwise, the object holds a single operator key whose value is
// the list of operands, each evaluated recursively
for (let p in obj) {
return fOperators[p].apply(
null, obj[p].map( (v) => { return computeMatrix(v); } )
);
}
}
};
let computePose = function(name) {
let obj = config.operations[name];
if (obj === undefined) return;
self.emit(name, computeMatrix(obj));
};
for (let p in config.poses) {
if (config.poses.hasOwnProperty(p)) {
// each pose becomes a slot that updates the stored matrix
self.slot(p, function() { poses[p] = fTypes[config.poses[p].type].apply(null, arguments); });
if (config.poses[p].values !== undefined) {
self[p].apply(null, config.poses[p].values);
} else {
poses[p] = new THREE.Matrix4();
}
}
}
for (let o in config.operations) {
// each operation gets a signal carrying its result and a slot
// triggering its computation
self.signal(o);
self.slot(o, ((name) => { return function() { computePose(name); };})(o));
}
//console.log("transform composer", self, poses);
}
);
export default { TransformComposer: TransformComposer};
import ARCS from '../engine/arcs.js';
/**
* @class XRExtensionManager
* @classdesc WebXR extension detector.
* WebXR extensions are requested as features (required or optional) when
* creating a session. For optional features, one must test inside the
* rendering loop whether they were actually activated.
* This component is dedicated to this task.
* A convenient way to check WebXR feature support is to use the link below.
* @see https://chromestatus.com/features#webxr
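* @example
* // Sketch (outside this component, hypothetical session setup): requesting
* // optional features that this component can later confirm in the loop.
* navigator.xr.requestSession("immersive-ar", {
* optionalFeatures: ["hit-test", "light-estimation", "anchors", "plane-detection"]
* });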
*/
let XRExtensionManager = ARCS.Component.create(
function() {
const self = this;
/**
* Detects WebXR session features inside the rendering loop.
* @param time {number} timestamp of the frame, as provided by the rendering loop
* @param camera {Object} main camera used for the rendering of the scene
* @param frame {Object} WebXR frame as returned by the WebXR module
* @slot
* @function XRExtensionManager#detect
* @emits onHitTest
* @emits onLightEstimation
* @emits onCameraAccess
* @emits onDepth
* @emits onPlaneDetection
* @emits onImageTracking
* @emits onAnchors
* @emits detectionPerformed
*/
this.detect = function(time, camera, frame) {
// some features need a pose to be already computed in order
// to be detected.
const viewerPose = frame.getViewerPose(frame.manager.getReferenceSpace());
if (!viewerPose) return;
if (frame.session.requestHitTestSource !== undefined)
self.emit('onHitTest');
if (frame.session.requestLightProbe !== undefined )
self.emit('onLightEstimation');
if (frame.session.getTrackedImageScores !== undefined)
self.emit('onImageTracking');
const view = viewerPose.views[0];
if (view.camera !== undefined)
self.emit('onCameraAccess');
if (frame.getDepthInformation !== undefined)
self.emit('onDepth');
if (frame.detectedPlanes !== undefined)
self.emit('onPlaneDetection');
if (frame.trackedAnchors !== undefined)
self.emit('onAnchors');
self.emit('detectionPerformed');
};
},
['detect'],
[
'onHitTest','onCameraAccess','onDepth','onLightEstimation',
'onPlaneDetection','onAnchors','onImageTracking', 'detectionPerformed'
]
);
/**
* emitted if hit test extension is available
* @see https://chromestatus.com/feature/4755348300759040
* @function XRExtensionManager#onHitTest
* @signal
*/
/**
* emitted if raw camera access extension is available
* @see https://chromestatus.com/feature/5759984304390144
* @function XRExtensionManager#onCameraAccess
* @signal
*/
/**
* emitted if depth extension is available
* @see https://chromestatus.com/feature/5742647199137792
* @function XRExtensionManager#onDepth
* @signal
*/
/**
* emitted if light estimation is available
* @see https://chromestatus.com/feature/5704707957850112
* @function XRExtensionManager#onLightEstimation
* @signal
*/
/**
* emitted if plane detection is available
* @see https://chromestatus.com/feature/5732397976911872
* @function XRExtensionManager#onPlaneDetection
* @signal
*/
/**
* emitted if anchors are available
* @see https://chromestatus.com/feature/5129925015109632
* @function XRExtensionManager#onAnchors
* @signal
*/
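/**
* emitted if image tracking is available
* @function XRExtensionManager#onImageTracking
* @signal
*/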
/**
* emitted when all tests are performed
* @function XRExtensionManager#detectionPerformed
* @signal
*/
export default { XRExtensionManager: XRExtensionManager};
import ARCS from '../engine/arcs.js';
/**
* @class XRImager
* @classdesc Grabs camera images through the WebXR raw camera access
* extension and computes the corresponding camera intrinsics.
* @param config {Object} configuration object
* @param [config.sampling=1] {number} downscaling factor applied to the
* retrieved camera image
*/
let XRImager = ARCS.Component.create(
function(config) {
let texture = null;
let framebuffer = null;
let prevFramebuffer = null;
let pixels = null;
config = config || {};
let sampling = config.sampling || 1;
let intrinsics = [0, 0, 0, 0, 0, 0, 0, 0, 0];
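// intrinsics holds a 3x3 pinhole camera matrix in row-major order:
// [ fx, 0, cx, 0, fy, cy, 0, 0, 1 ]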
let scaleImage = function(imageData) {
const h = imageData.height;
const w = imageData.width;
const p = imageData.data;
const ht = (h/sampling) | 0;
const wt = (w/sampling) | 0;
let pixels = new Uint8ClampedArray(ht*wt*4);
let offset;
for (let i=0; i < ht; i++) {
offset = i*sampling*w*4;
for (let j=0; j < wt; j++) {
// 4 bytes (RGBA) per pixel in both source and destination
const dst = (i*wt+j)*4;
const src = offset + j*sampling*4;
pixels[dst] = p[src];
pixels[dst+1] = p[src+1];
pixels[dst+2] = p[src+2];
pixels[dst+3] = p[src+3];
}
}
// ImageData expects width first, then height
return new ImageData(pixels, wt, ht);
};
this.update = function(time, camera, frame) {
const viewerPose = frame.getViewerPose(frame.manager.getReferenceSpace() );
const gl = frame.renderer.getContext();
let imageData = null;
if (viewerPose) {
const view = viewerPose.views[0];
if (view.camera) {
if (frame.binding) {
// compute the camera intrinsics from the projection matrix
// (column-major), using the standard pinhole model
let p = view.projectionMatrix;
let width = view.camera.width;
let height = view.camera.height;
intrinsics[0] = width/2 * p[0]; // fx
intrinsics[2] = (1 - p[8]) * width/2; // cx
intrinsics[4] = height/2 * p[5]; // fy
intrinsics[5] = (1 - p[9]) * height/2; // cy
intrinsics[8] = 1;
texture = frame.binding.getCameraImage(view.camera);
framebuffer = framebuffer || gl.createFramebuffer();
prevFramebuffer = gl.getParameter(gl.FRAMEBUFFER_BINDING);
gl.bindFramebuffer(gl.FRAMEBUFFER,framebuffer);
gl.framebufferTexture2D(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
gl.TEXTURE_2D, texture,0
);
pixels = new Uint8ClampedArray(width*height*4);
gl.readPixels(0,0, width, height, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
gl.bindFramebuffer(gl.FRAMEBUFFER, prevFramebuffer);
imageData = new ImageData(pixels, width, height);
if (sampling > 1) {
imageData = scaleImage(imageData);
}
this.emit('sendIntrinsics', intrinsics);
this.emit('sendImage', imageData);
}
}
}
};
},
['update'],
['sendImage','sendIntrinsics']
);
export default { XRImager: XRImager };