Jean-Yves Didier

Check on WebXR: handle session start/end events and video stream state

......@@ -61,14 +61,14 @@
"init" : {
"preconnections" : [],
"postconnections" : [
{ "destination": "video", "slot": "start", "value": []},
{ "destination": "viewer", "slot": "start", "value": []}
],
"connections" : [
{ "source": "viewer", "signal": "onRender", "destination": "display", "slot": "updatePosition"},
{ "source": "viewer", "signal": "onRender", "destination": "filters", "slot": "computeProjection"},
{ "source": "viewer", "signal": "onRender", "destination": "trigger", "slot": "triggerCamera"},
{ "source": "filters", "signal": "onSessionStarted", "destination": "video", "slot": "start"},
{ "source": "viewer", "signal": "onRender", "destination": "video", "slot": "grabFrame"},
{ "source": "viewer", "signal": "onStarted", "destination": "video", "slot": "start"},
{ "source": "filters", "signal": "updateProjection", "destination": "display", "slot": "updateProjection"}
],
......
......@@ -66,7 +66,8 @@ LiveSource = ARCS.Component.create(
};
// Error callback for the getUserMedia request: logs why the camera
// stream could not be initialized (e.g. permission denied, no device).
// NOTE(review): the two console.error calls below look like an old/new
// pair left over from a diff — the second, more detailed message
// (code + name + message) appears to supersede the first; confirm
// against the repository and remove the duplicate if so.
var errorMediaStream = function(error) {
console.error("Cannot initialize video component:", error.code);
console.error("Cannot initialize video component: " + error.code + ' ' + error.name + ' ' + error.message);
};
/**
......@@ -82,6 +83,12 @@ LiveSource = ARCS.Component.create(
.catch(errorMediaStream);
};
/**
 * Resumes playback of the underlying video element.
 * Does nothing when the video widget has not been created yet.
 * @slot
 */
this.continue = function () {
    if (!video) return;
    video.play();
};
/**
* captures a frame from the video stream.
* Emits the signal <b>onImage</b> when the frame is captured
......@@ -90,7 +97,24 @@ LiveSource = ARCS.Component.create(
* @emits onImage
*/
this.grabFrame = function () {
if (!video.srcObject)
return;
if (video.paused)
video.play();
if (video.paused || video.ended) {
console.log('paused', video.paused, 'ended', video.ended);
return;
}
if( video.srcObject.ended) {
console.log('stream has been ended');
return;
}
if (video.readyState === video.HAVE_ENOUGH_DATA) {
console.log('grab');
context.drawImage(video, 0, 0, canvas.width, canvas.height);
this.emit("onImage",context.getImageData(0, 0, canvas.width, canvas.height));
}
......@@ -179,9 +203,11 @@ VideoSource = ARCS.Component.create(
if ( context === undefined || canvas === undefined || video === undefined)
createWidgets();
if (video.paused || video.ended) {
console.log('paused', video.paused, 'ended', video.ended);
return;
}
console.log('grab');
context.drawImage(video, 0, 0, canvas.width, canvas.height);
imageData = context.getImageData(0, 0, canvas.width, canvas.height);
this.emit("onImage",imageData);
......
......@@ -20,6 +20,7 @@ XRViewer = ARCS.Component.create(
let sceneId;
let container;
let self = this;
let defaultStyle;
let _config = config || {};
......@@ -34,6 +35,8 @@ XRViewer = ARCS.Component.create(
_config.sessionConfig.domOverlay.root = rootOverlay; // || document.body;
}
let defaultDisplay = window.getComputedStyle(rootOverlay).getPropertyValue("display");
let firstFrame = true;
// scenegraph initializations
scene = new THREE.Scene();
......@@ -128,13 +131,23 @@ XRViewer = ARCS.Component.create(
};
let render = function (time, frame) {
renderer.xr.updateCamera(camera);
if (frame) {
renderer.xr.updateCamera(camera);
if (firstFrame) {
renderer.xr.getSession().addEventListener('end',() => {
firstFrame = true;
rootOverlay.style.display = defaultDisplay;
self.emit('onEnded');
});
firstFrame = false;
self.emit('onStarted');
}
//console.log(JSON.stringify(renderer.xr.getCamera().position));
let pose = frame.getViewerPose(renderer.xr.getReferenceSpace());
if (pose) {
/*if (pose) {
console.log(pose.views[0].projectionMatrix);
}
}*/
self.emit("onRender",time,renderer.xr.getCamera(),frame);
}
......@@ -144,7 +157,7 @@ XRViewer = ARCS.Component.create(
},
/** @lends XRViewer.slots */
['addScene','removeScene','start','setPose'],
['onRender']
['onRender','onEnded', 'onStarted']
);
/**
......