Actualización

This commit is contained in:
Xes
2025-04-10 12:53:50 +02:00
parent f7a0ba2b2f
commit 2001ceddea
39284 changed files with 991962 additions and 0 deletions

View File

@@ -0,0 +1,147 @@
/*
* Copyright 2016 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var EventEmitter = require('eventemitter3');
var shaka = require('shaka-player');
var Types = require('../video-type');
var Util = require('../util');
// Initial bandwidth estimate handed to Shaka's ABR logic (1 Mbit/s).
var DEFAULT_BITS_PER_SECOND = 1000000;
/**
* Supports regular video URLs (eg. mp4), as well as adaptive manifests like
* DASH (.mpd) and soon HLS (.m3u8).
*
* Events:
* load(video): When the video is loaded.
* error(message): If an error occurs.
*
* To play/pause/seek/etc, please use the underlying video element.
*/
/**
 * Creates the underlying <video> element and applies playback options.
 * @param params {Object} Player options: loop {Boolean}, volume {Number},
 *     muted {Boolean}. Missing options keep the browser defaults.
 */
function AdaptivePlayer(params) {
  this.video = document.createElement('video');
  // Loop by default.
  if (params.loop === true) {
    this.video.setAttribute('loop', true);
  }
  if (params.volume !== undefined) {
    // XXX: .setAttribute('volume', params.volume) doesn't work for some reason.
    this.video.volume = params.volume;
  }
  // Not muted by default.
  if (params.muted === true) {
    this.video.muted = params.muted;
  }
  // For FF, make sure we enable preload.
  this.video.setAttribute('preload', 'auto');
  // Enable inline video playback in iOS 10+.
  this.video.setAttribute('playsinline', true);
  // Needed so the video frames can be used as a WebGL texture cross-origin.
  this.video.setAttribute('crossorigin', 'anonymous');
}
// Inherit EventEmitter so instances can emit 'load' / 'error' / 'timeupdate'.
AdaptivePlayer.prototype = new EventEmitter();
/**
 * Loads media from a URL, choosing the playback path by file extension:
 * .m3u8 via Safari's native HLS, .mpd via Shaka (DASH), anything else as a
 * plain <video> source. Emits 'load' (video, type) on success and
 * 'error' (message) on failure.
 * @param url {String} URL of the media to load.
 */
AdaptivePlayer.prototype.load = function(url) {
  var self = this;
  // TODO(smus): Investigate whether or not differentiation is best done by
  // mimeType after all. Cursory research suggests that adaptive streaming
  // manifest mime types aren't properly supported.
  //
  // For now, make determination based on extension.
  var extension = Util.getExtension(url);
  switch (extension) {
    case 'm3u8': // HLS
      this.type = Types.HLS;
      if (Util.isSafari()) {
        this.loadVideo_(url).then(function() {
          self.emit('load', self.video, self.type);
        }).catch(this.onError_.bind(this));
      } else {
        self.onError_('HLS is only supported on Safari.');
      }
      break;
    case 'mpd': // MPEG-DASH
      this.type = Types.DASH;
      this.loadShakaVideo_(url).then(function() {
        console.log('The video has now been loaded!');
        self.emit('load', self.video, self.type);
      }).catch(this.onError_.bind(this));
      break;
    default: // A regular video, not an adaptive manifest.
      this.type = Types.VIDEO;
      this.loadVideo_(url).then(function() {
        self.emit('load', self.video, self.type);
      }).catch(this.onError_.bind(this));
      break;
  }
};
/**
 * Stops playback and releases media resources. Also tears down the Shaka
 * player when one was created for DASH content (previously leaked).
 */
AdaptivePlayer.prototype.destroy = function() {
  if (this.player) {
    // Shaka players must be destroyed explicitly to free their resources.
    this.player.destroy();
    this.player = null;
  }
  this.video.pause();
  // Remove the source and call load() to release network/decoder resources.
  // (Assigning src = '' would instead attempt to load the page URL as video.)
  this.video.removeAttribute('src');
  this.video.load();
  this.video = null;
};
/*** PRIVATE API ***/
// Logs the error locally and surfaces it to listeners via the 'error' event.
AdaptivePlayer.prototype.onError_ = function(e) {
  console.error(e);
  this.emit('error', e);
};
/**
 * Loads a URL directly into the <video> element.
 * @param url {String} Video URL.
 * @return {Promise} Resolves on 'canplaythrough', rejects on 'error'.
 */
AdaptivePlayer.prototype.loadVideo_ = function(url) {
  var self = this, video = self.video;
  return new Promise(function(resolve, reject) {
    video.src = url;
    // NOTE(review): these listeners are never removed; harmless for a single
    // load, but repeated load() calls would stack 'loadedmetadata' handlers.
    video.addEventListener('canplaythrough', resolve);
    video.addEventListener('loadedmetadata', function() {
      // Surface duration to listeners as soon as metadata is known.
      self.emit('timeupdate', {
        currentTime: video.currentTime,
        duration: video.duration
      });
    });
    video.addEventListener('error', reject);
    video.load();
  });
};
/**
 * Creates and configures the Shaka player around the <video> element.
 */
AdaptivePlayer.prototype.initShaka_ = function() {
  this.player = new shaka.Player(this.video);
  this.player.configure({
    abr: { defaultBandwidthEstimate: DEFAULT_BITS_PER_SECOND }
  });
  // Listen for error events. Bind the handler: the previous unbound
  // reference made `this.emit` inside onError_ fail at dispatch time.
  this.player.addEventListener('error', this.onError_.bind(this));
};
/**
 * Loads a DASH manifest via Shaka.
 * @param url {String} URL of the .mpd manifest.
 * @return {Promise} Resolves when the manifest is loaded; rejects when the
 *     browser is unsupported or loading fails.
 */
AdaptivePlayer.prototype.loadShakaVideo_ = function(url) {
  // Install built-in polyfills to patch browser incompatibilities.
  shaka.polyfill.installAll();
  if (!shaka.Player.isBrowserSupported()) {
    var message = 'Shaka is not supported on this browser.';
    console.error(message);
    // Return a rejected promise instead of undefined: callers chain
    // .then().catch() on the result, which previously threw a TypeError.
    return Promise.reject(new Error(message));
  }
  this.initShaka_();
  return this.player.load(url);
};
module.exports = AdaptivePlayer;

View File

@@ -0,0 +1,56 @@
/*
* Copyright 2016 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Thin wrapper over Google Analytics: sends a pageview at startup and tracks
 * VR mode changes plus time spent in each mode.
 */
function Analytics() {
  // Standard Google Analytics bootstrap snippet (defines window.ga).
  (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
  (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
  m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
  })(window,document,'script','//www.google-analytics.com/analytics.js','ga');
  ga('create', 'UA-35315454-8', 'auto');
  ga('send', 'pageview');
  this.lastModeChangeTime = window.performance.now();
  // Start in mode 0 (UNKNOWN). This must initialize this.lastMode:
  // logModeChanged reads this.lastMode, and the previous code only stored
  // the label, leaving this.lastMode undefined on the first call.
  this.lastMode = 0;
  this.lastModeLabel = Analytics.MODE_LABELS[0];
}
// Human-readable labels for each VR display mode.
Analytics.MODE_LABELS = {
  0: 'UNKNOWN',
  1: 'NORMAL',
  2: 'MAGIC_WINDOW',
  3: 'VR'
};
/**
 * Records a transition into `mode`: sends a screenview for the new mode and
 * a timing event for how long the previous mode was active.
 * @param mode {Number} One of the Analytics.MODE_LABELS keys.
 */
Analytics.prototype.logModeChanged = function(mode) {
  var modeLabel = Analytics.MODE_LABELS[mode];
  var lastModeLabel = Analytics.MODE_LABELS[this.lastMode];
  console.log('Analytics: going from mode %s to %s', lastModeLabel, modeLabel);
  ga('send', 'screenview', {
    appName: 'EmbedVR',
    screenName: modeLabel
  });
  var now = window.performance.now();
  var msSinceLastModeChange = Math.round(now - this.lastModeChangeTime);
  ga('send', 'timing', 'Time spent in mode', lastModeLabel, msSinceLastModeChange);
  this.lastModeChangeTime = now;
  this.lastMode = mode;
};
// Singleton, shared via the global scope.
window.analytics = new Analytics();

View File

@@ -0,0 +1,20 @@
/*
* Copyright 2016 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Enum identifying the two stereo eyes when rendering stereo content.
var Eyes = {
  LEFT: 1,
  RIGHT: 2
};
module.exports = Eyes;

View File

@@ -0,0 +1,403 @@
/*
* Copyright 2016 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var EventEmitter = require('eventemitter3');
var TWEEN = require('@tweenjs/tween.js');
var Util = require('../util');
// Constants for the focus/blur animation.
var NORMAL_SCALE = new THREE.Vector3(1, 1, 1);
var FOCUS_SCALE = new THREE.Vector3(1.2, 1.2, 1.2);
var FOCUS_DURATION = 200;  // ms.
// Constants for the active/inactive animation.
var INACTIVE_COLOR = new THREE.Color(1, 1, 1);
var ACTIVE_COLOR = new THREE.Color(0.8, 0, 0);
var ACTIVE_DURATION = 100;  // ms.
// Constants for opacity.
var MAX_INNER_OPACITY = 0.8;
var MAX_OUTER_OPACITY = 0.5;
// Hotspots fade between these angular distances from the view center.
var FADE_START_ANGLE_DEG = 35;
var FADE_END_ANGLE_DEG = 60;
/**
* Responsible for rectangular hot spots that the user can interact with.
*
* Specific duties:
* Adding and removing hotspots.
* Rendering the hotspots (debug mode only).
* Notifying when hotspots are interacted with.
*
* Emits the following events:
* click (id): a hotspot is clicked.
* focus (id): a hotspot is focused.
* blur (id): a hotspot is no longer hovered over.
*/
/**
 * @param worldRenderer {WorldRenderer} Owning renderer; provides the scene,
 *     camera and WebGL canvas used for picking.
 */
function HotspotRenderer(worldRenderer) {
  this.worldRenderer = worldRenderer;
  this.scene = worldRenderer.scene;
  // Note: this event must be added to document.body and not to window for it to
  // work inside iOS iframes.
  var body = document.body;
  // Bind events for hotspot interaction.
  if (!Util.isMobile()) {
    // Only enable mouse events on desktop.
    body.addEventListener('mousedown', this.onMouseDown_.bind(this), false);
    body.addEventListener('mousemove', this.onMouseMove_.bind(this), false);
    body.addEventListener('mouseup', this.onMouseUp_.bind(this), false);
  }
  body.addEventListener('touchstart', this.onTouchStart_.bind(this), false);
  body.addEventListener('touchend', this.onTouchEnd_.bind(this), false);
  // Add a placeholder for hotspots.
  this.hotspotRoot = new THREE.Object3D();
  // Align the center with the center of the camera too.
  this.hotspotRoot.rotation.y = Math.PI / 2;
  this.scene.add(this.hotspotRoot);
  // All hotspot IDs, mapping id -> THREE.Object3D.
  this.hotspots = {};
  // Currently selected hotspots (id -> true).
  this.selectedHotspots = {};
  // Hotspots that the last touchstart / mousedown event happened for.
  this.downHotspots = {};
  // For raycasting, in normalized device coordinates. Initialize mouse to be
  // off screen initially.
  this.pointer = new THREE.Vector2(1, 1);
  this.raycaster = new THREE.Raycaster();
}
// Inherit EventEmitter so click/focus/blur events can be emitted.
HotspotRenderer.prototype = new EventEmitter();
/**
* @param pitch {Number} The latitude of center, specified in degrees, between
* -90 and 90, with 0 at the horizon.
* @param yaw {Number} The longitude of center, specified in degrees, between
* -180 and 180, with 0 at the image center.
* @param radius {Number} The radius of the hotspot, specified in meters.
* @param distance {Number} The distance of the hotspot from camera, specified
* in meters.
* @param hotspotId {String} The ID of the hotspot.
*/
HotspotRenderer.prototype.add = function(pitch, yaw, radius, distance, id) {
  // If a hotspot already exists with this ID, stop.
  if (this.hotspots[id]) {
    // TODO: Proper error reporting.
    console.error('Attempt to add hotspot with existing id %s.', id);
    return;
  }
  var hotspot = this.createHotspot_(radius, distance);
  hotspot.name = id;
  // Position the hotspot based on the pitch and yaw specified. The mesh is
  // created at (0, 0, -distance); rotating that offset by the pitch/yaw
  // quaternion places it on the viewing sphere.
  var quat = new THREE.Quaternion();
  quat.setFromEuler(new THREE.Euler(THREE.Math.degToRad(pitch), THREE.Math.degToRad(yaw), 0, 'ZYX'));
  hotspot.position.applyQuaternion(quat);
  // Orient the hotspot to face the sphere center (the camera position).
  hotspot.lookAt(new THREE.Vector3());
  this.hotspotRoot.add(hotspot);
  this.hotspots[id] = hotspot;
}
/**
* Removes a hotspot based on the ID.
*
* @param ID {String} Identifier of the hotspot to be removed.
*/
/**
 * Removes a hotspot and all bookkeeping for it, then emits 'blur' so
 * listeners can drop any hover state tied to it.
 * @param id {String} Identifier of the hotspot to be removed.
 */
HotspotRenderer.prototype.remove = function(id) {
  var hotspot = this.hotspots[id];
  if (!hotspot) {
    // TODO: Proper error reporting.
    console.error('Attempt to remove non-existing hotspot with id %s.', id);
    return;
  }
  // Detach the mesh from the scene graph.
  this.hotspotRoot.remove(hotspot);
  // Drop every reference to this id, including selection/press state.
  delete this.selectedHotspots[id];
  delete this.downHotspots[id];
  delete this.hotspots[id];
  this.emit('blur', id);
};
/**
* Clears all hotspots from the pano. Often called when changing panos.
*/
/**
 * Clears all hotspots from the pano. Often called when changing panos.
 */
HotspotRenderer.prototype.clearAll = function() {
  // Snapshot the ids first, since remove() mutates this.hotspots.
  Object.keys(this.hotspots).forEach(function(id) {
    this.remove(id);
  }, this);
};
/**
 * @return {Number} The number of hotspots currently registered.
 */
HotspotRenderer.prototype.getCount = function() {
  // Ids are the keys of the hotspots map; no manual counting loop needed.
  return Object.keys(this.hotspots).length;
};
/**
 * Raycasts from the current pointer (or the view center in VR mode) against
 * all hotspots and emits 'focus'/'blur' as the selected set changes.
 * Called once per frame.
 * @param camera {THREE.Camera} Camera used to build the picking ray.
 */
HotspotRenderer.prototype.update = function(camera) {
  if (this.worldRenderer.isVRMode()) {
    // No cursor in VR: pick from the center of the view (gaze-based).
    this.pointer.set(0, 0);
  }
  // Update the picking ray with the camera and mouse position.
  this.raycaster.setFromCamera(this.pointer, camera);
  // Fade hotspots out if they are really far from center to avoid overly
  // distorted visuals.
  this.fadeOffCenterHotspots_(camera);
  var hotspots = this.hotspotRoot.children;
  // Go through all hotspots to see if they are currently selected.
  for (var i = 0; i < hotspots.length; i++) {
    var hotspot = hotspots[i];
    var id = hotspot.name;
    // Check if hotspot is intersected with the picking ray. The inner/outer
    // meshes are the hotspot's children, so test those.
    var intersects = this.raycaster.intersectObjects(hotspot.children);
    var isIntersected = (intersects.length > 0);
    // If newly selected, emit a focus event.
    if (isIntersected && !this.selectedHotspots[id]) {
      this.emit('focus', id);
      this.focus_(id);
    }
    // If no longer selected, emit a blur event.
    if (!isIntersected && this.selectedHotspots[id]) {
      this.emit('blur', id);
      this.blur_(id);
    }
    // Update the set of selected hotspots.
    if (isIntersected) {
      this.selectedHotspots[id] = true;
    } else {
      delete this.selectedHotspots[id];
    }
  }
};
/**
* Toggle whether or not hotspots are visible.
*/
HotspotRenderer.prototype.setVisibility = function(isVisible) {
  // Toggling the root's visibility hides/shows every hotspot at once.
  this.hotspotRoot.visible = isVisible;
};
/**
 * Handles touchstart: refreshes the selection from the touch position and
 * records which hotspots the touch began on, so onTouchEnd_ can decide
 * whether to emit a click.
 */
HotspotRenderer.prototype.onTouchStart_ = function(e) {
  // In VR mode, don't touch the pointer position.
  if (!this.worldRenderer.isVRMode()) {
    this.updateTouch_(e);
  }
  // Force a camera update to see if any hotspots were selected.
  this.update(this.worldRenderer.camera);
  this.downHotspots = {};
  for (var id in this.selectedHotspots) {
    this.downHotspots[id] = true;
    this.down_(id);
  }
  // NOTE(review): returning false from an addEventListener handler has no
  // effect (unlike preventDefault()); kept for byte-compatibility.
  return false;
};
/**
 * Handles touchend: emits 'click' for each hotspot that the touch both
 * started and ended on, or an id-less 'click' when no hotspot was pressed.
 */
HotspotRenderer.prototype.onTouchEnd_ = function(e) {
  // If no hotspots are pressed, emit an empty click event.
  if (Util.isEmptyObject(this.downHotspots)) {
    this.emit('click');
    return;
  }
  // Only emit a click if the finger was down on the same hotspot before.
  for (var id in this.downHotspots) {
    this.emit('click', id);
    this.up_(id);
    // Prevent the synthesized mouse events that would follow the touch.
    e.preventDefault();
  }
};
/**
 * Converts the first touch point into normalized device coordinates (-1..1)
 * for raycasting.
 */
HotspotRenderer.prototype.updateTouch_ = function(e) {
  var touch = e.touches[0];
  var size = this.getSize_();
  this.pointer.set(
      (touch.clientX / size.width) * 2 - 1,
      -(touch.clientY / size.height) * 2 + 1);
};
/**
 * Handles mousedown: records which hotspots the press started on, so a
 * click is only emitted on mouseup over one of these.
 */
HotspotRenderer.prototype.onMouseDown_ = function(e) {
  this.updateMouse_(e);
  var pressed = {};
  for (var id in this.selectedHotspots) {
    pressed[id] = true;
    this.down_(id);
  }
  this.downHotspots = pressed;
};
// Tracks the cursor so the per-frame update() can raycast against hotspots.
HotspotRenderer.prototype.onMouseMove_ = function(event) {
  this.updateMouse_(event);
};
/**
 * Handles mouseup: emits 'click' for hotspots that are both currently
 * selected and were pressed on mousedown, or an id-less 'click' when no
 * hotspot was pressed.
 */
HotspotRenderer.prototype.onMouseUp_ = function(e) {
  this.updateMouse_(e);
  // If no hotspots are pressed, emit an empty click event.
  if (Util.isEmptyObject(this.downHotspots)) {
    this.emit('click');
    return;
  }
  // Only emit a click if the mouse was down on the same hotspot before.
  for (var id in this.selectedHotspots) {
    if (id in this.downHotspots) {
      this.emit('click', id);
      this.up_(id);
    }
  }
};
/**
 * Converts the cursor position into normalized device coordinates (-1..1)
 * for raycasting.
 */
HotspotRenderer.prototype.updateMouse_ = function(e) {
  var size = this.getSize_();
  this.pointer.set(
      (e.clientX / size.width) * 2 - 1,
      -(e.clientY / size.height) * 2 + 1);
};
/**
 * @return {Object} The renderer's canvas size ({width, height}), used to
 *     normalize pointer coordinates. (Removed an unused `canvas` local.)
 */
HotspotRenderer.prototype.getSize_ = function() {
  return this.worldRenderer.renderer.getSize();
};
/**
 * Builds the hotspot mesh: a translucent inner disc plus an outer ring,
 * grouped under an Object3D positioned `distance` meters down -Z.
 * @param radius {Number} Hotspot radius in meters.
 * @param distance {Number} Distance from the camera in meters.
 * @return {THREE.Object3D} The assembled hotspot.
 */
HotspotRenderer.prototype.createHotspot_ = function(radius, distance) {
  var innerGeometry = new THREE.CircleGeometry(radius, 32);
  // depthTest is disabled so hotspots draw on top of the pano sphere.
  var innerMaterial = new THREE.MeshBasicMaterial({
    color: 0xffffff, side: THREE.DoubleSide, transparent: true,
    opacity: MAX_INNER_OPACITY, depthTest: false
  });
  var inner = new THREE.Mesh(innerGeometry, innerMaterial);
  inner.name = 'inner';
  var outerMaterial = new THREE.MeshBasicMaterial({
    color: 0xffffff, side: THREE.DoubleSide, transparent: true,
    opacity: MAX_OUTER_OPACITY, depthTest: false
  });
  var outerGeometry = new THREE.RingGeometry(radius * 0.85, radius, 32);
  var outer = new THREE.Mesh(outerGeometry, outerMaterial);
  outer.name = 'outer';
  // Position at the extreme end of the sphere.
  var hotspot = new THREE.Object3D();
  hotspot.position.z = -distance;
  hotspot.scale.copy(NORMAL_SCALE);
  hotspot.add(inner);
  hotspot.add(outer);
  return hotspot;
};
/**
* Large aspect ratios tend to cause visually jarring distortions on the sides.
* Here we fade hotspots out to avoid them.
*/
/**
 * Large aspect ratios tend to cause visually jarring distortions on the
 * sides. Here we fade hotspots out to avoid them: full opacity inside
 * FADE_START_ANGLE_DEG of the view direction, zero beyond
 * FADE_END_ANGLE_DEG, and linearly interpolated in between.
 * @param camera {THREE.Camera} Current camera (and parent) orientation.
 */
HotspotRenderer.prototype.fadeOffCenterHotspots_ = function(camera) {
  var lookAt = new THREE.Vector3(1, 0, 0);
  lookAt.applyQuaternion(camera.quaternion);
  // Take into account the camera parent too.
  lookAt.applyQuaternion(camera.parent.quaternion);
  // Go through each hotspot. Calculate how far off center it is.
  for (var id in this.hotspots) {
    var hotspot = this.hotspots[id];
    var angle = hotspot.position.angleTo(lookAt);
    var angleDeg = THREE.Math.radToDeg(angle);
    // (Removed an unused `isVisible` local that was computed but never read.)
    var opacity;
    if (angleDeg < FADE_START_ANGLE_DEG) {
      opacity = 1;
    } else if (angleDeg > FADE_END_ANGLE_DEG) {
      opacity = 0;
    } else {
      // We are in the case START < angle < END. Linearly interpolate.
      var range = FADE_END_ANGLE_DEG - FADE_START_ANGLE_DEG;
      var value = FADE_END_ANGLE_DEG - angleDeg;
      opacity = value / range;
    }
    this.setOpacity_(id, opacity);
  }
};
/**
 * Visually focuses a hotspot (scale-up tween). In VR mode, also arms a
 * gaze-click timer that emits 'click' after 1.2s of sustained focus.
 * @param id {String} Hotspot identifier.
 */
HotspotRenderer.prototype.focus_ = function(id) {
  var hotspot = this.hotspots[id];
  // Tween scale of hotspot.
  this.tween = new TWEEN.Tween(hotspot.scale).to(FOCUS_SCALE, FOCUS_DURATION)
      .easing(TWEEN.Easing.Quadratic.InOut)
      .start();
  if (this.worldRenderer.isVRMode()) {
    // Capture `this`: the previous plain callback ran with the wrong `this`,
    // so this.emit was undefined and the gaze click never fired.
    var self = this;
    this.timeForHospotClick = setTimeout(function() {
      self.emit('click', id);
    }, 1200);
  }
};
/**
 * Visually blurs a hotspot: tweens it back to its resting scale and cancels
 * any pending gaze-click armed by focus_().
 * @param id {String} Hotspot identifier.
 */
HotspotRenderer.prototype.blur_ = function(id) {
  var target = this.hotspots[id];
  this.tween = new TWEEN.Tween(target.scale)
      .to(NORMAL_SCALE, FOCUS_DURATION)
      .easing(TWEEN.Easing.Quadratic.InOut)
      .start();
  if (this.timeForHospotClick) {
    clearTimeout(this.timeForHospotClick);
  }
};
/**
 * Marks a hotspot active: tweens the inner disc to the active (red) color.
 * @param id {String} Hotspot identifier.
 */
HotspotRenderer.prototype.down_ = function(id) {
  var inner = this.hotspots[id].getObjectByName('inner');
  this.tween = new TWEEN.Tween(inner.material.color)
      .to(ACTIVE_COLOR, ACTIVE_DURATION)
      .start();
};
/**
 * Marks a hotspot inactive: tweens the inner disc back to white.
 * @param id {String} Hotspot identifier.
 */
HotspotRenderer.prototype.up_ = function(id) {
  var inner = this.hotspots[id].getObjectByName('inner');
  this.tween = new TWEEN.Tween(inner.material.color)
      .to(INACTIVE_COLOR, ACTIVE_DURATION)
      .start();
};
/**
 * Sets a hotspot's opacity, scaling each ring relative to its own maximum.
 * @param id {String} Hotspot identifier.
 * @param opacity {Number} 0..1 fade factor.
 */
HotspotRenderer.prototype.setOpacity_ = function(id, opacity) {
  var hotspot = this.hotspots[id];
  hotspot.getObjectByName('inner').material.opacity = opacity * MAX_INNER_OPACITY;
  hotspot.getObjectByName('outer').material.opacity = opacity * MAX_OUTER_OPACITY;
};
module.exports = HotspotRenderer;

View File

@@ -0,0 +1,68 @@
/*
* Copyright 2016 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var EventEmitter = require('eventemitter3');
var Message = require('../message');
var Util = require('../util');
/**
* Sits in an embedded iframe, receiving messages from a containing
* iFrame. This facilitates an API which provides the following features:
*
* Playing and pausing content.
* Adding hotspots.
* Sending messages back to the containing iframe when hotspot is clicked
* Sending analytics events to containing iframe.
*
* Note: this script used to also respond to synthetic devicemotion events, but
* no longer does so. This is because as of iOS 9.2, Safari disallows listening
* for devicemotion events within cross-device iframes. To work around this, the
* webvr-polyfill responds to the postMessage event containing devicemotion
* information (sent by the iframe-message-sender in the VR View API).
*/
// Installs the window 'message' listener that feeds onMessage_.
function IFrameMessageReceiver() {
  window.addEventListener('message', this.onMessage_.bind(this), false);
}
// Inherit EventEmitter so API messages can be re-emitted by their type.
IFrameMessageReceiver.prototype = new EventEmitter();
/**
 * Dispatches an incoming postMessage: known VR View message types are
 * re-emitted on this receiver; anything else is ignored.
 * @param event {MessageEvent} The raw window message event.
 */
IFrameMessageReceiver.prototype.onMessage_ = function(event) {
  if (Util.isDebug()) {
    console.log('onMessage_', event);
  }
  var message = event.data;
  // window 'message' events can come from any script (extensions, dev
  // tooling, other embeds). Ignore anything not shaped like a VR View API
  // message instead of throwing on .toLowerCase() of a missing type.
  if (!message || typeof message.type !== 'string') {
    return;
  }
  var type = message.type.toLowerCase();
  var data = message.data;
  switch (type) {
    case Message.SET_CONTENT:
    case Message.SET_VOLUME:
    case Message.MUTED:
    case Message.ADD_HOTSPOT:
    case Message.PLAY:
    case Message.PAUSE:
    case Message.SET_CURRENT_TIME:
    case Message.GET_POSITION:
    case Message.SET_FULLSCREEN:
      this.emit(type, data);
      break;
    default:
      if (Util.isDebug()) {
        console.warn('Got unknown message of type %s from %s', message.type, message.origin);
      }
  }
};
module.exports = IFrameMessageReceiver;

View File

@@ -0,0 +1,54 @@
/*
* Copyright 2016 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Shows a 2D loading indicator while various pieces of EmbedVR load.
*/
// Builds the overlay, attaches it to the document and shows it immediately.
function LoadingIndicator() {
  this.el = this.build_();
  document.body.appendChild(this.el);
  this.show();
}
/**
 * Builds the loading overlay: a full-screen light-gray layer with a
 * centered spinner image.
 * @return {Element} The overlay element (not yet attached to the DOM).
 */
LoadingIndicator.prototype.build_ = function() {
  var overlay = document.createElement('div');
  // Distinct style handles per element: the original redeclared `var s`,
  // which is legal but shadow-prone and flagged by linters.
  var overlayStyle = overlay.style;
  overlayStyle.position = 'fixed';
  overlayStyle.top = 0;
  overlayStyle.left = 0;
  overlayStyle.width = '100%';
  overlayStyle.height = '100%';
  overlayStyle.background = '#eee';
  var img = document.createElement('img');
  img.src = 'images/loading.gif';
  // Center the spinner within the overlay.
  var imgStyle = img.style;
  imgStyle.position = 'absolute';
  imgStyle.top = '50%';
  imgStyle.left = '50%';
  imgStyle.transform = 'translate(-50%, -50%)';
  overlay.appendChild(img);
  return overlay;
};
// Hides the loading overlay (called once the scene has loaded).
LoadingIndicator.prototype.hide = function() {
  this.el.style.display = 'none';
};
// Shows the loading overlay.
LoadingIndicator.prototype.show = function() {
  this.el.style.display = 'block';
};
module.exports = LoadingIndicator;

View File

@@ -0,0 +1,369 @@
/*
* Copyright 2016 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Initialize the loading indicator as quickly as possible to give the user
// immediate feedback.
var LoadingIndicator = require('./loading-indicator');
var loadIndicator = new LoadingIndicator();
var ES6Promise = require('es6-promise');
// Polyfill ES6 promises for IE.
ES6Promise.polyfill();
var IFrameMessageReceiver = require('./iframe-message-receiver');
var Message = require('../message');
var SceneInfo = require('./scene-info');
// NOTE(review): deep path into node_modules is fragile; consider the
// package's main entry instead.
var Stats = require('../../node_modules/stats-js/build/stats.min');
var Util = require('../util');
var WebVRPolyfill = require('webvr-polyfill');
var WorldRenderer = require('./world-renderer');
// Route API messages from the embedding page to their handlers below.
var receiver = new IFrameMessageReceiver();
receiver.on(Message.PLAY, onPlayRequest);
receiver.on(Message.PAUSE, onPauseRequest);
receiver.on(Message.ADD_HOTSPOT, onAddHotspot);
receiver.on(Message.SET_CONTENT, onSetContent);
receiver.on(Message.SET_VOLUME, onSetVolume);
receiver.on(Message.MUTED, onMuted);
receiver.on(Message.SET_CURRENT_TIME, onUpdateCurrentTime);
receiver.on(Message.GET_POSITION, onGetPosition);
receiver.on(Message.SET_FULLSCREEN, onSetFullscreen);
window.addEventListener('load', onLoad);
var stats = new Stats();
// Scene described by this page's GET parameters.
var scene = SceneInfo.loadFromGetParams();
var worldRenderer = new WorldRenderer(scene);
worldRenderer.on('error', onRenderError);
worldRenderer.on('load', onRenderLoad);
worldRenderer.on('modechange', onModeChange);
worldRenderer.on('ended', onEnded);
worldRenderer.on('play', onPlay);
worldRenderer.hotspotRenderer.on('click', onHotspotClick);
// Exposed for debugging from the console.
window.worldRenderer = worldRenderer;
// Ensure the 'ready' message is sent to the embedding page only once.
var isReadySent = false;
var volume = 0;
/**
 * Window load handler: verifies WebGL, loads the scene, and starts the
 * render loop.
 */
function onLoad() {
  if (!Util.isWebGLEnabled()) {
    showError('WebGL not supported.');
    return;
  }
  // Load the scene.
  worldRenderer.setScene(scene);
  if (scene.isDebug) {
    // Show stats.
    showStats();
  }
  if (scene.isYawOnly) {
    // Assign explicitly on window: the original bare `WebVRConfig = ...`
    // relied on an implicit global (and would throw in strict mode).
    window.WebVRConfig = window.WebVRConfig || {};
    window.WebVRConfig.YAW_ONLY = true;
  }
  requestAnimationFrame(loop);
}
// One-shot tap handler for mobile: starts playback, then removes itself so
// subsequent taps don't call play() again.
function onVideoTap() {
  worldRenderer.videoProxy.play();
  hidePlayButton();
  document.body.removeEventListener('touchend', onVideoTap);
}
/**
 * Called when the WorldRenderer finished loading a scene. Wires up video
 * playback (autoplay on desktop, tap-to-play on mobile), hides the loading
 * indicator, optionally starts the autopan, and sends 'ready' to the
 * embedding page exactly once.
 * @param event {Object} Load event; carries .videoElement for video scenes.
 */
function onRenderLoad(event) {
  if (event.videoElement) {
    // (Removed an unused local `scene` that shadowed the module-level scene.)
    // On mobile, tell the user they need to tap to start. Otherwise, autoplay.
    if (Util.isMobile()) {
      // Tell user to tap to start.
      showPlayButton();
      document.body.addEventListener('touchend', onVideoTap);
    } else {
      event.videoElement.play();
    }
    // Attach to pause and play events, to notify the API.
    event.videoElement.addEventListener('pause', onPause);
    event.videoElement.addEventListener('play', onPlay);
    event.videoElement.addEventListener('timeupdate', onGetCurrentTime);
    event.videoElement.addEventListener('ended', onEnded);
  }
  // Hide loading indicator.
  loadIndicator.hide();
  // Autopan only on desktop, for photos only, and only if autopan is enabled.
  if (!Util.isMobile() && !worldRenderer.sceneInfo.video && !worldRenderer.sceneInfo.isAutopanOff) {
    worldRenderer.autopan();
  }
  // Notify the API that we are ready, but only do this once.
  if (!isReadySent) {
    if (event.videoElement) {
      Util.sendParentMessage({
        type: 'ready',
        data: {
          duration: event.videoElement.duration
        }
      });
    } else {
      Util.sendParentMessage({
        type: 'ready'
      });
    }
    isReadySent = true;
  }
}
/**
 * API handler: starts video playback. Reports an error when the current
 * scene has no video.
 */
function onPlayRequest() {
  if (!worldRenderer.videoProxy) {
    // Fixed copy/paste: this message previously said "pause".
    onApiError('Attempt to play, but no video found.');
    return;
  }
  worldRenderer.videoProxy.play();
}
// API handler: pauses video playback, or reports an error for photo scenes.
function onPauseRequest() {
  var proxy = worldRenderer.videoProxy;
  if (!proxy) {
    onApiError('Attempt to pause, but no video found.');
    return;
  }
  proxy.pause();
}
/**
 * API handler: adds a hotspot from the message payload. Angles and sizes
 * arrive as strings and are parsed to floats here.
 * @param e {Object} Payload with pitch, yaw, radius, distance and id.
 */
function onAddHotspot(e) {
  if (Util.isDebug()) {
    console.log('onAddHotspot', e);
  }
  // TODO: Implement some validation?
  worldRenderer.hotspotRenderer.add(
      parseFloat(e.pitch),
      parseFloat(e.yaw),
      parseFloat(e.radius),
      parseFloat(e.distance),
      e.id);
}
/**
 * API handler: replaces the current scene with the one described by
 * e.contentInfo — clears hotspots, fades to black, swaps the scene, updates
 * the URL and fades back in.
 */
function onSetContent(e) {
  if (Util.isDebug()) {
    console.log('onSetContent', e);
  }
  // Remove all of the hotspots.
  worldRenderer.hotspotRenderer.clearAll();
  // Fade to black.
  worldRenderer.sphereRenderer.setOpacity(0, 500).then(function() {
    // Then load the new scene.
    var scene = SceneInfo.loadFromAPIParams(e.contentInfo);
    worldRenderer.destroy();
    // Update the URL to reflect the new scene. This is important particularly
    // on iOS where we use a fake fullscreen mode.
    var url = scene.getCurrentUrl();
    window.history.pushState(null, 'VR View', url);
    // And set the new scene.
    return worldRenderer.setScene(scene);
  }).then(function() {
    // Then fade the scene back in.
    worldRenderer.sphereRenderer.setOpacity(1, 500);
  });
  // NOTE(review): this chain has no .catch(); a failing setScene would
  // reject silently — consider routing failures to onApiError.
}
/**
 * API handler: sets video volume and echoes a 'volumechange' back to the
 * embedding page.
 * @param e {Object} Payload with volumeLevel (0..1).
 */
function onSetVolume(e) {
  // Only work for video. If there's no video, send back an error.
  if (!worldRenderer.videoProxy) {
    onApiError('Attempt to set volume, but no video found.');
    return;
  }
  var level = e.volumeLevel;
  worldRenderer.videoProxy.setVolume(level);
  volume = level;
  Util.sendParentMessage({type: 'volumechange', data: level});
}
/**
 * API handler: mutes/unmutes the video and echoes a 'muted' message back to
 * the embedding page.
 * @param e {Object} Payload with muteState {Boolean}.
 */
function onMuted(e) {
  // Only work for video. If there's no video, send back an error.
  if (!worldRenderer.videoProxy) {
    onApiError('Attempt to mute, but no video found.');
    return;
  }
  var state = e.muteState;
  worldRenderer.videoProxy.mute(state);
  Util.sendParentMessage({type: 'muted', data: state});
}
/**
 * API handler: seeks the video to `time` and immediately reports the new
 * position back to the embedding page.
 * @param time {Number} Target playback position in seconds.
 */
function onUpdateCurrentTime(time) {
  if (!worldRenderer.videoProxy) {
    // Fixed copy/paste: this message previously said "pause".
    onApiError('Attempt to set current time, but no video found.');
    return;
  }
  worldRenderer.videoProxy.setCurrentTime(time);
  onGetCurrentTime();
}
// Forwards the video's current playback position to the embedding page.
function onGetCurrentTime() {
  Util.sendParentMessage({
    type: 'timeupdate',
    data: worldRenderer.videoProxy.getCurrentTime()
  });
}
// API handler: toggles fullscreen via the WebVR manager.
function onSetFullscreen() {
  if (!worldRenderer.videoProxy) {
    onApiError('Attempt to set fullscreen, but no video found.');
    return;
  }
  // NOTE(review): reaches into the manager's private onFSClick_ method —
  // fragile if the manager's API changes.
  worldRenderer.manager.onFSClick_();
}
// Logs an API error locally and forwards it to the embedding page.
function onApiError(message) {
  console.error(message);
  Util.sendParentMessage({type: 'error', data: {message: message}});
}
// Notifies the embedding page that the VR display mode changed.
function onModeChange(mode) {
  Util.sendParentMessage({type: 'modechange', data: {mode: mode}});
}
// Notifies the embedding page that a hotspot was clicked.
function onHotspotClick(id) {
  Util.sendParentMessage({type: 'click', data: {id: id}});
}
// Playback state notifications for the embedding page.
function onPlay() {
  Util.sendParentMessage({type: 'paused', data: false});
}
function onPause() {
  Util.sendParentMessage({type: 'paused', data: true});
}
function onEnded() {
  Util.sendParentMessage({type: 'ended', data: true});
}
// Prefix errors with their origin before displaying them to the user.
function onSceneError(message) {
  showError('Loader: ' + message);
}
function onRenderError(message) {
  showError('Render: ' + message);
}
/**
 * Shows the error overlay with the given message. `message` may contain
 * user-supplied text, so it is assigned via textContent — the browser treats
 * it as plain text and markup cannot be injected (the previous
 * innerHTML + encodeURI approach also visibly mangled legitimate characters).
 * @param message {String} Error text to display.
 */
function showError(message) {
  // Hide loading indicator.
  loadIndicator.hide();
  var error = document.querySelector('#error');
  error.classList.add('visible');
  error.querySelector('.message').textContent = message;
  error.querySelector('.title').textContent = 'Error';
}
// Hides the error overlay.
function hideError() {
  document.querySelector('#error').classList.remove('visible');
}
// Shows the mobile tap-to-play overlay.
function showPlayButton() {
  document.querySelector('#play-overlay').classList.add('visible');
}
// Hides the mobile tap-to-play overlay.
function hidePlayButton() {
  document.querySelector('#play-overlay').classList.remove('visible');
}
// Attaches the FPS meter to the bottom-left corner (debug mode only).
function showStats() {
  stats.setMode(0); // 0: fps, 1: ms
  var s = stats.domElement.style;
  s.position = 'absolute';
  s.left = '0px';
  s.bottom = '0px';
  document.body.appendChild(stats.domElement);
}
/**
 * Main render loop. Reschedules itself via the VRDisplay's RAF when a
 * display is present, otherwise the window RAF.
 * @param time {Number} RAF timestamp in milliseconds.
 */
function loop(time) {
  // Use the VRDisplay RAF if it is present.
  if (worldRenderer.vrDisplay) {
    worldRenderer.vrDisplay.requestAnimationFrame(loop);
  } else {
    requestAnimationFrame(loop);
  }
  stats.begin();
  // Update the video if needed.
  if (worldRenderer.videoProxy) {
    worldRenderer.videoProxy.update(time);
  }
  worldRenderer.render(time);
  worldRenderer.submitFrame();
  stats.end();
}
// API handler: reports the camera's yaw/pitch (in degrees) to the
// embedding page.
function onGetPosition() {
  var RAD_TO_DEG = 180 / Math.PI;
  Util.sendParentMessage({
    type: 'getposition',
    data: {
      Yaw: worldRenderer.camera.rotation.y * RAD_TO_DEG,
      Pitch: worldRenderer.camera.rotation.x * RAD_TO_DEG
    }
  });
}

View File

@@ -0,0 +1,41 @@
/*
* Copyright 2016 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
function ReticleRenderer(camera) {
this.camera = camera;
this.reticle = this.createReticle_();
// In front of the hotspot itself, which is at r=0.99.
this.reticle.position.z = -0.97;
camera.add(this.reticle);
this.setVisibility(false);
}
/**
 * Shows or hides the reticle.
 * @param {Boolean} isVisible Whether the reticle should be drawn.
 */
ReticleRenderer.prototype.setVisibility = function(isVisible) {
  // TODO: Tween the transition.
  this.reticle.visible = isVisible;
};
/**
 * Builds the reticle mesh: a thin black torus.
 * @return {THREE.Mesh} The reticle mesh.
 * @private
 */
ReticleRenderer.prototype.createReticle_ = function() {
  // Make a torus.
  var torusGeometry = new THREE.TorusGeometry(0.02, 0.005, 10, 20);
  var blackMaterial = new THREE.MeshBasicMaterial({color: 0x000000});
  return new THREE.Mesh(torusGeometry, blackMaterial);
};
module.exports = ReticleRenderer;

View File

@@ -0,0 +1,125 @@
/*
* Copyright 2016 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var Util = require('../util');
// Maps SceneInfo's camelCase property names to the underscore_style keys
// used in query-string and embed-API parameters.
var CAMEL_TO_UNDERSCORE = {
  video: 'video',
  image: 'image',
  preview: 'preview',
  loop: 'loop',
  volume: 'volume',
  muted: 'muted',
  isStereo: 'is_stereo',
  defaultYaw: 'default_yaw',
  isYawOnly: 'is_yaw_only',
  isDebug: 'is_debug',
  isVROff: 'is_vr_off',
  isAutopanOff: 'is_autopan_off',
  hideFullscreenButton: 'hide_fullscreen_button'
};
/**
 * Contains all information about a given scene.
 *
 * @param {Object=} opt_params Raw scene parameters (camelCase keys).
 */
function SceneInfo(opt_params) {
  var params = opt_params || {};
  // Bug fix: read player settings from the defaulted `params` object, not
  // from `opt_params`, which may be undefined and would throw.
  params.player = {
    loop: params.loop,
    volume: params.volume,
    muted: params.muted
  };
  this.image = params.image !== undefined ? encodeURI(params.image) : undefined;
  this.preview = params.preview !== undefined ? encodeURI(params.preview) : undefined;
  this.video = params.video !== undefined ? encodeURI(params.video) : undefined;
  this.defaultYaw = THREE.Math.degToRad(params.defaultYaw || 0);
  this.isStereo = Util.parseBoolean(params.isStereo);
  this.isYawOnly = Util.parseBoolean(params.isYawOnly);
  this.isDebug = Util.parseBoolean(params.isDebug);
  this.isVROff = Util.parseBoolean(params.isVROff);
  this.isAutopanOff = Util.parseBoolean(params.isAutopanOff);
  this.loop = Util.parseBoolean(params.player.loop);
  // Bug fix: only missing/empty values fall back to the default of 1.
  // A numeric volume of 0 is a valid (silent) setting and previously
  // coerced to 1 via the truthiness test.
  var volume = params.player.volume;
  this.volume = (volume === undefined || volume === null || volume === '') ?
      1 : parseFloat(volume);
  this.muted = Util.parseBoolean(params.player.muted);
  this.hideFullscreenButton = Util.parseBoolean(params.hideFullscreenButton);
}
/**
 * Builds a SceneInfo from the page's GET parameters, falling back to
 * window.WebVRConfig.PLAYER values when a query parameter is absent.
 * @return {SceneInfo} The parsed scene (may be invalid; check isValid()).
 */
SceneInfo.loadFromGetParams = function() {
  var params = {};
  Object.keys(CAMEL_TO_UNDERSCORE).forEach(function(camelCase) {
    var underscore = CAMEL_TO_UNDERSCORE[camelCase];
    var fromQuery = Util.getQueryParameter(underscore);
    var fromConfig = (window.WebVRConfig && window.WebVRConfig.PLAYER) ?
        window.WebVRConfig.PLAYER[underscore] : "";
    params[camelCase] = fromQuery || fromConfig;
  });
  var scene = new SceneInfo(params);
  if (!scene.isValid()) {
    console.warn('Invalid scene: %s', scene.errorMessage);
  }
  return scene;
};
/**
 * Builds a SceneInfo from API-style (underscore-keyed) parameters.
 * @param {Object} underscoreParams Parameter map with underscore keys.
 * @return {SceneInfo} The parsed scene (may be invalid; check isValid()).
 */
SceneInfo.loadFromAPIParams = function(underscoreParams) {
  var params = {};
  Object.keys(CAMEL_TO_UNDERSCORE).forEach(function(camelCase) {
    var value = underscoreParams[CAMEL_TO_UNDERSCORE[camelCase]];
    if (value) {
      params[camelCase] = value;
    }
  });
  var scene = new SceneInfo(params);
  if (!scene.isValid()) {
    console.warn('Invalid scene: %s', scene.errorMessage);
  }
  return scene;
};
/**
 * Validates this scene, recording a human-readable reason on failure.
 * @return {Boolean} True iff the scene has a usable image or video source.
 */
SceneInfo.prototype.isValid = function() {
  // Either it's an image or a video.
  var hasSource = this.image || this.video;
  if (!hasSource) {
    this.errorMessage = 'Either image or video URL must be specified.';
    return false;
  }
  if (this.image && !this.isValidImage_(this.image)) {
    this.errorMessage = 'Invalid image URL: ' + this.image;
    return false;
  }
  this.errorMessage = null;
  return true;
};
/**
 * Generates a URL to reflect this scene.
 * @return {String} A shareable URL encoding every defined scene parameter.
 */
SceneInfo.prototype.getCurrentUrl = function() {
  var url = location.protocol + '//' + location.host + location.pathname + '?';
  Object.keys(CAMEL_TO_UNDERSCORE).forEach(function(camelCase) {
    var value = this[camelCase];
    if (value !== undefined) {
      url += CAMEL_TO_UNDERSCORE[camelCase] + '=' + value + '&';
    }
  }, this);
  // Chop off the trailing ampersand.
  return url.slice(0, -1);
};
/**
 * Placeholder image-URL validation hook; currently accepts every URL.
 * @param {String} imageUrl URL to check (unused).
 * @return {Boolean} Always true.
 * @private
 */
SceneInfo.prototype.isValidImage_ = function(imageUrl) {
  return true;
};
module.exports = SceneInfo;

View File

@@ -0,0 +1,205 @@
/*
* Copyright 2016 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var Eyes = require('./eyes');
var TWEEN = require('@tweenjs/tween.js');
var Util = require('../util');
var VideoType = require('../video-type');
/**
 * Renders mono/stereo photospheres and 360 videos onto inside-out spheres
 * in the given scene, plus a black "opacity mask" sphere used for fades.
 *
 * @param {THREE.Scene} scene Scene to render into.
 */
function SphereRenderer(scene) {
  this.scene = scene;
  // Create a transparent mask.
  this.createOpacityMask_();
}
/**
 * Sets the photosphere based on the image in the source. Supports stereo and
 * mono photospheres.
 *
 * @param {String} src URL of the equirectangular image to load.
 * @param {Object=} opt_params Optional settings; isStereo selects top/bottom
 *     stereo layout.
 * @return {Promise} Resolves once the texture is applied to the sphere(s),
 *     rejects if the texture fails to load.
 */
SphereRenderer.prototype.setPhotosphere = function(src, opt_params) {
  return new Promise(function(resolve, reject) {
    // Stash resolve/reject so the loader callbacks can settle this promise.
    // NOTE(review): a second call before the first settles clobbers these —
    // presumably callers always wait; confirm.
    this.resolve = resolve;
    this.reject = reject;
    var params = opt_params || {};
    this.isStereo = !!params.isStereo;
    this.src = src;
    // Load texture.
    var loader = new THREE.TextureLoader();
    loader.crossOrigin = 'anonymous';
    loader.load(src, this.onTextureLoaded_.bind(this), undefined,
                this.onTextureError_.bind(this));
  }.bind(this));
};
/**
 * Displays a 360 video on the sphere(s) using a THREE.VideoTexture.
 *
 * @param {HTMLVideoElement} videoElement The video element to texture from.
 * @param {String} videoType VideoType constant describing the stream; HLS
 *     gets special handling on Safari.
 * @param {Object=} opt_params Optional settings; isStereo selects top/bottom
 *     stereo layout.
 * @return {Promise} Resolves once the video texture is applied.
 */
SphereRenderer.prototype.set360Video = function (videoElement, videoType, opt_params) {
  return new Promise(function(resolve, reject) {
    this.resolve = resolve;
    this.reject = reject;
    var params = opt_params || {};
    this.isStereo = !!params.isStereo;
    // Load the video texture.
    var videoTexture = new THREE.VideoTexture(videoElement);
    videoTexture.minFilter = THREE.LinearFilter;
    videoTexture.magFilter = THREE.LinearFilter;
    videoTexture.generateMipmaps = false;
    if (Util.isSafari() && videoType === VideoType.HLS) {
      // fix black screen issue on safari
      // (createPhotosphere_ detects this RGBA/no-flip combination and uses a
      // custom shader instead of MeshBasicMaterial).
      videoTexture.format = THREE.RGBAFormat;
      videoTexture.flipY = false;
    } else {
      videoTexture.format = THREE.RGBFormat;
    }
    videoTexture.needsUpdate = true;
    this.onTextureLoaded_(videoTexture);
  }.bind(this));
};
/**
 * Set the opacity of the panorama.
 *
 * @param {Number} opacity How opaque we want the panorama to be. 0 means black,
 * 1 means full color.
 * @param {Number} duration Number of milliseconds the transition should take.
 *
 * @return {Promise} When the opacity change is complete.
 */
SphereRenderer.prototype.setOpacity = function(opacity, duration) {
  var scene = this.scene;
  // The mask sphere overlays the panorama, so the mask's opacity is the
  // inverse of the requested panorama opacity.
  var overlayOpacity = 1 - opacity;
  return new Promise(function(resolve, reject) {
    var mask = scene.getObjectByName('opacityMask');
    var tween = new TWEEN.Tween({opacity: mask.material.opacity})
        .to({opacity: overlayOpacity}, duration)
        .easing(TWEEN.Easing.Quadratic.InOut);
    tween.onUpdate(function(e) {
      // NOTE(review): relies on tween.js invoking onUpdate with the tweened
      // object bound as `this` (legacy behavior) — confirm against the
      // bundled @tweenjs/tween.js version.
      mask.material.opacity = this.opacity;
    });
    tween.onComplete(resolve).start();
  });
};
/**
 * Texture-ready callback: builds one sphere per eye sharing the texture,
 * installs them under the 'photo' group, and resolves the pending promise
 * created by setPhotosphere/set360Video.
 * @param {THREE.Texture} texture The loaded texture.
 * @private
 */
SphereRenderer.prototype.onTextureLoaded_ = function(texture) {
  var sphereLeft;
  var sphereRight;
  if (this.isStereo) {
    // Top/bottom stereo: each eye samples one vertical half of the texture.
    sphereLeft = this.createPhotosphere_(texture, {offsetY: 0.5, scaleY: 0.5});
    sphereRight = this.createPhotosphere_(texture, {offsetY: 0, scaleY: 0.5});
  } else {
    sphereLeft = this.createPhotosphere_(texture);
    sphereRight = this.createPhotosphere_(texture);
  }
  // Display in left and right eye respectively.
  var assignEye = function(sphere, eye, name) {
    sphere.layers.set(eye);
    sphere.eye = eye;
    sphere.name = name;
  };
  assignEye(sphereLeft, Eyes.LEFT, 'eyeLeft');
  assignEye(sphereRight, Eyes.RIGHT, 'eyeRight');
  this.scene.getObjectByName('photo').children = [sphereLeft, sphereRight];
  this.resolve();
};
/**
 * Texture-loader error callback; rejects the pending promise.
 * @param {*} error Error reported by the loader (unused).
 * @private
 */
SphereRenderer.prototype.onTextureError_ = function(error) {
  var message = 'Unable to load texture from "' + this.src + '"';
  this.reject(message);
};
/**
 * Builds a unit sphere mesh with the texture mapped on its inside.
 *
 * The UV scale/offset parameters let stereo callers map only half of a
 * top/bottom texture onto each eye's sphere.
 *
 * @param {THREE.Texture} texture Texture to display.
 * @param {Object=} opt_params Optional overrides: scaleX, scaleY, offsetX,
 *     offsetY (UV transform) and phiStart, phiLength, thetaStart,
 *     thetaLength (sphere extent). NOTE: the passed object is mutated.
 * @return {THREE.Mesh} The photosphere mesh.
 * @private
 */
SphereRenderer.prototype.createPhotosphere_ = function(texture, opt_params) {
  var p = opt_params || {};
  p.scaleX = p.scaleX || 1;
  p.scaleY = p.scaleY || 1;
  p.offsetX = p.offsetX || 0;
  p.offsetY = p.offsetY || 0;
  p.phiStart = p.phiStart || 0;
  p.phiLength = p.phiLength || Math.PI * 2;
  p.thetaStart = p.thetaStart || 0;
  p.thetaLength = p.thetaLength || Math.PI;
  var geometry = new THREE.SphereGeometry(1, 48, 48,
      p.phiStart, p.phiLength, p.thetaStart, p.thetaLength);
  // Mirror on X so the texture reads correctly from inside the sphere.
  geometry.applyMatrix(new THREE.Matrix4().makeScale(-1, 1, 1));
  // Apply the UV scale/offset to every face vertex.
  var uvs = geometry.faceVertexUvs[0];
  for (var i = 0; i < uvs.length; i ++) {
    for (var j = 0; j < 3; j ++) {
      uvs[i][j].x *= p.scaleX;
      uvs[i][j].x += p.offsetX;
      uvs[i][j].y *= p.scaleY;
      uvs[i][j].y += p.offsetY;
    }
  }
  var material;
  if (texture.format === THREE.RGBAFormat && texture.flipY === false) {
    // Safari HLS path (see set360Video): sample the un-flipped texture with a
    // custom shader, swizzling to BGRA on iOS.
    material = new THREE.ShaderMaterial({
      uniforms: {
        texture: { value: texture }
      },
      vertexShader: [
        "varying vec2 vUV;",
        "void main() {",
        " vUV = vec2( uv.x, 1.0 - uv.y );",
        " gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );",
        "}"
      ].join("\n"),
      fragmentShader: [
        "uniform sampler2D texture;",
        "varying vec2 vUV;",
        "void main() {",
        " gl_FragColor = texture2D( texture, vUV )" + (Util.isIOS() ? ".bgra" : "") + ";",
        "}"
      ].join("\n")
    });
  } else {
    material = new THREE.MeshBasicMaterial({ map: texture });
  }
  var out = new THREE.Mesh(geometry, material);
  // Render before everything else so the opacity mask can darken it.
  out.renderOrder = -1;
  return out;
};
/**
 * Creates the black fade sphere used by setOpacity. It sits inside the
 * photosphere (radius 0.49) and renders after it (renderOrder 1), so its
 * transparency darkens the panorama.
 * @return {THREE.Mesh} The mask mesh (also added to the scene).
 * @private
 */
SphereRenderer.prototype.createOpacityMask_ = function() {
  var maskGeometry = new THREE.SphereGeometry(0.49, 48, 48);
  var maskMaterial = new THREE.MeshBasicMaterial({
    color: 0x000000, side: THREE.DoubleSide, opacity: 0, transparent: true});
  var mask = new THREE.Mesh(maskGeometry, maskMaterial);
  mask.name = 'opacityMask';
  mask.renderOrder = 1;
  this.scene.add(mask);
  return mask;
};
module.exports = SphereRenderer;

View File

@@ -0,0 +1,130 @@
/*
* Copyright 2016 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var Util = require('../util');
/**
 * A proxy class for working around the fact that as soon as a video is play()ed
 * on iOS, Safari auto-fullscreens the video.
 *
 * TODO(smus): The entire raison d'etre for this class is to work around this
 * issue. Once Safari implements some way to suppress this fullscreen player, we
 * can remove this code.
 *
 * @param {HTMLVideoElement} videoElement The wrapped video element.
 */
function VideoProxy(videoElement) {
  this.videoElement = videoElement;
  // True while we're manually advancing the playhead (iOS 9 and below only).
  this.isFakePlayback = false;
  // performance.now() timestamp at which playback started.
  this.startTime = null;
}
/**
 * Starts playback. On iOS 9 and below (where play() would trigger Safari's
 * fullscreen player — see class comment), only the audio track is played via
 * a separate Audio element and the video playhead is faked in update().
 * Everywhere else, the video element is played directly.
 */
VideoProxy.prototype.play = function() {
  if (Util.isIOS9OrLess()) {
    this.startTime = performance.now();
    this.isFakePlayback = true;
    // Make an audio element to playback just the audio part.
    this.audioElement = new Audio();
    this.audioElement.src = this.videoElement.src;
    this.audioElement.play();
  } else {
    this.videoElement.play().then(function(e) {
      console.log('Playing video.', e);
    });
  }
};
/**
 * Pauses playback. On iOS 9 and below, stops the fake playback loop and
 * pauses the companion audio element; elsewhere, pauses the video directly.
 */
VideoProxy.prototype.pause = function() {
  if (Util.isIOS9OrLess() && this.isFakePlayback) {
    // Bug fix: this previously set isFakePlayback to true, so update() kept
    // advancing the playhead after pause. Clear the flag to actually stop.
    this.isFakePlayback = false;
    this.audioElement.pause();
  } else {
    this.videoElement.pause();
  }
};
/**
 * Sets the playback volume on the video and (if present) audio elements.
 * @param {Number} volumeLevel Volume in [0, 1].
 */
VideoProxy.prototype.setVolume = function(volumeLevel) {
  var video = this.videoElement;
  if (video) {
    // On iOS 10, the VideoElement.volume property is read-only. So we special
    // case muting and unmuting.
    if (Util.isIOS()) {
      video.muted = (volumeLevel === 0);
    } else {
      video.volume = volumeLevel;
    }
  }
  var audio = this.audioElement;
  if (audio) {
    audio.volume = volumeLevel;
  }
};
/**
 * Set the attribute mute of the elements according with the muteState param.
 *
 * @param {Boolean} muteState Whether the media elements should be muted.
 */
VideoProxy.prototype.mute = function(muteState) {
  [this.videoElement, this.audioElement].forEach(function(element) {
    if (element) {
      element.muted = muteState;
    }
  });
};
/**
 * @return {Number} The current playhead position in seconds, read from the
 *     audio element when faking playback on iOS 9 and below.
 */
VideoProxy.prototype.getCurrentTime = function() {
  if (Util.isIOS9OrLess()) {
    return this.audioElement.currentTime;
  }
  return this.videoElement.currentTime;
};
/**
 * Seeks both media elements to the given position.
 *
 * @param {Object} time Object carrying a currentTime property (seconds).
 */
VideoProxy.prototype.setCurrentTime = function(time) {
  [this.videoElement, this.audioElement].forEach(function(element) {
    if (element) {
      element.currentTime = time.currentTime;
    }
  });
};
/**
 * Called on RAF to progress playback.
 *
 * Only does work during fake playback (iOS 9 and below): derives the video
 * playhead from wall-clock time, and loops both media elements back to the
 * start when the video's duration is exceeded.
 */
VideoProxy.prototype.update = function() {
  // Fakes playback for iOS only.
  if (!this.isFakePlayback) {
    return;
  }
  var now = performance.now();
  var elapsedSeconds = (now - this.startTime) / 1000;
  this.videoElement.currentTime = elapsedSeconds;
  // Loop through the video
  if (elapsedSeconds > this.videoElement.duration) {
    this.startTime = now;
    this.videoElement.currentTime = 0;
    // Also restart the audio.
    this.audioElement.currentTime = 0;
  }
};
module.exports = VideoProxy;

View File

@@ -0,0 +1,20 @@
/*
* Copyright 2016 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Load EmbedVR.
require('./main');
// Load Analytics for EmbedVR.
require('./analytics');

View File

@@ -0,0 +1,372 @@
/*
* Copyright 2016 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var AdaptivePlayer = require('./adaptive-player');
var EventEmitter = require('eventemitter3');
var Eyes = require('./eyes');
var HotspotRenderer = require('./hotspot-renderer');
var ReticleRenderer = require('./reticle-renderer');
var SphereRenderer = require('./sphere-renderer');
var TWEEN = require('@tweenjs/tween.js');
var Util = require('../util');
var VideoProxy = require('./video-proxy');
var WebVRManager = require('webvr-boilerplate');
// Duration (ms) of the desktop autopan hint animation.
var AUTOPAN_DURATION = 3000;
// Yaw delta (radians) swept by the autopan.
var AUTOPAN_ANGLE = 0.4;
/**
 * The main WebGL rendering entry point. Manages the scene, camera, VR-related
 * rendering updates. Interacts with the WebVRManager.
 *
 * Coordinates the other renderers: SphereRenderer, HotspotRenderer,
 * ReticleRenderer.
 *
 * Also manages the AdaptivePlayer and VideoProxy.
 *
 * Emits the following events:
 *   load: when the scene is loaded.
 *   error: if there is an error loading the scene.
 *   modechange(Boolean isVR): if the mode (eg. VR, fullscreen, etc) changes.
 *
 * @param {Object} params Settings; hideFullscreenButton is forwarded to the
 *     WebVRManager UI.
 */
function WorldRenderer(params) {
  this.init_(params.hideFullscreenButton);
  this.sphereRenderer = new SphereRenderer(this.scene);
  this.hotspotRenderer = new HotspotRenderer(this);
  this.hotspotRenderer.on('focus', this.onHotspotFocus_.bind(this));
  this.hotspotRenderer.on('blur', this.onHotspotBlur_.bind(this));
  this.reticleRenderer = new ReticleRenderer(this.camera);
  // Get the VR Display as soon as we initialize.
  // NOTE(review): assumes navigator.getVRDisplays exists — presumably
  // guaranteed by the WebVR polyfill/boilerplate; confirm.
  navigator.getVRDisplays().then(function(displays) {
    if (displays.length > 0) {
      this.vrDisplay = displays[0];
    }
  }.bind(this));
}
// Inherit the EventEmitter interface so WorldRenderer can emit
// load/error/modechange events.
WorldRenderer.prototype = new EventEmitter();
/**
 * Renders a single frame: updates controls and tweens, draws the scene via
 * the VREffect, and lets the hotspot renderer react to the new camera pose.
 * @param {Number} time RAF timestamp, forwarded to TWEEN.
 */
WorldRenderer.prototype.render = function(time) {
  this.controls.update();
  TWEEN.update(time);
  this.effect.render(this.scene, this.camera);
  this.hotspotRenderer.update(this.camera);
};
/**
 * Loads the given SceneInfo (photo or video, mono or stereo) into the world.
 *
 * @param {SceneInfo} scene The scene description to load.
 * @return {Promise} When the scene is fully loaded; rejected on failure.
 */
WorldRenderer.prototype.setScene = function(scene) {
  var self = this;
  var promise = new Promise(function(resolve, reject) {
    self.sceneResolve = resolve;
    self.sceneReject = reject;
  });
  if (!scene || !scene.isValid()) {
    // Bug fix: guard against a null scene (previously threw on
    // scene.errorMessage) and return the now-rejected promise instead of
    // undefined, so callers can attach a .catch() handler.
    this.didLoadFail_(scene ? scene.errorMessage : 'No scene specified.');
    return promise;
  }
  var params = {
    isStereo: scene.isStereo,
    loop: scene.loop,
    volume: scene.volume,
    muted: scene.muted
  };
  this.setDefaultYaw_(scene.defaultYaw || 0);
  // Disable VR mode if explicitly disabled, or if we're loading a video on iOS
  // 9 or earlier.
  if (scene.isVROff || (scene.video && Util.isIOS9OrLess())) {
    this.manager.setVRCompatibleOverride(false);
  }
  // Set various callback overrides in iOS.
  if (Util.isIOS()) {
    this.manager.setFullscreenCallback(function() {
      Util.sendParentMessage({type: 'enter-fullscreen'});
    });
    this.manager.setExitFullscreenCallback(function() {
      Util.sendParentMessage({type: 'exit-fullscreen'});
    });
    this.manager.setVRCallback(function() {
      Util.sendParentMessage({type: 'enter-vr'});
    });
  }
  // If we're dealing with an image, and not a video.
  if (scene.image && !scene.video) {
    if (scene.preview) {
      // First load the preview.
      this.sphereRenderer.setPhotosphere(scene.preview, params).then(function() {
        // As soon as something is loaded, emit the load event to hide the
        // loading progress bar.
        self.didLoad_();
        // Then load the full resolution image.
        self.sphereRenderer.setPhotosphere(scene.image, params);
      }).catch(self.didLoadFail_.bind(self));
    } else {
      // No preview -- go straight to rendering the full image.
      this.sphereRenderer.setPhotosphere(scene.image, params).then(function() {
        self.didLoad_();
      }).catch(self.didLoadFail_.bind(self));
    }
  } else if (scene.video) {
    if (Util.isIE11()) {
      // On IE 11, if an 'image' param is provided, load it instead of showing
      // an error.
      //
      // TODO(smus): Once video textures are supported, remove this fallback.
      if (scene.image) {
        this.sphereRenderer.setPhotosphere(scene.image, params).then(function() {
          self.didLoad_();
        }).catch(self.didLoadFail_.bind(self));
      } else {
        this.didLoadFail_('Video is not supported on IE11.');
      }
    } else {
      // Stream the video adaptively (DASH/HLS) and texture it once loaded.
      this.player = new AdaptivePlayer(params);
      this.player.on('load', function(videoElement, videoType) {
        self.sphereRenderer.set360Video(videoElement, videoType, params).then(function() {
          self.didLoad_({videoElement: videoElement});
        }).catch(self.didLoadFail_.bind(self));
      });
      this.player.on('error', function(error) {
        self.didLoadFail_('Video load error: ' + error);
      });
      this.player.load(scene.video);
      this.videoProxy = new VideoProxy(this.player.video);
    }
  }
  this.sceneInfo = scene;
  if (Util.isDebug()) {
    console.log('Loaded scene', scene);
  }
  return promise;
};
/**
 * @return {Boolean} True when a VR display exists and is presenting.
 */
WorldRenderer.prototype.isVRMode = function() {
  if (!this.vrDisplay) {
    return false;
  }
  return this.vrDisplay.isPresenting;
};
/**
 * Submits the rendered frame to the VR display, but only while presenting.
 */
WorldRenderer.prototype.submitFrame = function() {
  if (!this.isVRMode()) {
    return;
  }
  this.vrDisplay.submitFrame();
};
/**
 * Frees the GPU resources (texture, material, geometry) held by an eye mesh.
 * @param {THREE.Mesh=} eye Eye mesh to dispose; no-op when falsy.
 * @private
 */
WorldRenderer.prototype.disposeEye_ = function(eye) {
  if (!eye) {
    return;
  }
  if (eye.material.map) {
    eye.material.map.dispose();
  }
  eye.material.dispose();
  eye.geometry.dispose();
};
/**
 * Disposes the GPU resources of both eye meshes, if present.
 */
WorldRenderer.prototype.dispose = function() {
  var scene = this.scene;
  this.disposeEye_(scene.getObjectByName('eyeLeft'));
  this.disposeEye_(scene.getObjectByName('eyeRight'));
};
/**
 * Tears down the current scene: stops and releases the adaptive player, and
 * removes both eye meshes (disposing their GPU resources) from the photo
 * group and the scene.
 */
WorldRenderer.prototype.destroy = function() {
  if (this.player) {
    this.player.removeAllListeners();
    this.player.destroy();
    this.player = null;
  }
  var scene = this.scene;
  var photo = scene.getObjectByName('photo');
  var self = this;
  ['eyeLeft', 'eyeRight'].forEach(function(eyeName) {
    var eye = scene.getObjectByName(eyeName);
    if (eye) {
      self.disposeEye_(eye);
      photo.remove(eye);
      scene.remove(eye);
    }
  });
};
/**
 * Emits the 'load' event and resolves the pending setScene promise.
 * @param {Object=} opt_event Optional payload (e.g. {videoElement}).
 * @private
 */
WorldRenderer.prototype.didLoad_ = function(opt_event) {
  this.emit('load', opt_event || {});
  if (this.sceneResolve) {
    this.sceneResolve();
  }
};
/**
 * Emits the 'error' event and rejects the pending setScene promise.
 * @param {String} message Human-readable failure reason.
 * @private
 */
WorldRenderer.prototype.didLoadFail_ = function(message) {
  this.emit('error', message);
  if (this.sceneReject) {
    this.sceneReject(message);
  }
};
/**
 * Sets the default yaw.
 * @param {Number} angleRad The yaw in radians.
 * @private
 */
WorldRenderer.prototype.setDefaultYaw_ = function(angleRad) {
  // Rotate the camera parent to take into account the scene's rotation.
  // By default, it should be at the center of the image.
  var display = this.controls.getVRDisplay();
  // For desktop, we subtract the current display Y axis.
  // NOTE(review): theta_ and poseSensor_ look like private fields of the
  // polyfilled VRDisplay — verify they still exist when upgrading the
  // polyfill.
  var theta = display.theta_ || 0;
  // For devices with orientation we make the current view center
  if (display.poseSensor_) {
    display.poseSensor_.resetPose();
  }
  this.camera.parent.rotation.y = (Math.PI / 2.0) + angleRad - theta;
};
/**
 * Do the initial camera tween to rotate the camera, giving an indication that
 * there is live content there (on desktop only).
 *
 * @param {Number=} duration Currently unused; the pan always takes
 *     AUTOPAN_DURATION ms. TODO(review): honor this parameter or drop it.
 */
WorldRenderer.prototype.autopan = function(duration) {
  var targetY = this.camera.parent.rotation.y - AUTOPAN_ANGLE;
  // The tween runs via TWEEN.update() in render(); no need to keep a
  // reference to it (removed unused local).
  new TWEEN.Tween(this.camera.parent.rotation)
      .to({y: targetY}, AUTOPAN_DURATION)
      .easing(TWEEN.Easing.Quadratic.Out)
      .start();
};
/**
 * One-time WebGL/VR setup: camera (inside a dummy parent used for yaw
 * control), renderer, VRControls/VREffect, WebVRManager, the scene, and the
 * window event listeners.
 * @param {Boolean} hideFullscreenButton Forwarded to the WebVRManager UI.
 * @private
 */
WorldRenderer.prototype.init_ = function(hideFullscreenButton) {
  var container = document.querySelector('body');
  var aspect = window.innerWidth / window.innerHeight;
  var camera = new THREE.PerspectiveCamera(75, aspect, 0.1, 100);
  camera.layers.enable(1);
  // Parent dummy lets setDefaultYaw_/autopan rotate the rig without fighting
  // the VRControls, which write to the camera itself.
  var cameraDummy = new THREE.Object3D();
  cameraDummy.add(camera);
  // Antialiasing disabled to improve performance.
  var renderer = new THREE.WebGLRenderer({antialias: false});
  renderer.setClearColor(0x000000, 0);
  renderer.setSize(window.innerWidth, window.innerHeight);
  renderer.setPixelRatio(window.devicePixelRatio);
  container.appendChild(renderer.domElement);
  var controls = new THREE.VRControls(camera);
  var effect = new THREE.VREffect(renderer);
  // Disable eye separation.
  effect.scale = 0;
  effect.setSize(window.innerWidth, window.innerHeight);
  // Present submission of frames automatically. This is done manually in
  // submitFrame().
  effect.autoSubmitFrame = false;
  this.camera = camera;
  this.renderer = renderer;
  this.effect = effect;
  this.controls = controls;
  this.manager = new WebVRManager(renderer, effect, {predistorted: false, hideButton: hideFullscreenButton});
  this.scene = this.createScene_();
  this.scene.add(this.camera.parent);
  // Watch the resize event.
  window.addEventListener('resize', this.onResize_.bind(this));
  // Prevent context menu.
  window.addEventListener('contextmenu', this.onContextMenu_.bind(this));
  window.addEventListener('vrdisplaypresentchange',
      this.onVRDisplayPresentChange_.bind(this));
};
/**
 * Keeps the renderer size and camera projection in sync with the window.
 * @private
 */
WorldRenderer.prototype.onResize_ = function() {
  var width = window.innerWidth;
  var height = window.innerHeight;
  this.effect.setSize(width, height);
  this.camera.aspect = width / height;
  this.camera.updateProjectionMatrix();
};
/**
 * Reacts to entering/leaving VR presentation: toggles the reticle, resizes
 * the renderer, logs analytics, notifies the parent frame on iOS, and
 * re-emits 'modechange' to listeners.
 * @param {Event} e The vrdisplaypresentchange event (unused).
 * @private
 */
WorldRenderer.prototype.onVRDisplayPresentChange_ = function(e) {
  if (Util.isDebug()) {
    console.log('onVRDisplayPresentChange_');
  }
  var isVR = this.isVRMode();
  // If the mode changed to VR and there is at least one hotspot, show reticle.
  var showReticle = isVR && this.hotspotRenderer.getCount() > 0;
  this.reticleRenderer.setVisibility(showReticle);
  // Resize the renderer for good measure.
  this.onResize_();
  // Analytics.
  if (window.analytics) {
    analytics.logModeChanged(isVR);
  }
  // When exiting VR mode from iOS, make sure we emit back an exit-fullscreen event.
  if (!isVR && Util.isIOS()) {
    Util.sendParentMessage({type: 'exit-fullscreen'});
  }
  // Emit a mode change event back to any listeners.
  this.emit('modechange', isVR);
};
/**
 * Builds the root scene containing an empty 'photo' group, which the sphere
 * renderer later populates with the per-eye meshes.
 * @param {Object=} opt_params Unused.
 * @return {THREE.Scene} The new scene.
 * @private
 */
WorldRenderer.prototype.createScene_ = function(opt_params) {
  var root = new THREE.Scene();
  // Add a group for the photosphere.
  var photoGroup = new THREE.Object3D();
  photoGroup.name = 'photo';
  root.add(photoGroup);
  return root;
};
/**
 * Hotspot focus handler: switch the canvas to a pointer cursor.
 * @param {String} id Hotspot id (unused).
 * @private
 */
WorldRenderer.prototype.onHotspotFocus_ = function(id) {
  this.setCursor_('pointer');
};
/**
 * Hotspot blur handler: restore the default cursor.
 * @param {String} id Hotspot id (unused).
 * @private
 */
WorldRenderer.prototype.onHotspotBlur_ = function(id) {
  this.setCursor_('');
};
/**
 * Sets the CSS cursor on the WebGL canvas.
 * @param {String} cursor CSS cursor value ('' restores the default).
 * @private
 */
WorldRenderer.prototype.setCursor_ = function(cursor) {
  var canvas = this.renderer.domElement;
  canvas.style.cursor = cursor;
};
/**
 * Suppresses the browser context menu over the canvas.
 * @param {Event} e The contextmenu event.
 * @return {Boolean} Always false, to cancel the default action.
 * @private
 */
WorldRenderer.prototype.onContextMenu_ = function(e) {
  e.preventDefault();
  e.stopPropagation();
  return false;
};
module.exports = WorldRenderer;