Treat the user's text input as the creative brief for a new WebXR experience. Enable Gemini Canvas mode. Act as an expert in three.js, WebXR, and WebGL, and as a creative technologist. Author new WebXR experiences (with *Gemini Canvas* if available). Strictly adhere to the following engineering guidelines:

1. **SDK Integration** (CRITICAL): Include ALL of the following import map:
2. **Styling**: Use `https://xrblocks.github.io/css/xr.css` for XR Blocks.
3. **Architecture**: Output a SINGLE `index.html` file. Embed the `main.js` logic inline within a `<script type="module">` tag.

Template: 0_basic

(example: ../xrblocks/templates/0_basic/main.js)

```js
import * as THREE from 'three';
import * as xb from 'xrblocks';

/**
 * A basic example of XR Blocks: renders a cylinder and changes its color on
 * pinch.
 */
class MainScript extends xb.Script {
  init() {
    this.add(new THREE.HemisphereLight(0xffffff, 0x666666, /*intensity=*/ 3));
    const geometry =
        new THREE.CylinderGeometry(0.2, 0.2, 0.4, /*segments=*/ 32);
    const material = new THREE.MeshPhongMaterial({
      color: 0xffffff,
      transparent: true,
      opacity: 0.8,
    });
    this.player = new THREE.Mesh(geometry, material);
    this.player.position.set(0, xb.user.height - 0.5, -xb.user.objectDistance);
    this.add(this.player);
  }

  /**
   * Changes the color of the mesh on a pinch in XR.
   * @param {SelectEvent} event event.target holds controller / hand data.
   */
  onSelectEnd(event) {
    this.player.material.color.set(Math.random() * 0xffffff);
  }

  /**
   * Changes the color of the mesh to blue during pinching in XR.
   * @param {SelectEvent} event The controller / hand event.
   */
  onSelecting(event) {
    this.player.material.color.set(0x66ccff);
  }
}

document.addEventListener('DOMContentLoaded', function () {
  xb.add(new MainScript());
  xb.init(new xb.Options());
});
```
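The guidelines above imply a deliverable of roughly the following shape. This is a minimal single-file sketch, not the canonical output: the import map body is intentionally an empty placeholder (substitute the exact map required by guideline 1), and the `MainScript` class is assumed to come from a template such as 0_basic.

```html
<!DOCTYPE html>
<html>
<head>
  <meta charset="utf-8">
  <title>My XR Blocks App</title>
  <!-- Guideline 2: XR Blocks stylesheet. -->
  <link rel="stylesheet" href="https://xrblocks.github.io/css/xr.css">
  <!-- Guideline 1: replace the empty map below with the required import map
       (entries for 'three', 'xrblocks', etc.). -->
  <script type="importmap">
    {"imports": {}}
  </script>
</head>
<body>
  <!-- Guideline 3: the main.js logic embedded inline as a module script. -->
  <script type="module">
    import * as THREE from 'three';
    import * as xb from 'xrblocks';

    // Paste the MainScript class from a template (e.g., 0_basic) here.

    document.addEventListener('DOMContentLoaded', function () {
      xb.add(new MainScript());
      xb.init(new xb.Options());
    });
  </script>
</body>
</html>
```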
Template: 1_ui

(example: ../xrblocks/templates/1_ui/UIManager.js)

```js
import * as xb from 'xrblocks';

/**
 * Renders a draggable spatial UI panel with SDF font libraries and icon
 * buttons using XR Blocks.
 */
export class UIManager extends xb.Script {
  constructor() {
    super();
    // Adds an interactive SpatialPanel as a container for UI elements.
    const panel = new xb.SpatialPanel({backgroundColor: '#2b2b2baa'});
    this.add(panel);
    const grid = panel.addGrid();
    // `weight` defines the percentage of a view's dimension relative to its
    // parent. Here, `question` occupies 70% of the height of the panel.
    const question = grid.addRow({weight: 0.7}).addText({
      text: 'Welcome to UI Playground! Is it your first time here?',
      fontColor: '#ffffff',
      fontSize: 0.08,
    });
    this.question = question;
    // ctrlRow occupies 30% of the height of the panel.
    const ctrlRow = grid.addRow({weight: 0.3});
    // The `text` field defines the icon of the button from Material Icons in
    // https://fonts.google.com/icons
    const yesButton = ctrlRow
        .addCol({weight: 0.5})
        .addIconButton({text: 'check_circle', fontSize: 0.5});
    // onTriggered defines unified behavior for `onSelected`, `onClicked`,
    // `onPinched`, and `onTouched` for buttons.
    yesButton.onTriggered = () => {
      this._onYes();
    };
    const noButton = ctrlRow
        .addCol({weight: 0.5})
        .addIconButton({text: 'cancel', fontSize: 0.5});
    noButton.onTriggered = () => {
      this._onNo();
    };
  }

  _onYes() {
    console.log('yes');
  }

  _onNo() {
    console.log('no');
  }
}
```

(example: ../xrblocks/templates/1_ui/index.html) page title: "UI: Spatial Panels | XR Blocks Template"

(example: ../xrblocks/templates/1_ui/main.js)

```js
import 'xrblocks/addons/simulator/SimulatorAddons.js';

import * as xb from 'xrblocks';

import {UIManager} from './UIManager.js';

const options = new xb.Options();
options.enableUI();

document.addEventListener('DOMContentLoaded', function () {
  xb.add(new UIManager());
  xb.init(options);
});
```

Template: 2_hands

(example: ../xrblocks/templates/2_hands/HandsInteraction.js)

```js
import * as THREE from 'three';
import * as xb from 'xrblocks';

export class HandsInteraction extends xb.Script {
  init() {
    // Touch state.
    this.leftHandTouching = false;
    this.rightHandTouching = false;
    // Grab state.
    this.isGrabbing = false;
    this._handToObject = null;
    // Add a cylinder to touch and grab.
    this.originalColor = new THREE.Color(0xfbbc05);
    const geometry =
        new THREE.CylinderGeometry(0.1, 0.1, 0.2, 32).translate(0, 1.45, -0.4);
    const material = new THREE.MeshPhongMaterial({color: this.originalColor});
    this.target = new THREE.Mesh(geometry, material);
    this.add(this.target);
    // Add a light.
    this.add(new THREE.HemisphereLight(0xbbbbbb, 0x888888, 3));
    const light = new THREE.DirectionalLight(0xffffff, 2);
    light.position.set(1, 1, 1).normalize();
    this.add(light);
  }

  _updateColor() {
    if (this.leftHandTouching && this.rightHandTouching) {
      this.target.material.color.setHex(0xdb4437);  // Red
    } else if (this.leftHandTouching) {
      this.target.material.color.setHex(0x34a853);  // Green
    } else if (this.rightHandTouching) {
      this.target.material.color.setHex(0x4285f4);  // Blue
    } else {
      this.target.material.color.copy(this.originalColor);  // Yellow
    }
  }

  onObjectTouchStart(event) {
    const handName = event.handIndex === xb.Handedness.LEFT ? 'left' : 'right';
    console.log(`Touch started with ${handName} hand!`);
    if (event.handIndex === xb.Handedness.LEFT) {
      this.leftHandTouching = true;
    } else if (event.handIndex === xb.Handedness.RIGHT) {
      this.rightHandTouching = true;
    }
    this._updateColor();
  }

  onObjectTouchEnd(event) {
    const handName = event.handIndex === xb.Handedness.LEFT ? 'left' : 'right';
    console.log(`Touch ended with ${handName} hand!`);
    if (event.handIndex === xb.Handedness.LEFT) {
      this.leftHandTouching = false;
    } else if (event.handIndex === xb.Handedness.RIGHT) {
      this.rightHandTouching = false;
    }
    this._updateColor();
  }

  onObjectGrabStart(event) {
    if (this.isGrabbing) return;
    this.isGrabbing = true;
    const handName = event.handIndex === xb.Handedness.LEFT ? 'left' : 'right';
    console.log(`Grab started with ${handName} hand!`);
    // Make sure matrices are fresh.
    this.target.updateMatrixWorld(true);
    event.hand.updateMatrixWorld(true);
    // Save the initial hand to object delta transform.
    const H0 = new THREE.Matrix4().copy(event.hand.matrixWorld);
    const O0 = new THREE.Matrix4().copy(this.target.matrixWorld);
    this._handToObject = new THREE.Matrix4().copy(H0).invert().multiply(O0);
  }
  onObjectGrabbing(event) {
    if (!this.isGrabbing || !this._handToObject) return;
    event.hand.updateMatrixWorld(true);
    const H = new THREE.Matrix4().copy(event.hand.matrixWorld);
    const O = new THREE.Matrix4().multiplyMatrices(H, this._handToObject);
    const parent = this.target.parent;
    if (parent) parent.updateMatrixWorld(true);
    const parentInv = parent ?
        new THREE.Matrix4().copy(parent.matrixWorld).invert() :
        new THREE.Matrix4().identity();
    const Olocal = new THREE.Matrix4().multiplyMatrices(parentInv, O);
    const pos = new THREE.Vector3();
    const quat = new THREE.Quaternion();
    const scl = new THREE.Vector3();
    Olocal.decompose(pos, quat, scl);
    this.target.position.copy(pos);
    this.target.quaternion.copy(quat);
    this.target.updateMatrix();
  }

  onObjectGrabEnd(event) {
    if (!this.isGrabbing) return;
    const handName = event.handIndex === xb.Handedness.LEFT ? 'left' : 'right';
    console.log(`Grab ended with ${handName} hand!`);
    this.isGrabbing = false;
    this._handToObject = null;
  }
}
```

(example: ../xrblocks/templates/2_hands/index.html) page title: "Hands: Mesh and Touch | XR Blocks Template"

(example: ../xrblocks/templates/2_hands/main.js)

```js
// Provides optional 2D UIs for simulator on desktop.
import 'xrblocks/addons/simulator/SimulatorAddons.js';

import * as xb from 'xrblocks';

import {HandsInteraction} from './HandsInteraction.js';

const options = new xb.Options();
options.enableReticles();
options.enableHands();
options.hands.enabled = true;
options.hands.visualization = true;
// Visualize hand joints.
options.hands.visualizeJoints = true;
// Visualize hand meshes.
options.hands.visualizeMeshes = true;
options.simulator.defaultMode = xb.SimulatorMode.POSE;

function start() {
  xb.add(new HandsInteraction());
  xb.init(options);
}

document.addEventListener('DOMContentLoaded', function () {
  start();
});
```

Template: 3_depth

(example: ../xrblocks/templates/3_depth/index.html) page title: "Depth | XR Blocks Template"

(example: ../xrblocks/templates/3_depth/main.js)

```js
import 'xrblocks/addons/simulator/SimulatorAddons.js';

import * as THREE from 'three';
import * as xb from 'xrblocks';

const pawnModelPath =
    'https://cdn.jsdelivr.net/gh/xrblocks/assets@main/models/arcore_pawn_compressed.glb';

class PawnPlacer extends xb.Script {
  async init() {
    this.addLights();
    await this.loadPawnModel();
  }

  async loadPawnModel() {
    const pawnGltf = await new xb.ModelLoader().load({
      url: pawnModelPath,
      renderer: xb.core.renderer,
    });
    pawnGltf.scene.scale.setScalar(0.5);
    this.pawnModel = pawnGltf.scene;
  }

  addLights() {
    const directionalLight = new THREE.DirectionalLight(0xffffff, 2);
    directionalLight.position.set(0, 1, 0);
    this.add(directionalLight);
    const ambientLight = new THREE.AmbientLight(0xffffff, 0.5);
    this.add(ambientLight);
  }

  onSelectStart(event) {
    const intersection = xb.user.select(xb.core.depth.depthMesh, event.target);
    if (intersection) {
      this.add(xb.placeObjectAtIntersectionFacingTarget(
          this.pawnModel.clone(), intersection, xb.core.camera));
    }
  }
}

document.addEventListener('DOMContentLoaded', async function () {
  const options = new xb.Options();
  options.reticles.enabled = true;
  options.depth = new xb.DepthOptions(xb.xrDepthMeshOptions);
  await xb.init(options);
  xb.showReticleOnDepthMesh(true);
  xb.add(new PawnPlacer());
});
```

Template: 4_stereo

(example: ../xrblocks/templates/4_stereo/index.html) page title: "Stereo | XR Blocks Template"

(example: ../xrblocks/templates/4_stereo/main.js)

```js
import 'xrblocks/addons/simulator/SimulatorAddons.js';

import * as THREE from 'three';
import * as xb from 'xrblocks';

const stereoTextureFile = 'SV_20241216_144600.webp';

class StereoImage extends xb.Script {
  async init() {
    await this.addStereoQuad();
  }

  async addStereoQuad() {
    const stereoObject = new THREE.Group();
    const [leftTexture, rightTexture] =
        await xb.loadStereoImageAsTextures(stereoTextureFile);
    const geometry = new THREE.PlaneGeometry(1, 1);
    const leftMesh = new THREE.Mesh(
        geometry,
        new THREE.MeshBasicMaterial({
          map: leftTexture,
          side: THREE.DoubleSide,
        }));
    xb.showOnlyInLeftEye(leftMesh);
    stereoObject.add(leftMesh);
    const rightMesh = new THREE.Mesh(
        geometry,
        new THREE.MeshBasicMaterial({
          map: rightTexture,
          side: THREE.DoubleSide,
        }));
    xb.showOnlyInRightEye(rightMesh);
    stereoObject.add(rightMesh);
    stereoObject.position.set(0.0, 1.5, -1.5);
    this.add(stereoObject);
  }
}

document.addEventListener('DOMContentLoaded', function () {
  const options = new xb.Options();
  options.simulator.stereo.enabled = true;
  xb.add(new StereoImage());
  xb.init(options);
});
```

Template: 5_camera

(example: ../xrblocks/templates/5_camera/index.html) page title: "Camera | XR Blocks Template"

(example: ../xrblocks/templates/5_camera/main.js)

```js
import 'xrblocks/addons/simulator/SimulatorAddons.js';

import * as xb from 'xrblocks';

/**
 * A class that provides UI to display and cycle through device cameras.
 */
export class CameraViewManager extends xb.Script {
  /** @private {XRDeviceCamera|null} */
  cameraStream_ = null;

  constructor() {
    super();
    this.panel = new xb.SpatialPanel({
      backgroundColor: '#2b2b2baa',
      useDefaultPosition: true,
    });
    const grid = this.panel.addGrid();
    this.videoView = grid.addRow({weight: 0.7}).addVideo();
    const txtRow = grid.addRow({weight: 0.15});
    this.cameraLabel = txtRow
        .addCol({weight: 1})
        .addText({text: 'Camera', fontColor: '#ffffff', fontSize: 0.05});
    const ctrlRow = grid.addRow({weight: 0.2});
    this.prevCameraButton = ctrlRow.addCol({weight: 0.5}).addIconButton({
      text: 'skip_previous',
      fontSize: 0.5,
    });
    this.nextCameraButton = ctrlRow.addCol({weight: 0.5}).addIconButton({
      text: 'skip_next',
      fontSize: 0.5,
    });
    this.prevCameraButton.onTriggered = () => this.cycleCamera_(-1);
    this.nextCameraButton.onTriggered = () => this.cycleCamera_(1);
    this.add(this.panel);
  }

  async init() {
    this.cameraStream_ = xb.core.deviceCamera;
    // Listen for camera state changes to update UI.
    this.cameraStream_.addEventListener('statechange', (event) => {
      this.cameraLabel.setText(event.device?.label || event.state || 'Camera');
      if (event.state === 'streaming') {
        this.videoView.load(this.cameraStream_);
      }
    });
    this.cameraLabel.setText(
        this.cameraStream_.getCurrentDevice()?.label || 'Camera');
    this.videoView.load(this.cameraStream_);
  }
  /**
   * Cycle to the next or previous device.
   * @param {number} offset - The direction to cycle (-1 for prev, 1 for next).
   */
  async cycleCamera_(offset) {
    const devices = this.cameraStream_.getAvailableDevices();
    if (devices.length <= 1) return;
    const newIndex =
        (this.cameraStream_.getCurrentDeviceIndex() + offset + devices.length) %
        devices.length;
    await this.cameraStream_.setDeviceId(devices[newIndex].deviceId);
  }
}

document.addEventListener('DOMContentLoaded', function () {
  const options = new xb.Options();
  options.enableUI();
  options.enableCamera();
  xb.add(new CameraViewManager());
  xb.init(options);
});
```

Template: 6_ai

(example: ../xrblocks/templates/6_ai/GeminiQueryManager.js)

```js
import * as xb from 'xrblocks';

export class GeminiQueryManager extends xb.Script {
  constructor() {
    super();
    this.panel = null;
    this.isProcessing = false;
    this.responseDisplay = null;
  }

  init() {
    this.ai = xb.core.ai;
    this.createPanel();
  }

  createPanel() {
    this.panel = new xb.SpatialPanel({
      width: 2.5,
      height: 1.5,
      backgroundColor: '#1a1a1abb',
    });
    this.panel.position.set(0, 1.6, -2);
    this.add(this.panel);
    const grid = this.panel.addGrid();
    // Response area
    const responseRow = grid.addRow({weight: 0.8});
    this.responseDisplay = new xb.ScrollingTroikaTextView({
      text: '',
      fontSize: 0.04,
    });
    responseRow.add(this.responseDisplay);
    const buttonRow = grid.addRow({weight: 0.2});
    const textCol = buttonRow.addCol({weight: 0.5});
    const textButton = textCol.addTextButton({
      text: 'Ask about WebXR',
      fontColor: '#ffffff',
      backgroundColor: '#4285f4',
      fontSize: 0.24,
    });
    const imageCol = buttonRow.addCol({weight: 0.5});
    const imageButton = imageCol.addTextButton({
      text: 'Send Sample Image',
      fontColor: '#ffffff',
      backgroundColor: '#34a853',
      fontSize: 0.24,
    });
    textButton.onTriggered = () => this.askText();
    imageButton.onTriggered = () => this.askImage();
  }

  async ask(parts, displayText) {
    if (this.isProcessing || !this.ai?.isAvailable()) return;
    this.isProcessing = true;
    this.responseDisplay.addText(displayText);
    try {
      const response = await this.ai.query({
        type: 'multiPart',
        parts: parts,
      });
      this.responseDisplay.addText(`šŸ¤– AI: ${response.text}\n\n`);
    } catch (error) {
      this.responseDisplay.addText(`āŒ Error: ${error.message}\n\n`);
    }
    this.isProcessing = false;
  }
  askText() {
    const question = 'Hello! What is WebXR?';
    const parts = [{text: question + ' reply succinctly.'}];
    const displayText = `šŸ’¬ You: ${question}\n\n`;
    this.ask(parts, displayText);
  }

  askImage() {
    const question = 'What do you see in this image?';
    const image = {
      inlineData: {
        data: 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mP8z8BQDwAEhQGAhKmMIQAAAABJRU5ErkJggg==',
        mimeType: 'image/png',
      },
    };
    const parts = [image, {text: question}];
    const displayText = `šŸ’¬ You: ${question}\nšŸ“ø [Sample image sent]\n\n`;
    this.ask(parts, displayText);
  }
}
```

(example: ../xrblocks/templates/6_ai/index.html) page title: "XR Blocks - AI Query Demo"

(example: ../xrblocks/templates/6_ai/main.js)

```js
import 'xrblocks/addons/simulator/SimulatorAddons.js';

import * as xb from 'xrblocks';

import {GeminiQueryManager} from './GeminiQueryManager.js';

const options = new xb.Options();
options.enableUI();
options.enableAI();

function start() {
  try {
    xb.init(options);
    xb.add(new GeminiQueryManager());
  } catch (error) {
    console.error('Failed to initialize XR app:', error);
  }
}

document.addEventListener('DOMContentLoaded', function () {
  start();
});
```

Template: 7_ai_live

(example: ../xrblocks/templates/7_ai_live/GeminiManager.js)

```js
import * as xb from 'xrblocks';

import {TranscriptionManager} from './TranscriptionManager.js';

export class GeminiManager extends xb.Script {
  constructor() {
    super();
    this.xrDeviceCamera = null;
    this.transcription = null;
    this.ai = null;
    this.isAIRunning = false;
    this.screenshotInterval = null;
    this.defaultText = '';
  }

  init() {
    this.xrDeviceCamera = xb.core.deviceCamera;
    this.ai = xb.core.ai;
    this.createTextDisplay();
  }

  async toggleGeminiLive() {
    return this.isAIRunning ? this.stopGeminiLive() : this.startGeminiLive();
  }

  async startGeminiLive() {
    if (this.isAIRunning || !this.ai) return;
    try {
      await xb.core.sound.enableAudio();
      await this.startLiveAI();
      this.startScreenshotCapture();
      this.isAIRunning = true;
      this.updateButtonState();
    } catch (error) {
      console.error('Failed to start AI session:', error);
      this.transcription?.addText(
          'Error: Failed to start AI session - ' + error.message);
      this.cleanup();
      this.isAIRunning = false;
      this.updateButtonState();
    }
  }

  async stopGeminiLive() {
    if (!this.isAIRunning) return;
    await this.ai?.stopLiveSession?.();
    this.cleanup();
    this.isAIRunning = false;
    this.updateButtonState();
    this.transcription?.clear();
    this.transcription?.setText(this.defaultText);
  }

  async startLiveAI() {
    return new Promise((resolve, reject) => {
      this.ai.setLiveCallbacks({
        onopen: resolve,
        onmessage: (message) => this.handleAIMessage(message),
        onerror: reject,
        onclose: (closeEvent) => {
          this.cleanup();
          this.isAIRunning = false;
          this.updateButtonState();
          this.transcription?.clear();
          this.transcription?.setText(closeEvent.reason || this.defaultText);
        },
      });
      this.ai.startLiveSession().catch(reject);
    });
  }

  createTextDisplay() {
    this.textPanel = new xb.SpatialPanel({
      width: 3,
      height: 1.5,
      backgroundColor: '#1a1a1abb',
    });
    const grid = this.textPanel.addGrid();
    const responseDisplay = new xb.ScrollingTroikaTextView({
      text: this.defaultText,
      fontSize: 0.03,
      textAlign: 'left',
    });
    grid.addRow({weight: 0.7}).add(responseDisplay);
    this.transcription = new TranscriptionManager(responseDisplay);
    this.toggleButton = grid.addRow({weight: 0.3}).addTextButton({
      text: 'ā–¶ Start',
      fontColor: '#ffffff',
      backgroundColor: '#006644',
      fontSize: 0.2,
    });
    this.toggleButton.onTriggered = () => this.toggleGeminiLive();
    this.textPanel.position.set(0, 1.2, -2);
    this.add(this.textPanel);
  }
  handleAIMessage(message) {
    message.data && xb.core.sound.playAIAudio(message.data);
    const content = message.serverContent;
    if (content) {
      content.inputTranscription?.text &&
          this.transcription.handleInputTranscription(
              content.inputTranscription.text);
      content.outputTranscription?.text &&
          this.transcription.handleOutputTranscription(
              content.outputTranscription.text);
      content.turnComplete && this.transcription.finalizeTurn();
    }
  }

  startScreenshotCapture() {
    this.screenshotInterval = setInterval(() => {
      const base64Image = this.xrDeviceCamera?.getSnapshot({
        outputFormat: 'base64',
        mimeType: 'image/jpeg',
        quality: 1,
      });
      if (base64Image) {
        const base64Data = base64Image.startsWith('data:') ?
            base64Image.split(',')[1] :
            base64Image;
        try {
          this.ai?.sendRealtimeInput?.({
            video: {data: base64Data, mimeType: 'image/jpeg'},
          });
        } catch (error) {
          console.warn(error);
          this.stopGeminiLive();
        }
      }
    }, 1000);
  }

  updateButtonState() {
    this.toggleButton?.setText(this.isAIRunning ? 'ā¹ Stop' : 'ā–¶ Start');
  }

  cleanup() {
    if (this.screenshotInterval) {
      clearInterval(this.screenshotInterval);
      this.screenshotInterval = null;
    }
    xb.core.sound?.disableAudio();
    xb.core.sound?.stopAIAudio();
  }

  dispose() {
    this.cleanup();
    super.dispose();
  }
}
```

(example: ../xrblocks/templates/7_ai_live/TranscriptionManager.js)

```js
export class TranscriptionManager {
  constructor(responseDisplay) {
    this.responseDisplay = responseDisplay;
    this.currentInputText = '';
    this.currentOutputText = '';
    this.conversationHistory = [];
  }

  handleInputTranscription(text) {
    if (!text) return;
    this.currentInputText += text;
    this.updateLiveDisplay();
  }

  handleOutputTranscription(text) {
    if (!text) return;
    this.currentOutputText += text;
    this.updateLiveDisplay();
  }

  finalizeTurn() {
    if (this.currentInputText.trim()) {
      this.conversationHistory.push({
        speaker: 'You',
        text: this.currentInputText.trim(),
      });
    }
    if (this.currentOutputText.trim()) {
      this.conversationHistory.push({
        speaker: 'AI',
        text: this.currentOutputText.trim(),
      });
    }
    this.currentInputText = '';
    this.currentOutputText = '';
    this.updateFinalDisplay();
  }

  updateLiveDisplay() {
    let displayText = '';
    for (const entry of this.conversationHistory.slice(-2)) {
      displayText += `${entry.speaker}: ${entry.text}\n\n`;
    }
    if (this.currentInputText.trim()) {
      displayText += `You: ${this.currentInputText}`;
    }
    if (this.currentOutputText.trim()) {
      if (this.currentInputText.trim()) displayText += '\n\n';
      displayText += `AI: ${this.currentOutputText}`;
    }
    this.responseDisplay?.setText(displayText);
  }

  updateFinalDisplay() {
    let displayText = '';
    for (const entry of this.conversationHistory) {
      displayText += `${entry.speaker}: ${entry.text}\n\n`;
    }
    this.responseDisplay?.setText(displayText);
  }

  clear() {
    this.currentInputText = '';
    this.currentOutputText = '';
    this.conversationHistory = [];
  }

  addText(text) {
    this.responseDisplay?.addText(text + '\n\n');
  }

  setText(text) {
    this.responseDisplay?.setText(text);
  }
}
```

(example: ../xrblocks/templates/7_ai_live/index.html) page title: "XR Blocks - Gemini Live AI Demo"

(example: ../xrblocks/templates/7_ai_live/main.js)

```js
import 'xrblocks/addons/simulator/SimulatorAddons.js';

import * as xb from 'xrblocks';

import {GeminiManager} from './GeminiManager.js';

const options = new xb.Options();
options.enableUI();
options.enableHands();
options.enableAI();
options.enableCamera();
options.deviceCamera = new xb.DeviceCameraOptions({
  enabled: true,
  videoConstraints: {
    width: {ideal: 1280},
    height: {ideal: 720},
    facingMode: 'environment',
  },
});
async function requestAudioPermission() {
  try {
    const stream = await navigator.mediaDevices.getUserMedia({
      audio: {
        sampleRate: 16000,
        channelCount: 1,
        echoCancellation: true,
        noiseSuppression: true,
      },
    });
    stream.getTracks().forEach((track) => track.stop());
    return stream;
  } catch (error) {
    console.error('Audio permission denied or not available:', error);
    alert(
        'Audio permission is required for Gemini Live AI features. ' +
        'Please enable microphone access and refresh the page.');
    return null;
  }
}

async function start() {
  try {
    await requestAudioPermission();
    xb.init(options);
    xb.add(new GeminiManager());
  } catch (error) {
    console.error('Failed to initialize XR app:', error);
  }
}

document.addEventListener('DOMContentLoaded', function () {
  start();
});
```

Template: 8_objects

(example: ../xrblocks/templates/8_objects/index.html) page title: "World Component: Object Detection | XR Blocks Template"

(example: ../xrblocks/templates/8_objects/main.js)

```js
import * as THREE from 'three';
import * as xb from 'xrblocks';

/**
 * A basic example of using the XR Blocks SDK's world component to detect
 * objects in the real world using Gemini.
 */
class MainScript extends xb.Script {
  init() {
    this.add(new THREE.HemisphereLight(0xffffff, 0x666666, /*intensity=*/ 3));
  }

  /**
   * Runs object detection on select (click in simulator, pinch in XR device).
   * The results of the detection are automatically handled by the
   * ObjectDetector, which will create debug visuals for each detected object.
   * This behavior is enabled when `showDebugVisualizations` is set to true.
   * @param {XRInputSourceEvent} event event.target holds controller or hand
   *     data.
   */
  async onSelectEnd() {
    console.log('Running object detection...');
    const detectedObjects = await xb.world.objects.runDetection();
    // `detectedObjects` is an array of THREE.Object3D instances, each
    // representing a detected object. These objects contain the 3D world
    // position and other metadata returned by the detection model.
    if (detectedObjects.length > 0) {
      console.log('Detected objects:', detectedObjects);
    } else {
      console.log('No objects detected.');
    }
  }
}

document.addEventListener('DOMContentLoaded', function () {
  const options = new xb.Options();
  // AI is required for the object detection backend.
  options.enableAI();
  // Enable the environment camera to provide the video feed to the AI module.
  options.enableCamera('environment');
  // Depth is required to project the 2D detections from the AI module into 3D.
  options.enableDepth();
  // Enable the object detection feature and its debug visualizations.
  options.world.enableObjectDetection();
  options.world.objects.showDebugVisualizations = true;
  xb.add(new MainScript());
  xb.init(options);
});
```

Template: 9_xr-toggle

(example: ../xrblocks/templates/9_xr-toggle/index.html) page title: "Transition between AR and VR | XR Blocks Template"

(example: ../xrblocks/templates/9_xr-toggle/main.js)

```js
import * as THREE from 'three';
import * as xb from 'xrblocks';

/**
 * Demonstrates how to use the XRTransition component to smoothly switch
 * between AR and VR backgrounds.
 */
class MainScript extends xb.Script {
  init() {
    this.add(new THREE.HemisphereLight(0xffffff, 0x666666, /*intensity=*/ 3));
    const geometry =
        new THREE.CylinderGeometry(0.2, 0.2, 0.4, /*segments=*/ 32);
    const material = new THREE.MeshPhongMaterial({
      color: 0xffffff,
      transparent: true,
      opacity: 0.8,
    });
    this.cylinder = new THREE.Mesh(geometry, material);
    this.cylinder.position.set(
        0, xb.core.user.height - 0.5, -xb.core.user.objectDistance);
    this.add(this.cylinder);
  }
  /**
   * On pinch, toggle between AR and VR modes and update cylinder color.
   */
  onSelectEnd() {
    if (!xb.core.transition) {
      console.warn('XRTransition not enabled.');
      return;
    }
    this.cylinder.material.color.set(Math.random() * 0xffffff);
    // Toggle between AR and VR based on the current mode.
    if (xb.core.transition.currentMode === 'AR') {
      xb.core.transition.toVR({color: Math.random() * 0xffffff});
    } else {
      xb.core.transition.toAR();
    }
  }
}

document.addEventListener('DOMContentLoaded', function () {
  const options = new xb.Options().enableXRTransitions();
  xb.add(new MainScript());
  xb.init(options);
});
```

Template: heuristic_hand_gestures

(example: ../xrblocks/templates/heuristic_hand_gestures/index.html) page title: "Gestures: Heuristic Logging | XR Blocks Template"

(example: ../xrblocks/templates/heuristic_hand_gestures/main.js)

```js
import 'xrblocks/addons/simulator/SimulatorAddons.js';

import * as xb from 'xrblocks';

const options = new xb.Options();
options.enableReticles();
options.enableGestures();
options.gestures.setGestureEnabled('point', true);
options.gestures.setGestureEnabled('spread', true);
options.hands.enabled = true;
options.hands.visualization = true;
options.hands.visualizeJoints = true;
options.hands.visualizeMeshes = true;
options.simulator.defaultMode = xb.SimulatorMode.POSE;

class GestureLogger extends xb.Script {
  init() {
    const gestures = xb.core.gestureRecognition;
    if (!gestures) {
      console.warn(
          '[GestureLogger] GestureRecognition is unavailable. ' +
          'Make sure options.enableGestures() is called before xb.init().');
      return;
    }
    this._onGestureStart = (event) => {
      const {hand, name, confidence = 0} = event.detail;
      console.log(
          `[gesture] ${hand} hand started ${name} (${confidence.toFixed(2)})`);
    };
    this._onGestureEnd = (event) => {
      const {hand, name} = event.detail;
      console.log(`[gesture] ${hand} hand ended ${name}`);
    };
    gestures.addEventListener('gesturestart', this._onGestureStart);
    gestures.addEventListener('gestureend', this._onGestureEnd);
  }

  dispose() {
    const gestures = xb.core.gestureRecognition;
    if (!gestures) return;
    if (this._onGestureStart) {
      gestures.removeEventListener('gesturestart', this._onGestureStart);
    }
    if (this._onGestureEnd) {
      gestures.removeEventListener('gestureend', this._onGestureEnd);
    }
  }
}

function start() {
  xb.add(new GestureLogger());
  xb.init(options);
}

document.addEventListener('DOMContentLoaded', () => {
  start();
});
```

Template: meshes

(example: ../xrblocks/templates/meshes/index.html) page title: "Meshes | XR Blocks Template"

(example: ../xrblocks/templates/meshes/main.js)

```js
import 'xrblocks/addons/simulator/SimulatorAddons.js';

import * as xb from 'xrblocks';

document.addEventListener('DOMContentLoaded', async function () {
  const options = new xb.Options();
  options.reticles.enabled = true;
  options.world.enableMeshDetection();
  options.world.meshes.showDebugVisualizations = true;
  await xb.init(options);
});
```

Template: planes

(example: ../xrblocks/templates/planes/index.html) page title: "Planes | XR Blocks Template"

(example: ../xrblocks/templates/planes/main.js)

```js
import 'xrblocks/addons/simulator/SimulatorAddons.js';

import * as xb from 'xrblocks';

document.addEventListener('DOMContentLoaded', async function () {
  const options = new xb.Options();
  options.reticles.enabled = true;
  options.world.enablePlaneDetection();
  options.world.planes.showDebugVisualizations = true;
  await xb.init(options);
});
```

Template: uikit

(example: ../xrblocks/templates/uikit/MaterialSymbolsIcon.js)

```js
import {Svg} from '@pmndrs/uikit';
import {computed} from '@preact/signals-core';
const SVG_BASE_PATH =
    'https://cdn.jsdelivr.net/gh/marella/material-symbols@v0.38.0/svg/{{weight}}/{{style}}/{{icon}}.svg';

export class MaterialSymbolsIcon extends Svg {
  name = 'Material Symbols Icon';

  constructor(properties, initialClasses, config) {
    const icon = properties?.icon ?? config?.defaultOverrides?.icon;
    const iconStyle =
        properties?.iconStyle ?? config?.defaultOverrides?.iconStyle;
    const iconWeight =
        properties?.iconWeight ?? config?.defaultOverrides?.iconWeight;
    const svgPath = computed(() => {
      const finalIcon = icon?.value ?? icon ?? 'question_mark';
      const finalStyle = iconStyle?.value ?? iconStyle ?? 'outlined';
      const finalWeight = iconWeight?.value ?? iconWeight ?? 400;
      return SVG_BASE_PATH.replace('{{style}}', finalStyle)
          .replace('{{icon}}', finalIcon)
          .replace('{{weight}}', String(finalWeight));
    });
    super(properties, initialClasses, {
      ...config,
      defaultOverrides: {
        src: svgPath,
        ...config?.defaultOverrides,
      },
    });
  }
}
```

(example: ../xrblocks/templates/uikit/index.html) page title: "XR Blocks x @pmndrs/uikit Template"

(example: ../xrblocks/templates/uikit/main.js)

```js
import 'xrblocks/addons/simulator/SimulatorAddons.js';

import * as uikit from '@pmndrs/uikit';
import * as xb from 'xrblocks';

import {MaterialSymbolsIcon} from './MaterialSymbolsIcon.js';

class UikitPanel extends xb.Script {
  dragFacingCamera = true;
  draggable = true;
  draggingMode = xb.DragMode.TRANSLATING;
  container;

  constructor() {
    super();
    const panelSize = 0.5;
    this.container = new uikit.Container({
      sizeX: (panelSize * 16) / 9,
      sizeY: panelSize,
      pixelSize: panelSize / 512,
      flexDirection: 'column',
      textAlign: 'center',
      color: 'white',
      fontSize: 64,
      backgroundColor: 'gray',
      borderRadius: 64,
      padding: 16,
    });
    this.add(this.container);
  }

  update() {
    this.container?.update(xb.getDeltaTime());
  }

  onObjectSelectStart(event) {
    this.dispatchEventRecursively(event.target, 'click', this.container);
  }

  dispatchEventRecursively(controller, eventType, object) {
    const intersections =
        xb.core.input.intersectObjectByController(controller, object);
    if (intersections.length == 0 || !(object instanceof uikit.Component)) {
      return;
    }
    for (const child of object.children) {
      this.dispatchEventRecursively(controller, eventType, child);
    }
    const intersection = intersections[0];
    object.dispatchEvent({
      type: 'click',
      distance: intersection.distance,
      nativeEvent: {},
      object: intersection.object,
      point: intersection.point,
      pointerId: controller.userData.id,
    });
  }
}

/**
 * UIKit Template
 */
class UikitTemplate extends xb.Script {
  constructor() {
    super();
    const spatialPanel = new UikitPanel();
    spatialPanel.position.set(0, 1.5, -1);
    this.add(spatialPanel);
    const topRow = new uikit.Text({
      text: 'XR Blocks x @pmndrs/uikit',
      flexGrow: 2,
    });
    spatialPanel.container.add(topRow);
    const bottomRow = new uikit.Container({
      flexDirection: 'row',
      flexGrow: 1,
      justifyContent: 'space-evenly',
      gap: 64,
    });
    spatialPanel.container.add(bottomRow);
    const yesButton = new MaterialSymbolsIcon({
      icon: 'check_circle',
    });
    yesButton.addEventListener('click', () => {
      console.log('yes button clicked');
    });
    bottomRow.add(yesButton);
    const noButton = new MaterialSymbolsIcon({
      icon: 'x_circle',
    });
    noButton.addEventListener('click', () => {
      console.log('no button clicked');
    });
    bottomRow.add(noButton);
  }
}

const options = new xb.Options();
options.enableUI();

document.addEventListener('DOMContentLoaded', async function () {
  xb.add(new UikitTemplate());
  options.simulator.instructions.enabled = false;
  await xb.init(options);
  const renderer = xb.core.renderer;
  renderer.localClippingEnabled = true;
  renderer.setTransparentSort(uikit.reversePainterSortStable);
});
```

Sample: depthmap

(example: ../xrblocks/samples/depthmap/DepthMapScene.js)

```js
import * as THREE from 'three';
import * as xb from 'xrblocks';

import {DepthVisualizationPass} from './DepthVisualizationPass.js';

export class DepthMapScene extends xb.Script {
  init() {
    if (xb.core.effects) {
      this.depthVisPass = new DepthVisualizationPass(xb.scene, xb.core.camera);
      xb.core.effects.addPass(this.depthVisPass);
    } else {
      console.error(
          'This sample needs post processing for adding the depth ' +
          'visualization pass. Please enable options.usePostprocessing');
    }
    this.depthMeshAlphaSlider = new xb.FreestandingSlider(
        /*start=*/ 1.0, /*min=*/ 0.0, /*max=*/ 1.0, /*scale=*/ 5.0);
    // Which controller is currently selecting depthMeshAlphaSlider.
    this.currentSliderController = null;
    const light = new THREE.HemisphereLight(0xffffff, 0xbbbbff, 3);
    light.position.set(0.5, 1, 0.25);
    this.add(light);
  }

  onSelectStart(event) {
    const controller = event.target;
    controller.userData.selected = true;
    this.currentSliderController = controller;
    this.depthMeshAlphaSlider.setInitialPose(
        controller.position, controller.quaternion);
  }

  onSelectEnd(event) {
    const controller = event.target;
    controller.userData.selected = false;
    if (this.currentSliderController == controller) {
      const opacity = this.depthMeshAlphaSlider.getValueFromController(
          this.currentSliderController);
      this.depthVisPass.setAlpha(opacity);
      this.depthMeshAlphaSlider.updateValue(opacity);
    }
    this.currentSliderController = null;
  }

  update() {
    if (this.currentSliderController) {
      const opacity = this.depthMeshAlphaSlider.getValueFromController(
          this.currentSliderController);
      this.depthVisPass.setAlpha(opacity);
    }
    this.depthVisPass.updateEnvironmentalDepthTexture(xb.core.depth);
  }
}
```

(example: ../xrblocks/samples/depthmap/DepthVisualizationPass.js)

```js
import * as THREE from 'three';
import {FullScreenQuad} from 'three/addons/postprocessing/Pass.js';
import * as xb from 'xrblocks';

import {DepthMapShader} from './depthmap.glsl.js';

/**
 * Depth map visualization postprocess pass.
 */
export class DepthVisualizationPass extends xb.XRPass {
  constructor() {
    super();
    this.depthTextures = [null, null];
    this.uniforms = {
      uDepthTexture: {value: null},
      uDepthTextureArray: {value: null},
      uRawValueToMeters: {value: 8.0 / 65536.0},
      uAlpha: {value: 1.0},
      tDiffuse: {value: null},
      uView: {value: 0},
      uIsTextureArray: {value: 0},
      // Used to interpret Quest 3 depth.
      uDepthNear: {value: 0},
    };
    this.depthMapQuad = new FullScreenQuad(new THREE.ShaderMaterial({
      name: 'DepthMapShader',
      uniforms: this.uniforms,
      vertexShader: DepthMapShader.vertexShader,
      fragmentShader: DepthMapShader.fragmentShader,
    }));
  }

  setAlpha(value) {
    this.uniforms.uAlpha.value = value;
  }
  updateEnvironmentalDepthTexture(xrDepth) {
    this.depthTextures[0] = xrDepth.getTexture(0);
    this.depthTextures[1] = xrDepth.getTexture(1);
    this.uniforms.uRawValueToMeters.value = xrDepth.rawValueToMeters;
    if (this.depthTextures[0]) {
      this.uniforms.uIsTextureArray.value =
          this.depthTextures[0].isExternalTexture ? 1.0 : 0;
    }
  }

  render(renderer, writeBuffer, readBuffer, deltaTime, maskActive, viewId) {
    const texture = this.depthTextures[viewId];
    if (!texture) return;
    if (texture.isExternalTexture) {
      this.uniforms.uDepthTextureArray.value = texture;
      const depthNear = xb.core.depth.gpuDepthData[0].depthNear;
      this.uniforms.uDepthNear.value = depthNear;
    } else {
      this.uniforms.uDepthTexture.value = texture;
    }
    this.uniforms.tDiffuse.value = readBuffer.texture;
    this.uniforms.uView.value = viewId;
    renderer.setRenderTarget(this.renderToScreen ? null : writeBuffer);
    this.depthMapQuad.render(renderer);
  }

  dispose() {
    this.depthMapQuad.dispose();
  }
}
```

(example: ../xrblocks/samples/depthmap/depthmap.glsl.js)

```js
export const DepthMapShader = {
  name: 'DepthMapShader',
  defines: {},
  vertexShader: /* glsl */ `
    varying vec2 vTexCoord;
    void main() {
      vTexCoord = uv;
      gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
    }
  `,
  fragmentShader: /* glsl */ `
    #include
    precision mediump float;
    uniform sampler2D uDepthTexture;
    uniform sampler2DArray uDepthTextureArray;
    uniform float uRawValueToMeters;
    uniform float uAlpha;
    uniform float uIsTextureArray;
    uniform int uView;
    uniform float uDepthNear;
    uniform sampler2D tDiffuse;
    uniform float cameraNear;
    uniform float cameraFar;
    varying vec2 vTexCoord;

    float DepthGetMeters(in sampler2D depth_texture, in vec2 depth_uv) {
      // Assume we're using floating point depth.
      return uRawValueToMeters * texture2D(depth_texture, depth_uv).r;
    }

    float DepthArrayGetMeters(in sampler2DArray depth_texture, in vec2 depth_uv) {
      float textureValue =
          texture(depth_texture, vec3(depth_uv.x, depth_uv.y, uView)).r;
      return uRawValueToMeters * uDepthNear / (1.0 - textureValue);
    }

    vec3 TurboColormap(in float x) {
      const vec4 kRedVec4 = vec4(0.55305649, 3.00913185, -5.46192616, -11.11819092);
      const vec4 kGreenVec4 = vec4(0.16207513, 0.17712472, 15.24091500, -36.50657960);
      const vec4 kBlueVec4 = vec4(-0.05195877, 5.18000081, -30.94853351, 81.96403246);
      const vec2 kRedVec2 = vec2(27.81927491, -14.87899417);
      const vec2 kGreenVec2 = vec2(25.95549545, -5.02738237);
      const vec2 kBlueVec2 = vec2(-86.53476570, 30.23299484);
      // Adjusts color space via 6 degree poly interpolation to avoid pure red.
      x = clamp(x * 0.9 + 0.03, 0.0, 1.0);
      vec4 v4 = vec4(1.0, x, x * x, x * x * x);
      vec2 v2 = v4.zw * v4.z;
      return vec3(
          dot(v4, kRedVec4) + dot(v2, kRedVec2),
          dot(v4, kGreenVec4) + dot(v2, kGreenVec2),
          dot(v4, kBlueVec4) + dot(v2, kBlueVec2));
    }

    void main(void) {
      vec4 texCoord = vec4(vTexCoord, 0, 1);
      vec2 uv = texCoord.xy;
      vec4 diffuse = texture2D(tDiffuse, texCoord.xy);
      highp float real_depth;
      if (uIsTextureArray < 0.5) {
        uv.y = 1.0 - uv.y;
        real_depth = DepthGetMeters(uDepthTexture, uv);
      } else {
        real_depth = DepthArrayGetMeters(uDepthTextureArray, uv);
      }
      vec4 depth_visualization =
          vec4(TurboColormap(clamp(real_depth / 8.0, 0.0, 1.0)), 1.0);
      gl_FragColor = mix(diffuse, depth_visualization, uAlpha);
    }
  `,
};
```

(example: ../xrblocks/samples/depthmap/index.html) page title: "Depth Map | XR Blocks"

(example: ../xrblocks/samples/depthmap/main.js)

```js
import 'xrblocks/addons/simulator/instructions/SimulatorInstructions.js';

import * as xb from 'xrblocks';

import {DepthMapScene} from './DepthMapScene.js';

const options = new xb.Options();
options.depth.enabled = true;
options.depth.depthTexture.enabled = true;
options.depth.depthTypeRequest = [xb.getUrlParameter('depthType') ?? 'raw'];
options.usePostprocessing = true;
options.setAppTitle('Depth Map');

function start() {
  xb.add(new DepthMapScene());
  xb.init(options);
}

document.addEventListener('DOMContentLoaded', function () {
  start();
});
```

Sample: depthmesh

(example: ../xrblocks/samples/depthmesh/index.html) page title: "Depth Mesh | XR Blocks"

(example: ../xrblocks/samples/depthmesh/main.js)

```js
import * as THREE from 'three';
import * as xb from 'xrblocks';

class DepthMeshVisualizer extends xb.Script {
  currentSliderController = null;
  depthMeshAlphaSlider = new xb.FreestandingSlider(
      /*start=*/ 1.0, /*min=*/ 0.0, /*max=*/ 1.0, /*scale=*/ 5.0);

  constructor() {
    super();
    const light = new THREE.HemisphereLight(0xffffff, 0xbbbbff, 3);
    light.position.set(0.5, 1, 0.25);
    this.add(light);
  }

  init() {
    xb.core.depth.depthMesh.material.uniforms.uOpacity.value =
        this.depthMeshAlphaSlider.startingValue;
  }

  onSelectStart(event) {
    this.currentSliderController = event.target;
    this.depthMeshAlphaSlider.setInitialPoseFromController(
        this.currentSliderController);
  }

  onSelectEnd(event) {
    const controller = event.target;
    if (this.currentSliderController == controller) {
      const opacity = this.depthMeshAlphaSlider.getValueFromController(
          this.currentSliderController);
      xb.core.depth.depthMesh.material.uniforms.uOpacity.value = opacity;
      this.depthMeshAlphaSlider.updateValue(opacity);
    }
    this.currentSliderController = null;
  }

  update() {
    if (this.currentSliderController) {
      const opacity = this.depthMeshAlphaSlider.getValueFromController(
          this.currentSliderController);
      xb.core.depth.depthMesh.material.uniforms.uOpacity.value = opacity;
      console.log('opacity:' + opacity);
    }
  }
}

document.addEventListener('DOMContentLoaded', function () {
  const options = new xb.Options();
  options.setAppTitle('Depth Mesh');
  options.depth = new xb.DepthOptions(xb.xrDepthMeshVisualizationOptions);
  options.depth.depthTypeRequest = [xb.getUrlParameter('depthType') ?? 'raw'];
  xb.add(new DepthMeshVisualizer());
  xb.init(options);
});
```

Sample: gestures_custom

(example: ../xrblocks/samples/gestures_custom/CustomGestureDemo.js)

```js
// Imports LiteRT: https://ai.google.dev/edge/litert/web/get_started
import {loadLiteRt, setWebGpuDevice} from '@litertjs/core';
import {runWithTfjsTensors} from '@litertjs/tfjs-interop';
// TensorFlow.js + WebGPU backend
import * as tf from '@tensorflow/tfjs';
// eslint-disable-next-line @typescript-eslint/no-unused-vars
import {WebGPUBackend} from '@tensorflow/tfjs-backend-webgpu';
import * as THREE from 'three';
import * as xb from 'xrblocks';

const GESTURE_LABELS = [
  'OTHER',
  'FIST',
  'THUMB UP',
  'THUMB DOWN',
  'POINT',
  'VICTORY',
  'ROCK',
  'SHAKA',
  'GESTURE_LABEL_MAX_ENUM',
];

const GESTURE_IMAGES = [
  'images/empty.png',
  'images/fist.png',
  'images/thumb.png',
  'images/thumb_down.png',
  'images/point.png',
  'images/victory.png',
  'images/rock.png',
  'images/shaka.png',
  'images/error.png',
];

const LEFT_HAND_INDEX = 0;
const RIGHT_HAND_INDEX = 1;
const UNKNOWN_GESTURE = 8;

/**
 * A demo scene that uses a custom ML model to detect and display static hand
 * gestures for both hands in real-time.
 */
export class CustomGestureDemo extends xb.Script {
  constructor() {
    super();
    // Initializes UI.
    {
      // Make a root panel > grid > row > controlPanel > grid.
      const panel = new xb.SpatialPanel({backgroundColor: '#00000000'});
      this.add(panel);
      const grid = panel.addGrid();
      // Show user data.
      const dataRow = grid.addRow({weight: 0.3});
      // Left hand image and text.
      const leftCol = dataRow.addCol({weight: 0.5});
      const leftHandRow = leftCol.addRow({weight: 0.5});
      // Indentation.
      leftHandRow.addCol({weight: 0.4});
      this.leftHandImage = leftHandRow.addCol({weight: 0.2}).addImage({
        src: GESTURE_IMAGES[0],
        scaleFactor: 0.3,
      });
      this.leftHandLabel = leftCol.addRow({weight: 0.5}).addText({
        text: 'Loading...',
        fontColor: '#ffffff',
      });
      const rightCol = dataRow.addCol({weight: 0.5});
      const rightHandRow = rightCol.addRow({weight: 0.5});
      // Indentation.
      rightHandRow.addCol({weight: 0.4});
      // Image.
      this.rightHandImage = rightHandRow.addCol({weight: 0.2}).addImage({
        src: GESTURE_IMAGES[0],
        scaleFactor: 0.3,
      });
      this.rightHandLabel = rightCol.addRow({weight: 0.4}).addText({
        text: 'Loading...',
        fontColor: '#ffffff',
      });
      // Indentation.
      grid.addRow({weight: 0.1});
      // Control row.
      const controlRow = grid.addRow({weight: 0.6});
      const ctrlPanel = controlRow.addPanel({backgroundColor: '#00000055'});
      const ctrlGrid = ctrlPanel.addGrid();
      {
        // Left indentation.
        ctrlGrid.addCol({weight: 0.1});
        // Middle column.
        const midColumn = ctrlGrid.addCol({weight: 0.8});
        midColumn.addRow({weight: 0.1});
        midColumn.addRow({weight: 0.2}).addText({
          text: 'Perform one of these gestures',
          fontColor: '#ffffff',
        });
        midColumn.addRow({weight: 0.2})
            .addText({text: '(either hand):', fontColor: '#ffffff'});
        const gesturesRow = midColumn.addRow({weight: 0.5});
        gesturesRow.addCol({weight: 0.1});
        gesturesRow.addCol({weight: 0.1})
            .addImage({src: 'images/fist.png', scaleFactor: 0.3});
        gesturesRow.addCol({weight: 0.1})
            .addImage({src: 'images/thumb.png', scaleFactor: 0.3});
        gesturesRow.addCol({weight: 0.1})
            .addImage({src: 'images/thumb_down.png', scaleFactor: 0.3});
        gesturesRow.addCol({weight: 0.1})
            .addImage({src: 'images/point.png', scaleFactor: 0.3});
        gesturesRow.addCol({weight: 0.1})
            .addImage({src: 'images/victory.png', scaleFactor: 0.3});
        gesturesRow.addCol({weight: 0.1})
            .addImage({src: 'images/rock.png', scaleFactor: 0.3});
        gesturesRow.addCol({weight: 0.1})
            .addImage({src: 'images/shaka.png', scaleFactor: 0.3});
        // Vertical alignment on the description text element.
        midColumn.addRow({weight: 0.1});
        // Right indentation.
        ctrlGrid.addCol({weight: 0.1});
      }
      const orbiter = ctrlGrid.addOrbiter();
      orbiter.addExitButton();
      panel.updateLayouts();
      this.panel = panel;
    }
    // Model.
    this.modelPath = './custom_gestures_model.tflite';
    this.modelState = 'None';
    this.frameId = 0;
    setTimeout(() => {
      this.setBackendAndLoadModel();
    }, 1);
  }

  init() {
    // Adds light.
    this.add(new THREE.HemisphereLight(0x888877, 0x777788, 3));
    const light = new THREE.DirectionalLight(0xffffff, 1.5);
    light.position.set(0, 4, 0);
    this.add(light);
  }

  async setBackendAndLoadModel() {
    this.modelState = 'Loading';
    try {
      await tf.setBackend('webgpu');
      await tf.ready();
      // Initializes LiteRT.js's WASM files.
      const wasmPath = 'https://unpkg.com/@litertjs/core@0.2.1/wasm/';
      const liteRt = await loadLiteRt(wasmPath);
      // Makes LiteRT use the same GPU device as TF.js (for tensor conversion).
      const backend = tf.backend();
      setWebGpuDevice(backend.device);
      // Loads model via LiteRT.
      await this.loadModel(liteRt);
      if (this.model) {
        // Prints model details to the log.
        console.log('Model Details: ', this.model.getInputDetails());
      }
      this.modelState = 'Ready';
    } catch (error) {
      console.error('Failed to load model or backend:', error);
    }
  }

  async loadModel(liteRt) {
    try {
      this.model = await liteRt.loadAndCompile(this.modelPath, {
        // Currently, only 'webgpu' is supported.
        accelerator: 'webgpu',
      });
    } catch (error) {
      this.model = null;
      console.error('Error loading model:', error);
    }
  }

  calculateRelativeHandBoneAngles(jointPositions) {
    // Reshape jointPositions.
    const jointPositionsReshaped =
        jointPositions.reshape([xb.HAND_JOINT_COUNT, 3]);
    // Calculate bone vectors.
    const boneVectors = [];
    xb.HAND_JOINT_IDX_CONNECTION_MAP.forEach(([joint1, joint2]) => {
      const boneVector = jointPositionsReshaped.slice([joint2, 0], [1, 3])
          .sub(jointPositionsReshaped.slice([joint1, 0], [1, 3]))
          .squeeze();
      const norm = boneVector.norm();
      const normalizedBoneVector = boneVector.div(norm);
      boneVectors.push(normalizedBoneVector);
    });
    // Calculate relative hand bone angles.
    const relativeHandBoneAngles = [];
    xb.HAND_BONE_IDX_CONNECTION_MAP.forEach(([bone1, bone2]) => {
      const angle = boneVectors[bone1].dot(boneVectors[bone2]);
      relativeHandBoneAngles.push(angle);
    });
    // Stack the angles into a tensor.
    return tf.stack(relativeHandBoneAngles);
  }

  async detectGesture(handJoints) {
    if (!this.model || !handJoints || handJoints.length !== 25 * 3) {
      console.log('Invalid hand joints or model load error.');
      return UNKNOWN_GESTURE;
    }
    try {
      const tensor =
          this.calculateRelativeHandBoneAngles(tf.tensor1d(handJoints));
      const tensorReshaped =
          tensor.reshape([1, xb.HAND_BONE_IDX_CONNECTION_MAP.length, 1]);
      const result = runWithTfjsTensors(this.model, tensorReshaped);
      const integerLabel = result[0].as1D().arraySync();
      if (integerLabel.length == 7) {
        let x = integerLabel[0];
        let idx = 0;
        for (let t = 0; t < 7; ++t) {
          if (integerLabel[t] > x) {
            idx = t;
            x = integerLabel[t];
          }
        }
        return idx;
      }
    } catch (error) {
      console.error('Error:', error);
    }
    return UNKNOWN_GESTURE;
  }

  async #detectHandGestures(joints) {
    if (Object.keys(joints).length !== 25) {
      return UNKNOWN_GESTURE;
    }
    const handJointPositions = [];
    for (const i in joints) {
      handJointPositions.push(joints[i].position.x);
      handJointPositions.push(joints[i].position.y);
      handJointPositions.push(joints[i].position.z);
    }
    if (handJointPositions.length !== 25 * 3) {
      return UNKNOWN_GESTURE;
    }
    const result = await this.detectGesture(handJointPositions);
    return result;
  }

  #shiftIndexIfNeeded(joints, result) {
    // No need to shift before thumb, which is 2.
    result += result > 2 ? 1 : 0;
    // Check thumb direction.
    if (result === 2) {
      // console.log(joints['thumb-phalanx-distal'], joints['thumb-tip']);
      // 1 - up; -1 - down; 0 - other.
      const tmp = this.isThumbUpOrDown(
          joints['thumb-phalanx-distal'].position,
          joints['thumb-tip'].position);
      result = tmp === 0 ? 0 : tmp < 0 ? result + 1 : result;
    }
    return result;
  }

  async update() {
    if (this.frameId % 5 === 0) {
      const hands = xb.user.hands;
      if (hands != null && hands.hands && hands.hands.length == 2) {
        // Left hand.
        const leftJoints = hands.hands[LEFT_HAND_INDEX].joints;
        let leftHandResult = await this.#detectHandGestures(leftJoints);
        leftHandResult = this.#shiftIndexIfNeeded(leftJoints, leftHandResult);
        // Update image and label.
        this.leftHandImage.load(GESTURE_IMAGES[leftHandResult]);
        this.leftHandLabel.setText(GESTURE_LABELS[leftHandResult]);
        // Right hand.
        const rightJoints = hands.hands[RIGHT_HAND_INDEX].joints;
        let rightHandResult = await this.#detectHandGestures(rightJoints);
        rightHandResult =
            this.#shiftIndexIfNeeded(rightJoints, rightHandResult);
        // Update image and label.
        this.rightHandImage.load(GESTURE_IMAGES[rightHandResult]);
        this.rightHandLabel.setText(GESTURE_LABELS[rightHandResult]);
      }
    }
    this.frameId++;
  }

  isThumbUpOrDown(p1, p2) {
    // Assuming p1 is the base of the thumb and p2 is the tip.
    // Vector from base to tip.
    const vector = {
      x: p2.x - p1.x,
      y: p2.y - p1.y,
      z: p2.z - p1.z,
    };
    // Calculate the magnitude of the vector.
    const magnitude = Math.sqrt(
        vector.x * vector.x + vector.y * vector.y + vector.z * vector.z);
    // If the magnitude is very small, it's likely not a significant gesture.
    if (magnitude < 0.001) {
      return 0;  // Otherwise
    }
    // Normalize the vector to get its direction.
    const normalizedVector = {
      x: vector.x / magnitude,
      y: vector.y / magnitude,
      z: vector.z / magnitude,
    };
    // Define the "up" and "down" direction vectors (positive and negative
    // Y-axis).
    const upVector = {x: 0, y: 1, z: 0};
    const downVector = {x: 0, y: -1, z: 0};
    // Angle threshold (cosine) for "up" (within 45 degrees of vertical).
    const cosUpThreshold = Math.cos((45 * Math.PI) / 180);  // Approximately 0.707
    // Angle threshold (cosine) for "down" (within 45 degrees of negative
    // vertical). We need the dot product with the *down* vector to be
    // >= cos(45 degrees).
    const dotDownThreshold = cosUpThreshold;
    // Calculates the dot product with the "up" vector.
    const dotUp = normalizedVector.x * upVector.x +
        normalizedVector.y * upVector.y + normalizedVector.z * upVector.z;
    // Calculates the dot product with the "down" vector (negate the y
    // component of the normalized vector).
    const dotDown = normalizedVector.x * downVector.x +
        normalizedVector.y * downVector.y + normalizedVector.z * downVector.z;
    if (dotUp >= cosUpThreshold) {
      return 1;  // Thumb up
    } else if (dotDown >= dotDownThreshold) {
      return -1;  // Thumb down
    } else {
      return 0;  // Otherwise
    }
  }
}
```

(example: ../xrblocks/samples/gestures_custom/index.html) page title: "Custom Hand Gestures | XR Blocks"

(example: ../xrblocks/samples/gestures_custom/main.js)

```js
import 'xrblocks/addons/simulator/SimulatorAddons.js';

import * as xb from 'xrblocks';

import {CustomGestureDemo} from './CustomGestureDemo.js';

const options = new xb.Options({
  antialias: true,
  reticles: {enabled: true},
  visualizeRays: false,
  hands: {enabled: true, visualization: false},
  simulator: {defaultMode: xb.SimulatorMode.POSE},
});

async function start() {
  options.setAppTitle('Custom Gestures');
  xb.add(new CustomGestureDemo());
  await xb.init(options);
}

document.addEventListener('DOMContentLoaded', function () {
  setTimeout(function () {
    start();
  }, 200);
});
```

Sample: lighting

(example: ../xrblocks/samples/lighting/LightingScene.js)

```js
import * as THREE from 'three';
import * as xb from 'xrblocks';
import {ModelManager} from 'xrblocks/addons/ui/ModelManager.js';

import {ANIMALS_DATA} from './animals_data.js';

export class LightingScene extends xb.Script {
  constructor() {
    super();
    this.pointer = new THREE.Vector3();
    this.raycaster = new THREE.Raycaster();
    this.modelManager =
        new ModelManager(ANIMALS_DATA, /*enableOcclusion=*/ true);
    this.modelManager.layers.enable(xb.OCCLUDABLE_ITEMS_LAYER);
    this.add(this.modelManager);
  }

  init() {
    xb.core.input.addReticles();
    xb.showReticleOnDepthMesh(true);
  }
  updatePointerPosition(event) {
    // (-1 to +1) for both components.
    this.pointer.x = (event.clientX / window.innerWidth) * 2 - 1;
    this.pointer.y = -(event.clientY / window.innerHeight) * 2 + 1;
    // Scale pointer.x from [-1, 0] to [-1, 1].
    this.pointer.x = 1 + 2 * this.pointer.x;
  }

  onSelectStart(event) {
    const controller = event.target;
    if (xb.core.input.intersectionsForController.get(controller).length > 0) {
      const intersection =
          xb.core.input.intersectionsForController.get(controller)[0];
      if (intersection.handleSelectRaycast) {
        intersection.handleSelectRaycast(intersection);
        return;
      } else if (intersection.object.handleSelectRaycast) {
        intersection.object.handleSelectRaycast(intersection);
        return;
      } else if (intersection.object == xb.core.depth.depthMesh) {
        this.onDepthMeshSelectStart(intersection);
        return;
      }
    }
  }

  onDepthMeshSelectStart(intersection) {
    console.log('Depth mesh select intersection:', intersection.point);
    this.modelManager.positionModelAtIntersection(intersection, xb.core.camera);
  }

  onPointerDown(event) {
    this.updatePointerPosition(event);
    const cameras = xb.core.renderer.xr.getCamera().cameras;
    if (cameras.length == 0) return;
    const camera = cameras[0];
    this.raycaster.setFromCamera(this.pointer, camera);
    const intersections =
        this.raycaster.intersectObjects(xb.core.input.reticleTargets);
    for (let intersection of intersections) {
      if (intersection.handleSelectRaycast) {
        intersection.handleSelectRaycast(intersection);
        return;
      } else if (intersection.object.handleSelectRaycast) {
        intersection.object.handleSelectRaycast(intersection);
        return;
      } else if (intersection.object == xb.core.depth.depthMesh) {
        this.modelManager.positionModelAtIntersection(intersection, camera);
        return;
      }
    }
  }
}
```

(example: ../xrblocks/samples/lighting/animals_data.js)

```js
export const ANIMALS_DATA = [
  {
    path: 'https://cdn.jsdelivr.net/gh/xrblocks/assets@main/',
    model: 'models/Cat/cat.gltf',
    thumbnail: 'thumbnail.png',
  },
];
```

(example: ../xrblocks/samples/lighting/index.html) page title: "Lighting Estimation | XR Blocks"

(example: ../xrblocks/samples/lighting/main.js)

```js
import * as xb from 'xrblocks';

import {LightingScene} from './LightingScene.js';

// Set up depth mesh options. We need the depth mesh to render shadows onto.
let options = new xb.Options();
options.depth = new xb.DepthOptions(xb.xrDepthMeshOptions);
options.depth.enabled = true;
options.depth.depthMesh.enabled = true;
options.depth.depthTexture.enabled = true;
options.depth.depthMesh.updateFullResolutionGeometry = true;
options.depth.depthMesh.renderShadow = true;
options.depth.depthMesh.shadowOpacity = 0.6;
options.depth.occlusion.enabled = true;
// Set up lighting options.
options.lighting = new xb.LightingOptions(xb.xrLightingOptions);
options.lighting.enabled = true;
options.lighting.useAmbientSH = true;
options.lighting.useDirectionalLight = true;
options.lighting.castDirectionalLightShadow = true;
options.lighting.useDynamicSoftShadow = false;
options.xrButton = {
  ...options.xrButton,
  startText: ' BRING IT TO LIFE',
  endText: ' MISSION COMPLETE',
};

async function start() {
  const lightingScene = new LightingScene();
  options.setAppTitle('Lighting Estimation');
  await xb.init(options);
  xb.add(lightingScene);
  window.addEventListener(
      'pointerdown', lightingScene.onPointerDown.bind(lightingScene));
}

document.addEventListener('DOMContentLoaded', function () {
  start();
});
```

Sample: modelviewer

(example: ../xrblocks/samples/modelviewer/ModelViewerScene.js)

```js
import * as THREE from 'three';
import * as xb from 'xrblocks';

const kLightX = xb.getUrlParamFloat('lightX', 0);
const kLightY = xb.getUrlParamFloat('lightY', 500);
const kLightZ = xb.getUrlParamFloat('lightZ', -10);

const ASSETS_BASE_URL = 'https://cdn.jsdelivr.net/gh/xrblocks/assets@main/';
const PROPRIETARY_ASSETS_BASE_URL =
    'https://cdn.jsdelivr.net/gh/xrblocks/proprietary-assets@main/';

export class ModelViewerScene extends xb.Script {
  constructor() {
    super();
  }

  async init() {
    xb.core.input.addReticles();
    this.addLights();
    this.createModelFromObject();
    await Promise.all([
      this.createModelFromGLTF(),
      this.createModelFromAnimatedGLTF(),
      this.createModelFromSplat(),
      this.createModelInPanel(),
    ]);
  }

  addLights() {
    this.add(new THREE.HemisphereLight(0xbbbbbb, 0x888888, 3));
    const light = new THREE.DirectionalLight(0xffffff, 2);
    light.position.set(kLightX, kLightY, kLightZ);
    this.add(light);
  }

  createModelFromObject() {
    const model = new xb.ModelViewer({});
    model.add(new THREE.Mesh(
        new THREE.CylinderGeometry(0.15, 0.15, 0.4),
        new THREE.MeshPhongMaterial({color: 0xdb5461})));
    model.setupBoundingBox();
    model.setupRaycastCylinder();
    model.setupPlatform();
    model.position.set(-0.15, 0.75, -1.65);
    this.add(model);
  }

  async createModelFromGLTF() {
    const model = new xb.ModelViewer({});
    this.add(model);
    await model.loadGLTFModel({
      data: {
        scale: {x: 0.009, y: 0.009, z: 0.009},
        path: PROPRIETARY_ASSETS_BASE_URL,
        model: 'chess/chess_compressed.glb',
      },
      renderer: xb.core.renderer,
    });
    model.position.set(0, 0.78, -1.1);
  }

  async createModelFromAnimatedGLTF() {
    const model = new xb.ModelViewer({});
    this.add(model);
    await model.loadGLTFModel({
      data: {
        scale: {x: 1.0, y: 1.0, z: 1.0},
        path: ASSETS_BASE_URL,
        model: 'models/Cat/cat.gltf',
      },
      renderer: xb.core.renderer,
    });
    model.position.set(0.9, 0.68, -0.95);
  }

  async createModelFromSplat() {
    const model = new xb.ModelViewer({castShadow: false, receiveShadow: false});
    this.add(model);
    await model.loadSplatModel({
      data: {
        model: PROPRIETARY_ASSETS_BASE_URL + 'lego/lego.spz',
        scale: {x: 0.6, y: 0.6, z: 0.6},
        rotation: {x: 0, y: 180, z: 0},
      },
    });
    model.position.set(0.4, 0.78, -1.1);
    model.rotation.set(0, -Math.PI / 6, 0);
  }

  async createModelInPanel() {
    const panel = new xb.SpatialPanel({
      backgroundColor: '#00000000',
      width: 0.5,
      height: 0.25,
      useDefaultPosition: false,
    });
    panel.isRoot = true;
    this.add(panel);
    panel.position.set(0, 1.5, -2.0);
    panel.updateLayouts();
    const model = new xb.ModelViewer({});
    panel.add(model);
    await model.loadGLTFModel({
      data: {
        scale: {x: 0.002, y: 0.002, z: 0.002},
        rotation: {x: 0, y: 180, z: 0},
        path: PROPRIETARY_ASSETS_BASE_URL,
        model: 'earth/Earth_1_12756.glb',
      },
      setupPlatform: false,
      renderer: xb.core.renderer,
    });
  }
}
```
../xrblocks/samples/modelviewer/index.html) Model Viewer | XR Blocks
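For quick reference, a minimal hedged sketch of the GLTF-loading pattern demonstrated by ModelViewerScene above (only the cat asset path is taken from the samples; the class name, scale, and placement are illustrative):

```js
import * as xb from 'xrblocks';

// Sketch: load a single GLTF into an xb.ModelViewer inside a Script.
class SingleModelScene extends xb.Script {
  async init() {
    const model = new xb.ModelViewer({});
    this.add(model);
    await model.loadGLTFModel({
      data: {
        scale: {x: 1.0, y: 1.0, z: 1.0},
        path: 'https://cdn.jsdelivr.net/gh/xrblocks/assets@main/',
        model: 'models/Cat/cat.gltf',
      },
      renderer: xb.core.renderer,
    });
    model.position.set(0, 0.7, -1.0); // Illustrative placement.
  }
}
```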
(example: ../xrblocks/samples/modelviewer/main.js) import 'xrblocks/addons/simulator/SimulatorAddons.js'; import {html} from 'lit'; import * as xb from 'xrblocks'; import {ModelViewerScene} from './ModelViewerScene.js'; document.addEventListener('DOMContentLoaded', async () => { const modelViewerScene = new ModelViewerScene(); xb.add(modelViewerScene); const options = new xb.Options(); options.simulator.instructions.customInstructions = [ { header: html`
Model Viewer
`, videoSrc: 'model_viewer_simulator_usage.webm', description: html`Click or pinch the object to rotate. Drag the platform to move.`, }, ]; options.setAppTitle('Model Viewer'); await xb.init(options); }); Sample: paint (example: ../xrblocks/samples/paint/index.html) Paint | XR Blocks (example: ../xrblocks/samples/paint/main.js) import 'xrblocks/addons/simulator/SimulatorAddons.js'; import * as THREE from 'three'; import {TubePainter} from 'three/addons/misc/TubePainter.js'; import * as xb from 'xrblocks'; /** * A remixed version of three.js's examples/webxr_xr_paint.html in XR Blocks. * PaintDemo is an example script for using pinch to paint in Android XR and * using clicks to draw in desktop simulated environments. */ class PaintDemo extends xb.Script { init() { this.add(new THREE.HemisphereLight(0xffffff, 0x666666, /*intensity=*/ 3)); // Painting setup. this.painters = []; this.user = xb.core.user; for (let i = 0; i < this.user.controllers.length; ++i) { const painter = new TubePainter(); this.painters.push(painter); this.add(painter.mesh); } // Adds pivot points to indicate the user's intent. this.user.enablePivots(); } /** * Moves the painter to the pivot position when select starts. * @param {XRInputSourceEvent} event */ onSelectStart(event) { const id = event.target.userData.id; const painter = this.painters[id]; const cursor = this.user.getPivotPosition(id); painter.moveTo(cursor); } /** * Updates the painter's line to the current pivot position during selection. * @param {XRInputSourceEvent} event */ onSelecting(event) { const id = event.target.userData.id; const painter = this.painters[id]; const cursor = this.user.getPivotPosition(id); painter.lineTo(cursor); painter.update(); } /** * Stores the controller's initial height and its pivot's initial scale * when squeeze starts. * @param {XRInputSourceEvent} event */ onSqueezeStart(event) { const controller = event.target; const id = controller.userData.id; const data = this.user.data[id].squeeze; data.positionOnStart = controller.position.y; data.scaleOnStart = this.user.getPivot(id).scale.x; } /** * Updates the scale of the controller's pivot based on the squeeze amount. * @param {XRInputSourceEvent} event */ onSqueezing(event) { const controller = event.target; const id = controller.userData.id; const pivot = this.user.getPivot(id); const data = this.user.data[id].squeeze; const delta = (controller.position.y - data.positionOnStart) * 5; const scale = Math.max(0.1, data.scaleOnStart + delta); pivot.scale.setScalar(scale); } } /** * Entry point for the application.
 */ function start() { const options = new xb.Options(); options.setAppTitle('XR Paint'); xb.add(new PaintDemo()); xb.init(options); } document.addEventListener('DOMContentLoaded', start); Sample: reticle (example: ../xrblocks/samples/reticle/index.html) Reticle | XR Blocks (example: ../xrblocks/samples/reticle/main.js) import * as xb from 'xrblocks'; import {TextBillboard} from 'xrblocks/addons/ui/TextBillboard.js'; class ReticleVisualizer extends xb.Script { activeControllerToBillboardMap = new Map(); init() { xb.showReticleOnDepthMesh(true); } onSelectStart(event) { const controller = event.target; const intersection = xb.core.user.select( xb.core.depth.depthMesh, controller ); if (!intersection) return; const billboard = new TextBillboard(); this.add(billboard); this.activeControllerToBillboardMap.set(controller, billboard); this.updateBillboard(controller, billboard); } onSelectEnd(event) { this.activeControllerToBillboardMap.delete(event.target); } update() { this.activeControllerToBillboardMap.forEach((billboard, controller) => { this.updateBillboard(controller, billboard); }); } updateBillboard(controller, billboard) { const intersection = xb.core.user.select( xb.core.depth.depthMesh, controller ); if (intersection) { const reticleHeight = intersection.point.y; billboard.position.copy(intersection.point); billboard.lookAt(xb.core.camera.position); billboard.updateText( `Distance: ${intersection.distance.toFixed(2)} m\n` + `Height: ${reticleHeight.toFixed(2)} m` ); } } } document.addEventListener('DOMContentLoaded', function () { const options = new xb.Options(); options.depth = new xb.DepthOptions(xb.xrDepthMeshOptions); options.setAppTitle('XR Reticle'); xb.add(new ReticleVisualizer()); xb.init(options); }); Sample: skybox_agent (example: ../xrblocks/samples/skybox_agent/GeminiSkyboxGenerator.js) /* eslint-env browser */ import 'xrblocks/addons/simulator/SimulatorAddons.js'; import * as THREE from 'three'; import * as xb from 'xrblocks'; import {TranscriptionManager} from './TranscriptionManager.js'; export class GeminiSkyboxGenerator extends xb.Script { constructor() { super(); this.transcription = null; this.liveAgent = null; this.statusText = null; this.defaultText = "I am a skybox designer agent. Describe the background you want, and I'll render it for you!"; } init() { this.createTextDisplay(); this.createAgent(); this.add(new THREE.HemisphereLight(0x888877, 0x777788, 3)); const light = new THREE.DirectionalLight(0xffffff, 5.0); light.position.set(-0.5, 4, 1.0); this.add(light); } createAgent() { this.liveAgent = new xb.SkyboxAgent( xb.core.ai, xb.core.sound, xb.core.scene, { onSessionStart: () => { this.updateButtonState(); this.updateStatus('Session started - Ready to listen'); }, onSessionEnd: () => { this.updateButtonState(); this.transcription?.clear(); this.transcription?.setText(this.defaultText); this.updateStatus('Session ended'); }, onError: (error) => { this.updateStatus(`Error: ${error.message}`); this.transcription?.addText(`✗ Error: ${error.message}`); }, } ); } async toggleGeminiLive() { const isActive = this.liveAgent?.getSessionState().isActive; return isActive ?
this.stopGeminiLive() : this.startGeminiLive(); } async startGeminiLive() { if (this.liveAgent?.getSessionState().isActive) return; try { this.updateStatus('Starting session...'); // Enable audio BEFORE starting the session await xb.core.sound.enableAudio(); // Start live session with callbacks await this.liveAgent.startLiveSession({ onopen: () => { this.updateStatus('Connected - Listening...'); }, onmessage: (message) => this.handleAIMessage(message), onclose: (closeEvent) => { this.handleSessionClose(closeEvent); }, }); } catch (error) { this.updateStatus(`Failed to start: ${error.message}`); this.transcription?.addText( `Error: Failed to start AI session - ${error.message}` ); await this.cleanup(); } } async stopGeminiLive() { if (!this.liveAgent?.getSessionState().isActive) return; await this.cleanup(); } handleSessionClose(closeEvent) { if (closeEvent.reason) { this.transcription?.addText(closeEvent.reason); } xb.core.sound?.disableAudio(); xb.core.sound?.stopAIAudio(); } createTextDisplay() { this.textPanel = new xb.SpatialPanel({ width: 3, height: 1.8, backgroundColor: '#1a1a1abb', }); const grid = this.textPanel.addGrid(); const statusRow = grid.addRow({weight: 0.1}); this.statusText = statusRow.addText({ text: 'Click Start to begin', fontSize: 0.04, fontColor: '#4ecdc4', textAlign: 'center', }); const responseDisplay = new xb.ScrollingTroikaTextView({ text: this.defaultText, fontSize: 0.03, textAlign: 'left', }); grid.addRow({weight: 0.65}).add(responseDisplay); this.transcription = new TranscriptionManager(responseDisplay); this.toggleButton = grid.addRow({weight: 0.25}).addTextButton({ text: '▶ Start', fontColor: '#ffffff', backgroundColor: '#006644', fontSize: 0.2, }); this.toggleButton.onTriggered = () => this.toggleGeminiLive(); this.textPanel.position.set(0, 1.2, -2); this.add(this.textPanel); } async handleAIMessage(message) { if (message.data) { xb.core.sound.playAIAudio(message.data); } const content = message.serverContent; if (content) { if (content.inputTranscription?.text) { this.transcription.handleInputTranscription( content.inputTranscription.text ); } if (content.outputTranscription?.text) { this.transcription.handleOutputTranscription( content.outputTranscription.text ); } if (content.turnComplete) { this.transcription.finalizeTurn(); } } if (message.toolCall) { this.updateStatus('AI is calling a tool...'); const functionResponses = []; for (const fc of message.toolCall.functionCalls) { const tool = this.liveAgent.findTool(fc.name); if (tool) { const promptText = fc.args?.prompt || 'custom scene'; this.updateStatus(`Generating skybox: ${promptText}...`); // Small delay to ensure status is visible before long operation await new Promise((resolve) => setTimeout(resolve, 100)); const result = await tool.execute(fc.args); const response = xb.SkyboxAgent.createToolResponse( fc.id, fc.name, result ); functionResponses.push(response); if (result.success) { this.updateStatus('Skybox generated successfully!'); this.transcription.addText(`✓ ${result.data || 'Task completed'}`); } else { this.updateStatus(`Generation failed: ${result.error}`); this.transcription.addText(`✗ Error: ${result.error}`); } } else { this.updateStatus(`Tool not found: ${fc.name}`); functionResponses.push({ id: fc.id, name: fc.name, response: {error: `Tool ${fc.name} not found`}, }); this.transcription.addText(`✗ Tool not found: ${fc.name}`); } } this.liveAgent.sendToolResponse({functionResponses}); } } updateButtonState() { const isActive = this.liveAgent?.getSessionState().isActive;
this.toggleButton?.setText(isActive ? '⏹ Stop' : '▶ Start'); } updateStatus(message) { if (this.statusText) { this.statusText.text = message; } } async cleanup() { if (this.liveAgent?.getSessionState().isActive) { try { await this.liveAgent.stopLiveSession(); } catch (e) { this.updateStatus(`Error stopping session: ${e.message}`); } } xb.core.sound?.disableAudio(); xb.core.sound?.stopAIAudio(); } async dispose() { await this.cleanup(); super.dispose(); } } async function requestAudioPermission() { try { const stream = await navigator.mediaDevices.getUserMedia({ audio: { sampleRate: 16000, channelCount: 1, echoCancellation: true, noiseSuppression: true, }, }); stream.getTracks().forEach((track) => track.stop()); return stream; } catch (error) { // Standalone function: no `this` context here, so log to the console. console.error(`Error requesting audio permission: ${error.message}`); return null; } } async function start() { try { await requestAudioPermission(); const options = new xb.Options(); options.enableUI(); options.enableHands(); options.enableAI(); options.setAppTitle('Generating Skybox with Gemini'); xb.init(options); xb.add(new GeminiSkyboxGenerator()); } catch (error) { console.error(`Error initializing: ${error.message}`); } } document.addEventListener('DOMContentLoaded', function () { start(); }); (example: ../xrblocks/samples/skybox_agent/TranscriptionManager.js) export class TranscriptionManager { constructor(responseDisplay) { this.responseDisplay = responseDisplay; this.currentInputText = ''; this.currentOutputText = ''; this.conversationHistory = []; } handleInputTranscription(text) { if (!text) return; this.currentInputText += text; this.updateLiveDisplay(); } handleOutputTranscription(text) { if (!text) return; this.currentOutputText += text; this.updateLiveDisplay(); } finalizeTurn() { if (this.currentInputText.trim()) { this.conversationHistory.push({ speaker: 'You', text: this.currentInputText.trim(), }); } if (this.currentOutputText.trim()) { this.conversationHistory.push({ speaker: 'AI', text: this.currentOutputText.trim(), }); } this.currentInputText = ''; this.currentOutputText = ''; this.updateFinalDisplay(); } updateLiveDisplay() { let displayText = ''; for (const entry of this.conversationHistory.slice(-2)) { displayText += `${entry.speaker}: ${entry.text}\n\n`; } if (this.currentInputText.trim()) { displayText += `You: ${this.currentInputText}`; } if (this.currentOutputText.trim()) { if (this.currentInputText.trim()) displayText += '\n\n'; displayText += `AI: ${this.currentOutputText}`; } this.responseDisplay?.setText(displayText); } updateFinalDisplay() { let displayText = ''; for (const entry of this.conversationHistory) { displayText += `${entry.speaker}: ${entry.text}\n\n`; } this.responseDisplay?.setText(displayText); } clear() { this.currentInputText = ''; this.currentOutputText = ''; this.conversationHistory = []; } addText(text) { this.responseDisplay?.addText(text + '\n\n'); } setText(text) { this.responseDisplay?.setText(text); } } (example: ../xrblocks/samples/skybox_agent/index.html) Gemini Live Skybox Agent | XR Blocks Sample: sound (example: ../xrblocks/samples/sound/index.html) Sound Sample | XR Blocks (example: ../xrblocks/samples/sound/main.js) /* eslint-env browser */ import * as THREE from 'three'; import * as xb from 'xrblocks'; class SoundDemoScript extends xb.Script { constructor() { super(); this.soundBalls = []; this.mainPanel = null; this.recordedAudioBuffer = null; this.isRecording = false; this.recordBtn = null; this.statusText = null; this.volumeText = null;
this.recordingStartTime = 0; this.currentVolume = 1.0; this.ballJumpPhase = 0; } init() { this.createSoundBalls(); this.createDemoUI(); const light = new THREE.DirectionalLight(0xffffff, 1.5); light.position.set(0, 3, 0); this.add(light); const ambientLight = new THREE.AmbientLight(0x404040, 1.0); this.add(ambientLight); } createSoundBalls() { const ballPositions = [ {x: -1.0, y: xb.user.height * 0.5, z: -1.5, color: 0xff6b6b}, {x: 0.0, y: xb.user.height * 0.5, z: -1.5, color: 0x4ecdc4}, {x: 1.0, y: xb.user.height * 0.5, z: -1.5, color: 0xffe66d}, ]; ballPositions.forEach((pos, index) => { const geometry = new THREE.SphereGeometry(0.1, 32, 32); const material = new THREE.MeshStandardMaterial({ color: pos.color, metalness: 0.3, roughness: 0.4, }); const ball = new THREE.Mesh(geometry, material); ball.position.set(pos.x, pos.y, pos.z); ball.userData.soundIndex = index; ball.name = `SoundBall${index}`; this.add(ball); this.soundBalls.push(ball); }); } createDemoUI() { this.mainPanel = new xb.SpatialPanel({ backgroundColor: '#1a1a1aF0', useDefaultPosition: false, showEdge: true, width: 1.0, height: 0.8, }); this.mainPanel.isRoot = true; this.mainPanel.position.set( 0, xb.user.height + 0.2, -xb.user.panelDistance ); this.add(this.mainPanel); const mainGrid = this.mainPanel.addGrid(); const titleRow = mainGrid.addRow({weight: 0.18}); titleRow.addText({ text: 'Sound Recorder', fontSize: 0.08, fontColor: '#4ecdc4', }); const statusRow = mainGrid.addRow({weight: 0.15}); this.statusText = statusRow.addText({ text: 'Click mic to record', fontSize: 0.05, fontColor: '#ffe66d', }); mainGrid.addRow({weight: 0.1}); const controlRow = mainGrid.addRow({weight: 0.35}); { const recordCol = controlRow.addCol({weight: 0.4}); this.recordBtn = recordCol.addIconButton({ text: 'mic', fontSize: 0.5, }); this.recordBtn.onTriggered = () => { this.toggleRecording(); }; } { const volDownCol = controlRow.addCol({weight: 0.2}); const volDownBtn = volDownCol.addIconButton({ text: 'remove', fontSize: 0.5, }); volDownBtn.onTriggered = () => { this.adjustVolume(-0.1); }; } { const volDisplayCol = controlRow.addCol({weight: 0.2}); this.volumeText = volDisplayCol.addText({ text: '100%', fontSize: 0.5, fontColor: '#4ecdc4', }); } { const volUpCol = controlRow.addCol({weight: 0.2}); const volUpBtn = volUpCol.addIconButton({ text: 'add', fontSize: 0.5, }); volUpBtn.onTriggered = () => { this.adjustVolume(0.1); }; } const bottomRow = mainGrid.addRow({weight: 0.2}); bottomRow.addText({ text: 'Click jumping balls to play', fontSize: 0.045, fontColor: '#888888', }); if (this.mainPanel) { this.mainPanel.updateLayouts(); } } async toggleRecording() { if (this.isRecording) { this.isRecording = false; this.updateStatus('Stopping recording...'); await new Promise((resolve) => setTimeout(resolve, 300)); this.recordedAudioBuffer = xb.core.sound.stopRecording(); if (this.recordedAudioBuffer && this.recordedAudioBuffer.byteLength > 0) { const duration = ( (Date.now() - this.recordingStartTime) / 1000 ).toFixed(1); this.updateStatus(`Recorded ${duration}s - Click balls to play`); } else { this.recordedAudioBuffer = null; this.updateStatus('Recording failed - no data captured'); } this.recordBtn.text = 'mic'; } else { // Start recording using SDK try { await xb.core.sound.startRecording(); this.isRecording = true; this.recordingStartTime = Date.now(); this.updateStatus('Recording... 
Click mic again to stop'); this.recordBtn.text = 'mic_off'; } catch (error) { this.updateStatus('Recording failed - ' + error); this.isRecording = false; } } } async playRecording() { if (!this.recordedAudioBuffer) { this.updateStatus('No recording - click mic first!'); return; } try { this.updateStatus('Playing recording...'); const sampleRate = xb.core.sound.getRecordingSampleRate(); await xb.core.sound.playRecordedAudio( this.recordedAudioBuffer, sampleRate ); setTimeout(() => { if (!this.isRecording) { this.updateStatus('Click mic to record'); } }, 2000); } catch (error) { this.updateStatus('Playback failed: ' + error); } } adjustVolume(delta) { this.currentVolume = Math.max(0, Math.min(1, this.currentVolume + delta)); const volumePercent = Math.round(this.currentVolume * 100); xb.core.sound.setMasterVolume(this.currentVolume); if (this.volumeText) { this.volumeText.text = `${volumePercent}%`; } this.updateStatus(`Volume: ${volumePercent}%`); } updateStatus(message) { if (this.statusText) { this.statusText.text = message; } } onSelectStart(event) { const controller = event.target; this.soundBalls.forEach((ball) => { const intersection = xb.core.user.select(ball, controller); if (intersection) { if (this.recordedAudioBuffer) { this.playRecordingFromBall(ball); this.updateStatus( `Playing from ball ${ball.userData.soundIndex + 1}` ); } else { this.updateStatus('Record something first!'); } this.pulseBall(ball); } }); } async playRecordingFromBall(ball) { if (!this.recordedAudioBuffer) return; try { const sampleRate = xb.core.sound.getRecordingSampleRate(); const audioListener = xb.core.sound.getAudioListener(); const audioContext = new AudioContext({sampleRate: sampleRate}); const int16Data = new Int16Array(this.recordedAudioBuffer); const audioBuffer = audioContext.createBuffer( 1, int16Data.length, sampleRate ); const channelData = audioBuffer.getChannelData(0); for (let i = 0; i < int16Data.length; i++) { channelData[i] = int16Data[i] / 32768.0; } const positionalAudio = new THREE.PositionalAudio(audioListener); positionalAudio.setBuffer(audioBuffer); positionalAudio.setRefDistance(0.5); positionalAudio.setRolloffFactor(2.0); positionalAudio.setVolume(this.currentVolume); ball.add(positionalAudio); positionalAudio.play(); positionalAudio.onEnded = () => { ball.remove(positionalAudio); audioContext.close(); }; } catch (error) { this.updateStatus('Play recording from ball failed: ' + error); } } pulseBall(ball) { const originalScale = ball.scale.clone(); const targetScale = originalScale.clone().multiplyScalar(1.3); const startTime = Date.now(); const duration = 200; const animate = () => { const elapsed = Date.now() - startTime; const progress = Math.min(elapsed / duration, 1); if (progress < 0.5) { const t = progress * 2; ball.scale.lerpVectors(originalScale, targetScale, t); } else { const t = (progress - 0.5) * 2; ball.scale.lerpVectors(targetScale, originalScale, t); } if (progress < 1) { requestAnimationFrame(animate); } else { ball.scale.copy(originalScale); } }; animate(); } update() { this.soundBalls.forEach((ball, index) => { ball.rotation.y += 0.01 * (index + 1); if (this.recordedAudioBuffer) { this.ballJumpPhase += 0.01; const jumpHeight = 0.08; const baseHeight = xb.user.height * 0.5; const jumpOffset = Math.abs(Math.sin(this.ballJumpPhase + (index * Math.PI) / 3)) * jumpHeight; ball.position.y = baseHeight + jumpOffset; const targetScale = 1.1; ball.scale.lerp( new THREE.Vector3(targetScale, targetScale, targetScale), 0.1 ); } else { const baseHeight = xb.user.height * 
0.5; ball.position.y = baseHeight; ball.scale.lerp(new THREE.Vector3(1, 1, 1), 0.1); } }); } destroy() { if (this.isRecording) { xb.core.sound.disableAudio(); } super.destroy(); } } document.addEventListener('DOMContentLoaded', function () { const options = new xb.Options(); options.reticles.enabled = true; options.controllers.visualizeRays = true; options.setAppTitle('XR Sound'); xb.add(new SoundDemoScript()); xb.init(options); }); Sample: ui (example: ../xrblocks/samples/ui/index.html) UI Panels Showcase | XR Blocks Demo: balloonpop (example: ../xrblocks/demos/balloonpop/BalloonPop.js) import * as THREE from 'three'; import * as xb from 'xrblocks'; import {playPopSound, playWhooshSound} from './audio.js'; // --- CONSTANTS --- const DART_SPEED = 15.0; const DART_GRAVITY_SCALE = 0.1; const MENU_WIDTH = 0.85; const PARTICLE_COUNT = 30; const PARTICLE_LIFE = 1.0; const BOUNDARY_RADIUS = 7.62; const BOUNDARY_IMPULSE = 0.02; // Physics Groups export const GROUP_WORLD = (0x0001 << 16) | (0x0002 | 0x0004); const GROUP_BALLOON = (0x0002 << 16) | (0x0001 | 0x0002 | 0x0004); const GROUP_DART = (0x0004 << 16) | (0x0001 | 0x0002); // --- UI HELPER --- function createStepperControl( game, grid, labelText, valueRef, min, max, step, isCount ) { const H_BUTTON = 0.15; const H_VALUE_LABEL = 0.12; const menuHeight = game.menuPanel.height; const getW = (h) => h / menuHeight; grid.addRow({weight: getW(H_BUTTON)}).addTextButton({ text: '+', fontColor: '#ffffff', backgroundColor: '#4285f4', fontSize: 0.7, width: 0.3, weight: 1.0, }).onTriggered = () => { const maxVal = isCount ? max : 0.1; game[valueRef] = Math.min(maxVal, game[valueRef] + step); game.renderMenu(); }; const displayValue = isCount ? game[valueRef] : Math.round(game[valueRef] * 100); const valueText = grid.addRow({weight: getW(H_VALUE_LABEL)}).addText({ text: `${labelText}: ${displayValue}`, fontColor: '#ffffff', fontSize: 0.12, textAlign: 'center', }); if (isCount) game.countValueText = valueText; else game.speedValueText = valueText; grid.addRow({weight: getW(H_BUTTON)}).addTextButton({ text: '-', fontColor: '#ffffff', backgroundColor: '#4285f4', fontSize: 0.7, width: 0.3, weight: 1.0, }).onTriggered = () => { game[valueRef] = Math.max(min, game[valueRef] - step); game.renderMenu(); }; grid.addRow({weight: getW(0.01)}); } export class BalloonGame extends xb.Script { constructor() { super(); this.balloons = new Map(); this.darts = new Map(); this.particles = []; this.balloonCount = 10; this.balloonSpeed = 0.03; this.balloonsPopped = 0; this.activeDart = null; this.menuPanel = null; this.isMenuExpanded = true; this.physics = null; this.physicsWorld = null; this.RAPIER = null; this.balloonModel = null; this.dartModel = null; this.particleGeometry = null; this.particleMaterial = null; this.raycaster = new THREE.Raycaster(); this.menuPos = new THREE.Vector3(0.6, 1.3, -1.0); this.menuRot = new THREE.Euler(0, -0.4, 0); } async init() { // Post-init Stable Activation of Depth Mesh Physics setTimeout(() => { const options = xb.core.registry.get(xb.Options); if (options && options.depth) { options.depth.enabled = true; if (options.depth.depthMesh) { options.depth.depthMesh.enabled = true; options.depth.depthMesh.physicsEnabled = true; options.depth.depthMesh.collisionGroups = GROUP_WORLD; } } }, 1000); this.add(new THREE.HemisphereLight(0xffffff, 0xbbbbff, 3)); const dirLight = new THREE.DirectionalLight(0xffffff, 2); dirLight.position.set(1, 2, 1); this.add(dirLight); this.createPrefabs(); this.renderMenu(); } createPrefabs() { // --- DART --- 
this.dartModel = new THREE.Group(); const needleMat = new THREE.MeshStandardMaterial({ color: 0xaaaaaa, metalness: 1.0, roughness: 0.1, }); const silverMat = new THREE.MeshStandardMaterial({ color: 0xcccccc, metalness: 0.8, roughness: 0.3, }); const redMat = new THREE.MeshStandardMaterial({ color: 0xaa0000, roughness: 0.5, }); const finMat = new THREE.MeshStandardMaterial({ color: 0xcc0000, roughness: 0.6, }); const needle = new THREE.Mesh( new THREE.ConeGeometry(0.002, 0.04, 6), needleMat ); needle.position.y = 0.17; const tipHolder = new THREE.Mesh( new THREE.CylinderGeometry(0.008, 0.012, 0.03, 8), silverMat ); tipHolder.position.y = 0.135; const body = new THREE.Mesh( new THREE.CylinderGeometry(0.01, 0.01, 0.18, 8), redMat ); body.position.y = 0.03; const createFin = (rotationY) => { const fin = new THREE.Mesh( new THREE.BoxGeometry(0.07, 0.05, 0.002, 1, 1, 1), finMat ); fin.position.set(0, -0.05, 0); fin.rotation.set(Math.PI, rotationY, 0); return fin; }; this.dartModel.add(needle, tipHolder, body); this.dartModel.add(createFin(0)); this.dartModel.add(createFin(Math.PI / 2)); this.dartModel.add(createFin(Math.PI)); this.dartModel.add(createFin(Math.PI * 1.5)); // --- HIGH-FIDELITY BALLOON --- this.balloonModel = new THREE.Group(); const balloonGeo = new THREE.SphereGeometry(0.5, 32, 32); const pos = balloonGeo.attributes.position; const v = new THREE.Vector3(); for (let i = 0; i < pos.count; i++) { v.fromBufferAttribute(pos, i); if (v.y < 0) { const t = 1.0 - Math.abs(v.y) * 0.35; v.x *= t; v.z *= t; } pos.setXYZ(i, v.x, v.y, v.z); } balloonGeo.computeVertexNormals(); const balloonMat = new THREE.MeshStandardMaterial({ color: 0xffffff, roughness: 0.2, metalness: 0.1, transparent: true, opacity: 0.85, side: THREE.FrontSide, }); this.balloonModel.add(new THREE.Mesh(balloonGeo, balloonMat)); const knotGeo = new THREE.CylinderGeometry(0.03, 0.01, 0.12, 16); knotGeo.translate(0, -0.54, 0); this.balloonModel.add(new THREE.Mesh(knotGeo, balloonMat)); const ringGeo = new THREE.TorusGeometry(0.035, 0.015, 8, 24); ringGeo.rotateX(Math.PI / 2); ringGeo.translate(0, -0.6, 0); this.balloonModel.add(new THREE.Mesh(ringGeo, balloonMat)); this.particleGeometry = new THREE.PlaneGeometry(0.08, 0.08); this.particleMaterial = new THREE.MeshBasicMaterial({ color: 0xffffff, transparent: true, opacity: 1.0, side: THREE.DoubleSide, blending: THREE.AdditiveBlending, }); } initPhysics(physics) { this.physics = physics; this.physicsWorld = physics.blendedWorld; this.RAPIER = physics.RAPIER; this.spawnBalloons(); this.renderMenu(); } renderMenu() { if (this.menuPanel) { this.menuPos.copy(this.menuPanel.position); this.menuRot.copy(this.menuPanel.rotation); this.remove(this.menuPanel); } const H_TOGGLE = 0.1; const H_SCORE = 0.15; const H_RESET = 0.15; const H_SPACE = 0.03; const H_BUTTON = 0.15; const H_VALUE_LABEL = 0.12; const headerHeight = H_TOGGLE + H_SCORE + H_RESET + H_SPACE; const expandedControlsHeight = (H_BUTTON + H_VALUE_LABEL + H_BUTTON + H_SPACE) * 2; const menuHeight = this.isMenuExpanded ? headerHeight + expandedControlsHeight : headerHeight; const getW = (h) => h / menuHeight; this.menuPanel = new xb.SpatialPanel({ width: MENU_WIDTH, height: menuHeight, backgroundColor: '#2b2b2baa', showEdge: true, edgeColor: 'white', edgeWidth: 0.001, }); this.menuPanel.position.copy(this.menuPos); this.menuPanel.rotation.copy(this.menuRot); this.add(this.menuPanel); const grid = this.menuPanel.addGrid(); grid.addRow({weight: getW(H_TOGGLE)}).addTextButton({ text: this.isMenuExpanded ? 
'\u25B2' : '\u25BC', fontColor: '#ffffff', backgroundColor: '#444444', fontSize: 0.7, weight: 1.0, }).onTriggered = () => this.toggleMenu(); this.scoreText = grid.addRow({weight: getW(H_SCORE)}).addText({ text: `${this.balloonsPopped} / ${this.balloonCount}`, fontColor: '#ffffff', fontSize: 0.15, textAlign: 'center', }); grid.addRow({weight: getW(H_RESET)}).addTextButton({ text: '\u21BB', fontColor: '#ffffff', backgroundColor: '#d93025', fontSize: 0.7, weight: 1.0, }).onTriggered = () => this.resetGame(); grid.addRow({weight: getW(H_SPACE)}); if (this.isMenuExpanded) { createStepperControl( this, grid, 'Balloons', 'balloonCount', 5, 30, 1, true ); createStepperControl( this, grid, 'Speed', 'balloonSpeed', 0.0, 0.1, 0.01, false ); } } toggleMenu() { this.isMenuExpanded = !this.isMenuExpanded; this.renderMenu(); } updateScoreDisplay() { if (this.scoreText) this.scoreText.text = `${this.balloonsPopped} / ${this.balloonCount}`; } resetGame() { this.spawnBalloons(); this.renderMenu(); } spawnBalloons() { if (!this.physicsWorld || !this.RAPIER || !this.balloonModel) return; this.clearBalloons(); this.balloonsPopped = 0; this.updateScoreDisplay(); const color = new THREE.Color(); for (let i = 0; i < this.balloonCount; i++) { const grp = this.balloonModel.clone(); const x = (Math.random() - 0.5) * 4, y = 1.5 + Math.random() * 1, z = -2 - Math.random() * 2; grp.position.set(x, y, z); const s = 0.7 + Math.random() * 0.6; grp.scale.set(s, s, s); color.setHSL(Math.random(), 0.95, 0.6); grp.traverse((c) => { if (c.isMesh) { c.material = c.material.clone(); c.material.color.copy(color); } }); const rb = this.physicsWorld.createRigidBody( this.RAPIER.RigidBodyDesc.dynamic() .setTranslation(x, y, z) .setGravityScale(-0.05 * s) .setLinearDamping(0.5) .setAngularDamping(0.5) ); const col = this.physicsWorld.createCollider( this.RAPIER.ColliderDesc.ball(0.5 * s) .setActiveEvents(this.RAPIER.ActiveEvents.COLLISION_EVENTS) .setRestitution(0.85) .setDensity(0.1) .setCollisionGroups(GROUP_BALLOON), rb ); this.balloons.set(col.handle, { mesh: grp, rigidBody: rb, collider: col, color: color.clone(), }); this.add(grp); } } clearBalloons() { if (!this.physicsWorld) return; for (const [h, b] of this.balloons.entries()) { this.remove(b.mesh); this.physicsWorld.removeCollider(b.collider, false); this.physicsWorld.removeRigidBody(b.rigidBody); } this.balloons.clear(); } spawnExplosion(position, color) { for (let i = 0; i < PARTICLE_COUNT; i++) { const mat = this.particleMaterial.clone(); mat.color.copy(color); const mesh = new THREE.Mesh(this.particleGeometry, mat); mesh.position.copy(position); this.add(mesh); this.particles.push({ mesh, velocity: new THREE.Vector3( (Math.random() - 0.5) * 4.0, (Math.random() - 0.5) * 4.0, (Math.random() - 0.5) * 4.0 ), life: PARTICLE_LIFE, }); } } onSelectStart(event) { if (this.menuPanel && this.menuPanel.parent) { const ctrl = event.target, pos = new THREE.Vector3(), quat = new THREE.Quaternion(); ctrl.getWorldPosition(pos); ctrl.getWorldQuaternion(quat); this.raycaster.set( pos, new THREE.Vector3(0, 0, -1).applyQuaternion(quat) ); if (this.raycaster.intersectObject(this.menuPanel, true).length > 0) return; } if (this.activeDart) return; this.activeDart = this.dartModel.clone(); this.activeDart.position.set(0, -0.05, -0.15); this.activeDart.rotation.set(-Math.PI / 2, 0, 0); event.target.add(this.activeDart); } onSelectEnd(event) { const ctrl = event.target; if (!this.activeDart || !this.physicsWorld) return; const dart = this.activeDart; this.activeDart = null; const wPos = new 
THREE.Vector3(), wQuat = new THREE.Quaternion(); dart.getWorldPosition(wPos); dart.getWorldQuaternion(wQuat); ctrl.remove(dart); dart.position.copy(wPos); dart.quaternion.copy(wQuat); this.add(dart); playWhooshSound(); const rb = this.physicsWorld.createRigidBody( this.RAPIER.RigidBodyDesc.dynamic() .setTranslation(wPos.x, wPos.y, wPos.z) .setRotation(wQuat) .setGravityScale(DART_GRAVITY_SCALE) .setCcdEnabled(true) ); const col = this.physicsWorld.createCollider( this.RAPIER.ColliderDesc.capsule(0.1, 0.015) .setActiveEvents(this.RAPIER.ActiveEvents.COLLISION_EVENTS) .setSensor(true) .setCollisionGroups(GROUP_DART), rb ); rb.setLinvel( new THREE.Vector3(0, 1, 0) .applyQuaternion(wQuat) .multiplyScalar(DART_SPEED), true ); this.darts.set(col.handle, {mesh: dart, rigidBody: rb, collider: col}); } update(time, delta) { if (this.physicsWorld) { for (const [h, b] of this.balloons.entries()) { b.mesh.position.copy(b.rigidBody.translation()); b.mesh.quaternion.copy(b.rigidBody.rotation()); } for (const [h, d] of this.darts.entries()) { d.mesh.position.copy(d.rigidBody.translation()); d.mesh.quaternion.copy(d.rigidBody.rotation()); if (d.mesh.position.y < -5 || Math.abs(d.mesh.position.z) > 30) this.removeDart(h); } } const dt = delta || 1 / 60; for (let i = this.particles.length - 1; i >= 0; i--) { const p = this.particles[i]; p.life -= dt; if (p.life <= 0) { this.remove(p.mesh); this.particles.splice(i, 1); } else { p.mesh.position.addScaledVector(p.velocity, dt); p.mesh.material.opacity = p.life / PARTICLE_LIFE; if (xb.core.camera) p.mesh.lookAt(xb.core.camera.position); } } } physicsStep() { if (!this.physics || !this.physicsWorld || !xb.core.camera) return; const camPos = xb.core.camera.position; const speedFactor = this.balloonSpeed / 10; for (const [h, b] of this.balloons.entries()) { const s = speedFactor; b.rigidBody.addForce( { x: (Math.random() - 0.5) * s, y: (Math.random() - 0.5) * (s * 0.5), z: (Math.random() - 0.5) * s, }, true ); const p = b.rigidBody.translation(); const dx = p.x - camPos.x, dz = p.z - camPos.z; const dist = Math.sqrt(dx * dx + dz * dz); if (dist > BOUNDARY_RADIUS) { b.rigidBody.applyImpulse( { x: (-dx / dist) * BOUNDARY_IMPULSE, y: 0, z: (-dz / dist) * BOUNDARY_IMPULSE, }, true ); } if (p.y > 5.0) b.rigidBody.applyImpulse({x: 0, y: -BOUNDARY_IMPULSE, z: 0}, true); if (p.y < 0.5) b.rigidBody.applyImpulse({x: 0, y: BOUNDARY_IMPULSE, z: 0}, true); } this.physics.eventQueue.drainCollisionEvents((h1, h2, s) => { if (!s) return; if (this.darts.has(h1) && this.balloons.has(h2)) { this.popBalloon(h2); this.removeDart(h1); } else if (this.darts.has(h2) && this.balloons.has(h1)) { this.popBalloon(h1); this.removeDart(h2); } }); } popBalloon(h) { const b = this.balloons.get(h); if (b) { this.spawnExplosion(b.mesh.position.clone(), b.color); playPopSound(); } this.removeBalloon(h); this.balloonsPopped++; this.updateScoreDisplay(); } removeBalloon(h) { const b = this.balloons.get(h); if (!b) return; this.remove(b.mesh); this.physicsWorld.removeCollider(b.collider, false); this.physicsWorld.removeRigidBody(b.rigidBody); this.balloons.delete(h); } removeDart(h) { const d = this.darts.get(h); if (!d) return; this.remove(d.mesh); this.physicsWorld.removeCollider(d.collider, false); this.physicsWorld.removeRigidBody(d.rigidBody); this.darts.delete(h); } } (example: ../xrblocks/demos/balloonpop/audio.js) // --- AUDIO CONTEXT (Retro Noise Pop) --- const audioContext = new (window.AudioContext || window.webkitAudioContext)(); export function playPopSound() { if (audioContext.state === 
'suspended') audioContext.resume(); const noiseSource = audioContext.createBufferSource(); const bandpass = audioContext.createBiquadFilter(); const gainNode = audioContext.createGain(); const now = audioContext.currentTime; const sampleRate = audioContext.sampleRate; const bufferSize = sampleRate * 0.15; const noiseBuffer = audioContext.createBuffer(1, bufferSize, sampleRate); const output = noiseBuffer.getChannelData(0); for (let i = 0; i < bufferSize; i++) { output[i] = Math.random() * 2 - 1; } noiseSource.buffer = noiseBuffer; bandpass.type = 'bandpass'; bandpass.frequency.setValueAtTime(3000, now); bandpass.Q.setValueAtTime(1.2, now); gainNode.gain.setValueAtTime(0, now); gainNode.gain.linearRampToValueAtTime(0.8, now + 0.002); gainNode.gain.exponentialRampToValueAtTime(0.001, now + 0.12); noiseSource.connect(bandpass); bandpass.connect(gainNode); gainNode.connect(audioContext.destination); noiseSource.start(0); } export function playWhooshSound() { if (audioContext.state === 'suspended') audioContext.resume(); const oscillator = audioContext.createOscillator(); const gainNode = audioContext.createGain(); const now = audioContext.currentTime; oscillator.type = 'sawtooth'; oscillator.frequency.setValueAtTime(800, now); oscillator.frequency.exponentialRampToValueAtTime(100, now + 0.1); gainNode.gain.setValueAtTime(0.3, now); gainNode.gain.exponentialRampToValueAtTime(0.001, now + 0.1); oscillator.connect(gainNode); gainNode.connect(audioContext.destination); oscillator.start(0); oscillator.stop(now + 0.1); } (example: ../xrblocks/demos/balloonpop/index.html) XR Blocks: Balloon Pop (v5.1 - Stable Fallback) (example: ../xrblocks/demos/balloonpop/main.js) import * as xb from 'xrblocks'; import RAPIER from '@dimforge/rapier3d-simd-compat'; import 'xrblocks/addons/simulator/SimulatorAddons.js'; import {BalloonGame, GROUP_WORLD} from './BalloonPop.js'; document.addEventListener('DOMContentLoaded', () => { const o = new xb.Options(); o.enableUI(); o.physics.RAPIER = RAPIER; o.physics.useEventQueue = true; o.physics.worldStep = true; o.hands.enabled = true; o.simulator.defaultMode = xb.SimulatorMode.POSE; // START DISABLED to avoid Simulator Camera-Clone Crash o.depth.enabled = false; if (o.depth.depthMesh) { o.depth.depthMesh.enabled = true; o.depth.depthMesh.physicsEnabled = true; o.depth.depthMesh.collisionGroups = GROUP_WORLD; o.depth.depthMesh.colliderUpdateFps = 5; } xb.add(new BalloonGame()); xb.init(o); }); Demo: ballpit (example: ../xrblocks/demos/ballpit/BallPit.js) import * as THREE from 'three'; import * as xb from 'xrblocks'; import {palette} from 'xrblocks/addons/utils/Palette.js'; import {BallShooter} from './BallShooter.js'; const kTimeLiveMs = xb.getUrlParamInt('timeLiveMs', 3000); const kDeflateMs = xb.getUrlParamInt('deflateMs', 200); const kLightX = xb.getUrlParamFloat('lightX', 0); const kLightY = xb.getUrlParamFloat('lightY', 500); const kLightZ = xb.getUrlParamFloat('lightZ', -10); const kRadius = xb.getUrlParamFloat('radius', 0.08); const kBallsPerSecond = xb.getUrlParamFloat('ballsPerSecond', 30); const kVelocityScale = xb.getUrlParamFloat('velocityScale', 1.0); const kNumSpheres = 100; export class BallPit extends xb.Script { constructor() { super(); this.ballShooter = new BallShooter({ numBalls: kNumSpheres, radius: kRadius, palette: palette, liveDuration: kTimeLiveMs, deflateDuration: kDeflateMs, }); this.add(this.ballShooter); this.addLights(); this.lastBallCreatedTimeForController = new Map(); this.pointer = new THREE.Vector2(); this.velocity = new THREE.Vector3(); }
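// Lifecycle note: init() runs once the XR Blocks core is ready and update()
// runs every frame; initPhysics() (see BallShooter.js below) appears to be
// invoked by the physics layer with the RAPIER world once
// options.physics.RAPIER is set in main.js.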
init() { xb.add(this); } update() { super.update(); for (const controller of xb.core.input.controllers) { this.controllerUpdate(controller); } } // Adds hemisphere light for ambient lighting and directional light. addLights() { this.add(new THREE.HemisphereLight(0xbbbbbb, 0x888888, 3)); const light = new THREE.DirectionalLight(0xffffff, 2); light.position.set(kLightX, kLightY, kLightZ); light.castShadow = true; light.shadow.mapSize.width = 2048; // Default is usually 1024 light.shadow.mapSize.height = 2048; // Default is usually 1024 this.add(light); } // Calculates pointer position in normalized device coordinates. updatePointerPosition(event) { // (-1 to +1) for both components this.pointer.x = (event.clientX / window.innerWidth) * 2 - 1; this.pointer.y = -(event.clientY / window.innerHeight) * 2 + 1; // scale pointer.x from [-1, 0] to [-1, 1] this.pointer.x = 1 + 2 * this.pointer.x; } onPointerDown(event) { this.updatePointerPosition(event); const cameras = xb.core.renderer.xr.getCamera().cameras; if (cameras.length == 0) return; const camera = cameras[0]; // Spawn a ball slightly in front of the camera. const position = new THREE.Vector3(0.0, 0.0, -0.2) .applyQuaternion(camera.quaternion) .add(camera.position); const matrix = new THREE.Matrix4(); matrix.setPosition(position.x, position.y, position.z); // Convert pointer position to angle based on the camera. const vector = new THREE.Vector4(this.pointer.x, this.pointer.y, 1.0, 1); const inverseProjectionMatrix = camera.projectionMatrix.clone().invert(); vector.applyMatrix4(inverseProjectionMatrix); vector.multiplyScalar(1 / vector.w); this.velocity.copy(vector); this.velocity.normalize().multiplyScalar(4.0); this.velocity.applyQuaternion(camera.quaternion); this.ballShooter.spawnBallAt(position, this.velocity); } controllerUpdate(controller) { const now = performance.now(); if (!this.lastBallCreatedTimeForController.has(controller)) { this.lastBallCreatedTimeForController.set(controller, -99); } if ( controller.userData.selected && now - this.lastBallCreatedTimeForController.get(controller) >= 1000 / kBallsPerSecond ) { // Place this 8 cm in front of the hands. 
const newPosition = new THREE.Vector3(0.0, 0.0, -0.08) .applyQuaternion(controller.quaternion) .add(controller.position); this.velocity.set(0, 0, -5.0 * kVelocityScale); this.velocity.applyQuaternion(controller.quaternion); this.ballShooter.spawnBallAt(newPosition, this.velocity); this.lastBallCreatedTimeForController.set(controller, now); } } } (example: ../xrblocks/demos/ballpit/BallShooter.js) import * as THREE from 'three'; import * as xb from 'xrblocks'; export class BallShooter extends xb.Script { constructor({ numBalls = 100, radius = 0.08, palette = null, liveDuration = 3000, deflateDuration = 200, distanceThreshold = 0.25, distanceFadeout = 0.25, }) { super(); this.liveDuration = liveDuration; this.deflateDuration = deflateDuration; this.distanceThreshold = distanceThreshold; this.distanceFadeout = distanceFadeout; const geometry = new THREE.IcosahedronGeometry(radius, 3); this.spheres = []; for (let i = 0; i < numBalls; ++i) { const material = new THREE.MeshLambertMaterial({transparent: true}); const sphere = new THREE.Mesh(geometry, material); sphere.castShadow = true; sphere.receiveShadow = true; this.spheres.push(sphere); } const matrix = new THREE.Matrix4(); for (let i = 0; i < this.spheres.length; i++) { const x = Math.random() * 2 - 2; const y = Math.random() * 2; const z = Math.random() * 2 - 2; matrix.setPosition(x, y, z); this.spheres[i].position.set(x, y, z); if (palette != null) { this.spheres[i].material.color.copy(palette.getRandomLiteGColor()); } } const now = performance.now(); this.spawnTimes = []; for (let i = 0; i < numBalls; ++i) { this.spawnTimes[i] = now; } this.nextBall = 0; this.rigidBodies = []; this.colliders = []; this.colliderHandleToIndex = new Map(); this.viewSpacePosition = new THREE.Vector3(); this.clipSpacePosition = new THREE.Vector3(); this.projectedPosition = new THREE.Vector3(); this.clipFromWorld = new THREE.Matrix4(); } initPhysics(physics) { this.setupPhysics({ RAPIER: physics.RAPIER, blendedWorld: physics.blendedWorld, colliderActiveEvents: physics.RAPIER.ActiveEvents.CONTACT_FORCE_EVENTS, }); } setupPhysics({ RAPIER, blendedWorld, colliderActiveEvents = 0, continuousCollisionDetection = false, }) { for (let i = 0; i < this.spheres.length; ++i) { const position = this.spheres[i].position; const desc = RAPIER.RigidBodyDesc.dynamic() .setTranslation(...position) .setCcdEnabled(continuousCollisionDetection); const body = blendedWorld.createRigidBody(desc); const shape = RAPIER.ColliderDesc.ball( this.spheres[i].geometry.parameters.radius ).setActiveEvents(colliderActiveEvents); const collider = blendedWorld.createCollider(shape, body); this.colliderHandleToIndex.set(collider.handle, i); this.rigidBodies.push(body); this.colliders.push(collider); } } /** * Spawns a ball at the given location with the given velocity. * @param {THREE.Vector3} position Position to place the ball. * @param {THREE.Vector3} velocity Velocity of the ball. * @param {number} now Time when the ball is spawned. 
spawnBallAt( position, velocity = new THREE.Vector3(), now = performance.now() ) { const ball = this.spheres[this.nextBall]; ball.position.copy(position); ball.scale.setScalar(1.0); ball.material.opacity = 1.0; if (this.rigidBodies.length > 0) { const body = this.rigidBodies[this.nextBall]; body.setTranslation(position); body.setLinvel(velocity); } this.spawnTimes[this.nextBall] = now; this.nextBall = (this.nextBall + 1) % this.spheres.length; this.add(ball); } physicsStep(now = performance.now()) { const camera = xb.core.camera; for (let i = 0; i < this.spheres.length; i++) { const sphere = this.spheres[i]; const body = this.rigidBodies[i]; let spawnTime = this.spawnTimes[i]; if (this.isBallActive(i)) { let ballVisibility = 1.0; const position = sphere.position.copy(body.translation()); // If the ball falls behind the depth then adjust the spawnTime to begin // expiring the ball. const viewSpacePosition = this.viewSpacePosition .copy(position) .applyMatrix4(camera.matrixWorldInverse); const clipSpacePosition = this.clipSpacePosition .copy(viewSpacePosition) .applyMatrix4(camera.projectionMatrix); const ballIsInView = -1.0 <= clipSpacePosition.x && clipSpacePosition.x <= 1.0 && -1.0 <= clipSpacePosition.y && clipSpacePosition.y <= 1.0; if (ballIsInView && xb.depth.enabled) { const projectedPosition = xb.depth.getProjectedDepthViewPositionFromWorldPosition( position, this.projectedPosition ); const distanceBehindDepth = Math.max( projectedPosition.z - viewSpacePosition.z, 0.0 ); if (distanceBehindDepth > this.distanceThreshold) { // Clamp at 1 so the ball is at most fully deflated. const deflateAmount = Math.min( (distanceBehindDepth - this.distanceThreshold) / this.distanceFadeout, 1.0 ); spawnTime = Math.min( spawnTime, now - this.liveDuration - this.deflateDuration * deflateAmount ); } } // Compute the visibility if the ball has lived too long. if (now - spawnTime > this.liveDuration) { const timeSinceDeflateStarted = now - spawnTime - this.liveDuration; const deflateAmount = Math.min( 1, timeSinceDeflateStarted / this.deflateDuration ); ballVisibility = 1.0 - deflateAmount; } body.setTranslation(position); sphere.material.opacity = ballVisibility; if (ballVisibility < 0.001) { sphere.material.opacity = 0.0; sphere.scale.setScalar(0); position.set(0.0, -1000.0, 0.0); body.setTranslation(position); this.removeBall(i); } } sphere.position.copy(body.translation()); sphere.quaternion.copy(body.rotation()); } } getIndexForColliderHandle(handle) { return this.colliderHandleToIndex.get(handle); } removeBall(index) { const ball = this.spheres[index]; const body = this.rigidBodies[index]; ball.position.set(0.0, -1000.0, 0.0); body.setTranslation(ball.position); this.remove(ball); } isBallActive(index) { return this.spheres[index].parent == this; } } (example: ../xrblocks/demos/ballpit/index.html) Ballpit
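Before the ballpit entry point, a hedged sketch of driving BallShooter directly (the constructor options and spawnBallAt signature come from BallShooter.js above; the helper name, offset, and speed are illustrative, mirroring BallPit.onPointerDown):

```js
import * as THREE from 'three';
import {BallShooter} from './BallShooter.js';

// Sketch: spawn one ball 20 cm in front of a camera, along its forward axis.
// Assumes the shooter has been added to the scene and its physics has been
// initialized by the framework (initPhysics above).
const shooter = new BallShooter({numBalls: 50, radius: 0.08});

function shootFromCamera(camera) {
  const position = new THREE.Vector3(0, 0, -0.2)
      .applyQuaternion(camera.quaternion)
      .add(camera.position);
  const velocity = new THREE.Vector3(0, 0, -4.0)
      .applyQuaternion(camera.quaternion);
  shooter.spawnBallAt(position, velocity);
}
```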
(example: ../xrblocks/demos/ballpit/main.js) import 'xrblocks/addons/simulator/SimulatorAddons.js'; import RAPIER from '@dimforge/rapier3d-simd-compat'; import * as xb from 'xrblocks'; import {BallPit} from './BallPit.js'; const depthMeshColliderUpdateFps = xb.getUrlParamFloat( 'depthMeshColliderUpdateFps', 5 ); const useSceneMesh = xb.getUrlParamBool('scenemesh', false); const options = new xb.Options(); if (useSceneMesh) { options.world.enableMeshDetection(); } else { options.depth = new xb.DepthOptions(xb.xrDepthMeshPhysicsOptions); options.depth.depthMesh.colliderUpdateFps = depthMeshColliderUpdateFps; } options.xrButton = { ...options.xrButton, startText: ' LET THE FUN BEGIN', endText: ' MISSION COMPLETE', }; options.physics.RAPIER = RAPIER; // Initializes the scene, camera, xrRenderer, controls, and XR button. async function start() { xb.add(new BallPit()); await xb.init(options); } document.addEventListener('DOMContentLoaded', function () { start(); }); Demo: gemini-icebreakers (example: ../xrblocks/demos/gemini-icebreakers/EarthAnimation.js) /** * Animates the earth model. */ export class EarthAnimation { model = null; speed = 0.2; setModel(model) { this.model = model; } update(deltaTime) { const gltfScene = this?.model?.gltf?.scene; if (gltfScene) { gltfScene.rotation.y += this.speed * deltaTime; } } } (example: ../xrblocks/demos/gemini-icebreakers/GeminiIcebreakers.js) import * as THREE from 'three'; import * as xb from 'xrblocks'; import {EarthAnimation} from './EarthAnimation.js'; import {TranscriptionManager} from './TranscriptionManager.js'; const ASSETS_BASE_URL = 'https://cdn.jsdelivr.net/gh/xrblocks/assets@main/'; const PROPRIETARY_ASSETS_BASE_URL = 'https://cdn.jsdelivr.net/gh/xrblocks/proprietary-assets@main/'; const DATA = [ { model: { scale: {x: 4.0, y: 4.0, z: 4.0}, path: PROPRIETARY_ASSETS_BASE_URL + 'monalisa/', model: 'mona_lisa_picture_frame_compressed.glb', verticallyAlignObject: false, }, prompt: '"What is she smiling about?"', }, { model: { scale: {x: 0.03, y: 0.03, z: 0.03}, rotation: {x: 80, y: 0, z: 0}, position: {x: 0, y: -0.2, z: -3.0}, path: PROPRIETARY_ASSETS_BASE_URL + 'chess/', model: 'chess_compressed.glb', verticallyAlignObject: false, }, prompt: ""What's a good strategy for this game?"", }, { model: { scale: {x: 0.9, y: 0.9, z: 0.9}, rotation: {x: 75, y: 0, z: 0}, position: {x: 0, y: 0.0, z: 0}, path: PROPRIETARY_ASSETS_BASE_URL + 'vegetable_on_board/', model: 'vegetable_on_board_compressed.glb', verticallyAlignObject: false, }, prompt: '"What is the most unexpected dish you could make with these ingredients?"', }, { model: { path: ASSETS_BASE_URL + 'models/', model: 'Parasaurolophus.glb', scale: {x: 0.3, y: 0.3, z: 0.3}, position: {x: 0, y: -0.6, z: 0}, verticallyAlignObject: false, horizontallyAlignObject: false, }, prompt: '"If this dinosaur could talk, what would it say?"', }, { model: { path: PROPRIETARY_ASSETS_BASE_URL + 'earth/', model: 'Earth_1_12756.glb', scale: {x: 0.001, y: 0.001, z: 0.001}, position: {x: 0, y: 0, z: 0}, verticallyAlignObject: false, }, modelAnimation: new EarthAnimation(), prompt: '"How big would I need to be to hold this in my hands?"', }, ]; export class GeminiIcebreakers extends xb.Script { constructor() { super(); // Loads data. this.data = DATA; this.journeyId = 0; this.models = []; this.isAIRunning = false; this.screenshotInterval = null; this.time = 0; this.micButtonInitialY = null; this.transcriptionManager = null; // Initializes UI.
const panel = new xb.SpatialPanel({ backgroundColor: '#00000000', useDefaultPosition: false, showEdge: false, }); this.add(panel); this.descriptionPagerState = new xb.PagerState({pages: DATA.length}); console.log('pages:', this.descriptionPagerState.pages); const grid = panel.addGrid(); const imageRow = grid.addRow({weight: 0.5}); imageRow.addCol({weight: 0.1}); this.imagePager = new xb.HorizontalPager({ state: this.descriptionPagerState, }); imageRow.addCol({weight: 0.8}).add(this.imagePager); imageRow.addCol({weight: 0.1}); for (let i = 0; i < DATA.length; i++) { if (DATA[i].src) { this.imagePager.children[i].addImage({src: DATA[i].src}); } else { this.imagePager.children[i].add(new xb.View()); } } grid.addRow({weight: 0.15}); const controlRow = grid.addRow({weight: 0.35}); const ctrlPanel = controlRow.addPanel({backgroundColor: '#000000D9'}); const ctrlGrid = ctrlPanel.addGrid(); { const leftColumn = ctrlGrid.addCol({weight: 0.1}); this.backButton = leftColumn.addIconButton({ text: 'arrow_back', fontSize: 0.5, paddingX: 0.2, }); const midColumn = ctrlGrid.addCol({weight: 0.8}); const descRow = midColumn.addRow({weight: 0.8}); this.descRow = descRow; // TODO: use phong and point light to highlight gemini. this.add(this.descriptionPagerState); this.descriptionPager = new xb.HorizontalPager({ state: this.descriptionPagerState, enableRaycastOnChildren: false, }); descRow.add(this.descriptionPager); this.transcriptView = new xb.ScrollingTroikaTextView({ text: '', fontSize: 0.05, textAlign: 'left', }); for (let i = 0; i < DATA.length; i++) { this.descriptionPager.children[i].add( new xb.TextView({ text: this.data[i].prompt, fontColor: '#ffffff', imageOverlay: 'images/gradient.png', /** This modifier makes the gradient more towards purple. */ imageOffsetX: 0.2, }) ); } const botRow = midColumn.addRow({weight: 0.1}); botRow.add( new xb.PageIndicator({ pagerState: this.descriptionPager.state, fontColor: '#FFFFFF', }) ); const rightColumn = ctrlGrid.addCol({weight: 0.1}); this.forwardButton = rightColumn.addIconButton({ text: 'arrow_forward', fontSize: 0.5, paddingX: -0.2, }); this.micButton = ctrlGrid.addCol({weight: 0.1}).addIconButton({ text: 'mic', fontSize: 0.8, paddingX: -2, paddingY: -1, fontColor: '#fdfdfdff', }); this.micButton.onTriggered = () => { this.toggleGeminiLive(); }; } const orbiter = ctrlGrid.addOrbiter(); orbiter.addExitButton(); panel.updateLayouts(); this.panel = panel; // TODO(M): This is a bad design, onSelect is triggered twice // when user pinches. this.backButton.onTriggered = (id) => { console.log('back button'); this.loadPrevious(); }; this.forwardButton.onTriggered = (id) => { console.log('forward button'); this.loadNext(); }; } /** * Initializes the script. 
*/ init() { this.loadModels(); xb.core.renderer.localClippingEnabled = true; this.add(new THREE.HemisphereLight(0x888877, 0x777788, 3)); const light = new THREE.DirectionalLight(0xffffff, 5.0); light.position.set(-0.5, 4, 1.0); this.add(light); this.panel.position.set(0, 1.2, -1.0); if (!xb.core.ai || !xb.core.ai.options.gemini.apiKey) { this.micButton.visible = false; } } reload() { const roundedCurrentPage = Math.round( this.descriptionPagerState.currentPage ); if (roundedCurrentPage != this.journeyId) { this.descriptionPagerState.currentPage = this.journeyId; } } loadPrevious() { this.journeyId = (this.journeyId - 1 + this.data.length) % this.data.length; this.reload(); } loadNext() { this.journeyId = (this.journeyId + 1 + this.data.length) % this.data.length; this.reload(); } update() { const deltaTime = xb.getDeltaTime(); this.time += deltaTime; if (this.micButtonInitialY === null && this.micButton.visible) { this.micButtonInitialY = this.micButton.position.y; } const roundedCurrentPage = Math.round( this.descriptionPagerState.currentPage ); if (this.journeyId != roundedCurrentPage) { this.journeyId = roundedCurrentPage; this.reload(); } for (const model of this.data) { model.modelAnimation?.update(deltaTime); } if ( this.micButtonInitialY !== null && this.micButton.visible && this.isAIRunning ) { const jumpHeight = 0.05; const jumpSpeed = 4; this.micButton.position.y = this.micButtonInitialY + Math.abs(Math.sin(this.time * jumpSpeed)) * jumpHeight; } else if (this.micButtonInitialY !== null) { this.micButton.position.y = this.micButtonInitialY; } } loadModels() { for (let i = 0; i < this.data.length; i++) { if (this.data[i].model) { const data = this.data[i]; const model = new xb.ModelViewer({}); model.loadGLTFModel({ data: this.data[i].model, setupPlatform: false, setupRaycastCylinder: false, setupRaycastBox: true, renderer: xb.core.renderer, onSceneLoaded: () => { this.reload(); this.imagePager.children[i].children[0].add(model); data.modelAnimation?.setModel(model); }, }); this.models[i] = model; } } } async toggleGeminiLive() { return this.isAIRunning ? 
this.stopGeminiLive() : this.startGeminiLive(); } async startGeminiLive() { if (this.isAIRunning) return; try { this.descriptionPager.visible = false; this.descRow.add(this.transcriptView); this.transcriptView.visible = true; this.transcriptionManager = new TranscriptionManager(this.transcriptView); await xb.core.sound.enableAudio(); await this.startLiveAI(); this.startScreenshotCapture(); this.isAIRunning = true; } catch (error) { console.error('Failed to start AI session:', error); this.cleanup(); this.isAIRunning = false; } } async stopGeminiLive() { if (this.transcriptionManager) { this.transcriptionManager.clear(); } if (!this.isAIRunning) return; this.descriptionPager.state.currentPage = this.journeyId; this.descriptionPager.visible = true; this.transcriptView.visible = false; await xb.core.ai?.stopLiveSession?.(); this.cleanup(); if (this.screenshotInterval) { clearInterval(this.screenshotInterval); this.screenshotInterval = null; } } async startLiveAI() { return new Promise((resolve, reject) => { xb.core.ai.setLiveCallbacks({ onopen: resolve, onmessage: (message) => this.handleAIMessage(message), onerror: reject, onclose: (closeEvent) => { this.cleanup(); this.isAIRunning = false; }, }); xb.core.ai.startLiveSession().catch(reject); }); } handleAIMessage(message) { message.data && xb.core.sound.playAIAudio(message.data); const content = message.serverContent; if (content) { content.inputTranscription?.text && this.transcriptionManager.handleInputTranscription( content.inputTranscription.text ); content.outputTranscription?.text && this.transcriptionManager.handleOutputTranscription( content.outputTranscription.text ); content.turnComplete && this.transcriptionManager.finalizeTurn(); } } cleanup() { this.isAIRunning = false; } startScreenshotCapture() { this.screenshotInterval = setInterval(async () => { const base64Image = await xb.core.screenshotSynthesizer.getScreenshot(); if (base64Image) { const base64Data = base64Image.startsWith('data:') ? 
base64Image.split(',')[1] : base64Image; try { xb.core.ai?.sendRealtimeInput?.({ video: {data: base64Data, mimeType: 'image/png'}, }); } catch (error) { console.warn(error); this.stopGeminiLive(); } } }, 1000); } } (example: ../xrblocks/demos/gemini-icebreakers/TranscriptionManager.js) export class TranscriptionManager { constructor(responseDisplay) { this.responseDisplay = responseDisplay; this.currentInputText = ''; this.currentOutputText = ''; this.conversationHistory = []; } handleInputTranscription(text) { if (!text) return; this.currentInputText += text; this.updateLiveDisplay(); } handleOutputTranscription(text) { if (!text) return; this.currentOutputText += text; this.updateLiveDisplay(); } finalizeTurn() { if (this.currentInputText.trim()) { this.conversationHistory.push({ speaker: 'You', text: this.currentInputText.trim(), }); } if (this.currentOutputText.trim()) { this.conversationHistory.push({ speaker: 'AI', text: this.currentOutputText.trim(), }); } this.currentInputText = ''; this.currentOutputText = ''; this.updateFinalDisplay(); } updateLiveDisplay() { let displayText = ''; for (const entry of this.conversationHistory.slice(-2)) { displayText += `${entry.speaker}: ${entry.text}\n\n`; } if (this.currentInputText.trim()) { displayText += `You: ${this.currentInputText}`; } if (this.currentOutputText.trim()) { if (this.currentInputText.trim()) displayText += '\n\n'; displayText += `AI: ${this.currentOutputText}`; } this.responseDisplay?.setText(displayText); } updateFinalDisplay() { let displayText = ''; for (const entry of this.conversationHistory) { displayText += `${entry.speaker}: ${entry.text}\n\n`; } this.responseDisplay?.setText(displayText); } clear() { this.currentInputText = ''; this.currentOutputText = ''; this.conversationHistory = []; } addText(text) { this.responseDisplay?.addText(text + '\n\n'); } setText(text) { this.responseDisplay?.setText(text); } } (example: ../xrblocks/demos/gemini-icebreakers/index.html) Gemini Icebreakers
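For reference, a hedged sketch that reduces the Gemini Live wiring above to its core calls (each API appears in GeminiIcebreakers.js; the function name is illustrative, and error handling and transcription are omitted):

```js
import * as xb from 'xrblocks';

// Sketch: start a live session, play audio replies, and stream one
// screenshot per second, mirroring GeminiIcebreakers above.
async function startLiveWithScreenshots() {
  await xb.core.sound.enableAudio();
  xb.core.ai.setLiveCallbacks({
    onopen: () => console.log('Live session open'),
    onmessage: (message) => {
      if (message.data) xb.core.sound.playAIAudio(message.data);
    },
    onerror: (error) => console.error(error),
    onclose: () => console.log('Live session closed'),
  });
  await xb.core.ai.startLiveSession();
  setInterval(async () => {
    const shot = await xb.core.screenshotSynthesizer.getScreenshot();
    if (!shot) return;
    const data = shot.startsWith('data:') ? shot.split(',')[1] : shot;
    xb.core.ai?.sendRealtimeInput?.({video: {data, mimeType: 'image/png'}});
  }, 1000);
}
```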
(example: ../xrblocks/demos/gemini-icebreakers/main.js) import 'xrblocks/addons/simulator/SimulatorAddons.js'; import * as xb from 'xrblocks'; import {GeminiIcebreakers} from './GeminiIcebreakers.js'; const options = new xb.Options({ antialias: true, reticles: {enabled: true}, visualizeRays: true, }); options.enableAI(); options.enableCamera(); async function start() { xb.add(new GeminiIcebreakers()); await xb.init(options); } document.addEventListener('DOMContentLoaded', function () { start(); }); (example: ../xrblocks/demos/gemini-icebreakers/materials/LinearGradientMaterial.js) import * as THREE from 'three'; export class LinearGradientMaterial extends THREE.ShaderMaterial { constructor() { const options = { uniforms: { time: {value: 1.0}, resolution: {value: new THREE.Vector2()}, }, vertexShader: /* glsl */ ` varying vec2 vTexCoord; void main() { vTexCoord = uv; gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 ); } `, fragmentShader: /* glsl */ ` precision mediump float; varying vec2 vTexCoord; void main() { // --- Constants --- const vec4 startColor = vec4(0.125,0.486,1.000, 1.0); const vec4 quarterColor = vec4(0.035,0.557,0.984, 1.0); const vec4 thirdQuarterColor = vec4(0.678,0.529,0.922, 1.0); const vec4 endColor = vec4(0.933,0.302,0.369, 1.0); const float angle = radians(90.0 + 16.0); const vec2 origin = vec2(0.5, 0.5); // --- Pre-calculate rotation components --- float rotation = radians(90.0) - angle; float cosRot = cos(rotation); float sinRot = sin(rotation); // --- Normalized and centered UV --- vec2 uv = vTexCoord - origin; // --- Rotate the UV coordinates --- vec2 rotatedUV = vec2( cosRot * uv.x - sinRot * uv.y, sinRot * uv.x + cosRot * uv.y ) + origin; // --- Original color mixing logic based on rotatedUV.x --- if (rotatedUV.x < 0.5) { gl_FragColor = mix(startColor, quarterColor, smoothstep(0.0, 0.25, rotatedUV.x)); } else if (rotatedUV.x < 0.75) { gl_FragColor = mix(quarterColor, thirdQuarterColor, smoothstep(0.5, 0.75, rotatedUV.x)); } else { gl_FragColor = mix(thirdQuarterColor, endColor, smoothstep(0.75, 1.0, rotatedUV.x)); } }`, }; super(options); } } Demo: gemini-xrobject (example: ../xrblocks/demos/gemini-xrobject/TouchableSphere.js) import * as THREE from 'three'; import {Text} from 'troika-three-text'; import * as xb from 'xrblocks'; const HANDEDNESS = xb.Handedness; class TouchableSphere extends xb.MeshScript { /** * @const {number} A multiplier to scale the icon size relative to the sphere * radius. */ static ICON_SIZE_MULTIPLIER = 1.2; /** * @param {xb.DetectedObject} detectedObject The detected object from * ObjectDetector. * @param {number} radius The radius of the sphere. * @param {string} iconName The name of the Google Icons icon to show. * @todo Adapt and integrate with sdk/ui. 
*/ constructor(detectedObject, radius = 0.2, iconName = 'help') { const inactiveColor = new THREE.Color(0xd1e2ff); // Cannot access 'this' before super() const geometry = new THREE.SphereGeometry(radius, 32, 16); const material = new THREE.MeshBasicMaterial({ color: inactiveColor, transparent: true, opacity: 0.9, }); super(geometry, material); this.inactiveColor = inactiveColor; this.activeColor = new THREE.Color(0x4970ff); this.textColor = new THREE.Color(0xffffff); this.textFontSize = 0.05; this.textAnchorX = 'center'; this.textAnchorY = 'bottom'; this.textOffsetY = 0.01; // Offset above the sphere this.touchDistanceThreshold = radius * 2; this.sphereRadius = radius; this.labelText = detectedObject.label; this.object = detectedObject; this.wasTouchedLastFrame = false; this.position.copy(detectedObject.position); this.iconFont = 'https://fonts.gstatic.com/s/materialicons/v143/flUhRq6tzZclQEJ-Vdg-IuiaDsNa.woff'; this.iconName = iconName; this.iconFontSize = this.sphereRadius * TouchableSphere.ICON_SIZE_MULTIPLIER; this.iconColor = new THREE.Color(0xffffff); this.iconMesh = null; this.raycaster = null; this.textLabel = null; } init(xrCoreInstance) { super.init(xrCoreInstance); if (this.scene && !this.parent) { this.scene.add(this); } // Create and configure the text label this.textLabel = new Text(); this.textLabel.text = this.labelText; this.textLabel.fontSize = this.textFontSize; this.textLabel.color = this.textColor; this.textLabel.anchorX = this.textAnchorX; this.textLabel.anchorY = this.textAnchorY; // Position the label above the sphere this.textLabel.position.set(0, this.sphereRadius + this.textOffsetY, 0); this.add(this.textLabel); // Add label as a child of the sphere this.textLabel.sync(); // Create and configure the icon this.iconMesh = new Text(); this.iconMesh.text = this.iconName; this.iconMesh.font = this.iconFont; this.iconMesh.fontSize = this.iconFontSize; this.iconMesh.color = this.iconColor; this.iconMesh.anchorX = 'center'; this.iconMesh.anchorY = 'middle'; this.iconMesh.material.depthTest = false; // Keep icon visible this.iconMesh.renderOrder = this.renderOrder + 1; // Render icon on top of sphere // Position the icon at the center of the sphere this.iconMesh.position.set(0, 0, 0); this.add(this.iconMesh); this.iconMesh.sync(); // Initialize Raycaster this.raycaster = new THREE.Raycaster(); } update() { if ( !this.material || !xb.core.user || !this.textLabel || !xb.core?.camera ) { return; } if ((xb.core.user.controllers && !this.raycaster) || !this.iconMesh) { return; } let isTouchedThisFrame = false; let touchInitiator = null; // Will hold the controller or hand info // Check for controller touch (ray-based) for (const controller of xb.core.user.controllers) { if (controller && controller.visible) { this.raycaster.setFromXRController(controller); const intersections = this.raycaster.intersectObject(this, false); // 'this' is the sphere mesh if (intersections.length > 0) { isTouchedThisFrame = true; touchInitiator = controller; break; // Stop checking other controllers if one is touching } } } // Check for hand touch if XRHands is enabled and available if (xb.core.user.hands && !isTouchedThisFrame) { const sphereWorldPosition = new THREE.Vector3(); this.getWorldPosition(sphereWorldPosition); // Get sphere's current world position const handednessToCheck = [HANDEDNESS.LEFT, HANDEDNESS.RIGHT]; for (const handSide of handednessToCheck) { const hand = xb.core.user.hands.hands[handSide]; if (Object.keys(hand.joints).length === 0) continue; // Skip an untracked hand but still check the other one. const indexTip =
xb.core.user.hands.getIndexTip(handSide); if (indexTip) { const jointWorldPosition = new THREE.Vector3(); indexTip.getWorldPosition(jointWorldPosition); const distanceToJoint = sphereWorldPosition.distanceTo(jointWorldPosition); if ( distanceToJoint <= this.sphereRadius + this.touchDistanceThreshold ) { isTouchedThisFrame = true; touchInitiator = {type: 'hand', side: handSide, joint: indexTip}; break; // Stop checking other hand/joints if one is touching } } } } // The 'target' is the sphere itself, 'initiator' is what caused the touch const selectEvent = {target: this, initiator: touchInitiator}; // Handle touch state changes and trigger events if (isTouchedThisFrame && !this.wasTouchedLastFrame) { this.material.color.set(this.activeColor); this.onSelectStart(selectEvent); this.onSelect(selectEvent); // Called on the frame touch starts } else if (!isTouchedThisFrame && this.wasTouchedLastFrame) { this.material.color.set(this.inactiveColor); this.onSelectEnd(selectEvent); } else if (isTouchedThisFrame) { this.onSelect(selectEvent); } this.wasTouchedLastFrame = isTouchedThisFrame; const cameraPosition = xb.core.camera.position; this.iconMesh.lookAt(cameraPosition); this.textLabel.lookAt(cameraPosition); } /** * Sets the visual state (i.e., color) of the sphere to active or inactive. * @param {boolean} isActive Whether the sphere should be in an active state. */ setActive(isActive) { if (!this.material) { return; } this.material.color.set(isActive ? this.activeColor : this.inactiveColor); } dispose() { if (this.textLabel) { this.remove(this.textLabel); this.textLabel.dispose(); this.textLabel = null; } if (this.iconMesh) { this.remove(this.iconMesh); this.iconMesh.dispose(); this.iconMesh = null; } super.dispose(); } } export {TouchableSphere}; (example: ../xrblocks/demos/gemini-xrobject/XRObjectManager.js) import * as xb from 'xrblocks'; import {TouchableSphere} from './TouchableSphere.js'; /** * Manages the lifecycle of object detection, user interaction via voice, and * the visualization of detected objects as interactive spheres. This class * serves as the main application logic for the demo. * @extends {xb.Script} */ export class XRObjectManager extends xb.Script { /** * Initializes properties and configures the AI model for user queries. */ constructor() { super(); this.objectSphereRadius = 0.03; this.activeSphere = null; this.geminiConfig = { userQuery: { thinkingConfig: { thinkingBudget: 0, }, systemInstruction: [ { text: `You're an informative and helpful AI assistant specializing in identifying and describing objects within images. Your primary goal is to provide detailed yet concise answers to user questions, making a best effort to respond even if you're not entirely sure or the image quality is poor. When describing objects, strive for maximum detail without being verbose, focusing on key characteristics. Please ignore any hands or other human body parts present in the image. User queries will always be structured like this: {object: '...', question: '...'}`, }, ], responseMimeType: 'application/json', responseSchema: { type: 'OBJECT', required: ['answer'], properties: { answer: {type: 'STRING'}, }, }, }, }; } /** * Sets up listeners for the speech recognizer. 
* @override */ init() { if (xb.core.sound.speechRecognizer) { xb.core.sound.speechRecognizer.addEventListener( 'result', this.handleSpeechResult.bind(this) ); xb.core.sound.speechRecognizer.addEventListener('end', () => { this.activeSphere?.setActive(false); this.activeSphere = null; }); } else { console.error('Speech recognizer not available at init.'); } } /** * Processes the final transcript from a speech recognition event and queries * the AI with the user's question about the currently active object. * @param {Event} event - The speech recognition result event. */ handleSpeechResult(event) { const {transcript, isFinal} = event; // We only use the final output to construct the query. if (!isFinal) { return; } if (!this.activeSphere) { console.warn('Speech result received, but no active sphere is set.'); return; } // Check if the active sphere has an image to query against. if (!this.activeSphere.object.image) { const warningMsg = "I don't have a specific image for that object, so I can't answer questions about it."; console.warn(warningMsg); xb.core.sound.speechSynthesizer.speak(warningMsg); return; } const prompt = { question: transcript, object: this.activeSphere.object.label, }; this.queryObjectInformation( JSON.stringify(prompt), this.activeSphere.object.image ) .then((response) => { try { const parsedResponse = JSON.parse(response); xb.core.sound.speechSynthesizer.speak(parsedResponse.answer); } catch (e) { console.error('Error parsing AI response JSON:', e); } }) .catch((error) => { const errorMsg = "I'm sorry, I had trouble processing that request. Please try again."; console.error('Failed to get information about the object:', error); xb.core.sound.speechSynthesizer.speak(errorMsg); }); } /** * Triggers the `ObjectDetector` to find objects in the scene and creates an * interactive sphere for each detected object. */ async queryObjectionDetection() { if (!xb.core.world?.objects) { console.error( 'ObjectDetector is not available. Ensure it is enabled in the options.' ); return; } const detectedObjects = await xb.core.world.objects.runDetection(); for (const detectedObject of detectedObjects) { this.createSphereWithLabel(detectedObject); } } /** * Sends a user's question and the cropped image of a detected object to the * AI for a descriptive answer. * @param {string} textPrompt - The JSON string containing the question and * object label. * @param {string} objectImageBase64 - The base64-encoded image of the * object. * @returns {Promise} A promise that resolves with the AI's response. */ queryObjectInformation(textPrompt, objectImageBase64) { if (!xb.core.ai.isAvailable()) { const errorMsg = 'Gemini is unavailable for object query.'; console.error(errorMsg); return Promise.reject(new Error(errorMsg)); } xb.core.options.ai.gemini.config = this.geminiConfig.userQuery; let {mimeType, strippedBase64} = xb.parseBase64DataURL(objectImageBase64); return xb.core.ai .query({ type: 'multiPart', parts: [ {inlineData: {mimeType: mimeType, data: strippedBase64}}, {text: textPrompt}, ], }) .catch((error) => { console.error('AI query for object information failed:', error); throw error; }); } /** * Handles the start of a touch event on a sphere, activating speech * recognition. * @param {Event} event - The selection event from the TouchableSphere. 
*/ onSphereTouchStart(event) { if ( typeof event.target !== 'object' || !(event.target instanceof TouchableSphere) ) { return; } if (xb.core.sound.speechSynthesizer.isSpeaking) { event.target.setActive(false); return; } // Set the active sphere for the speech result handler to use. this.activeSphere = event.target; this.activeSphere.setActive(true); xb.core.sound.speechRecognizer.start(); } /** * Handles the end of a touch event on a sphere, stopping speech recognition. * @param {Event} event - The selection event from the TouchableSphere. */ onSphereTouchEnd(event) { if ( typeof event.target !== 'object' || !(event.target instanceof TouchableSphere) ) { return; } xb.core.sound.speechRecognizer.stop(); } /** * Creates a `TouchableSphere` instance for a detected object and adds it to * the scene. * @param {xb.DetectedObject} detectedObject - The object data from the * ObjectDetector. */ createSphereWithLabel(detectedObject) { const touchableSphereInstance = new TouchableSphere( detectedObject, this.objectSphereRadius, 'live_help' ); touchableSphereInstance.onSelectStart = (event) => this.onSphereTouchStart(event); touchableSphereInstance.onSelectEnd = (event) => this.onSphereTouchEnd(event); xb.add(touchableSphereInstance); } } (example: ../xrblocks/demos/gemini-xrobject/index.html) Gemini XR-Objects (example: ../xrblocks/demos/gemini-xrobject/main.js) import 'xrblocks/addons/simulator/SimulatorAddons.js'; import {LongSelectHandler} from 'xrblocks/addons/ui/LongSelectHandler.js'; import * as xb from 'xrblocks'; import {XRObjectManager} from './XRObjectManager.js'; const options = new xb.Options(); options.deviceCamera.enabled = true; options.deviceCamera.videoConstraints = { width: {ideal: 640}, height: {ideal: 480}, facingMode: 'environment', }; options.permissions.camera = true; options.reticles.enabled = false; options.controllers.visualizeRays = false; options.world.enableObjectDetection(); options.depth.enabled = true; options.depth.depthMesh.updateFullResolutionGeometry = true; options.depth.depthMesh.renderShadow = true; options.depth.depthTexture.enabled = false; options.depth.matchDepthView = false; options.hands.enabled = true; options.hands.visualization = false; options.hands.visualizeMeshes = false; options.sound.speechSynthesizer.enabled = true; options.sound.speechRecognizer.enabled = true; options.sound.speechRecognizer.playSimulatorActivationSounds = true; // options.ai.gemini.config is dynamic and defined in XRObjectManager. A Gemini // API key needs to be provided in the URL: /gemini-xrobject/index.html?key=... // or provided with `keys.json` in the same directory. options.ai.enabled = true; options.ai.gemini.enabled = true; options.ai.gemini.model = 'gemini-2.5-flash'; options.setAppTitle('Gemini XR-Objects'); options.setAppDescription( 'Recognize objects with Gemini and ask questions about them. Perform a long pinch / press to start!' 
); options.xrButton.showEnterSimulatorButton = true; function start() { const xrObjectManager = new XRObjectManager(); const longSelectHandler = new LongSelectHandler( xrObjectManager.queryObjectionDetection.bind(xrObjectManager) ); xb.add(xrObjectManager); xb.add(longSelectHandler); xb.init(options); } document.addEventListener('DOMContentLoaded', function () { start(); }); Demo: measure (example: ../xrblocks/demos/measure/MeasureScene.js) import * as THREE from 'three'; import * as xb from 'xrblocks'; import {MeasuringTape} from './MeasuringTape.js'; const palette = [0x0f9d58, 0xf4b400, 0x4285f4, 0xdb4437]; export class MeasureScene extends xb.Script { activeMeasuringTapes = new Map(); currentColorIndex = 0; init() { xb.showReticleOnDepthMesh(true); this.setupLights(); } setupLights() { const light = new THREE.DirectionalLight(0xffffff, 2.0); light.position.set(0.5, 1, 0.866); this.add(light); const light2 = new THREE.DirectionalLight(0xffffff, 1.0); light2.position.set(-1, 0.5, -0.5); this.add(light2); const light3 = new THREE.AmbientLight(0x404040, 2.0); this.add(light3); } onSelectStart(event) { const controller = event.target; const intersections = xb.core.input.intersectionsForController.get(controller); if (intersections.length == 0) return; if (this.activeMeasuringTapes.has(controller)) { this.remove(this.activeMeasuringTapes.get(controller)); } const closestIntersection = intersections[0]; const color = palette[this.currentColorIndex]; this.currentColorIndex = (this.currentColorIndex + 1) % palette.length; const measuringTape = new MeasuringTape( closestIntersection.point, closestIntersection.point, 0.05, color ); this.add(measuringTape); this.activeMeasuringTapes.set(controller, measuringTape); } update() { for (const [controller, tape] of this.activeMeasuringTapes) { const intersections = xb.core.input.intersectionsForController .get(controller) .filter((intersection) => { let target = intersection.object; while (target) { if (target.ignoreReticleRaycast === true) { return false; } target = target.parent; } return true; }); if (intersections.length > 0) { tape.setSecondPoint(intersections[0].point); } } } onSelectEnd(event) { const controller = event.target; this.activeMeasuringTapes.delete(controller); } } (example: ../xrblocks/demos/measure/MeasuringTape.js) import * as THREE from 'three'; import {FontLoader} from 'three/addons/loaders/FontLoader.js'; const fontLoader = new FontLoader(); const DEFAULT_FONT_PATH = 'https://cdn.jsdelivr.net/gh/mrdoob/three.js@r180/examples/fonts/droid/droid_sans_regular.typeface.json'; const upVector = new THREE.Vector3(0.0, 1.0, 0.0); const backwardsVector = new THREE.Vector3(0.0, 0.0, 1.0); export class MeasuringTape extends THREE.Object3D { ignoreReticleRaycast = true; constructor( firstPoint, secondPoint, radius = 0.05, visualColor = 0xffffff, textColor = 0xff0000, fontPath = DEFAULT_FONT_PATH ) { super(); this.firstPoint = firstPoint.clone(); this.secondPoint = secondPoint.clone(); this.radius = radius; this.cylinder = new THREE.Mesh( new THREE.CylinderGeometry(radius, radius), new THREE.MeshStandardMaterial({ color: visualColor, side: THREE.FrontSide, }) ); this.visual = new THREE.Object3D(); this.visual.add(this.cylinder); this.add(this.visual); this.updateVisual(); this.textGeometry = null; this.textMaterial = new THREE.MeshBasicMaterial({ color: textColor, side: THREE.DoubleSide, }); this.textMesh = null; this.camera = null; fontLoader.load(fontPath, (font) => { this.textFont = font; this.updateText(); }); } getLengthText() { const 
length = this.secondPoint.distanceTo(this.firstPoint); return `${length.toFixed(2)} m`; } updateText() { if (!this.textFont) { // Font is not loaded. return; } if (this.textGeometry) { this.textGeometry.dispose(); } const textShapes = this.textFont.generateShapes(this.getLengthText()); this.textGeometry = new THREE.ShapeGeometry(textShapes); this.textGeometry.computeBoundingBox(); const tempVector = new THREE.Vector3(); this.textGeometry.boundingBox.getCenter(tempVector); this.textGeometry.translate(-tempVector.x, -tempVector.y, -tempVector.z); if (this.textMesh) { this.remove(this.textMesh); } this.textMesh = new THREE.Mesh(this.textGeometry, this.textMaterial); this.textMesh.position .copy(this.firstPoint) .add(this.secondPoint) .multiplyScalar(0.5); const offset = new THREE.Vector3( 0.0, 0.0, 0.1 + 1.5 * this.radius ).applyQuaternion(this.cylinder.quaternion); this.textMesh.position.add(offset); this.textMesh.scale.setScalar(0.0005); this.add(this.textMesh); } setSecondPoint(point) { this.secondPoint.copy(point); this.updateVisual(); this.updateText(); } rotateTextToFaceCamera(cameraPosition) { if (this.textMesh) { this.textMesh.quaternion.setFromUnitVectors( backwardsVector, cameraPosition.clone().sub(this.textMesh.position).normalize() ); } } updateVisual() { const tempVector = this.cylinder.position; this.cylinder.quaternion.setFromUnitVectors( upVector, tempVector.copy(this.secondPoint).sub(this.firstPoint).normalize() ); this.cylinder.scale.set( 1.0, tempVector.subVectors(this.secondPoint, this.firstPoint).length(), 1.0 ); this.cylinder.position .copy(this.firstPoint) .add(this.secondPoint) .multiplyScalar(0.5); } } (example: ../xrblocks/demos/measure/index.html) Measure (example: ../xrblocks/demos/measure/main.js) import 'xrblocks/addons/simulator/SimulatorAddons.js'; import * as xb from 'xrblocks'; import {MeasureScene} from './MeasureScene.js'; const options = new xb.Options({ antialias: true, reticles: {enabled: true}, visualizeRays: true, depth: xb.xrDepthMeshPhysicsOptions, }); function start() { xb.add(new MeasureScene()); options.setAppTitle('XR Measure'); xb.init(options); } document.addEventListener('DOMContentLoaded', function () { start(); }); Demo: occlusion (example: ../xrblocks/demos/occlusion/DepthMeshClone.js) import * as THREE from 'three'; import {xrDepthMeshOptions} from 'xrblocks'; export class DepthMeshClone extends THREE.Mesh { constructor() { super( new THREE.PlaneGeometry(), new THREE.ShadowMaterial({ opacity: xrDepthMeshOptions.depthMesh.shadowOpacity, depthWrite: false, }) ); this.receiveShadow = true; } cloneDepthMesh(depthMesh) { this.geometry.dispose(); this.geometry = depthMesh.geometry.clone(); depthMesh.getWorldPosition(this.position); depthMesh.getWorldQuaternion(this.quaternion); depthMesh.getWorldScale(this.scale); } } (example: ../xrblocks/demos/occlusion/OcclusionScene.js) import * as THREE from 'three'; import * as xb from 'xrblocks'; import {ModelManager} from 'xrblocks/addons/ui/ModelManager.js'; import {ANIMALS_DATA} from './animals_data.js'; import {DepthMeshClone} from './DepthMeshClone.js'; const kLightX = xb.getUrlParamFloat('lightX', 0); const kLightY = xb.getUrlParamFloat('lightY', 500); const kLightZ = xb.getUrlParamFloat('lightZ', -10); export class OcclusionScene extends xb.Script { constructor() { super(); this.pointer = new THREE.Vector3(); this.depthMeshClone = new DepthMeshClone(); this.raycaster = new THREE.Raycaster(); this.modelManager = new ModelManager( ANIMALS_DATA, /*enableOcclusion=*/ true ); 
this.modelManager.layers.enable(xb.OCCLUDABLE_ITEMS_LAYER); this.add(this.modelManager); this.instructionText = 'Pinch on the environment and try hiding the cat behind sofa!'; this.instructionCol = null; } init() { this.addLights(); xb.showReticleOnDepthMesh(true); this.addPanel(); } addPanel() { const panel = new xb.SpatialPanel({ backgroundColor: '#00000000', useDefaultPosition: false, showEdge: false, }); panel.position.set(0, 1.6, -1.0); panel.isRoot = true; this.add(panel); const grid = panel.addGrid(); grid.addRow({weight: 0.05}); // Space for orbiter grid.addRow({weight: 0.1}); const controlRow = grid.addRow({weight: 0.3}); const ctrlPanel = controlRow.addPanel({backgroundColor: '#000000bb'}); const ctrlGrid = ctrlPanel.addGrid(); const midColumn = ctrlGrid.addCol({weight: 0.9}); midColumn.addRow({weight: 0.3}); const gesturesRow = midColumn.addRow({weight: 0.4}); gesturesRow.addCol({weight: 0.05}); const textCol = gesturesRow.addCol({weight: 1.0}); this.instructionCol = textCol.addRow({weight: 1.0}).addText({ text: `${this.instructionText}`, fontColor: '#ffffff', fontSize: 0.05, }); gesturesRow.addCol({weight: 0.01}); midColumn.addRow({weight: 0.1}); const orbiter = ctrlGrid.addOrbiter(); orbiter.addExitButton(); panel.updateLayouts(); this.panel = panel; this.frameId = 0; } onSimulatorStarted() { this.instructionText = 'Click on the environment and try hiding the cat behind sofa!'; if (this.instructionCol) { this.instructionCol.setText(this.instructionText); } } addLights() { this.add(new THREE.HemisphereLight(0xbbbbbb, 0x888888, 3)); const light = new THREE.DirectionalLight(0xffffff, 2); light.position.set(kLightX, kLightY, kLightZ); light.castShadow = true; light.shadow.mapSize.width = 2048; // Default is usually 1024 light.shadow.mapSize.height = 2048; // Default is usually 1024 this.add(light); } updatePointerPosition(event) { // (-1 to +1) for both components this.pointer.x = (event.clientX / window.innerWidth) * 2 - 1; this.pointer.y = -(event.clientY / window.innerHeight) * 2 + 1; // scale pointer.x from [-1, 0] to [-1, 1] this.pointer.x = 1 + 2 * this.pointer.x; } onSelectStart(event) { const controller = event.target; if (xb.core.input.intersectionsForController.get(controller).length > 0) { const intersection = xb.core.input.intersectionsForController.get(controller)[0]; if (intersection.handleSelectRaycast) { intersection.handleSelectRaycast(intersection); return; } else if (intersection.object.handleSelectRaycast) { intersection.object.handleSelectRaycast(intersection); return; } else if (intersection.object == xb.core.depth.depthMesh) { this.onDepthMeshSelectStart(intersection); return; } } } onDepthMeshSelectStart(intersection) { this.modelManager.positionModelAtIntersection(intersection, xb.core.camera); } onPointerDown(event) { this.updatePointerPosition(event); const cameras = xb.core.renderer.xr.getCamera().cameras; if (cameras.length == 0) return; const camera = cameras[0]; this.raycaster.setFromCamera(this.pointer, camera); const intersections = this.raycaster.intersectObjects( xb.core.input.reticleTargets ); for (let intersection of intersections) { if (intersection.handleSelectRaycast) { intersection.handleSelectRaycast(intersection); return; } else if (intersection.object.handleSelectRaycast) { intersection.object.handleSelectRaycast(intersection); return; } else if (intersection.object == xb.core.depth.depthMesh) { this.modelManager.positionModelAtIntersection(intersection, camera); return; } } } } (example: ../xrblocks/demos/occlusion/animals_data.js) 
export const ANIMALS_DATA = [ { path: 'https://cdn.jsdelivr.net/gh/xrblocks/assets@main/', model: 'models/Cat/cat.gltf', thumbnail: 'thumbnail.png', }, ]; (example: ../xrblocks/demos/occlusion/index.html) Occlusion
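Both onSelectStart and onPointerDown in OcclusionScene dispatch the same way: they prefer a handleSelectRaycast method on the intersection, then on the intersected object, before falling back to the depth mesh. A minimal sketch of an object opting into that convention (SelectableMarker is a hypothetical name; how the mesh gets registered as a reticle/raycast target is not shown above and is assumed):
import * as THREE from 'three';

// Hypothetical marker object; only the handleSelectRaycast dispatch
// convention is taken from OcclusionScene above, the rest is illustrative.
class SelectableMarker extends THREE.Mesh {
  constructor() {
    super(
      new THREE.SphereGeometry(0.05, 16, 8),
      new THREE.MeshBasicMaterial({color: 0x4285f4})
    );
  }

  // OcclusionScene checks intersection.object.handleSelectRaycast before
  // falling back to the depth mesh, so a select on this mesh lands here.
  handleSelectRaycast(intersection) {
    this.position.copy(intersection.point);
  }
}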
(example: ../xrblocks/demos/occlusion/main.js) import * as xb from 'xrblocks'; import {OcclusionScene} from './OcclusionScene.js'; const options = new xb.Options(); options.reticles.enabled = true; options.depth = new xb.DepthOptions(xb.xrDepthMeshOptions); options.depth.depthMesh.updateFullResolutionGeometry = true; options.depth.depthMesh.renderShadow = true; options.depth.depthTexture.enabled = true; options.depth.occlusion.enabled = true; options.xrButton.startText = ' BRING IT TO LIFE'; options.xrButton.endText = ' MISSION COMPLETE'; async function start() { const occlusion = new OcclusionScene(); await xb.init(options); xb.add(occlusion); window.addEventListener( 'pointerdown', occlusion.onPointerDown.bind(occlusion) ); } document.addEventListener('DOMContentLoaded', function () { start(); }); Demo: rain (example: ../xrblocks/demos/rain/RainParticles.js) import * as THREE from 'three'; const kMaxAnimationFrames = 15; const kAnimationSpeed = 2.0; const DEBUG_SINGLE = false; // const DEBUG_SINGLE = true; function clamp(x, a, b) { return Math.min(Math.max(x, a), b); } export class RainParticles extends THREE.Object3D { constructor() { super(); // Sets the number of particles and defines the range of the raindrop // effect. this.particleCount = DEBUG_SINGLE ? 1 : 200; this.RANGE = 4; this.raycaster = new THREE.Raycaster(); // Initializes arrays for fall speeds, animation weights, and visibility // states of each particle. this.velocities = new Float32Array(this.particleCount); this.particleWeights = new Float32Array(this.particleCount); this.particleVisibility = new Float32Array(this.particleCount); // Placeholder for the InstancedMesh representing the raindrop particles. this.raindropMesh = null; } /** * Initializes raindrop particles with a shader material and instanced * geometry. Loads the texture and sets up the particle mesh and instanced * attributes. */ init() { const textureLoader = new THREE.TextureLoader(); textureLoader.load('textures/rain_sprite_sheet.png', (raindropTexture) => { // Creates a custom shader material for the raindrop particles. const raindropMaterial = this.createRaindropMaterial(raindropTexture); // Creates a simple plane geometry for each raindrop particle. const raindropGeometry = new THREE.PlaneGeometry(0.1, 0.1); // Initializes an InstancedMesh with the defined geometry and material. this.raindropMesh = new THREE.InstancedMesh( raindropGeometry, raindropMaterial, this.particleCount ); // Populates the particle mesh with initial positions and properties. this.initializeParticles(); // Adds instanced attributes for weight and visibility to control raindrop // animation and rendering. this.raindropMesh.geometry.setAttribute( 'aWeight', new THREE.InstancedBufferAttribute(this.particleWeights, 1).setUsage( THREE.DynamicDrawUsage ) ); this.raindropMesh.geometry.setAttribute( 'aVisibility', new THREE.InstancedBufferAttribute(this.particleVisibility, 1).setUsage( THREE.DynamicDrawUsage ) ); // Flags the instance matrix for an initial update and adds the raindrop // mesh to the scene. this.raindropMesh.instanceMatrix.needsUpdate = true; this.add(this.raindropMesh); }); } /** * Creates and returns a custom shader material for the raindrop particles. * Uses a texture and sets up uniforms for the camera position to handle * billboard rotation. 
*/ createRaindropMaterial(texture) { return new THREE.ShaderMaterial({ uniforms: { uTexture: {value: texture}, uCameraPosition: {value: new THREE.Vector3()}, uCameraRotationMatrix: {value: new THREE.Matrix4()}, }, vertexShader: ` attribute float aWeight; attribute float aVisibility; varying float vWeight; varying float vVisibility; varying vec2 vUv; uniform vec3 uCameraPosition; uniform mat4 uCameraRotationMatrix; const float PI = 3.14159265359; void main() { vUv = uv; vWeight = aWeight; vVisibility = aVisibility; // Get the world position of the instance vec4 worldPosition = instanceMatrix * vec4(0.0, 0.0, 0.0, 1.0); vec3 rotatedPosition; if (vWeight < 1.5) { // Compute vector from particle to camera, projected onto XZ plane vec3 toCamera = uCameraPosition - worldPosition.xyz; toCamera.y = 0.0; // Ignore vertical component toCamera = normalize(toCamera); // Compute the angle to rotate around Y-axis float angle = atan(toCamera.x, toCamera.z); // Create rotation matrix around Y-axis mat3 rotationMatrix = mat3( cos(angle), 0.0, -sin(angle), 0.0, 1.0, 0.0, sin(angle), 0.0, cos(angle) ); // Apply rotation to vertex position rotatedPosition = rotationMatrix * position; } else { // Rotate the particle to face positive Y-axis // This is a rotation of -90 degrees around X-axis float angle = 0.5 * PI; // -90 degrees in radians // Create rotation matrix around X-axis mat3 rotationMatrix = mat3( 1.0, 0.0, 0.0, 0.0, cos(angle), -sin(angle), 0.0, sin(angle), cos(angle) ); // Apply rotation to vertex position rotatedPosition = rotationMatrix * position; } // Apply instance transformations vec4 finalPosition = instanceMatrix * vec4(rotatedPosition, 1.0); // Transform to clip space gl_Position = projectionMatrix * viewMatrix * finalPosition; } `, fragmentShader: ` uniform sampler2D uTexture; varying vec2 vUv; varying float vWeight; varying float vVisibility; void main() { const float kAnimationSpeed = 2.0; vec2 uv = vUv * 0.25; // Assumes a 4x4 texture grid. float frame = floor(vWeight / kAnimationSpeed); float xIndex = mod(frame, 4.0); float yIndex = floor(frame / 4.0); uv += vec2(xIndex, 3.0 - yIndex) * 0.25; // Maps frame index to UV coordinates. vec4 texColor = texture2D(uTexture, uv); gl_FragColor = vec4(pow(texColor.rgb, vec3(0.5)), texColor.a * vVisibility * 0.8 - step(vWeight, 0.5) * 0.2); // Applies visibility factor. // gl_FragColor = vec4(0.5, 0.5, 0.0, 1.0); // Applies visibility factor. } `, transparent: true, // side: THREE.DoubleSide, }); } /** * Initializes the positions and properties for each particle. * Assigns random positions, fall speeds, and visibility states to each * particle instance. */ initializeParticles() { const dummy = new THREE.Object3D(); for (let i = 0; i < this.particleCount; i++) { // Assigns random initial position within the defined range. dummy.position.set( Math.random() * this.RANGE * 2 - this.RANGE, Math.random() * this.RANGE * 2, Math.random() * this.RANGE * 2 - this.RANGE ); if (DEBUG_SINGLE) { dummy.position.set(0, 1.2, -1); } // Updates the instance matrix with the dummy object's position. dummy.updateMatrix(); this.raindropMesh.setMatrixAt(i, dummy.matrix); // Sets random fall speed and initial visibility for each particle. this.velocities[i] = Math.random() * 0.05 + 0.2; // this.velocities[i] = 0.1; this.particleWeights[i] = 0; this.particleVisibility[i] = 1; } } /** * Updates particle positions and visibility on each frame. * Adjusts particle weights, visibility, and repositions particles as they * "fall." 
*/ update(camera, xrDepth) { if (!this.raindropMesh) return; const depthMesh = xrDepth.depthMesh; const dummy = new THREE.Object3D(); const particleWeightsAttribute = this.raindropMesh.geometry.attributes.aWeight; const particleVisibilityAttribute = this.raindropMesh.geometry.attributes.aVisibility; // const nextDummy = new THREE.Object3D(); // Compute the camera's rotation excluding Y-axis rotation (yaw) const cameraEuler = new THREE.Euler().setFromQuaternion( camera.quaternion, 'YXZ' ); const cameraEulerNoYaw = new THREE.Euler( cameraEuler.x, // pitch 0, // yaw cameraEuler.z, // roll 'YXZ' ); const cameraRotationMatrix = new THREE.Matrix4().makeRotationFromEuler( cameraEulerNoYaw ); const inverseCameraRotationMatrix = cameraRotationMatrix.clone().invert(); // Update the uniform with the inverse rotation matrix this.raindropMesh.material.uniforms.uCameraRotationMatrix.value.copy( inverseCameraRotationMatrix ); for (let i = 0; i < this.raindropMesh.count; ++i) { // Gets the current transformation matrix of the particle instance. this.raindropMesh.getMatrixAt(i, dummy.matrix); dummy.matrix.decompose(dummy.position, dummy.quaternion, dummy.scale); // Proceeds the raindrop. if (this.particleWeights[i] < 0.5) { dummy.position.y -= this.velocities[i]; } // Computes screen position and depth for visibility checks. const screenPos = dummy.position.clone().project(camera); // Check if the point is within the visible NDC range. // const isWithinFoV = screenPos.x >= -1 && screenPos.x <= 1 && // screenPos.y >= -1 && screenPos.y <= 1 && screenPos.z >= 0 && // screenPos.z <= 1; const isWithinFoV = screenPos.x >= -0.8 && screenPos.x <= 0.6 && screenPos.y >= -1.0 && screenPos.y <= 1.0 && screenPos.z >= 0 && screenPos.z <= 1; let isOccluded = false; let maxVisibility = 1.0; let deltaDepth = 0.0; const isHigh = dummy.position.y > 2.0; if (isWithinFoV) { const depth = xrDepth.getDepth( (screenPos.x + 1) / 2, (screenPos.y + 1) / 2 ); // Transform the point to camera space. const pointInCameraSpace = dummy.position .clone() .applyMatrix4(camera.matrixWorldInverse); // The z-coordinate in camera space is the perpendicular distance to the // camera plane const distanceToCameraPlane = -pointInCameraSpace.z; isOccluded = depth == 0 || depth < distanceToCameraPlane; deltaDepth = Math.abs(distanceToCameraPlane - depth); // console.log( // 'occluded: ' + isOccluded, 'isWithinFoV: ' + isWithinFoV, depth, // distanceToCameraPlane); if ( this.particleWeights[i] == 0 && this.particleVisibility[i] > 0.5 && isOccluded && !isHigh ) { this.particleWeights[i] = 1; if (depth < 0.3) { // maxVisibility *= 0.2 + depth; maxVisibility = 0.0; } else if (depth > 2.0) { maxVisibility *= 0.5 + (4.0 - depth) / 4.0; } // console.log('hit ', dummy.position.y, depth, // distanceToCameraPlane); } } if (isWithinFoV) { // console.log( // depth, distanceToCameraPlane, pointInCameraSpace.z, // dummy.position); this.particleVisibility[i] = isOccluded && !isHigh ? clamp(0.6 - deltaDepth, 0.0, 0.6) : maxVisibility; } else { this.particleVisibility[i] = 0.0; } // Hits the floor. if (dummy.position.y < 0) { dummy.position.y = 0; if (this.particleWeights[i] < 0.5) { this.particleWeights[i] = 1; } this.particleVisibility[i] = isOccluded && !isHigh ? 0.0 : maxVisibility; } if (this.particleWeights[i] > 0) { this.particleWeights[i] += 1; } // Global minimum test. 
if (depthMesh.minDepth < 0.1) { this.particleVisibility[i] = 0.0; } if (this.particleWeights[i] > kMaxAnimationFrames * kAnimationSpeed) { // Resets particle position and animation weight upon animation completion. this.respawnParticle(dummy, i, camera, depthMesh); } // Updates weight and visibility attributes for the shader. particleVisibilityAttribute.setX(i, this.particleVisibility[i]); particleWeightsAttribute.setX(i, this.particleWeights[i]); dummy.updateMatrix(); this.raindropMesh.setMatrixAt(i, dummy.matrix); } // Marks mesh attributes for update and updates camera position uniform. this.raindropMesh.instanceMatrix.needsUpdate = true; particleWeightsAttribute.needsUpdate = true; particleVisibilityAttribute.needsUpdate = true; this.raindropMesh.material.uniforms.uCameraPosition.value.copy( camera.position ); } /** * Resets a particle's position and animation weight upon completing its splash animation. */ respawnParticle(dummy, index, camera, depthMesh) { let u = Math.random(); let v = Math.random(); const half = Math.random(); let vertex; let inited = false; // With a fixed probability, respawn the particle directly on the depth mesh by raycasting through a random screen point. const threshold = 0.1; if (Math.random() < threshold) { u = u * 0.8 + 0.1; v = v * 0.8 + 0.1; this.raycaster.setFromCamera( {x: u * 2.0 - 1.0, y: v * 2.0 - 1.0}, camera ); const intersections = this.raycaster.intersectObject(depthMesh); if (intersections.length > 0) { vertex = intersections[0].point; inited = true; } } if (!inited) { // Otherwise respawn overhead on a disk around the user, biased toward nearby radii. const theta = u * 2 * Math.PI; let radius = Math.sqrt(v) * this.RANGE + 0.2; if (half < 0.5) { radius = Math.sqrt(v) * 0.7 + 0.3; } else if (half < 0.7) { radius = Math.sqrt(v) * 1.5 + 0.3; } vertex = { x: radius * Math.cos(theta), z: radius * Math.sin(theta), y: 4.0, }; } vertex = DEBUG_SINGLE ? new THREE.Vector3(-1, 4, -1) : vertex; dummy.position.set(vertex.x, vertex.y, vertex.z); dummy.rotation.set(0, 0, 0); this.particleWeights[index] = inited ? 1.0 : 0.0; } }
(example: ../xrblocks/demos/rain/RainScene.js) import * as xb from 'xrblocks'; import * as THREE from 'three'; import {VolumetricCloud} from 'xrblocks/addons/volumes/VolumetricCloud.js'; import {RainParticles} from './RainParticles.js'; const ASSETS_PATH = 'https://cdn.jsdelivr.net/gh/xrblocks/assets@main/'; export class RainScene extends xb.Script { rainParticles = new RainParticles(); cloud = new VolumetricCloud(); listener = null; rainSound = null; init() { this.add(this.rainParticles); this.rainParticles.init(); this.add(this.cloud); this.listener = new THREE.AudioListener(); xb.core.camera.add(this.listener); this.rainSound = new THREE.Audio(this.listener); const audioLoader = new THREE.AudioLoader(); audioLoader.load(ASSETS_PATH + 'demos/rain/rain.opus', (buffer) => { this.rainSound.setBuffer(buffer); this.rainSound.setLoop(true); // Loop the sound for continuous rain this.rainSound.setVolume(0.5); // Set a comfortable volume this.rainSound.play(); // Start playback console.log('Rain audio loaded and playing.'); }); const startButton = document.getElementById('startButton'); if (startButton) { startButton.addEventListener('click', () => { this.startAudio(); startButton.remove(); // Remove the button after use }); } } startAudio() { if (this.listener.context.state === 'suspended') { this.listener.context.resume(); } if (this.rainSound.buffer) { this.rainSound.play(); console.log('Rain audio started by user gesture.'); } } update() { const leftCamera = xb.getXrCameraLeft() || xb.core.camera; this.rainParticles.update(leftCamera, xb.core.depth); this.cloud.update(xb.core.camera, xb.core.depth); } } (example: ../xrblocks/demos/rain/index.html) Rain

Click to Start Rain Experience
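Before the main.js wiring below, here is a minimal sketch of the per-particle occlusion test that RainParticles.update performs above: project the particle into normalized device coordinates, sample the depth map, and compare against the particle's camera-space distance. It assumes only xrDepth.getDepth(u, v), which the demo code already calls; the helper name is illustrative.
import * as THREE from 'three';

function isParticleOccluded(position, camera, xrDepth) {
  // Project into normalized device coordinates ([-1, 1] on each axis).
  const ndc = position.clone().project(camera);
  if (ndc.x < -1 || ndc.x > 1 || ndc.y < -1 || ndc.y > 1 ||
      ndc.z < 0 || ndc.z > 1) {
    return false; // Outside the frustum: no depth sample available.
  }
  // getDepth expects texture coordinates in [0, 1].
  const depth = xrDepth.getDepth((ndc.x + 1) / 2, (ndc.y + 1) / 2);
  // In camera space, -z is the perpendicular distance to the camera plane.
  const inCamera = position.clone().applyMatrix4(camera.matrixWorldInverse);
  const distance = -inCamera.z;
  // A depth of 0 means "no data" and is treated as occluded, as in the demo.
  return depth === 0 || depth < distance;
}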

(example: ../xrblocks/demos/rain/main.js) import 'xrblocks/addons/simulator/SimulatorAddons.js'; import * as xb from 'xrblocks'; import {RainScene} from './RainScene.js'; const depthMeshColliderUpdateFps = xb.getUrlParamFloat( 'depthMeshColliderUpdateFps', 30 ); const options = new xb.Options(); options.reticles.enabled = false; options.depth = new xb.DepthOptions(xb.xrDepthMeshPhysicsOptions); options.depth.depthMesh.colliderUpdateFps = depthMeshColliderUpdateFps; options.xrButton.startText = ' LET IT RAIN'; options.xrButton.endText = ' MISSION COMPLETE'; // Initializes the scene, camera, xrRenderer, controls, and XR button. async function start() { const rainScene = new RainScene(); xb.add(rainScene); await xb.init(options); } document.addEventListener('DOMContentLoaded', function () { start(); }); Demo: screenwiper (example: ../xrblocks/demos/screenwiper/ScreenWiper.js) import * as THREE from 'three'; import {ShaderPass} from 'three/addons/postprocessing/ShaderPass.js'; import {AlphaShader} from './alphashader.js'; import {ClearShader} from './clearshader.js'; import {ScreenWiperShader} from './screenwipershader.js'; const raycaster = new THREE.Raycaster(); const clearpass = new ShaderPass(ClearShader); clearpass.renderToScreen = false; export class ScreenWiper extends THREE.Mesh { activeControllers = []; constructor() { const NATIVE_RESOLUTION = 1024; const RESOLUTION_MULTIPLIER = 4; const RESOLUTION = NATIVE_RESOLUTION * RESOLUTION_MULTIPLIER; const renderTargetA = new THREE.WebGLRenderTarget(RESOLUTION, RESOLUTION); const renderTargetB = new THREE.WebGLRenderTarget(RESOLUTION, RESOLUTION); const geometry = new THREE.SphereGeometry(15, 32, 16); const material = new THREE.ShaderMaterial({ name: 'ScreenWiperShader', defines: Object.assign({}, ScreenWiperShader.defines), uniforms: THREE.UniformsUtils.clone(ScreenWiperShader.uniforms), vertexShader: ScreenWiperShader.vertexShader, fragmentShader: ScreenWiperShader.fragmentShader, transparent: true, side: THREE.DoubleSide, }); super(geometry, material); this.renderTargetA = renderTargetA; this.renderTargetB = renderTargetB; this.shaderpass = new ShaderPass(AlphaShader); this.controllerActiveUniforms = [ this.shaderpass.material.uniforms.uLeftWiperActive, this.shaderpass.material.uniforms.uRightWiperActive, ]; this.controllerCartesianCoordinateUniforms = [ this.shaderpass.material.uniforms.uLeftHandCartesianCoordinate, this.shaderpass.material.uniforms.uRightHandCartesianCoordinate, ]; this.worldPosition = new THREE.Vector3(); this.starttime = Date.now(); } clear(renderer) { // Remember renderer state. const xrEnabled = renderer.xr.enabled; const xrTarget = renderer.getRenderTarget(); // Render to offscreen buffer. 
renderer.xr.enabled = false; clearpass.render(renderer, this.renderTargetA, null); clearpass.render(renderer, this.renderTargetB, null); this.material.uniforms.uMask.value = this.renderTargetB.texture; // Reset renderer state renderer.xr.enabled = xrEnabled; renderer.setRenderTarget(xrTarget); } update(renderer) { const camera = renderer.xr.getCamera().cameras[0]; if (camera != null) { // Make this headtracked this.position.copy(camera.position); } this.getWorldPosition(this.worldPosition); for (let i = 0; i < 2; i++) { if (i < this.activeControllers.length) { this.controllerActiveUniforms[i].value = true; const controller = this.activeControllers[i]; raycaster.setFromXRController(controller); const intersects = raycaster.intersectObject(this); if (intersects.length > 0) { this.controllerCartesianCoordinateUniforms[i].value .copy(intersects[0].point) .sub(this.worldPosition) .normalize(); } } else { this.controllerActiveUniforms[i].value = false; } } const elapsedTime = (Date.now() - this.starttime) / 1000; if (this.material.uniforms.uTime) { this.material.uniforms.uTime.value.set( elapsedTime / 20, elapsedTime, elapsedTime * 2, elapsedTime * 3 ); } // Remember renderer state. const xrEnabled = renderer.xr.enabled; const xrTarget = renderer.getRenderTarget(); // Render to offscreen buffer. renderer.xr.enabled = false; [this.renderTargetA, this.renderTargetB] = [ this.renderTargetB, this.renderTargetA, ]; this.shaderpass.renderToScreen = false; this.shaderpass.render(renderer, this.renderTargetB, this.renderTargetA); this.material.uniforms.uMask.value = this.renderTargetB.texture; // Reset renderer state renderer.xr.enabled = xrEnabled; renderer.setRenderTarget(xrTarget); } addActiveController(controller) { this.activeControllers.push(controller); } removeActiveController(controller) { const index = this.activeControllers.indexOf(controller); if (index != -1) { this.activeControllers.splice(index, 1); } } dispose() { this.renderTargetA.dispose(); this.renderTargetB.dispose(); this.shaderpass.dispose(); this.material.dispose(); } startTransition(passthrough) { this.shaderpass.material.uniforms.uReturnSpeed.value = passthrough ? 
-0.005 : 0.005; } } (example: ../xrblocks/demos/screenwiper/ScreenWiperScene.js) import * as THREE from 'three'; import * as xb from 'xrblocks'; import {ScreenWiper} from './ScreenWiper.js'; export class ScreenWiperScene extends xb.Script { screenWiper = new ScreenWiper({ width: 1.0, height: 1.0, color: new THREE.Color(0x000000), speed: 0.5, direction: 'right', start: 0.0, end: 1.0, }); constructor(opts) { super(opts); this.add(this.screenWiper); } init() { this.screenWiper.clear(xb.core.renderer); } onSelectStart(event) { this.screenWiper.addActiveController(event.target); } onSelectEnd(event) { this.screenWiper.removeActiveController(event.target); } update() { this.screenWiper.update(xb.core.renderer); } } (example: ../xrblocks/demos/screenwiper/alphashader.js) import * as THREE from 'three'; export const AlphaShader = { name: 'AlphaShader', defines: { DEG_TO_RAD: 3.14159265359 / 180.0, }, uniforms: { tDiffuse: {value: null}, uWiperDegrees: {value: 10.0}, uLeftWiperActive: {value: false}, uLeftHandCartesianCoordinate: {value: new THREE.Vector3(0.0, -1.0, 0.0)}, uRightWiperActive: {value: false}, uRightHandCartesianCoordinate: {value: new THREE.Vector3(0.0, -1.0, 0.0)}, uReturnSpeed: {value: 0.005}, }, vertexShader: /* glsl */ ` varying vec2 vUv; void main() { vUv = uv; gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 ); }`, fragmentShader: /* glsl */ ` #include <common> varying vec2 vUv; uniform sampler2D tDiffuse; uniform float uWiperDegrees; uniform bool uLeftWiperActive; uniform vec3 uLeftHandCartesianCoordinate; uniform bool uRightWiperActive; uniform vec3 uRightHandCartesianCoordinate; uniform float uReturnSpeed; vec3 sphericalToCartesian(vec3 spherical) { float x = spherical.x * cos(spherical.y) * sin(spherical.z); float y = spherical.x * cos(spherical.z); float z = spherical.x * sin(spherical.y) * sin(spherical.z); return vec3(x, y, z); } float getWiperValue(bool wiperActive, vec3 handCartesianCoordinate) { if (!wiperActive) return 1.0; vec3 cartesianCoordinate = sphericalToCartesian(vec3(1.0, PI - vUv.x * 2.0 * PI, PI - vUv.y * PI)); float cosineSimilarity = dot(handCartesianCoordinate, cartesianCoordinate); float wiperValue = 1.0 - smoothstep(cos(uWiperDegrees * DEG_TO_RAD), 1.0, cosineSimilarity); wiperValue = 0.95 + 0.05 * wiperValue; return wiperValue; } void main() { float prevFrameValue = texture(tDiffuse, vUv).g; float newFrameValue = prevFrameValue + uReturnSpeed * (uLeftWiperActive || uRightWiperActive ?
0.0 : 1.0); newFrameValue *= getWiperValue(uLeftWiperActive, uLeftHandCartesianCoordinate); newFrameValue *= getWiperValue(uRightWiperActive, uRightHandCartesianCoordinate); gl_FragColor = vec4(vec3(newFrameValue), 1.0); }`, }; (example: ../xrblocks/demos/screenwiper/clearshader.js) import * as THREE from 'three'; export const ClearShader = { name: 'ClearShader', defines: { DEG_TO_RAD: 3.14159265359 / 180.0, }, uniforms: { uClearColor: { value: new THREE.Vector4(1.0, 1.0, 1.0, 1.0), }, }, vertexShader: /* glsl */ ` varying vec2 vUv; void main() { vUv = uv; gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 ); }`, fragmentShader: /* glsl */ ` #include <common> varying vec2 vUv; uniform vec4 uClearColor; void main() { gl_FragColor = uClearColor; }`, }; (example: ../xrblocks/demos/screenwiper/index.html) Screen Wiper (example: ../xrblocks/demos/screenwiper/main.js) import 'xrblocks/addons/simulator/SimulatorAddons.js'; import * as xb from 'xrblocks'; import {ScreenWiperScene} from './ScreenWiperScene.js'; const options = new xb.Options(); options.antialias = true; options.reticles.enabled = true; options.visualizeRays = true; function start() { xb.add(new ScreenWiperScene()); xb.init(options); } document.addEventListener('DOMContentLoaded', function () { start(); }); (example: ../xrblocks/demos/screenwiper/screenwipershader.js) import * as THREE from 'three'; const noise_texture = new THREE.TextureLoader().load( 'screenwiper_assets/Noise.png' ); noise_texture.wrapS = THREE.RepeatWrapping; noise_texture.wrapT = THREE.RepeatWrapping; const color_map = new THREE.TextureLoader().load( 'screenwiper_assets/ColorMap.png' ); color_map.wrapS = THREE.RepeatWrapping; export const ScreenWiperShader = { name: 'ScreenWiperShader', defines: { DEG_TO_RAD: 3.14159265359 / 180.0, }, uniforms: { uTexture: {value: noise_texture}, uMask: {value: null}, uColorMap: {value: color_map}, uTime: {value: new THREE.Vector4(0.0, 0.0, 0.0, 0.0)}, uMoveSpeed: {value: 0.1}, uColorSpeed: {value: 0.2}, uPulseSpeed: {value: 4.0}, uPulseAmount: {value: 0.025}, uHoleColor: { value: new THREE.Vector4( 49.0 / 255, 103.0 / 255, 154.0 / 255, 64.0 / 255 ), }, }, vertexShader: /* glsl */ ` varying vec2 vUv; void main() { vUv = uv; gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 ); }`, fragmentShader: /* glsl */ ` #include <common> varying vec2 vUv; uniform sampler2D uTexture; uniform sampler2D uMask; uniform sampler2D uColorMap; uniform vec4 uTime; uniform float uColorSpeed; uniform float uMoveSpeed; uniform float uPulseSpeed; uniform float uPulseAmount; uniform vec4 uHoleColor; void main() { // Sample at uv scale 1. vec2 uv1 = vUv; uv1.x += sin(uTime.x * 4.89 * uMoveSpeed); uv1.y += uTime.y * .123 * uMoveSpeed; vec4 tex1 = texture(uTexture, 2.0 * uv1); // Sample at uv scale 2. vec2 uv2 = vUv * 2.0; uv2.x += uTime.y * 0.277 * uMoveSpeed; uv2.y += sin(uTime.x * 6.231 * uMoveSpeed); vec4 tex2 = texture(uTexture, 2.0 * uv2); float totalValue = (tex1.r * 0.75) + (tex2.r * 0.25); vec4 mapColor = texture(uColorMap, vec2(totalValue + uTime.x * uColorSpeed, 0.5)); vec4 col = saturate(mapColor); col.a = 1.0; // Mask.
float pulseInside = sin(uTime.y * uPulseSpeed + vUv.x * 150.0) * uPulseAmount; float pulseOutside = cos(uTime.y * uPulseSpeed + vUv.x * 150.0) * uPulseAmount; vec4 mask = texture(uMask, vUv); vec4 tintedCol = mix(col, uHoleColor, 0.5); col = mix(tintedCol, col, step(0.8 + pulseOutside, mask.r)); col.a = mix(0.0, max(col.a, tintedCol.a), step(0.5 + pulseInside, mask.r)); gl_FragColor = col; }`, }; Demo: splash (example: ../xrblocks/demos/splash/PaintSplash.js) import * as THREE from 'three'; import {SimpleDecalGeometry} from 'xrblocks/addons/objects/SimpleDecalGeometry.js'; const ASSETS_PATH = 'https://cdn.jsdelivr.net/gh/xrblocks/assets@main/'; // Duration of fade out in ms. const kFadeoutMs = 2000; const textureLoader = new THREE.TextureLoader(); const decalDiffuse = textureLoader.load( './paintball_assets/decal-diffuse1.webp' ); decalDiffuse.colorSpace = THREE.SRGBColorSpace; // Sets the color space for the decal diffuse texture. const decalNormal = textureLoader.load('./paintball_assets/decal-normal1.webp'); let paintshotAudioBuffer; // Declares a variable to hold the audio buffer for // the paint shot sound. const audioLoader = new THREE.AudioLoader(); audioLoader.load( ASSETS_PATH + 'musicLibrary/PaintOneShot1.opus', function (buffer) { paintshotAudioBuffer = buffer; // Loads the paint shot audio and assigns it to the buffer. } ); /** * PaintSplash class represents a 3D object for the paintball decal, including * the visual decal and optional sound. */ export class PaintSplash extends THREE.Object3D { /** * @param {THREE.AudioListener} listener The audio listener for spatial audio. * @param {THREE.Color} color The color of the paintball. */ constructor(listener, color) { super(); // Adds positional audio to the paintball if a listener is provided. if (listener != null) { this.sound = new THREE.PositionalAudio(listener); } this.color = color; // Sets the paintball color. this.enableSound = true; this.splashList = []; } /** * Projects a splat onto a mesh from an intersection point, applying rotation * and scale. * @param {THREE.Intersection} intersection The intersection data. * @param {number} scale The scale of the splat. */ splatFromIntersection(intersection, scale) { const objectRotation = new THREE.Quaternion(); intersection.object.getWorldQuaternion(objectRotation); // Gets the world quaternion for rotation. // Clones and rotates the intersection normal to align it with the mesh's // orientation. const normal = intersection.normal.clone().applyQuaternion(objectRotation); const originalNormal = new THREE.Vector3(0, 0, 1); // The original normal to face. const angle = originalNormal.angleTo(normal); // Calculates the angle between the normals. // Rotates the original normal by the cross product and normalizes it. originalNormal.cross(normal).normalize(); // Applies a random rotation to the splat around the normal. const randomRotation = new THREE.Quaternion().setFromAxisAngle( normal, Math.random() * Math.PI * 2 ); // Rotates the splat to face the surface normal with a random rotation. const rotateFacingNormal = new THREE.Quaternion() .setFromAxisAngle(originalNormal, angle) .premultiply(randomRotation); // Projects the splat onto the mesh at the given position, orientation, and // scale. this.splatOnMesh( intersection.object, intersection.point, rotateFacingNormal, scale ); } /** * Creates and applies a decal on the mesh at the specified position with the * given orientation and scale. * @param {THREE.Mesh} mesh The mesh where the splat will be applied. 
* @param {THREE.Vector3} position The world position of the splat. * @param {THREE.Quaternion} orientation The rotation of the splat. * @param {number} scale The scale of the splat. */ splatOnMesh(mesh, position, orientation, scale) { // Creates a material for the decal using the specified color, textures, and // settings. const material = new THREE.MeshPhongMaterial({ color: this.color, specular: 0x555555, map: decalDiffuse, normalMap: decalNormal, normalScale: new THREE.Vector2(1, 1), shininess: 30, transparent: true, depthTest: true, polygonOffset: true, polygonOffsetFactor: 0, alphaTest: 0.5, opacity: 1.0, side: THREE.FrontSide, }); // Creates a scale vector for the decal geometry. const scaleVector3 = new THREE.Vector3(scale, scale, scale); // Generates a custom geometry for the decal using the SimpleDecalGeometry // class. const geometry = new SimpleDecalGeometry( mesh, position, orientation, scaleVector3 ); geometry.computeVertexNormals(); // Computes vertex normals for proper shading. // Creates a mesh for the decal and adds it to the scene. this.decalMesh = new THREE.Mesh(geometry, material); this.decalMesh.createdTime = performance.now(); this.add(this.decalMesh); // Plays the paint shot sound if the audio buffer is loaded and sound is // enabled. if ( this.enableSound && this.sound != null && paintshotAudioBuffer != null ) { this.sound.setBuffer(paintshotAudioBuffer); this.sound.setRefDistance(10); this.sound.play(); } } update() { const currentTime = performance.now(); // Iterate over all children of the Object3D instance this.children.forEach((child) => { // Ensure the child is a THREE.Mesh and has a 'createdTime' property if (child instanceof THREE.Mesh && child.createdTime !== undefined) { const timeElapsed = currentTime - child.createdTime; // Check if it's time to start fading out the mesh (after 2 seconds) if (timeElapsed > kFadeoutMs) { const timeSinceFadeStart = timeElapsed - kFadeoutMs; // Time since the start of fade // If within the fade duration, update opacity if (timeSinceFadeStart <= kFadeoutMs) { const newOpacity = 1.0 - timeSinceFadeStart / kFadeoutMs; child.material.opacity = Math.max(0.0, newOpacity); // Ensure opacity doesn't go below 0 child.material.transparent = true; // Ensure transparency is enabled } else { // If the fade duration has passed, remove the mesh from the scene this.remove(child); } } } }); } /** * Disposes of the paintball's decal mesh and its associated geometry and * material. */ dispose() { if (this.decalMesh) { this.decalMesh.geometry.dispose(); // Disposes of the geometry. this.decalMesh.material.dispose(); // Disposes of the material. 
} } } (example: ../xrblocks/demos/splash/SplashScript.js) import * as THREE from 'three'; import * as xb from 'xrblocks'; import {palette} from 'xrblocks/addons/utils/Palette.js'; import {BallShooter} from '../ballpit/BallShooter.js'; import {PaintSplash} from './PaintSplash.js'; const kLightX = xb.getUrlParamFloat('lightX', 0); const kLightY = xb.getUrlParamFloat('lightY', 500); const kLightZ = xb.getUrlParamFloat('lightZ', -10); const kBallsPerSecond = xb.getUrlParamFloat('ballsPerSecond', 10); const kVelocityScale = xb.getUrlParamFloat('velocityScale', 1.0); const kBallRadius = xb.getUrlParamFloat('ballRadius', 0.04); export class SplashScript extends xb.Script { constructor() { super(); this.ballShooter = new BallShooter({ numBalls: 100, radius: kBallRadius, palette: palette, }); this.add(this.ballShooter); this.lastBallTime = new Map(); this.raycaster = new THREE.Raycaster(); this.paintballs = []; this.listener = new THREE.AudioListener(); this.pointer = new THREE.Vector2(); this.velocity = new THREE.Vector3(); } init() { this.addLights(); xb.core.input.addReticles(); xb.showReticleOnDepthMesh(true); xb.core.camera.add(this.listener); } initPhysics(xrPhysics) { this.physics = xrPhysics; this.ballShooter.setupPhysics({ RAPIER: xrPhysics.RAPIER, blendedWorld: xrPhysics.blendedWorld, colliderActiveEvents: xrPhysics.RAPIER.ActiveEvents.CONTACT_FORCE_EVENTS, continuousCollisionDetection: true, }); } generateDecalAtIntersection(intersection) { const paintball = new PaintSplash( this.listener, palette.getRandomLiteGColor() ); const scaleMultiplier = 0.4; if (xb.core.depth.depthData.length > 0) { xb.core.depth.depthMesh.updateFullResolutionGeometry( xb.core.depth.depthData[0] ); } paintball.splatFromIntersection( intersection, /*scale=*/ xb.lerp(scaleMultiplier * 0.3, scaleMultiplier * 0.5, Math.random()) ); this.add(paintball); this.paintballs.push(paintball); } generateDecalFromCollision(position, direction, color) { const paintball = new PaintSplash(this.listener, color); const orientation = new THREE.Quaternion().setFromUnitVectors( new THREE.Vector3(0.0, 0.0, -1.0), direction ); const scaleMultiplier = 0.4; const scale = xb.lerp( scaleMultiplier * 0.3, scaleMultiplier * 0.5, Math.random() ); if (xb.core.depth.cpuDepthData.length > 0) { xb.core.depth.depthMesh.updateFullResolutionGeometry( xb.core.depth.cpuDepthData[0] ); } else if (xb.core.depth.gpuDepthData.length > 0) { xb.core.depth.depthMesh.updateFullResolutionGeometry( xb.core.depth.depthMesh.convertGPUToGPU(xb.core.depth.gpuDepthData[0]) ); } paintball.splatOnMesh( xb.core.depth.depthMesh, position, orientation, scale ); this.add(paintball); this.paintballs.push(paintball); } // Handles controller interactions for spawning and launching spheres. controllerUpdate(controller) { const now = performance.now(); if (!this.lastBallTime.has(controller)) { this.lastBallTime.set(controller, -99); } if ( controller.userData.selected && now - this.lastBallTime.get(controller) >= 1000 / kBallsPerSecond ) { // Place this 10 cm in front of the controller.
const newPosition = new THREE.Vector3(0.0, 0.0, -0.1) .applyQuaternion(controller.quaternion) .add(controller.position); this.velocity .set(0, 0, -5.0 * kVelocityScale) .applyQuaternion(controller.quaternion); this.ballShooter.spawnBallAt(newPosition, this.velocity); this.lastBallTime.set(controller, now); } } update() { for (let controller of xb.core.input.controllers) { this.controllerUpdate(controller); } for (let paintball of this.paintballs) { paintball.update(); } } physicsStep() { const contactPoint = new THREE.Vector3(); const forceDirection = new THREE.Vector3(); const ballShooter = this.ballShooter; this.physics.eventQueue.drainContactForceEvents((event) => { const handle1 = event.collider1(); const handle2 = event.collider2(); const depthMeshCollider = xb.core.depth.depthMesh.getColliderFromHandle(handle1) || xb.core.depth.depthMesh.getColliderFromHandle(handle2); const ballIndex = ballShooter.getIndexForColliderHandle(handle1) || ballShooter.getIndexForColliderHandle(handle2); let generatedDecal = false; if ( depthMeshCollider && ballIndex != null && ballShooter.isBallActive(ballIndex) ) { const ball = ballShooter.spheres[ballIndex]; this.physics.blendedWorld.contactPair( depthMeshCollider, ballShooter.colliders[ballIndex], (manifold, flipped) => { if (!generatedDecal && manifold.numSolverContacts() > 0) { contactPoint.copy(manifold.solverContactPoint(0)); forceDirection.copy(event.maxForceDirection()); this.generateDecalFromCollision( contactPoint, forceDirection, ball.material.color ); generatedDecal = true; } } ); ballShooter.removeBall(ballIndex); } }); ballShooter.physicsStep(); } // Adds hemisphere light for ambient lighting and directional light. addLights() { this.add(new THREE.HemisphereLight(0xbbbbbb, 0x888888, 3)); const light = new THREE.DirectionalLight(0xffffff, 2); light.position.set(kLightX, kLightY, kLightZ); light.castShadow = true; light.shadow.mapSize.width = 2048; // Default is usually 1024 light.shadow.mapSize.height = 2048; // Default is usually 1024 this.add(light); } onPointerUp(event) { this.mouseReticle.setPressed(false); } } (example: ../xrblocks/demos/splash/index.html) Splash
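Note: the decal lifetime logic in PaintSplash.update() above is a two-phase timeline: a decal stays fully opaque for kFadeoutMs (2000 ms), then fades linearly to zero over another kFadeoutMs, after which its mesh is removed. A minimal sketch of that curve as a pure function, assuming the same constant; the helper name decalOpacityAt is ours for illustration, not an SDK API:

// Mirrors the timeline in PaintSplash.update(): returns the opacity for a
// decal of the given age in milliseconds, or null once it should be removed.
function decalOpacityAt(ageMs, fadeoutMs = 2000) {
  if (ageMs <= fadeoutMs) return 1.0; // Phase 1: fully opaque.
  const sinceFadeStart = ageMs - fadeoutMs;
  if (sinceFadeStart <= fadeoutMs) {
    // Phase 2: linear fade from 1 to 0, clamped at 0.
    return Math.max(0.0, 1.0 - sinceFadeStart / fadeoutMs);
  }
  return null; // Phase 3: remove the mesh from the scene.
}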
(example: ../xrblocks/demos/splash/main.js) import 'xrblocks/addons/simulator/SimulatorAddons.js'; import RAPIER from '@dimforge/rapier3d-simd-compat'; import * as xb from 'xrblocks'; import {SplashScript} from './SplashScript.js'; const depthMeshColliderUpdateFps = xb.getUrlParamFloat( 'depthMeshColliderUpdateFps', 30 ); const splashScript = new SplashScript(); let options = new xb.Options(); options.depth = new xb.DepthOptions(xb.xrDepthMeshPhysicsOptions); options.depth.depthMesh.colliderUpdateFps = depthMeshColliderUpdateFps; options.xrButton = { ...options.xrButton, startText: ' MAKE A MESS', endText: ' MISSION COMPLETE', }; options.physics.RAPIER = RAPIER; options.physics.useEventQueue = true; // Initializes the scene, camera, xrRenderer, controls, and XR button. async function start() { xb.add(splashScript); await xb.init(options); } document.addEventListener('DOMContentLoaded', function () { start(); }); Demo: xremoji (example: ../xrblocks/demos/xremoji/AnimationHandler.js) import * as THREE from 'three'; import * as xb from 'xrblocks'; // // Unique name for objects // let globalIndex = 0; class AnimationItemView { constructor(options, objectView, modelView, isDebug = false) { this.options = {...options}; this.objectView = objectView; this.modelView = modelView; this.isDebug = isDebug; this.isReady = false; this.isPlaying = false; this.animationStartTime = performance.now(); this.enableDebugIfNeeded(); } enableDebugIfNeeded() { if (!this.isDebug) { return; } this.uniqueId = globalIndex++; this.name = 'GLTF_MODEL_VIEW_' + this.uniqueId; // // Used to measure model load time // console.time(this.name); // disable auto-start for animation // name for debug messages this.modelView.name = 'ModelViewer-' + this.uniqueId; // Init ObjectViewer (wrapper for the ModelViewer) // objectView.visible = false; this.objectView.name = 'ObjectView-' + this.uniqueId; } printItemInfo() { if (!this.isDebug) return; this.printInfo(this.modelView); this.printInfo(this.objectView); } printInfo(model) { if (!model) { console.warn('Model is null or undefined.'); return; } console.log(`--- Model Information: ${model.name || 'Unnamed Model'} ---`); // Position console.log( 'Position:', model.position.x.toFixed(2), model.position.y.toFixed(2), model.position.z.toFixed(2) ); // Scale console.log( 'Scale:', model.scale.x.toFixed(5), model.scale.y.toFixed(5), model.scale.z.toFixed(5) ); // Size (Bounding Box) const boundingBox = new THREE.Box3().setFromObject(model); const size = new THREE.Vector3(); boundingBox.getSize(size); console.log( 'Size (Width x Height x Depth):', `${size.x.toFixed(2)} x ${size.y.toFixed(2)} x ${size.z.toFixed(2)}` ); console.log('------------------------------------------'); } onSceneReady(data, scene) { this.isReady = true; // // Note: Pay attention that scale will be applied to position too // if (data.position) { this.modelView.position.copy(data.position); } // // Set 0 position for the scene // const scene_position = data.model.position || {x: 0, y: 0, z: 0}; scene.position.copy(scene_position); // // Debug: print the model load time // if (this.isDebug) console.timeEnd(this.name); } // // Updates bound options // updateOptions(newOptions) { this.options = {...newOptions}; } } export class AnimationHandler { constructor(data, isDebug = false) { this.data = data; this.isDebug = isDebug; this.animationViews = []; } init(core, panel, options = {}) { this.loadGltfModels(panel, this.data, options); for (let i = 0; i < this.animationViews.length; ++i) { const item =
this.animationViews[i]; core.scene.add(item.modelView); } } loadGltfModels(panel, data, options) { for (let i = 0; i < data.length; i++) { if (data[i].model) { // // Init model viewer // let objectView = new xb.View(); // objectView.visible = false; const model = new xb.ModelViewer({}); model.visible = false; model.startAnimationOnLoad = false; const animationItem = new AnimationItemView( options, objectView, model, this.isDebug ); this.animationViews.push(animationItem); // Load model model.loadGLTFModel({ data: data[i].model, setupRaycastCylinder: false, setupRaycastBox: true, renderer: xb.core.renderer, setupPlatform: false, onSceneLoaded: (scene) => { animationItem.onSceneReady(data[i], scene); }, }); // Make a scene hierarchy objectView.add(model); panel.add(objectView); } } panel.updateLayouts(); } onBeforePlay() { // Empty } onBeforeUpdate() {} play(playbackTime = 1800) { if (this.isPlaying) { return; } this.isPlaying = true; this.animationDuration = playbackTime; // Override this method if needed this.onBeforePlay(); for (let i = 0; i < this.animationViews.length; ++i) { const item = this.animationViews[i]; // Assign core to model item.modelView.playClipAnimationOnce(); // Print position + size item.printItemInfo(); } // Make all objects visible this.setVisibility(true); // // NOTE: Increase timeout to hide animation later // Entire animation ~6 seconds // setTimeout(() => { this.setVisibility(false); setTimeout(() => { this.isPlaying = false; }, 150); }, playbackTime); } setVisibility(visible) { for (let i = 0; i < this.animationViews.length; ++i) { const item = this.animationViews[i]; item.modelView.visible = visible; } } } (example: ../xrblocks/demos/xremoji/BallloonsAnimationHandler.js) import {AnimationHandler} from './AnimationHandler.js'; const defaultOptions = { oscillationAmplitude: 0.2, oscillationFrequencyX: 0.2, // Oscillations per second for X oscillationFrequencyZ: 0.3, // Oscillations per second for Z }; export class BalloonsAnimationHandler extends AnimationHandler { constructor(data, isDebug) { super(data, isDebug); this.animationStartTime = performance.now(); this.animationDuration = 3000; // Default = 3 seconds this.totalVerticalDistance = 0.15; } init(core, panel, options = {}) { super.init(core, panel, {...defaultOptions, ...options}); } onBeforePlay() { this.animationStartTime = performance.now(); for (let i = 0; i < this.data.length; ++i) { let original = this.data[i].position; if (original) { this.animationViews[i].modelView.position.copy({ x: original.x + (Math.random() - 0.5), y: original.y + (Math.random() - 0.5), z: original.z + (Math.random() - 0.5), }); } // Update bound data this.animationViews[i].updateOptions({ oscillationAmplitude: defaultOptions.oscillationAmplitude + (Math.random() - 0.5) * 0.2, oscillationFrequencyX: defaultOptions.oscillationFrequencyX + (Math.random() - 0.5) * 0.2, oscillationFrequencyZ: defaultOptions.oscillationFrequencyZ + (Math.random() - 0.5) * 0.2, }); } } onBeforeUpdate() { const currentTime = performance.now(); const elapsedTime = currentTime - this.animationStartTime; const progress = Math.min(1, elapsedTime / this.animationDuration); // Normalized progress (0 to 1) this.animationViews.forEach((viewData) => { const mv = viewData.modelView; const startY = mv.position.y || 0; // Use current Y if animation restarts const opt = viewData.options; // Vertical movement mv.position.y = startY + this.totalVerticalDistance * progress; // Horizontal oscillation const time = currentTime *
0.001; // Convert milliseconds to seconds mv.position.x += Math.sin(time * opt.oscillationFrequencyX * Math.PI * 2) * opt.oscillationAmplitude * progress; mv.position.z += Math.cos(time * opt.oscillationFrequencyZ * Math.PI * 2) * opt.oscillationAmplitude * progress; }); } } (example: ../xrblocks/demos/xremoji/GestureDetectionHandler.js) // TensorflowJS + WebGpu backend import * as tf from '@tensorflow/tfjs'; import {WebGPUBackend} from '@tensorflow/tfjs-backend-webgpu'; // LiteRt-eap import {loadLiteRt, setWebGpuDevice} from '@litertjs/core'; import {runWithTfjsTensors} from '@litertjs/tfjs-interop'; // // Constants for a custom gesture model // const HAND_JOINT_COUNT = 25; const HAND_JOINT_IDX_CONNECTION_MAP = [ [1, 2], [2, 3], [3, 4], // Thumb has 3 bones [5, 6], [6, 7], [7, 8], [8, 9], // Index finger has 4 bones [10, 11], [11, 12], [12, 13], [13, 14], // Middle finger has 4 bones [15, 16], [16, 17], [17, 18], [18, 19], // Ring finger has 4 bones [20, 21], [21, 22], [22, 23], [23, 24], // Little finger has 4 bones ]; const HAND_BONE_IDX_CONNECTION_MAP = [ [0, 1], [1, 2], // Thumb has 2 angles [3, 4], [4, 5], [5, 6], // Index finger has 3 angles [7, 8], [8, 9], [9, 10], // Middle finger has 3 angles [11, 12], [12, 13], [13, 14], // Ring finger has 3 angles [15, 16], [16, 17], [17, 18], // Little finger has 3 angles ]; const UNKNOWN_GESTURE = 7; class LatestTaskQueue { constructor() { this.latestTask = null; this.isProcessing = false; } enqueue(task) { if (typeof task !== 'function') { console.error('Task must be a function.'); return; } this.latestTask = task; if (!this.isProcessing) { this.processLatestTask(); } } processLatestTask() { if (this.latestTask) { this.isProcessing = true; const taskToProcess = this.latestTask; this.latestTask = null; // Clear the reference immediately // Execute the task asynchronously using setTimeout (or queueMicrotask) setTimeout(async () => { try { await taskToProcess(); // If the task is async } catch (error) { console.error('Error processing latest task:', error); } finally { this.isProcessing = false; if (this.latestTask) { this.processLatestTask(); } } }, 0); // Delay of 0ms puts it in the event queue } } getSize() { return this.latestTask === null ? 
0 : 1; } isEmpty() { return this.latestTask === null; } } export class GestureDetectionHandler { constructor() { // model this.modelPath = './custom_gestures_model.tflite'; this.modelState = 'None'; // // left and right hand queues this.queue = []; this.queue.push(new LatestTaskQueue()); this.queue.push(new LatestTaskQueue()); setTimeout(() => { this.setBackendAndLoadModel(); }, 1); } // // set TF.js backend and load tflite model // async setBackendAndLoadModel() { this.modelState = 'Loading'; try { await tf.setBackend('webgpu'); await tf.ready(); // Initialize LiteRT.js's WASM files const wasmPath = 'https://unpkg.com/@litertjs/core@0.2.1/wasm/'; const liteRt = await loadLiteRt(wasmPath); // Make LiteRt use the same GPU device as TFJS (for tensor conversion) const backend = tf.backend(); // as WebGPUBackend; setWebGpuDevice(backend.device); // Load model via LiteRt await this.loadModel(liteRt); if (this.model) { // print model details to the log console.log('MODEL DETAILS: ', this.model.getInputDetails()); } this.modelState = 'Ready'; } catch (error) { console.error('Failed to load model or backend:', error); } } async loadModel(liteRt) { try { this.model = await liteRt.loadAndCompile(this.modelPath, { // Currently, only 'webgpu' is supported accelerator: 'webgpu', }); } catch (error) { this.model = null; console.error('Error loading model:', error); } } calculateRelativeHandBoneAngles(jointPositions) { // Reshape jointPositions let jointPositionsReshaped = []; jointPositionsReshaped = jointPositions.reshape([HAND_JOINT_COUNT, 3]); // Calculate bone vectors const boneVectors = []; HAND_JOINT_IDX_CONNECTION_MAP.forEach(([joint1, joint2]) => { const boneVector = jointPositionsReshaped .slice([joint2, 0], [1, 3]) .sub(jointPositionsReshaped.slice([joint1, 0], [1, 3])) .squeeze(); const norm = boneVector.norm(); const normalizedBoneVector = boneVector.div(norm); boneVectors.push(normalizedBoneVector); }); // Calculate relative hand bone angles const relativeHandBoneAngles = []; HAND_BONE_IDX_CONNECTION_MAP.forEach(([bone1, bone2]) => { const angle = boneVectors[bone1].dot(boneVectors[bone2]); relativeHandBoneAngles.push(angle); }); // Stack the angles into a tensor return tf.stack(relativeHandBoneAngles); } async detectGesture(handJoints) { if (!this.model || !handJoints || handJoints.length !== 25 * 3) { console.log('Invalid hand joints or model load error.'); return UNKNOWN_GESTURE; } try { const tensor = this.calculateRelativeHandBoneAngles( tf.tensor1d(handJoints) ); let tensorReshaped = tensor.reshape([ 1, HAND_BONE_IDX_CONNECTION_MAP.length, 1, ]); var result = -1; result = runWithTfjsTensors(this.model, tensorReshaped); let integerLabel = result[0].as1D().arraySync(); if (integerLabel.length == 7) { let x = integerLabel[0]; let idx = 0; for (let t = 0; t < 7; ++t) { if (integerLabel[t] > x) { idx = t; x = integerLabel[t]; } } return idx; } } catch (error) { console.error('Error:', error); } return UNKNOWN_GESTURE; } // // Clone joints and post queue task // postTask(joints, handIndex) { if (Object.keys(joints).length !== 25) { return UNKNOWN_GESTURE; } let handJointPositions = []; for (const i in joints) { handJointPositions.push(joints[i].position.x); handJointPositions.push(joints[i].position.y); handJointPositions.push(joints[i].position.z); } if (handJointPositions.length !== 25 * 3) { return UNKNOWN_GESTURE; } if (handIndex >= 0 && handIndex < this.queue.length) { this.queue[handIndex].enqueue(async () => { let result = await this.detectGesture(handJointPositions); // // Check if 
thumb is up // if (result === 2 && !this.isThumbUp(handJointPositions, 2, 3, 4)) { result = 0; } if (this.observer && this.observer.onGestureDetected) { this.observer.onGestureDetected(handIndex, result); } }); } } isThumbUp(d1, p1, p2, p3) { return this.isThumbUpSimple(d1, p1, p3); } isThumbUpAdvanced(data, p1, p2, p3) { // Assuming p1 is the base of the thumb, p2 is the knuckle, and p3 is the tip. // Vector from base to knuckle const v1 = { x: data[p2 * 3] - data[p1 * 3], y: data[p2 * 3 + 1] - data[p1 * 3 + 1], z: data[p2 * 3 + 2] - data[p1 * 3 + 2], }; // Vector from knuckle to tip const v2 = { x: data[p3 * 3] - data[p2 * 3], y: data[p3 * 3 + 1] - data[p2 * 3 + 1], z: data[p3 * 3 + 2] - data[p2 * 3 + 2], }; // Calculate the angle between the two vectors using the dot product const dotProduct = v1.x * v2.x + v1.y * v2.y + v1.z * v2.z; // Calculate the magnitudes (lengths) of the vectors const magnitudeV1 = Math.sqrt(v1.x * v1.x + v1.y * v1.y + v1.z * v1.z); const magnitudeV2 = Math.sqrt(v2.x * v2.x + v2.y * v2.y + v2.z * v2.z); // Avoid division by zero if either vector has zero length if (magnitudeV1 === 0 || magnitudeV2 === 0) { return false; // Cannot determine angle if segments have zero length } // Calculate the cosine of the angle const cosAngle = dotProduct / (magnitudeV1 * magnitudeV2); // Get the angle in radians const angleRadians = Math.acos(Math.max(-1, Math.min(1, cosAngle))); // Clamp to handle potential floating-point errors // Convert the angle to degrees const angleDegrees = angleRadians * (180 / Math.PI); // Define a threshold angle for what we consider "thumb up". // This value might need adjustment based on your specific application and data. const thumbUpThreshold = 90; // Example: If the angle is greater than 90 degrees, it's considered "up" // In a typical "thumb up" gesture, the angle between the base-knuckle segment // and the knuckle-tip segment would be relatively straight or even slightly bent backward. // Therefore, we are looking for an angle close to 180 degrees or greater than 90. return angleDegrees > thumbUpThreshold; } isThumbUpSimple(data, p1, p2) { // Assuming p1 is the base of the thumb and p2 is the tip. // Vector from base to tip const vector = { x: data[p2 * 3] - data[p1 * 3], y: data[p2 * 3 + 1] - data[p1 * 3 + 1], z: data[p2 * 3 + 2] - data[p1 * 3 + 2], }; // Calculate the magnitude of the vector const magnitude = Math.sqrt( vector.x * vector.x + vector.y * vector.y + vector.z * vector.z ); // If the magnitude is very small, it's likely not a significant gesture if (magnitude < 0.001) { return false; } // Normalize the vector to get its direction const normalizedVector = { x: vector.x / magnitude, y: vector.y / magnitude, z: vector.z / magnitude, }; // Define the "up" direction vector (positive Y-axis) const upVector = {x: 0, y: 1, z: 0}; // Calculate the dot product between the normalized thumb vector and the up vector const dotProduct = normalizedVector.x * upVector.x + normalizedVector.y * upVector.y + normalizedVector.z * upVector.z; // The dot product of two normalized vectors is equal to the cosine of the angle between them. // An angle of 45 degrees has a cosine of approximately Math.cos(Math.PI / 4) or ~0.707. // We want the angle to be within 45 degrees of the vertical "up" direction. // This means the cosine of the angle should be greater than or equal to cos(45 degrees). 
const cos45Degrees = Math.cos((45 * Math.PI) / 180); // Approximately 0.707 return dotProduct >= cos45Degrees; } registerObserver(observer) { this.observer = observer; } } (example: ../xrblocks/demos/xremoji/MeetEmoji.js) import * as THREE from 'three'; import * as xb from 'xrblocks'; import {AnimationHandler} from './AnimationHandler.js'; import {BalloonsAnimationHandler} from './BallloonsAnimationHandler.js'; import {GestureDetectionHandler} from './GestureDetectionHandler.js'; const LEFT_HAND_INDEX = 0; const RIGHT_HAND_INDEX = 1; const PROPRIETARY_ASSETS_PATH = 'https://cdn.jsdelivr.net/gh/xrblocks/proprietary-assets@main/'; const BALLOONS_MODELS = [ { model: { scale: {x: 1, y: 1, z: 1}, rotation: {x: 0, y: 0, z: 0}, path: PROPRIETARY_ASSETS_PATH + 'balloons/', model: 'scene.gltf', verticallyAlignObject: true, }, position: {x: 4, y: -1.2, z: -5}, }, { model: { scale: {x: 1, y: 1, z: 1}, rotation: {x: 0, y: 0, z: 0}, path: PROPRIETARY_ASSETS_PATH + 'balloons/', model: 'scene.gltf', verticallyAlignObject: true, }, position: {x: 0, y: -1, z: -5}, }, { model: { scale: {x: 1, y: 1, z: 1}, rotation: {x: 0, y: 0, z: 0}, path: PROPRIETARY_ASSETS_PATH + 'balloons/', model: 'scene.gltf', verticallyAlignObject: true, }, position: {x: -4, y: -1.2, z: -5}, }, ]; const VICTORY_MODELS = [ { model: { scale: {x: 0.05, y: 0.05, z: 0.05}, rotation: {x: 0, y: 0, z: 0}, scene_position: {x: 0, y: 0, z: 0}, path: PROPRIETARY_ASSETS_PATH + 'Confetti/', model: 'scene.gltf', verticallyAlignObject: true, }, position: {x: 34, y: -15, z: 15}, }, { model: { scale: {x: 0.05, y: 0.05, z: 0.05}, rotation: {x: 0, y: 0, z: 0}, scene_position: {x: 0, y: 0, z: 0}, path: PROPRIETARY_ASSETS_PATH + 'Confetti/', model: 'scene.gltf', verticallyAlignObject: true, }, position: {x: -34, y: -15, z: 15}, }, ]; export class MeetEmoji extends xb.Script { constructor() { super(); // Loads data. this.handGesture = [0, 0]; this.playBalloonsOnUpdate = false; this.playConfettiOnUpdate = false; // // Initializes UI. 
// { // Make a root panel>grid>row>controlPanel>grid const panel = new xb.SpatialPanel({ backgroundColor: '#00000000', useDefaultPosition: false, showEdge: false, }); panel.scale.set(panel.width, panel.height, 1); panel.isRoot = true; this.add(panel); const grid = panel.addGrid(); // Add blank space on top of the ctrlPanel grid.addRow({weight: 0.4}); // Space for orbiter grid.addRow({weight: 0.1}); // control row const controlRow = grid.addRow({weight: 0.5}); const ctrlPanel = controlRow.addPanel({backgroundColor: '#000000bb'}); const ctrlGrid = ctrlPanel.addGrid(); { // middle column const midColumn = ctrlGrid.addCol({weight: 0.9}); // top indentation midColumn.addRow({weight: 0.3}); const gesturesRow = midColumn.addRow({weight: 0.4}); // left indentation gesturesRow.addCol({weight: 0.05}); const textCol = gesturesRow.addCol({weight: 1.0}); textCol.addRow({weight: 1.0}).addText({ text: 'Give the victory or thumbs-up gestures a try!', fontColor: '#ffffff', fontSize: 0.05, }); // right indentation gesturesRow.addCol({weight: 0.01}); // bottom indentation midColumn.addRow({weight: 0.1}); } const orbiter = ctrlGrid.addOrbiter(); orbiter.addExitButton(); panel.updateLayouts(); this.panel = panel; // Animated models this.victoryHandler = new AnimationHandler(VICTORY_MODELS); this.balloonsHandler = new BalloonsAnimationHandler(BALLOONS_MODELS); // Gesture detector this.gestureDetectionHandler = new GestureDetectionHandler(); this.gestureDetectionHandler.registerObserver(this); } this.frameId = 0; } onGestureDetected(handIndex, result) { if (this.handGesture[handIndex] !== result) { if (result === 4) { if (!this.victoryHandler.isPlaying) this.playConfettiOnUpdate = true; } else if (result === 2) { if (!this.balloonsHandler.isPlaying) this.playBalloonsOnUpdate = true; } // Stub for gesture completion handler if ( this.handGesture[handIndex] === 4 || this.handGesture[handIndex] === 2 ) { this.onGestureStopped(handIndex, this.handGesture[handIndex]); } this.handGesture[handIndex] = result; } } onGestureStopped(handIndex, gestureIndex) { // TODO: we could hide animation on gesture stop } /** * Initializes the MeetEmoji script. */ init() { xb.core.renderer.localClippingEnabled = true; this.add(new THREE.HemisphereLight(0x888877, 0x777788, 3)); const light = new THREE.DirectionalLight(0xffffff, 5.0); light.position.set(-0.5, 4, 1.0); this.add(light); this.panel.position.set(0, 1.2, -1.0); this.victoryHandler.init(xb.core, this.panel); this.balloonsHandler.init(xb.core, this.panel); } /** * Unused stub; called when select starts. * @param {XRInputSourceEvent} event */ onSelectStart(event) {} /** * Unused stub; called every frame during selection. * @param {number} id The controller id.
*/ onSelecting(id) {} async update() { if (this.playConfettiOnUpdate) { this.victoryHandler.play(3000); this.playConfettiOnUpdate = false; } if (this.playBalloonsOnUpdate) { this.balloonsHandler.play(3000); this.playBalloonsOnUpdate = false; } if (this.balloonsHandler) { this.balloonsHandler.onBeforeUpdate(); } // Run gesture detection on every fifth frame (~12 times per second at // 60 fps; typical WebXR apps render at 30-60 fps). if (this.frameId % 5 === 0) { const hands = xb.core.user.hands; if (hands != null && hands.hands && hands.hands.length == 2) { this.gestureDetectionHandler.postTask( hands.hands[LEFT_HAND_INDEX].joints, LEFT_HAND_INDEX ); this.gestureDetectionHandler.postTask( hands.hands[RIGHT_HAND_INDEX].joints, RIGHT_HAND_INDEX ); } } this.frameId++; } } (example: ../xrblocks/demos/xremoji/index.html) Meet emoji (example: ../xrblocks/demos/xremoji/main.js) import 'xrblocks/addons/simulator/SimulatorAddons.js'; import * as xb from 'xrblocks'; import {MeetEmoji} from './MeetEmoji.js'; const options = new xb.Options({ antialias: true, reticles: {enabled: true}, visualizeRays: false, hands: {enabled: true, visualization: true}, simulator: {defaultMode: xb.SimulatorMode.POSE}, }); async function start() { xb.add(new MeetEmoji()); options.setAppTitle('XR Emoji'); await xb.init(options); } document.addEventListener('DOMContentLoaded', function () { setTimeout(function () { start(); }, 200); }); Demo: xrpoet (example: ../xrblocks/demos/xrpoet/PoemGenerator.js) import * as xb from 'xrblocks'; export class PoemGenerator extends xb.Script { constructor() { super(); this.panel = null; this.isProcessing = false; this.responseDisplay = null; } init() { this.ai = xb.core.ai; this.deviceCamera = xb.core.deviceCamera; this.createPanel(); } createPanel() { this.panel = new xb.SpatialPanel({ width: 2.0, height: 1.25, backgroundColor: '#1a1a1abb', }); this.panel.position.set(0, 1.6, -2); this.add(this.panel); const grid = this.panel.addGrid(); const responseRow = grid.addRow({weight: 0.9}); this.responseDisplay = new xb.ScrollingTroikaTextView({ text: '', fontSize: 0.03, }); responseRow.add(this.responseDisplay); const buttonRow = grid.addRow({weight: 0.2}); const videoRow = grid.addRow({weight: 0.7}); this.videoView = new xb.VideoView({ width: 1.0, height: 1.0, mode: 'stretch', }); videoRow.add(this.videoView); const buttonPanel = buttonRow.addPanel({ backgroundColor: '#00000000', showEdge: false, }); buttonPanel.addGrid().addIconButton({ text: 'photo_camera', fontSize: 0.6, backgroundColor: '#FFFFFF', defaultOpacity: 0.2, hoverOpacity: 0.8, selectedOpacity: 1.0, }).onTriggered = () => this.captureAndGeneratePoem(); if (this.deviceCamera) { this.videoView.load(this.deviceCamera); } } async captureAndGeneratePoem() { if (this.isProcessing || !this.ai?.isAvailable()) return; this.isProcessing = true; const snapshot = this.deviceCamera.getSnapshot({outputFormat: 'base64'}); if (!snapshot) { this.isProcessing = false; throw new Error('Failed to capture video snapshot.'); } const {strippedBase64, mimeType} = xb.parseBase64DataURL(snapshot); const image = {inlineData: {mimeType: mimeType, data: strippedBase64}}; const question = 'Can you write a 12-line, lighthearted poem about what you see?'; const parts = [image, {text: question}]; try { const response = await this.ai.query({ type: 'multiPart', parts: parts, }); this.responseDisplay.addText(`${response.text}\n\n`); } catch (error) { this.responseDisplay.addText(`Error: ${error.message}\n\n`); } this.isProcessing = false; } } (example: ../xrblocks/demos/xrpoet/index.html) Gemini Poem Generator
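Note: captureAndGeneratePoem() above is the core camera-to-AI round trip in XR Blocks: grab a base64 frame from the device camera, strip the data-URL prefix, and send it together with a text prompt as a multiPart query. A minimal sketch of that flow as a standalone helper, using only the xb.* calls the demo itself uses; the function name and the promptText parameter are ours for illustration:

// Captures one camera frame and asks the AI model about it.
async function describeSnapshot(promptText) {
  const snapshot = xb.core.deviceCamera.getSnapshot({outputFormat: 'base64'});
  if (!snapshot) throw new Error('Failed to capture video snapshot.');
  // Splits "data:image/png;base64,..." into its MIME type and raw payload.
  const {strippedBase64, mimeType} = xb.parseBase64DataURL(snapshot);
  const parts = [
    {inlineData: {mimeType: mimeType, data: strippedBase64}}, // camera frame
    {text: promptText}, // the instruction
  ];
  const response = await xb.core.ai.query({type: 'multiPart', parts: parts});
  return response.text;
}

For example, await describeSnapshot('List the objects on my desk.') resolves to plain text, which PoemGenerator appends to its ScrollingTroikaTextView.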
(example: ../xrblocks/demos/xrpoet/main.js) import 'xrblocks/addons/simulator/SimulatorAddons.js'; import * as xb from 'xrblocks'; import {PoemGenerator} from './PoemGenerator.js'; const options = new xb.Options(); options.enableUI(); options.enableAI(); options.enableCamera(); options.setAppTitle('XR Poet'); function start() { try { xb.init(options); xb.add(new PoemGenerator()); } catch (error) { console.error('Failed to initialize XR app:', error); } } document.addEventListener('DOMContentLoaded', function () { start(); }); /** * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * @file xrblocks.js * @version v0.8.0 * @commitid 56b3711 * @builddate 2026-01-17T01:07:26.366Z * @description XR Blocks SDK, built from source with the above commit ID. * @agent When using with Gemini to create XR apps, use **Gemini Canvas** mode, * and follow rules below: * 1. Include the following importmap for maximum compatibility: "three": "https://cdn.jsdelivr.net/npm/three@0.182.0/build/three.module.js", "three/addons/": "https://cdn.jsdelivr.net/npm/three@0.182.0/examples/jsm/", "troika-three-text": "https://cdn.jsdelivr.net/gh/protectwise/troika@028b81cf308f0f22e5aa8e78196be56ec1997af5/packages/troika-three-text/src/index.js", "troika-three-utils": "https://cdn.jsdelivr.net/gh/protectwise/troika@v0.52.4/packages/troika-three-utils/src/index.js", "troika-worker-utils": "https://cdn.jsdelivr.net/gh/protectwise/troika@v0.52.4/packages/troika-worker-utils/src/index.js", "bidi-js": "https://esm.sh/bidi-js@%5E1.0.2?target=es2022", "webgl-sdf-generator": "https://esm.sh/webgl-sdf-generator@1.1.1/es2022/webgl-sdf-generator.mjs", "lit": "https://cdn.jsdelivr.net/gh/lit/dist@3/core/lit-core.min.js", "lit/": "https://esm.run/lit@3/", 2. If the app focuses on standalone objects, spawn them in front of the user in WebXR and rescale them to a reasonable physical size. Wrap them with xb.ModelViewer and make sure users can drag the platform to move them around in XR. 3. When rendering a large scene, remember users may navigate the scene in physical world space; also add locomotion methods like pinch-to-teleport. 4. Do not hallucinate model files --- either use public high-quality assets, generate objects from primitive shapes, or use vox formats for voxels or LEGO styles. */ import * as THREE from 'three'; import { Pass, FullScreenQuad } from 'three/addons/postprocessing/Pass.js'; import { XRControllerModelFactory } from 'three/addons/webxr/XRControllerModelFactory.js'; import { XRHandModelFactory } from 'three/addons/webxr/XRHandModelFactory.js'; import { XREstimatedLight } from 'three/addons/webxr/XREstimatedLight.js'; import { GLTFLoader } from 'three/addons/loaders/GLTFLoader.js'; import { SVGLoader } from 'three/addons/loaders/SVGLoader.js'; import { DRACOLoader } from 'three/addons/loaders/DRACOLoader.js'; import { KTX2Loader } from 'three/addons/loaders/KTX2Loader.js'; import * as BufferGeometryUtils from 'three/addons/utils/BufferGeometryUtils.js'; /** * Builds the context to be sent to the AI for reasoning.
*/ class Context { constructor(instructions = 'You are a helpful assistant.') { this.instructions = instructions; } get instruction() { return this.instructions; } /** * Constructs a formatted prompt from memory and available tools. * @param memory - The agent's memory. * @param tools - The list of available tools. * @returns A string representing the full context for the AI. */ build(memory, tools) { const history = memory.getShortTerm(); const formattedHistory = history .map((entry) => this.formatEntry(entry)) .join('\n'); const toolDescriptions = tools .map((tool) => `- ${tool.name}: ${tool.description}`) .join('\n'); return `${this.instructions} You have access to the following tools: ${toolDescriptions} Current Conversation history: ${formattedHistory}. You should reply to the user or call a tool as needed.`; } formatEntry(entry) { switch (entry.role) { case 'user': return `User: ${entry.content}`; case 'ai': return `AI: ${entry.content}`; case 'tool': return `Tool Output: ${entry.content}`; } } } /** * Manages the agent's memory, including short-term, long-term, and working * memory. */ class Memory { constructor() { this.shortTermMemory = []; } /** * Adds a new entry to the short-term memory. * @param entry - The memory entry to add. */ addShortTerm(entry) { this.shortTermMemory.push(entry); } /** * Retrieves the short-term memory. * @returns An array of all short-term memory entries. */ getShortTerm() { return [...this.shortTermMemory]; } /** * Clears all memory components. */ clear() { this.shortTermMemory.length = 0; } } /** * An agent that can use an AI to reason and execute tools. */ class Agent { static { this.dependencies = {}; } constructor(ai, tools = [], instruction = '', callbacks) { this.isSessionActive = false; this.ai = ai; this.tools = tools; this.memory = new Memory(); this.contextBuilder = new Context(instruction); this.lifecycleCallbacks = callbacks; } /** * Starts the agent's reasoning loop with an initial prompt. * @param prompt - The initial prompt from the user. * @returns The final text response from the agent. */ async start(prompt) { this.memory.addShortTerm({ role: 'user', content: prompt }); if (!this.ai.isAvailable()) { await this.ai.init({ aiOptions: this.ai.options }); } return this.run(); } /** * The main reasoning and action loop of the agent for non-live mode. * It repeatedly builds context, queries the AI, and executes tools * until a final text response is generated. */ async run() { while (true) { const context = this.contextBuilder.build(this.memory, this.tools); const response = await this.ai.model.query({ type: 'text', text: context }, this.tools); this.memory.addShortTerm({ role: 'ai', content: JSON.stringify(response) }); if (response?.toolCall) { console.log(`Executing tool: ${response.toolCall.name}`); const tool = this.findTool(response.toolCall.name); if (tool) { const result = await tool.execute(response.toolCall.args); this.memory.addShortTerm({ role: 'tool', content: JSON.stringify(result), }); } else { const errorMsg = `Error: Tool "${response.toolCall.name}" not found.`; console.error(errorMsg); this.memory.addShortTerm({ role: 'tool', content: errorMsg }); } } else if (response?.text) { console.log(`Final Response: ${response.text}`); return response.text; } else { const finalResponse = 'The AI did not provide a valid response.'; console.error(finalResponse); return finalResponse; } } } findTool(name) { return this.tools.find((tool) => tool.name === name); } /** * Get the current session state. 
* @returns Object containing session information */ getSessionState() { return { isActive: this.isSessionActive, toolCount: this.tools.length, memorySize: this.memory.getShortTerm?.()?.length || 0, }; } } /** * A base class for tools that the agent can use. */ class Tool { /** * @param options - The options for the tool. */ constructor(options) { this.name = options.name; this.description = options.description; this.parameters = options.parameters || {}; this.onTriggered = options.onTriggered; this.behavior = options.behavior; } /** * Executes the tool's action with standardized error handling. * @param args - The arguments for the tool. * @returns A promise that resolves with a ToolResult containing success/error information. */ async execute(args) { try { if (this.onTriggered) { const result = await Promise.resolve(this.onTriggered(args)); return { success: true, data: result, metadata: { executedAt: Date.now(), toolName: this.name }, }; } throw new Error('The execute method must be implemented by a subclass or onTriggered must be provided.'); } catch (error) { return { success: false, error: error instanceof Error ? error.message : String(error), metadata: { executedAt: Date.now(), toolName: this.name }, }; } } /** * Returns a JSON representation of the tool. * @returns A valid FunctionDeclaration object. */ toJSON() { const result = { name: this.name }; if (this.description) { result.description = this.description; } if (this.parameters) { result.parameters = this.parameters; } if (this.behavior) { result.behavior = this.behavior; } return result; } } /** * A tool that generates a 360-degree equirectangular skybox image * based on a given prompt using an AI service. */ class GenerateSkyboxTool extends Tool { constructor(ai, scene) { super({ name: 'generateSkybox', description: 'Generate a 360 equirectangular skybox image for the given prompt.', parameters: { type: 'OBJECT', properties: { prompt: { type: 'STRING', description: 'A description of the skybox to generate, e.g. "a sunny beach with palm trees"', }, }, required: ['prompt'], }, }); this.ai = ai; this.scene = scene; } /** * Executes the tool's action. * @param args - The prompt to use to generate the skybox. * @returns A promise that resolves with a ToolResult containing success/error information. */ async execute(args) { try { const image = await this.ai.generate('Generate a 360 equirectangular skybox image for the prompt of:' + args.prompt, 'image', 'Generate a 360 equirectangular skybox image for the prompt'); if (image) { console.log('Applying texture...'); this.scene.background = new THREE.TextureLoader().load(image); this.scene.background.mapping = THREE.EquirectangularReflectionMapping; return { success: true, data: 'Skybox generated successfully.', metadata: { prompt: args.prompt, timestamp: Date.now() }, }; } else { return { success: false, error: 'Failed to generate skybox image', metadata: { prompt: args.prompt, timestamp: Date.now() }, }; } } catch (e) { console.error('error:', e); return { success: false, error: e instanceof Error ? e.message : 'Unknown error while creating skybox', metadata: { prompt: args.prompt, timestamp: Date.now() }, }; } } } /** * Skybox Agent for generating 360-degree equirectangular backgrounds through conversation. * * @example Basic usage * ```typescript * // 1. Enable audio (required for live sessions) * await xb.core.sound.enableAudio(); * * // 2. Create agent * const agent = new xb.SkyboxAgent(xb.core.ai, xb.core.sound, xb.core.scene); * * // 3. 
Start session * await agent.startLiveSession({ * onopen: () => console.log('Session ready'), * onmessage: (msg) => handleMessage(msg), * onclose: () => console.log('Session closed') * }); * * // 4. Clean up when done * await agent.stopLiveSession(); * xb.core.sound.disableAudio(); * ``` * * @example With lifecycle callbacks * ```typescript * const agent = new xb.SkyboxAgent( * xb.core.ai, * xb.core.sound, * xb.core.scene, * { * onSessionStart: () => updateUI('active'), * onSessionEnd: () => updateUI('inactive'), * onError: (error) => showError(error) * } * ); * ``` * * @remarks * - Audio must be enabled BEFORE starting live session using `xb.core.sound.enableAudio()` * - Users are responsible for managing audio lifecycle * - Always call `stopLiveSession()` before disabling audio * - Session state can be checked using `getSessionState()` and `getLiveSessionState()` */ class SkyboxAgent extends Agent { constructor(ai, sound, scene, callbacks) { super(ai, [new GenerateSkyboxTool(ai, scene)], `You are a friendly and helpful skybox designer. The response should be short. Your only capability is to generate a 360-degree equirectangular skybox image based on a user's description. You will generate a default skybox if the user does not provide any description. You will use the tool 'generateSkybox' with the summarized description as the 'prompt' argument to create the skybox.`, callbacks); this.sound = sound; this.sessionState = { isActive: false, messageCount: 0, toolCallCount: 0, }; } /** * Starts a live AI session for real-time conversation. * * @param callbacks - Optional callbacks for session events. Can also be set using ai.setLiveCallbacks() * @throws If AI model is not initialized or live session is not available * * @remarks * Audio must be enabled separately using `xb.core.sound.enableAudio()` before starting the session. * This gives users control over when microphone permissions are requested. */ async startLiveSession(callbacks) { // Wrap callbacks to track session state const wrappedCallbacks = this.wrapCallbacks(callbacks); if (callbacks) { this.ai.setLiveCallbacks(wrappedCallbacks); } const functionDeclarations = this.tools.map((tool) => tool.toJSON()); const systemInstruction = { parts: [{ text: this.contextBuilder.instruction }], }; await this.ai.startLiveSession({ tools: [{ functionDeclarations }], systemInstruction: systemInstruction, }); this.sessionState.isActive = true; this.sessionState.startTime = Date.now(); this.isSessionActive = true; await this.lifecycleCallbacks?.onSessionStart?.(); } /** * Stops the live AI session. * * @remarks * Audio must be disabled separately using `xb.core.sound.disableAudio()` after stopping the session. */ async stopLiveSession() { await this.ai.stopLiveSession(); this.sessionState.isActive = false; this.sessionState.endTime = Date.now(); this.isSessionActive = false; await this.lifecycleCallbacks?.onSessionEnd?.(); } /** * Wraps user callbacks to track session state and trigger lifecycle events. * @param callbacks - The callbacks to wrap. * @returns The wrapped callbacks. 
*/ wrapCallbacks(callbacks) { return { onopen: () => { callbacks?.onopen?.(); }, onmessage: (message) => { this.sessionState.messageCount++; callbacks?.onmessage?.(message); }, onerror: (error) => { this.sessionState.lastError = error.message; this.lifecycleCallbacks?.onError?.(new Error(error.message)); callbacks?.onerror?.(error); }, onclose: (event) => { this.sessionState.isActive = false; this.sessionState.endTime = Date.now(); this.isSessionActive = false; callbacks?.onclose?.(event); }, }; } /** * Sends tool execution results back to the AI. * * @param response - The tool response containing function results */ async sendToolResponse(response) { if (!this.validateToolResponse(response)) { console.error('Invalid tool response format:', response); return; } // Handle both single response and array of responses const responses = Array.isArray(response.functionResponses) ? response.functionResponses : [response.functionResponses]; this.sessionState.toolCallCount += responses.length; console.log('Sending tool response:', response); this.ai.sendToolResponse(response); } /** * Validates that a tool response has the correct format. * @param response - The tool response to validate. * @returns True if the response is valid, false otherwise. */ validateToolResponse(response) { if (!response.functionResponses) { return false; } // Handle both single response and array of responses const responses = Array.isArray(response.functionResponses) ? response.functionResponses : [response.functionResponses]; return responses.every((fr) => fr.id && fr.name && fr.response !== undefined); } /** * Helper to create a properly formatted tool response from a ToolResult. * * @param id - The function call ID * @param name - The function name * @param result - The ToolResult from tool execution * @returns A properly formatted FunctionResponse */ static createToolResponse(id, name, result) { return { id, name, response: result.success ? { result: result.data } : { error: result.error }, }; } /** * Gets the current live session state. * * @returns Read-only session state information */ getLiveSessionState() { return { ...this.sessionState }; } /** * Gets the duration of the session in milliseconds. * * @returns Duration in ms, or null if session hasn't started */ getSessionDuration() { if (!this.sessionState.startTime) return null; const endTime = this.sessionState.endTime || Date.now(); return endTime - this.sessionState.startTime; } } /** * A tool that gets the current weather for a specific location. */ class GetWeatherTool extends Tool { constructor() { super({ name: 'get_weather', description: 'Gets the current weather for a specific location.', parameters: { type: 'OBJECT', properties: { location: { type: 'STRING', description: 'The city and state, e.g. San Francisco, CA', }, unit: { type: 'STRING', enum: ['celsius', 'fahrenheit'], }, }, required: ['location'], }, }); } /** * Executes the tool's action. * @param args - The arguments for the tool. * @returns A promise that resolves with a ToolResult containing weather information. 
*/ async execute(args) { if (!args.latitude || !args.longitude) { args.latitude = 37.7749; // Default to San Francisco args.longitude = -122.4194; } const url = `https://api.open-meteo.com/v1/forecast?latitude=${args.latitude}&longitude=${args.longitude}&current=weather_code,temperature_2m&temperature_unit=fahrenheit`; try { const response = await fetch(url); const data = await response.json(); if (response.ok) { return { success: true, data: { temperature: data.current.temperature_2m, weathercode: data.current.weather_code, }, metadata: { latitude: args.latitude, longitude: args.longitude, timestamp: Date.now(), }, }; } else { return { success: false, error: 'Could not retrieve weather for the specified location.', metadata: { latitude: args.latitude, longitude: args.longitude }, }; } } catch (error) { console.error('Error fetching weather:', error); return { success: false, error: error instanceof Error ? error.message : 'There was an error fetching the weather.', metadata: { latitude: args.latitude, longitude: args.longitude }, }; } } } /** * UX manages the user experience (UX) state for an interactive object in * the scene. It tracks interaction states like hover, * selection, and dragging for multiple controllers. */ class UX { /** * @param parent - The script or object that owns this UX instance. */ constructor(parent) { /** * Indicates if the parent object can be dragged. */ this.draggable = false; /** * Indicates if the parent object can be selected. */ this.selectable = false; /** * Indicates if the parent object can be touched. */ this.touchable = false; // --- Interaction States --- /** * An array tracking the selection state for each controller. * `selected[i]` is true if controller `i` is selecting the object. */ this.selected = []; /** * An array tracking the hover state for each controller. * `hovered[i]` is true if controller `i` is hovering over the object. */ this.hovered = []; /** * An array tracking the touch state for each controller. * `touched[i]` is true if controller `i` is touching the object. */ this.touched = []; /** * An array tracking the drag state for each controller. */ this.activeDragged = []; // --- Intersection Data --- /** * An array storing the 3D position of the last intersection for each * controller. */ this.positions = []; /** * An array storing the distance of the last intersection for each controller. */ this.distances = []; /** * An array storing the UV coordinates of the last intersection for each * controller. */ this.uvs = []; // --- Drag Management State --- /** * The initial position of the object when a drag operation begins. */ this.initialPosition = new THREE.Vector3(); this.parent = parent; } /** * Checks if the object is currently being hovered by any controller. */ isHovered() { return this.hovered.includes(true); } /** * Checks if the object is currently being selected by any controller. */ isSelected() { return this.selected.includes(true); } /** * Checks if the object is currently being dragged by any controller. */ isDragging() { return this.activeDragged.includes(true); } /** * Updates the interaction state for a specific controller based on a new * intersection. This is internally called by the core input system when a * raycast hits the parent object. * @param controller - The controller performing the * interaction. * @param intersection - The raycast intersection data.
*/ update(controller, intersection) { const id = controller.userData.id; this.initializeVariablesForId(id); if (intersection.object === this.parent || intersection.object === this.parent.mesh) { this.hovered[id] = true; this.selected[id] = controller.userData.selected; if (intersection.uv) { this.uvs[id].copy(intersection.uv); } this.positions[id].copy(intersection.point); this.distances[id] = intersection.distance; if (!this.selected[id]) { this.activeDragged[id] = false; } } } /** * Ensures that the internal arrays for tracking states are large enough to * accommodate a given controller ID. * @param id - The controller ID to ensure exists. */ initializeVariablesForId(id) { while (this.selected.length <= id) { this.selected.push(false); this.hovered.push(false); this.activeDragged.push(false); this.positions.push(new THREE.Vector3()); this.distances.push(1); this.uvs.push(new THREE.Vector2()); } } /** * Resets the hover and selection states for all controllers. This is * typically called at the beginning of each frame. */ reset() { for (const i in this.selected) { this.selected[i] = false; this.hovered[i] = false; } } /** * Gets the IDs of up to two controllers that are currently hovering over the * parent object, always returning a two-element array. This is useful for * shaders or components like Panels that expect a fixed number of interaction * points. * * @returns A fixed-size two-element array. Each element is either a * controller ID (e.g., 0, 1) or null. */ getPrimaryTwoControllerIds() { const activeControllerIds = []; // this.hovered is an array of booleans, indexed by controller ID. if (this.hovered) { for (let i = 0; i < this.hovered.length && activeControllerIds.length < 2; ++i) { if (this.hovered[i]) { activeControllerIds.push(i); } } } // Ensures the returned array always has two elements. const controllerId1 = activeControllerIds[0] ?? null; const controllerId2 = activeControllerIds[1] ?? null; return [controllerId1, controllerId2]; } } /** * The Script class facilitates development by providing useful lifecycle * functions similar to MonoBehaviours in Unity. * * Each Script object is an independent THREE.Object3D entity within the * scene graph. * * See /docs/manual/Scripts.md for the full documentation. * * It manages the user, objects, and interactions between the user and objects. * See `/templates/0_basic/` for an example to start with. * * * If the class does not extend View, it can still bind the above three * functions; the engine then ignores whether a reticle exists. * * # Supported (native WebXR) functions to extend: * * onSelectStart(event) * onSelectEnd(event) * */ function ScriptMixin(base) { return class extends base { constructor() { super(...arguments); this.ux = new UX(this); this.isXRScript = true; } /** * Initializes an instance with XR controllers, grips, hands, raycaster, and * default options. We allow all scripts to quickly access their user (e.g., * user.isSelecting(), user.hands), world (e.g., physical depth mesh, * lighting estimation, and recognized objects), and scene (the root of * three.js's scene graph). If this returns a promise, we will wait for it. */ init(_) { } /** * Runs per frame. */ update(_time, _frame) { } /** * Enables depth-aware interactions with physics. See /demos/ballpit */ initPhysics(_physics) { } physicsStep() { } onXRSessionStarted(_session) { } onXRSessionEnded() { } onSimulatorStarted() { } // Global controller callbacks.
// See https://developer.mozilla.org/en-US/docs/Web/API/XRInputSourceEvent /** * Called whenever pinch / mouse click starts, globally. * @param _event - event.target holds its controller */ onSelectStart(_event) { } /** * Called whenever pinch / mouse click ends, globally. * @param _event - event.target holds its controller */ onSelectEnd(_event) { } /** * Called whenever pinch / mouse click successfully completes, globally. * @param _event - event.target holds its controller. */ onSelect(_event) { } /** * Called whenever pinch / mouse click is happening, globally. */ onSelecting(_event) { } /** * Called on keyboard keypress. * @param _event - Event containing `.code` to read the keyboard key. */ onKeyDown(_event) { } onKeyUp(_event) { } /** * Called whenever gamepad trigger starts, globally. * @param _event - event.target holds its controller. */ onSqueezeStart(_event) { } /** * Called whenever gamepad trigger stops, globally. * @param _event - event.target holds its controller. */ onSqueezeEnd(_event) { } /** * Called whenever gamepad is being triggered, globally. */ onSqueezing(_event) { } /** * Called whenever gamepad trigger successfully completes, globally. * @param _event - event.target holds its controller. */ onSqueeze(_event) { } // Object-specific controller callbacks. /** * Called when the controller starts selecting this object the script * represents, e.g. View, ModelView. * @param _event - event.target holds its controller. * @returns Whether the event was handled */ onObjectSelectStart(_event) { return false; } /** * Called when the controller stops selecting this object the script * represents, e.g. View, ModelView. * @param _event - event.target holds its controller. * @returns Whether the event was handled */ onObjectSelectEnd(_event) { return false; // Whether the event was handled } /** * Called when the controller starts hovering over this object with the * reticle. * @param _controller - An XR controller. */ onHoverEnter(_controller) { } /** * Called when the controller stops hovering over this object with the * reticle. * @param _controller - An XR controller. */ onHoverExit(_controller) { } /** * Called every frame the controller hovers over this object with the * reticle. * @param _controller - An XR controller. */ onHovering(_controller) { } /** * Called when a hand's index finger starts touching this object. */ onObjectTouchStart(_event) { } /** * Called every frame that a hand's index finger is touching this object. */ onObjectTouching(_event) { } /** * Called when a hand's index finger stops touching this object. */ onObjectTouchEnd(_event) { } /** * Called when a hand starts grabbing this object (touching + pinching). */ onObjectGrabStart(_event) { } /** * Called every frame a hand is grabbing this object. */ onObjectGrabbing(_event) { } /** * Called when a hand stops grabbing this object. */ onObjectGrabEnd(_event) { } /** * Called when the script is removed from the scene. Opposite of init. */ dispose() { } }; } /** * Script manages app logic or interaction between user and objects. */ const ScriptMixinObject3D = ScriptMixin(THREE.Object3D); class Script extends ScriptMixinObject3D { } /** * MeshScript can be constructed with geometry and materials, with * `super(geometry, material)`; for direct access to its geometry. * MeshScripts hold a UX object that contains its interaction information, such * as which controller is selecting or touching this object, as well as the * exact selected UV / xyz of the reticle, or touched point.
*/ const ScriptMixinMeshScript = ScriptMixin(THREE.Mesh); class MeshScript extends ScriptMixinMeshScript { /** * {@inheritDoc} */ constructor(geometry, material) { super(geometry, material); } } /** * Clamps a value between a minimum and maximum value. */ function clamp(value, min, max) { return Math.min(Math.max(value, min), max); } /** * Linearly interpolates between two numbers `x` and `y` by a given amount `t`. */ function lerp(x, y, t) { return x + (y - x) * t; } /** * Python-style print function for debugging. */ function print(...args) { console.log('*', ...args); } // Parses URL parameters using the URLSearchParams API. const urlParams = new URLSearchParams(window.location.search); /** * Function to get the value of a URL parameter. * @param name - The name of the URL parameter. * @returns The value of the URL parameter or null if not found. */ function getUrlParameter(name) { const urlParams = new URLSearchParams(window.location.search); return urlParams.get(name); } /** * Retrieves a boolean URL parameter. Returns true for 'true' or '1', false for * 'false' or '0'. If the parameter is not found, returns the specified default * boolean value. * @param name - The name of the URL parameter. * @param defaultBool - The default boolean value if the * parameter is not present. * @returns The boolean value of the URL parameter. */ function getUrlParamBool(name, defaultBool = false) { const inputString = urlParams.get(name)?.toLowerCase(); // Convert the parameter value to a boolean. Returns true for 'true' or '1'. if (inputString === 'true' || inputString === '1') { return true; } // Returns false for 'false' or '0'. if (inputString === 'false' || inputString === '0') { return false; } // If the parameter is not found, returns the default boolean value. return defaultBool; } /** * Retrieves an integer URL parameter. If the parameter is not found or is not a * valid number, returns the specified default integer value. * @param name - The name of the URL parameter. * @param defaultNumber - The default integer value if the * parameter is not present. * @returns The integer value of the URL parameter. */ function getUrlParamInt(name, defaultNumber = 0) { const inputNumber = urlParams.get(name); if (inputNumber) { // Convert the parameter value to an integer. If valid, returns it. const num = parseInt(inputNumber, 10); if (!isNaN(num)) { return num; } } // If the parameter is not found or invalid, returns the default integer // value. return defaultNumber; } /** * Retrieves a float URL parameter. If the parameter is not found or is not a * valid number, returns the specified default float value. * @param name - The name of the URL parameter. * @param defaultNumber - The default float value if the parameter * is not present. * @returns The float value of the URL parameter. */ function getUrlParamFloat(name, defaultNumber = 0) { const inputNumber = urlParams.get(name); if (inputNumber) { // Convert the parameter value to a float. If valid, returns it. const num = parseFloat(inputNumber); if (!isNaN(num)) { return num; } } // If the parameter is not found or invalid, returns the default float value. return defaultNumber; } /** * Parses a color string (hexadecimal with optional alpha) into a THREE.Vector4. * Supports: * - #rgb (shorthand, alpha defaults to 1) * - #rrggbb (alpha defaults to 1) * - #rgba (shorthand) * - #rrggbbaa * * @param colorString - The color string to parse (e.g., '#66ccff', * '#6cf5', '#66ccff55', '#6cf'). 
* @returns The parsed color as a THREE.Vector4 (r, g, b, a), with components in * the 0-1 range. * @throws If the input is not a string or if the hex string is invalid. */ function getVec4ByColorString(colorString) { if (typeof colorString !== 'string') { throw new Error('colorString must be a string'); } // Remove the '#' if it exists. const hex = colorString.startsWith('#') ? colorString.slice(1) : colorString; const len = hex.length; let alpha = 1.0; // Default alpha to 1 let expandedHex = hex; if (len === 3 || len === 4) { // Expand shorthand: rgb -> rrggbb, rgba -> rrggbbaa. expandedHex = hex .split('') .map((char) => char + char) .join(''); } if (expandedHex.length === 8) { alpha = parseInt(expandedHex.slice(6, 8), 16) / 255; expandedHex = expandedHex.slice(0, 6); } else if (expandedHex.length !== 6) { throw new Error(`Invalid hex color string format: ${colorString}`); } const r = parseInt(expandedHex.slice(0, 2), 16) / 255; const g = parseInt(expandedHex.slice(2, 4), 16) / 255; const b = parseInt(expandedHex.slice(4, 6), 16) / 255; if (isNaN(r) || isNaN(g) || isNaN(b) || isNaN(alpha)) { throw new Error(`Invalid hex values in color string: ${colorString}`); } return new THREE.Vector4(r, g, b, alpha); } function getColorHex(fontColor) { if (typeof fontColor === 'string') { const vec4 = getVec4ByColorString(fontColor); const r = Math.round(vec4.x * 255); const g = Math.round(vec4.y * 255); const b = Math.round(vec4.z * 255); return (r << 16) + (g << 8) + b; } else if (typeof fontColor === 'number') { return fontColor; } else { // Default to white if fontColor is invalid. return 0xffffff; } } /** * Parses a data URL (e.g., "data:image/png;base64,...") into its * stripped base64 string and MIME type. * This function handles common image MIME types. * @param dataURL - The data URL string. * @returns An object containing the stripped base64 string and the extracted * MIME type.
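 * @example
 * // A small sketch: split a (truncated) PNG data URL into its parts.
 * const {strippedBase64, mimeType} =
 *     parseBase64DataURL('data:image/png;base64,iVBORw0KGgo=');
 * // mimeType === 'image/png', strippedBase64 === 'iVBORw0KGgo='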
*/ function parseBase64DataURL(dataURL) { const mimeTypeRegex = /^data:(image\/[a-zA-Z0-9\-+.]+);base64,/; const match = dataURL.match(mimeTypeRegex); if (match) { const mimeType = match[1]; const strippedBase64 = dataURL.substring(match[0].length); return { strippedBase64, mimeType }; } else { return { strippedBase64: dataURL, mimeType: null }; } } class GeminiOptions { constructor() { this.apiKey = ''; this.urlParam = 'geminiKey'; this.keyValid = false; this.enabled = false; this.model = 'gemini-2.0-flash'; this.config = {}; this.live = { enabled: false, model: 'gemini-live-2.5-flash-preview', voiceName: 'Aoede', screenshotInterval: 3000, audioConfig: { sampleRate: 16000, channelCount: 1, echoCancellation: true, noiseSuppression: true, autoGainControl: true, }, }; } } class OpenAIOptions { constructor() { this.apiKey = ''; this.urlParam = 'openaiKey'; this.model = 'gpt-4.1'; this.enabled = false; } } class AIOptions { constructor() { this.enabled = false; this.model = 'gemini'; this.gemini = new GeminiOptions(); this.openai = new OpenAIOptions(); this.globalUrlParams = { key: 'key', // Generic key parameter }; } } class BaseAIModel { constructor() { } } let createPartFromUri; let createUserContent; let GoogleGenAI; let EndSensitivity; let StartSensitivity; let Modality; // --- Attempt Dynamic Import --- async function loadGoogleGenAIModule() { if (GoogleGenAI) { return; } try { const genAIModule = await import('@google/genai'); if (genAIModule && genAIModule.GoogleGenAI) { createPartFromUri = genAIModule.createPartFromUri; createUserContent = genAIModule.createUserContent; GoogleGenAI = genAIModule.GoogleGenAI; EndSensitivity = genAIModule.EndSensitivity; StartSensitivity = genAIModule.StartSensitivity; Modality = genAIModule.Modality; console.log("'@google/genai' module loaded successfully."); } else { throw new Error("'@google/genai' module loaded but is not valid."); } } catch (error) { const errorMessage = `The '@google/genai' module is required for Gemini but failed to load. Error: ${error}`; console.error(errorMessage); throw new Error(errorMessage); } } class Gemini extends BaseAIModel { constructor(options) { super(); this.options = options; this.inited = false; this.isLiveMode = false; this.liveCallbacks = {}; } async init() { await loadGoogleGenAIModule(); } isAvailable() { if (!GoogleGenAI) { return false; } if (!this.inited) { this.ai = new GoogleGenAI({ apiKey: this.options.apiKey }); this.inited = true; } return true; } isLiveAvailable() { return this.isAvailable() && EndSensitivity && StartSensitivity && Modality; } async startLiveSession(params = {}, model = 'gemini-2.5-flash-native-audio-preview-09-2025') { if (!this.isLiveAvailable()) { throw new Error('Live API not available. 
Make sure @google/genai module is loaded.'); } if (this.liveSession) { return this.liveSession; } const defaultConfig = { responseModalities: [Modality.AUDIO], speechConfig: { voiceConfig: { prebuiltVoiceConfig: { voiceName: 'Aoede' } }, }, outputAudioTranscription: {}, inputAudioTranscription: {}, ...params, }; const callbacks = { onopen: () => { this.isLiveMode = true; console.log('šŸ”“ Live session opened.'); if (this.liveCallbacks?.onopen) { this.liveCallbacks.onopen(); } }, onmessage: (e) => { if (this.liveCallbacks?.onmessage) { this.liveCallbacks.onmessage(e); } }, onerror: (e) => { console.error('āŒ Live session error:', e); if (this.liveCallbacks?.onerror) { this.liveCallbacks.onerror(e); } }, onclose: (event) => { this.isLiveMode = false; this.liveSession = undefined; if (event.reason) { console.warn('šŸ”’ Live session closed:', event); } else { console.warn('šŸ”’ Live session closed without reason.'); } if (this.liveCallbacks?.onclose) { this.liveCallbacks.onclose(event); } }, }; try { const connectParams = { model: model, callbacks: callbacks, config: defaultConfig, }; console.log('Connecting with params:', connectParams); this.liveSession = await this.ai.live.connect(connectParams); return this.liveSession; } catch (error) { console.error('āŒ Failed to start live session:', error); throw error; } } async stopLiveSession() { if (!this.liveSession) { return; } this.liveSession.close(); this.liveSession = undefined; this.isLiveMode = false; } // Set Live session callbacks. setLiveCallbacks(callbacks) { this.liveCallbacks = callbacks; } sendToolResponse(response) { if (this.liveSession) { console.debug('Sending tool response to gemini:', response); this.liveSession.sendToolResponse(response); } } sendRealtimeInput(input) { if (!this.liveSession) { return; } try { this.liveSession.sendRealtimeInput(input); } catch (error) { console.error('āŒ Error sending realtime input:', error); throw error; } } getLiveSessionStatus() { return { isActive: this.isLiveMode, hasSession: !!this.liveSession, isAvailable: this.isLiveAvailable(), }; } async query(input, _tools = []) { if (!this.inited) { console.warn('Gemini not inited.'); return null; } const options = this.options; const config = options.config || {}; if (!('type' in input)) { const response = await this.ai.models.generateContent({ model: options.model, contents: input.prompt, config: config, }); return { text: response.text || null }; } const model = this.ai.models; const modelParams = { model: this.options.model, contents: [], config: this.options.config || {}, }; let response = null; switch (input.type) { case 'text': modelParams.contents = input.text; response = await model.generateContent(modelParams); break; case 'base64': if (!input.mimeType) { input.mimeType = 'image/png'; } modelParams.contents = { inlineData: { mimeType: input.mimeType, data: input.base64, }, }; response = await model.generateContent(modelParams); break; case 'uri': modelParams.contents = createUserContent([ createPartFromUri(input.uri, input.mimeType), input.text, ]); response = await model.generateContent(modelParams); break; case 'multiPart': modelParams.contents = [{ role: 'user', parts: input.parts }]; response = await model.generateContent(modelParams); break; } if (!response) { return { text: null }; } const toolCall = response.functionCalls?.[0]; if (toolCall && toolCall.name) { return { toolCall: { name: toolCall.name, args: toolCall.args } }; } return { text: response.text || null }; } async generate(prompt, type = 'image', systemInstruction =
'Generate an image', model = 'gemini-2.5-flash-image') { if (!this.isAvailable()) return; let contents; if (Array.isArray(prompt)) { contents = prompt.map((item) => { if (typeof item === 'string') { if (item.startsWith('data:image/')) { const [header, data] = item.split(','); const mimeType = header.split(';')[0].split(':')[1]; return { inlineData: { mimeType, data } }; } else { return { text: item }; } } // Assumes other items are already valid Part objects return item; }); } else { contents = prompt; } const response = await this.ai.models.generateContent({ model: model, contents: contents, config: { systemInstruction }, }); if (response.candidates && response.candidates.length > 0) { const firstCandidate = response.candidates[0]; for (const part of firstCandidate?.content?.parts || []) { if (type === 'image' && part.inlineData) { return 'data:image/png;base64,' + part.inlineData.data; } } } } } let OpenAIApi = null; async function loadOpenAIModule() { if (OpenAIApi) { return; } try { const openAIModule = await import('openai'); OpenAIApi = openAIModule.default; console.log("'openai' module loaded successfully."); } catch (error) { console.warn("'openai' module not found. Using fallback implementations.", 'Error details:', error); } } class OpenAI extends BaseAIModel { constructor(options) { super(); this.options = options; } async init() { await loadOpenAIModule(); if (this.options.apiKey && OpenAIApi) { this.openai = new OpenAIApi({ apiKey: this.options.apiKey, dangerouslyAllowBrowser: true, }); console.log('OpenAI model initialized'); } else { console.error('OpenAI API key is missing or module failed to load.'); } } isAvailable() { return !!this.openai; } async query(input, _tools) { if (!this.isAvailable()) { throw new Error('OpenAI model is not initialized.'); } try { const completion = await this.openai.chat.completions.create({ messages: [{ role: 'user', content: input.prompt }], model: this.options.model, }); const content = completion.choices[0].message.content; if (content) { return { text: content }; } return null; } catch (error) { console.error('Error querying OpenAI:', error); throw error; } } async generate() { throw new Error('Wrapper not implemented'); } } const SUPPORTED_MODELS = { gemini: Gemini, openai: OpenAI, }; /** * AI Interface to wrap different AI models (primarily Gemini) * Handles both traditional query-based AI interactions and real-time live * sessions * * Features: * - Text and multimodal queries * - Real-time audio/video AI sessions (Gemini Live) * - Advanced API key management with multiple sources * - Session locking to prevent concurrent operations * * The URL param and key.json shortcut is only for demonstration and prototyping * practice and we strongly suggest not using it for production or deployment * purposes. One should set up a proper server to converse with AI servers in * deployment. * * API Key Management Features: * * 1. Multiple Key Sources (Priority Order): * - URL Parameter: ?key=\ * - keys.json file: Local configuration file * - User Prompt: Interactive fallback * 2. 
keys.json Support: * - Structure: \{"gemini": \{"apiKey": "YOUR_KEY_HERE"\}\} * - Automatically loads if present */ class AI extends Script { constructor() { super(...arguments); this.lock = false; } static { this.dependencies = { aiOptions: AIOptions }; } /** * Load API keys from keys.json file if available. * @returns Parsed keys object, or null if not found. */ async loadKeysFromFile() { if (this.keysCache) return this.keysCache; try { const response = await fetch('./keys.json'); if (response.ok) { this.keysCache = (await response.json()); console.log('šŸ”‘ Loaded keys.json'); return this.keysCache; } } catch { // Silent fail - keys.json is optional. } return null; } async init({ aiOptions }) { this.options = aiOptions; if (!aiOptions.enabled) { console.log('AI is disabled in options'); return; } const modelName = aiOptions.model; const ModelClass = SUPPORTED_MODELS[modelName]; if (ModelClass) { const modelOptions = aiOptions[modelName]; if (modelOptions && modelOptions.enabled) { await this.initializeModel(ModelClass, modelOptions); } else { console.log(`${modelName} is disabled in AI options`); } } else { console.error(`Unsupported AI model: ${modelName}`); } } async initializeModel(ModelClass, modelOptions) { const apiKey = await this.resolveApiKey(modelOptions); if (!apiKey || !this.isValidApiKey(apiKey)) { console.error(`No valid API key found for ${this.options.model}`); return; } modelOptions.apiKey = apiKey; this.model = new ModelClass(modelOptions); try { await this.model.init(); console.log(`${this.options.model} initialized`); } catch (error) { console.error(`Failed to initialize ${this.options.model}:`, error); this.model = undefined; } } async resolveApiKey(modelOptions) { const modelName = this.options.model; // 1. Check options. if (modelOptions.apiKey) { return modelOptions.apiKey; } // 2. Check URL parameters for the generic 'key'. const genericKey = getUrlParameter('key'); if (genericKey) { return genericKey; } // 3. Check URL parameters for the model-specific key. const modelKey = getUrlParameter(modelOptions.urlParam); if (modelKey) return modelKey; // Temporary fallback to geminiKey64 for teamfood. const geminiKey64 = getUrlParameter('geminiKey64'); if (geminiKey64) { return window.atob(geminiKey64); } // 4. Check the keys.json file. const keysFromFile = await this.loadKeysFromFile(); if (keysFromFile) { const modelNameWithApiKeySuffix = modelName + `ApiKey`; let keyFromFile = null; if (typeof keysFromFile[modelName] === 'object') { keyFromFile = keysFromFile[modelName]?.apiKey; } else if (typeof keysFromFile[modelNameWithApiKeySuffix] === 'string') { keyFromFile = keysFromFile[modelNameWithApiKeySuffix]; } else if (typeof keysFromFile[modelName] === 'string') { keyFromFile = keysFromFile[modelName]; } if (keyFromFile) { console.log(`šŸ”‘ Using ${modelName} key from keys.json`); return keyFromFile; } } return null; } isValidApiKey(key) { return key && typeof key === 'string' && key.length > 0; } isAvailable() { return this.model && this.model.isAvailable(); } async query(input, tools) { if (!this.isAvailable()) { throw new Error("AI is not available. Check if it's enabled and properly initialized."); } return await this.model.query(input, tools); }
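  // A hedged usage sketch (`ai` is a hypothetical, already-initialized
  // instance of this class with an enabled model and a resolved API key):
  //   const result = await ai.query({type: 'text', text: 'Describe the scene.'});
  //   if (result?.text) console.log(result.text);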
async startLiveSession(config = {}, model) { if (!this.model) { throw new Error('AI model is not initialized.'); } if (!('isLiveAvailable' in this.model) || !this.model.isLiveAvailable()) { throw new Error('Live session is not available for the current model.'); } try { const session = await this.model.startLiveSession(config, model); return session; } catch (error) { console.error('āŒ Failed to start Live session:', error); throw error; } } async stopLiveSession() { if (!this.model) return; try { await ('stopLiveSession' in this.model && this.model.stopLiveSession()); } catch (error) { console.error('āŒ Error stopping Live session:', error); } } async setLiveCallbacks(callbacks) { if (this.model && 'setLiveCallbacks' in this.model) { this.model.setLiveCallbacks(callbacks); } } sendToolResponse(response) { if (this.model && 'sendToolResponse' in this.model) { this.model.sendToolResponse(response); } } sendRealtimeInput(input) { if (!this.model || !('sendRealtimeInput' in this.model)) return false; return this.model.sendRealtimeInput(input); } getLiveSessionStatus() { if (!this.model || !('getLiveSessionStatus' in this.model)) { return { isActive: false, hasSession: false, isAvailable: false }; } return this.model.getLiveSessionStatus(); } isLiveAvailable() { return (this.model && 'isLiveAvailable' in this.model && this.model.isLiveAvailable()); } /** * In simulator mode, pop up a 2D UI to request a Gemini key; * in XR mode, show a 3D UI to instruct users to get an API key. */ triggerKeyPopup() { } async generate(prompt, type = 'image', systemInstruction = 'Generate an image', model = undefined) { return this.model.generate(prompt, type, systemInstruction, model); } /** * Create a sample keys.json file structure for reference. * @returns Sample keys.json structure */ static createSampleKeysStructure() { return { gemini: { apiKey: 'YOUR_GEMINI_API_KEY_HERE' }, openai: { apiKey: 'YOUR_OPENAI_API_KEY_HERE' }, }; } /** * Check if the current model has an API key available from any source. * @returns True if an API key is available */ async hasApiKey() { if (!this.options) return false; const modelOptions = this.options[this.options.model]; if (!modelOptions) return false; const apiKey = await this.resolveApiKey(modelOptions); return apiKey && this.isValidApiKey(apiKey); } } // --- Hands --- /** * The number of hands tracked in a typical XR session (left and right). */ const NUM_HANDS = 2; /** * The number of joints per hand tracked in a typical XR session. */ const HAND_JOINT_COUNT = 25; /** * The pairs of joints as an adjacency list. */ const HAND_JOINT_IDX_CONNECTION_MAP = [ [1, 2], [2, 3], [3, 4], // Thumb has 3 bones [5, 6], [6, 7], [7, 8], [8, 9], // Index finger has 4 bones [10, 11], [11, 12], [12, 13], [13, 14], // Middle finger has 4 bones [15, 16], [16, 17], [17, 18], [18, 19], // Ring finger has 4 bones [20, 21], [21, 22], [22, 23], [23, 24], // Little finger has 4 bones ]; /** * The pairs of bones' ids per angle as an adjacency list.
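 *
 * A hedged sketch of how these pairs can be consumed (`boneDirections` is a
 * hypothetical array of normalized THREE.Vector3, one per bone):
 * @example
 * const angles = HAND_BONE_IDX_CONNECTION_MAP.map(
 *     ([a, b]) => boneDirections[a].angleTo(boneDirections[b]));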
*/ // clang-format off const HAND_BONE_IDX_CONNECTION_MAP = [ [0, 1], [1, 2], // Thumb has 2 angles [3, 4], [4, 5], [5, 6], // Index finger has 3 angles [7, 8], [8, 9], [9, 10], // Middle finger has 3 angles [11, 12], [12, 13], [13, 14], // Ring finger has 3 angles [15, 16], [16, 17], [17, 18], // Little finger has 3 angles ]; // clang-format on // --- UI --- /** * A small depth offset (in meters) applied between layered UI elements to * prevent Z-fighting, which is a visual artifact where surfaces at similar * depths appear to flicker. */ const VIEW_DEPTH_GAP = 0.002; // --- Renderer Layer --- /** * The THREE.js rendering layer used exclusively for objects that should only be * visible to the left eye's camera in stereoscopic rendering. */ const LEFT_VIEW_ONLY_LAYER = 1; /** * The THREE.js rendering layer used exclusively for objects that should only be * visible to the right eye's camera in stereoscopic rendering. */ const RIGHT_VIEW_ONLY_LAYER = 2; /** * The THREE.js rendering layer for virtual objects that should be realistically * occluded by real-world objects when depth sensing is active. */ const OCCLUDABLE_ITEMS_LAYER = 3; /** * Layer used for rendering overlaid UI text. Currently only used for LabelView. */ const UI_OVERLAY_LAYER = 4; // --- Camera --- /** * The default ideal width in pixels for requesting the device camera stream. * Corresponds to a 720p resolution. */ const DEFAULT_DEVICE_CAMERA_WIDTH = 1280; /** * The default ideal height in pixels for requesting the device camera stream. * Corresponds to a 720p resolution. */ const DEFAULT_DEVICE_CAMERA_HEIGHT = 720; const XR_BLOCKS_ASSETS_PATH = 'https://cdn.jsdelivr.net/gh/xrblocks/assets@a500427f2dfc12312df1a75860460244bab3a146/'; /** * Recursively freezes an object and all its nested properties, making them * immutable. This prevents any future changes to the object or its sub-objects. * @param obj - The object to freeze deeply. * @returns The same object that was passed in, now deeply frozen. */ function deepFreeze(obj) { Object.freeze(obj); Object.getOwnPropertyNames(obj).forEach((name) => { // We use `any` here because `T` is a generic and we can't be sure // what properties it has without more complex type manipulation. // The function's signature provides the necessary type safety for // consumers. const prop = obj[name]; if (prop && typeof prop === 'object' && !Object.isFrozen(prop)) { deepFreeze(prop); } }); return obj; } /** * Recursively merges properties from `obj2` into `obj1`. * If a property exists in both objects and is an object itself, it will be * recursively merged. Otherwise, the value from `obj2` will overwrite the * value in `obj1`. * @param obj1 - The target object to merge into. * @param obj2 - The source object to merge from. */ function deepMerge(obj1, obj2) { if (obj2 == null) { return obj1; } const merged = obj1; for (const key in obj2) { // Ensure the key is actually on obj2, not its prototype chain. if (Object.prototype.hasOwnProperty.call(obj2, key)) { const val1 = merged[key]; const val2 = obj2[key]; if (val1 && typeof val1 === 'object' && val2 && typeof val2 === 'object') { // If both values are objects, recurse deepMerge(val1, val2); } else { // Otherwise, overwrite merged[key] = val2; } } } } /** * Default parameters for rgb to depth projection. * For RGB and depth, 4:3 and 1:1, respectively. 
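 *
 * These defaults are deep-merged with user overrides, so a single coefficient
 * can be replaced while the rest are kept:
 * @example
 * // Sketch: tweak one distortion coefficient via DeviceCameraOptions.
 * const cameraOptions =
 *     new DeviceCameraOptions({rgbToDepthParams: {k1: -0.05}});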
*/ const DEFAULT_RGB_TO_DEPTH_PARAMS = { scale: 1, scaleX: 0.75, scaleY: 0.63, translateU: 0.2, translateV: -0.02, k1: -0.046, k2: 0, k3: 0, p1: 0, p2: 0, xc: 0, yc: 0, }; /** * Configuration options for the device camera. */ class DeviceCameraOptions { constructor(options) { this.enabled = false; /** * Hint for performance optimization on frequent captures. */ this.willCaptureFrequently = false; /** * Parameters for RGB to depth UV mapping given different aspect ratios. */ this.rgbToDepthParams = { ...DEFAULT_RGB_TO_DEPTH_PARAMS }; deepMerge(this, options); } } // Base configuration for all common capture settings. const baseCaptureOptions = { enabled: true, videoConstraints: { width: { ideal: DEFAULT_DEVICE_CAMERA_WIDTH }, height: { ideal: DEFAULT_DEVICE_CAMERA_HEIGHT }, }, }; const xrDeviceCameraEnvironmentOptions = deepFreeze(new DeviceCameraOptions({ ...baseCaptureOptions, videoConstraints: { ...baseCaptureOptions.videoConstraints, facingMode: 'environment', }, })); const xrDeviceCameraUserOptions = deepFreeze(new DeviceCameraOptions({ ...baseCaptureOptions, videoConstraints: { ...baseCaptureOptions.videoConstraints, facingMode: 'user', }, })); const xrDeviceCameraEnvironmentContinuousOptions = deepFreeze(new DeviceCameraOptions({ ...xrDeviceCameraEnvironmentOptions, willCaptureFrequently: true, })); const xrDeviceCameraUserContinuousOptions = deepFreeze(new DeviceCameraOptions({ ...xrDeviceCameraUserOptions, willCaptureFrequently: true, })); const DepthMeshTexturedShader = { vertexShader: /* glsl */ ` varying vec3 vNormal; varying vec3 vViewPosition; varying vec2 vUv; void main() { vUv = uv; vNormal = normal; // Computes the view position. vec4 mvPosition = modelViewMatrix * vec4(position, 1.0); vViewPosition = -mvPosition.xyz; gl_Position = projectionMatrix * mvPosition; } `, fragmentShader: /* glsl */ ` #include uniform vec3 uColor; uniform sampler2D uDepthTexture; uniform sampler2DArray uDepthTextureArray; uniform vec3 uLightDirection; uniform vec2 uResolution; uniform float uRawValueToMeters; varying vec3 vNormal; varying vec3 vViewPosition; varying vec2 vUv; const highp float kMaxDepthInMeters = 8.0; const float kInvalidDepthThreshold = 0.01; uniform float uMinDepth; uniform float uMaxDepth; uniform float uDebug; uniform float uOpacity; uniform bool uUsingFloatDepth; uniform bool uIsTextureArray; float saturate(in float x) { return clamp(x, 0.0, 1.0); } vec3 TurboColormap(in float x) { const vec4 kRedVec4 = vec4(0.55305649, 3.00913185, -5.46192616, -11.11819092); const vec4 kGreenVec4 = vec4(0.16207513, 0.17712472, 15.24091500, -36.50657960); const vec4 kBlueVec4 = vec4(-0.05195877, 5.18000081, -30.94853351, 81.96403246); const vec2 kRedVec2 = vec2(27.81927491, -14.87899417); const vec2 kGreenVec2 = vec2(25.95549545, -5.02738237); const vec2 kBlueVec2 = vec2(-86.53476570, 30.23299484); // Adjusts color space via 6 degree poly interpolation to avoid pure red. vec4 v4 = vec4( 1.0, x, x * x, x * x * x); vec2 v2 = v4.zw * v4.z; return vec3( dot(v4, kRedVec4) + dot(v2, kRedVec2), dot(v4, kGreenVec4) + dot(v2, kGreenVec2), dot(v4, kBlueVec4) + dot(v2, kBlueVec2) ); } // Depth is packed into the luminance and alpha components of its texture. // The texture is in a normalized format, storing raw values that need to be // converted to meters. 
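  // In the packed (non-float) path below, R holds the low byte and G the high
  // byte of the 16-bit raw depth, so dot(rg, vec2(255.0, 256.0 * 255.0))
  // reconstructs the raw value before scaling by uRawValueToMeters.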
float DepthGetMeters(in sampler2D depth_texture, in vec2 depth_uv) { if (uUsingFloatDepth) { return texture2D(depth_texture, depth_uv).r * uRawValueToMeters; } vec2 packedDepthAndVisibility = texture2D(depth_texture, depth_uv).rg; return dot(packedDepthAndVisibility, vec2(255.0, 256.0 * 255.0)) * uRawValueToMeters; } float DepthArrayGetMeters(in sampler2DArray depth_texture, in vec2 depth_uv) { return uRawValueToMeters * texture(uDepthTextureArray, vec3 (depth_uv.x, depth_uv.y, 0)).r; } vec3 DepthGetColorVisualization(in float x) { return step(kInvalidDepthThreshold, x) * TurboColormap(x); } void main() { vec3 lightDirection = normalize(uLightDirection); // Compute UV coordinates relative to resolution // vec2 uv = gl_FragCoord.xy / uResolution; vec2 uv = vUv; // Ambient, diffuse, and specular terms vec3 ambient = 0.1 * uColor; float diff = max(dot(vNormal, lightDirection), 0.0); vec3 diffuse = diff * uColor; vec3 viewDir = normalize(vViewPosition); vec3 reflectDir = reflect(-lightDirection, vNormal); float spec = pow(max(dot(viewDir, reflectDir), 0.0), 16.0); vec3 specular = vec3(0.5) * spec; // Adjust specular color/strength // Combine Phong lighting vec3 finalColor = ambient + diffuse + specular; // finalColor = vec3(vNormal); // Output color gl_FragColor = uOpacity * vec4(finalColor, 1.0); if (uDebug > 0.5) { return; } vec2 depth_uv = uv; depth_uv.y = 1.0 - depth_uv.y; float depth = (uIsTextureArray ? DepthArrayGetMeters(uDepthTextureArray, depth_uv) : DepthGetMeters(uDepthTexture, depth_uv)) * 8.0; float normalized_depth = saturate((depth - uMinDepth) / (uMaxDepth - uMinDepth)); gl_FragColor = uOpacity * vec4(TurboColormap(normalized_depth), 1.0); } `, }; class DepthMesh extends MeshScript { static { this.dependencies = { renderer: THREE.WebGLRenderer, }; } static { this.isDepthMesh = true; } constructor(depthOptions, width, height, depthTextures) { const options = depthOptions.depthMesh; const geometry = new THREE.PlaneGeometry(1, 1, 159, 159); let material; let uniforms; if (options.useDepthTexture || options.showDebugTexture) { uniforms = { uDepthTexture: { value: null }, uDepthTextureArray: { value: null }, uIsTextureArray: { value: 0.0 }, uColor: { value: new THREE.Color(0xaaaaaa) }, uResolution: { value: new THREE.Vector2(width, height) }, uRawValueToMeters: { value: 1.0 }, uMinDepth: { value: 0.0 }, uMaxDepth: { value: 8.0 }, uOpacity: { value: options.opacity }, uDebug: { value: options.showDebugTexture ? 
1.0 : 0.0 }, uLightDirection: { value: new THREE.Vector3(1.0, 1.0, 1.0).normalize() }, uUsingFloatDepth: { value: depthOptions.useFloat32 }, }; material = new THREE.ShaderMaterial({ uniforms: uniforms, vertexShader: DepthMeshTexturedShader.vertexShader, fragmentShader: DepthMeshTexturedShader.fragmentShader, side: THREE.FrontSide, transparent: true, }); } else { material = new THREE.ShadowMaterial({ opacity: options.shadowOpacity }); material.depthWrite = false; } super(geometry, material); this.depthOptions = depthOptions; this.depthTextures = depthTextures; this.ignoreReticleRaycast = false; this.worldPosition = new THREE.Vector3(); this.worldQuaternion = new THREE.Quaternion(); this.updateVertexNormals = false; this.minDepth = 8; this.maxDepth = 0; this.minDepthPrev = 8; this.maxDepthPrev = 0; this.colliders = []; this.projectionMatrixInverse = new THREE.Matrix4(); this.lastColliderUpdateTime = 0; this.colliderId = 0; this.visible = options.showDebugTexture || options.renderShadow; this.options = options; this.lastColliderUpdateTime = performance.now(); this.updateVertexNormals = options.updateVertexNormals; this.colliderUpdateFps = options.colliderUpdateFps; this.depthTextureMaterialUniforms = uniforms; if (options.renderShadow) { this.receiveShadow = true; this.castShadow = false; } // Create a downsampled geometry for raycasts and physics. if (options.useDownsampledGeometry) { this.downsampledGeometry = new THREE.PlaneGeometry(1, 1, 39, 39); this.downsampledMesh = new THREE.Mesh(this.downsampledGeometry, material); this.downsampledMesh.visible = false; this.add(this.downsampledMesh); } } /** * Initialize the depth mesh. */ init({ renderer }) { this.renderer = renderer; } /** * Updates the depth data and geometry positions based on the provided camera * and depth data. */ updateDepth(depthData, projectionMatrixInverse) { this.projectionMatrixInverse = projectionMatrixInverse; this.minDepth = 8; this.maxDepth = 0; if (this.options.updateFullResolutionGeometry) { this.updateFullResolutionGeometry(depthData); } if (this.downsampledGeometry) { this.updateGeometry(depthData, this.downsampledGeometry); } this.minDepthPrev = this.minDepth; this.maxDepthPrev = this.maxDepth; this.geometry.attributes.position.needsUpdate = true; const depthTextureLeft = this.depthTextures?.get(0); if (depthTextureLeft && this.depthTextureMaterialUniforms) { const isTextureArray = depthTextureLeft instanceof THREE.ExternalTexture; this.depthTextureMaterialUniforms.uIsTextureArray.value = isTextureArray ? 1.0 : 0; if (isTextureArray) this.depthTextureMaterialUniforms.uDepthTextureArray.value = depthTextureLeft; else this.depthTextureMaterialUniforms.uDepthTexture.value = depthTextureLeft; this.depthTextureMaterialUniforms.uMinDepth.value = this.minDepth; this.depthTextureMaterialUniforms.uMaxDepth.value = this.maxDepth; this.depthTextureMaterialUniforms.uRawValueToMeters.value = this .depthTextures.depthData.length ? 
this.depthTextures.depthData[0].rawValueToMeters : 1.0; } if (this.options.updateVertexNormals) { this.geometry.computeVertexNormals(); } this.updateColliderIfNeeded(); } updateGPUDepth(depthData, projectionMatrixInverse) { this.updateDepth(this.convertGPUToGPU(depthData), projectionMatrixInverse); }
// Despite its name, this renders the GPU depth texture into a render target
// and reads it back into a CPU-side Float32Array.
convertGPUToGPU(depthData) { if (!this.depthTarget) { this.depthTarget = new THREE.WebGLRenderTarget(depthData.width, depthData.height, { format: THREE.RedFormat, type: THREE.FloatType, internalFormat: 'R32F', minFilter: THREE.NearestFilter, magFilter: THREE.NearestFilter, depthBuffer: false, }); this.depthTexture = new THREE.ExternalTexture(depthData.texture); const textureProperties = this.renderer.properties.get(this.depthTexture); textureProperties.__webglTexture = depthData.texture; this.gpuPixels = new Float32Array(depthData.width * depthData.height); const depthShader = new THREE.ShaderMaterial({ vertexShader: ` varying vec2 vUv; void main() { vUv = uv; vUv.y = 1.0-vUv.y; gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0); } `, fragmentShader: ` precision highp float; precision highp sampler2DArray; uniform sampler2DArray uTexture; uniform float uCameraNear; varying vec2 vUv; void main() { float z = texture(uTexture, vec3(vUv, 0)).r; z = uCameraNear / (1.0 - z); z = clamp(z, 0.0, 20.0); gl_FragColor = vec4(z, 0, 0, 1.0); } `, uniforms: { uTexture: { value: this.depthTexture }, uCameraNear: { value: depthData.depthNear, }, }, blending: THREE.NoBlending, depthTest: false, depthWrite: false, side: THREE.DoubleSide, }); const depthMesh = new THREE.Mesh(new THREE.PlaneGeometry(2, 2), depthShader); this.depthScene = new THREE.Scene(); this.depthScene.add(depthMesh); this.depthCamera = new THREE.OrthographicCamera(-1, 1, 1, -1, 0, 1); } const originalRenderTarget = this.renderer.getRenderTarget(); this.renderer.xr.enabled = false; this.renderer.setRenderTarget(this.depthTarget); this.renderer.render(this.depthScene, this.depthCamera); this.renderer.readRenderTargetPixels(this.depthTarget, 0, 0, depthData.width, depthData.height, this.gpuPixels, 0); this.renderer.xr.enabled = true; this.renderer.setRenderTarget(originalRenderTarget); return { width: depthData.width, height: depthData.height, data: this.gpuPixels.buffer, rawValueToMeters: depthData.rawValueToMeters, }; } /** * Method to manually update the full resolution geometry. * Only needed if options.updateFullResolutionGeometry is false. */ updateFullResolutionGeometry(depthData) { this.updateGeometry(depthData, this.geometry); } /** * Internal method to update the geometry of the depth mesh. */ updateGeometry(depthData, geometry) { const width = depthData.width; const height = depthData.height; const depthArray = this.depthOptions.useFloat32 ? new Float32Array(depthData.data) : new Uint16Array(depthData.data); const vertexPosition = new THREE.Vector3(); for (let i = 0; i < geometry.attributes.position.count; ++i) { const u = geometry.attributes.uv.array[2 * i]; const v = geometry.attributes.uv.array[2 * i + 1]; // Grabs the nearest for now. const depthX = Math.round(clamp(u * width, 0, width - 1)); const depthY = Math.round(clamp((1.0 - v) * height, 0, height - 1)); const rawDepth = depthArray[depthY * width + depthX]; let depth = depthData.rawValueToMeters * rawDepth; // Finds global min/max; min and max are tracked independently so that both // update even for a monotonically decreasing scan. if (depth > 0) { if (depth < this.minDepth) { this.minDepth = depth; } if (depth > this.maxDepth) { this.maxDepth = depth; } }
// This hole-patching heuristic is not a correct algorithm, but it works surprisingly well in practice.
// Per-row maximum may work better but haven't tried here. // A proper local maximum takes another pass. if (depth == 0 && this.options.patchHoles) { depth = this.maxDepthPrev; } if (this.options.patchHolesUpper && v > 0.9) { depth = this.minDepthPrev; } vertexPosition.set(2.0 * (u - 0.5), 2.0 * (v - 0.5), -1); // This relates to camera.near vertexPosition.applyMatrix4(this.projectionMatrixInverse); vertexPosition.multiplyScalar(-depth / vertexPosition.z); geometry.attributes.position.array[3 * i + 0] = vertexPosition.x; geometry.attributes.position.array[3 * i + 1] = vertexPosition.y; geometry.attributes.position.array[3 * i + 2] = vertexPosition.z; } } /** * Optimizes collider updates to run periodically based on the specified FPS. */ updateColliderIfNeeded() { const timeSinceLastUpdate = performance.now() - this.lastColliderUpdateTime; if (this.RAPIER && timeSinceLastUpdate > 1000 / this.colliderUpdateFps) { this.getWorldPosition(this.worldPosition); this.getWorldQuaternion(this.worldQuaternion); this.rigidBody.setTranslation(this.worldPosition, false); this.rigidBody.setRotation(this.worldQuaternion, false); const geometry = this.downsampledGeometry ? this.downsampledGeometry : this.geometry; const vertices = geometry.attributes.position.array; const indices = geometry.getIndex().array; // Changing the density does not fix the issue. const shape = this.RAPIER.ColliderDesc.trimesh(vertices, indices).setDensity(1.0); // const convextHull = this.RAPIER.ColliderDesc.convexHull(vertices); if (this.options.useDualCollider) { this.colliderId = (this.colliderId + 1) % 2; this.blendedWorld.removeCollider(this.colliders[this.colliderId], false); this.colliders[this.colliderId] = this.blendedWorld.createCollider(shape, this.rigidBody); } else { const newCollider = this.blendedWorld.createCollider(shape, this.rigidBody); this.blendedWorld.removeCollider(this.collider, /*wakeUp=*/ false); this.collider = newCollider; } this.lastColliderUpdateTime = performance.now(); } } initRapierPhysics(RAPIER, blendedWorld) { this.getWorldPosition(this.worldPosition); this.getWorldQuaternion(this.worldQuaternion); const desc = RAPIER.RigidBodyDesc.fixed() .setTranslation(this.worldPosition.x, this.worldPosition.y, this.worldPosition.z) .setRotation(this.worldQuaternion); this.rigidBody = blendedWorld.createRigidBody(desc); const vertices = this.geometry.attributes.position.array; const indices = this.geometry.getIndex().array; const shape = RAPIER.ColliderDesc.trimesh(vertices, indices); if (this.options.useDualCollider) { this.colliders = []; this.colliders.push(blendedWorld.createCollider(shape, this.rigidBody), blendedWorld.createCollider(shape, this.rigidBody)); this.colliderId = 0; } else { this.collider = blendedWorld.createCollider(shape, this.rigidBody); } this.RAPIER = RAPIER; this.blendedWorld = blendedWorld; this.lastColliderUpdateTime = performance.now(); } /** * Customizes raycasting to compute normals for intersections. * @param raycaster - The raycaster object. * @param intersects - Array to store intersections. * @returns - True if intersections are found. */ raycast(raycaster, intersects) { const intersections = []; if (this.downsampledMesh) { this.downsampledMesh.raycast(raycaster, intersections); } else { super.raycast(raycaster, intersections); } intersections.forEach((intersect) => { intersect.object = this; }); if (!this.updateVertexNormals) { // Use the face normals instead of attribute normals. 
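      // (The plane's baked attribute normals go stale once the vertices are
      // displaced by depth, so fall back to the raycaster's per-face normals.)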
intersections.forEach((intersect) => { if (intersect.normal && intersect.face) { intersect.normal.copy(intersect.face.normal); } }); } intersects.push(...intersections); return true; } getColliderFromHandle(handle) { if (this.collider?.handle == handle) { return this.collider; } for (const collider of this.colliders) { if (collider?.handle == handle) { return collider; } } return undefined; } } class DepthMeshOptions { constructor() { this.enabled = false; this.updateVertexNormals = false; this.showDebugTexture = false; this.useDepthTexture = false; this.renderShadow = false; this.shadowOpacity = 0.25; this.patchHoles = false; this.patchHolesUpper = false; // Opacity of the debug material. this.opacity = 1.0; this.useDualCollider = false; // Use downsampled geometry for raycast and collisions this.useDownsampledGeometry = true; // Whether to always update the full resolution geometry. this.updateFullResolutionGeometry = false; this.colliderUpdateFps = 5; } } class DepthOptions { constructor(options) { this.debugging = false; this.enabled = false; this.depthMesh = new DepthMeshOptions(); this.depthTexture = { enabled: false, constantKernel: false, applyGaussianBlur: false, applyKawaseBlur: false, }; // Occlusion pass. this.occlusion = { enabled: false }; this.useFloat32 = true; this.depthTypeRequest = ['raw']; this.matchDepthView = true; deepMerge(this, options); } } const xrDepthMeshOptions = deepFreeze(new DepthOptions({ enabled: true, depthMesh: { enabled: true, updateVertexNormals: false, showDebugTexture: false, useDepthTexture: false, renderShadow: false, shadowOpacity: 0.25, patchHoles: true, // Use downsampled geometry for raycast and collisions useDownsampledGeometry: true, // Whether to always update the full resolution geometry. updateFullResolutionGeometry: false, colliderUpdateFps: 5, }, })); const xrDepthMeshVisualizationOptions = deepFreeze(new DepthOptions({ enabled: true, depthMesh: { enabled: true, updateVertexNormals: true, showDebugTexture: true, useDepthTexture: true, renderShadow: false, shadowOpacity: 0.25, patchHoles: true, opacity: 0.1, // Use downsampled geometry for raycast and collisions useDownsampledGeometry: true, // Whether to always update the full resolution geometry. updateFullResolutionGeometry: true, colliderUpdateFps: 5, }, depthTexture: { enabled: true, constantKernel: true, applyGaussianBlur: true, applyKawaseBlur: true, }, })); const xrDepthMeshPhysicsOptions = deepFreeze(new DepthOptions({ enabled: true, depthMesh: { enabled: true, updateVertexNormals: false, showDebugTexture: false, useDepthTexture: false, renderShadow: true, shadowOpacity: 0.25, patchHoles: true, patchHolesUpper: true, useDualCollider: false, // Use downsampled geometry for raycast and collisions useDownsampledGeometry: true, // Whether to always update the full resolution geometry. 
updateFullResolutionGeometry: false, colliderUpdateFps: 5, }, })); class DepthTextures { constructor(options) { this.options = options; this.float32Arrays = []; this.uint8Arrays = []; this.dataTextures = []; this.nativeTextures = []; this.depthData = []; } createDataDepthTextures(depthData, viewId) { if (this.dataTextures[viewId]) { this.dataTextures[viewId].dispose(); } if (this.options.useFloat32) { const typedArray = new Float32Array(depthData.width * depthData.height); const format = THREE.RedFormat; const type = THREE.FloatType; this.float32Arrays[viewId] = typedArray; this.dataTextures[viewId] = new THREE.DataTexture(typedArray, depthData.width, depthData.height, format, type); } else { const typedArray = new Uint8Array(depthData.width * depthData.height * 2); const format = THREE.RGFormat; const type = THREE.UnsignedByteType; this.uint8Arrays[viewId] = typedArray; this.dataTextures[viewId] = new THREE.DataTexture(typedArray, depthData.width, depthData.height, format, type); } } updateData(depthData, viewId) { if (this.dataTextures.length < viewId + 1 || this.dataTextures[viewId].image.width !== depthData.width || this.dataTextures[viewId].image.height !== depthData.height) { this.createDataDepthTextures(depthData, viewId); } if (this.options.useFloat32) { this.float32Arrays[viewId].set(new Float32Array(depthData.data)); } else { this.uint8Arrays[viewId].set(new Uint8Array(depthData.data)); } this.dataTextures[viewId].needsUpdate = true; this.depthData[viewId] = depthData; } updateNativeTexture(depthData, renderer, viewId) { if (this.dataTextures.length < viewId + 1) { this.nativeTextures[viewId] = new THREE.ExternalTexture(depthData.texture); } else { this.nativeTextures[viewId].sourceTexture = depthData.texture; } // fixed in newer revision of three const textureProperties = renderer.properties.get(this.nativeTextures[viewId]); textureProperties.__webglTexture = depthData.texture; textureProperties.__version = 1; } get(viewId) { if (this.dataTextures.length > 0) { return this.dataTextures[viewId]; } return this.nativeTextures[viewId]; } } const KawaseBlurShader = { vertexShader: /* glsl */ ` uniform float uBlurSize; uniform vec2 uTexelSize; varying vec2 vTexCoord; varying vec4 uv1; varying vec4 uv2; varying vec4 uv3; varying vec4 uv4; void vertCopy(vec2 uv) {} void vertUpsample(vec2 uv) { vec2 halfPixel = uTexelSize * 0.5; vec2 offset = vec2(uBlurSize); uv1.xy = uv + vec2(-halfPixel.x * 2.0, 0.0) * offset; uv1.zw = uv + vec2(-halfPixel.x, halfPixel.y) * offset; uv2.xy = uv + vec2(0.0, halfPixel.y * 2.0) * offset; uv2.zw = uv + halfPixel * offset; uv3.xy = uv + vec2(halfPixel.x * 2.0, 0.0) * offset; uv3.zw = uv + vec2(halfPixel.x, -halfPixel.y) * offset; uv4.xy = uv + vec2(0.0, -halfPixel.y * 2.0) * offset; uv4.zw = uv - halfPixel * offset; } void vertDownsample(vec2 uv) { vec2 halfPixel = uTexelSize * 0.5; vec2 offset = vec2(uBlurSize); uv1.xy = uv - halfPixel * offset; uv1.zw = uv + halfPixel * offset; uv2.xy = uv - vec2(halfPixel.x, -halfPixel.y) * offset; uv2.zw = uv + vec2(halfPixel.x, -halfPixel.y) * offset; } void main() { vTexCoord = uv; gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0); if (MODE == 0) { vertCopy(uv); } else if (MODE == 1) { vertDownsample(uv); } else { vertUpsample(uv); } } `, fragmentShader: /* glsl */ ` uniform sampler2D tDiffuse; varying vec2 vTexCoord; varying vec4 uv1; varying vec4 uv2; varying vec4 uv3; varying vec4 uv4; vec2 getUV0() { return vTexCoord; } vec4 fragCopy() { return texture2D(tDiffuse, getUV0()); } vec4 
fragDownsample() { vec4 sum = texture2D(tDiffuse, getUV0()) * 4.0; sum += texture2D(tDiffuse, uv1.xy); sum += texture2D(tDiffuse, uv1.zw); sum += texture2D(tDiffuse, uv2.xy); sum += texture2D(tDiffuse, uv2.zw); return sum * 0.125; } vec4 fragUpsample() { vec4 sum = texture2D(tDiffuse, uv1.xy); sum += texture2D(tDiffuse, uv1.zw) * 2.0; sum += texture2D(tDiffuse, uv2.xy); sum += texture2D(tDiffuse, uv2.zw) * 2.0; sum += texture2D(tDiffuse, uv3.xy); sum += texture2D(tDiffuse, uv3.zw) * 2.0; sum += texture2D(tDiffuse, uv4.xy); sum += texture2D(tDiffuse, uv4.zw) * 2.0; return sum * 0.0833; } void main(void) { if (MODE == 0) { gl_FragColor = fragCopy(); } else if (MODE == 1) { gl_FragColor = fragDownsample(); } else { gl_FragColor = fragUpsample(); } } `, }; // Postprocessing shader which applies occlusion onto the entire rendered frame. const OcclusionShader = { vertexShader: /* glsl */ ` varying vec2 vTexCoord; void main() { vTexCoord = uv; gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 ); } `, fragmentShader: /* glsl */ ` precision mediump float; uniform sampler2D tDiffuse; uniform sampler2D tOcclusionMap; varying vec2 vTexCoord; void main(void) { vec4 diffuse = texture2D(tDiffuse, vTexCoord); vec4 occlusion = texture2D(tOcclusionMap, vTexCoord); float occlusionValue = occlusion.r / max(0.0001, occlusion.g); occlusionValue = clamp(occlusionValue, 0.0, 1.0); gl_FragColor = occlusionValue * diffuse; gl_FragColor = sRGBTransferOETF( gl_FragColor ); } `, }; // Postprocessing to convert a render texture + depth map into an occlusion map. const OcclusionMapShader = { vertexShader: /* glsl */ ` varying vec2 vTexCoord; void main() { vTexCoord = uv; gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 ); } `, fragmentShader: /* glsl */ ` #include precision mediump float; uniform sampler2D uDepthTexture; uniform mat4 uUvTransform; uniform float uRawValueToMeters; uniform float uAlpha; uniform float uViewId; uniform bool uFloatDepth; uniform sampler2D tDiffuse; uniform sampler2D tDepth; uniform float cameraNear; uniform float cameraFar; varying vec2 vTexCoord; float DepthGetMeters(in sampler2D depth_texture, in vec2 depth_uv) { // Depth is packed into the luminance and alpha components of its texture. // The texture is in a normalized format, storing raw values that need to be // converted to meters. vec2 packedDepthAndVisibility = texture2D(depth_texture, depth_uv).rg; if (uFloatDepth) { return packedDepthAndVisibility.r * uRawValueToMeters; } return dot(packedDepthAndVisibility, vec2(255.0, 256.0 * 255.0)) * uRawValueToMeters; } float readOrthographicDepth( sampler2D depthSampler, vec2 coord ) { float fragCoordZ = texture2D( depthSampler, coord ).x; // See https://github.com/mrdoob/three.js/issues/23072. 
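    // Recover view-space Z from the depth buffer (log-depth aware), then
    // normalize it to a [0, 1] orthographic depth between the near/far planes.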
#ifdef USE_LOGDEPTHBUF float viewZ = 1.0 - exp2(fragCoordZ * log(cameraFar + 1.0) / log(2.0)); #else float viewZ = perspectiveDepthToViewZ(fragCoordZ, cameraNear, cameraFar); #endif return viewZToOrthographicDepth( viewZ, cameraNear, cameraFar ); } void main(void) { vec4 texCoord = vec4(vTexCoord, 0, 1); vec2 uv = texCoord.xy; uv.y = 1.0 - uv.y; vec4 diffuse = texture2D( tDiffuse, texCoord.xy ); highp float real_depth = DepthGetMeters(uDepthTexture, uv); highp float virtual_depth = (readOrthographicDepth(tDepth, texCoord.xy ) * (cameraFar - cameraNear) + cameraNear); gl_FragColor = vec4(step(virtual_depth, real_depth), step(0.001, diffuse.a), 0.0, 0.0); } `, }; class OcclusionMapMeshMaterial extends THREE.MeshBasicMaterial { constructor(camera, useFloatDepth) { super(); this.uniforms = { uDepthTexture: { value: null }, uDepthTextureArray: { value: null }, uViewId: { value: 0.0 }, uIsTextureArray: { value: 0.0 }, uRawValueToMeters: { value: 8.0 / 65536.0 }, cameraFar: { value: camera.far }, cameraNear: { value: camera.near }, uFloatDepth: { value: useFloatDepth }, // Used for interpreting Quest 3 depth. uDepthNear: { value: 0 }, }; this.onBeforeCompile = (shader) => { Object.assign(shader.uniforms, this.uniforms); this.uniforms = shader.uniforms; shader.vertexShader = shader.vertexShader .replace('#include ', [ 'varying vec2 vTexCoord;', 'varying float vVirtualDepth;', '#include ', ].join('\n')) .replace('#include ', [ '#include ', 'vec4 view_position = modelViewMatrix * vec4( position, 1.0 );', 'vVirtualDepth = -view_position.z;', 'gl_Position = gl_Position / gl_Position.w;', 'vTexCoord = 0.5 + 0.5 * gl_Position.xy;', ].join('\n')); shader.fragmentShader = shader.fragmentShader .replace('uniform vec3 diffuse;', [ 'uniform vec3 diffuse;', 'uniform sampler2D uDepthTexture;', 'uniform sampler2DArray uDepthTextureArray;', 'uniform float uRawValueToMeters;', 'uniform float cameraNear;', 'uniform float cameraFar;', 'uniform bool uFloatDepth;', 'uniform bool uIsTextureArray;', 'uniform float uDepthNear;', 'uniform int uViewId;', 'varying vec2 vTexCoord;', 'varying float vVirtualDepth;', ].join('\n')) .replace('#include ', [ '#include ', ` float DepthGetMeters(in sampler2D depth_texture, in vec2 depth_uv) { // Depth is packed into the luminance and alpha components of its texture. // The texture is in a normalized format, storing raw values that need to be // converted to meters. vec2 packedDepthAndVisibility = texture2D(depth_texture, depth_uv).rg; if (uFloatDepth) { return packedDepthAndVisibility.r * uRawValueToMeters; } return dot(packedDepthAndVisibility, vec2(255.0, 256.0 * 255.0)) * uRawValueToMeters; } float DepthArrayGetMeters(in sampler2DArray depth_texture, in vec2 depth_uv) { float textureValue = texture(depth_texture, vec3(depth_uv.x, depth_uv.y, uViewId)).r; return uRawValueToMeters * uDepthNear / (1.0 - textureValue); } `, ].join('\n')) .replace('#include ', [ '#include ', 'vec4 texCoord = vec4(vTexCoord, 0, 1);', 'vec2 uv = vec2(texCoord.x, uIsTextureArray?texCoord.y:(1.0 - texCoord.y));', 'highp float real_depth = uIsTextureArray ? 
DepthArrayGetMeters(uDepthTextureArray, uv) : DepthGetMeters(uDepthTexture, uv);', 'gl_FragColor = vec4(step(vVirtualDepth, real_depth), 1.0, 0.0, 1.0);', ].join('\n')); }; } } var KawaseBlurMode; (function (KawaseBlurMode) { KawaseBlurMode[KawaseBlurMode["COPY"] = 0] = "COPY"; KawaseBlurMode[KawaseBlurMode["DOWN"] = 1] = "DOWN"; KawaseBlurMode[KawaseBlurMode["UP"] = 2] = "UP"; })(KawaseBlurMode || (KawaseBlurMode = {})); /** * Occlusion postprocessing shader pass. * This is used to generate an occlusion map. * There are two modes: * Mode A: Generate an occlusion map for individual materials to use. * Mode B: Given a rendered frame, run as a postprocessing pass, occluding all * items in the frame. The steps are * 1. Compute an occlusion map between the real and virtual depth. * 2. Blur the occlusion map using Kawase blur. * 3. (Mode B only) Apply the occlusion map to the rendered frame. */ class OcclusionPass extends Pass { constructor(scene, camera, useFloatDepth = true, renderToScreen = false, occludableItemsLayer = OCCLUDABLE_ITEMS_LAYER) { super(); this.scene = scene; this.camera = camera; this.renderToScreen = renderToScreen; this.occludableItemsLayer = occludableItemsLayer; this.depthTextures = []; this.depthNear = []; this.occlusionMeshMaterial = new OcclusionMapMeshMaterial(camera, useFloatDepth); this.occlusionMapUniforms = { uDepthTexture: { value: null }, uDepthTextureArray: { value: null }, uViewId: { value: 0.0 }, uIsTextureArray: { value: 0.0 }, uUvTransform: { value: new THREE.Matrix4() }, uRawValueToMeters: { value: 8.0 / 65536.0 }, uAlpha: { value: 0.75 }, tDiffuse: { value: null }, tDepth: { value: null }, uFloatDepth: { value: useFloatDepth }, cameraFar: { value: camera.far }, cameraNear: { value: camera.near }, }; this.occlusionMapQuad = new FullScreenQuad(new THREE.ShaderMaterial({ name: 'OcclusionMapShader', uniforms: this.occlusionMapUniforms, vertexShader: OcclusionMapShader.vertexShader, fragmentShader: OcclusionMapShader.fragmentShader, })); this.occlusionMapTexture = new THREE.WebGLRenderTarget(); this.kawaseBlurTargets = [ new THREE.WebGLRenderTarget(), // 1/2 resolution new THREE.WebGLRenderTarget(), // 1/4 resolution new THREE.WebGLRenderTarget(), // 1/8 resolution ]; this.kawaseBlurQuads = [ this.setupKawaseBlur(KawaseBlurMode.DOWN, this.occlusionMapTexture.texture), this.setupKawaseBlur(KawaseBlurMode.DOWN, this.kawaseBlurTargets[0].texture), this.setupKawaseBlur(KawaseBlurMode.DOWN, this.kawaseBlurTargets[1].texture), this.setupKawaseBlur(KawaseBlurMode.UP, this.kawaseBlurTargets[2].texture), this.setupKawaseBlur(KawaseBlurMode.UP, this.kawaseBlurTargets[1].texture), this.setupKawaseBlur(KawaseBlurMode.UP, this.kawaseBlurTargets[0].texture), ]; this.occlusionUniforms = { tDiffuse: { value: null }, tOcclusionMap: { value: this.occlusionMapTexture.texture }, }; this.occlusionQuad = new FullScreenQuad(new THREE.ShaderMaterial({ name: 'OcclusionShader', uniforms: this.occlusionUniforms, vertexShader: OcclusionShader.vertexShader, fragmentShader: OcclusionShader.fragmentShader, })); this.occludableItemsLayer = occludableItemsLayer; } setupKawaseBlur(mode, inputTexture) { const uniforms = { uBlurSize: { value: 7.0 }, uTexelSize: { value: new THREE.Vector2() }, tDiffuse: { value: inputTexture }, }; const kawase1Material = new THREE.ShaderMaterial({ name: 'Kawase', uniforms: uniforms, vertexShader: KawaseBlurShader.vertexShader, fragmentShader: KawaseBlurShader.fragmentShader, defines: { MODE: mode }, }); return new FullScreenQuad(kawase1Material); } 
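  // A hedged usage sketch of this pass (a composer-style render loop is
  // assumed; buffers and depth data come from the host renderer):
  //   const pass = new OcclusionPass(scene, camera);
  //   pass.setDepthTexture(depthTexture, rawValueToMeters, /*viewId=*/ 0, depthNear);
  //   pass.render(renderer, writeBuffer, readBuffer, /*viewId=*/ 0);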
setDepthTexture(depthTexture, rawValueToMeters, viewId, depthNear) { this.depthTextures[viewId] = depthTexture; this.occlusionMapUniforms.uRawValueToMeters.value = rawValueToMeters; this.occlusionMeshMaterial.uniforms.uRawValueToMeters.value = rawValueToMeters; this.depthNear[viewId] = depthNear; depthTexture.needsUpdate = true; } /** * Render the occlusion map. * @param renderer - The three.js renderer. * @param writeBuffer - The buffer to write the final result. * @param readBuffer - The buffer holding the current render and its virtual depth. * @param viewId - The view to render. */ render(renderer, writeBuffer, readBuffer, viewId = 0) { const originalRenderTarget = renderer.getRenderTarget(); const dimensions = new THREE.Vector2(); if (readBuffer == null) { this.renderOcclusionMapFromScene(renderer, dimensions, viewId); } else { this.renderOcclusionMapFromReadBuffer(renderer, readBuffer, dimensions, viewId); } // Blur the occlusion map. this.blurOcclusionMap(renderer, dimensions); // Fuse the rendered image and the occlusion map. this.applyOcclusionMapToRenderedImage(renderer, readBuffer, writeBuffer); renderer.setRenderTarget(originalRenderTarget); } renderOcclusionMapFromScene(renderer, dimensions, viewId) { // Compute our own read buffer. const texture = this.depthTextures[viewId]; const isTextureArray = texture instanceof THREE.ExternalTexture; this.occlusionMeshMaterial.uniforms.uIsTextureArray.value = isTextureArray ? 1.0 : 0; this.occlusionMeshMaterial.uniforms.uViewId.value = viewId; if (isTextureArray) { this.occlusionMeshMaterial.uniforms.uDepthTextureArray.value = texture; this.occlusionMeshMaterial.uniforms.uDepthNear.value = this.depthNear[viewId]; } else { this.occlusionMeshMaterial.uniforms.uDepthTexture.value = texture; } this.scene.overrideMaterial = this.occlusionMeshMaterial; renderer.getDrawingBufferSize(dimensions); this.occlusionMapTexture.setSize(dimensions.x, dimensions.y); const renderTarget = this.occlusionMapTexture; renderer.setRenderTarget(renderTarget); const camera = renderer.xr.getCamera().cameras[viewId] || this.camera; const originalCameraLayers = Array.from(Array(32).keys()).filter((element) => camera.layers.isEnabled(element)); camera.layers.set(this.occludableItemsLayer); renderer.render(this.scene, camera); camera.layers.disableAll(); originalCameraLayers.forEach((element) => { camera.layers.enable(element); }); this.scene.overrideMaterial = null; } renderOcclusionMapFromReadBuffer(renderer, readBuffer, dimensions, viewId) { // Convert the readBuffer into an occlusion map. // Render depth into texture. this.occlusionMapUniforms.tDiffuse.value = readBuffer.texture; this.occlusionMapUniforms.tDepth.value = readBuffer.depthTexture; const texture = this.depthTextures[viewId]; const isTextureArray = texture instanceof THREE.ExternalTexture; this.occlusionMeshMaterial.uniforms.uIsTextureArray.value = isTextureArray ? 1.0 : 0; this.occlusionMeshMaterial.uniforms.uViewId.value = viewId; if (isTextureArray) { this.occlusionMeshMaterial.uniforms.uDepthTextureArray.value = texture; this.occlusionMeshMaterial.uniforms.uDepthNear.value = this.depthNear[viewId]; } else { this.occlusionMeshMaterial.uniforms.uDepthTexture.value = texture; } // First render the occlusion map to an intermediate buffer.
renderer.getDrawingBufferSize(dimensions); this.occlusionMapTexture.setSize(dimensions.x, dimensions.y); renderer.setRenderTarget(this.occlusionMapTexture); this.occlusionMapQuad.render(renderer); } blurOcclusionMap(renderer, dimensions) { for (let i = 0; i < 3; i++) { this.kawaseBlurTargets[i].setSize(dimensions.x / 2 ** i, dimensions.y / 2 ** i); } for (let i = 0; i < 3; i++) { this.kawaseBlurQuads[i].material.uniforms.uTexelSize.value.set(1 / (dimensions.x / 2 ** i), 1 / (dimensions.y / 2 ** i)); this.kawaseBlurQuads[this.kawaseBlurQuads.length - 1 - i] .material.uniforms.uTexelSize.value.set(1 / (dimensions.x / 2 ** (i - 1)), 1 / (dimensions.y / 2 ** (i - 1))); } renderer.setRenderTarget(this.kawaseBlurTargets[0]); this.kawaseBlurQuads[0].render(renderer); renderer.setRenderTarget(this.kawaseBlurTargets[1]); this.kawaseBlurQuads[1].render(renderer); renderer.setRenderTarget(this.kawaseBlurTargets[2]); this.kawaseBlurQuads[2].render(renderer); renderer.setRenderTarget(this.kawaseBlurTargets[1]); this.kawaseBlurQuads[3].render(renderer); renderer.setRenderTarget(this.kawaseBlurTargets[0]); this.kawaseBlurQuads[4].render(renderer); renderer.setRenderTarget(this.occlusionMapTexture); this.kawaseBlurQuads[5].render(renderer); } applyOcclusionMapToRenderedImage(renderer, readBuffer, writeBuffer) { if (readBuffer && (this.renderToScreen || writeBuffer)) { this.occlusionUniforms.tDiffuse.value = readBuffer.texture; renderer.setRenderTarget(writeBuffer && !this.renderToScreen ? writeBuffer : null); this.occlusionQuad.render(renderer); } } dispose() { this.occlusionMeshMaterial.dispose(); this.occlusionMapTexture.dispose(); for (let i = 0; i < this.kawaseBlurQuads.length; i++) { this.kawaseBlurQuads[i].dispose(); } } updateOcclusionMapUniforms(uniforms, renderer) { const camera = renderer.xr.getCamera().cameras[0] || this.camera; uniforms.tOcclusionMap.value = this.occlusionMapTexture.texture; uniforms.uOcclusionClipFromWorld.value .copy(camera.projectionMatrix) .multiply(camera.matrixWorldInverse); } } const DEFAULT_DEPTH_WIDTH = 160; const DEFAULT_DEPTH_HEIGHT = DEFAULT_DEPTH_WIDTH; const clipSpacePosition = new THREE.Vector3(); class Depth { get rawValueToMeters() { if (this.cpuDepthData.length) { return this.cpuDepthData[0].rawValueToMeters; } else if (this.gpuDepthData.length) { return this.gpuDepthData[0].rawValueToMeters; } return 0; } /** * Depth is a lightweight manager based on three.js to simplify prototyping * with Depth in WebXR. */ constructor() { this.enabled = false; this.view = []; this.cpuDepthData = []; this.gpuDepthData = []; this.depthArray = []; this.options = new DepthOptions(); this.width = DEFAULT_DEPTH_WIDTH; this.height = DEFAULT_DEPTH_HEIGHT; this.occludableShaders = new Set(); // Whether we're counting the number of depth clients. this.depthClientsInitialized = false; this.depthClients = new Set(); this.depthProjectionMatrices = []; this.depthProjectionInverseMatrices = []; this.depthViewMatrices = []; this.depthViewProjectionMatrices = []; this.depthCameraPositions = []; this.depthCameraRotations = []; if (Depth.instance) { return Depth.instance; } Depth.instance = this; } /** * Initialize Depth manager.
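 * @param camera - Default THREE.Camera, used when no XR camera is active.
 * @param options - The DepthOptions controlling textures, mesh, and occlusion.
 * @param renderer - The active THREE.WebGLRenderer.
 * @param registry - Registry that the depth textures and depth mesh are
 *     registered with.
 * @param scene - Scene that the depth mesh is added to.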
const DEFAULT_DEPTH_WIDTH = 160;
const DEFAULT_DEPTH_HEIGHT = DEFAULT_DEPTH_WIDTH;
const clipSpacePosition = new THREE.Vector3();

class Depth {
  get rawValueToMeters() {
    if (this.cpuDepthData.length) {
      return this.cpuDepthData[0].rawValueToMeters;
    } else if (this.gpuDepthData.length) {
      return this.gpuDepthData[0].rawValueToMeters;
    }
    return 0;
  }

  /**
   * Depth is a lightweight manager based on three.js to simplify prototyping
   * with depth in WebXR.
   */
  constructor() {
    this.enabled = false;
    this.view = [];
    this.cpuDepthData = [];
    this.gpuDepthData = [];
    this.depthArray = [];
    this.options = new DepthOptions();
    this.width = DEFAULT_DEPTH_WIDTH;
    this.height = DEFAULT_DEPTH_HEIGHT;
    this.occludableShaders = new Set();
    // Whether we're counting the number of depth clients.
    this.depthClientsInitialized = false;
    this.depthClients = new Set();
    this.depthProjectionMatrices = [];
    this.depthProjectionInverseMatrices = [];
    this.depthViewMatrices = [];
    this.depthViewProjectionMatrices = [];
    this.depthCameraPositions = [];
    this.depthCameraRotations = [];
    if (Depth.instance) {
      return Depth.instance;
    }
    Depth.instance = this;
  }

  /**
   * Initializes the Depth manager.
   */
  init(camera, options, renderer, registry, scene) {
    this.camera = camera;
    this.options = options;
    this.renderer = renderer;
    this.enabled = options.enabled;
    if (this.options.depthTexture.enabled) {
      this.depthTextures = new DepthTextures(options);
      registry.register(this.depthTextures);
    }
    if (this.options.depthMesh.enabled) {
      this.depthMesh = new DepthMesh(options, this.width, this.height, this.depthTextures);
      registry.register(this.depthMesh);
      if (this.options.depthMesh.renderShadow) {
        this.renderer.shadowMap.enabled = true;
        this.renderer.shadowMap.type = THREE.PCFShadowMap;
      }
      scene.add(this.depthMesh);
    }
    if (this.options.occlusion.enabled) {
      this.occlusionPass = new OcclusionPass(scene, camera);
    }
  }

  /**
   * Retrieves the depth in meters at normalized coordinates (u, v).
   * @param u - Normalized horizontal coordinate.
   * @param v - Normalized vertical coordinate.
   * @returns Depth value in meters at the specified coordinates.
   */
  getDepth(u, v) {
    if (!this.depthArray[0]) return 0.0;
    const depthX = Math.round(clamp(u * this.width, 0, this.width - 1));
    const depthY = Math.round(clamp((1.0 - v) * this.height, 0, this.height - 1));
    const rawDepth = this.depthArray[0][depthY * this.width + depthX];
    return this.rawValueToMeters * rawDepth;
  }

  /**
   * Projects the given world position to the depth camera's clip space and
   * then back into the depth camera's view space using the sensed depth.
   * @param position - The world position to project.
   * @param target - Optional vector in which to store the result.
   * @returns The depth camera view space position.
   */
  getProjectedDepthViewPositionFromWorldPosition(position, target = new THREE.Vector3()) {
    clipSpacePosition
        .copy(position)
        .applyMatrix4(this.depthViewMatrices[0])
        .applyMatrix4(this.depthProjectionMatrices[0]);
    const u = 0.5 * (clipSpacePosition.x + 1.0);
    const v = 0.5 * (clipSpacePosition.y + 1.0);
    const depth = this.getDepth(u, v);
    target.set(2.0 * (u - 0.5), 2.0 * (v - 0.5), -1);
    target.applyMatrix4(this.depthProjectionInverseMatrices[0]);
    target.multiplyScalar((target.z - depth) / target.z);
    return target;
  }

  /**
   * Retrieves the view-space vertex at normalized coordinates (u, v).
   * @param u - Normalized horizontal coordinate.
   * @param v - Normalized vertical coordinate.
   * @returns Vertex at (u, v), or null if no depth data is available.
   */
  getVertex(u, v) {
    if (!this.depthArray[0]) return null;
    const depthX = Math.round(clamp(u * this.width, 0, this.width - 1));
    const depthY = Math.round(clamp((1.0 - v) * this.height, 0, this.height - 1));
    const rawDepth = this.depthArray[0][depthY * this.width + depthX];
    const depth = this.rawValueToMeters * rawDepth;
    const vertexPosition = new THREE.Vector3(2.0 * (u - 0.5), 2.0 * (v - 0.5), -1);
    vertexPosition.applyMatrix4(this.depthProjectionInverseMatrices[0]);
    vertexPosition.multiplyScalar(-depth / vertexPosition.z);
    return vertexPosition;
  }
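  /*
   * A hedged usage note: getDepth and getVertex sample view 0 of the most
   * recent depth frame. getVertex returns a point in the depth camera's view
   * space; to obtain a world-space point, apply the inverse of
   * depthViewMatrices[0], e.g.:
   *
   *   const vertex = Depth.instance.getVertex(0.5, 0.5);
   *   if (vertex) {
   *     vertex.applyMatrix4(
   *         Depth.instance.depthViewMatrices[0].clone().invert());
   *   }
   */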
  updateDepthMatrices(depthData, viewId) {
    // Populate depth view and projection matrices, growing the per-view
    // arrays as needed.
    while (viewId >= this.depthViewMatrices.length) {
      this.depthViewMatrices.push(new THREE.Matrix4());
      this.depthViewProjectionMatrices.push(new THREE.Matrix4());
      this.depthProjectionMatrices.push(new THREE.Matrix4());
      this.depthProjectionInverseMatrices.push(new THREE.Matrix4());
      this.depthCameraPositions.push(new THREE.Vector3());
      this.depthCameraRotations.push(new THREE.Quaternion());
    }
    if (depthData.projectionMatrix && depthData.transform) {
      this.depthProjectionMatrices[viewId].fromArray(depthData.projectionMatrix);
      this.depthViewMatrices[viewId].fromArray(depthData.transform.inverse.matrix);
      this.depthCameraPositions[viewId].set(
          depthData.transform.position.x,
          depthData.transform.position.y,
          depthData.transform.position.z);
      this.depthCameraRotations[viewId].set(
          depthData.transform.orientation.x,
          depthData.transform.orientation.y,
          depthData.transform.orientation.z,
          depthData.transform.orientation.w);
    } else {
      // Fall back to the XR camera for this view, or the main camera.
      const camera = this.renderer.xr?.getCamera()?.cameras?.[viewId] ?? this.camera;
      this.depthProjectionMatrices[viewId].copy(camera.projectionMatrix);
      this.depthViewMatrices[viewId].copy(camera.matrixWorldInverse);
      this.depthCameraPositions[viewId].copy(camera.position);
      this.depthCameraRotations[viewId].copy(camera.quaternion);
    }
    this.depthProjectionInverseMatrices[viewId]
        .copy(this.depthProjectionMatrices[viewId])
        .invert();
    this.depthViewProjectionMatrices[viewId].multiplyMatrices(
        this.depthProjectionMatrices[viewId], this.depthViewMatrices[viewId]);
  }

  updateCPUDepthData(depthData, viewId = 0) {
    this.cpuDepthData[viewId] = depthData;
    this.updateDepthMatrices(depthData, viewId);
    // Updates the depth array.
    if (this.depthArray[viewId] == null) {
      this.depthArray[viewId] = this.options.useFloat32 ?
          new Float32Array(depthData.data) :
          new Uint16Array(depthData.data);
      this.width = depthData.width;
      this.height = depthData.height;
    } else {
      // Copies the data from an ArrayBuffer to the existing TypedArray.
      this.depthArray[viewId].set(this.options.useFloat32 ?
          new Float32Array(depthData.data) :
          new Uint16Array(depthData.data));
    }
    // Updates the depth texture.
    if (this.options.depthTexture.enabled && this.depthTextures) {
      this.depthTextures.updateData(depthData, viewId);
    }
    if (this.options.depthMesh.enabled && this.depthMesh && viewId == 0) {
      this.depthMesh.updateDepth(depthData, this.depthProjectionInverseMatrices[0]);
      this.depthMesh.position.copy(this.depthCameraPositions[0]);
      this.depthMesh.quaternion.copy(this.depthCameraRotations[0]);
    }
  }
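  /*
   * A note on units (hedged, per the WebXR depth-sensing model): depth
   * arrives as raw values plus a rawValueToMeters scale. With a Uint16Array
   * storing millimeters, rawValueToMeters is 0.001, so a raw sample of 1500
   * decodes to 1500 * 0.001 = 1.5 m, which is exactly the multiplication
   * getDepth performs above.
   */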
  updateGPUDepthData(depthData, viewId = 0) {
    this.gpuDepthData[viewId] = depthData;
    this.updateDepthMatrices(depthData, viewId);
    // For now, assume that we need CPU depth only if the depth mesh is
    // enabled. In the future, add a separate option.
    const needCpuDepth = this.options.depthMesh.enabled;
    // Reads the GPU depth back into a CPU-accessible buffer.
    const cpuDepth = needCpuDepth && this.depthMesh ?
        this.depthMesh.convertGPUToGPU(depthData) :
        null;
    if (cpuDepth) {
      if (this.depthArray[viewId] == null) {
        this.depthArray[viewId] = this.options.useFloat32 ?
            new Float32Array(cpuDepth.data) :
            new Uint16Array(cpuDepth.data);
        this.width = cpuDepth.width;
        this.height = cpuDepth.height;
      } else {
        // Copies the data from an ArrayBuffer to the existing TypedArray.
        this.depthArray[viewId].set(this.options.useFloat32 ?
            new Float32Array(cpuDepth.data) :
            new Uint16Array(cpuDepth.data));
      }
    }
    // Updates the depth texture.
    if (this.options.depthTexture.enabled && this.depthTextures) {
      this.depthTextures.updateNativeTexture(depthData, this.renderer, viewId);
    }
    if (this.options.depthMesh.enabled && this.depthMesh && viewId == 0) {
      if (cpuDepth) {
        this.depthMesh.updateDepth(cpuDepth, this.depthProjectionInverseMatrices[0]);
      } else {
        this.depthMesh.updateGPUDepth(depthData, this.depthProjectionInverseMatrices[0]);
      }
      this.depthMesh.position.copy(this.depthCameraPositions[0]);
      this.depthMesh.quaternion.copy(this.depthCameraRotations[0]);
    }
  }

  getTexture(viewId) {
    if (!this.options.depthTexture.enabled) return undefined;
    return this.depthTextures?.get(viewId);
  }

  update(frame) {
    if (!this.options.enabled) return;
    if (frame) {
      this.updateLocalDepth(frame);
    }
    if (this.options.occlusion.enabled) {
      this.renderOcclusionPass();
    }
  }

  updateLocalDepth(frame) {
    const session = frame.session;
    const binding = this.renderer.xr.getBinding();
    // Enable or disable depth sensing based on the number of clients.
    const pausingDepthSupported = session.depthActive !== undefined;
    if (pausingDepthSupported && this.depthClientsInitialized) {
      const needsDepth = this.depthClients.size > 0;
      if (session.depthActive && !needsDepth) {
        session.pauseDepthSensing?.();
      } else if (!session.depthActive && needsDepth) {
        session.resumeDepthSensing?.();
      }
      if (this.depthClients.size == 0) {
        return;
      }
    }
    const xrRefSpace = this.renderer.xr.getReferenceSpace();
    if (xrRefSpace) {
      const pose = frame.getViewerPose(xrRefSpace);
      if (pose) {
        for (let viewId = 0; viewId < pose.views.length; ++viewId) {
          const view = pose.views[viewId];
          this.view[viewId] = view;
          if (session.depthUsage === 'gpu-optimized') {
            const depthData = binding.getDepthInformation(view);
            if (!depthData) {
              return;
            }
            this.updateGPUDepthData(depthData, viewId);
          } else {
            const depthData = frame.getDepthInformation(view);
            if (!depthData) {
              return;
            }
            this.updateCPUDepthData(depthData, viewId);
          }
        }
      } else {
        console.error('Pose unavailable in the current frame.');
      }
    }
  }

  renderOcclusionPass() {
    const leftDepthTexture = this.getTexture(0);
    if (leftDepthTexture) {
      this.occlusionPass.setDepthTexture(
          leftDepthTexture, this.rawValueToMeters, 0,
          this.gpuDepthData[0]?.depthNear);
    }
    const rightDepthTexture = this.getTexture(1);
    if (rightDepthTexture) {
      this.occlusionPass.setDepthTexture(
          rightDepthTexture, this.rawValueToMeters, 1,
          this.gpuDepthData[1]?.depthNear);
    }
    // Temporarily disable XR presentation while running the full-screen
    // passes, then restore it.
    const xrIsPresenting = this.renderer.xr.isPresenting;
    this.renderer.xr.isPresenting = false;
    this.occlusionPass.render(this.renderer, undefined, undefined, 0);
    this.renderer.xr.isPresenting = xrIsPresenting;
    for (const shader of this.occludableShaders) {
      this.occlusionPass.updateOcclusionMapUniforms(shader.uniforms, this.renderer);
    }
  }

  debugLog() {
    const arrayBuffer = this.cpuDepthData[0].data;
    const uint8Array = new Uint8Array(arrayBuffer);
    // Convert the Uint8Array to a string where each character represents a
    // byte.
    const binaryString = Array.from(uint8Array, (byte) => String.fromCharCode(byte)).join('');
    // Convert the binary string to base64.
    const data_str = btoa(binaryString);
    console.log(data_str);
  }

  resumeDepth(client) {
    this.depthClientsInitialized = true;
    this.depthClients.add(client);
  }

  pauseDepth(client) {
    this.depthClientsInitialized = true;
    this.depthClients.delete(client);
  }
}
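// A hedged usage sketch (not part of the SDK): resumeDepth and pauseDepth
// reference-count depth clients so updateLocalDepth can pause the sensor
// when nobody needs it. Any unique object works as a client key; a feature
// typically registers itself on activation and deregisters on deactivation.
function exampleToggleDepthClient(client, needed) {
  if (needed) {
    Depth.instance.resumeDepth(client);
  } else {
    Depth.instance.pauseDepth(client);
  }
}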
const aspectRatios = {
  depth: 1.0,
  RGB: 4 / 3,
};

/**
 * Maps a UV coordinate from RGB space to the render camera's clip space,
 * applying Brown-Conrady distortion and affine transformations based on
 * aspect ratios. If the simulator camera is used, only an aspect-ratio
 * adjustment is applied.
 *
 * @param rgbUv - The RGB UV coordinate, e.g., \{ u: 0.5, v: 0.5 \}.
 * @param xrDeviceCamera - The device camera instance.
 * @returns The transformed UV coordinate in the render camera clip space, or
 *     null if inputs are invalid.
 */
function transformRgbToRenderCameraClip(rgbUv, xrDeviceCamera) {
  if (xrDeviceCamera?.simulatorCamera) {
    // The simulator camera crops the viewport image to match its aspect
    // ratio, while the depth map covers the entire viewport, so we adjust
    // for this.
    const viewportAspect = window.innerWidth / window.innerHeight;
    const cameraAspect =
        xrDeviceCamera.simulatorCamera.width / xrDeviceCamera.simulatorCamera.height;
    let {u, v} = rgbUv;
    if (viewportAspect > cameraAspect) {
      // The camera image is a centered vertical slice of the full render.
      const relativeWidth = cameraAspect / viewportAspect;
      u = u * relativeWidth + (1.0 - relativeWidth) / 2.0;
    } else {
      // The camera image is a centered horizontal slice of the full render.
      const relativeHeight = viewportAspect / cameraAspect;
      v = v * relativeHeight + (1.0 - relativeHeight) / 2.0;
    }
    return new THREE.Vector2(2 * u - 1, 2 * v - 1);
  }
  if (!aspectRatios || !aspectRatios.depth || !aspectRatios.RGB) {
    console.error('Invalid aspect ratios provided.');
    return null;
  }
  const params = xrDeviceCamera?.rgbToDepthParams ?? DEFAULT_RGB_TO_DEPTH_PARAMS;
  // Determine the relative scaling required to fit the overlay within the
  // base.
  let relativeScaleX, relativeScaleY;
  if (aspectRatios.depth > aspectRatios.RGB) {
    // Base is wider than overlay ("letterboxing").
    relativeScaleY = 1.0;
    relativeScaleX = aspectRatios.RGB / aspectRatios.depth;
  } else {
    // Base is narrower than overlay ("pillarboxing").
    relativeScaleX = 1.0;
    relativeScaleY = aspectRatios.depth / aspectRatios.RGB;
  }
  // Convert input source UV [0, 1] to normalized coordinates in [-0.5, 0.5].
  const u_norm = rgbUv.u - 0.5;
  const v_norm = rgbUv.v - 0.5;
  // Apply the FORWARD Brown-Conrady distortion model.
  const u_centered = u_norm - params.xc;
  const v_centered = v_norm - params.yc;
  const r2 = u_centered * u_centered + v_centered * v_centered;
  const radial =
      1 + params.k1 * r2 + params.k2 * r2 * r2 + params.k3 * r2 * r2 * r2;
  const tanX = 2 * params.p1 * u_centered * v_centered +
      params.p2 * (r2 + 2 * u_centered * u_centered);
  const tanY = params.p1 * (r2 + 2 * v_centered * v_centered) +
      2 * params.p2 * u_centered * v_centered;
  const u_distorted = u_centered * radial + tanX + params.xc;
  const v_distorted = v_centered * radial + tanY + params.yc;
  // Apply initial aspect ratio scaling and translation.
  const u_fitted = u_distorted * relativeScaleX + params.translateU;
  const v_fitted = v_distorted * relativeScaleY + params.translateV;
  // Apply the final user-controlled scaling (zoom and stretch).
  const finalNormX = u_fitted * params.scale * params.scaleX;
  const finalNormY = v_fitted * params.scale * params.scaleY;
  return new THREE.Vector2(2 * finalNormX, 2 * finalNormY);
}
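/*
 * A worked example of the distortion step above (illustrative coefficients,
 * not calibrated values): with k1 = 0.1, xc = yc = 0, and all other
 * coefficients zero, the corner UV (1, 1) gives u_centered = v_centered =
 * 0.5, so r2 = 0.25 + 0.25 = 0.5 and radial = 1 + 0.1 * 0.5 = 1.05; the
 * point is pushed outward to (0.525, 0.525) before the aspect fitting and
 * scaling steps.
 */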
/**
 * Maps a UV coordinate from RGB space to the depth image space, applying
 * Brown-Conrady distortion and affine transformations based on aspect
 * ratios, then reprojecting from the render camera into the depth camera.
 *
 * @param rgbUv - The RGB UV coordinate, e.g., \{ u: 0.5, v: 0.5 \}.
 * @param renderCameraWorldFromClip - Render camera world from clip, i.e. the
 *     inverse of the view-projection matrix.
 * @param depthCameraClipFromWorld - Depth camera clip from world, i.e. the
 *     depth camera's view-projection matrix.
 * @param xrDeviceCamera - The device camera instance.
 * @returns The transformed UV coordinate in the depth image space, or null
 *     if inputs are invalid.
 */
function transformRgbToDepthUv(rgbUv, renderCameraWorldFromClip, depthCameraClipFromWorld, xrDeviceCamera) {
  // Render camera clip space coordinates.
  const clipCoords = transformRgbToRenderCameraClip(rgbUv, xrDeviceCamera);
  if (!clipCoords) {
    return null;
  }
  // Backwards project from the render camera to the depth camera.
  const depthClipCoord = new THREE.Vector4(clipCoords.x, clipCoords.y, 1, 1);
  depthClipCoord.applyMatrix4(renderCameraWorldFromClip);
  depthClipCoord.applyMatrix4(depthCameraClipFromWorld);
  depthClipCoord.multiplyScalar(1 / depthClipCoord.w);
  const finalU = 0.5 * depthClipCoord.x + 0.5;
  const finalV = 1.0 - (0.5 * depthClipCoord.y + 0.5);
  return {u: finalU, v: finalV};
}

/**
 * Retrieves the world space position of a given RGB UV coordinate.
 * Note: it is essential that the coordinates, depth array, and projection
 * matrix all correspond to the same view ID (e.g., 0 for left). It is also
 * advised that all of these are obtained at the same time.
 *
 * @param rgbUv - The RGB UV coordinate, e.g., \{ u: 0.5, v: 0.5 \}.
 * @param depthArray - Array containing depth data.
 * @param projectionMatrix - The rendering camera's projection matrix.
 * @param matrixWorld - The rendering camera's model matrix.
 * @param xrDeviceCamera - The device camera instance.
 * @param xrDepth - The SDK's Depth module.
 * @returns Vertex at (u, v) in world space.
 */
function transformRgbUvToWorld(rgbUv, depthArray, projectionMatrix, matrixWorld, xrDeviceCamera, xrDepth = Depth.instance) {
  if (!depthArray || !projectionMatrix || !matrixWorld || !xrDepth) {
    throw new Error('Missing parameter in transformRgbUvToWorld');
  }
  const worldFromClip = matrixWorld
      .clone()
      .invert()
      .premultiply(projectionMatrix)
      .invert();
  const depthProjectionMatrixInverse = xrDepth.depthProjectionMatrices[0]
      .clone()
      .invert();
  const depthClipFromWorld = xrDepth.depthViewProjectionMatrices[0];
  const depthModelMatrix = xrDepth.depthViewMatrices[0].clone().invert();
  const depthUV = transformRgbToDepthUv(rgbUv, worldFromClip, depthClipFromWorld, xrDeviceCamera);
  if (!depthUV) {
    throw new Error('Failed to get depth UV');
  }
  const {u: depthU, v: depthV} = depthUV;
  const depthX = Math.round(clamp(depthU * xrDepth.width, 0, xrDepth.width - 1));
  // Invert depthV for array access, as image arrays are indexed from the
  // top-left.
  const depthY = Math.round(clamp((1.0 - depthV) * xrDepth.height, 0, xrDepth.height - 1));
  const rawDepthValue = depthArray[depthY * xrDepth.width + depthX];
  const depthInMeters = xrDepth.rawValueToMeters * rawDepthValue;
  // Convert UV to normalized device coordinates and create a point on the
  // near plane.
  const viewSpacePosition = new THREE.Vector3(2.0 * (depthU - 0.5), 2.0 * (depthV - 0.5), -1);
  // Unproject the point from clip space to view space and scale it along the
  // ray from the camera to the correct depth. The camera looks down the -Z
  // axis.
  viewSpacePosition.applyMatrix4(depthProjectionMatrixInverse);
  viewSpacePosition.multiplyScalar(-depthInMeters / viewSpacePosition.z);
  const worldPosition = viewSpacePosition
      .clone()
      .applyMatrix4(depthModelMatrix);
  return worldPosition;
}
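// A hedged usage sketch (not part of the SDK): anchor an object at the world
// position behind an RGB-image detection. `object`, `camera` (the rendering
// three.js camera), and `xrDeviceCamera` are names assumed for illustration.
function exampleAnchorAtRgbUv(object, camera, xrDeviceCamera) {
  const worldPos = transformRgbUvToWorld(
      {u: 0.5, v: 0.5},              // Center of the RGB image.
      Depth.instance.depthArray[0],  // Left-view depth array.
      camera.projectionMatrix,
      camera.matrixWorld,
      xrDeviceCamera);
  object.position.copy(worldPos);
}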
/**
 * Asynchronously crops a base64 encoded image using a THREE.Box2 bounding
 * box. This function creates an in-memory image, draws a specified portion
 * of it to a canvas, and then returns the canvas content as a new base64
 * string.
 * @param base64Image - The base64 string of the source image. Can be a raw
 *     string or a full data URI.
 * @param boundingBox - The bounding box with relative coordinates (0-1) for
 *     cropping.
 * @returns A promise that resolves with the base64 string of the cropped
 *     image.
 */
async function cropImage(base64Image, boundingBox) {
  if (!base64Image) {
    throw new Error('No image data provided for cropping.');
  }
  const img = new Image();
  await new Promise((resolve, reject) => {
    img.onload = resolve;
    img.onerror = (err) => {
      console.error('Error loading image for cropping:', err);
      reject(new Error('Failed to load image for cropping.'));
    };
    img.src = base64Image.startsWith('data:image') ?
        base64Image :
        `data:image/png;base64,${base64Image}`;
  });
  const canvas = document.createElement('canvas');
  const ctx = canvas.getContext('2d');
  // Create a unit box and find the intersection to clamp coordinates.
  const unitBox = new THREE.Box2(new THREE.Vector2(0, 0), new THREE.Vector2(1, 1));
  const clampedBox = boundingBox.clone().intersect(unitBox);
  const cropSize = new THREE.Vector2();
  clampedBox.getSize(cropSize);
  // If the resulting crop area has no size, return an empty image.
  if (cropSize.x === 0 || cropSize.y === 0) {
    return 'data:image/png;base64,';
  }
  // Calculate absolute pixel values from relative coordinates.
  const sourceX = img.width * clampedBox.min.x;
  const sourceY = img.height * clampedBox.min.y;
  const sourceWidth = img.width * cropSize.x;
  const sourceHeight = img.height * cropSize.y;
  // Set the canvas size to the cropped image size.
  canvas.width = sourceWidth;
  canvas.height = sourceHeight;
  // Draw the cropped portion of the source image onto the canvas.
  ctx.drawImage(
      img,
      sourceX, sourceY, sourceWidth, sourceHeight,  // Source rectangle.
      0, 0, sourceWidth, sourceHeight  // Destination rectangle.
  );
  return canvas.toDataURL('image/png');
}

/**
 * Enum for video stream states.
 */
var StreamState;
(function (StreamState) {
  StreamState['IDLE'] = 'idle';
  StreamState['INITIALIZING'] = 'initializing';
  StreamState['STREAMING'] = 'streaming';
  StreamState['ERROR'] = 'error';
  StreamState['NO_DEVICES_FOUND'] = 'no_devices_found';
})(StreamState || (StreamState = {}));

/**
 * The base class for handling video streams (from camera or file), managing
 * the underlying