I have a barebones Three.js scene that I want to view in VR using WebXR. The scene looks fine in a regular browser window, but when I enter immersive mode it is distorted around the edges: it's as if there is a rectangle projected in front of the camera, and everything inside it renders correctly while everything outside it is distorted.
Can anyone see what I’m doing wrong or what I’m missing?
// Imports (paths assume the standard three.js addons layout; adjust to your setup)
import * as THREE from "three";
import { BoxLineGeometry } from "three/addons/geometries/BoxLineGeometry.js";
import { VRButton } from "three/addons/webxr/VRButton.js";

// Scene with a flat background colour (labColors is an app-specific palette)
const scene = new THREE.Scene();
scene.background = new THREE.Color(labColors.slate1);
const camera = new THREE.PerspectiveCamera(35, window.innerWidth / window.innerHeight, 0.1, 100);
camera.position.set(0, 1.4, 2);
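// Note: while an immersive session is presenting, three.js overrides this
// camera's pose and projection with values from the XR device, so the
// fov/aspect above only affect the regular (non-VR) browser view.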
const ambientLight = new THREE.AmbientLight(0xffffff, 1);
scene.add(ambientLight);
const directionalLight = new THREE.DirectionalLight(0xffffff, 1);
directionalLight.position.set(10, 10, 10);
scene.add(directionalLight);
// Wireframe room (BoxLineGeometry is from the three.js addons), lifted so the floor sits at y = 0
const room = new THREE.LineSegments(
  new BoxLineGeometry(20, 10, 20, 20, 10, 20).translate(0, 5, 0),
  new THREE.LineBasicMaterial({ color: labColors.slate8 })
);
scene.add(room);
// Create the renderer
const renderer = new THREE.WebGLRenderer({ antialias: true });
renderer.setSize(window.innerWidth, window.innerHeight);
renderer.setPixelRatio(window.devicePixelRatio);
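// Note: setPixelRatio only affects the regular canvas; in immersive mode the
// framebuffer size is chosen by the XR system via XRWebGLLayer, not by
// window.devicePixelRatio.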
// Enable XR and render via setAnimationLoop instead of requestAnimationFrame (required for WebXR)
renderer.xr.enabled = true;
renderer.setAnimationLoop(function () {
  renderer.render(scene, camera);
});
// Add the automatically created <canvas> element to the page
// (container is a template ref on the wrapper element)
container.value.append(renderer.domElement);
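// VRButton (from the three.js addons) adds an "Enter VR" button that requests an immersive-vr session on click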
document.body.appendChild(VRButton.createButton(renderer));
// Keep the canvas and camera in sync with the window size
const resize = () => {
  camera.aspect = window.innerWidth / window.innerHeight;
  camera.updateProjectionMatrix();
  renderer.setSize(window.innerWidth, window.innerHeight);
  renderer.setPixelRatio(window.devicePixelRatio);
};
window.addEventListener("resize", resize);