Loading a GLTF Model in WebXR

So, I´m making a project where I use webxr to load the user inside a 3d gltf model of a room, and the user can navigate this room. Currently I’m also adding a cube and a sphere to the scene, for testing purposes. The problem is, only when I look at a certain direction (I assume that is because the number of textures loaded at the current view changes), The textures of the floor get a strange unintended behavior where they no longer have a marble texture, but their texture becomes the live feed of the camera, strecthed to fit the square shapes that make the floor. Here is the code that loads the 3d model inside the scene, after an AR session started:

function onSessionStarted(session) {
  xrSession = session;
  xrButton.innerHTML = "Parar WebXR";

  session.addEventListener("end", onSessionEnded);
  let canvas = document.createElement("canvas");
  gl = canvas.getContext("webgl", {
    xrCompatible: true,
  });
  scene = new THREE.Scene();
  const loader = new THREE.GLTFLoader();
  loader.load("./textures/apart_06.glb", (object) => {
    object.scene.position.y = -1.5;
    scene.add(object.scene);
  });
  let colors = [0xff0000, 0x00ff00, 0x0000ff, 0xffff00, 0xff00ff, 0x00ffff];
  let materials = [];
  for (let i = 0; i < colors.length; i++) {
    materials.push(new THREE.MeshBasicMaterial({ color: colors[i] }));
  }
  let geometry = new THREE.BoxGeometry(0.3, 0.3, 0.3);
  cube = new THREE.Mesh(geometry, materials);
  cube.position.z = -2;
  scene.add(cube);
  let sphereMaterial = new THREE.MeshBasicMaterial({
    color: 0x00ff00, // Green color
    transparent: true,
    opacity: 0.5,
  });
  geometry = new THREE.SphereGeometry(0.15, 32, 32);

  camera = new THREE.PerspectiveCamera();
  const ambientLight = new THREE.AmbientLight(0xffffff, 1);
  scene.add(ambientLight);
  renderer = new THREE.WebGLRenderer({
    alpha: true,
    preserveDrawingBuffer: true,
    canvas: canvas,
    context: gl,
  });
  renderer.autoClear = false;
  camera.matrixAutoUpdate = false;

  binding = new XRWebGLBinding(session, gl);
  session.updateRenderState({ baseLayer: new XRWebGLLayer(session, gl) });
  session.requestReferenceSpace("local").then((refSpace) => {
    xrRefSpace = refSpace;
    session.requestAnimationFrame(onXRFrame);
  });
}

Here is also the function for rendering the XRFrame:

function onXRFrame(time, frame) {
  renderer.render(scene, camera);
  let session = frame.session;
  session.requestAnimationFrame(onXRFrame);

  gl.bindFramebuffer(gl.FRAMEBUFFER, session.renderState.baseLayer.framebuffer);

  let pose = frame.getViewerPose(xrRefSpace);

  if (pose) {
    const firstView = pose.views[0];
    const viewport = session.renderState.baseLayer.getViewport(firstView);
    renderer.setSize(viewport.width, viewport.height);

    camera.matrix.fromArray(firstView.transform.matrix);
    camera.projectionMatrix.fromArray(firstView.projectionMatrix);
    camera.updateMatrixWorld(true);

    cube.rotation.x += 0.01;
    cube.rotation.y += 0.01;

    const p = pose.transform.position;
    xCoord.innerHTML = "x: " + p.x.toFixed(4);
    yCoord.innerHTML = "y: " + p.y.toFixed(4);
    zCoord.innerHTML = "z: " + p.z.toFixed(4);
  } else {
    xCoord.innerHTML = "No pose";
    yCoord.innerHTML = "No pose";
    zCoord.innerHTML = "No pose";
  }
}

Wondering if anyone could help me?