Unable to load webcam as a texture into shader

I'm trying to load my webcam as a texture into three.js. It works fine as long as I load an ordinary image as a texture into the shader, but as soon as I switch to my webcam the plane turns black.
I guess it has something to do with how I load my camera …

Here is a link to the CodePen:

Here is my code:

const vshader = `
varying vec2 vUv;
    
void main() {
    vUv = uv;
    gl_Position = projectionMatrix * modelViewMatrix * vec4(position,1.0);
}
`;

const fshader = `
varying vec2 vUv;
uniform sampler2D tex;

void main() {
    vec4 col = texture2D(tex, vUv);
    // vec3 col = vec3(1., 0., 1.);
    gl_FragColor = vec4(col);
}
`;
var scene;
var camera;
var renderer;
var geometry, material;
var video;
var loader;
var plane;

function sceneSetup() {

  scene = new THREE.Scene();
  camera = new THREE.OrthographicCamera(window.innerWidth / -2, window.innerWidth / 2, window.innerHeight / 2, window.innerHeight / -2, 1, 1000);

  camera.position.z = 2;
  loader = new THREE.TextureLoader();

  renderer = new THREE.WebGLRenderer();
  renderer.setSize(window.innerWidth, window.innerHeight);
  document.body.appendChild(renderer.domElement);
}

function bufferTextureSetup() {
  // TEXTURE WORKS FINE
  const iChannel1 = loader.load('https://i.imgur.com/jlFgGpe.jpg');

  // VIDEO
  video = document.getElementById('video');
  var texture = new THREE.VideoTexture(video);
  var geometry = new THREE.PlaneBufferGeometry(16, 9);

  geometry.scale(0.5, 0.5, 0.5);
  var videomaterial = new THREE.MeshBasicMaterial({
    map: texture
  });

  // 1 Shader
  material = new THREE.ShaderMaterial({
    uniforms: {
      tex: {
        type: "t",
        value: iChannel1
      },
    },
    vertexShader: vshader,
    fragmentShader: fshader,
    wireframe: false
  });
  plane = new THREE.PlaneBufferGeometry(window.innerWidth, window.innerHeight);

  const box = new THREE.Mesh(plane, material);
  scene.add(box);

  update();

  onWindowResize();

  window.addEventListener('resize', onWindowResize, false);

  // load video
  if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {

    var constraints = {
      video: {
        width: 1280,
        height: 720,
        facingMode: 'user'
      }
    };

    navigator.mediaDevices.getUserMedia(constraints).then(function (stream) {

      // apply the stream to the video element used in the texture
      video.srcObject = stream;
      video.play();

    }).catch(function (error) {
      console.error('Unable to access the camera/webcam.', error);

    });

  } else {

    console.error('MediaDevices interface not available.');

  }

}

sceneSetup();
bufferTextureSetup();

function onWindowResize(event) {
  camera.aspect = window.innerWidth / window.innerHeight;
  camera.updateProjectionMatrix();
  renderer.setSize(window.innerWidth, window.innerHeight);
}

function update() {
  requestAnimationFrame(update);
  renderer.render(scene, camera);
}


Does this example work for you?

Hmm, I saw that example. I tried to implement it, but I'm getting this error:
Unable to access the camera/webcam. TypeError: Cannot set property 'srcObject' of null

Using this pen:
https://codepen.io/haangglide/pen/XWKryOa

That happens because there is no video element in your HTML markup, so document.getElementById('video') returns null and the stream has nothing to attach to. Try adding this to the HTML section:

<video id="video" style="display:none" autoplay playsinline></video>
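The id has to match whatever you pass to document.getElementById in your script, and display:none keeps the element hidden, since it's the shader, not the element, that presents the frames.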

Now it is working, thanks. I was a little confused by this and didn't notice the reference to the DOM. I thought I didn't need it since I'm sampling the video as a texture in the shader. Do I always need a video element when using video as a texture? After all, I don't need an element when I'm sampling an image.

When loading a texture with TextureLoader, the returned instance of Texture holds an internal HTML image element that represents the image data. You need the counterpart when using VideoTexture: the video element is the actual data source. Without it there is no data.
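
If you'd rather not keep the element in your markup, a minimal sketch is to create it programmatically and point the shader's tex uniform at the VideoTexture. This assumes the same material and getUserMedia constraints as in your code above:

// Create the video element in code instead of in the HTML markup.
var video = document.createElement('video');
video.playsInline = true;
video.muted = true; // muted streams are exempt from most autoplay restrictions

navigator.mediaDevices.getUserMedia({ video: { facingMode: 'user' } })
  .then(function (stream) {
    video.srcObject = stream;
    video.play();
  })
  .catch(function (error) {
    console.error('Unable to access the camera/webcam.', error);
  });

// Wire the webcam into the existing ShaderMaterial instead of the image.
var videoTexture = new THREE.VideoTexture(video);
material.uniforms.tex.value = videoTexture;

VideoTexture updates itself automatically each frame the renderer uses it, so no manual needsUpdate call is required as long as your render loop is running.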