I have a video texture that doesn’t appear or play when activated on an iPhone and it gives off this WebGL error
THREE.WebGLState:
(anonymous) @ sse-hooks.f742b80f43…19b0d97b5886cd.js:2
texImage2D @ three.module.js:21276
uploadTexture @ three.module.js:22207
setTexture2D @ three.module.js:21734
safeSetTexture2D @ three.module.js:22627
setValueT1 @ three.module.js:17147
(anonymous) @ three.module.js:17651
setProgram @ three.module.js:26039
(anonymous) @ three.module.js:24882
renderObject @ three.module.js:25631
renderObjects @ three.module.js:25601
(anonymous) @ three.module.js:25398
render @ app.js:376
// Main render loop. Schedules itself via requestAnimationFrame, so it
// must be started exactly once; every frame it advances animations,
// keeps the camera in sync with the canvas size, and draws the scene.
function render() {
  requestAnimationFrame(render);
  let deltaTime = clock.getDelta();
  TWEEN.update();
  hotspots.HotspotUpdate();
  // Keep the camera's aspect ratio in sync whenever the canvas was resized.
  if (resizeRendererToDisplaySize(renderer)) {
    const canvas = renderer.domElement;
    canvas.style.background = "black";
    camera.aspect = canvas.clientWidth / canvas.clientHeight;
    camera.updateProjectionMatrix();
  }
  water.position.z += movementSpeed * deltaTime;
  // Advance the foam animation for whichever foam meshes have loaded.
  // NOTE(review): both guards invoke the same animateFrontFoam(deltaTime)
  // call, so it runs twice per frame when both meshes exist — confirm that
  // is intended (perhaps it should receive the mesh as an argument).
  if (modelLoader.frontFoam_L) animateFrontFoam(deltaTime);
  if (modelLoader.frontFoam_R) animateFrontFoam(deltaTime);
  // Force the video-driven material to re-upload its texture each frame.
  modelLoader.FoamAnimMat.needsUpdate = true;
  controls.update();
  renderer.render(scene, camera);
  // this is line 376
  renderer.clearDepth(); // important! clear the depth buffer
  //renderer.render(scene2, camera);
  modelLoader.modelAnimationMixers.forEach((_mixer) => {
    _mixer.update(deltaTime);
  });
}
I followed this example on threejs and it still does not work.
Below I’ll be comparing my code to the one on github.
It states that I need a startButton variable with a click event listener:
// From the three.js example: playback is kicked off from a click handler
// because iOS Safari only allows media playback to start from a user gesture.
const startButton = document.getElementById( 'startButton' );
startButton.addEventListener( 'click', function () {
init();
animate();
} );
This is between line 71 - 77
This is my code
$("#launch").click(function () {
$("#c").show();
$("#cwrapper").show();
$("#startContainer").addClass("hidden");
$("#load-wrapper").removeClass("hidden");
$("#load-wrapper").focus();
attachArrowFontSizingHooks();
main.start();
...
I call the render function inside of main.start
function and it looks like this
// Window-resize handler: keeps the camera projection and the renderer's
// drawing-buffer size in sync with the new window dimensions.
// FIX: the trailing render() call was removed. render() reschedules itself
// with requestAnimationFrame, so invoking it here started an ADDITIONAL
// animation loop on every resize — the existing loop already picks up the
// new size on its next frame.
function onWindowResize() {
  camera.aspect = window.innerWidth / window.innerHeight;
  camera.updateProjectionMatrix();
  renderer.setSize(window.innerWidth, window.innerHeight);
}
Next, it says I need to fetch the video from an HTML tag, play it, add an event listener to it, and assign it to a texture that then gets assigned to an object's material, like so:
// From the three.js example: grab the <video> element, start playback,
// and wrap it in a VideoTexture that three.js refreshes every frame.
video = document.getElementById( 'video' );
video.play();
// Each time playback starts, jump 3 seconds into the clip.
video.addEventListener( 'play', function () {
this.currentTime = 3;
} );
texture = new THREE.VideoTexture( video );
const parameters = { color: 0xffffff, map: texture };
This is what I have done
let video = document.getElementById("video"); // link video HTML element to JS
// FIX: play() returns a Promise on modern browsers, and on iOS Safari it
// REJECTS when autoplay is blocked (no user gesture yet, or the video is
// not muted). Leaving that promise floating hides the reason the texture
// never starts playing on iPhone — surface it instead.
const playPromise = video.play();
// Optional chaining: very old browsers return undefined from play().
playPromise?.catch((err) => {
  console.warn("Video autoplay was blocked:", err);
});
video.addEventListener("play", function () {
  // Skip the first 3 seconds every time playback (re)starts.
  this.currentTime = 3;
});
let videoTexture = new THREE.VideoTexture(video); // create texture for video
// Material whose alpha is driven by the video, cut off at alphaTest.
// NOTE(review): kept as `export let` to match the original interface;
// if it is never reassigned inside this module, prefer `export const`.
export let FoamAnimMat = new THREE.MeshBasicMaterial({
  alphaMap: videoTexture,
  alphaTest: 0.45,
  side: THREE.DoubleSide,
  fog: false
});
This is my HTML tag
<!-- Hidden video used only as a WebGL texture source.
     `muted` + `playsinline` + `autoplay` together are what iOS Safari
     requires before it will start playback programmatically. -->
<video
id="video"
loop
crossorigin="anonymous"
style="display: none;"
playsinline
autoplay
muted
>
<source
src="videos/Foam_512.mp4"
type='video/mp4; codecs="avc1.42E01E, mp4a.40.2"'
/>
</video>
This is what I get
This is what appears in the browser(windows/mac)
Why isn’t it working?
UPDATE:
I forked the three.js example and re-built it in CodeSandbox because I wanted to see whether my video file was corrupt or the code didn't work as intended. You can test it here
It didn't work. But when I visit the same example on the official examples page, it works. Is CodeSandbox as a platform the problem, or am I still missing something?