WebGPU - converting functions

I am not any kind of shading expert, or three.js expert either, and this question is being posted purely out of curiosity.

It is intended for the shading gurus in this forum, and any solution found would only be temporary and experimental, since the three.js library development will probably go a different way.

The question: WebGPU related, can the following function be somehow converted to use the MeshBasicNodeMaterial and maybe its colorNode or any other property, instead of using the ShaderMaterial, and together with the tslFn() functionality (which currently seems to be replaced with Fn() only)?

		// decompress function from TextureUtils.js

		// Blits a (possibly GPU-compressed) texture onto a fullscreen quad and
		// returns a new THREE.Texture backed by the renderer's canvas, so the
		// pixel data becomes readable/exportable.
		//
		// texture        - the texture to decompress
		// maxTextureSize - clamps the output width/height (default Infinity)
		// renderer       - optional WebGLRenderer to reuse; when omitted a
		//                  temporary one is created and disposed before returning
		function decompress( texture, maxTextureSize = Infinity, renderer = null ) {

			let _renderer;

			// These are fresh locals on every call, so the former
			// "if ( ! ... )" guards were always true and have been removed.
			const fullscreenQuadGeometry = new THREE.PlaneGeometry( 2, 2, 1, 1 );

			const fullscreenQuadMaterial = new THREE.ShaderMaterial( {
				uniforms: { blitTexture: new THREE.Uniform( texture ) },
				vertexShader: `
					varying vec2 vUv;
					void main(){
						vUv = uv;
						gl_Position = vec4(position.xy * 1.0,0.,.999999);
					}`,
				fragmentShader: `
					uniform sampler2D blitTexture; 
					varying vec2 vUv;
		
					void main(){ 
						gl_FragColor = vec4(vUv.xy, 0, 1);
						
						#ifdef IS_SRGB
						gl_FragColor = LinearTosRGB( texture2D( blitTexture, vUv) );
						#else
						gl_FragColor = texture2D( blitTexture, vUv);
						#endif
					}`
			} );

			fullscreenQuadMaterial.uniforms.blitTexture.value = texture;

			// Strict equality: colorSpace is a plain string constant.
			fullscreenQuadMaterial.defines.IS_SRGB = texture.colorSpace === THREE.SRGBColorSpace;
			fullscreenQuadMaterial.needsUpdate = true;

			const fullscreenQuad = new THREE.Mesh( fullscreenQuadGeometry, fullscreenQuadMaterial );

			// Fixed typo: the property is "frustumCulled" — the original
			// "frustrumCulled" only created an unused property and left
			// frustum culling enabled on the quad.
			fullscreenQuad.frustumCulled = false;

			const _camera = new THREE.PerspectiveCamera();
			const _scene = new THREE.Scene();
			_scene.add( fullscreenQuad );

			if ( ! renderer ) {

				renderer = _renderer = new THREE.WebGLRenderer( { antialias: false } );

			}

			renderer.setSize( Math.min( texture.image.width, maxTextureSize ), Math.min( texture.image.height, maxTextureSize ) );
			renderer.clear();
			renderer.render( _scene, _camera );

			const readableTexture = new THREE.Texture( renderer.domElement );

			readableTexture.minFilter = texture.minFilter;
			readableTexture.magFilter = texture.magFilter;
			readableTexture.wrapS = texture.wrapS;
			readableTexture.wrapT = texture.wrapT;
			readableTexture.name = texture.name;

			// Mark for upload; a canvas-backed texture is never uploaded otherwise.
			readableTexture.needsUpdate = true;

			if ( _renderer ) {

				_renderer.dispose();
				_renderer = null;

			}

			return readableTexture;

		}

Here is the same function, slightly modified and missing functionality, just to provide an insight into what I’m looking for:

		// Async WebGPU variant: renders the texture with a MeshBasicNodeMaterial
		// instead of the WebGL ShaderMaterial (the original GLSL version lives in
		// TextureUtils.js). The colorNode-based blit itself is still missing.
		//
		// texture        - the texture to decompress
		// maxTextureSize - clamps the output width/height (default Infinity)
		// renderer       - optional WebGPURenderer to reuse; when omitted a
		//                  temporary one is created, initialized, and disposed
		async function decompress( texture, maxTextureSize = Infinity, renderer = null ) {

			let _renderer;

			// Fresh locals on every call — the former "if ( ! ... )" guards were
			// always true, so they (and the dead commented-out ShaderMaterial
			// code) have been removed.
			const fullscreenQuadGeometry = new THREE.PlaneGeometry( 2, 2, 1, 1 );
			const fullscreenQuadMaterial = new THREE.MeshBasicNodeMaterial( {} );

			// NOTE(review): the original contained the bare statement
			// "texture.colorSpace == THREE.SRGBColorSpace;" — a comparison whose
			// result was discarded (a no-op). The sRGB handling still needs to
			// be ported to the node material, e.g. via its colorNode.
			fullscreenQuadMaterial.needsUpdate = true;

			const fullscreenQuad = new THREE.Mesh( fullscreenQuadGeometry, fullscreenQuadMaterial );

			// Fixed typo: "frustumCulled" (was "frustrumCulled", which had no effect).
			fullscreenQuad.frustumCulled = false;

			const _camera = new THREE.PerspectiveCamera();
			const _scene = new THREE.Scene();
			_scene.add( fullscreenQuad );

			if ( ! renderer ) {

				renderer = _renderer = new THREE.WebGPURenderer( { antialias: false } );
				await renderer.init();

			}

			renderer.setSize( Math.min( texture.image.width, maxTextureSize ), Math.min( texture.image.height, maxTextureSize ) );
			renderer.clear();
			renderer.render( _scene, _camera );

			const readableTexture = new THREE.Texture( renderer.domElement );

			readableTexture.minFilter = texture.minFilter;
			readableTexture.magFilter = texture.magFilter;
			readableTexture.wrapS = texture.wrapS;
			readableTexture.wrapT = texture.wrapT;
			readableTexture.name = texture.name;

			// Mark for upload so the canvas contents actually reach the GPU.
			readableTexture.needsUpdate = true;

			if ( _renderer ) {

				_renderer.dispose();
				_renderer = null;

			}

			return readableTexture;

		}

For those who might be interested, here is a different approach that seems to be working properly but WebGPU currently seems to have an issue with non-power-of-two textures (not really sure if this is three.js related or not).

This approach is just setting the compressed texture as the scene’s background and returning that canvas. It even returns the non-power-of-two textures scaled up but not fully properly applied to the model.

EDIT: I did find out that, just before returning the readableTexture, it is sufficient to resize it back to the original texture size and then it will be applied properly to the model.

      // modified decompress function

      // WebGPU approach: sets the texture as the scene background and captures
      // the renderer canvas. Non-power-of-two textures are rendered at the
      // next power-of-two size (WebGPU currently dislikes NPOT textures);
      // callers may need to resize the result back to the original dimensions.
      //
      // texture        - the texture to decompress (disposed when a temporary
      //                  renderer was created here)
      // maxTextureSize - clamps the output width/height (default Infinity)
      // renderer       - optional WebGPURenderer to reuse
      async function decompress( texture, maxTextureSize = Infinity, renderer = null ) {
        let _renderer;

        texture.colorSpace = THREE.SRGBColorSpace;
        texture.needsUpdate = true;

        // WebGPU currently does not seem to like non-power-of-two textures.
        let width = texture.image.width;
        let height = texture.image.height;

        if ( ! THREE.MathUtils.isPowerOfTwo( width ) || ! THREE.MathUtils.isPowerOfTwo( height ) ) {
          width = THREE.MathUtils.ceilPowerOfTwo( width );
          height = THREE.MathUtils.ceilPowerOfTwo( height );
        }

        const _camera = new THREE.PerspectiveCamera();

        // Set the texture as the scene's background.
        const _scene = new THREE.Scene();
        _scene.background = texture;

        if ( ! renderer ) {
          renderer = _renderer = new THREE.WebGPURenderer( { antialias: false } );
          await renderer.init();
        }

        renderer.setSize( Math.min( width, maxTextureSize ), Math.min( height, maxTextureSize ) );
        renderer.clear();
        renderer.render( _scene, _camera );

        const readableTexture = new THREE.Texture( renderer.domElement );

        readableTexture.colorSpace = THREE.SRGBColorSpace;
        readableTexture.minFilter = texture.minFilter;
        readableTexture.magFilter = texture.magFilter;
        readableTexture.wrapS = texture.wrapS;
        readableTexture.wrapT = texture.wrapT;
        readableTexture.name = texture.name;
        readableTexture.needsUpdate = true;

        if ( _renderer ) {
          // _scene.background IS the input texture — the original disposed the
          // same object twice ("_scene.background.dispose()" followed by
          // "texture.dispose()"); once is enough.
          texture.dispose();

          _renderer.dispose();
          _renderer = null;
        }

        return readableTexture;
      }

Here is another take on the decompress function, this time with a dual check to find out whether `three` was mapped to three.module.js or to three.webgpu.js.

The official three.js repository should have 2 versions, TextureUtils.js and TextureUtilsGPU.js, so anyone could actually try combining those into one.

My version is still slightly different and trying to use the scene.background for WebGPU.

	// modified decompress function from TextureUtils.js
	// it is async function and should be called as such

	// will require previously created variables ("scope." part can probably be removed):
	// scope._renderer = null;
	// scope.fullscreenQuad = null;
	// scope.fullscreenQuadGeometry = null;
	// scope.fullscreenQuadMaterial = null;
	// it was done like this in order to re-use some variables which
	// can be disposed of in the function calling the `decompress`

	// will require separate imports of:
	// CanvasTexture, Mesh, MathUtils, PerspectiveCamera, PlaneGeometry, Scene, ShaderMaterial, Uniform
	// LinearFilter, LinearMipmapLinearFilter, RepeatWrapping, SRGBColorSpace

	// the parameter "renderer" was removed in this version
	// try passing cloned version of the texture when calling this function

	// depending on how it is used, the following line can be changed to include "function":
	// async function decompress( texture, maxTextureSize = Infinity ) {

	async decompress( texture, maxTextureSize = Infinity ) {

		console.log( 'THREE.OBJExporter: Decompressing texture...' );

		const scope = this;

		let readableTexture;

		let width = texture.image.width;
		let height = texture.image.height;

		let resize = false;

		// Dual check to find out what "three" is mapped to - "three.module.js" or "three.webgpu.js"
		// Currently, WebGLRenderer import should not be available in "three.webgpu.js"

		if ( scope._renderer === null ) {

			try {

				const { WebGLRenderer } = await import( "three" );
				scope._renderer = new WebGLRenderer( { antialias: false } );

			} catch ( error ) {}

			// If WebGLRenderer import was not provided then check for WebGPURenderer

			if ( scope._renderer === null ) {

				try {

					const { WebGPURenderer } = await import( "three" );
					scope._renderer = new WebGPURenderer( { antialias: false } );
					await scope._renderer.init();

				} catch ( error ) {}

			}

		}

		const _camera = new PerspectiveCamera();
		const _scene = new Scene();

		if ( scope._renderer.isWebGPURenderer ) {

			// Set the texture as the scene's background
			_scene.background = texture;
			_scene.backgroundIntensity = 1.0;
			_scene.background.needsUpdate = true;

		} else {

			if ( scope.fullscreenQuadGeometry === null ) scope.fullscreenQuadGeometry = new PlaneGeometry( 2, 2, 1, 1 );

			if ( scope.fullscreenQuadMaterial === null ) {

				scope.fullscreenQuadMaterial = new ShaderMaterial( {

					uniforms: { blitTexture: new Uniform( texture ) },
					vertexShader: `
					varying vec2 vUv;
					void main(){
						vUv = uv;
						gl_Position = vec4(position.xy * 1.0,0.,.999999);
					}`,
					fragmentShader: `
					uniform sampler2D blitTexture; 
					varying vec2 vUv;
					void main(){ 
						gl_FragColor = vec4(vUv.xy, 0, 1);
						
						#ifdef IS_SRGB
						gl_FragColor = LinearTosRGB( texture2D( blitTexture, vUv) );
						#else
						gl_FragColor = texture2D( blitTexture, vUv);
						#endif
					}`

				} );

			}

			scope.fullscreenQuadMaterial.uniforms.blitTexture.value = texture;
			scope.fullscreenQuadMaterial.needsUpdate = true;

			if ( scope.fullscreenQuad === null ) {

				scope.fullscreenQuad = new Mesh( scope.fullscreenQuadGeometry, scope.fullscreenQuadMaterial );

			}

			scope.fullscreenQuad.frustrumCulled = false;

		}

		if ( scope._renderer.isWebGPURenderer ) {

			// WebGPU currently does not seem to like non-power-of-two textures

			if ( ! MathUtils.isPowerOfTwo( width ) || ! MathUtils.isPowerOfTwo( height )) {

				if ( ! MathUtils.isPowerOfTwo( width ) ) width = MathUtils.ceilPowerOfTwo( width );
				if ( ! MathUtils.isPowerOfTwo( height ) ) height = MathUtils.ceilPowerOfTwo( height );

				resize = true;

			}

		} else {

			if ( scope.fullscreenQuad !== null ) _scene.add( scope.fullscreenQuad );

		}

		scope._renderer.setSize( Math.min( width, maxTextureSize ), Math.min( height, maxTextureSize ) );

		await new Promise( resolve => {

			if ( scope._renderer.isWebGPURenderer ) {

				scope._renderer.clearAsync();
				scope._renderer.renderAsync( _scene, _camera );

			} else {

				scope._renderer.clear();
				scope._renderer.render( _scene, _camera );

			}

			resolve( readableTexture = new CanvasTexture( scope._renderer.domElement ) );

		});

		if ( scope._renderer.isWebGPURenderer && scope._renderer._quad ) {

			if ( scope._renderer._quad.material ) scope._renderer._quad.material.dispose();
			if ( scope._renderer._quad.geometry ) scope._renderer._quad.geometry.dispose();

		}

		// Resize back to original texture size if needed

		if ( scope._renderer.isWebGPURenderer && resize === true ) {

			await new Promise( resolve => {

				let canvas = document.createElement( 'canvas' );
				canvas.width = texture.image.width;
				canvas.height = texture.image.height;

				let ctx = canvas.getContext( '2d' );

				let img = new Image();

				img.onload = function() {

					ctx.drawImage( this, 0, 0, canvas.width, canvas.height );
					resolve( readableTexture.image = canvas );

				}

				img.src = readableTexture.image.toDataURL( 'image/png', 1 );

			});

		}

		readableTexture.colorSpace = texture.colorSpace || SRGBColorSpace;
		readableTexture.minFilter = texture.minFilter || LinearMipmapLinearFilter;
		readableTexture.magFilter = texture.magFilter || LinearFilter;
		readableTexture.wrapS = texture.wrapS || RepeatWrapping;
		readableTexture.wrapT = texture.wrapT || RepeatWrapping;
		readableTexture.name = texture.name;

		readableTexture.needsUpdate = true;

		if ( scope.fullscreenQuad !== null ) {

			_scene.remove( scope.fullscreenQuad );

			if ( scope.fullscreenQuad.material && scope.fullscreenQuad.material.uniforms ) {

				Object.keys( scope.fullscreenQuad.material.uniforms ).forEach( ( key ) => {

					if ( scope.fullscreenQuad.material.uniforms[ key ].value ) {

						let kv = scope.fullscreenQuad.material.uniforms[ key ].value;
						if ( kv.type && kv.type === 1009 ) kv.dispose();

					}

				});

				scope.fullscreenQuad.material.dispose();

			}

			scope.fullscreenQuad.geometry.dispose();
			scope.fullscreenQuad = null;

		} else {

			_scene.background.dispose();

		}

		texture.dispose();

		return readableTexture;

	}

This is a part of my updated OBJExporter so check its code to see the full implementation, which might explain the most comments stated above.

For any further code updates refer to my repository so I don’t have to keep updating it here.

Most of my desktop viewers include OBJ+MTL exports, including experimental WebGPU viewers, and are using this exporter.

For those who like experimenting and poking around, here is another relatively simple way of getting the decompress function imported regardless of whether you are using “three.module.js” or “three.webgpu.js”.

This post initially had a code that I thought was working properly but eventually found out the opposite.

With a little more work I did get things working and those changes are being used by the version of GLTFExporter that I have in my repository. Here is the commit that shows these new changes.

As for the WebGPU side of this story, only my experimental GLTF WebGPU viewer is currently using this GLTFExporter, so give it a try.

Depending on how things progress with the further development of the three.js library, this method might become obsolete and non-functional.