I am not a shading expert, nor a three.js expert, and this question is posted purely out of curiosity.
It is intended for the shading gurus on this forum, and if any solution can be found it would only be temporary and experimental, since three.js library development will probably go a different way.
The question (WebGPU related): can the following function somehow be converted to use MeshBasicNodeMaterial, maybe via its colorNode or some other property, instead of ShaderMaterial, and together with the tslFn() functionality (which currently seems to have been replaced by just Fn())?
// decompress function from TextureUtils.js
function decompress( texture, maxTextureSize = Infinity, renderer = null ) {

    let _renderer;
    let fullscreenQuadGeometry;
    let fullscreenQuadMaterial;
    let fullscreenQuad;

    if ( ! fullscreenQuadGeometry ) fullscreenQuadGeometry = new THREE.PlaneGeometry( 2, 2, 1, 1 );
    if ( ! fullscreenQuadMaterial ) fullscreenQuadMaterial = new THREE.ShaderMaterial( {
        uniforms: { blitTexture: new THREE.Uniform( texture ) },
        vertexShader: `
            varying vec2 vUv;
            void main(){
                vUv = uv;
                gl_Position = vec4(position.xy * 1.0,0.,.999999);
            }`,
        fragmentShader: `
            uniform sampler2D blitTexture;
            varying vec2 vUv;
            void main(){
                gl_FragColor = vec4(vUv.xy, 0, 1);
                #ifdef IS_SRGB
                gl_FragColor = LinearTosRGB( texture2D( blitTexture, vUv) );
                #else
                gl_FragColor = texture2D( blitTexture, vUv);
                #endif
            }`
    } );

    fullscreenQuadMaterial.uniforms.blitTexture.value = texture;
    fullscreenQuadMaterial.defines.IS_SRGB = texture.colorSpace == THREE.SRGBColorSpace;
    fullscreenQuadMaterial.needsUpdate = true;

    if ( ! fullscreenQuad ) {

        fullscreenQuad = new THREE.Mesh( fullscreenQuadGeometry, fullscreenQuadMaterial );
        fullscreenQuad.frustumCulled = false;

    }

    const _camera = new THREE.PerspectiveCamera();
    const _scene = new THREE.Scene();
    _scene.add( fullscreenQuad );

    if ( ! renderer ) {

        renderer = _renderer = new THREE.WebGLRenderer( { antialias: false } );

    }

    renderer.setSize( Math.min( texture.image.width, maxTextureSize ), Math.min( texture.image.height, maxTextureSize ) );
    renderer.clear();
    renderer.render( _scene, _camera );

    const readableTexture = new THREE.Texture( renderer.domElement );
    readableTexture.minFilter = texture.minFilter;
    readableTexture.magFilter = texture.magFilter;
    readableTexture.wrapS = texture.wrapS;
    readableTexture.wrapT = texture.wrapT;
    readableTexture.name = texture.name;

    if ( _renderer ) {

        _renderer.dispose();
        _renderer = null;

    }

    return readableTexture;

}
Here is the same function, slightly modified and with functionality missing, just to give an insight into what I'm looking for (a rough, untested guess at the missing colorNode part follows after the code):
async function decompress( texture, maxTextureSize = Infinity, renderer = null ) {

    let _renderer;
    let fullscreenQuadGeometry;
    let fullscreenQuadMaterial;
    let fullscreenQuad;

    if ( ! fullscreenQuadGeometry ) fullscreenQuadGeometry = new THREE.PlaneGeometry( 2, 2, 1, 1 );
    if ( ! fullscreenQuadMaterial ) fullscreenQuadMaterial = new THREE.MeshBasicNodeMaterial( {} );

    /*
    if ( ! fullscreenQuadMaterial ) fullscreenQuadMaterial = new THREE.ShaderMaterial( {
        uniforms: { blitTexture: new THREE.Uniform( texture ) },
        vertexShader: `
            varying vec2 vUv;
            void main(){
                vUv = uv;
                gl_Position = vec4(position.xy * 1.0,0.,.999999);
            }`,
        fragmentShader: `
            uniform sampler2D blitTexture;
            varying vec2 vUv;
            void main(){
                gl_FragColor = vec4(vUv.xy, 0, 1);
                #ifdef IS_SRGB
                gl_FragColor = LinearTosRGB( texture2D( blitTexture, vUv) );
                #else
                gl_FragColor = texture2D( blitTexture, vUv);
                #endif
            }`
    } );
    */

    // fullscreenQuadMaterial.uniforms.blitTexture.value = texture;
    // fullscreenQuadMaterial.defines.IS_SRGB = texture.colorSpace == THREE.SRGBColorSpace;

    const isSRGB = texture.colorSpace == THREE.SRGBColorSpace; // color space info that the node setup would still need to handle somehow

    fullscreenQuadMaterial.needsUpdate = true;

    if ( ! fullscreenQuad ) {

        fullscreenQuad = new THREE.Mesh( fullscreenQuadGeometry, fullscreenQuadMaterial );
        fullscreenQuad.frustumCulled = false;

    }

    const _camera = new THREE.PerspectiveCamera();
    const _scene = new THREE.Scene();
    _scene.add( fullscreenQuad );

    if ( ! renderer ) {

        renderer = _renderer = new THREE.WebGPURenderer( { antialias: false } );
        await renderer.init();

    }

    renderer.setSize( Math.min( texture.image.width, maxTextureSize ), Math.min( texture.image.height, maxTextureSize ) );
    renderer.clear();
    renderer.render( _scene, _camera );

    const readableTexture = new THREE.Texture( renderer.domElement );
    readableTexture.minFilter = texture.minFilter;
    readableTexture.magFilter = texture.magFilter;
    readableTexture.wrapS = texture.wrapS;
    readableTexture.wrapT = texture.wrapT;
    readableTexture.name = texture.name;

    if ( _renderer ) {

        _renderer.dispose();
        _renderer = null;

    }

    return readableTexture;

}
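For illustration, this is roughly the kind of colorNode assignment I imagine could stand in for the ShaderMaterial above. It is only a minimal, untested sketch on my part, assuming the texture(), uv() and Fn() nodes can be imported from 'three/tsl' (or from the nodes build, depending on the three.js version), and assuming 'blitTexture' stands for the texture passed into decompress() (the parameter is named differently there to avoid clashing with the TSL texture() node):

import { Fn, texture, uv } from 'three/tsl';

// simplest guess: sample the texture with the mesh UVs,
// i.e. the node counterpart of texture2D( blitTexture, vUv )
fullscreenQuadMaterial.colorNode = texture( blitTexture, uv() );

// or the same thing wrapped in Fn(), closer to the old tslFn() style
fullscreenQuadMaterial.colorNode = Fn( () => {

    return texture( blitTexture, uv() );

} )();

Even if something along these lines is valid, I don't know how the IS_SRGB / LinearTosRGB branch from the original fragment shader would map to nodes, or whether setting renderer.outputColorSpace already takes care of that, which is part of why I'm asking.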