How do I get the vertex data from my position attribute into a shader with a DataTexture?

I have a parameterizable BufferGeometry. To this I added an index attribute so I can adjust each vertex.
So far I’m satisfied.

Now I need the positions of its neighboring vertices for an exact positioning of some vertices. Therefore I thought I create a DataTexture that contains all vertex coordinates so that I have access to all vertex coordinates in the shader. And in such a way that my vertex index in the position attribute and in the shader always choose exactly the same vertex. Then I can easily read the two neighboring vertices from the shader with index -+ 1.
However, the implementation is proving to be more complicated than expected. Does anyone have experience with bringing vertex data into the vertex shader using DataTexture? My DataTexture doesn’t work.
There are all vertices in the origin but why

import * as THREE from "../resources/libs/three/build/three.module.js";
import { OrbitControls } from '../resources/libs/three/examples/jsm/controls/OrbitControls.js';
import WebGL from '../resources/libs/three/examples/jsm/WebGL.js';

const P = new THREE.Vector3();
const N1 = new THREE.Vector3();
const N2 = new THREE.Vector3();
const N3 = new THREE.Vector3();
const D1 = new THREE.Vector3();
const D2 = new THREE.Vector3();

const VS = `

	precision highp float;
	precision highp int;
	precision highp sampler2D;

	uniform mat4 modelMatrix;
	uniform mat4 modelViewMatrix;
	uniform mat4 viewMatrix;
	uniform mat4 projectionMatrix;
	uniform vec3 cameraPosition;

	uniform sampler2D uSpatialTexture;
	uniform float uTextureSize;

	uniform float time;
	uniform int point;

	// Attributes
	in vec3 position;
	in vec3 normal;
	in int vindex;
	// Outputs
	out vec3 vNormal;
	out float idx;

	void main() { 	

		idx = float(vindex);

		float wStep = 1. / uTextureSize;
		float hWStep = wStep * 0.5;
		float t = float(vindex) * wStep + hWStep;

		vec3 coordFromTex = texture(uSpatialTexture, vec2(t, 0.5)).rgb;
				

		//just initial normals
		vNormal = normalize(normal);	

		/*****************************************************************
			the goal is to get exactly the same result like with the normal way 
			but with reading the vertexcoordinates from the datatexture
		*****************************************************************/			
		vec3 newPosition = coordFromTex + vNormal * 3.;
		gl_Position = projectionMatrix * modelViewMatrix * vec4(coordFromTex, 1.0);

		//the normal way		
		//vec3 newPosition = position + vNormal * 3.;
		//gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
		
		if(vindex == point){
			gl_Position = projectionMatrix * modelViewMatrix * vec4(newPosition, 1.0);
		}
		
}`;


const FS = `

	precision highp float;
	precision highp int;
	precision highp sampler2D;

	uniform sampler2D uSpatialTexture;
	uniform float uTextureSize;

	in vec3 vNormal;
	in float idx;
	out vec4 out_FragColor;
	
	void main() {

		float wStep = 1. / uTextureSize;
		float hWStep = wStep * 0.5;
		float t = idx * wStep + hWStep;

		vec3 coordFromTex = texture(uSpatialTexture, vec2(t, 0.5)).rgb;

		//out_FragColor = vec4(vec3(1., 0., 0.), 1.);
		out_FragColor = vec4(normalize(coordFromTex), 1.);
}`;


class Main {
	constructor(){
		this.init();
		this.animate();	
	}
			
	init(){
		
		if (!WebGL.isWebGL2Available()) {return false;}
		const canvas = document.createElement('canvas');
		const context = canvas.getContext('webgl2');
		this.renderer = new THREE.WebGLRenderer({ canvas: canvas,context: context,antialias: true});					
		this.renderer.setPixelRatio( window.devicePixelRatio ); 
		this.renderer.shadowMap.enabled = true; 
		this.renderer.shadowMap.type = THREE.PCFSoftShadowMap;	
		this.container = document.getElementById('container');
		this.renderer.setSize(this.container.clientWidth, this.container.clientHeight);
		this.container.appendChild( this.renderer.domElement );
		this.aspect = this.container.clientWidth / this.container.clientHeight; 
		this.scene = new THREE.Scene();
		this.scene.background = new THREE.Color( 0x557799 );
		this.camera = new THREE.PerspectiveCamera( 50, this.aspect, 0.1, 10000000 );
		this.camera.position.set( 30, 20, -30 );
		this.controls = new OrbitControls( this.camera, this.renderer.domElement );
		this.controls.screenSpacePanning = true;
		this.controls.minDistance = 5;
		this.controls.maxDistance = 40;
		this.controls.target.set( 0, 2, 0 );
		this.controls.update();
	
		//****************************************************************
	
		this.params = {
			resolution: 5,
			width: 20,
		}

		const positions = [];
		const vertexIndex = [];

		const resolution = this.params.resolution;
		const width = this.params.width;
		const half = width / 2;
  
  		let idx = 0;
        
		for (let x = 0; x <= resolution; x++) {
			const xp = width * x / resolution;
			for (let z = 0; z <= resolution; z++) {
				const zp = width * z / resolution;
				// Compute position
				P.set(xp - half, 0, zp - half);

				positions.push(P.x, P.y, P.z);
				vertexIndex.push(idx);
				idx += 1;
			}
		}

		// Generate indices and normals
		const indices = this.GenerateIndices();
		const normals = this.GenerateNormals(positions, indices);

		const bytesInFloat32 = 4;
		const bytesInInt32 = 4;
		const positionsArray = new Float32Array(new ArrayBuffer(bytesInFloat32 * positions.length));
		const normalsArray = new Float32Array(new ArrayBuffer(bytesInFloat32 * normals.length));
		const indicesArray = new Uint32Array(new ArrayBuffer(bytesInInt32 * indices.length));
		const vIndicesArray = new Uint32Array(new ArrayBuffer(bytesInInt32 * vertexIndex.length));

		positionsArray.set(positions, 0);
		normalsArray.set(normals, 0);
		indicesArray.set(indices, 0);
		vIndicesArray.set(vertexIndex, 0);

		var uniform = {
			point: {value: null},
			uSpatialTexture: {value: null},
			uTextureSize: {value: null},
		}

		this.material = new THREE.RawShaderMaterial({
			glslVersion: THREE.GLSL3,
			uniforms: uniform,
			vertexShader: VS,
			fragmentShader: FS,
			side: THREE.DoubleSide,
			wireframe: true,
		});


		this.geometry = new THREE.BufferGeometry();	
		this.mesh = new THREE.Mesh(this.geometry, this.material);
		this.mesh.castShadow = false;
		this.mesh.receiveShadow = true;
		this.mesh.frustumCulled = false;
		this.mesh.position.set(0, 0, 0);  
		this.mesh.rotation.x = Math.PI;
		this.geometry.setAttribute('position', new THREE.Float32BufferAttribute(positionsArray, 3));
		this.geometry.setAttribute('normal', new THREE.Float32BufferAttribute(normalsArray, 3));
		this.geometry.setAttribute('vindex', new THREE.Int32BufferAttribute(vIndicesArray, 1));
		this.geometry.setIndex(new THREE.BufferAttribute(indicesArray, 1));
		
		this.geometry.attributes.position.needsUpdate = true;
		this.geometry.attributes.normal.needsUpdate = true;
		this.geometry.attributes.vindex.needsUpdate = true;

	
		//here i store all vertex coordinates in a data texture
		this.tex = new THREE.DataTexture(
			positionsArray,
			vIndicesArray.length + 1,	//width
			1, //height
			THREE.RGBFormat, 
			THREE.FloatType
		);
		this.tex.needsUpdate = true;
		
		
		this.mesh.material.uniforms.uSpatialTexture.value = this.tex;
		this.mesh.material.uniforms.uTextureSize.value = vIndicesArray.length + 1;
		this.mesh.material.uniformsNeedUpdate = true;


		this.scene.add( this.mesh );


		const ambientLight = new THREE.AmbientLight( 0xffffff, 0.2 );
		this.scene.add( ambientLight );

		const pointLight = new THREE.PointLight( 0xffffff, 0.8 );
		this.scene.add( this.camera );
		this.camera.add( pointLight );
	
	}//end init
	
	
	GenerateNormals(positions, indices) {
    const normals = new Array(positions.length).fill(0.0);
    for (let i = 0, n = indices.length; i < n; i+= 3) {
      const i1 = indices[i] * 3;
      const i2 = indices[i+1] * 3;
      const i3 = indices[i+2] * 3;

      N1.fromArray(positions, i1);
      N2.fromArray(positions, i2);
      N3.fromArray(positions, i3);

      D1.subVectors(N3, N2);
      D2.subVectors(N1, N2);
      D1.cross(D2);

      normals[i1] += D1.x;
      normals[i2] += D1.x;
      normals[i3] += D1.x;
      normals[i1+1] += D1.y;
      normals[i2+1] += D1.y;
      normals[i3+1] += D1.y;
      normals[i1+2] += D1.z;
      normals[i2+2] += D1.z;
      normals[i3+2] += D1.z;
    }
    return normals;
  }

  GenerateIndices() {
    const resolution = this.params.resolution;
    const indices = [];
    for (let i = 0; i < resolution; i++) {
      for (let j = 0; j < resolution; j++) {
        indices.push(
            i * (resolution + 1) + j,
            (i + 1) * (resolution + 1) + j + 1,
            i * (resolution + 1) + j + 1);
        indices.push(
            (i + 1) * (resolution + 1) + j,
            (i + 1) * (resolution + 1) + j + 1,
            i * (resolution + 1) + j);
      }
    }
    return indices;
  }
	
	
	animate(){
		//requestAnimationFrame( this.animate );  
		requestAnimationFrame( this.animate.bind(this) );  
		this.render();
	}//end animate
	
	
	render(){
		
		//this.controls.update();
		this.camera.updateMatrixWorld();
		this.camera.updateProjectionMatrix(); 
		this.renderer.render(this.scene, this.camera); 

		var index = document.getElementById("testfield1").value;
		if(index == '')this.mesh.material.uniforms.point.value = 100000000000;	//means no vertex selected
		else this.mesh.material.uniforms.point.value = index;
		this.mesh.material.uniformsNeedUpdate = true;
	}//end render
}//end class


new Main();

Hi!

Seems, it needs to be THREE.RGBAFormat, and all the data for points there has to have 4 components per texel. :thinking:
I remember, that it didn’t work for me with RGBFormat here: A Model of Wire, until I changed it to RGBAFormat.

1 Like

I’ve already experimented with the example of THREE and RGBA then I got values, but I wanted to avoid the A value, because it’s only artificially added and has no use at all. That’s why I thought RGB format but that obviously doesn’t work.

Ha! Now it works. Now that you explicitly referred to RGBA, I did exactly that with the RGBA example from THREE. Then I will probably upload two repositories to github later.

The geometry is now completely made up of the coordinates of the DataTexture. I also used the coordinates as color weighting in the fragment shader. The black quadrant is -x, -z, so is black. In the direction of +x, +z it becomes more and more colored

Here the repository:

1 Like