Indexed buffer geometry normals

Can you help me with this problem? Documentation on properly defining an indexed buffer geometry with normals is very hard to find, and I haven't come across any examples yet.

Here is my code
// Build an indexed BufferGeometry from the legacy Geometry on simX.
//
// Key rule for indexed geometry: every attribute (position, normal, color)
// must hold exactly ONE entry per *unique* vertex, addressed by the index.
// The original code pushed positions per unique vertex but pushed three
// normals/colors per face (nine floats per face), leaving the normal/color
// arrays out of sync with the index — that is what produced the broken
// shading. Here we accumulate each face normal into its three corner
// vertices and normalize afterwards, yielding smooth per-vertex normals.
var bufferGeometry = new THREE.BufferGeometry();

var geometry = simX.geometry;
var vertexCount = geometry.vertices.length;

// Typed accumulators, one slot triple per unique vertex.
// Float32Array is zero-initialized, which the normal accumulation relies on.
var positions = new Float32Array( vertexCount * 3 );
var normals = new Float32Array( vertexCount * 3 );
var colors = new Float32Array( vertexCount * 3 );
var index = [];

geometry.vertices.forEach( function ( vertex, i ) {
	positions[ i * 3 ] = vertex.x;
	positions[ i * 3 + 1 ] = vertex.y;
	positions[ i * 3 + 2 ] = vertex.z;
} );

geometry.faces.forEach( function ( face ) {
	index.push( face.a, face.b, face.c );

	[ face.a, face.b, face.c ].forEach( function ( vi ) {
		// Sum the face normal into each shared corner; normalized below.
		normals[ vi * 3 ] += face.normal.x;
		normals[ vi * 3 + 1 ] += face.normal.y;
		normals[ vi * 3 + 2 ] += face.normal.z;
		// A shared vertex can only carry one color in indexed geometry;
		// the last face touching it wins (use non-indexed geometry if you
		// need true per-face colors).
		colors[ vi * 3 ] = face.color.r;
		colors[ vi * 3 + 1 ] = face.color.g;
		colors[ vi * 3 + 2 ] = face.color.b;
	} );
} );

// Normalize the accumulated vertex normals (guard against zero-length
// normals on unreferenced vertices).
for ( var i = 0; i < vertexCount; i ++ ) {
	var nx = normals[ i * 3 ];
	var ny = normals[ i * 3 + 1 ];
	var nz = normals[ i * 3 + 2 ];
	var len = Math.sqrt( nx * nx + ny * ny + nz * nz ) || 1;
	normals[ i * 3 ] = nx / len;
	normals[ i * 3 + 1 ] = ny / len;
	normals[ i * 3 + 2 ] = nz / len;
}

bufferGeometry.addAttribute( 'position', new THREE.BufferAttribute( positions, 3 ) );
// Uint16 indices overflow silently beyond 65535 vertices — pick the
// storage type from the actual vertex count.
var IndexArray = ( vertexCount > 65535 ) ? Uint32Array : Uint16Array;
bufferGeometry.setIndex( new THREE.BufferAttribute( new IndexArray( index ), 1 ) );
bufferGeometry.addAttribute( 'normal', new THREE.BufferAttribute( normals, 3 ) );
bufferGeometry.addAttribute( 'color', new THREE.BufferAttribute( colors, 3 ) );

simBuffer = new THREE.Mesh( bufferGeometry, material );

simBuffer.geometry.computeBoundingSphere();
simBuffer.geometry.attributes.position.needsUpdate = true;

_smv = simBuffer.geometry.attributes.position.array;

scene.add( simBuffer );

and I get this as a result
Screenshot%20from%202018-08-04%2013-55-22
Those normals are way off.
I have no idea how to correct this, as there are no examples to go off of.

If I do not index the vertices then I get the correct result…
// Non-indexed ("triangle soup") BufferGeometry built from the legacy
// Geometry on simX. Every face corner gets its own vertex entry, so each
// triangle carries its own flat face normal and face color without
// sharing any data with neighbouring faces — this is what makes flat
// shading come out correctly without an index.
var bufferGeometry = new THREE.BufferGeometry();

var geometry = simX.geometry;
var positions = [];
var normals = [];
var colors = [];

geometry.faces.forEach( function ( face ) {

	var corners = [
		geometry.vertices[ face.a ],
		geometry.vertices[ face.b ],
		geometry.vertices[ face.c ]
	];

	// One position per corner; the face normal and face color are
	// duplicated across all three corners of the triangle.
	corners.forEach( function ( corner ) {
		positions.push( corner.x, corner.y, corner.z );
	} );
	corners.forEach( function () {
		normals.push( face.normal.x, face.normal.y, face.normal.z );
	} );
	corners.forEach( function () {
		colors.push( face.color.r, face.color.g, face.color.b );
	} );

} );

bufferGeometry.addAttribute( 'position', new THREE.Float32BufferAttribute( positions, 3 ) );
bufferGeometry.addAttribute( 'normal', new THREE.Float32BufferAttribute( normals, 3 ) );
bufferGeometry.addAttribute( 'color', new THREE.Float32BufferAttribute( colors, 3 ) );

simBuffer = new THREE.Mesh( bufferGeometry, material );

simBuffer.geometry.computeBoundingSphere();
simBuffer.geometry.attributes.position.needsUpdate = true;

_smv = simBuffer.geometry.attributes.position.array;

scene.add( simBuffer );

Screenshot%20from%202018-08-04%2014-00-11

I found one workaround: I changed the material type to Phong and added the flatShading attribute.

however, is there a solution for Lambert?

You might want to have a look at: https://threejs.org/examples/webgl_buffergeometry_indexed.html

It shows how to generate an indexed plane geometry with position, normal, and color attribute data.

1 Like

I managed to get the Phong material working OK, but are there any examples for Lambert? The pictures I posted previously are the results when using Lambert materials.

I think you’re thinking about this the wrong way. Normals are just an attribute the same way vertex positions are. If you have a position at some index, you can have a color, a normal, a UV coordinate or anything else for that matter.

What is the problem here?

Pick a vertex of your tetrahedron. Does it have one normal or several?

Your first image has one normal per vertex, your second has several. When there is a single normal at each of the “tips”, the face interpolates across its surface between those vectors, and you get something resembling a very low-res sphere.

When you use non-indexed geometry, it’s also called a triangle soup, and every point/tip actually has many vertices and not one (this is the redundancy you remove when using indices).

But, for your actual use case, you want a single position but multiple normals. This cannot be done by using a single index. If you have P1 and N1, N2, N3, you have to duplicate the point 3 times. It will have 3 unique indices, going to three unique vertices (all having the same values though) and three unique normals (all with different values).

The reason why you can’t do this in lambert shader is actually fairly complex. The Phong shader computes this value in the fragment shader, in which it can call some WebGL variables and derive that normal without even having the attribute normal. Lambert shader unfortunately only shades in the vertex shader so you don’t have access to this variable.

The flaw itself is not in the Lambert model just to be clear, but the fact that three.js only computes the lambert model in the vertex shader not the pixel shader.

The proper way to solve this is the same way you would properly solve it for the Phong shader - compute the proper normals and store them in an attribute.

1 Like

Makes sense. I was hoping this wasn’t the case

So if @pailhead is correct, then you can copy their shader and make it per pixel by moving the calculation to the fragment shader.

Idk lambert yet, and if it has to be defined in the vertex shader the way three.js does it… but many things are done in the vertex shader for power consumption reasons, and to take advantage of the interpolation; sometimes they are things that can be done in the fragment shader.