Sorry I couldn’t phrase the problem properly but I will try my best to explain it here.
As in my experience creating a particle with mesh in codes can be quite easy but not for a feat in import GLTF or GLB for me is kinda hard. So I know how can I make particles with any createable shapes but my problem is that doing it in gltf makes me sucks…
So this is my constant shaderMaterial for particles of anything (As I know)…
uniforms: {
color: { value: new THREE.Color( 0xfffccc ) },
pointTexture: { value: new THREE.TextureLoader().load( masker ) },
alphaTest: { value: 0.9 }
},
vertexShader: `
attribute float size;
attribute vec3 customColor;
varying vec3 vColor;
void main() {
vColor = customColor;
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
gl_PointSize = size * ( 300.0 / -mvPosition.z );
gl_Position = projectionMatrix * mvPosition;
}
`,
fragmentShader: `
uniform vec3 color;
uniform sampler2D pointTexture;
uniform float alphaTest;
varying vec3 vColor;
void main() {
gl_FragColor = vec4( color * vColor, 1.0 );
gl_FragColor = gl_FragColor * texture2D( pointTexture, gl_PointCoord );
if ( gl_FragColor.a < alphaTest ) discard;
}
`,
Yes this code is no issue for me but the thing is that when I did this in a gltf function…but it is not accurate…because when I increase the size of a model that I imparted…it will be twice as the or more than that when I increase my bufferGeometry of gltf… Here is the code
var glTFGeometry = new THREE.BufferGeometry();
let loader = new GLTFLoader();
var model;
var model2;
loader.load(gltfItem,function (gltf) {
model = gltf.scene
gltf.scene.traverse( function ( child ) {
if ( child.isMesh ) {
glTFGeometry = child.geometry;
console.log(glTFGeometry)
const material = new THREE.MeshBasicMaterial( { color: 'gray' } );
model2 = new THREE.Mesh( glTFGeometry, material );
model2 = model2
model2.geometry.scale(2,2,2)
scene.add(model2)
glTFGeometry.deleteAttribute( 'normal' );
glTFGeometry.deleteAttribute( 'uv' );
glTFGeometry = BufferGeometryUtils.mergeVertices( glTFGeometry );
//
const positionAttribute = glTFGeometry.getAttribute( 'position' );
console.log(positionAttribute)
const colors = [];
const sizes = [];
const color = new THREE.Color();
for ( let i = 0, l = positionAttribute.count; i < l; i ++ ) {
color.setHSL(1,1,1);
color.toArray( colors, i * 3 );
sizes[ i ] = PARTICLE_SIZE * 0.5;
}
const geometry = new THREE.BufferGeometry();
geometry.setAttribute( 'position', positionAttribute );
geometry.setAttribute( 'customColor', new THREE.Float32BufferAttribute( colors, 3 ) );
geometry.setAttribute( 'size', new THREE.Float32BufferAttribute( sizes, 1 ) );
const material2 = new THREE.ShaderMaterial( {
uniforms: {
color: { value: new THREE.Color( 0x5664d1 ) },
pointTexture: { value: new THREE.TextureLoader().load( masker ) },
alphaTest: { value: 0.9 }
},
vertexShader: `
attribute float size;
attribute vec3 customColor;
varying vec3 vColor;
void main() {
vColor = customColor;
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
gl_PointSize = size * ( 300.0 / -mvPosition.z );
gl_Position = projectionMatrix * mvPosition;
}
`,
fragmentShader: `
uniform vec3 color;
uniform sampler2D pointTexture;
uniform float alphaTest;
varying vec3 vColor;
void main() {
gl_FragColor = vec4( color * vColor, 1.0 );
gl_FragColor = gl_FragColor * texture2D( pointTexture, gl_PointCoord );
if ( gl_FragColor.a < alphaTest ) discard;
}
`,
} );
//
particles2 = new THREE.Points( geometry, material2 );
scene.add( particles2 );
}
} );
scene.add(model)
renderer.render(scene,camera)
})
I’ve been grinding search for this gltf particles but really can’t find much answer here or whatsoever…I’m holding a tight grip if anyone could be some clue or near to this please give me a link or something…maybe I could solve it because I’m too confused how I would load my GLTF BufferGeometry in my render.and it becomes a particles.
.