I’m trying to put a single CanvasTexture on each face of my custom BufferGeometry. However I’m getting these horizontal lines on the outside and inside of my mesh.
This face with the horizontal lines should look more like the adjacent face:
I’m instantiating my texture like this :
angleTexture = new THREE.CanvasTexture(angleTexCanvas);
angleTexture.minFilter = THREE.LinearFilter;
angleTexture.magFilter = THREE.NearestFilter;
angleTexture.mapping = THREE.EquirectangularReflectionMapping;
angleTexture.wrapS = THREE.ClampToEdgeWrapping;
angleTexture.wrapT = THREE.ClampToEdgeWrapping;
When I set the material to use a color, instead of mapping a texture, the colors appear perfectly on all sides. But when I use a CanvasTexture, I get these lines, that Im assuming are stretched pixels.
It’s a custom geometry, though fairly simple, and I’m manually calculating my UVs, like this :
function calculateUVs(mesh) {
const positions = getVerticeInfo(mesh).position;
const indices = mesh.geometry.index.array;
const numRectangles = indices.length / 6; // 6 indices per rectangle
const uvs = new Float32Array(numRectangles * 4 * 2); // 4 vertices per rectangle, 2 UV values per vertex
for (let i = 0; i < numRectangles; i++) {
const rectIndices = [
indices[i * 6], indices[i * 6 + 1], indices[i * 6 + 2],
indices[i * 6 + 3], indices[i * 6 + 4], indices[i * 6 + 5]
];
const uniqueIndices = Array.from(new Set(rectIndices));
let vertices = uniqueIndices.map(index => {
return new THREE.Vector3(
positions[index * 3],
positions[index * 3 + 1],
positions[index * 3 + 2]
);
});
// Compute normal of the rectangle's plane
const normal = new THREE.Vector3();
new THREE.Triangle(vertices[0], vertices[1], vertices[2]).getNormal(normal);
// Project vertices to 2D plane
const projectedVertices = vertices.map(vertex => projectTo2D(vertex, normal));
// Sort projected vertices
projectedVertices.sort((a, b) => a.y - b.y || a.x - b.x);
// Assign UVs based on sorted projected vertices
const uvOrder = [new THREE.Vector2(0, 0), new THREE.Vector2(1, 0), new THREE.Vector2(0, 1), new THREE.Vector2(1, 1)];
projectedVertices.forEach((projVertex, index) => {
const originalIndex = uniqueIndices[vertices.findIndex(v => projectTo2D(v, normal).equals(projVertex))];
uvs[originalIndex * 2] = uvOrder[index].x;
uvs[originalIndex * 2 + 1] = uvOrder[index].y;
});
}
mesh.geometry.setAttribute('uv', new THREE.BufferAttribute(uvs, 2));
}
function projectTo2D(vertex, normal) {
const up = new THREE.Vector3(0, 1, 0);
const axis1 = normal.clone().cross(up).normalize();
const axis2 = normal.clone().cross(axis1).normalize();
return new THREE.Vector2(vertex.dot(axis1), vertex.dot(axis2));
}
function getVerticeInfo(mesh) {
const verticesWorldSpace = [];
const normalsWorldSpace = [];
var positionArray = [];
var normalArray = [];
mesh.geometry.computeVertexNormals();
const positionAttribute = mesh.geometry.getAttribute("position");
const normalAttribute = mesh.geometry.getAttribute("normal");
// Calculate the normal matrix from the mesh's world matrix
const normalMatrix = new THREE.Matrix3().getNormalMatrix(mesh.matrixWorld);
for (let i = 0; i < positionAttribute.count; i++) {
// Extract and transform vertex positions
const vertex = new THREE.Vector3(
positionAttribute.getX(i),
positionAttribute.getY(i),
positionAttribute.getZ(i)
);
vertex.applyMatrix4(mesh.matrixWorld);
// Extract and transform vertex normals
const normal = new THREE.Vector3(
normalAttribute.getX(i),
normalAttribute.getY(i),
normalAttribute.getZ(i)
);
normal.applyMatrix3(normalMatrix).normalize();
positionArray.push(vertex.x, vertex.y, vertex.z);
normalArray.push(normal.x, normal.y, normal.z);
verticesWorldSpace.push(vertex);
normalsWorldSpace.push(normal);
}
Is this a UV problem? Any issues with my UV code? Are my settings off?
Any tips? Thank you.