I am using the Chameleon.js code, which has an export feature. This creates an .obj file and a texture.png containing the unwrapped canvas data.
I am struggling, though, to load this model back into another scene and set the generated texture on it. The generated obj contains a BufferGeometry which has attributes for normals/position/uvs, but I've no idea how to get the generated texture mapping properly. At the moment I can apply the texture to the mesh, but its projection is entirely wrong and I can only see a tiny portion spread across the entire object.
Any help would be greatly appreciated please!
This is the BufferGeometry I am getting from the exported obj:

I could load an exported OBJ with this code:
import * as THREE from '../build/three.module.js';
import { OBJLoader } from './jsm/loaders/OBJLoader.js';
// Shared three.js handles: assigned once in init(), read by the render loop
// and the resize handler.
let camera, scene, renderer;

// init() is async (it awaits asset loading); start the render loop once it
// resolves. Attach a catch so a failed texture/OBJ load is reported instead
// of becoming an unhandled promise rejection.
init().then( animate ).catch( ( err ) => console.error( err ) );
// Build the scene, load the exported OBJ and its baked texture in parallel,
// apply the texture, and create the WebGL renderer.
async function init() {

	camera = new THREE.PerspectiveCamera( 45, window.innerWidth / window.innerHeight, 1, 2000 );
	camera.position.z = 10;

	// Scene with ambient fill plus a point light that follows the camera.
	scene = new THREE.Scene();

	const ambient = new THREE.AmbientLight( 0xcccccc, 0.4 );
	scene.add( ambient );

	const headlight = new THREE.PointLight( 0xffffff, 0.8 );
	camera.add( headlight );
	scene.add( camera );

	// Fetch the texture and the geometry concurrently.
	const textureLoader = new THREE.TextureLoader();
	const objLoader = new OBJLoader();

	const [ texture, obj ] = await Promise.all( [
		textureLoader.loadAsync( 'models/obj/texture.png' ),
		objLoader.loadAsync( 'models/obj/model.obj' ),
	] );

	obj.traverse( ( child ) => {

		if ( child.isMesh ) {

			// Assign the baked texture; the exported OBJ references normal
			// indices without supplying normal data, so rebuild the normals.
			child.material.map = texture;
			child.geometry.computeVertexNormals();

		}

	} );

	scene.add( obj );

	renderer = new THREE.WebGLRenderer();
	renderer.setPixelRatio( window.devicePixelRatio );
	renderer.setSize( window.innerWidth, window.innerHeight );
	document.body.appendChild( renderer.domElement );

	window.addEventListener( 'resize', onWindowResize );
}
// Keep the camera frustum and the drawing buffer in sync with the window.
function onWindowResize() {

	const width = window.innerWidth;
	const height = window.innerHeight;

	camera.aspect = width / height;
	camera.updateProjectionMatrix();

	renderer.setSize( width, height );
}
//
// Self-scheduling render loop: queue the next frame, then draw this one.
function animate() {

	requestAnimationFrame( () => animate() );
	render();
}
// Draw a single frame of the scene from the current camera.
function render() {
renderer.render( scene, camera );
}
Notice that I had to add child.geometry.computeVertexNormals();
since the exported OBJ has a slight problem: it defines normal indices without including the corresponding normal data. OBJLoader
can't process such a definition and produces undefined normal data. To fix this, the normals are computed from scratch via computeVertexNormals()
.
An old but great app. Unfortunately, it has some issues.
The generated texture does NOT work with the original UV map. That means you need to use the exported texture together with the exported geometry, whose UV map is based on the painted and disconnected faces.
You might find this https://brushhh.blogspot.com/ interesting.
Ah yes! Thank you so much for looking into this. The vertex/normals/uv stuff just goes over my head, unfortunately, so I just couldn't get to the bottom of what was causing the issue. The computeVertexNormals() call did the trick, but it also highlighted that I had an issue with my obj file, which I have now rectified too.
Thanks!