I’m trying to move my camera to an object and have it fit to the center of my screen. At the moment I’ve been able to move the camera to the exact center of the object, but I can’t figure out how I’m supposed to set the vector length.
I figure I can take the target vector and subtract from its length based on a variable, so that the object always fits the screen. I’ve looked at this example, https://codepen.io/discoverthreejs/full/vwVeZB, which worked great, but it seems to have stopped working as of a couple of days ago. Any advice on making this as simple as possible would be greatly appreciated!
// Click handler: raycast from the cursor, then tween the camera toward the
// closest hit so the clicked object fits the view.
//
// Fixes over the original:
//  - `target` was never assigned, so the camera always tweened to (0,0,0).
//  - `new THREE.Vector3(raycaster.setFromCamera(...))` was a no-op junk value
//    (setFromCamera returns undefined) — removed.
//  - The `var i` closure meant onUpdate read `intersects[i]` after the loop
//    finished, i.e. `undefined` — now only the closest hit is used.
//  - `camera.lookAt` was passed the intersection record; it needs a Vector3
//    (the hit `.point`).
//  - One tween per intersection fought each other — intersectObjects sorts
//    by distance, so only intersects[0] matters.
function onMouseDown(event) {
  event.preventDefault();

  // Convert the click position to normalized device coordinates (-1..+1).
  mouse.x = (event.clientX / window.innerWidth) * 2 - 1;
  mouse.y = -(event.clientY / window.innerHeight) * 2 + 1;

  raycaster.setFromCamera(mouse, camera);
  var intersects = raycaster.intersectObjects(scene.children, true);

  // Nothing under the cursor — leave the camera where it is.
  if (intersects.length === 0) return;

  // Closest hit only (intersectObjects returns hits sorted by distance).
  var hit = intersects[0];
  var lookTarget = hit.point.clone();

  // Distance the camera must keep from the object so its bounding sphere
  // fills the vertical field of view: radius / sin(fov / 2).
  // NOTE(review): assumes a PerspectiveCamera with `fov` in degrees.
  var sphere = new THREE.Box3()
    .setFromObject(hit.object)
    .getBoundingSphere(new THREE.Sphere());
  var fovRadians = (camera.fov * Math.PI) / 180;
  var fitDistance = sphere.radius / Math.sin(fovRadians / 2);

  // Back the destination off from the hit point along the current
  // camera-to-object direction, so the approach angle is preserved.
  var direction = new THREE.Vector3()
    .subVectors(camera.position, lookTarget)
    .normalize();
  var target = lookTarget.clone().addScaledVector(direction, fitDistance);

  gsap.to(camera.position, {
    duration: 1,
    x: target.x,
    y: target.y,
    z: target.z,
    onUpdate: function () {
      // lookAt expects a Vector3 (or x, y, z), not an intersection record.
      camera.lookAt(lookTarget);
    }
  });
}