Implementing B4D Blockout Feature in three.js: Need Guidance on Removing Undercuts


Full video: https://www.youtube.com/watch?v=bEMfhhNotrk

Hi everyone,

I’m currently working on implementing a B4D (blenderfordental TOOL) blockout feature (the process of removing undercuts from a 3D model to ensure manufacturability or proper mold release) using three.js. I’ve already implemented undercut detection, and now I’m looking to build the blockout feature that actually removes those undercuts.

Does anyone have examples, ideas, or references that could help me tackle this? Any guidance would be greatly appreciated.

Thank you in advance!

code:

import Stats from 'three/examples/jsm/libs/stats.module.js';
import * as dat from 'three/examples/jsm/libs/lil-gui.module.min.js';
import * as THREE from 'three';
import { OrbitControls } from 'three/examples/jsm/controls/OrbitControls.js';
import * as BufferGeometryUtils from 'three/examples/jsm/utils/BufferGeometryUtils.js';
import { STLLoader } from 'three/examples/jsm/loaders/STLLoader.js';
import { acceleratedRaycast, computeBoundsTree, disposeBoundsTree } from 'three-mesh-bvh';

THREE.Mesh.prototype.raycast = acceleratedRaycast;
THREE.BufferGeometry.prototype.computeBoundsTree = computeBoundsTree;
THREE.BufferGeometry.prototype.disposeBoundsTree = disposeBoundsTree;

let stats, scene, camera, renderer, controls;
let targetMesh = null;
let undercutMesh = null; 
let material;

const params = {
  matcap: 'Clay',
  displayHelper: false,
  detectUndercuts: false,
  blockout: false,
};

const matcaps = {};

const stlLoader = new STLLoader();
const gui = new dat.GUI();

// STL file loading and processing
function setTargetMeshGeometry(geometry) {
  if (targetMesh) {
    scene.remove(targetMesh);
    targetMesh.geometry.dispose();
  }

  geometry.center();
  geometry.computeBoundingSphere();
  if (geometry.boundingSphere) {
    const radius = geometry.boundingSphere.radius;
    geometry.scale(1 / radius, 1 / radius, 1 / radius);
  }
  geometry.computeVertexNormals();
  targetMesh = new THREE.Mesh(geometry, material);
  scene.add(targetMesh);
}

// Undercut detection functionality
function detectUndercuts() {
  if (!targetMesh) return;

  // Remove previous undercut visualization
  if (undercutMesh) {
    scene.remove(undercutMesh);
    undercutMesh.geometry.dispose();
    undercutMesh.material.dispose();
    undercutMesh = null;
  }

  // Update targetMesh's world matrix
  targetMesh.updateMatrixWorld(true);
  const geometry = targetMesh.geometry;
  const posAttr = geometry.attributes.position;
  const indexAttr = geometry.index;

  const matrixWorld = targetMesh.matrixWorld;
  const cameraPos = camera.position;

  const undercutPositions = [];
  const undercutNormals = [];

  // Determine undercut status for each triangle
  for (let i = 0; i < indexAttr.count; i += 3) {
    const aIndex = indexAttr.getX(i);
    const bIndex = indexAttr.getX(i + 1);
    const cIndex = indexAttr.getX(i + 2);

    const a = new THREE.Vector3().fromBufferAttribute(posAttr, aIndex).applyMatrix4(matrixWorld);
    const b = new THREE.Vector3().fromBufferAttribute(posAttr, bIndex).applyMatrix4(matrixWorld);
    const c = new THREE.Vector3().fromBufferAttribute(posAttr, cIndex).applyMatrix4(matrixWorld);

    // Calculate the triangle center
    const center = new THREE.Vector3().addVectors(a, b).add(c).divideScalar(3);

    // Calculate the triangle's normal in world space
    const ab = new THREE.Vector3().subVectors(b, a);
    const ac = new THREE.Vector3().subVectors(c, a);
    const normal = new THREE.Vector3().crossVectors(ab, ac).normalize();

    // Vector from the triangle center to the camera (view direction)
    const viewVec = new THREE.Vector3().subVectors(cameraPos, center).normalize();

    // If the triangle is facing the camera, the dot product is positive.
    // Only consider as an undercut if not visible (back face, dot <= 0.01)
    const dot = normal.dot(viewVec);
    if (dot <= 0.01) {
      // If the triangle is an undercut, add its vertices
      // (creating separate geometry for visualization to avoid duplicates)
      undercutPositions.push(a.x, a.y, a.z);
      undercutPositions.push(b.x, b.y, b.z);
      undercutPositions.push(c.x, c.y, c.z);

      // Apply the same normal for each vertex of the triangle
      undercutNormals.push(normal.x, normal.y, normal.z);
      undercutNormals.push(normal.x, normal.y, normal.z);
      undercutNormals.push(normal.x, normal.y, normal.z);
    }
  }

  // Create BufferGeometry for undercut visualization
  const undercutGeometry = new THREE.BufferGeometry();
  undercutGeometry.setAttribute('position', new THREE.Float32BufferAttribute(undercutPositions, 3));
  undercutGeometry.setAttribute('normal', new THREE.Float32BufferAttribute(undercutNormals, 3));

  // Use a red, semi-transparent MeshBasicMaterial (with double-sided rendering)
  const redMaterial = new THREE.MeshBasicMaterial({
    color: 0xff0000,
    side: THREE.DoubleSide,
    transparent: true,
    opacity: 0.5
  });

  undercutMesh = new THREE.Mesh(undercutGeometry, redMaterial);
  scene.add(undercutMesh);
}

// Blockout functionality
function applyBlockout() {
  // Not yet implemented: this is the part I'm asking for guidance on.
}

// Initialization function
function init() {
  renderer = new THREE.WebGLRenderer({ antialias: true });
  renderer.setSize(window.innerWidth, window.innerHeight);
  document.body.appendChild(renderer.domElement);
  
  scene = new THREE.Scene();
  scene.add(new THREE.AmbientLight(0xffffff, 0.6));
  
  camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 0.1, 50);
  camera.position.set(0, 0, 3);
  
  controls = new OrbitControls(camera, renderer.domElement);
  
  matcaps['Clay'] = new THREE.TextureLoader().load('textures/B67F6B_4B2E2A_6C3A34_F3DBC6-256px.png');
  matcaps['Red Wax'] = new THREE.TextureLoader().load('textures/763C39_431510_210504_55241C-256px.png');
  matcaps['Shiny Green'] = new THREE.TextureLoader().load('textures/3B6E10_E3F2C3_88AC2E_99CE51-256px.png');
  matcaps['Normal'] = new THREE.TextureLoader().load('textures/7877EE_D87FC5_75D9C7_1C78C0-256px.png');

  material = new THREE.MeshMatcapMaterial({
    flatShading: true,
    side: THREE.DoubleSide,
  });
  
  stats = new Stats();
  document.body.appendChild(stats.dom);
  
  gui.add({ detectUndercuts }, 'detectUndercuts').name("Detect Undercuts");
  gui.add({ applyBlockout }, 'applyBlockout').name("Apply Blockout");
  
  window.addEventListener('resize', () => {
    camera.aspect = window.innerWidth / window.innerHeight;
    camera.updateProjectionMatrix();
    renderer.setSize(window.innerWidth, window.innerHeight);
  });

  window.addEventListener('dragover', e => {
    e.preventDefault();
  }, false);

  window.addEventListener('drop', e => {
    e.preventDefault();
  
    if (e.dataTransfer.files && e.dataTransfer.files.length > 0) {
      const file = e.dataTransfer.files[0];
      const reader = new FileReader();
  
      reader.addEventListener('load', event => {
        const arrayBuffer = event.target.result;
        const geometry = stlLoader.parse(arrayBuffer);
              
        const positionAttr = geometry.getAttribute('position');
        if (!positionAttr) {
          throw new Error('BufferGeometry has no position attribute.');
        }
        const positions = positionAttr.array; 
        
        const indices = [];
        for (let i = 0; i < positions.length / 3; i += 3) {
          indices.push(i, i + 1, i + 2);
        }
        
        let newGeometry = new THREE.BufferGeometry();
        newGeometry.setAttribute(
          'position',
          new THREE.Float32BufferAttribute(positions, 3)
        );
        newGeometry.setIndex(
          new THREE.Uint32BufferAttribute(indices, 1)
        );
        
        setTargetMeshGeometry(newGeometry);
      }, false);
  
      reader.readAsArrayBuffer(file);
    }
  }, false);
  
  render();
}

function render() {
  material.matcap = matcaps[params.matcap];
  requestAnimationFrame(render);
  stats.update();
  renderer.render(scene, camera);
}

init();

It would be helpful if you provided some background, in layman’s terms, on the specific lingo used.

An “undercut” is probably the area marked green, i.e. the surface that stays unlit under parallel rays of light coming straight from above?

“B4D” probably refers to “Blender for dentists”?

But what might a “Blockout” be?


I’ve updated the post to incorporate some background explanations in layman’s terms based on the feedback. Here’s what’s changed:

  • Undercut: This typically refers to the area that remains unlit when parallel rays of light come straight from above; essentially, the “shadowed” or hidden surface (a small code sketch follows below).
  • B4D: blenderfordental TOOL
  • Blockout: This is the process of removing the undercut areas from the model, ensuring it’s properly prepared for manufacturing or other downstream processes.
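
For completeness, here is a minimal, untested sketch of the “parallel rays from above” idea in code, assuming a fixed insertion direction of +Y and an indexed BufferGeometry (the function name findUndercutTriangles is just a placeholder, not something from the code above):

import * as THREE from 'three';

// Classify triangles as undercuts relative to a fixed insertion direction
// (+Y here, i.e. parallel rays coming straight from above) instead of the
// camera position. This is the purely local "negative draft" test; it does
// not account for faces that are merely shadowed by other parts of the model.
function findUndercutTriangles(mesh, insertionDir = new THREE.Vector3(0, 1, 0)) {
  mesh.updateMatrixWorld(true);
  const posAttr = mesh.geometry.attributes.position;
  const indexAttr = mesh.geometry.index;
  const undercutTriangles = [];

  const a = new THREE.Vector3();
  const b = new THREE.Vector3();
  const c = new THREE.Vector3();
  const ab = new THREE.Vector3();
  const ac = new THREE.Vector3();
  const normal = new THREE.Vector3();

  for (let i = 0; i < indexAttr.count; i += 3) {
    a.fromBufferAttribute(posAttr, indexAttr.getX(i)).applyMatrix4(mesh.matrixWorld);
    b.fromBufferAttribute(posAttr, indexAttr.getX(i + 1)).applyMatrix4(mesh.matrixWorld);
    c.fromBufferAttribute(posAttr, indexAttr.getX(i + 2)).applyMatrix4(mesh.matrixWorld);

    normal.crossVectors(ab.subVectors(b, a), ac.subVectors(c, a)).normalize();

    // A face whose normal points away from the insertion direction cannot be
    // reached by rays travelling along that direction, so it is an undercut.
    if (normal.dot(insertionDir) < 0) {
      undercutTriangles.push(i / 3); // triangle index
    }
  }
  return undercutTriangles;
}

The returned triangle indices could then drive the same red overlay used in detectUndercuts(), or whatever the blockout step ends up needing.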

Thanks for the input!

This looks like a subset of the same computation that is done for directional shadow casting.

You could render a depth map using an orthographic camera from above…

then render the model and transform the fragment from modelspace into the space of the camera used to render the depth target, either directly, or from modelspace to worldspace and then to shadow camera space, to determine if the fragment is “in shadow”.
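
To make that concrete, here is a rough, untested sketch of such a depth pass in three.js (names like createTopDownDepthPass, orthoCam and depthTarget are placeholders, not part of the code above). The orthographic camera looks straight down the insertion axis, its depth buffer is kept as a texture, and a custom ShaderMaterial on the model could then project each world-space fragment with the camera’s projection and view matrices and compare depths, exactly like shadow mapping:

import * as THREE from 'three';

function createTopDownDepthPass(renderer, scene, size = 1024) {
  // Orthographic camera looking straight down the insertion axis (-Y).
  // The frustum should be sized to enclose the model's bounding box.
  const orthoCam = new THREE.OrthographicCamera(-1, 1, 1, -1, 0.1, 10);
  orthoCam.up.set(0, 0, -1); // avoid an up vector parallel to the view direction
  orthoCam.position.set(0, 5, 0);
  orthoCam.lookAt(0, 0, 0);
  orthoCam.updateMatrixWorld(true);

  // Render target whose depth buffer stays available as a sampleable texture.
  const depthTarget = new THREE.WebGLRenderTarget(size, size);
  depthTarget.depthTexture = new THREE.DepthTexture(size, size);

  renderer.setRenderTarget(depthTarget);
  renderer.render(scene, orthoCam);
  renderer.setRenderTarget(null);

  // Pass depthTarget.depthTexture plus orthoCam.projectionMatrix and
  // orthoCam.matrixWorldInverse as uniforms to a ShaderMaterial; a fragment
  // whose depth is greater than the stored value (plus a small bias) is
  // "in shadow", i.e. part of an undercut.
  return { orthoCam, depthTexture: depthTarget.depthTexture };
}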

I’m still at a loss. :thinking:

“Area” sounds like a surface.
“Removing” those would leave a void.
When it comes to manufacturing (at the latest), we will be talking about material/volumes.

I’m having a hard time imagining how to manufacture a void, something without a shape or any properties.

Could it be that you want to replace the undercut surfaces with other surfaces? Please describe the criteria for a “proper preparation”.
