Use raw WebGL functionality together with Three.js

Hello all, I am trying to use some external rendering functionality written in raw WebGL together with Three.js. For example, I want to draw a complex scene as the background using raw WebGL, and draw some animated models on top of it using Three.js.

However, I have run into some issues. I have created a basic renderer class that uses raw WebGL to draw a triangle as the background, but unfortunately it doesn’t work well when combined with a simple Three.js scene.

I have created an online code snippet here. You can comment out parts of animation() to see the triangle render on its own.

import * as THREE from 'three';

// init Three.js scene
const camera = new THREE.PerspectiveCamera( 70, window.innerWidth / window.innerHeight, 0.01, 10 );
camera.position.z = 1;

const scene = new THREE.Scene();

const geometry = new THREE.BoxGeometry( 0.2, 0.2, 0.2 );
const material = new THREE.MeshNormalMaterial();

const mesh = new THREE.Mesh( geometry, material );
scene.add( mesh );

const renderer = new THREE.WebGLRenderer( { antialias: true } );
renderer.setSize( window.innerWidth, window.innerHeight );
renderer.setAnimationLoop( animation );
document.body.appendChild( renderer.domElement );

// init native webgl view
class TriangleRenderer {
  constructor(gl) {
    this.gl = gl;
    this.program = null;
    this.positionBuffer = null;
    this.indexBuffer = null;
  }

  initialize() {
    const gl = this.gl;

    const vertexShaderSource = `
      attribute vec2 position;
      void main() {
        gl_Position = vec4(position, 0.0, 1.0);
      }
    `;
    const fragmentShaderSource = `
      precision mediump float;
      void main() {
        gl_FragColor = vec4(1.0, 1.0, 0.0, 1.0);
      }
    `;
    const vertexShader = this.createShader(gl.VERTEX_SHADER, vertexShaderSource);
    const fragmentShader = this.createShader(gl.FRAGMENT_SHADER, fragmentShaderSource);
    this.program = this.createProgram(vertexShader, fragmentShader);

    const positions = [
      0.0, 0.5,
      -0.5, -0.5,
      0.5, -0.5
    ];
    this.positionBuffer = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, this.positionBuffer);
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(positions), gl.STATIC_DRAW);
    const positionAttributeLocation = gl.getAttribLocation(this.program, 'position');
    gl.enableVertexAttribArray(positionAttributeLocation);
    gl.vertexAttribPointer(positionAttributeLocation, 2, gl.FLOAT, false, 0, 0);
    gl.bindBuffer(gl.ARRAY_BUFFER, null);

    const indices = [0, 1, 2];
    this.indexBuffer = gl.createBuffer();
    gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
    gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(indices), gl.STATIC_DRAW);
    gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
  }

  render() {
    const gl = this.gl;

    // save the current rendering state
    const currentVao = gl.getParameter(gl.VERTEX_ARRAY_BINDING);
    const currentProgram = gl.getParameter(gl.CURRENT_PROGRAM);
    const currentArrayBuffer = gl.getParameter(gl.ARRAY_BUFFER_BINDING);
    const currentElementArrayBuffer = gl.getParameter(gl.ELEMENT_ARRAY_BUFFER_BINDING);

    gl.clearColor(1, 0, 0, 1); // set the clear color before clearing
    gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);

    gl.useProgram(this.program);
    gl.bindBuffer(gl.ARRAY_BUFFER, this.positionBuffer);
    gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);

    gl.drawElements(gl.TRIANGLES, 3, gl.UNSIGNED_SHORT, 0);

    // reset to original status
    gl.bindBuffer(gl.ARRAY_BUFFER, currentArrayBuffer);
    gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, currentElementArrayBuffer);
    gl.bindVertexArray(currentVao);
    gl.useProgram(currentProgram);
  }

  createShader(type, source) {
    const gl = this.gl;
    const shader = gl.createShader(type);
    gl.shaderSource(shader, source);
    gl.compileShader(shader);
    if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
      console.error('Shader compilation error:', gl.getShaderInfoLog(shader));
      gl.deleteShader(shader);
      return null;
    }
    return shader;
  }

  createProgram(vertexShader, fragmentShader) {
    const gl = this.gl;
    const program = gl.createProgram();
    gl.attachShader(program, vertexShader);
    gl.attachShader(program, fragmentShader);
    gl.linkProgram(program);
    if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
      console.error('Program linking error:', gl.getProgramInfoLog(program));
      gl.deleteProgram(program);
      return null;
    }
    return program;
  }
}

const nativeRenderer = new TriangleRenderer(renderer.getContext());
nativeRenderer.initialize();

// render loop
function animation( time ) {
  nativeRenderer.render();

  mesh.rotation.x = time / 2000;
  mesh.rotation.y = time / 1000;

  renderer.autoClear = false; // don't let three.js clear away the raw WebGL background
  renderer.render( scene, camera );
}

The renderer works fine on its own, but when used with the Three.js renderer, the triangle flashes at the beginning and then is no longer rendered at all.

One solution I thought of is to save the currently bound buffers before rendering and restore them afterward (see TriangleRenderer.render()):

// save the current rendering state
const currentVao = gl.getParameter(gl.VERTEX_ARRAY_BINDING);
const currentProgram = gl.getParameter(gl.CURRENT_PROGRAM);
const currentArrayBuffer = gl.getParameter(gl.ARRAY_BUFFER_BINDING);
const currentElementArrayBuffer = gl.getParameter(gl.ELEMENT_ARRAY_BUFFER_BINDING);

// ... drawElements ...

// restore
gl.bindBuffer(gl.ARRAY_BUFFER, currentArrayBuffer);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, currentElementArrayBuffer);
gl.bindVertexArray(currentVao);
gl.useProgram(currentProgram);

Unfortunately, it seems that this alone is not enough to resolve the issue.

Could anyone provide some suggestions or guidance on this? Many thanks!

Can you define “external rendering functionalities”? Maybe they can be converted one way or another to a three.js render pipeline.

From your code snippet, I see you’re using a vertexShader and a fragmentShader, which can be used with a ShaderMaterial or a RawShaderMaterial combined with a BufferGeometry.
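For instance, here is a minimal sketch of the same triangle drawn as a three.js object with RawShaderMaterial (untested; it reuses your clip-space shaders unchanged, so the camera transform is intentionally ignored):

const triGeometry = new THREE.BufferGeometry();
triGeometry.setAttribute( 'position', new THREE.BufferAttribute( new Float32Array( [
   0.0,  0.5, 0.0,
  -0.5, -0.5, 0.0,
   0.5, -0.5, 0.0
] ), 3 ) );

const triMaterial = new THREE.RawShaderMaterial( {
  vertexShader: `
    attribute vec3 position;
    void main() {
      // vertices are already in clip space, so no matrices are applied
      gl_Position = vec4( position, 1.0 );
    }
  `,
  fragmentShader: `
    precision mediump float;
    void main() {
      gl_FragColor = vec4( 1.0, 1.0, 0.0, 1.0 );
    }
  `
} );

const triMesh = new THREE.Mesh( triGeometry, triMaterial );
triMesh.frustumCulled = false; // clip-space geometry bypasses the camera, so skip culling
scene.add( triMesh );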

Or as a post-processing pass?

The answer depends on what you are trying to achieve with those external rendering functionalities.


WebGL is a highly state-based API. It is in general VERY difficult for two applications to share the same WebGL state, since neither knows what state the other has left WebGL in. Personally, I would try to avoid these direct GL calls if at all possible.

If that’s not an option though, one workaround could be to call renderer.resetState() before and/or after your external GL calls.
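With your snippet, the loop would look roughly like this (a sketch, assuming a recent three.js release where WebGLRenderer.resetState() is available):

function animation( time ) {
  renderer.resetState();   // reset three.js' cached GL state before the raw calls
  nativeRenderer.render(); // raw WebGL drawing

  renderer.resetState();   // and again, so three.js doesn't trust state the raw calls changed

  mesh.rotation.x = time / 2000;
  mesh.rotation.y = time / 1000;

  renderer.autoClear = false;
  renderer.render( scene, camera );
}

three.js then rebinds whatever it needs on the next render() call instead of relying on cached state.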


Why do you need to do this? I’m assuming it’s for performance reasons. If that’s the case, have you tested the performance of your complex background scene in raw WebGL vs in THREE.js?


Thank you all! I understand your suggestions to avoid using gl calls directly. My rendering functionality is point cloud rendering with some GL-based tasks like depth computation & sorting … It could certainly be rewritten with Three.js, but we want to implement it in plain WebGL only, and let users integrate it with their own environment, maybe three.js or Babylon or other libraries. That’s why I am doing these strange things :rofl:

renderer.resetState() did fix the problem! Thank you!