EffectComposer – Combine multiple renderings

Hello everyone,

I use EffectComposer from the postprocessing.js library.

My goal is to generate 2 successive renderings of the scene but at different resolutions.
Indeed, I would like to render certain effects (mainly volumetric effects) in half resolution and the other elements of the scene in full resolution.
In the end, the goal is to combine the two renderings by stretching the half resolution rendering to full resolution.

Half resolution rendering will use a depthTexture from the full resolution scene to manage depth, so no problem there.

I don’t really know how to combine these two renderings and especially stretch the smaller one to the size of the larger one.

I imagine you would have to use several POSTPROCESSING.RenderPasses with different sizes?

Thank you in advance for your help.

I managed to render two passes that overlap perfectly, but I still don’t know how to render them at two different resolutions…

/**
 * A pass that renders the given scene into an internal render target at a
 * reduced resolution (default: half of the composer's base size).
 *
 * The pass owns its output target (`this.renderTarget`); it does not write to
 * the composer's buffers, so buffer swapping is disabled.
 */
export class HalfResolutionPass extends POSTPROCESSING.Pass {

	/**
	 * @param {Scene} scene - The scene to render.
	 * @param {Camera} camera - The camera to render with.
	 * @param {Object} [options] - Resolution options.
	 * @param {Number} [options.resolutionScale=0.5] - Scale applied to the base size.
	 * @param {Number} [options.width=Resolution.AUTO_SIZE] - Preferred width (deprecated alias of resolutionX).
	 * @param {Number} [options.height=Resolution.AUTO_SIZE] - Preferred height (deprecated alias of resolutionY).
	 * @param {Number} [options.resolutionX=width] - Preferred render width.
	 * @param {Number} [options.resolutionY=height] - Preferred render height.
	 */
	constructor(scene, camera, {
		resolutionScale = 0.5,
		width = POSTPROCESSING.Resolution.AUTO_SIZE,
		height = POSTPROCESSING.Resolution.AUTO_SIZE,
		resolutionX = width,
		resolutionY = height
	} = {}) {

		// Fix: name the pass after itself (the original passed "RenderPass"),
		// so it can be identified when inspecting the composer's pass list.
		super("HalfResolutionPass", scene, camera);

		// Output goes to this pass's own target, not the composer's buffers,
		// so the input/output buffers must not be swapped after this pass.
		this.needsSwap = false;

		// Inner RenderPass that performs the actual scene render.
		this.renderPass = new POSTPROCESSING.RenderPass(scene, camera);
		this.renderPass.ignoreBackground = true;
		this.renderPass.skipShadowMapUpdate = true;
		this.renderPass.clearPass.enabled = false;

		// Nearest filtering: the target is meant to be sampled without
		// interpolation when composited at full size later.
		this.renderTarget = new WebGLRenderTarget(1, 1, {
			minFilter: NearestFilter,
			magFilter: NearestFilter
		});
		this.renderTarget.texture.name = "HalfResolutionPass.Target";

		// Resolution helper; re-applies the base size whenever the preferred
		// size or the scale changes, which resizes the render target.
		const resolution = this.resolution = new POSTPROCESSING.Resolution(this, resolutionX, resolutionY, resolutionScale);
		resolution.addEventListener("change", () => this.setSize(resolution.baseWidth, resolution.baseHeight));
	}

	/**
	 * Returns the resolution settings of this pass.
	 *
	 * @return {Resolution} The resolution.
	 */
	getResolution() {

		return this.resolution;

	}

	/**
	 * Returns the current resolution scale.
	 *
	 * @return {Number} The scale.
	 */
	getResolutionScale() {

		return this.resolution.scale;

	}

	/**
	 * Sets the resolution scale (triggers a "change" event on the resolution).
	 *
	 * @param {Number} scale - The new scale.
	 */
	setResolutionScale(scale) {

		this.resolution.scale = scale;

	}

	/**
	 * Renders the scene into this pass's internal (scaled) render target.
	 *
	 * @param {WebGLRenderer} renderer - The renderer.
	 * @param {WebGLRenderTarget} inputBuffer - The composer's input buffer (unused as output here).
	 * @param {WebGLRenderTarget} outputBuffer - The composer's output buffer (unused).
	 * @param {Number} [deltaTime] - Time since the last frame, in seconds.
	 * @param {Boolean} [stencilTest] - Whether a stencil mask is active.
	 */
	render(renderer, inputBuffer, outputBuffer, deltaTime, stencilTest) {

		this.renderPass.render(renderer, inputBuffer, this.renderTarget, deltaTime, stencilTest);

	}

	/**
	 * Updates the size of this pass: records the new base size and resizes the
	 * internal render target to the scaled resolution.
	 *
	 * @param {Number} width - The base width.
	 * @param {Number} height - The base height.
	 */
	setSize(width, height) {

		const resolution = this.resolution;
		resolution.setBaseSize(width, height);
		this.renderTarget.setSize(resolution.width, resolution.height);

	}

}

// Main full-resolution render pass, drawn straight to the screen.
const firstRenderPass = new POSTPROCESSING.RenderPass(this.scene, this.cameraMesh);
firstRenderPass.renderToScreen = true;
this.cameraMesh.composer.addPass(firstRenderPass);

// Additional pass rendering the picking scene at half resolution.
const halfResPass = new HalfResolutionPass(this.scene.render.pickingScene, this.cameraMesh, {
  resolutionScale: 0.5
});
this.cameraMesh.composer.addPass(halfResPass);

I succeeded! But now I lose the depth information… So my effects pass has no depth information and certain effects like SSAO no longer work…
I would like to retrieve the depth information either from my renderTargetFull, or use that of my personalized depth pass (which allows for better results with materials with alphaTest).
Do you have any idea how to do it?

// Half-resolution scene pass, registered both on the composer and on the
// postprocessing registry for later lookup.
const renderPass = new HalfResolutionPass(this.scene, this.cameraMesh, {
  resolutionScale: 0.5
});
this.postprocessing.renderPass = renderPass;
this.cameraMesh.composer.addPass(renderPass);

// Full-resolution depth pass; its texture feeds depth-dependent effects.
const depthPass = new DepthPass(this.scene, this.cameraMesh, {
  resolutionScale: 1,
});
depthPass.renderToScreen = false;

// Depth must be sampled exactly, so disable mipmaps and interpolation.
const depthTexture = depthPass.texture;
Object.assign(depthTexture, {
  generateMipmaps: false,
  magFilter: NearestFilter,
  minFilter: NearestFilter,
});

this.postprocessing.depthPass = depthPass;
this.cameraMesh.composer.addPass(depthPass);

// Expose the depth pass and its texture on the camera for consumers.
this.cameraMesh.depthPass = depthPass;
this.cameraMesh.depthTexture = depthTexture;

const effects = [];
/**
 * A pass that renders the scene twice — objects flagged `halfResolution` into a
 * scaled-down target, everything else into a full-size target — and then
 * composites the two with a fullscreen upsampling material (the half-res layer
 * is alpha-blended over the full-res layer).
 *
 * Objects opt into the low-resolution layer by setting `object.halfResolution = true`.
 */
export class HalfResolutionPass extends POSTPROCESSING.Pass {

	/**
	 * @param {Scene} scene - The scene to render.
	 * @param {Camera} camera - The camera to render with.
	 * @param {Object} [options] - Options.
	 * @param {Number} [options.resolutionScale=0.5] - Scale of the low-resolution layer.
	 */
	constructor(scene, camera, {
		resolutionScale = 0.5,
	} = {}) {

		super("HalfResolutionPass", scene, camera);

		// The composited result is written into the input buffer directly;
		// the composer's buffers must not be swapped afterwards.
		this.needsSwap = false;

		// Clears both targets before each frame; alpha is forced to 0 so the
		// half-res layer can be blended over the full-res one by its alpha.
		this.clearPass = new POSTPROCESSING.ClearPass();
		this.ignoreBackground = false;
		this.skipShadowMapUpdate = false;
		this.clearPass.overrideClearAlpha = 0;

		// Low-resolution layer (scaled by resolutionScale in setSize).
		this.renderTargetHalf = new WebGLRenderTarget(1, 1, {
			depthBuffer: true
		});
		// Full-resolution layer.
		this.renderTargetFull = new WebGLRenderTarget(1, 1, {
			depthBuffer: true
		});

		this.renderTargetHalf.texture.name = "HalfResolutionPass.Half";
		this.renderTargetFull.texture.name = "HalfResolutionPass.Full";

		// Custom compositing material: samples both layers and blends the
		// low-res color over the high-res color by the low-res alpha.
		this.upsamplingMaterial = new POSTPROCESSING.UpsamplingMaterial();

		this.upsamplingMaterial.vertexShader = `
    varying vec2 vUv;

    void main() {
        vUv = position.xy * 0.5 + 0.5;
        gl_Position = vec4(position.xy, 1.0, 1.0);
    }
    `
		this.upsamplingMaterial.fragmentShader =`
      uniform highp sampler2D inputBuffer;
      uniform highp sampler2D supportBuffer;
      
      varying vec2 vUv;
      
      void main() {
        vec4 lowColor = texture2D(inputBuffer, vUv);
        vec4 highColor = texture2D(supportBuffer, vUv);

        float lowAlpha = lowColor.a;
        float highAlpha = highColor.a;

        // LowColor doit être au premier plan
        if (lowAlpha > 0.0) {
          gl_FragColor = mix(lowColor, highColor, 1.0 - lowAlpha);
        } else {
          gl_FragColor = highColor;
        }

    
        #include <colorspace_fragment>
      }
    `
		this.fullscreenMaterial = this.upsamplingMaterial;
		this.resolution = new Vector2();
		this.resolutionScale = resolutionScale;
		// NOTE(review): finalScene appears unused within this class — confirm
		// against external callers before removing.
		this.finalScene = new Scene();
	}

	/**
	 * Whether this pass renders to screen. The getter is required: a
	 * setter-only override would shadow the inherited getter, making reads
	 * return undefined (the original code had this bug, so the composer could
	 * never observe the flag as set).
	 *
	 * @type {Boolean}
	 */
	get renderToScreen() {

		return super.renderToScreen;

	}

	set renderToScreen(value) {

		super.renderToScreen = value;
		this.clearPass.renderToScreen = value;

	}

	/**
	 * Indicates whether the internal clear pass is enabled.
	 *
	 * @type {Boolean}
	 */
	get clear() {

		return this.clearPass.enabled;

	}

	set clear(value) {

		this.clearPass.enabled = value;

	}

	/**
	 * Renders the half-res layer, then the full-res layer, then composites
	 * both into the input buffer with the upsampling material.
	 *
	 * Object visibility is toggled around each render and restored at the end.
	 * NOTE(review): statement order matters here (visibility flips, shadow-map
	 * autoUpdate toggles, background save/restore) — do not reorder.
	 *
	 * @param {WebGLRenderer} renderer - The renderer.
	 * @param {WebGLRenderTarget} inputBuffer - Receives the composited result.
	 * @param {WebGLRenderTarget} outputBuffer - Unused (needsSwap is false).
	 * @param {Number} [deltaTime] - Time since the last frame, in seconds.
	 * @param {Boolean} [stencilTest] - Whether a stencil mask is active.
	 */
	render(renderer, inputBuffer, outputBuffer, deltaTime, stencilTest) { 

		// Clear both layers (alpha forced to 0 so blending works later).
		this.clearPass.setSize(this.resolution.x, this.resolution.y);
		this.clearPass.render(renderer, this.renderTargetFull)
		this.clearPass.setSize(this.resolution.x * this.resolutionScale, this.resolution.y * this.resolutionScale);
		this.clearPass.render(renderer, this.renderTargetHalf)


		// Partition the top-level children: `halfResolution`-flagged objects
		// go to the low-res layer, the rest to the full-res layer. Objects
		// that were already invisible are left alone.
		const halfMeshes = [];
		const fullMeshes = [];
		this.fullscreenMaterial = null;
		this.screen.visible = false;
		this.scene.children.forEach((child) => {
			if (!child.visible) {
				return
			}
			if (child.halfResolution) {
				halfMeshes.push(child);
				child.visible = true;
			} else {
				fullMeshes.push(child);
				child.visible = false;
			}
		});

		// Render the low-res layer without background or shadow-map updates.
		const background = this.scene.background;
		this.scene.background = null;
		renderer.shadowMap.autoUpdate = false;

		renderer.setRenderTarget(this.renderTargetHalf);
		renderer.render(this.scene, this.camera);

		// Swap visibility: full-res objects on, half-res objects off.
		for (let i = 0; i < fullMeshes.length; i++) {
			const child = fullMeshes[i];
			child.visible = true;
		}

		for (let i = 0; i < halfMeshes.length; i++) {
			const child = halfMeshes[i];
			child.visible = false;
		}

		renderer.shadowMap.autoUpdate = true;

		// Render the full-res layer (shadow maps may update here).
		renderer.setRenderTarget(this.renderTargetFull);
		renderer.render(this.scene, this.camera);


		// Hide everything; only the fullscreen compositing quad remains.
		for (let i = 0; i < fullMeshes.length; i++) {
			const child = fullMeshes[i];
			child.visible = false;
		}

		this.fullscreenMaterial = this.upsamplingMaterial;
		this.screen.visible = true;
		this.fullscreenMaterial.uniforms.inputBuffer.value = this.renderTargetHalf.texture;
		this.fullscreenMaterial.uniforms.supportBuffer.value = this.renderTargetFull.texture;

		this.scene.background = null;
		renderer.shadowMap.autoUpdate = false;

		// Composite both layers into the input buffer.
		renderer.setRenderTarget(inputBuffer);
		renderer.render(this.scene, this.camera);

		// Restore the scene state for subsequent passes.
		this.scene.background = background;
		renderer.shadowMap.autoUpdate = true;

		for (let i = 0; i < halfMeshes.length; i++) {
			const child = halfMeshes[i];
			child.visible = true;
		}
		for (let i = 0; i < fullMeshes.length; i++) {
			const child = fullMeshes[i];
			child.visible = true;
		}
	}

	/**
	 * Updates the size of this pass and both render targets.
	 *
	 * @param {Number} width - The base width.
	 * @param {Number} height - The base height.
	 */
	setSize(width, height) {

		const resolution = this.resolution;
		resolution.set(width, height);
		this.renderTargetHalf.setSize(Math.round(resolution.x * this.resolutionScale), Math.round(resolution.y * this.resolutionScale));
		this.renderTargetFull.setSize(resolution.x, resolution.y);

	}

	/**
	 * Performs initialization tasks: propagates the frame buffer type to both
	 * targets and configures precision/color space accordingly.
	 *
	 * Fix: the original referenced `this.downsamplingMaterial`, which is never
	 * created by this pass, so any non-UnsignedByteType frame buffer threw a
	 * TypeError here.
	 *
	 * @param {WebGLRenderer} renderer - The renderer.
	 * @param {Boolean} alpha - Whether the renderer uses the alpha channel.
	 * @param {Number} frameBufferType - The type of the main frame buffers.
	 */
	initialize(renderer, alpha, frameBufferType) {
		if(frameBufferType !== undefined) {
			this.renderTargetHalf.texture.type = frameBufferType;
			this.renderTargetFull.texture.type = frameBufferType;

			if(frameBufferType !== UnsignedByteType) {

				this.upsamplingMaterial.defines.FRAMEBUFFER_PRECISION_HIGH = "1";

			} else if(renderer !== null && renderer.outputColorSpace === SRGBColorSpace) {

				this.renderTargetHalf.texture.colorSpace = SRGBColorSpace;
				this.renderTargetFull.texture.colorSpace = SRGBColorSpace;

			}
		}
	}
}

Don’t post-processing effects have their own configurable resolution by definition? I think I have dialed down specific effects in the chain by giving them a custom size. By the way, I would suggest no longer using SSAO but N8AO instead — GitHub - N8python/n8ao: An efficient and visually pleasing implementation of SSAO with an emphasis on temporal stability and artist control. It has half-resolution baked in (it kicks in at distance). It easily outperforms the old SSAO, and with just 6 samples it looks better to me than the old one with 20 or more.

1 Like