Connect the Horizontal Scrolling slider using GSAP&ScrollTrigger to the distortion effect created with Three.js

Hi. I’m new to Three.js and GSAP. I created a horizontal scrolling slider of images with GSAP & ScrollTrigger, and also an ‘EffectCanvas’ class for the distortion effect. I would like to show this distortion effect on each image while scrolling, but I don’t know how to connect the scrolling to the effect.
I tried to put it into the ‘constructor’ part of EffectCanvas and run it in render(), but it didn’t work.

Could you help me with this, please? Thank you very much!!

Here is the code:

gsap.registerPlugin(ScrollTrigger);

// 1st Slider Scroll
// Grab the gallery element that will be translated horizontally.
const gallery = document.querySelector(".gallery");

// Full pixel width of the gallery track.
let galleryWidth = gallery.offsetWidth;
console.log(galleryWidth);

// Horizontal travel distance: the part of the gallery that overflows the
// viewport, plus a 100px margin.
let amountToScroll = galleryWidth - window.innerWidth + 100;
console.log(amountToScroll);

// Build the scrubbed horizontal-scroll tween for the gallery.
function sliderAnim() {
    const triggerConfig = {
        trigger: ".gallery-wrapper", // element that drives the animation
        start: "top top",
        end: `+=${amountToScroll}`,  // scroll the same distance we translate
        pin: true,                   // keep the wrapper fixed while scrubbing
        scrub: true,                 // tie tween progress to scroll position
        markers: true
    };

    gsap.to(gallery, {
        x: -amountToScroll, // move left by the overflow amount
        ease: "none",       // linear: progress maps 1:1 to scroll
        scrollTrigger: triggerConfig
    });
}

sliderAnim();

// Initialize Lenis smooth scrolling
const lenis = new Lenis();

// Drive Lenis from the browser's animation loop: schedule the next frame,
// then advance the smooth-scroll simulation with the current timestamp.
function raf(time) {
    requestAnimationFrame(raf);
    lenis.raf(time);
}

requestAnimationFrame(raf);

class EffectCanvas {
    /**
     * Full-screen WebGL layer that mirrors the slider's <img> elements as
     * distorted planes. Creates the scene, camera, renderer and one MeshItem
     * per image, then starts its own render loop.
     */
    constructor() {
        this.container = document.querySelector('#sliderOne');
        this.width = this.container.offsetWidth;
        this.height = this.container.offsetHeight;

        this.images = [...document.querySelectorAll('#slider-inner1 img')];
        console.log(this.images);

        this.meshItems = []; // Used to store all meshes we will be creating.
        this.setupCamera();
        this.createMeshItems();

        this.render();
    }

    // Getter function used to get screen dimensions used for the camera and mesh materials
    get viewport() {
        let width = window.innerWidth;
        let height = window.innerHeight;
        let aspectRatio = width / height;
        return {
            width,
            height,
            aspectRatio
        };
    }

    setupCamera() {
        window.addEventListener('resize', this.resize.bind(this));

        // Create new scene
        this.scene = new THREE.Scene();

        // Camera distance on the z axis; stored so resize() can recompute
        // the field of view with the same value.
        this.perspective = 1000;

        // Choose a fov such that 1 world unit maps to 1 CSS pixel at
        // z = this.perspective, so meshes can be sized in pixels.
        const fov = (180 * (2 * Math.atan(window.innerHeight / 2 / this.perspective))) / Math.PI;
        this.camera = new THREE.PerspectiveCamera(fov, this.viewport.aspectRatio, 1, 1000);
        this.camera.position.set(0, 0, this.perspective);

        // renderer (transparent so the page shows through behind the meshes)
        this.renderer = new THREE.WebGLRenderer({
            antialias: true,
            alpha: true
        });
        this.renderer.setSize(this.viewport.width, this.viewport.height);
        this.renderer.setPixelRatio(window.devicePixelRatio);
        this.container.appendChild(this.renderer.domElement);
    }

    /**
     * Refit camera and renderer to the new window size.
     * (The original called an undefined init() here, which threw a
     * ReferenceError on every resize.)
     */
    resize() {
        // The fov depends on window.innerHeight, so recompute it to keep the
        // 1 world unit == 1 pixel mapping after the resize.
        this.camera.fov = (180 * (2 * Math.atan(window.innerHeight / 2 / this.perspective))) / Math.PI;
        this.camera.aspect = this.viewport.aspectRatio;
        this.camera.updateProjectionMatrix();
        this.renderer.setSize(this.viewport.width, this.viewport.height);
    }

    createMeshItems() {
        // One MeshItem per DOM image; each keeps itself in sync with its <img>.
        this.images.forEach(image => {
            let meshItem = new MeshItem(image, this.scene);
            this.meshItems.push(meshItem);
        });
    }

    render() {
        // Connect the scrolling to the distortion: Lenis exposes the current
        // scroll velocity, which we forward to every mesh each frame so the
        // shader offset follows the scroll speed.
        // NOTE(review): assumes the Lenis instance is the module-level `lenis`
        // created above — confirm if this file is split up.
        const scrollVelocity = typeof lenis !== 'undefined' ? lenis.velocity : 0;

        for (let i = 0; i < this.meshItems.length; i++) {
            this.meshItems[i].render(scrollVelocity);
        }
        this.renderer.render(this.scene, this.camera);
        requestAnimationFrame(this.render.bind(this));
    }

}

class MeshItem {
    /**
     * One textured plane that shadows a DOM <img> inside the WebGL scene.
     * @param {HTMLImageElement} element - the image this mesh mirrors
     * @param {THREE.Scene} scene - scene the mesh is added to
     */
    constructor(element, scene) {
        this.element = element;
        this.scene = scene;
        this.offset = new THREE.Vector2(0, 0); // mesh centre in world space
        this.sizes = new THREE.Vector2(0, 0);  // mesh width/height in pixels
        // Cache the slider container once; getDimensions() runs every frame
        // and should not re-query the DOM each time.
        this.container = document.querySelector('#sliderOne');
        this.createMesh();

        console.log(this.element);
    }

    /**
     * Re-measure the on-screen rectangle of the <img> so the mesh stays glued
     * to it while the GSAP tween translates the gallery.
     */
    getDimensions() {
        const { width, height, top, left } = this.element.getBoundingClientRect();
        this.sizes.set(width, height);
        this.width = this.container.offsetWidth;
        this.height = this.container.offsetHeight;
        // Convert DOM coordinates (top-left origin, y down) to Three.js
        // coordinates (centre origin, y up). The 1.99 divisor nudges the mesh
        // down a fraction of a pixel — presumably to hide a seam; TODO confirm.
        this.offset.set(left - this.width / 2 + width / 2., -top + this.height / 2 - height / 1.99);
    }

    createMesh() {
        // High segment count so the vertex shader can bend the plane smoothly.
        this.geometry = new THREE.PlaneGeometry(1, 1, 100, 100);

        this.imageTexture = new THREE.TextureLoader().load(this.element.src);
        console.log(this.imageTexture);

        this.uniforms = {
            uTexture: { value: this.imageTexture },          // the image pixels
            uOffset: { value: new THREE.Vector2(0, 0) },     // distortion strength, driven by scroll
            uAlpha: { value: 1.0 }                           // 0.0 = fully transparent, 1.0 = fully opaque
        };
        this.material = new THREE.ShaderMaterial({
            uniforms: this.uniforms,
            vertexShader: vertexShader,
            fragmentShader: fragmentShader
        });
        this.mesh = new THREE.Mesh(this.geometry, this.material);
        this.getDimensions();
        this.mesh.position.set(this.offset.x, this.offset.y, 0);
        this.mesh.scale.set(this.sizes.x, this.sizes.y, 1);

        this.scene.add(this.mesh);
    }

    /**
     * Per-frame update: re-sync position/scale with the DOM image and feed the
     * current scroll velocity into the distortion shader.
     * (The original referenced an undefined `value` here, throwing a
     * ReferenceError every frame — this is the "connection" the post asks for.)
     * @param {number} [scrollVelocity=0] - current scroll speed, e.g. lenis.velocity
     */
    render(scrollVelocity = 0) {
        this.getDimensions();
        this.mesh.position.set(this.offset.x, this.offset.y, 0);
        // z-scale must stay 1 (it was 0, which degenerates the transform).
        this.mesh.scale.set(this.sizes.x, this.sizes.y, 1);
        // Stronger scroll -> stronger horizontal distortion.
        this.uniforms.uOffset.value.set(-scrollVelocity * 0.06, 0.0);
    }
}

new EffectCanvas();
  1. Are the images part of the webpage or part of the WebGL canvas? If you’re trying to recreate something like this effect - the images have to be rendered within a single, static canvas element, as WebGL textures - not as separate <img /> or <canvas /> elements.

  2. To pass pretty much any value (as long as it’s numeric in one way or another ofc), you just need to pass it to material uniform on every frame (see line 47 and line 129 for an example.)

1 Like

Hi mjurczyk,

Thank you for your message. Yes, the images are rendered as WebGL textures, and the opacity of the images is set to 0:

this.imageTexture = new THREE.TextureLoader().load(this.element.src);
this.uniforms = {
            uTexture: { value: this.imageTexture },
            uOffset: { value: new THREE.Vector2(0, 0) },
            uAlpha: { value: 1.0 } // A value of 0.0 indicates fully transparent, 1.0 is fully opaque. alpha - controls the default clear alpha value. When set to true, the value is 0. Otherwise it's 1. Default is false.
        }

So where can I implement the gsap timeline into this Three.js class?