javascript · three.js

Render second scene to texture not working


I'm trying to learn something new in three.js. My goal is to use what a second camera sees in a separate scene as a texture in the main scene, or alternatively to use what a second camera sees in the main scene as a texture. But I only see a black screen. I've posted my code below; I hope someone can spot where my mistake is, because I just can't figure it out.

In three steps:

  • texture = the second camera's view
  • a material uses that texture
  • apply that material to a mesh as usual

E.g.

var camera, controls, scene, renderer, container, aspect;

function main() {
    init();
    animate();
}

function init() {
    renderer = new THREE.WebGLRenderer( { antialias: true } );
    renderer.setPixelRatio( window.devicePixelRatio ); 
    renderer.shadowMap.enabled = true; 
    renderer.shadowMap.type = THREE.PCFSoftShadowMap;
             
    container = document.getElementById('container');
    renderer.setSize(container.clientWidth, container.clientHeight);
    container.appendChild( renderer.domElement );

    aspect = container.clientWidth / container.clientHeight; 
    scene = new THREE.Scene();
    scene.background = new THREE.Color( 0x000000 );
    
    camera = new THREE.PerspectiveCamera( 60, container.clientWidth / container.clientHeight, 1, 1000000 );
    
    camera.position.set(0, 0, 200);

    controls = new THREE.OrbitControls( camera, renderer.domElement );
    controls.enableZoom = true;
    controls.enabled = true;
    controls.target.set(0, 0, 0);
    
    //-----End three basic setups-----


    var tex = generateTexture(renderer);
    

    var plane = new THREE.Mesh(
        new THREE.PlaneBufferGeometry(100.0, 100.0),
        new THREE.MeshBasicMaterial({
           color: 0x00caff,
           map: tex,
           side: THREE.DoubleSide,
        })
    );
    scene.add(plane);
}//-------End init----------

function animate() {
    requestAnimationFrame( animate );  
    render();    
}//-------End animate----------

function render() { 
    camera.updateMatrixWorld();
    camera.updateProjectionMatrix(); 
    renderer.render(scene, camera); 
}//-------End render----------


function generateTexture(renderer) {
    var resolution = 2000;
    var textureScene = new THREE.Scene();
    textureScene.background = new THREE.Color(0x404040);

    var renderTarget = new THREE.WebGLRenderTarget(resolution, resolution, {
        minFilter: THREE.LinearFilter,
        magFilter: THREE.LinearFilter,
        format: THREE.RGBFormat
    });
        
    var textureCamera = new THREE.PerspectiveCamera(60, aspect, 0.1, 100000.0);
        
    textureCamera.position.set(0, 0, 200);
    textureCamera.lookAt(0, 0, 0);
    
    var geometry = new THREE.SphereGeometry( 60, 32, 16 ); 
    var material = new THREE.MeshBasicMaterial( { color: 0xffff00 } ); 
    var sphere = new THREE.Mesh( geometry, material); 
    textureScene.add( sphere );
    
    renderer.render(textureScene, textureCamera, renderTarget, true);
        
    var texture = renderTarget.texture;
        
    renderer.setRenderTarget(null);
        
    return texture;
}//----- End generateTexture ------

Solution

  • Are you copying this approach from a tutorial? What version of three.js are you using? I'm asking because you're using renderer.render(scene, camera, target, true); but the docs state that .render() only accepts two arguments, so passing a renderTarget doesn't do anything.

    I recommend you copy the approach in this demo; you can see the source code by clicking the < > icon. The essential part is as follows:

    // Render first scene into texture
    renderer.setRenderTarget( renderTarget );
    renderer.clear();
    renderer.render( textureScene, textureCamera );
    
    // Render full scene to canvas
    renderer.setRenderTarget( null );
    renderer.clear();
    renderer.render( scene, camera );
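
    For reference, here is a minimal sketch of how the question's generateTexture() could be restructured around that pattern. It assumes a recent three.js release where renderer.setRenderTarget() is used to bind the render target; names such as aspect are taken from the question's code.

    function generateTexture(renderer) {
        var resolution = 2000;
        var textureScene = new THREE.Scene();
        textureScene.background = new THREE.Color(0x404040);

        // Render target that will hold the texture (RGBA by default)
        var renderTarget = new THREE.WebGLRenderTarget(resolution, resolution, {
            minFilter: THREE.LinearFilter,
            magFilter: THREE.LinearFilter
        });

        var textureCamera = new THREE.PerspectiveCamera(60, aspect, 0.1, 100000.0);
        textureCamera.position.set(0, 0, 200);
        textureCamera.lookAt(0, 0, 0);

        var sphere = new THREE.Mesh(
            new THREE.SphereGeometry(60, 32, 16),
            new THREE.MeshBasicMaterial({ color: 0xffff00 })
        );
        textureScene.add(sphere);

        // Bind the render target, render the texture scene into it,
        // then unbind so later render() calls draw to the canvas again
        renderer.setRenderTarget(renderTarget);
        renderer.clear();
        renderer.render(textureScene, textureCamera);
        renderer.setRenderTarget(null);

        return renderTarget.texture;
    }

    Rendering the texture once is enough here because nothing in textureScene changes; if it did, you would move the setRenderTarget/render calls into the animation loop, as in the demo above.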