I've been having issues with my Blender materials showing up in three.js. One important detail I keep seeing mentioned on SO and elsewhere is that the glTF format has limited support for exporting your Blender node setup, so it's best to stick to the basics like the Principled BSDF. However, even with a basic Principled BSDF setup (pictured below) creating a grey reflective cube with roughness 0, I can't see any of the reflectiveness in three.js (also pictured).

[Screenshot: Principled BSDF node setup in Blender]

[Screenshot: the cube rendered in three.js, with no visible reflections]

Here's my three.js scene:

import * as THREE from "three"
import {GLTFLoader} from "three/examples/jsm/loaders/GLTFLoader.js"
import {OrbitControls} from "three/examples/jsm/controls/OrbitControls.js"

(async () => {
    const gScene = new THREE.Scene()
    const canvas = document.querySelector("#c")
    const gRenderer = new THREE.WebGLRenderer({canvas})
    const gCamera = new THREE.PerspectiveCamera(50, canvas.clientWidth/canvas.clientHeight, 0.1, 1000)
    gCamera.position.x = 0
    gCamera.position.y = 100
    gCamera.position.z = 20

    const controls = new OrbitControls(gCamera, canvas)
    controls.target.set(0,5,0)
    controls.update()

    const ambientLight = new THREE.AmbientLight(0xffffff, 40)
    gScene.add(ambientLight)

    const gltfLoader = new GLTFLoader()

    let scene = await new Promise((resolve, reject) => {
        gltfLoader.load('./scene.glb', (loadedGLB) => {
            resolve(loadedGLB.scene)
        }, undefined, reject)
    })
    gScene.add(scene)

    function resizeRendererToDisplaySize(renderer){
        const canvas = renderer.domElement
        const pixelRatio = window.devicePixelRatio
        const width = canvas.clientWidth * pixelRatio | 0
        const height = canvas.clientHeight * pixelRatio | 0 
        const needResize = canvas.width !== width || canvas.height !== height 
        if (needResize){
            renderer.setSize(width, height, false)
        }
        return needResize
    }
    function render(){
        if(resizeRendererToDisplaySize(gRenderer)){
            let canvas = gRenderer.domElement
            gCamera.aspect = canvas.clientWidth / canvas.clientHeight
            gCamera.updateProjectionMatrix()
        }
        gRenderer.render(gScene, gCamera)

        requestAnimationFrame(render)
    }
    requestAnimationFrame(render)
})()

Blend file: https://drive.google.com/file/d/1rJXmlKuZsNXl3zRt8YqaB7nUamOfKRw3/view?usp=sharing


1 Answer

You can achieve what you are looking for by adding an HDR environment map to your scene. The idea is to load an equirectangular HDR texture, e.g. via RGBELoader, pre-process it with PMREMGenerator, and then assign the result to Scene.environment. By doing so the texture is set as the environment map for all PBR materials in your scene, which is what produces the reflections.
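
A minimal sketch of that approach, slotted into the scene setup from the question (the .hdr path is just a placeholder for whatever environment texture you use):

import {RGBELoader} from "three/examples/jsm/loaders/RGBELoader.js"

// ... after creating gRenderer and gScene as in the question ...

const pmremGenerator = new THREE.PMREMGenerator(gRenderer)
pmremGenerator.compileEquirectangularShader()

new RGBELoader().load('./environment.hdr', (hdrTexture) => {
    // pre-filter the equirectangular HDR into a prefiltered (PMREM) environment texture
    const envMap = pmremGenerator.fromEquirectangular(hdrTexture).texture

    // every PBR material (MeshStandardMaterial / MeshPhysicalMaterial) in the scene picks this up
    gScene.environment = envMap

    // optionally also use it as the visible background:
    // gScene.background = envMap

    hdrTexture.dispose()
    pmremGenerator.dispose()
})

With roughness 0 the cube then mirrors the environment map (especially if the material is metallic) instead of rendering flat.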

I suggest you have a look at the basic glTF example from the official repository for more details. Also make sure to use the same renderer configuration (that is, render in the sRGB color space and enable tone mapping):

https://threejs.org/examples/webgl_loader_gltf
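
For reference, the renderer settings used by that example look roughly like this (property names vary by release; older builds use outputEncoding, newer ones outputColorSpace):

gRenderer.toneMapping = THREE.ACESFilmicToneMapping
gRenderer.toneMappingExposure = 1
gRenderer.outputEncoding = THREE.sRGBEncoding // on recent releases: gRenderer.outputColorSpace = THREE.SRGBColorSpace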