1. Three.js 全局光照(Global Illumination, GI)技术

全局光照模拟光线在场景中的多次反弹,创造更真实的光照效果。Three.js 提供了多种GI实现方案。

基于烘焙的GI

光照贴图(Lightmaps)

javascript

// Pre-bake lightmaps offline with tools such as Blender or Substance,
// then load them as ordinary textures at runtime.
const textureLoader = new THREE.TextureLoader();
const lightmap = textureLoader.load('lightmap.jpg');

// Apply the baked lighting through the material's lightMap slot;
// lightMapIntensity scales its contribution (1.0 = as baked).
// NOTE(review): three.js samples lightMap from a second UV channel —
// confirm the geometry provides one.
const material = new THREE.MeshStandardMaterial({
  map: baseColorTexture,
  lightMap: lightmap,
  lightMapIntensity: 1.0
});

辐照度贴图(Irradiance Maps)

javascript

// Convert an HDR equirectangular environment map into a prefiltered
// (PMREM) radiance texture suitable for image-based lighting.
// BUG FIX: the original discarded the generated texture (a bare
// expression statement) and then referenced an undefined
// `pmremTexture` — the result is now captured in a constant.
const pmremGenerator = new THREE.PMREMGenerator(renderer);
const pmremTexture = pmremGenerator
  .fromEquirectangular(hdrEquirect)
  .texture;

// Use the prefiltered environment map on a PBR material.
const material = new THREE.MeshStandardMaterial({
  envMap: pmremTexture,
  envMapIntensity: 1.0
});

实时GI方案

辐照探针(Irradiance Probes)

javascript

/**
 * Manages a set of irradiance probes: cube-map captures of the scene,
 * prefiltered with PMREM so they can be used for diffuse (irradiance)
 * lookups at arbitrary positions.
 */
class IrradianceProbeSystem {
  /**
   * @param {THREE.Scene} scene - scene the probes capture and live in
   * @param {THREE.WebGLRenderer} renderer - renderer used for captures
   */
  constructor(scene, renderer) {
    this.probes = [];
    this.renderer = renderer;
    this.scene = scene;

    // PMREM generator turns raw cubemap captures into prefiltered mip
    // chains; precompile its shader up front to avoid a first-use hitch.
    this.pmremGenerator = new THREE.PMREMGenerator(renderer);
    this.pmremGenerator.compileCubemapShader();
  }

  /**
   * Adds a probe at `position` and registers its cube camera with the scene.
   * BUG FIX: the original referenced `probe.renderTarget` inside the very
   * object literal that defines `probe` (a ReferenceError); the render
   * target is now created first and shared by name.
   * @param {THREE.Vector3} position - world-space probe location
   * @param {number} [resolution=64] - cube face resolution in pixels
   * @returns {object} the probe record ({position, renderTarget, camera, texture})
   */
  addProbe(position, resolution = 64) {
    const renderTarget = new THREE.WebGLCubeRenderTarget(resolution, {
      format: THREE.RGBAFormat,
      generateMipmaps: true
    });

    const probe = {
      position: position.clone(),
      renderTarget,
      camera: new THREE.CubeCamera(0.1, 1000, renderTarget),
      texture: null
    };

    this.scene.add(probe.camera);
    this.probes.push(probe);
    return probe;
  }

  /** Re-captures the scene from the probe and refreshes its irradiance map. */
  updateProbe(probe) {
    // Render the scene into the probe's cubemap from its position.
    probe.camera.position.copy(probe.position);
    probe.camera.update(this.renderer, this.scene);

    // Prefilter the capture into an irradiance-usable texture.
    // NOTE(review): fromCubemap allocates a new render target per call;
    // long-running apps should dispose the previous result to avoid leaks.
    probe.texture = this.pmremGenerator.fromCubemap(probe.renderTarget.texture).texture;
  }

  /**
   * Returns the irradiance texture of the probe nearest to `position`,
   * or null when no probes exist.
   */
  getIrradianceAt(position) {
    let nearestProbe = null;
    let minDistance = Infinity;

    for (const probe of this.probes) {
      const distance = position.distanceTo(probe.position);
      if (distance < minDistance) {
        minDistance = distance;
        nearestProbe = probe;
      }
    }

    return nearestProbe ? nearestProbe.texture : null;
  }

  /**
   * Distance-weighted blend over the three nearest probes (simplified:
   * the weights are computed for illustration but the closest probe's
   * texture is returned; a full implementation blends on the GPU).
   * BUG FIX: the original guarded `< 2` yet destructured three entries,
   * crashing with exactly two probes; the guard is now `< 3`.
   */
  getTrilinearIrradiance(position) {
    if (this.probes.length < 3) return this.getIrradianceAt(position);

    // Rank probes by distance to the query point.
    const sorted = this.probes
      .map(probe => ({
        probe,
        distance: position.distanceTo(probe.position)
      }))
      .sort((a, b) => a.distance - b.distance);

    const [p1, p2, p3] = sorted.slice(0, 3);
    const totalWeight = p1.distance + p2.distance + p3.distance;

    // Inverse-distance weights; note these sum to 2, so a real blend
    // would renormalize (divide by 2) before mixing.
    const w1 = 1 - p1.distance / totalWeight;
    const w2 = 1 - p2.distance / totalWeight;
    const w3 = 1 - p3.distance / totalWeight;

    // Actual blending must happen in a shader; return the closest probe.
    return p1.probe.texture;
  }
}

反射探针(Reflection Probes)

javascript

/**
 * Manages reflection probes (cube-map captures used for specular
 * reflections), with an amortized update scheduler so only a few probes
 * are refreshed per frame.
 */
class ReflectionProbeManager {
  constructor() {
    this.probes = new Map();   // name -> probe record
    this.updateInterval = 30;  // full refresh of all probes every N frames
    this.frameCount = 0;

    this.initProbeShaders();
  }

  /**
   * Builds the shader material used to blend between two probe cubemaps.
   * NOTE(review): `textureCube(s, dir, bias)` takes a LOD *bias*, not an
   * absolute mip level — absolute selection needs textureLod/
   * textureCubeLodEXT; confirm the intended roughness mapping.
   */
  initProbeShaders() {
    this.blendMaterial = new THREE.ShaderMaterial({
      uniforms: {
        probe1: { value: null },
        probe2: { value: null },
        weight: { value: 0.5 },
        roughness: { value: 0.0 }
      },
      vertexShader: `
        varying vec3 vWorldPosition;
        varying vec3 vNormal;

        void main() {
          vec4 worldPosition = modelMatrix * vec4(position, 1.0);
          vWorldPosition = worldPosition.xyz;
          vNormal = normalize(normalMatrix * normal);
          gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
        }
      `,
      fragmentShader: `
        uniform samplerCube probe1;
        uniform samplerCube probe2;
        uniform float weight;
        uniform float roughness;

        varying vec3 vWorldPosition;
        varying vec3 vNormal;

        // 根据粗糙度模糊mipmap级别
        float getMipLevel(float roughness) {
          return roughness * 8.0; // 假设有9个mipmap级别
        }

        void main() {
          vec3 viewDir = normalize(cameraPosition - vWorldPosition);
          vec3 reflectDir = reflect(-viewDir, vNormal);

          float mipLevel = getMipLevel(roughness);

          vec4 color1 = textureCube(probe1, reflectDir, mipLevel);
          vec4 color2 = textureCube(probe2, reflectDir, mipLevel);

          gl_FragColor = mix(color1, color2, weight);
        }
      `
    });
  }

  /**
   * Creates and registers a named probe.
   * BUG FIX: THREE.RGBFormat was removed from three.js (r137+); the
   * render target now uses RGBAFormat.
   * @param {string} name - unique probe key
   * @param {THREE.Vector3} position - world-space probe location
   * @param {object} [options] - resolution, near, far, influenceRadius, intensity
   * @returns {object} the probe record
   */
  createProbe(name, position, options = {}) {
    const resolution = options.resolution || 128;
    const near = options.near || 0.1;
    const far = options.far || 1000;

    const renderTarget = new THREE.WebGLCubeRenderTarget(resolution, {
      format: THREE.RGBAFormat,
      generateMipmaps: true,
      minFilter: THREE.LinearMipmapLinearFilter
    });

    const camera = new THREE.CubeCamera(near, far, renderTarget);
    camera.position.copy(position);

    const probe = {
      name,
      camera,
      renderTarget,
      position: position.clone(),
      influenceRadius: options.influenceRadius || 5,
      intensity: options.intensity || 1.0,
      lastUpdate: 0
    };

    this.probes.set(name, probe);
    return probe;
  }

  /** Re-captures one probe, preserving the renderer's active target. */
  updateProbe(probe, scene, renderer) {
    // Save the current render state so callers are not disturbed.
    const currentRenderTarget = renderer.getRenderTarget();

    probe.camera.update(renderer, scene);
    probe.lastUpdate = this.frameCount;

    // Restore render state.
    renderer.setRenderTarget(currentRenderTarget);
  }

  /**
   * Amortized probe refresh. Every `updateInterval` frames ALL probes are
   * refreshed; on other frames only the two highest-priority probes are.
   * BUG FIX: the original called the nonexistent `renderer.getCamera()`;
   * the viewer camera is now an explicit (optional) parameter. The
   * priority formula also rewarded FAR probes, contradicting its own
   * comment — near and stale probes now genuinely rank highest.
   * @param {THREE.Scene} scene
   * @param {THREE.WebGLRenderer} renderer
   * @param {THREE.Camera} [camera] - viewer camera used for the distance
   *   ranking; when omitted only the periodic full refresh runs.
   */
  updateDynamic(scene, renderer, camera) {
    this.frameCount++;

    // Periodic full refresh of every probe.
    if (this.frameCount % this.updateInterval === 0) {
      this.probes.forEach(probe => {
        this.updateProbe(probe, scene, renderer);
      });
      return;
    }

    if (!camera) return; // cannot rank by distance without a viewpoint

    const cameraPosition = camera.position;

    const sortedProbes = Array.from(this.probes.values())
      .map(probe => ({
        probe,
        distance: cameraPosition.distanceTo(probe.position),
        timeSinceUpdate: this.frameCount - probe.lastUpdate
      }))
      .sort((a, b) => {
        // Closer probes and probes not refreshed for a long time rank
        // highest (staleness normalized by the refresh interval).
        const priorityA = 0.7 / (a.distance + 1) + (a.timeSinceUpdate / this.updateInterval) * 0.3;
        const priorityB = 0.7 / (b.distance + 1) + (b.timeSinceUpdate / this.updateInterval) * 0.3;
        return priorityB - priorityA; // descending
      });

    // Refresh the top-priority probes this frame.
    const probesToUpdate = sortedProbes.slice(0, 2);
    probesToUpdate.forEach(({ probe }) => {
      this.updateProbe(probe, scene, renderer);
    });
  }

  /**
   * Picks the probe whose influence sphere contains `position`, ranked by
   * distance over a smooth quadratic falloff. Returns null when no probe
   * covers the position. Points exactly on the influence boundary get
   * zero weight (infinite weighted distance) and are effectively skipped.
   */
  getProbeForPosition(position) {
    let bestProbe = null;
    let minWeightedDistance = Infinity;

    this.probes.forEach(probe => {
      const distance = position.distanceTo(probe.position);
      const normalizedDistance = distance / probe.influenceRadius;

      if (normalizedDistance <= 1) {
        const weight = 1 - Math.pow(normalizedDistance, 2);
        const weightedDistance = distance / weight;

        if (weightedDistance < minWeightedDistance) {
          minWeightedDistance = weightedDistance;
          bestProbe = probe;
        }
      }
    });

    return bestProbe;
  }
}

基于Voxel的GI(VXGI)

javascript

/**
 * Voxel-based GI (VXGI, simplified/illustrative): the scene is voxelized
 * into a 3D texture, then indirect light is gathered by cone tracing
 * through the voxel grid.
 */
class VoxelGI {
  constructor(renderer, scene) {
    this.renderer = renderer;
    this.scene = scene;

    // Voxel grid parameters.
    this.gridSize = 64;   // 64x64x64 voxels
    this.cellSize = 0.5;  // world units per voxel
    this.worldSize = this.gridSize * this.cellSize;

    // 3D texture stores the voxelized radiance; shader does voxelization.
    this.initVoxelTexture();
    this.initVoxelizationShader();
  }

  /** Allocates the RGBA float 3D texture that stores voxelized radiance. */
  initVoxelTexture() {
    const size = this.gridSize;
    const data = new Float32Array(size * size * size * 4); // RGBA per voxel

    this.voxelTexture = new THREE.Data3DTexture(
      data, size, size, size
    );
    this.voxelTexture.format = THREE.RGBAFormat;
    this.voxelTexture.type = THREE.FloatType;
    this.voxelTexture.minFilter = THREE.LinearFilter;
    this.voxelTexture.magFilter = THREE.LinearFilter;
    this.voxelTexture.wrapS = THREE.ClampToEdgeWrapping;
    this.voxelTexture.wrapT = THREE.ClampToEdgeWrapping;
    this.voxelTexture.wrapR = THREE.ClampToEdgeWrapping;
    this.voxelTexture.needsUpdate = true;
  }

  /**
   * Builds the voxelization shader material.
   * BUG FIX: the fragment shader read `worldSize` without declaring the
   * uniform (a GLSL compile error); the declaration has been added.
   */
  initVoxelizationShader() {
    this.voxelizationMaterial = new THREE.ShaderMaterial({
      uniforms: {
        worldSize: { value: this.worldSize },
        gridSize: { value: this.gridSize },
        voxelTexture: { value: this.voxelTexture }
      },
      vertexShader: `
        uniform float worldSize;
        uniform float gridSize;

        varying vec3 vPosition;
        varying vec3 vNormal;
        varying vec3 vColor;

        void main() {
          vPosition = (modelMatrix * vec4(position, 1.0)).xyz;
          vNormal = normalize(normalMatrix * normal);
          vColor = color;

          // 转换到体素空间 [-0.5, 0.5]
          vec3 voxelPos = vPosition / worldSize;
          gl_Position = vec4(voxelPos, 1.0);
        }
      `,
      fragmentShader: `
        uniform sampler3D voxelTexture;
        uniform float gridSize;
        uniform float worldSize;

        varying vec3 vPosition;
        varying vec3 vNormal;
        varying vec3 vColor;

        void main() {
          // 计算体素坐标
          vec3 voxelCoord = (vPosition / worldSize + 0.5) * gridSize;

          // 存储到3D纹理(实际需要多遍渲染)
          // 这里简化表示
          gl_FragColor = vec4(vColor, 1.0);
        }
      `
    });
  }

  /** Voxelizes the scene with one pass per axis direction (6 total). */
  voxelizeScene() {
    for (let i = 0; i < 6; i++) {
      this.renderVoxelizationPass(i);
    }
  }

  /**
   * Renders one voxelization pass from an axis-aligned orthographic view.
   * (Illustrative: writing into the 3D texture requires rendering to
   * multiple FBO slices, which is elided here.)
   */
  renderVoxelizationPass(direction) {
    const camera = new THREE.OrthographicCamera(
      -this.worldSize / 2, this.worldSize / 2,
      this.worldSize / 2, -this.worldSize / 2,
      0, this.worldSize
    );

    // One unit direction per pass: +-X, +-Y, +-Z.
    const directions = [
      new THREE.Vector3(1, 0, 0),   // +X
      new THREE.Vector3(-1, 0, 0),  // -X
      new THREE.Vector3(0, 1, 0),   // +Y
      new THREE.Vector3(0, -1, 0),  // -Y
      new THREE.Vector3(0, 0, 1),   // +Z
      new THREE.Vector3(0, 0, -1)   // -Z
    ];

    camera.position.copy(directions[direction]).multiplyScalar(this.worldSize);
    camera.lookAt(0, 0, 0);

    // ... actual implementation renders into 3D texture slices here
  }

  /**
   * Diffuse cone tracing: averages radiance traced along the normal plus
   * a ring of hemisphere directions.
   */
  coneTrace(worldPos, normal) {
    const numCones = 6;
    const coneDirections = [
      normal,
      ...this.generateHemisphereSamples(normal, numCones - 1)
    ];

    let totalRadiance = new THREE.Vector3(0, 0, 0);

    coneDirections.forEach(dir => {
      const radiance = this.traceCone(worldPos, dir);
      totalRadiance.add(radiance);
    });

    return totalRadiance.multiplyScalar(1 / numCones);
  }

  /**
   * Builds `count` deterministic directions on the hemisphere around
   * `normal` — a 45-degree ring about the normal, used as cone axes.
   * (Added: the original called this helper without ever defining it.)
   * @param {THREE.Vector3} normal - hemisphere axis (assumed normalized)
   * @param {number} count - number of ring directions
   * @returns {THREE.Vector3[]} normalized directions
   */
  generateHemisphereSamples(normal, count) {
    // Pick a helper axis not parallel to the normal to build a basis.
    const up = Math.abs(normal.y) < 0.99
      ? new THREE.Vector3(0, 1, 0)
      : new THREE.Vector3(1, 0, 0);
    const tangent = up.clone().cross(normal).normalize();
    const bitangent = normal.clone().cross(tangent);

    const samples = [];
    for (let i = 0; i < count; i++) {
      const phi = (2 * Math.PI * i) / Math.max(count, 1);
      samples.push(
        tangent.clone().multiplyScalar(Math.cos(phi) * Math.SQRT1_2)
          .add(bitangent.clone().multiplyScalar(Math.sin(phi) * Math.SQRT1_2))
          .add(normal.clone().multiplyScalar(Math.SQRT1_2))
          .normalize()
      );
    }
    return samples;
  }

  /**
   * CPU-side nearest-voxel sample of the voxel texture at a world
   * position; positions outside the grid clamp to the border voxel.
   * (Added: the original called this helper without ever defining it.)
   * @param {{x:number,y:number,z:number}} worldPos
   * @returns {{r:number,g:number,b:number,a:number}}
   */
  sampleVoxel(worldPos) {
    const n = this.gridSize;
    // Map world coordinate [-worldSize/2, worldSize/2] -> voxel index [0, n-1].
    const clampIndex = (v) =>
      Math.min(n - 1, Math.max(0, Math.floor((v / this.worldSize + 0.5) * n)));

    const ix = clampIndex(worldPos.x);
    const iy = clampIndex(worldPos.y);
    const iz = clampIndex(worldPos.z);

    const data = this.voxelTexture.image.data;
    const offset = (iz * n * n + iy * n + ix) * 4;
    return {
      r: data[offset],
      g: data[offset + 1],
      b: data[offset + 2],
      a: data[offset + 3]
    };
  }

  /**
   * Marches one cone through the voxel grid: step until an occupied voxel
   * (alpha > 0.1) is hit, accumulate its attenuated color, and widen the
   * cone as it advances.
   */
  traceCone(origin, direction) {
    let accumulatedColor = new THREE.Vector3(0, 0, 0);
    let t = 0.1;           // start offset along the cone axis
    let coneRadius = 0.05; // initial cone footprint

    for (let i = 0; i < 20; i++) { // hard cap on march steps
      const samplePos = origin.clone().add(direction.clone().multiplyScalar(t));

      const sample = this.sampleVoxel(samplePos);

      if (sample.a > 0.1) { // hit geometry
        // Exponential distance attenuation.
        const attenuation = Math.exp(-t * 0.5);
        accumulatedColor.add(
          new THREE.Vector3(sample.r, sample.g, sample.b)
            .multiplyScalar(attenuation)
        );
        break;
      }

      t += coneRadius * 2; // advance by the cone diameter
      coneRadius += 0.01;  // cone widens with distance
    }

    return accumulatedColor;
  }
}

光线追踪GI(WebGL2)

javascript

/**
 * Screen-space path-traced GI prototype (WebGL2 only): renders a
 * G-Buffer, then runs a full-screen path-tracing pass that reconstructs
 * positions from depth and gathers bounced light.
 */
class RayTracedGI {
  constructor(renderer, scene, camera) {
    this.renderer = renderer;
    this.scene = scene;
    this.camera = camera;

    // Path tracing needs WebGL2 (GLSL ES 3.0, MRT).
    this.supportsWebGL2 = renderer.capabilities.isWebGL2;

    if (this.supportsWebGL2) {
      this.initRayTracing();
    }
  }

  /**
   * Builds the full-screen ray-tracing material and quad.
   * BUG FIX: the original embedded `#version 300 es` in the fragment
   * shader — three.js prepends its own preamble to ShaderMaterial
   * sources, so an inline #version breaks compilation. GLSL ES 3.0 is
   * now requested via `glslVersion: THREE.GLSL3`, and the vertex shader
   * was converted from GLSL1 `varying` to GLSL3 `out` to match the
   * fragment shader's `in`/`out` declarations.
   */
  initRayTracing() {
    this.rtMaterial = new THREE.ShaderMaterial({
      glslVersion: THREE.GLSL3,
      uniforms: {
        sceneTexture: { value: null },
        depthTexture: { value: null },
        normalTexture: { value: null },
        cameraProjectionInverse: { value: new THREE.Matrix4() },
        cameraWorldMatrix: { value: new THREE.Matrix4() },
        frame: { value: 0 },
        bvhTexture: { value: null }
      },
      vertexShader: `
        out vec2 vUv;
        void main() {
          vUv = uv;
          gl_Position = vec4(position, 1.0);
        }
      `,
      fragmentShader: `
        uniform sampler2D sceneTexture;
        uniform sampler2D depthTexture;
        uniform sampler2D normalTexture;
        uniform mat4 cameraProjectionInverse;
        uniform mat4 cameraWorldMatrix;
        uniform int frame;
        uniform sampler2D bvhTexture;

        in vec2 vUv;
        out vec4 fragColor;

        // 光线结构
        struct Ray {
          vec3 origin;
          vec3 direction;
        };

        // 光线相交结果
        struct Hit {
          float distance;
          vec3 position;
          vec3 normal;
          vec3 color;
          bool hit;
        };

        // 从深度重建世界位置
        vec3 reconstructPosition(vec2 uv, float depth) {
          vec4 clipSpace = vec4(uv * 2.0 - 1.0, depth * 2.0 - 1.0, 1.0);
          vec4 viewSpace = cameraProjectionInverse * clipSpace;
          viewSpace.xyz /= viewSpace.w;
          vec4 worldSpace = cameraWorldMatrix * viewSpace;
          return worldSpace.xyz;
        }

        // 生成随机数
        float random(vec2 st) {
          return fract(sin(dot(st.xy, vec2(12.9898, 78.233))) * 43758.5453123);
        }

        // 在半球内生成随机方向
        vec3 randomHemisphereDirection(vec3 normal, vec2 rnd) {
          // 使用余弦加权采样
          float u = rnd.x;
          float v = rnd.y;
          float phi = 2.0 * 3.141592 * u;
          float cosTheta = sqrt(1.0 - v);
          float sinTheta = sqrt(v);

          vec3 tangent = normalize(cross(normal, vec3(0.0, 1.0, 0.0)));
          if (length(tangent) < 0.001) {
            tangent = cross(normal, vec3(1.0, 0.0, 0.0));
          }
          vec3 bitangent = cross(normal, tangent);

          return normalize(
            tangent * (cos(phi) * sinTheta) +
            bitangent * (sin(phi) * sinTheta) +
            normal * cosTheta
          );
        }

        // 路径追踪
        vec3 pathTrace(Ray ray, int maxBounces) {
          vec3 throughput = vec3(1.0);
          vec3 radiance = vec3(0.0);

          for (int bounce = 0; bounce < maxBounces; bounce++) {
            // 简化:直接返回环境颜色
            if (bounce == 0) {
              radiance = texture(sceneTexture, vUv).rgb;
              break;
            }

            // 实际实现需要BVH遍历
            Hit hit;
            hit.hit = false;

            if (!hit.hit) {
              // 未命中,使用环境光
              radiance += throughput * vec3(0.1);
              break;
            }

            // 累积贡献
            throughput *= hit.color;

            // 俄罗斯轮盘赌终止
            float p = max(throughput.r, max(throughput.g, throughput.b));
            if (random(vUv + float(frame)) > p) {
              break;
            }
            throughput /= p;

            // 生成新的射线方向
            vec2 rnd = vec2(
              random(vUv + vec2(float(frame), float(bounce))),
              random(vUv + vec2(float(bounce), float(frame)))
            );
            ray.origin = hit.position + hit.normal * 0.001;
            ray.direction = randomHemisphereDirection(hit.normal, rnd);
          }

          return radiance;
        }

        void main() {
          // 采样深度和法线
          float depth = texture(depthTexture, vUv).r;
          vec3 normal = texture(normalTexture, vUv).rgb * 2.0 - 1.0;

          // 重建位置
          vec3 worldPos = reconstructPosition(vUv, depth);

          // 生成初始射线
          Ray ray;
          ray.origin = worldPos;

          // 根据材质属性决定射线方向
          // 对于漫反射表面,向半球发射
          vec2 rnd = vec2(
            random(vUv + vec2(float(frame), 0.0)),
            random(vUv + vec2(0.0, float(frame)))
          );
          ray.direction = randomHemisphereDirection(normal, rnd);

          // 执行路径追踪
          vec3 indirectLight = pathTrace(ray, 3);

          // 与直接光混合
          vec3 directLight = texture(sceneTexture, vUv).rgb;
          vec3 finalColor = directLight + indirectLight * 0.5;

          fragColor = vec4(finalColor, 1.0);
        }
      `
    });

    // Full-screen quad and a dedicated scene/camera for the RT pass.
    // BUG FIX: the camera used to be allocated anew every frame in
    // renderRayTracing(); it is now created once here.
    this.rtQuad = new THREE.Mesh(
      new THREE.PlaneGeometry(2, 2),
      this.rtMaterial
    );
    this.rtScene = new THREE.Scene();
    this.rtScene.add(this.rtQuad);
    this.rtCamera = new THREE.Camera();
  }

  /** Renders a GI frame, falling back to plain rendering without WebGL2. */
  render() {
    if (!this.supportsWebGL2) {
      console.warn('WebGL2 not supported, falling back to standard rendering');
      this.renderer.render(this.scene, this.camera);
      return;
    }

    // G-Buffer first, then the full-screen ray-tracing pass.
    this.renderGBuffer();
    this.renderRayTracing();
  }

  /**
   * Lazily allocates a 3-attachment MRT target (color / normal /
   * material) plus a depth texture, and renders the scene into it.
   * BUG FIX: the original passed nested WebGLRenderTargets through a
   * bogus `textures:` option; multiple color attachments are requested
   * with the `count` option (three.js r152+) and read back via
   * `renderTarget.textures[i]`.
   * NOTE(review): scene materials must actually write three outputs
   * (custom G-Buffer materials) for attachments 1..2 to hold valid data.
   */
  renderGBuffer() {
    const size = new THREE.Vector2();
    this.renderer.getSize(size);

    if (!this.gBuffer) {
      this.gBuffer = new THREE.WebGLRenderTarget(size.x, size.y, {
        count: 3, // color, normal, material attachments
        depthBuffer: true,
        depthTexture: new THREE.DepthTexture(size.x, size.y)
      });
    }

    this.renderer.setRenderTarget(this.gBuffer);
    this.renderer.render(this.scene, this.camera);
  }

  /** Updates the RT uniforms and runs the full-screen tracing pass. */
  renderRayTracing() {
    this.rtMaterial.uniforms.sceneTexture.value = this.gBuffer.textures[0];
    this.rtMaterial.uniforms.depthTexture.value = this.gBuffer.depthTexture;
    this.rtMaterial.uniforms.normalTexture.value = this.gBuffer.textures[1];
    this.rtMaterial.uniforms.cameraProjectionInverse.value
      .copy(this.camera.projectionMatrix).invert();
    this.rtMaterial.uniforms.cameraWorldMatrix.value.copy(this.camera.matrixWorld);
    this.rtMaterial.uniforms.frame.value++;

    this.renderer.setRenderTarget(null);
    this.renderer.render(this.rtScene, this.rtCamera);
  }
}

辐照度体积(Irradiance Volumes)

javascript

/**
 * Irradiance volume: a regular 3D grid of probes storing diffuse lighting
 * as 9 spherical-harmonic (bands l=0..2) RGB coefficients per voxel.
 *
 * CPU-side SH data lives in `this.shData`; a companion RGBA float 3D
 * texture (`this.irradianceTexture`) holds the band-0 (ambient) term so
 * that the uploaded data size actually matches the texture dimensions.
 */
class IrradianceVolume {
  /**
   * @param {THREE.Box3} bounds - world-space extent of the volume
   * @param {{x:number,y:number,z:number}} resolution - voxels per axis
   */
  constructor(bounds, resolution) {
    this.bounds = bounds;
    this.resolution = resolution;

    // BUG FIX: the constructor called `initIrradianceTexture()` but the
    // method was defined as `initIrradanceTexture` (typo), so every
    // construction threw a TypeError; the names now match.
    this.initIrradianceTexture();

    // World-space size of one voxel along each axis.
    this.voxelSize = new THREE.Vector3(
      (bounds.max.x - bounds.min.x) / resolution.x,
      (bounds.max.y - bounds.min.y) / resolution.y,
      (bounds.max.z - bounds.min.z) / resolution.z
    );
  }

  /**
   * Allocates SH storage and the GPU-facing 3D texture.
   * BUG FIX: the original uploaded a 27-float-per-voxel array as an RGB
   * 3D texture (3 floats per voxel expected) — a size mismatch — and
   * used THREE.RGBFormat, which was removed from modern three.js. The
   * texture now stores 4 floats (RGBA) per voxel; the full SH data is
   * kept CPU-side in `this.shData`.
   */
  initIrradianceTexture() {
    const { x: rx, y: ry, z: rz } = this.resolution;
    const size = rx * ry * rz;

    // 9 SH coefficients x RGB per voxel, CPU-side.
    this.shData = new Float32Array(size * 9 * 3);

    // Band-0 (ambient) irradiance per voxel, uploaded to the GPU.
    const texData = new Float32Array(size * 4);
    this.irradianceTexture = new THREE.Data3DTexture(
      texData, rx, ry, rz
    );
    this.irradianceTexture.format = THREE.RGBAFormat;
    this.irradianceTexture.type = THREE.FloatType;
    this.irradianceTexture.needsUpdate = true;
  }

  /**
   * Projects a set of directional radiance samples onto the first 9
   * (l<=2) spherical-harmonic basis functions.
   * @param {{direction:{x,y,z}, color:{r,g,b}}[]} samples
   * @returns {THREE.Vector3[]} 9 RGB coefficient vectors
   */
  computeSphericalHarmonics(samples) {
    const coeffs = new Array(9).fill(0).map(() => new THREE.Vector3(0, 0, 0));

    samples.forEach(sample => {
      const { direction, color } = sample;
      const x = direction.x, y = direction.y, z = direction.z;

      // Real SH basis values, bands l=0..2.
      const basis = [
        0.282095,                    // l=0, m=0
        0.488603 * y,                // l=1, m=-1
        0.488603 * z,                // l=1, m=0
        0.488603 * x,                // l=1, m=1
        1.092548 * x * y,            // l=2, m=-2
        1.092548 * y * z,            // l=2, m=-1
        0.946175 * z * z - 0.315391, // l=2, m=0
        1.092548 * x * z,            // l=2, m=1
        0.546274 * (x * x - y * y)   // l=2, m=2
      ];

      basis.forEach((b, i) => {
        coeffs[i].x += color.r * b;
        coeffs[i].y += color.g * b;
        coeffs[i].z += color.b * b;
      });
    });

    // Monte-Carlo normalization over the sphere (solid angle 4π).
    const weight = 4 * Math.PI / samples.length;
    coeffs.forEach(c => c.multiplyScalar(weight));

    return coeffs;
  }

  /** Maps a world position to integer voxel coordinates. */
  worldToVoxel(worldPos) {
    return new THREE.Vector3(
      Math.floor((worldPos.x - this.bounds.min.x) / this.voxelSize.x),
      Math.floor((worldPos.y - this.bounds.min.y) / this.voxelSize.y),
      Math.floor((worldPos.z - this.bounds.min.z) / this.voxelSize.z)
    );
  }

  /** Flat index of the first SH float for voxel (vx, vy, vz). */
  shIndex(vx, vy, vz) {
    const { x: rx, y: ry } = this.resolution;
    return (vz * ry * rx + vy * rx + vx) * 9 * 3;
  }

  /**
   * Trilinear interpolation of the band-0 (ambient) irradiance over the
   * 8 voxels surrounding `worldPos`. Voxels outside the grid contribute
   * nothing (their weight is dropped rather than renormalized).
   */
  sampleIrradiance(worldPos) {
    const voxelPos = this.worldToVoxel(worldPos);
    const frac = new THREE.Vector3(
      ((worldPos.x - this.bounds.min.x) % this.voxelSize.x) / this.voxelSize.x,
      ((worldPos.y - this.bounds.min.y) % this.voxelSize.y) / this.voxelSize.y,
      ((worldPos.z - this.bounds.min.z) % this.voxelSize.z) / this.voxelSize.z
    );

    const irradiance = new THREE.Vector3(0, 0, 0);

    for (let dx = 0; dx <= 1; dx++) {
      for (let dy = 0; dy <= 1; dy++) {
        for (let dz = 0; dz <= 1; dz++) {
          const sx = voxelPos.x + dx;
          const sy = voxelPos.y + dy;
          const sz = voxelPos.z + dz;

          // Skip voxels outside the grid.
          if (sx < 0 || sx >= this.resolution.x ||
              sy < 0 || sy >= this.resolution.y ||
              sz < 0 || sz >= this.resolution.z) {
            continue;
          }

          const weight =
            (dx ? frac.x : 1 - frac.x) *
            (dy ? frac.y : 1 - frac.y) *
            (dz ? frac.z : 1 - frac.z);

          // Band-0 coefficient times the l=0 basis constant gives the
          // direction-independent irradiance term.
          const base = this.shIndex(sx, sy, sz);
          irradiance.add(new THREE.Vector3(
            this.shData[base], this.shData[base + 1], this.shData[base + 2]
          ).multiplyScalar(weight * 0.282095));
        }
      }
    }

    return irradiance;
  }

  /**
   * Writes a voxel's SH coefficients into CPU storage and mirrors the
   * ambient (band-0) term into the GPU texture.
   * @param {{x:number,y:number,z:number}} voxelPos - integer voxel coords
   * @param {THREE.Vector3[]} irradianceData - 9 RGB SH coefficients
   */
  updateVoxel(voxelPos, irradianceData) {
    const { x: vx, y: vy, z: vz } = voxelPos;
    const { x: rx, y: ry } = this.resolution;
    const voxelIndex = vz * ry * rx + vy * rx + vx;

    // Full SH set, CPU-side.
    const shBase = this.shIndex(vx, vy, vz);
    irradianceData.forEach((coeff, i) => {
      this.shData[shBase + i * 3 + 0] = coeff.x;
      this.shData[shBase + i * 3 + 1] = coeff.y;
      this.shData[shBase + i * 3 + 2] = coeff.z;
    });

    // Ambient (l=0) term into the RGBA texture.
    const texBase = voxelIndex * 4;
    const texData = this.irradianceTexture.image.data;
    texData[texBase + 0] = irradianceData[0].x * 0.282095;
    texData[texBase + 1] = irradianceData[0].y * 0.282095;
    texData[texBase + 2] = irradianceData[0].z * 0.282095;
    texData[texBase + 3] = 1.0;

    this.irradianceTexture.needsUpdate = true;
  }
}

混合GI方案

javascript

/**
 * Combines several GI techniques and chooses an active subset based on a
 * rough estimate of device capability.
 */
class HybridGI {
  constructor(renderer, scene, camera) {
    this.renderer = renderer;
    this.scene = scene;
    this.camera = camera;

    // Progressive-update bookkeeping.
    // BUG FIX: `resolutionScale` was never initialized, so
    // adaptResolution() compared against undefined and never adapted.
    this.frameCount = 0;
    this.resolutionScale = 1.0;

    // Available technique instances (null entries are placeholders).
    this.techniques = {
      irradianceProbes: new IrradianceProbeSystem(scene, renderer),
      reflectionProbes: new ReflectionProbeManager(),
      ssgi: null, // screen-space GI (not implemented here)
      lightmap: null
    };

    // Pick techniques for this device, then build the composite pass.
    this.autoSelectTechnique();
    this.initCompositeShader();
  }

  /**
   * Chooses the set of active techniques for the current device tier.
   * NOTE(review): 'ssr' is listed for the medium tier but has no entry
   * in `this.techniques` — confirm the intended mapping.
   */
  autoSelectTechnique() {
    // Guarded so this also runs outside a browser (e.g. in tests).
    const userAgent = typeof navigator !== 'undefined' ? navigator.userAgent : '';
    const isMobile = /Mobi|Android/i.test(userAgent);
    const gpuTier = this.estimateGPUTier();

    if (isMobile || gpuTier === 'low') {
      // Low-end devices: lightmaps only.
      this.activeTechniques = ['lightmap'];
    } else if (gpuTier === 'medium') {
      // Mid-range: irradiance probes + screen-space reflections.
      this.activeTechniques = ['irradianceProbes', 'ssr'];
    } else {
      // High-end: full GI stack.
      this.activeTechniques = ['irradianceProbes', 'reflectionProbes', 'ssgi'];
    }
  }

  /**
   * Very coarse GPU classification from the unmasked renderer string.
   * Defaults to 'medium' when the debug extension is unavailable.
   */
  estimateGPUTier() {
    const gl = this.renderer.getContext();
    const debugInfo = gl.getExtension('WEBGL_debug_renderer_info');

    if (debugInfo) {
      const renderer = gl.getParameter(debugInfo.UNMASKED_RENDERER_WEBGL);

      // Crude keyword-based classification.
      if (renderer.includes('Intel') ||
          renderer.includes('Mali') ||
          renderer.includes('Adreno')) {
        return 'low';
      } else if (renderer.includes('GeForce GTX') ||
                 renderer.includes('Radeon RX')) {
        return 'high';
      }
    }

    return 'medium';
  }

  /** Builds the shader that composites all GI contributions. */
  initCompositeShader() {
    this.compositeMaterial = new THREE.ShaderMaterial({
      uniforms: {
        tDirect: { value: null },
        tIndirectDiffuse: { value: null },
        tIndirectSpecular: { value: null },
        tSSR: { value: null },
        tAO: { value: null },

        // Per-contribution blend weights.
        diffuseWeight: { value: 1.0 },
        specularWeight: { value: 0.5 },
        ssrWeight: { value: 0.3 },
        aoWeight: { value: 0.5 }
      },
      vertexShader: `
        varying vec2 vUv;
        varying vec3 vWorldPosition;
        varying vec3 vNormal;

        void main() {
          vUv = uv;
          vec4 worldPosition = modelMatrix * vec4(position, 1.0);
          vWorldPosition = worldPosition.xyz;
          vNormal = normalize(normalMatrix * normal);
          gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
        }
      `,
      fragmentShader: `
        uniform sampler2D tDirect;
        uniform sampler2D tIndirectDiffuse;
        uniform sampler2D tIndirectSpecular;
        uniform sampler2D tSSR;
        uniform sampler2D tAO;

        uniform float diffuseWeight;
        uniform float specularWeight;
        uniform float ssrWeight;
        uniform float aoWeight;

        varying vec2 vUv;
        varying vec3 vWorldPosition;
        varying vec3 vNormal;

        void main() {
          // 采样所有贡献
          vec3 direct = texture2D(tDirect, vUv).rgb;
          vec3 indirectDiffuse = texture2D(tIndirectDiffuse, vUv).rgb;
          vec3 indirectSpecular = texture2D(tIndirectSpecular, vUv).rgb;
          vec3 ssr = texture2D(tSSR, vUv).rgb;
          float ao = texture2D(tAO, vUv).r;

          // 计算材质属性(从G-Buffer)
          float roughness = 0.5; // 应从G-Buffer获取
          float metalness = 0.0;

          // 混合间接漫反射
          vec3 totalDiffuse = direct + indirectDiffuse * diffuseWeight;

          // 混合镜面反射(根据粗糙度)
          float specularMix = mix(ssrWeight, specularWeight, roughness);
          vec3 totalSpecular = mix(indirectSpecular, ssr, specularMix);

          // 应用AO
          totalDiffuse *= mix(1.0, ao, aoWeight);

          // 最终颜色(基于能量守恒)
          vec3 dielectric = totalDiffuse + totalSpecular;
          vec3 metal = direct * texture2D(tIndirectSpecular, vUv).rgb;
          vec3 finalColor = mix(dielectric, metal, metalness);

          gl_FragColor = vec4(finalColor, 1.0);
        }
      `
    });
  }

  /**
   * Progressive update: cheap screen-space effects every frame, a partial
   * probe refresh every 30 frames, a full refresh every 300.
   * BUG FIX: the hook methods (updateScreenSpaceEffects, etc.) are not
   * defined on this class, so the original threw on first call; they are
   * now invoked via optional calls so a subclass can provide them. The
   * always-true `% 1 === 0` test was removed.
   */
  updateProgressive() {
    this.frameCount++;

    // Every frame: screen-space effects.
    this.updateScreenSpaceEffects?.();

    if (this.frameCount % 30 === 0) {
      // Every 30 frames: refresh a subset of probes.
      this.updateSomeProbes?.();
    }

    if (this.frameCount % 300 === 0) {
      // Every 300 frames: refresh all probes.
      this.updateAllProbes?.();
    }
  }

  /**
   * Dynamic resolution: drop the render scale below 30 FPS, raise it
   * above 55. A no-op unless a subclass provides getFPS() (and,
   * optionally, updateRenderTargets()).
   */
  adaptResolution() {
    if (typeof this.getFPS !== 'function') return;
    const fps = this.getFPS();

    if (fps < 30 && this.resolutionScale > 0.5) {
      this.resolutionScale = Math.max(0.5, this.resolutionScale - 0.1);
      this.updateRenderTargets?.();
    } else if (fps > 55 && this.resolutionScale < 1.0) {
      this.resolutionScale = Math.min(1.0, this.resolutionScale + 0.1);
      this.updateRenderTargets?.();
    }
  }
}

性能优化策略

javascript

/**
 * Scheduling and memory helpers for amortizing GI work across frames:
 * priority ranking, frame-sliced batching, worker offload, and a rough
 * memory budget.
 */
class GIOptimizer {
  constructor() {
    this.optimizations = {
      // Distance culling.
      distanceCulling: true,
      // BUG FIX: this key was written `maxGI Distance` — a space inside
      // the property name, which is a JavaScript syntax error.
      maxGIDistance: 50,

      // LOD: farther objects get lower-resolution GI.
      useLOD: true,
      lodLevels: [
        { distance: 10, resolution: 1.0 },
        { distance: 20, resolution: 0.5 },
        { distance: 50, resolution: 0.25 }
      ],

      // Async compute (guarded: navigator may not exist outside browsers).
      asyncUpdates: true,
      workerThreads: (typeof navigator !== 'undefined' && navigator.hardwareConcurrency) || 4,

      // Caching.
      useCache: true,
      cacheDuration: 1000 // ms
    };

    // Resource registries used by the memory accounting below.
    // BUG FIX: these were read by estimateGIMemoryUsage() without ever
    // being initialized.
    this.giTextures = [];
    this.giMeshes = [];

    this.initWorkers();
  }

  /**
   * Spawns the GI worker when Web Workers are available.
   * BUG FIX: the original checked `window.Worker`, which throws a
   * ReferenceError in non-window environments; use a typeof guard.
   */
  initWorkers() {
    if (this.optimizations.asyncUpdates && typeof Worker !== 'undefined') {
      this.giWorker = new Worker('gi-worker.js');

      this.giWorker.onmessage = (event) => {
        const { type, data } = event.data;

        switch (type) {
          case 'irradianceProbe':
            this.applyProbeData(data);
            break;
          case 'lightmap':
            this.applyLightmapData(data);
            break;
          case 'voxelData':
            this.applyVoxelData(data);
            break;
        }
      };
    }
  }

  /**
   * Ranks objects for GI refresh: near, large-on-screen, and stale
   * objects first.
   * @returns {{obj: object, priority: number}[]} sorted descending
   */
  calculateUpdatePriority(objects, camera) {
    return objects.map(obj => {
      const distance = obj.position.distanceTo(camera.position);
      const screenSize = this.estimateScreenSize(obj, camera);
      const lastUpdate = obj.lastGICalculation || 0;
      const timeSinceUpdate = Date.now() - lastUpdate;

      const priority =
        (1 / (distance + 1)) * 0.4 +       // closer => higher priority
        screenSize * 0.3 +                 // bigger on screen => higher
        (timeSinceUpdate / 1000) * 0.3;    // staler => higher

      return { obj, priority };
    })
    .sort((a, b) => b.priority - a.priority);
  }

  /** Rough screen-coverage estimate in [0, 1] from bounding-box size. */
  estimateScreenSize(mesh, camera) {
    const bounds = new THREE.Box3().setFromObject(mesh);
    const size = bounds.getSize(new THREE.Vector3());
    const diagonal = size.length();

    const distance = mesh.position.distanceTo(camera.position);
    const screenSize = diagonal / (distance + 0.001);

    return Math.min(screenSize, 1.0);
  }

  /**
   * Processes `items` in chunks of `batchSize`, one chunk per animation
   * frame (falls back to setTimeout outside the browser). The first
   * batch runs synchronously.
   */
  processInBatches(items, batchSize, processFn) {
    const batches = [];
    for (let i = 0; i < items.length; i += batchSize) {
      batches.push(items.slice(i, i + batchSize));
    }

    // Scheduler with a non-browser fallback.
    const schedule = typeof requestAnimationFrame !== 'undefined'
      ? (cb) => requestAnimationFrame(cb)
      : (cb) => setTimeout(cb, 16);

    let currentBatch = 0;

    const processNextBatch = () => {
      if (currentBatch >= batches.length) return;

      processFn(batches[currentBatch]);
      currentBatch++;

      // Continue on the next frame.
      schedule(processNextBatch);
    };

    processNextBatch();
  }

  /** Frees GI resources once the estimated usage exceeds the budget. */
  manageGIMemory() {
    const maxMemoryMB = 512;
    const usedMemory = this.estimateGIMemoryUsage();

    if (usedMemory > maxMemoryMB) {
      this.freeUnusedResources();
    }
  }

  /**
   * Approximate GI memory footprint in MB.
   * NOTE(review): counts one byte per texel channel and 4 bytes per
   * vertex float — float textures actually use 4 bytes per channel, so
   * this underestimates; confirm the intended accounting.
   */
  estimateGIMemoryUsage() {
    let total = 0;

    // Texture memory estimate.
    this.giTextures.forEach(texture => {
      const size = texture.image.width * texture.image.height *
                   (texture.format === THREE.RGBAFormat ? 4 : 3);
      total += size;
    });

    // Geometry memory estimate.
    this.giMeshes.forEach(mesh => {
      const geometry = mesh.geometry;
      const vertices = geometry.attributes.position.count;
      total += vertices * 3 * 4; // 3 floats per vertex, 4 bytes each
    });

    return total / (1024 * 1024); // bytes -> MB
  }
}

总结

Three.js中的全局光照技术包括:

  1. 烘焙方案:光照贴图、辐照度贴图(性能好,适合静态场景)

  2. 实时方案:辐照/反射探针、体素GI(VXGI)、屏幕空间与光线追踪(适合动态场景,但开销较高)

  3. 混合方案:结合多种技术的最佳效果

选择建议:静态场景优先采用烘焙方案;动态场景选用探针或屏幕空间方案;高端硬件可叠加VXGI或光线追踪。

优化关键:分帧与按优先级更新、距离剔除与LOD、异步计算,以及严格的显存预算管理。

全局光照是实时渲染的圣杯,Three.js提供了从简单到复杂的多种实现方案,可以根据项目需求和目标硬件选择合适的技术栈。