1. WebGL 2.0特性应用

Three.js 中的 WebGL 2.0 特性应用能够显著增强图形渲染能力。以下是关键特性及其应用方式:

开启 WebGL 2.0 渲染器

// Create a WebGL renderer. Modern Three.js (r118+) automatically obtains
// a WebGL 2.0 context when the browser supports it; a pre-created WebGL2
// context may also be supplied via `context`.
// NOTE: the original snippet declared `const renderer` twice, which is a
// SyntaxError — the two alternatives are merged into one snippet here.
const renderer = new THREE.WebGLRenderer({
    context: gl, // optional: pass an existing WebGL2 context
    canvas: canvas,
    antialias: true
});

// Confirm which kind of context was actually created.
if (renderer.capabilities.isWebGL2) {
    console.log('WebGL 2.0 可用');
}

3D 纹理(Texture3D)

// Build a 3D texture whose voxels hold random RGB values with alpha
// fixed at 1.0 (RGBA float layout: 4 components per voxel).
const size = 128;
const voxelCount = size * size * size;
const data = new Float32Array(voxelCount * 4);

for (let v = 0; v < voxelCount; v++) {
    const base = v * 4;
    data[base] = Math.random();     // R
    data[base + 1] = Math.random(); // G
    data[base + 2] = Math.random(); // B
    data[base + 3] = 1.0;           // A
}

const texture3D = new THREE.DataTexture3D(data, size, size, size);
Object.assign(texture3D, {
    format: THREE.RGBAFormat,
    type: THREE.FloatType,
    needsUpdate: true
});

变换反馈(Transform Feedback)

// Particle-update vertex shader for transform feedback.
// BUG FIX: `#version 300 es` must be the VERY FIRST line of a GLSL ES
// 3.00 shader — no comments, blank lines, or whitespace may precede it,
// otherwise the shader fails to compile. The original placed a comment
// before the directive.
const transformFeedbackShader = `#version 300 es
    // WebGL 2.0 transform-feedback shader: outputs are captured into
    // buffer objects instead of being rasterized.
    in vec3 position;
    in vec3 velocity;

    out vec3 vPosition; // captured: integrated position
    out vec3 vVelocity; // captured: velocity passed through

    uniform float deltaTime;

    void main() {
        vVelocity = velocity;
        // Explicit Euler integration step.
        vPosition = position + velocity * deltaTime;
    }
`;

多重采样渲染目标(MSAA Render Targets)

// Allocate a multisampled (4x MSAA) render target; the samples are
// resolved automatically when the target's texture is read.
const MSAA_SAMPLES = 4;
const renderTarget = new THREE.WebGLRenderTarget(1024, 1024, {
    samples: MSAA_SAMPLES,
    type: THREE.HalfFloatType
});

// Small helper: direct output to a target (null = default framebuffer)
// and draw one scene/camera pair into it.
const renderInto = (target, targetScene, targetCamera) => {
    renderer.setRenderTarget(target);
    renderer.render(targetScene, targetCamera);
};

renderInto(renderTarget, scene, camera);      // multisampled pass
renderInto(null, screenScene, screenCamera);  // resolve to the screen

实例化数组(Instanced Arrays)

// Instanced rendering: draw many boxes in one call, each carrying its
// own translation (`offset`) and color as per-instance attributes.
const geometry = new THREE.InstancedBufferGeometry();
geometry.copy(new THREE.BoxGeometry());

const count = 1000;
const componentCount = count * 3;
const offsets = new Float32Array(componentCount);
const colors = new Float32Array(componentCount);

// Positions uniformly random in [-50, 50) on every axis; colors uniformly
// random in [0, 1) per channel.
for (let c = 0; c < componentCount; c++) {
    offsets[c] = Math.random() * 100 - 50;
    colors[c] = Math.random();
}

geometry.setAttribute('offset', new THREE.InstancedBufferAttribute(offsets, 3));
geometry.setAttribute('color', new THREE.InstancedBufferAttribute(colors, 3));

// BUG FIX: RawShaderMaterial injects NO built-in declarations, so the
// `position` attribute, the matrix uniforms, and a fragment precision
// qualifier must all be written out explicitly — the original shader
// used them undeclared and would fail to compile.
const material = new THREE.RawShaderMaterial({
    vertexShader: `
        precision highp float;

        uniform mat4 projectionMatrix;
        uniform mat4 modelViewMatrix;

        attribute vec3 position; // base box vertex
        attribute vec3 offset;   // per-instance translation
        attribute vec3 color;    // per-instance color

        varying vec3 vColor;

        void main() {
            vColor = color;
            gl_Position = projectionMatrix *
                         modelViewMatrix *
                         vec4(position + offset, 1.0);
        }
    `,
    fragmentShader: `
        precision highp float; // required in GLSL ES fragment shaders

        varying vec3 vColor;
        void main() {
            gl_FragColor = vec4(vColor, 1.0);
        }
    `
});

const mesh = new THREE.Mesh(geometry, material);
scene.add(mesh);

统一缓冲区对象(UBO)

// BUG FIX: entries in `uniforms:` do NOT bind into a std140 interface
// block. Three.js exposes WebGL 2.0 uniform buffer objects through
// THREE.UniformsGroup, which can be shared across many materials, and
// interface blocks require GLSL ES 3.00 (`glslVersion: THREE.GLSL3`).
const commonUniforms = new THREE.UniformsGroup();
commonUniforms.setName('CommonUniforms');
commonUniforms.add(new THREE.Uniform(0));                   // time
commonUniforms.add(new THREE.Uniform(new THREE.Vector2())); // resolution

const shaderMaterial = new THREE.ShaderMaterial({
    glslVersion: THREE.GLSL3, // interface blocks need GLSL ES 3.00
    vertexShader: `
        uniform CommonUniforms {
            float time;
            vec2 resolution;
        };

        void main() {
            // Displace vertices with a time-animated sine wave.
            vec3 pos = position;
            pos.y += sin(time + position.x) * 0.5;
            gl_Position = projectionMatrix *
                         modelViewMatrix *
                         vec4(pos, 1.0);
        }
    `,
    fragmentShader: `
        uniform CommonUniforms {
            float time;
            vec2 resolution;
        };

        out vec4 fragColor; // GLSL3: gl_FragColor no longer exists

        void main() {
            vec2 uv = gl_FragCoord.xy / resolution;
            fragColor = vec4(uv, sin(time) * 0.5 + 0.5, 1.0);
        }
    `
});

// Attach the UBO; update values per frame via
// commonUniforms.uniforms[i].value.
shaderMaterial.uniformsGroups = [commonUniforms];

纹理数组(Texture Arrays)

// BUG FIX: THREE.TextureArray does not exist. 2D texture arrays are
// created with THREE.DataArrayTexture: one contiguous buffer holding
// `depth` layers. (Image files must be decoded and copied into the
// buffer — e.g. via canvas pixel reads or copyTextureToTexture3D —
// there is no per-layer setTexture API.)
const layerSize = 512;
const layerCount = 8;
const arrayData = new Uint8Array(layerSize * layerSize * 4 * layerCount);
const textureArray = new THREE.DataArrayTexture(arrayData, layerSize, layerSize, layerCount);
textureArray.needsUpdate = true;

// Shader fixes vs. the original:
//  - `layerIndex` was read in the vertex shader but never declared;
//  - `in`/`out` and sampler2DArray require GLSL ES 3.00;
//  - GLSL3 uses a declared `out vec4`, not gl_FragColor;
//  - gl_PointCoord is only defined for point sprites, so the mesh UV
//    is used instead.
const material = new THREE.ShaderMaterial({
    glslVersion: THREE.GLSL3,
    uniforms: {
        textureArray: { value: textureArray },
        layerIndex: { value: 0 }
    },
    vertexShader: `
        uniform float layerIndex;

        out float vLayer;
        out vec2 vUv;

        void main() {
            vLayer = layerIndex;
            vUv = uv;
            gl_Position = projectionMatrix *
                         modelViewMatrix *
                         vec4(position, 1.0);
        }
    `,
    fragmentShader: `
        precision highp sampler2DArray;

        in float vLayer;
        in vec2 vUv;
        uniform sampler2DArray textureArray;

        out vec4 fragColor;

        void main() {
            // Third texture coordinate selects the array layer.
            fragColor = texture(textureArray, vec3(vUv, vLayer));
        }
    `
});

GPGPU 计算(注意:WebGL 2.0 并不支持计算着色器——那是 OpenGL ES 3.1 / WebGPU 的特性;在 WebGL 2.0 中通常用片元着色器渲染到浮点纹理来模拟)

// BUG FIX: WebGL 2.0 does NOT support compute shaders — `#version 310 es`
// is OpenGL ES 3.1, and RawShaderMaterial has no `computeShader` option.
// The WebGL 2.0 way to do GPGPU is a full-screen fragment-shader pass
// into a floating-point render target: one fragment == one invocation.
const computeSize = 512;
const computeTarget = new THREE.WebGLRenderTarget(computeSize, computeSize, {
    type: THREE.FloatType,          // needs EXT_color_buffer_float
    minFilter: THREE.NearestFilter, // exact texel reads, no filtering
    magFilter: THREE.NearestFilter
});

const computeMaterial = new THREE.RawShaderMaterial({
    glslVersion: THREE.GLSL3,
    uniforms: {
        inputTexture: { value: null } // previous pass / input data
    },
    vertexShader: `
        in vec3 position;
        void main() {
            // Full-screen quad/triangle: pass positions straight through.
            gl_Position = vec4(position, 1.0);
        }
    `,
    fragmentShader: `
        precision highp float;

        uniform sampler2D inputTexture;
        out vec4 outData;

        void main() {
            // texelFetch reads the exact texel for this fragment.
            vec4 data = texelFetch(inputTexture, ivec2(gl_FragCoord.xy), 0);

            // The "kernel": double the RGB values.
            outData = vec4(data.rgb * 2.0, 1.0);
        }
    `
});
// Run the pass with: renderer.setRenderTarget(computeTarget);
// renderer.render(fullScreenQuadScene, orthoCamera);

深度纹理(Depth Texture)

// Visualize a depth texture as linearized eye-space depth.
// BUG FIX: gl_FragCoord.xy is in window PIXELS; it must be divided by
// the viewport resolution to get [0,1] texture coordinates — the
// original sampled with raw pixel coordinates.
const depthMaterial = new THREE.ShaderMaterial({
    uniforms: {
        depthTexture: { value: null },
        cameraNear: { value: camera.near },
        cameraFar: { value: camera.far },
        resolution: { value: new THREE.Vector2(window.innerWidth, window.innerHeight) }
    },
    vertexShader: `
        void main() {
            gl_Position = projectionMatrix *
                         modelViewMatrix *
                         vec4(position, 1.0);
        }
    `,
    fragmentShader: `
        uniform sampler2D depthTexture;
        uniform float cameraNear;
        uniform float cameraFar;
        uniform vec2 resolution;

        // Convert a [0,1] depth-buffer value back to linear eye-space
        // depth using the standard perspective inverse.
        float linearizeDepth(float depth) {
            float z = depth * 2.0 - 1.0; // back to NDC [-1,1]
            return (2.0 * cameraNear * cameraFar) /
                   (cameraFar + cameraNear - z * (cameraFar - cameraNear));
        }

        void main() {
            vec2 uv = gl_FragCoord.xy / resolution; // pixels -> [0,1]
            float depth = texture2D(depthTexture, uv).r;
            float linearDepth = linearizeDepth(depth);
            gl_FragColor = vec4(vec3(linearDepth), 1.0);
        }
    `
});

实用函数与检测

// Detect WebGL 2.0 support.
// BUG FIX: the original did `gl instanceof WebGL2RenderingContext`, which
// throws a ReferenceError on browsers where that global does not exist —
// i.e. exactly the browsers this check is meant to handle. getContext
// returns null for unsupported context types, so a null check suffices.
function checkWebGL2Support() {
    const canvas = document.createElement('canvas');
    const gl = canvas.getContext('webgl2');
    return gl !== null;
}

// Query extensions. NOTE: ANGLE_instanced_arrays (and OES_vertex_array_
// object) are WebGL 1 extensions — instancing and VAOs are core in
// WebGL 2, so these queries return null on a WebGL2 context.
const extensions = {
    colorBufferFloat: renderer.extensions.get('EXT_color_buffer_float'),
    textureFloatLinear: renderer.extensions.get('OES_texture_float_linear'),
    instancedArrays: renderer.extensions.get('ANGLE_instanced_arrays') // WebGL 1 only
};

// Performance: vertex array objects.
// BUG FIX: on WebGL 2 the OES_vertex_array_object extension is absent
// (it is core API), so the original extension-based branch never ran.
// Use the core WebGL2 entry points directly.
const gl2 = renderer.getContext();
if (renderer.capabilities.isWebGL2) {
    const vao = gl2.createVertexArray();
    gl2.bindVertexArray(vao);
}

1.1. 应用示例:体积渲染

class VolumeRenderer {
    constructor() {
        this.volumeTexture = null;
        this.renderTarget = null;

        // Ray-marching material. Rendered on back faces so the ray entry
        // point is well-defined even when the camera is inside the volume.
        this.material = new THREE.ShaderMaterial({
            uniforms: {
                volume: { value: null },
                steps: { value: 256 },     // ray-march sample count
                threshold: { value: 0.1 }  // opacity / iso-surface cutoff
            },
            vertexShader: volumeVertexShader,
            fragmentShader: volumeFragmentShader,
            side: THREE.BackSide,
            transparent: true
        });
    }

    /**
     * Load raw volume data and wrap it in a 3D texture.
     * BUG FIX: the original referenced undeclared free variables
     * `width`/`height`/`depth` (a ReferenceError at runtime); they are
     * now parameters with defaults, and `needsUpdate` is set so the
     * texture is actually uploaded.
     * @param {string} url - location of the raw volume file
     * @param {number} [width=128]  - voxels along X (must match the file)
     * @param {number} [height=128] - voxels along Y
     * @param {number} [depth=128]  - voxels along Z
     */
    async loadVolumeData(url, width = 128, height = 128, depth = 128) {
        const data = await this.loadRawData(url);
        this.volumeTexture = new THREE.DataTexture3D(data, width, height, depth);
        this.volumeTexture.needsUpdate = true;
        this.material.uniforms.volume.value = this.volumeTexture;
    }

    /**
     * Download a raw binary volume file as a typed array.
     * BUG FIX: this method was called by loadVolumeData but never defined.
     * @param {string} url
     * @returns {Promise<Uint8Array>} the file's bytes (one byte per voxel
     *     channel; adjust the view type if the dataset uses wider values)
     * @throws {Error} when the HTTP request fails
     */
    async loadRawData(url) {
        const response = await fetch(url);
        if (!response.ok) {
            throw new Error(`Failed to load volume data: ${response.status}`);
        }
        return new Uint8Array(await response.arrayBuffer());
    }
}

1.2. 最佳实践建议

  1. 渐进增强:检测 WebGL 2.0 支持,为不支持的用户提供降级方案
  2. 性能监控:使用 EXT_disjoint_timer_query 进行 GPU 时间查询
  3. 内存管理:及时删除不再需要的纹理和缓冲区
  4. 错误处理:检查着色器编译和链接状态

通过合理应用这些 WebGL 2.0 特性,可以创建更复杂、性能更高的 Three.js 应用。