WebGPU基础(四)- 地形
内容纲要
地形其实就是根据高程纹理(又称高度贴图)对平面网格的顶点做高度偏移后得到的几何体。
engine
修改之前的engine代码:
//...
import { setDevice, device, setCameraUniformBuffer, cameraUniformBuffer } from './common';
//...
// Create the camera uniform buffer (sized for one matrix) and publish it
// through common so other modules (e.g. the terrain plane) can bind it.
// NOTE(review): this snippet assumes it runs inside the engine class where
// this.matrixSize is the byte size of one mat4 — surrounding context not shown.
let _cameraUniformBuffer = device.createBuffer({
size: this.matrixSize,
// UNIFORM: bound in shader bind groups; COPY_DST: updated via queue.writeBuffer.
usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
});
setCameraUniformBuffer(_cameraUniformBuffer);
//...
在common中增加cameraUniformBuffer:
//...
/**
 * Module-shared handle to the camera's uniform buffer. Set once by the engine
 * via {@link setCameraUniformBuffer} and read by scene objects that bind the
 * camera uniforms into their bind groups.
 *
 * FIX: `export var` replaced with `export let` — `var` is legacy
 * function-scoped declaration syntax; `let` is the modern block-scoped form
 * with identical module-export semantics here.
 */
export let cameraUniformBuffer: GPUBuffer;

/**
 * Publishes the engine-created camera uniform buffer for other modules.
 *
 * @param _cameraUniformBuffer - buffer created by the engine (with
 *   UNIFORM | COPY_DST usage, per the engine snippet above).
 */
export function setCameraUniformBuffer(_cameraUniformBuffer: GPUBuffer) {
    cameraUniformBuffer = _cameraUniformBuffer;
}
//...
平面
修改之前创建物体的类,首先需要创建一个用来生成地形的方法:
//...
/**
 * Builds a flat grid mesh of numSegX × numSegY cells (two triangles each),
 * centered on the origin in the XY plane, normals pointing along +Z.
 *
 * Fixes vs. the original:
 *  - `heightHalf` was computed as `width / 2` (copy-paste bug); it is now
 *    `height / 2`, so non-square planes are centered correctly on Y.
 *  - The grid is iterated by integer segment index instead of accumulating
 *    floating-point steps, which could drop or duplicate a row/column due to
 *    rounding error in `x += xstep`.
 *
 * @param numSegX number of cells along X
 * @param numSegY number of cells along Y
 * @param width   total plane extent along X
 * @param height  total plane extent along Y
 * @returns a Mesh whose vertices form a non-indexed triangle list
 */
export function generatePlane(numSegX: number, numSegY: number, width: number, height: number): Mesh {
    const result = new Mesh();
    const normal = [0, 0, 1];
    const xstep = width / numSegX;
    const ystep = height / numSegY;
    const widthHalf = width / 2;
    const heightHalf = height / 2; // BUG FIX: was `width / 2`
    // U is mirrored (1 - u) so the texture is not flipped horizontally,
    // matching the original snippet's UV convention.
    const uvAt = (x: number, y: number): [number, number] =>
        [1 - (x + widthHalf) / width, (y + heightHalf) / height];
    for (let ix = 0; ix < numSegX; ix++) {
        for (let iy = 0; iy < numSegY; iy++) {
            const x0 = -widthHalf + ix * xstep;
            const y0 = -heightHalf + iy * ystep;
            const x1 = x0 + xstep;
            const y1 = y0 + ystep;
            // Two triangles per cell, same winding as the original:
            // (x0,y0)-(x1,y0)-(x0,y1) and (x0,y1)-(x1,y0)-(x1,y1).
            const corners: Array<[number, number]> = [
                [x0, y0], [x1, y0], [x0, y1],
                [x0, y1], [x1, y0], [x1, y1],
            ];
            for (const [x, y] of corners) {
                result.vertices.push({
                    pos: [x, y, 0],
                    norm: normal,
                    uv: uvAt(x, y),
                });
            }
        }
    }
    return result;
}
地形
然后需要将地形放在平面上,依旧需要管线和队列:
// Builds the terrain plane: applies the transform parameters, generates the
// grid mesh, creates the render pipeline, uploads vertex data, and wires the
// uniform buffers plus the height-map texture into one bind group.
// `heightBitmap` is the decoded height-map image — presumably sampled by the
// vertex shader to displace the grid; the shader source is not shown here.
constructor(parameter: Parameter3D, heightBitmap: ImageBitmap) {
this.setTransformation(parameter);
this.mesh = generatePlane(this.numSegX, this.numSegY, this.width, this.height);
// Render pipeline: interleaved pos/normal/uv vertex layout, back-face
// culling, depth test against a combined depth+stencil attachment.
this.renderPipeline = device.createRenderPipeline({
vertex: {
// NOTE(review): the name `vertexShadowCode` suggests a shadow-pass shader —
// confirm this is the intended vertex shader for the terrain pass.
module: device.createShaderModule({ code: vertexShadowCode, }),
entryPoint: 'main',
buffers: [
{
arrayStride: this.stride,
attributes: [
{
// @location(0): position, 3 × f32, at byte offset 0
shaderLocation: 0,
offset: 0,
format: 'float32x3',
},
{
// @location(1): normal, 3 × f32, after the 12-byte position
shaderLocation: 1,
offset: 3 * 4,
format: 'float32x3',
},
{
// @location(2): uv, 2 × f32, after position + normal (24 bytes)
shaderLocation: 2,
offset: (3 + 3) * 4,
format: 'float32x2',
},
],
} as GPUVertexBufferLayout,
],
},
fragment: {
module: device.createShaderModule({ code: fragShaderCode, }),
entryPoint: 'main',
targets: [
{
format: 'bgra8unorm' as GPUTextureFormat,
},
],
},
primitive: {
topology: 'triangle-list',
cullMode: 'back',
},
depthStencil: {
depthWriteEnabled: true,
depthCompare: 'less',
format: 'depth24plus-stencil8',
},
});
// Vertex buffer is filled once via a mapped range at creation, then unmapped;
// size = vertex count × byte stride.
this.verticesBuffer = device.createBuffer({
size: this.mesh.vertices.length * this.stride,
usage: GPUBufferUsage.VERTEX,
mappedAtCreation: true,
});
const mapping = new Float32Array(this.verticesBuffer.getMappedRange());
// Interleave pos(3) + norm(3) + uv(2) floats per vertex. `perVertex` is
// presumably the float count per vertex (stride / 4 = 8) — defined elsewhere.
for (let i = 0; i < this.mesh.vertices.length; i++) {
// (3 * 4) + (3 * 4) + (2 * 4)
mapping.set([(this.mesh as any).vertices[i].pos[0],
(this.mesh as any).vertices[i].pos[1],
(this.mesh as any).vertices[i].pos[2]], this.perVertex * i + 0);
mapping.set((this.mesh as any).vertices[i].norm, this.perVertex * i + 3);
mapping.set((this.mesh as any).vertices[i].uv, this.perVertex * i + 6);
}
this.verticesBuffer.unmap();
// Per-object uniform buffer; draw() writes the model matrix at byte 0 and
// the rotation-only matrix at byte 64.
this.transformationBuffer = device.createBuffer({
size: this.uniformBufferSize,
usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
});
// Bind group entries: binding 0 = this object's two matrices,
// binding 1 = the shared camera uniform buffer published via common.
const entries = [
{
binding: 0,
resource: {
buffer: this.transformationBuffer,
offset: 0,
size: this.matrixSize * 2,
},
},
{
binding: 1,
resource: {
buffer: cameraUniformBuffer,
offset: 0,
size: this.matrixSize,
},
},
];
// Upload the height map into an rgba8unorm texture and expose it at
// binding 2 for the shaders.
let height = device.createTexture({
size: [heightBitmap.width, heightBitmap.height, 1],
format: 'rgba8unorm',
usage: GPUTextureUsage.TEXTURE_BINDING |
GPUTextureUsage.COPY_DST |
GPUTextureUsage.RENDER_ATTACHMENT,
});
device.queue.copyExternalImageToTexture(
{ source: heightBitmap },
{ texture: height },
[heightBitmap.width, heightBitmap.height, 1]
);
entries.push({
binding: 2,
resource: height.createView(),
} as any);
this.transformationBindGroup = device.createBindGroup({
layout: this.renderPipeline.getBindGroupLayout(0),
entries: entries as Iterable<GPUBindGroupEntry>,
});
}
绘制
绘制的时候依然需要从common中获取device:
/**
 * Records this object's draw commands for the current frame: refreshes and
 * uploads the transform matrices, then issues a non-indexed draw of the mesh.
 *
 * @param passEncoder active render pass to record into
 * @param device      the shared GPUDevice (obtained from common by the caller)
 */
public draw(passEncoder: GPURenderPassEncoder, device: GPUDevice) {
    this.updateTransformationMatrix();
    passEncoder.setPipeline(this.renderPipeline);
    // Upload both matrices into the uniform buffer: the full model matrix at
    // byte 0 and the rotation-only matrix at byte 64 (one mat4 = 64 bytes).
    const uploads = [
        [0, this.transformMatrix],
        [64, this.rotateMatrix],
    ] as const;
    for (const [byteOffset, matrix] of uploads) {
        device.queue.writeBuffer(
            this.transformationBuffer,
            byteOffset,
            matrix.buffer,
            matrix.byteOffset,
            matrix.byteLength
        );
    }
    passEncoder.setVertexBuffer(0, this.verticesBuffer);
    passEncoder.setBindGroup(0, this.transformationBindGroup);
    // One vertex per mesh entry, single instance, no offsets.
    passEncoder.draw(this.mesh.vertices.length, 1, 0, 0);
}
接着是更新和设置矩阵:
/**
 * Recomputes the cached matrices from the current position/rotation state:
 * `transformMatrix` = translate(x,y,z) · Rx · Ry · Rz, and `rotateMatrix` =
 * Rx · Ry · Rz on its own (the rotation without translation).
 */
private updateTransformationMatrix() {
    // Apply the three Euler rotations in the fixed X→Y→Z order used everywhere.
    const applyRotations = (m: mat4) => {
        mat4.rotateX(m, m, this.rotX);
        mat4.rotateY(m, m, this.rotY);
        mat4.rotateZ(m, m, this.rotZ);
    };

    const model = mat4.create();
    mat4.translate(model, model, vec3.fromValues(this.x, this.y, this.z));
    applyRotations(model);

    const rotationOnly = mat4.create();
    applyRotations(rotationOnly);

    mat4.copy(this.transformMatrix, model);
    mat4.copy(this.rotateMatrix, rotationOnly);
}
/**
 * Copies position/rotation/size parameters onto the instance, defaulting
 * missing values (0 for position/rotation, 1 for size and segment counts).
 * A null/undefined parameter object leaves the instance untouched.
 *
 * FIX: replaces truthiness ternaries (`p.width ? p.width : 1`) with the
 * nullish-coalescing operator, so an explicitly passed 0 for width/height/
 * numSegX/numSegY is kept instead of being silently replaced by the default.
 * (For the fields whose default is 0, behavior is unchanged.)
 */
private setTransformation(parameter?: Parameter3D) {
    if (parameter == null) {
        return;
    }
    this.x = parameter.x ?? 0;
    this.y = parameter.y ?? 0;
    this.z = parameter.z ?? 0;
    this.rotX = parameter.rotX ?? 0;
    this.rotY = parameter.rotY ?? 0;
    this.rotZ = parameter.rotZ ?? 0;
    this.width = parameter.width ?? 1;
    this.height = parameter.height ?? 1;
    this.numSegX = parameter.numSegX ?? 1;
    this.numSegY = parameter.numSegY ?? 1;
}
场景
相机
场景和相机都不需要修改。
实例化
实例化之后,先创建bitmap:
//...
// Load the height-map image (hmpng3 is the bundled PNG URL) and decode it
// into an ImageBitmap that can be uploaded directly into a GPU texture.
const heightmapImage = document.createElement('img');
heightmapImage.src = hmpng3;
await heightmapImage.decode();
const heightBitmap = await createImageBitmap(heightmapImage);
//...
将bitmap放到平面上,并添加到场景中:
// Create the terrain plane: lie it flat (rotX = -90°) and use a dense
// 512×512 grid so the height map has enough vertices to displace.
// NOTE(review): posy / width / height / scene are defined elsewhere in the app.
const plane1 = new Plane({
y: -posy, width: width, height: height, rotX: -Math.PI / 2,
numSegX: 512, numSegY: 512
}, heightBitmap);
scene.add(plane1);
交互
修改鼠标滚轮交互:
// Wheel zoom: scrolling moves the camera along Z, scaled down by 100.
// FIX: the original computed `delta` and then recomputed `e.deltaY / 100`
// inline inside the branch — the variable is now used consistently (DRY);
// behavior is identical.
canvas.onwheel = (e: WheelEvent) => {
    const delta = e.deltaY / 100;
    // Only move when the camera stays past the limit: z > -delta ⇔ z + delta > 0.
    if (camera.z > -delta) {
        camera.z += delta;
    }
};
其他的鼠标、手势、键盘等交互操作保持不变。
注意:例子中的方法和属性可能随时调整,具体使用请参考W3C的官方文档
效果预览地址:

code enjoy! 🦖🦖🦖
作者:indeex
著作权归作者所有。商业转载请联系作者获得授权,非商业转载请注明出处。