[英]Three.js: Cannot display mesh created with texture array
我正在构建一个非常原始的游戏,玩法是在沙盒世界中放置立方体方块(一个完全独特的概念,它将彻底改变我们所知的游戏行业),目前我正在处理区块(chunk)的生成。 这是我到目前为止所拥有的:
我的块在 object 文字中定义:
import * as THREE from 'three';
const loader = new THREE.TextureLoader();
// Gameplay flags shared by every block type.
interface BlockAttrs
{
// Whether the player is allowed to break this block.
breakable: boolean;
// True for non-solid blocks (e.g. air); Chunk uses this both to skip face
// generation for the block itself and to decide whether a neighbour hides a face.
empty: boolean;
}
// A block type: its gameplay attributes plus the materials used to texture its
// faces. The materials are optional because empty blocks (air) are never drawn
// and therefore define none.
export interface Block
{
attrs: BlockAttrs;
// Material for the -y face (e.g. dirt under a grass block).
mat_bottom?: THREE.MeshBasicMaterial;
// Material shared by the four vertical (±x, ±z) faces.
mat_side?: THREE.MeshBasicMaterial;
// Material for the +y face.
mat_top?: THREE.MeshBasicMaterial;
}
// Registry shape mapping a block name (e.g. "air", "grass") to its definition.
interface BlockList
{
[key: string]: Block;
}
/** All block definitions, keyed by block name. */
export const Blocks: BlockList = {
  // Non-solid placeholder; never rendered, so it carries no materials.
  air: {
    attrs: { breakable: false, empty: true },
  },
  // Grass block: dirt underneath, grass on top, blended texture on the sides.
  grass: {
    attrs: { breakable: true, empty: false },
    mat_bottom: new THREE.MeshBasicMaterial({map: loader.load("/tex/dirt.png")}),
    mat_side: new THREE.MeshBasicMaterial({map: loader.load("/tex/grass-side.png")}),
    mat_top: new THREE.MeshBasicMaterial({map: loader.load("/tex/grass-top.png")}),
  },
};
这是我的Chunk
class:
import * as THREE from 'three';
import { BufferGeometryUtils } from 'three/examples/jsm/utils/BufferGeometryUtils';
import { Block, Blocks } from './blocks';
// Indices into Chunk.faces: the positive/negative x, y and z face geometries
// of a unit cube (px = +x, nx = -x, py = +y/top, ny = -y/bottom, pz = +z, nz = -z).
const px = 0; const nx = 1; const py = 2;
const ny = 3; const pz = 4; const nz = 5;
export default class Chunk
{
  /** Cubic chunk edge length, in blocks. */
  public static readonly size = 16;

  /**
   * Unit-square face geometry for each side of a 1x1x1 block centred on the
   * origin, indexed by the px/nx/py/ny/pz/nz constants declared above.
   * A fresh PlaneGeometry faces +z, so each face is first rotated into
   * orientation and then translated out to its side of the cube.
   */
  private static readonly faces = [
    new THREE.PlaneGeometry(1, 1).rotateY(Math.PI / 2).translate(0.5, 0, 0),   // px (+x)
    new THREE.PlaneGeometry(1, 1).rotateY(-Math.PI / 2).translate(-0.5, 0, 0), // nx (-x)
    new THREE.PlaneGeometry(1, 1).rotateX(-Math.PI / 2).translate(0, 0.5, 0),  // py (+y, top)
    new THREE.PlaneGeometry(1, 1).rotateX(Math.PI / 2).translate(0, -0.5, 0),  // ny (-y, bottom)
    new THREE.PlaneGeometry(1, 1).translate(0, 0, 0.5),                        // pz (+z)
    new THREE.PlaneGeometry(1, 1).rotateY(Math.PI).translate(0, 0, -0.5)       // nz (-z)
  ];

  // structure[x][y][z] holds the Block occupying each cell of the chunk.
  private structure: Array<Array<Array<Block>>>;
  // One material per generated face, kept parallel to `terrain`.
  private materials = Array<THREE.MeshBasicMaterial>();
  // One translated face geometry per visible block face.
  private terrain = Array<THREE.BufferGeometry>();

  /** Fills the chunk with a 3D checkerboard of grass and air for testing. */
  constructor ()
  {
    this.structure = new Array<Array<Array<Block>>>(Chunk.size);
    for (let x = 0; x < Chunk.size; x++)
    {
      this.structure[x] = new Array<Array<Block>>(Chunk.size);
      for (let y = 0; y < Chunk.size; y++)
      {
        this.structure[x][y] = new Array<Block>(Chunk.size);
        for (let z = 0; z < Chunk.size; z++)
          this.structure[x][y][z] = (x + y + z) % 2 ? Blocks.grass : Blocks.air;
      }
    }
  }

  /** True when (x, y, z) lies outside the chunk or holds an empty block. */
  private blockEmpty (x: number, y: number, z: number): boolean
  {
    const inside =
      x >= 0 && x < Chunk.size &&
      y >= 0 && y < Chunk.size &&
      z >= 0 && z < Chunk.size;
    // Out-of-bounds neighbours count as empty so boundary faces get drawn.
    return inside ? this.structure[x][y][z].attrs.empty : true;
  }

  /**
   * Pushes a face geometry and its material for every side of the block at
   * (x, y, z) that borders an empty cell; faces between two solid blocks are
   * culled.
   *
   * BUGFIX: each exposed neighbour now pushes the face geometry that actually
   * points toward that neighbour. Previously the indices were scrambled
   * (z+1 pushed faces[nx], y-1 pushed the top geometry with the bottom
   * material, etc.), so faces appeared on the wrong sides of blocks.
   */
  private generateBlockFaces (x: number, y: number, z: number): void
  {
    const block = this.structure[x][y][z];
    // Non-empty blocks are expected to define all three materials, hence `!`.
    if (this.blockEmpty(x + 1, y, z))
    {
      this.terrain.push(Chunk.faces[px].clone().translate(x, y, z));
      this.materials.push(block.mat_side!);
    }
    if (this.blockEmpty(x - 1, y, z))
    {
      this.terrain.push(Chunk.faces[nx].clone().translate(x, y, z));
      this.materials.push(block.mat_side!);
    }
    if (this.blockEmpty(x, y + 1, z))
    {
      this.terrain.push(Chunk.faces[py].clone().translate(x, y, z));
      this.materials.push(block.mat_top!);
    }
    if (this.blockEmpty(x, y - 1, z))
    {
      this.terrain.push(Chunk.faces[ny].clone().translate(x, y, z));
      this.materials.push(block.mat_bottom!);
    }
    if (this.blockEmpty(x, y, z + 1))
    {
      this.terrain.push(Chunk.faces[pz].clone().translate(x, y, z));
      this.materials.push(block.mat_side!);
    }
    if (this.blockEmpty(x, y, z - 1))
    {
      this.terrain.push(Chunk.faces[nz].clone().translate(x, y, z));
      this.materials.push(block.mat_side!);
    }
  }

  /**
   * Builds a single mesh containing every visible face in the chunk.
   *
   * BUGFIX: a Mesh given an ARRAY of materials draws nothing unless its
   * geometry has `groups` mapping ranges of the index buffer to material
   * indices. Passing `true` as the second argument of mergeBufferGeometries
   * creates one group per source geometry whose materialIndex equals that
   * geometry's position in the array — which lines up exactly with the
   * parallel `materials` array built by generateBlockFaces.
   */
  public generateTerrain (): THREE.Mesh
  {
    this.terrain = new Array<THREE.BufferGeometry>();
    this.materials = new Array<THREE.MeshBasicMaterial>();
    for (let x = 0; x < Chunk.size; x++)
      for (let y = 0; y < Chunk.size; y++)
        for (let z = 0; z < Chunk.size; z++)
          if (!this.structure[x][y][z].attrs.empty)
            this.generateBlockFaces(x, y, z);
    return new THREE.Mesh(
      BufferGeometryUtils.mergeBufferGeometries(this.terrain, true),
      this.materials
    );
  }
}
我知道网格创建器应该与 model 分离,但现在我正在试验。 class 的工作原理如下:
首先, constructor()
创建一个Block
的 3D 矩阵。 我已将其设置为以air
和grass
的棋盘图案创建它,因此其他所有方块都是空的。
接下来,我从场景中调用generateTerrain()
:
// Build the chunk and add its merged terrain mesh to this scene.
this.chunk = new Chunk();
this.add(this.chunk.generateTerrain());
调用此方法时,它会为每个非空块进入generateBlockFaces
,并将适当的PlaneGeometry
s 推送到terrain
数组中,并将适当的THREE.MeshBasicMaterial
送到materials
数组中。 然后我使用BufferGeometryUtils.mergeBufferGeometries
合并几何,并创建通过合并几何和materials
数组的网格。
我遇到的问题是,在传递new THREE.MeshNormalMaterial
或任何其他材料时创建网格效果很好,但当我传递materials
数组时却不行。 传递数组会创建 object (和console.log
显示它创建时没有错误),但它不是用场景绘制的。
我是否误以为materials
数组将为每个面分配材质? 我究竟做错了什么?
在文档中找到对THREE.UVMapping的引用后,我解决了它。 将几何图形发送到 GPU 时,纹理坐标需要与顶点坐标一一对应地一并提供。 为此,我在我的块中定义了以下三个属性:
// Atlas UV rectangles for the bottom/side/top faces: one (u, v) pair per
// vertex, in PlaneGeometry vertex order. stone_row/stone_col pick the tile
// inside the atlas grid; dividing by Textures.rows/cols normalises to [0, 1].
uv_bottom: [
stone_row / Textures.rows, (stone_col+1) / Textures.cols,
(stone_row+1) / Textures.rows, (stone_col+1) / Textures.cols,
stone_row / Textures.rows, stone_col / Textures.cols,
(stone_row+1) / Textures.rows, stone_col / Textures.cols,
],
uv_side: [
stone_row / Textures.rows, (stone_col+1) / Textures.cols,
(stone_row+1) / Textures.rows, (stone_col+1) / Textures.cols,
stone_row / Textures.rows, stone_col / Textures.cols,
(stone_row+1) / Textures.rows, stone_col / Textures.cols,
],
uv_top: [
stone_row / Textures.rows, (stone_col+1) / Textures.cols,
(stone_row+1) / Textures.rows, (stone_col+1) / Textures.cols,
stone_row / Textures.rows, stone_col / Textures.cols,
(stone_row+1) / Textures.rows, stone_col / Textures.cols,
],
Textures.rows
和Textures.cols
引用了我的纹理图集(即把所有纹理存储在一张 png 网格图里的文件)的行数和列数,并且每个方块都有自己的row
和col
字段,表示其在图集中的位置。 然后,我创建了一个private uv = Array<Array<number>>();
在我的Chunk
class 中并修改地形生成器以将块的 uv arrays 推到它。 例如,这是对正z面的处理方式(请注意,为了提高效率,我交换了y
和z
):
// +z face: push the geometry and this block's side-texture UVs in lockstep so
// the flattened uv array stays aligned with the merged vertex order.
// (note: in this version the structure is indexed [x][z][y] — y/z swapped)
if (this.blockEmpty(x, z+1, y))
{
this.terrain.push(Chunk.faces[pz].clone().translate(x, y, z));
this.uv.push(this.structure[x][z][y].uv_side);
}
现在, BufferGeometry
只接受'uv'
arrays 作为类型(在本例中为Float32Array
),所以我必须从this.uv
的扁平版本构造一个。 这就是地形生成器函数现在的样子:
/**
 * Rebuilds the chunk mesh: gathers one face geometry plus one UV rectangle
 * per visible block face, merges the geometries, then overwrites the merged
 * 'uv' attribute with the flattened atlas coordinates so a single atlas
 * material can texture every face.
 *
 * BUGFIX: `this.uv` is now reset alongside `this.terrain`. Previously only
 * the geometry list was cleared, so calling generateTerrain() a second time
 * appended duplicate UVs and desynchronised the uv attribute from the
 * merged vertex order.
 */
public generateTerrain (): THREE.Mesh
{
this.terrain = new Array<THREE.BufferGeometry>();
this.uv = new Array<Array<number>>();
for (let x = 0; x < Chunk.base; x++)
for (let z = 0; z < Chunk.base; z++)
for (let y = 0; y < Chunk.build_height; y++)
if (!this.structure[x][z][y].attrs.empty)
this.generateBlockFaces(x, z, y);
const geometry = BufferGeometryUtils.mergeBufferGeometries(this.terrain);
// Two floats (u, v) per vertex, in the same order the faces were pushed.
geometry.setAttribute('uv', new THREE.BufferAttribute(new Float32Array(this.uv.flat()), 2));
return new THREE.Mesh(geometry, Textures.material);
}
如您所见,我使用的材质来自Textures
class。 这是整个导入的文件:
import * as THREE from 'three';
// Shared texture-atlas resources: the atlas image, its grid dimensions, and
// one material (reused by every chunk mesh) whose map is the atlas.
export default class Textures
{
private static readonly loader = new THREE.TextureLoader();
// Number of tile rows/columns in the atlas image; UVs divide by these.
public static readonly rows = 3;
public static readonly cols = 2;
public static readonly atlas = Textures.loader.load("/tex/atlas.png");
public static readonly material = new THREE.MeshBasicMaterial({map: Textures.atlas});
}
// Nearest-neighbour sampling keeps pixel-art tiles crisp and avoids texels
// from neighbouring atlas tiles bleeding across tile edges when filtered.
Textures.atlas.magFilter = THREE.NearestFilter;
Textures.atlas.minFilter = THREE.NearestFilter;
就是这样:地形现在生成渲染每个块的纹理,我对此非常高兴:D
对于想要使用材质数组的人:
const materials = [
new THREE.MeshBasicMaterial( { color: 'red' } ),
new THREE.MeshBasicMaterial( { color: 'blue' } )
];
const geometries = [
new THREE.PlaneGeometry( 1, 1 ),
new THREE.PlaneGeometry( 1, 1 )
];
geometries[ 1 ].rotateX( Math.PI * -0.5 );
// Add groups that set the materialIndex of each vertex in a group
// For this code all the vertices in each geometry have the same material.
// If you load in a model that already has multiple textures you don't need to do this.
// NOTE(review): PlaneGeometry is indexed, and for indexed geometry a group's
// start/count refer to index-buffer entries — position.count (4) rather than
// index.count (6) may drop triangles; confirm against BufferGeometry.addGroup
// docs. (mergeBufferGeometries with useGroups=true builds its own per-geometry
// groups anyway, so these calls mainly serve as illustration.)
geometries[ 0 ].addGroup( 0, geometries[0].attributes.position.count, 0 );
geometries[ 1 ].addGroup( 0, geometries[1].attributes.position.count, 1 );
// Setting true on the second argument enables groups for the merged geometry.
const mergedGeometry = BufferGeometryUtils.mergeBufferGeometries( geometries, true );
const multiMaterialMesh = new THREE.Mesh( mergedGeometry, materials );
scene.add( multiMaterialMesh );
声明:本站的技术帖子网页,遵循CC BY-SA 4.0协议,如果您需要转载,请注明本站网址或者原文地址。任何问题请咨询:yoyou2525@163.com.