You're breathtaking!

Source/Engine/Tools/ModelTool/ModelTool.Assimp.cpp (new file, 790 lines)
@@ -0,0 +1,790 @@
|
||||
// Copyright (c) 2012-2020 Wojciech Figat. All rights reserved.
|
||||
|
||||
#if COMPILE_WITH_MODEL_TOOL && USE_ASSIMP
|
||||
|
||||
#include "ModelTool.h"
|
||||
#include "Engine/Core/Log.h"
|
||||
#include "Engine/Core/Math/Matrix.h"
|
||||
#include "Engine/Platform/FileSystem.h"
|
||||
#include "Engine/Tools/TextureTool/TextureTool.h"
|
||||
|
||||
// Import Assimp library
|
||||
// Source: https://github.com/assimp/assimp
|
||||
#define ASSIMP_BUILD_NO_EXPORT
|
||||
#include <ThirdParty/assimp/Importer.hpp>
|
||||
#include <ThirdParty/assimp/types.h>
|
||||
#include <ThirdParty/assimp/config.h>
|
||||
#include <ThirdParty/assimp/scene.h>
|
||||
#include <ThirdParty/assimp/version.h>
|
||||
#include <ThirdParty/assimp/postprocess.h>
|
||||
#include <ThirdParty/assimp/LogStream.hpp>
|
||||
#include <ThirdParty/assimp/DefaultLogger.hpp>
|
||||
#include <ThirdParty/assimp/Logger.hpp>
|
||||
using namespace Assimp;
|
||||
|
||||
class AssimpLogStream : public LogStream
|
||||
{
|
||||
public:
|
||||
|
||||
AssimpLogStream()
|
||||
{
|
||||
DefaultLogger::create("");
|
||||
DefaultLogger::get()->attachStream(this);
|
||||
}
|
||||
|
||||
~AssimpLogStream()
|
||||
{
|
||||
DefaultLogger::get()->detatchStream(this);
|
||||
DefaultLogger::kill();
|
||||
}
|
||||
|
||||
public:
|
||||
|
||||
void write(const char* message) override
|
||||
{
|
||||
String s(message);
|
||||
s.Replace('\n', ' ');
|
||||
LOG(Info, "[Assimp]: {0}", s);
|
||||
}
|
||||
};
|
||||
|
||||
Vector2 ToVector2(const aiVector2D& v)
|
||||
{
|
||||
return Vector2(v.x, v.y);
|
||||
}
|
||||
|
||||
Vector2 ToVector2(const aiVector3D& v)
|
||||
{
|
||||
return Vector2(v.x, v.y);
|
||||
}
|
||||
|
||||
Vector3 ToVector3(const aiVector3D& v)
|
||||
{
|
||||
return Vector3(v.x, v.y, v.z);
|
||||
}
|
||||
|
||||
Color ToColor(const aiColor3D& v)
|
||||
{
|
||||
return Color(v.r, v.g, v.b, 1.0f);
|
||||
}
|
||||
|
||||
Color ToColor(const aiColor4D& v)
|
||||
{
|
||||
return Color(v.r, v.g, v.b, v.a);
|
||||
}
|
||||
|
||||
Quaternion ToQuaternion(const aiQuaternion& v)
|
||||
{
|
||||
return Quaternion(v.x, v.y, v.z, v.w);
|
||||
}
|
||||
|
||||
Matrix ToMatrix(const aiMatrix4x4& mat)
|
||||
{
|
||||
return Matrix(mat.a1, mat.b1, mat.c1, mat.d1,
|
||||
mat.a2, mat.b2, mat.c2, mat.d2,
|
||||
mat.a3, mat.b3, mat.c3, mat.d3,
|
||||
mat.a4, mat.b4, mat.c4, mat.d4);
|
||||
}
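// Note: aiMatrix4x4 names its elements letter-for-row, digit-for-column (a1 = row 1, column 1),
// so passing a1, b1, c1, d1 as the first Matrix row above transposes Assimp's layout into the
// engine's row-major convention. A minimal sketch of the reverse conversion, assuming the engine
// Matrix exposes its elements as M11..M44 (helper name is illustrative, not part of this file):
aiMatrix4x4 ToAssimpMatrix(const Matrix& m)
{
    return aiMatrix4x4(
        m.M11, m.M21, m.M31, m.M41,
        m.M12, m.M22, m.M32, m.M42,
        m.M13, m.M23, m.M33, m.M43,
        m.M14, m.M24, m.M34, m.M44);
}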
|
||||
|
||||
struct AssimpNode
|
||||
{
|
||||
/// <summary>
|
||||
/// The parent index. The root node uses value -1.
|
||||
/// </summary>
|
||||
int32 ParentIndex;
|
||||
|
||||
/// <summary>
|
||||
/// The local transformation of the node, relative to the parent node.
|
||||
/// </summary>
|
||||
Transform LocalTransform;
|
||||
|
||||
/// <summary>
|
||||
/// The name of this node.
|
||||
/// </summary>
|
||||
String Name;
|
||||
|
||||
/// <summary>
|
||||
/// The LOD index of the data in this node (used to separate meshes across different levels of detail).
|
||||
/// </summary>
|
||||
int32 LodIndex;
|
||||
};
|
||||
|
||||
struct AssimpBone
|
||||
{
|
||||
/// <summary>
|
||||
/// The index of the related node.
|
||||
/// </summary>
|
||||
int32 NodeIndex;
|
||||
|
||||
/// <summary>
|
||||
/// The parent bone index. The root bone uses value -1.
|
||||
/// </summary>
|
||||
int32 ParentBoneIndex;
|
||||
|
||||
/// <summary>
|
||||
/// The name of this bone.
|
||||
/// </summary>
|
||||
String Name;
|
||||
|
||||
/// <summary>
|
||||
/// The matrix that transforms from mesh space to bone space in bind pose.
|
||||
/// </summary>
|
||||
Matrix OffsetMatrix;
|
||||
|
||||
bool operator<(const AssimpBone& other) const
|
||||
{
|
||||
return NodeIndex < other.NodeIndex;
|
||||
}
|
||||
};
|
||||
|
||||
struct AssimpImporterData
|
||||
{
|
||||
ImportedModelData& Model;
|
||||
const String Path;
|
||||
const aiScene* Scene;
|
||||
const ModelTool::Options& Options;
|
||||
|
||||
Array<AssimpNode> Nodes;
|
||||
Array<AssimpBone> Bones;
|
||||
Dictionary<int32, Array<int32>> MeshIndexToNodeIndex;
|
||||
|
||||
AssimpImporterData(const char* path, ImportedModelData& model, const ModelTool::Options& options, const aiScene* scene)
|
||||
: Model(model)
|
||||
, Path(path)
|
||||
, Scene(scene)
|
||||
, Options(options)
|
||||
, Nodes(static_cast<int32>(scene->mNumMeshes * 4.0f))
|
||||
, MeshIndexToNodeIndex(static_cast<int32>(scene->mNumMeshes * 8.0f))
|
||||
{
|
||||
}
|
||||
|
||||
int32 FindNode(const String& name, StringSearchCase caseSensitivity = StringSearchCase::CaseSensitive)
|
||||
{
|
||||
for (int32 i = 0; i < Nodes.Count(); i++)
|
||||
{
|
||||
if (Nodes[i].Name.Compare(name, caseSensitivity) == 0)
|
||||
return i;
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
int32 FindBone(const String& name, StringSearchCase caseSensitivity = StringSearchCase::CaseSensitive)
|
||||
{
|
||||
for (int32 i = 0; i < Bones.Count(); i++)
|
||||
{
|
||||
if (Bones[i].Name.Compare(name, caseSensitivity) == 0)
|
||||
return i;
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
int32 FindBone(const int32 nodeIndex)
|
||||
{
|
||||
for (int32 i = 0; i < Bones.Count(); i++)
|
||||
{
|
||||
if (Bones[i].NodeIndex == nodeIndex)
|
||||
return i;
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
};
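// Note: FindNode and FindBone above are linear scans, and ProcessMesh calls them once per mesh
// bone, so lookups cost O(nodes) each. A minimal sketch of an optional cached lookup, assuming
// Dictionary exposes TryGet/Add as used elsewhere in this file (the cache and helper are
// illustrative additions, not part of this commit):
int32 FindNodeCached(AssimpImporterData& data, Dictionary<String, int32>& cache, const String& name)
{
    int32 index;
    if (cache.TryGet(name, index))
        return index;
    index = data.FindNode(name);
    if (index != -1)
        cache.Add(name, index);
    return index;
}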
|
||||
|
||||
void ProcessNodes(AssimpImporterData& data, aiNode* aNode, int32 parentIndex)
|
||||
{
|
||||
const int32 nodeIndex = data.Nodes.Count();
|
||||
|
||||
// Assign the index of the node to the index of the mesh
|
||||
for (unsigned i = 0; i < aNode->mNumMeshes; i++)
|
||||
{
|
||||
int meshIndex = aNode->mMeshes[i];
|
||||
data.MeshIndexToNodeIndex[meshIndex].Add(nodeIndex);
|
||||
}
|
||||
|
||||
// Create node
|
||||
AssimpNode node;
|
||||
node.ParentIndex = parentIndex;
|
||||
node.Name = aNode->mName.C_Str();
|
||||
|
||||
// Pick node LOD index
|
||||
if (parentIndex == -1 || !data.Options.ImportLODs)
|
||||
{
|
||||
node.LodIndex = 0;
|
||||
}
|
||||
else
|
||||
{
|
||||
node.LodIndex = data.Nodes[parentIndex].LodIndex;
|
||||
if (node.LodIndex == 0)
|
||||
{
|
||||
node.LodIndex = ModelTool::DetectLodIndex(node.Name);
|
||||
}
|
||||
ASSERT(Math::IsInRange(node.LodIndex, 0, MODEL_MAX_LODS - 1));
|
||||
}
|
||||
|
||||
Matrix transform = ToMatrix(aNode->mTransformation);
|
||||
transform.Decompose(node.LocalTransform);
|
||||
data.Nodes.Add(node);
|
||||
|
||||
// Process the children
|
||||
for (unsigned i = 0; i < aNode->mNumChildren; i++)
|
||||
{
|
||||
ProcessNodes(data, aNode->mChildren[i], nodeIndex);
|
||||
}
|
||||
}
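// Note: ModelTool::DetectLodIndex is not included in this diff; conceptually it maps a node-name
// suffix such as "_LOD1" to an LOD index. A purely hypothetical sketch of that idea (the real
// engine implementation may differ; String::Length and operator[] are assumed here):
int32 DetectLodIndexFromName(const String& name)
{
    const int32 length = name.Length();
    if (length >= 4 &&
        name[length - 4] == 'L' &&
        name[length - 3] == 'O' &&
        name[length - 2] == 'D' &&
        name[length - 1] >= '0' && name[length - 1] <= '9')
    {
        return name[length - 1] - '0';
    }
    return 0;
}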
|
||||
|
||||
bool ProcessMesh(AssimpImporterData& data, const aiMesh* aMesh, MeshData& mesh, String& errorMsg)
|
||||
{
|
||||
// Properties
|
||||
mesh.Name = aMesh->mName.C_Str();
|
||||
mesh.MaterialSlotIndex = aMesh->mMaterialIndex;
|
||||
|
||||
// Vertex positions
|
||||
mesh.Positions.Set((const Vector3*)aMesh->mVertices, aMesh->mNumVertices);
|
||||
|
||||
// Texture coordinates
|
||||
if (aMesh->mTextureCoords[0])
|
||||
{
|
||||
mesh.UVs.Resize(aMesh->mNumVertices, false);
|
||||
aiVector3D* a = aMesh->mTextureCoords[0];
|
||||
for (uint32 v = 0; v < aMesh->mNumVertices; v++)
|
||||
{
|
||||
mesh.UVs[v] = *(Vector2*)a;
|
||||
a++;
|
||||
}
|
||||
}
|
||||
|
||||
// Normals
|
||||
if (aMesh->mNormals)
|
||||
{
|
||||
mesh.Normals.Set((const Vector3*)aMesh->mNormals, aMesh->mNumVertices);
|
||||
}
|
||||
|
||||
// Tangents
|
||||
if (aMesh->mTangents)
|
||||
{
|
||||
mesh.Tangents.Set((const Vector3*)aMesh->mTangents, aMesh->mNumVertices);
|
||||
}
|
||||
|
||||
// Indices
|
||||
const int32 indicesCount = aMesh->mNumFaces * 3;
|
||||
mesh.Indices.Resize(indicesCount, false);
|
||||
for (unsigned faceIndex = 0, i = 0; faceIndex < aMesh->mNumFaces; faceIndex++)
|
||||
{
|
||||
const auto face = &aMesh->mFaces[faceIndex];
|
||||
if (face->mNumIndices != 3)
|
||||
{
|
||||
errorMsg = TEXT("All faces in a mesh must be triangles!");
|
||||
return true;
|
||||
}
|
||||
|
||||
mesh.Indices[i++] = face->mIndices[0];
|
||||
mesh.Indices[i++] = face->mIndices[1];
|
||||
mesh.Indices[i++] = face->mIndices[2];
|
||||
}
|
||||
|
||||
// Lightmap UVs
|
||||
if (data.Options.LightmapUVsSource == ModelLightmapUVsSource::Disable)
|
||||
{
|
||||
// No lightmap UVs
|
||||
}
|
||||
else if (data.Options.LightmapUVsSource == ModelLightmapUVsSource::Generate)
|
||||
{
|
||||
// Generate lightmap UVs
|
||||
if (mesh.GenerateLightmapUVs())
|
||||
{
|
||||
LOG(Error, "Failed to generate lightmap uvs");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// Select input channel index
|
||||
int32 inputChannelIndex;
|
||||
switch (data.Options.LightmapUVsSource)
|
||||
{
|
||||
case ModelLightmapUVsSource::Channel0:
|
||||
inputChannelIndex = 0;
|
||||
break;
|
||||
case ModelLightmapUVsSource::Channel1:
|
||||
inputChannelIndex = 1;
|
||||
break;
|
||||
case ModelLightmapUVsSource::Channel2:
|
||||
inputChannelIndex = 2;
|
||||
break;
|
||||
case ModelLightmapUVsSource::Channel3:
|
||||
inputChannelIndex = 3;
|
||||
break;
|
||||
default:
|
||||
inputChannelIndex = INVALID_INDEX;
|
||||
break;
|
||||
}
|
||||
|
||||
// Check if the mesh has texcoords in that channel
|
||||
if (inputChannelIndex >= 0 && inputChannelIndex < AI_MAX_NUMBER_OF_TEXTURECOORDS && aMesh->mTextureCoords[inputChannelIndex])
|
||||
{
|
||||
mesh.LightmapUVs.Resize(aMesh->mNumVertices, false);
|
||||
aiVector3D* a = aMesh->mTextureCoords[inputChannelIndex];
|
||||
for (uint32 v = 0; v < aMesh->mNumVertices; v++)
|
||||
{
|
||||
mesh.LightmapUVs[v] = *(Vector2*)a;
|
||||
a++;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
LOG(Warning, "Cannot import model lightmap uvs. Missing texcoords channel {0}.", inputChannelIndex);
|
||||
}
|
||||
}
|
||||
|
||||
// Vertex Colors
|
||||
if (data.Options.ImportVertexColors && aMesh->mColors[0])
|
||||
{
|
||||
mesh.Colors.Resize(aMesh->mNumVertices, false);
|
||||
aiColor4D* a = aMesh->mColors[0];
|
||||
for (uint32 v = 0; v < aMesh->mNumVertices; v++)
|
||||
{
|
||||
mesh.Colors[v] = *(Color*)a;
|
||||
a++;
|
||||
}
|
||||
}
|
||||
|
||||
// Blend Indices and Blend Weights
|
||||
if (aMesh->mNumBones > 0 && aMesh->mBones && data.Model.Types & ImportDataTypes::Skeleton)
|
||||
{
|
||||
const int32 vertexCount = mesh.Positions.Count();
|
||||
mesh.BlendIndices.Resize(vertexCount);
|
||||
mesh.BlendWeights.Resize(vertexCount);
|
||||
mesh.BlendIndices.SetAll(Int4::Zero);
|
||||
mesh.BlendWeights.SetAll(Vector4::Zero);
|
||||
|
||||
// Build skinning clusters and fill the control points data structure
|
||||
for (unsigned boneId = 0; boneId < aMesh->mNumBones; boneId++)
|
||||
{
|
||||
const auto aBone = aMesh->mBones[boneId];
|
||||
|
||||
// Find the node where the bone is mapped - based on the name
|
||||
const String boneName(aBone->mName.C_Str());
|
||||
const int32 nodeIndex = data.FindNode(boneName);
|
||||
if (nodeIndex == -1)
|
||||
{
|
||||
LOG(Warning, "Invalid mesh bone linkage. Mesh: {0}, bone: {1}. Skipping...", mesh.Name, boneName);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Create bone if missing
|
||||
int32 boneIndex = data.FindBone(boneName);
|
||||
if (boneIndex == -1)
|
||||
{
|
||||
// Find the parent bone
|
||||
int32 parentBoneIndex = -1;
|
||||
for (int32 i = nodeIndex; i != -1; i = data.Nodes[i].ParentIndex)
|
||||
{
|
||||
parentBoneIndex = data.FindBone(i);
|
||||
if (parentBoneIndex != -1)
|
||||
break;
|
||||
}
|
||||
|
||||
// Add bone
|
||||
boneIndex = data.Bones.Count();
|
||||
data.Bones.EnsureCapacity(Math::Max(128, boneIndex + 16));
|
||||
data.Bones.Resize(boneIndex + 1);
|
||||
auto& bone = data.Bones[boneIndex];
|
||||
|
||||
// Setup bone
|
||||
bone.Name = boneName;
|
||||
bone.NodeIndex = nodeIndex;
|
||||
bone.ParentBoneIndex = parentBoneIndex;
|
||||
bone.OffsetMatrix = ToMatrix(aBone->mOffsetMatrix);
|
||||
}
|
||||
|
||||
// Apply the bone influences
|
||||
for (unsigned vtxWeightId = 0; vtxWeightId < aBone->mNumWeights; vtxWeightId++)
|
||||
{
|
||||
const auto vtxWeight = aBone->mWeights[vtxWeightId];
|
||||
|
||||
if (vtxWeight.mWeight <= 0 || vtxWeight.mVertexId >= (unsigned)vertexCount)
|
||||
continue;
|
||||
|
||||
auto& indices = mesh.BlendIndices[vtxWeight.mVertexId];
|
||||
auto& weights = mesh.BlendWeights[vtxWeight.mVertexId];
|
||||
|
||||
for (int32 k = 0; k < 4; k++)
|
||||
{
|
||||
if (vtxWeight.mWeight >= weights.Raw[k])
|
||||
{
|
||||
for (int32 l = 2; l >= k; l--)
|
||||
{
|
||||
indices.Raw[l + 1] = indices.Raw[l];
|
||||
weights.Raw[l + 1] = weights.Raw[l];
|
||||
}
|
||||
|
||||
indices.Raw[k] = boneIndex;
|
||||
weights.Raw[k] = vtxWeight.mWeight;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
mesh.NormalizeBlendWeights();
|
||||
}
|
||||
|
||||
// Blend Shapes
|
||||
if (aMesh->mNumAnimMeshes > 0 && data.Model.Types & ImportDataTypes::Skeleton && data.Options.ImportBlendShapes)
|
||||
{
|
||||
mesh.BlendShapes.EnsureCapacity(aMesh->mNumAnimMeshes);
|
||||
for (unsigned int animMeshIndex = 0; animMeshIndex < aMesh->mNumAnimMeshes; animMeshIndex++)
|
||||
{
|
||||
const aiAnimMesh* aAnimMesh = aMesh->mAnimMeshes[animMeshIndex];
|
||||
|
||||
BlendShape& blendShapeData = mesh.BlendShapes.AddOne();
|
||||
blendShapeData.Name = aAnimMesh->mName.C_Str();
|
||||
if (blendShapeData.Name.IsEmpty())
|
||||
blendShapeData.Name = mesh.Name + TEXT("_blend_shape_") + StringUtils::ToString(animMeshIndex);
|
||||
blendShapeData.Weight = aAnimMesh->mWeight;
|
||||
blendShapeData.Vertices.Resize(aAnimMesh->mNumVertices);
|
||||
for (int32 i = 0; i < blendShapeData.Vertices.Count(); i++)
|
||||
blendShapeData.Vertices[i].VertexIndex = i;
|
||||
|
||||
const aiVector3D* shapeVertices = aAnimMesh->mVertices;
|
||||
if (shapeVertices)
|
||||
{
|
||||
for (int32 i = 0; i < blendShapeData.Vertices.Count(); i++)
|
||||
blendShapeData.Vertices[i].PositionDelta = ToVector3(shapeVertices[i]) - mesh.Positions[i];
|
||||
}
|
||||
else
|
||||
{
|
||||
for (int32 i = 0; i < blendShapeData.Vertices.Count(); i++)
|
||||
blendShapeData.Vertices[i].PositionDelta = Vector3::Zero;
|
||||
}
|
||||
|
||||
const aiVector3D* shapeNormals = aAnimMesh->mNormals;
|
||||
if (shapeNormals)
|
||||
{
|
||||
for (int32 i = 0; i < blendShapeData.Vertices.Count(); i++)
|
||||
blendShapeData.Vertices[i].NormalDelta = ToVector3(shapeNormals[i]) - mesh.Normals[i];
|
||||
}
|
||||
else
|
||||
{
|
||||
for (int32 i = 0; i < blendShapeData.Vertices.Count(); i++)
|
||||
blendShapeData.Vertices[i].NormalDelta = Vector3::Zero;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
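// Note: the per-vertex weight handling in ProcessMesh above keeps the four largest influences in
// descending order by shifting smaller entries to the right. A minimal stand-alone sketch of that
// step (hypothetical helper, same Int4/Vector4 layout as used above):
void AddBoneInfluence(Int4& indices, Vector4& weights, int32 boneIndex, float weight)
{
    for (int32 k = 0; k < 4; k++)
    {
        if (weight >= weights.Raw[k])
        {
            // Shift the smaller influences down to make room at slot k
            for (int32 l = 2; l >= k; l--)
            {
                indices.Raw[l + 1] = indices.Raw[l];
                weights.Raw[l + 1] = weights.Raw[l];
            }
            indices.Raw[k] = boneIndex;
            weights.Raw[k] = weight;
            break;
        }
    }
}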
|
||||
|
||||
bool ImportTexture(AssimpImporterData& data, aiString& aFilename, int32& textureIndex, TextureEntry::TypeHint type)
|
||||
{
|
||||
// Find texture file path
|
||||
const String filename = String(aFilename.C_Str()).TrimTrailing();
|
||||
String path;
|
||||
if (ModelTool::FindTexture(data.Path, filename, path))
|
||||
return true;
|
||||
|
||||
// Check if already used
|
||||
textureIndex = 0;
|
||||
while (textureIndex < data.Model.Textures.Count())
|
||||
{
|
||||
if (data.Model.Textures[textureIndex].FilePath == path)
|
||||
return true;
|
||||
textureIndex++;
|
||||
}
|
||||
|
||||
// Import texture
|
||||
auto& texture = data.Model.Textures.AddOne();
|
||||
texture.FilePath = path;
|
||||
texture.Type = type;
|
||||
texture.AssetID = Guid::Empty;
|
||||
return true;
|
||||
}
|
||||
|
||||
bool ImportMaterialTexture(AssimpImporterData& data, const aiMaterial* aMaterial, aiTextureType aTextureType, int32& textureIndex, TextureEntry::TypeHint type)
|
||||
{
|
||||
aiString aFilename;
|
||||
return aMaterial->GetTexture(aTextureType, 0, &aFilename, nullptr, nullptr, nullptr, nullptr) == AI_SUCCESS &&
|
||||
ImportTexture(data, aFilename, textureIndex, type);
|
||||
}
|
||||
|
||||
bool ImportMaterials(AssimpImporterData& data, String& errorMsg)
|
||||
{
|
||||
const uint32 materialsCount = (uint32)data.Scene->mNumMaterials;
|
||||
data.Model.Materials.Resize(materialsCount, false);
|
||||
for (uint32 i = 0; i < materialsCount; i++)
|
||||
{
|
||||
auto& materialSlot = data.Model.Materials[i];
|
||||
const aiMaterial* aMaterial = data.Scene->mMaterials[i];
|
||||
|
||||
aiString aName;
|
||||
if (aMaterial->Get(AI_MATKEY_NAME, aName) == AI_SUCCESS)
|
||||
materialSlot.Name = String(aName.C_Str()).TrimTrailing();
|
||||
materialSlot.AssetID = Guid::Empty;
|
||||
aiColor3D aColor;
|
||||
if (aMaterial->Get(AI_MATKEY_COLOR_DIFFUSE, aColor) == AI_SUCCESS)
|
||||
materialSlot.Diffuse.Color = ToColor(aColor);
|
||||
bool aBoolean;
|
||||
if (aMaterial->Get(AI_MATKEY_TWOSIDED, aBoolean) == AI_SUCCESS)
|
||||
materialSlot.TwoSided = aBoolean;
|
||||
float aFloat;
|
||||
if (aMaterial->Get(AI_MATKEY_OPACITY, aFloat) == AI_SUCCESS)
|
||||
materialSlot.Opacity.Value = aFloat;
|
||||
|
||||
if (data.Model.Types & ImportDataTypes::Textures)
|
||||
{
|
||||
ImportMaterialTexture(data, aMaterial, aiTextureType_DIFFUSE, materialSlot.Diffuse.TextureIndex, TextureEntry::TypeHint::ColorRGB);
|
||||
ImportMaterialTexture(data, aMaterial, aiTextureType_EMISSIVE, materialSlot.Emissive.TextureIndex, TextureEntry::TypeHint::ColorRGB);
|
||||
ImportMaterialTexture(data, aMaterial, aiTextureType_NORMALS, materialSlot.Normals.TextureIndex, TextureEntry::TypeHint::Normals);
|
||||
ImportMaterialTexture(data, aMaterial, aiTextureType_OPACITY, materialSlot.Opacity.TextureIndex, TextureEntry::TypeHint::ColorRGBA);
|
||||
|
||||
if (materialSlot.Diffuse.TextureIndex != -1)
|
||||
{
|
||||
// Detect using alpha mask in diffuse texture
|
||||
materialSlot.Diffuse.HasAlphaMask = TextureTool::HasAlpha(data.Model.Textures[materialSlot.Diffuse.TextureIndex].FilePath);
|
||||
if (materialSlot.Diffuse.HasAlphaMask)
|
||||
data.Model.Textures[materialSlot.Diffuse.TextureIndex].Type = TextureEntry::TypeHint::ColorRGBA;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
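// Note: ImportMaterials above reads only the name, diffuse color, two-sided flag and opacity.
// Further Assimp material keys follow the same pattern; a small sketch reading the emissive
// color (assuming the material slot exposes an Emissive.Color field, which is not shown in
// this diff), placed inside the per-material loop:
//     aiColor3D aEmissive;
//     if (aMaterial->Get(AI_MATKEY_COLOR_EMISSIVE, aEmissive) == AI_SUCCESS)
//         materialSlot.Emissive.Color = ToColor(aEmissive);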
|
||||
|
||||
bool ImportMeshes(AssimpImporterData& data, String& errorMsg)
|
||||
{
|
||||
for (unsigned i = 0; i < data.Scene->mNumMeshes; i++)
|
||||
{
|
||||
const auto aMesh = data.Scene->mMeshes[i];
|
||||
|
||||
// Skip invalid meshes
|
||||
if (aMesh->mPrimitiveTypes != aiPrimitiveType_TRIANGLE || aMesh->mNumVertices == 0 || aMesh->mNumFaces == 0 || aMesh->mFaces[0].mNumIndices != 3)
|
||||
continue;
|
||||
|
||||
// Skip unused meshes
|
||||
if (!data.MeshIndexToNodeIndex.ContainsKey(i))
|
||||
continue;
|
||||
|
||||
// Import mesh data
|
||||
MeshData* meshData = New<MeshData>();
|
||||
if (ProcessMesh(data, aMesh, *meshData, errorMsg))
|
||||
return true;
|
||||
|
||||
auto& nodesWithMesh = data.MeshIndexToNodeIndex[i];
|
||||
for (int32 j = 0; j < nodesWithMesh.Count(); j++)
|
||||
{
|
||||
const auto nodeIndex = nodesWithMesh[j];
|
||||
auto& node = data.Nodes[nodeIndex];
|
||||
const int32 lodIndex = node.LodIndex;
|
||||
|
||||
// The first mesh instance uses meshData directly while others have to clone it
|
||||
if (j != 0)
|
||||
{
|
||||
meshData = New<MeshData>(*meshData);
|
||||
}
|
||||
|
||||
// Link mesh
|
||||
meshData->NodeIndex = nodeIndex;
|
||||
if (data.Model.LODs.Count() <= lodIndex)
|
||||
data.Model.LODs.Resize(lodIndex + 1);
|
||||
data.Model.LODs[lodIndex].Meshes.Add(meshData);
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
void ImportCurve(aiVectorKey* keys, uint32 keysCount, LinearCurve<Vector3>& curve)
|
||||
{
|
||||
if (keys == nullptr || keysCount == 0)
|
||||
return;
|
||||
|
||||
const auto keyframes = curve.Resize(keysCount);
|
||||
|
||||
for (uint32 i = 0; i < keysCount; i++)
|
||||
{
|
||||
auto& aKey = keys[i];
|
||||
auto& key = keyframes[i];
|
||||
|
||||
key.Time = (float)aKey.mTime;
|
||||
key.Value = ToVector3(aKey.mValue);
|
||||
}
|
||||
}
|
||||
|
||||
void ImportCurve(aiQuatKey* keys, uint32 keysCount, LinearCurve<Quaternion>& curve)
|
||||
{
|
||||
if (keys == nullptr || keysCount == 0)
|
||||
return;
|
||||
|
||||
const auto keyframes = curve.Resize(keysCount);
|
||||
|
||||
for (uint32 i = 0; i < keysCount; i++)
|
||||
{
|
||||
auto& aKey = keys[i];
|
||||
auto& key = keyframes[i];
|
||||
|
||||
key.Time = (float)aKey.mTime;
|
||||
key.Value = ToQuaternion(aKey.mValue);
|
||||
}
|
||||
}
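// Note: the two ImportCurve overloads above differ only in key and value types, so they could be
// folded into a single template; a minimal sketch (the converter is any callable mapping the
// Assimp value to the engine value, e.g. ToVector3 or ToQuaternion; name is illustrative):
template<typename TAssimpKey, typename TValue, typename TConvert>
void ImportCurveT(TAssimpKey* keys, uint32 keysCount, LinearCurve<TValue>& curve, TConvert convert)
{
    if (keys == nullptr || keysCount == 0)
        return;
    const auto keyframes = curve.Resize(keysCount);
    for (uint32 i = 0; i < keysCount; i++)
    {
        keyframes[i].Time = (float)keys[i].mTime;
        keyframes[i].Value = convert(keys[i].mValue);
    }
}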
|
||||
|
||||
static bool AssimpInited = false;
|
||||
|
||||
bool ModelTool::ImportDataAssimp(const char* path, ImportedModelData& data, const Options& options, String& errorMsg)
|
||||
{
|
||||
// Prepare
|
||||
if (!AssimpInited)
|
||||
{
|
||||
AssimpInited = true;
|
||||
|
||||
// Log Assimp version
|
||||
LOG(Info, "Assimp {0}.{1}.{2}", aiGetVersionMajor(), aiGetVersionMinor(), aiGetVersionRevision());
|
||||
}
|
||||
Importer importer;
|
||||
AssimpLogStream assimpLogStream;
|
||||
bool importMeshes = (data.Types & ImportDataTypes::Geometry) != 0;
|
||||
bool importAnimations = (data.Types & ImportDataTypes::Animations) != 0;
|
||||
|
||||
// Setup import flags
|
||||
unsigned int flags =
|
||||
aiProcess_JoinIdenticalVertices |
|
||||
aiProcess_LimitBoneWeights |
|
||||
aiProcess_Triangulate |
|
||||
aiProcess_GenUVCoords |
|
||||
aiProcess_FindDegenerates |
|
||||
aiProcess_FindInvalidData |
|
||||
//aiProcess_ValidateDataStructure |
|
||||
aiProcess_ConvertToLeftHanded;
|
||||
if (importMeshes)
|
||||
{
|
||||
if (options.CalculateNormals)
|
||||
flags |= aiProcess_FixInfacingNormals | aiProcess_GenSmoothNormals;
|
||||
if (options.CalculateTangents)
|
||||
flags |= aiProcess_CalcTangentSpace;
|
||||
if (options.OptimizeMeshes)
|
||||
flags |= aiProcess_OptimizeMeshes | aiProcess_SplitLargeMeshes | aiProcess_ImproveCacheLocality;
|
||||
if (options.MergeMeshes)
|
||||
flags |= aiProcess_RemoveRedundantMaterials;
|
||||
}
|
||||
|
||||
// Setup import options
|
||||
importer.SetPropertyFloat(AI_CONFIG_PP_GSN_MAX_SMOOTHING_ANGLE, options.SmoothingNormalsAngle);
|
||||
importer.SetPropertyFloat(AI_CONFIG_PP_CT_MAX_SMOOTHING_ANGLE, options.SmoothingTangentsAngle);
|
||||
//importer.SetPropertyInteger(AI_CONFIG_PP_SLM_TRIANGLE_LIMIT, MAX_uint16);
|
||||
importer.SetPropertyBool(AI_CONFIG_IMPORT_FBX_READ_CAMERAS, false);
|
||||
importer.SetPropertyBool(AI_CONFIG_IMPORT_FBX_READ_LIGHTS, false);
|
||||
importer.SetPropertyBool(AI_CONFIG_IMPORT_FBX_READ_TEXTURES, false);
|
||||
importer.SetPropertyBool(AI_CONFIG_IMPORT_FBX_READ_ANIMATIONS, importAnimations);
|
||||
//importer.SetPropertyBool(AI_CONFIG_IMPORT_FBX_PRESERVE_PIVOTS, false); // TODO: optimize pivots when https://github.com/assimp/assimp/issues/1068 gets fixed
|
||||
importer.SetPropertyBool(AI_CONFIG_IMPORT_FBX_OPTIMIZE_EMPTY_ANIMATION_CURVES, true);
|
||||
|
||||
// Import file
|
||||
const auto scene = importer.ReadFile(path, flags);
|
||||
if (scene == nullptr)
|
||||
{
|
||||
LOG_STR(Warning, String(importer.GetErrorString()));
|
||||
LOG_STR(Warning, String(path));
|
||||
LOG_STR(Warning, StringUtils::ToString(flags));
|
||||
errorMsg = importer.GetErrorString();
|
||||
return true;
|
||||
}
|
||||
|
||||
// Process imported scene nodes
|
||||
AssimpImporterData assimpData(path, data, options, scene);
|
||||
ProcessNodes(assimpData, scene->mRootNode, -1);
|
||||
|
||||
// Import materials
|
||||
if (data.Types & ImportDataTypes::Materials)
|
||||
{
|
||||
if (ImportMaterials(assimpData, errorMsg))
|
||||
{
|
||||
LOG(Warning, "Failed to import materials.");
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
// Import geometry
|
||||
if (data.Types & ImportDataTypes::Geometry)
|
||||
{
|
||||
if (ImportMeshes(assimpData, errorMsg))
|
||||
{
|
||||
LOG(Warning, "Failed to import meshes.");
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
// Import skeleton
|
||||
if (data.Types & ImportDataTypes::Skeleton)
|
||||
{
|
||||
data.Skeleton.Nodes.Resize(assimpData.Nodes.Count(), false);
|
||||
for (int32 i = 0; i < assimpData.Nodes.Count(); i++)
|
||||
{
|
||||
auto& node = data.Skeleton.Nodes[i];
|
||||
auto& aNode = assimpData.Nodes[i];
|
||||
|
||||
node.Name = aNode.Name;
|
||||
node.ParentIndex = aNode.ParentIndex;
|
||||
node.LocalTransform = aNode.LocalTransform;
|
||||
}
|
||||
|
||||
data.Skeleton.Bones.Resize(assimpData.Bones.Count(), false);
|
||||
for (int32 i = 0; i < assimpData.Bones.Count(); i++)
|
||||
{
|
||||
auto& bone = data.Skeleton.Bones[i];
|
||||
auto& aBone = assimpData.Bones[i];
|
||||
|
||||
const auto boneNodeIndex = aBone.NodeIndex;
|
||||
const auto parentBoneNodeIndex = aBone.ParentBoneIndex == -1 ? -1 : assimpData.Bones[aBone.ParentBoneIndex].NodeIndex;
|
||||
|
||||
bone.ParentIndex = aBone.ParentBoneIndex;
|
||||
bone.NodeIndex = aBone.NodeIndex;
|
||||
bone.LocalTransform = CombineTransformsFromNodeIndices(assimpData.Nodes, parentBoneNodeIndex, boneNodeIndex);
|
||||
bone.OffsetMatrix = aBone.OffsetMatrix;
|
||||
}
|
||||
}
|
||||
|
||||
// Import animations
|
||||
if (data.Types & ImportDataTypes::Animations)
|
||||
{
|
||||
if (scene->HasAnimations())
|
||||
{
|
||||
const auto animIndex = Math::Clamp<int32>(options.AnimationIndex, 0, scene->mNumAnimations - 1);
|
||||
const auto animations = scene->mAnimations[animIndex];
|
||||
data.Animation.Channels.Resize(animations->mNumChannels, false);
|
||||
data.Animation.Duration = animations->mDuration;
|
||||
data.Animation.FramesPerSecond = animations->mTicksPerSecond != 0.0 ? animations->mTicksPerSecond : 25.0;
|
||||
|
||||
for (unsigned i = 0; i < animations->mNumChannels; i++)
|
||||
{
|
||||
const auto aAnim = animations->mChannels[i];
|
||||
auto& anim = data.Animation.Channels[i];
|
||||
|
||||
anim.NodeName = aAnim->mNodeName.C_Str();
|
||||
|
||||
ImportCurve(aAnim->mPositionKeys, aAnim->mNumPositionKeys, anim.Position);
|
||||
ImportCurve(aAnim->mRotationKeys, aAnim->mNumRotationKeys, anim.Rotation);
|
||||
ImportCurve(aAnim->mScalingKeys, aAnim->mNumScalingKeys, anim.Scale);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
LOG(Warning, "Loaded scene has no animations");
|
||||
}
|
||||
}
|
||||
|
||||
// Import nodes
|
||||
if (data.Types & ImportDataTypes::Nodes)
|
||||
{
|
||||
data.Nodes.Resize(assimpData.Nodes.Count());
|
||||
for (int32 i = 0; i < assimpData.Nodes.Count(); i++)
|
||||
{
|
||||
auto& node = data.Nodes[i];
|
||||
auto& aNode = assimpData.Nodes[i];
|
||||
|
||||
node.Name = aNode.Name;
|
||||
node.ParentIndex = aNode.ParentIndex;
|
||||
node.LocalTransform = aNode.LocalTransform;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
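// Note: a minimal usage sketch of the importer entry point above. The exact Options and
// ImportedModelData construction comes from elsewhere in the engine, so the initialization
// below is illustrative only (true return value signals failure, as in the code above):
//     ImportedModelData data;
//     data.Types = ImportDataTypes::Geometry | ImportDataTypes::Materials | ImportDataTypes::Nodes;
//     ModelTool::Options options;
//     options.CalculateTangents = true;
//     String errorMsg;
//     if (ModelTool::ImportDataAssimp("model.fbx", data, options, errorMsg))
//         LOG(Error, "Model import failed: {0}", errorMsg);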
|
||||
|
||||
#endif
|
||||
Source/Engine/Tools/ModelTool/ModelTool.AutodeskFbxSdk.cpp (new file, 980 lines)
@@ -0,0 +1,980 @@
|
||||
// Copyright (c) 2012-2020 Wojciech Figat. All rights reserved.
|
||||
|
||||
#if COMPILE_WITH_MODEL_TOOL && USE_AUTODESK_FBX_SDK
|
||||
|
||||
#include "ModelTool.h"
|
||||
#include "Engine/Core/Log.h"
|
||||
#include "Engine/Core/Collections/Array.h"
|
||||
#include "Engine/Core/Math/Matrix.h"
|
||||
#include "Engine/Threading/Threading.h"
|
||||
|
||||
// Import Autodesk FBX SDK
|
||||
#define FBXSDK_NEW_API
|
||||
#include <fbxsdk.h>
|
||||
|
||||
class FbxSdkManager
|
||||
{
|
||||
public:
|
||||
|
||||
static FbxManager* Manager;
|
||||
static CriticalSection Locker;
|
||||
|
||||
static void Init()
|
||||
{
|
||||
if (Manager == nullptr)
|
||||
{
|
||||
LOG_STR(Info, String("Autodesk FBX SDK " FBXSDK_VERSION_STRING_FULL));
|
||||
|
||||
Manager = FbxManager::Create();
|
||||
if (Manager == nullptr)
|
||||
{
|
||||
LOG(Fatal, "Autodesk FBX SDK failed to initialize.");
|
||||
return;
|
||||
}
|
||||
|
||||
FbxIOSettings* ios = FbxIOSettings::Create(Manager, IOSROOT);
|
||||
ios->SetBoolProp(IMP_FBX_TEXTURE, false);
|
||||
ios->SetBoolProp(IMP_FBX_GOBO, false);
|
||||
Manager->SetIOSettings(ios);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
FbxManager* FbxSdkManager::Manager = nullptr;
|
||||
CriticalSection FbxSdkManager::Locker;
|
||||
|
||||
Matrix ToFlaxType(const FbxAMatrix& value)
|
||||
{
|
||||
Matrix native;
|
||||
for (int32 row = 0; row < 4; row++)
|
||||
for (int32 col = 0; col < 4; col++)
|
||||
native.Values[row][col] = (float)value[col][row];
|
||||
|
||||
return native;
|
||||
}
|
||||
|
||||
Vector3 ToFlaxType(const FbxVector4& value)
|
||||
{
|
||||
Vector3 native;
|
||||
native.X = (float)value[0];
|
||||
native.Y = (float)value[1];
|
||||
native.Z = (float)value[2];
|
||||
|
||||
return native;
|
||||
}
|
||||
|
||||
Vector3 ToFlaxType(const FbxDouble3& value)
|
||||
{
|
||||
Vector3 native;
|
||||
native.X = (float)value[0];
|
||||
native.Y = (float)value[1];
|
||||
native.Z = (float)value[2];
|
||||
|
||||
return native;
|
||||
}
|
||||
|
||||
Vector2 ToFlaxType(const FbxVector2& value)
|
||||
{
|
||||
Vector2 native;
|
||||
native.X = (float)value[0];
|
||||
native.Y = 1 - (float)value[1];
|
||||
|
||||
return native;
|
||||
}
|
||||
|
||||
Color ToFlaxType(const FbxColor& value)
|
||||
{
|
||||
Color native;
|
||||
native.R = (float)value[0];
|
||||
native.G = (float)value[1];
|
||||
native.B = (float)value[2];
|
||||
native.A = (float)value[3];
|
||||
|
||||
return native;
|
||||
}
|
||||
|
||||
int ToFlaxType(const int& value)
|
||||
{
|
||||
return value;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Represents a single node in the FBX transform hierarchy.
|
||||
/// </summary>
|
||||
struct Node
|
||||
{
|
||||
/// <summary>
|
||||
/// The parent index. The root node uses value -1.
|
||||
/// </summary>
|
||||
int32 ParentIndex;
|
||||
|
||||
/// <summary>
|
||||
/// The local transformation of the node, relative to parent node.
|
||||
/// </summary>
|
||||
Transform LocalTransform;
|
||||
|
||||
/// <summary>
|
||||
/// The name of this node.
|
||||
/// </summary>
|
||||
String Name;
|
||||
|
||||
/// <summary>
|
||||
/// The LOD index of the data in this node (used to separate meshes across different levels of detail).
|
||||
/// </summary>
|
||||
int32 LodIndex;
|
||||
|
||||
Matrix GeomTransform;
|
||||
Matrix WorldTransform;
|
||||
FbxNode* FbxNode;
|
||||
};
|
||||
|
||||
struct Bone
|
||||
{
|
||||
/// <summary>
|
||||
/// The index of the related node.
|
||||
/// </summary>
|
||||
int32 NodeIndex;
|
||||
|
||||
/// <summary>
|
||||
/// The parent bone index. The root bone uses value -1.
|
||||
/// </summary>
|
||||
int32 ParentBoneIndex;
|
||||
|
||||
/// <summary>
|
||||
/// The name of this bone.
|
||||
/// </summary>
|
||||
String Name;
|
||||
|
||||
/// <summary>
|
||||
/// The matrix that transforms from mesh space to bone space in bind pose.
|
||||
/// </summary>
|
||||
Matrix OffsetMatrix;
|
||||
};
|
||||
|
||||
struct ImporterData
|
||||
{
|
||||
ImportedModelData& Model;
|
||||
const FbxScene* Scene;
|
||||
const ModelTool::Options& Options;
|
||||
|
||||
Array<Node> Nodes;
|
||||
Array<Bone> Bones;
|
||||
|
||||
Dictionary<FbxMesh*, MeshData*> Meshes;
|
||||
Array<FbxSurfaceMaterial*> Materials;
|
||||
|
||||
ImporterData(ImportedModelData& model, const ModelTool::Options& options, const FbxScene* scene)
|
||||
: Model(model)
|
||||
, Scene(scene)
|
||||
, Options(options)
|
||||
, Nodes(256)
|
||||
, Meshes(256)
|
||||
, Materials(64)
|
||||
{
|
||||
}
|
||||
|
||||
int32 FindNode(FbxNode* fbxNode)
|
||||
{
|
||||
for (int32 i = 0; i < Nodes.Count(); i++)
|
||||
{
|
||||
if (Nodes[i].FbxNode == fbxNode)
|
||||
return i;
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
int32 FindNode(const String& name, StringSearchCase caseSensitivity = StringSearchCase::CaseSensitive)
|
||||
{
|
||||
for (int32 i = 0; i < Nodes.Count(); i++)
|
||||
{
|
||||
if (Nodes[i].Name.Compare(name, caseSensitivity) == 0)
|
||||
return i;
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
int32 FindBone(const String& name, StringSearchCase caseSensitivity = StringSearchCase::CaseSensitive)
|
||||
{
|
||||
for (int32 i = 0; i < Bones.Count(); i++)
|
||||
{
|
||||
if (Bones[i].Name.Compare(name, caseSensitivity) == 0)
|
||||
return i;
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
int32 FindBone(const int32 nodeIndex)
|
||||
{
|
||||
for (int32 i = 0; i < Bones.Count(); i++)
|
||||
{
|
||||
if (Bones[i].NodeIndex == nodeIndex)
|
||||
return i;
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
};
|
||||
|
||||
void ProcessNodes(ImporterData& data, FbxNode* fbxNode, int32 parentIndex)
|
||||
{
|
||||
const int32 nodeIndex = data.Nodes.Count();
|
||||
|
||||
Vector3 translation = ToFlaxType(fbxNode->EvaluateLocalTranslation(FbxTime(0)));
|
||||
Vector3 rotationEuler = ToFlaxType(fbxNode->EvaluateLocalRotation(FbxTime(0)));
|
||||
Vector3 scale = ToFlaxType(fbxNode->EvaluateLocalScaling(FbxTime(0)));
|
||||
Quaternion rotation = Quaternion::Euler(rotationEuler);
|
||||
|
||||
// Create node
|
||||
Node node;
|
||||
node.ParentIndex = parentIndex;
|
||||
node.Name = String(fbxNode->GetNameWithoutNameSpacePrefix().Buffer());
|
||||
node.LocalTransform = Transform(translation, rotation, scale);
|
||||
node.FbxNode = fbxNode;
|
||||
|
||||
// Geometry transform is applied to geometry (mesh data) only, it is not inherited by children, so we store it separately
|
||||
Vector3 geomTrans = ToFlaxType(fbxNode->GeometricTranslation.Get());
|
||||
Vector3 geomRotEuler = ToFlaxType(fbxNode->GeometricRotation.Get());
|
||||
Vector3 geomScale = ToFlaxType(fbxNode->GeometricScaling.Get());
|
||||
Quaternion geomRotation = Quaternion::Euler(geomRotEuler);
|
||||
Transform(geomTrans, geomRotation, geomScale).GetWorld(node.GeomTransform);
|
||||
|
||||
// Pick node LOD index
|
||||
if (parentIndex == -1 || !data.Options.ImportLODs)
|
||||
{
|
||||
node.LodIndex = 0;
|
||||
}
|
||||
else
|
||||
{
|
||||
node.LodIndex = data.Nodes[parentIndex].LodIndex;
|
||||
if (node.LodIndex == 0)
|
||||
{
|
||||
node.LodIndex = ModelTool::DetectLodIndex(node.Name);
|
||||
}
|
||||
ASSERT(Math::IsInRange(node.LodIndex, 0, MODEL_MAX_LODS - 1));
|
||||
}
|
||||
|
||||
if (parentIndex == -1)
|
||||
{
|
||||
node.LocalTransform.GetWorld(node.WorldTransform);
|
||||
}
|
||||
else
|
||||
{
|
||||
node.WorldTransform = data.Nodes[parentIndex].WorldTransform * node.LocalTransform.GetWorld();
|
||||
}
|
||||
data.Nodes.Add(node);
|
||||
|
||||
// Process the children
|
||||
for (int i = 0; i < fbxNode->GetChildCount(); i++)
|
||||
{
|
||||
ProcessNodes(data, fbxNode->GetChild(i), nodeIndex);
|
||||
}
|
||||
}
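// Note: ProcessNodes appends children after their parent, so ParentIndex always refers to an
// earlier element of the array. That ordering lets world transforms be rebuilt in one forward
// pass; a minimal sketch mirroring the composition used above (helper name is illustrative):
void RebuildWorldTransforms(Array<Node>& nodes)
{
    for (int32 i = 0; i < nodes.Count(); i++)
    {
        Node& node = nodes[i];
        if (node.ParentIndex == -1)
            node.LocalTransform.GetWorld(node.WorldTransform);
        else
            node.WorldTransform = nodes[node.ParentIndex].WorldTransform * node.LocalTransform.GetWorld();
    }
}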
|
||||
|
||||
template<class TFBX, class TNative>
|
||||
void ReadLayerData(FbxMesh* fbxMesh, FbxLayerElementTemplate<TFBX>& layer, Array<TNative>& output)
|
||||
{
|
||||
if (layer.GetDirectArray().GetCount() == 0)
|
||||
return;
|
||||
|
||||
int32 vertexCount = fbxMesh->GetControlPointsCount();
|
||||
int32 triangleCount = fbxMesh->GetPolygonCount();
|
||||
output.Resize(vertexCount);
|
||||
|
||||
switch (layer.GetMappingMode())
|
||||
{
|
||||
case FbxLayerElement::eByControlPoint:
|
||||
{
|
||||
for (int vertexIndex = 0; vertexIndex < vertexCount; vertexIndex++)
|
||||
{
|
||||
int index = 0;
|
||||
if (layer.GetReferenceMode() == FbxGeometryElement::eDirect)
|
||||
index = vertexIndex;
|
||||
else if (layer.GetReferenceMode() == FbxGeometryElement::eIndexToDirect)
|
||||
index = layer.GetIndexArray().GetAt(vertexIndex);
|
||||
|
||||
output[vertexIndex] = ToFlaxType(layer.GetDirectArray().GetAt(index));
|
||||
}
|
||||
}
|
||||
break;
|
||||
case FbxLayerElement::eByPolygonVertex:
|
||||
{
|
||||
int indexByPolygonVertex = 0;
|
||||
|
||||
for (int polygonIndex = 0; polygonIndex < triangleCount; polygonIndex++)
|
||||
{
|
||||
const int polygonSize = fbxMesh->GetPolygonSize(polygonIndex);
|
||||
for (int i = 0; i < polygonSize; i++)
|
||||
{
|
||||
int index = 0;
|
||||
if (layer.GetReferenceMode() == FbxGeometryElement::eDirect)
|
||||
index = indexByPolygonVertex;
|
||||
else if (layer.GetReferenceMode() == FbxGeometryElement::eIndexToDirect)
|
||||
index = layer.GetIndexArray().GetAt(indexByPolygonVertex);
|
||||
|
||||
int vertexIndex = fbxMesh->GetPolygonVertex(polygonIndex, i);
|
||||
output[vertexIndex] = ToFlaxType(layer.GetDirectArray().GetAt(index));
|
||||
|
||||
indexByPolygonVertex++;
|
||||
}
|
||||
}
|
||||
}
|
||||
break;
|
||||
case FbxLayerElement::eAllSame:
|
||||
{
|
||||
output[0] = ToFlaxType(layer.GetDirectArray().GetAt(0));
|
||||
for (int vertexIndex = 1; vertexIndex < vertexCount; vertexIndex++)
|
||||
{
|
||||
output[vertexIndex] = output[0];
|
||||
}
|
||||
}
|
||||
break;
|
||||
default:
|
||||
LOG(Warning, "Unsupported layer mapping mode.");
|
||||
break;
|
||||
}
|
||||
}
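// Note: ReadLayerData writes every attribute into a per-control-point array, so for the
// eByPolygonVertex mapping mode, attributes that differ per face corner (e.g. hard-edge normals)
// collapse to whichever polygon corner is visited last. A typical call site, as used further
// below for UVs and normals:
//     FbxGeometryElementUV* texcoords = fbxMesh->GetElementUV(0);
//     if (texcoords)
//         ReadLayerData(fbxMesh, *texcoords, mesh.UVs);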
|
||||
|
||||
bool IsGroupMappingModeByEdge(FbxLayerElement* layerElement)
|
||||
{
|
||||
return layerElement->GetMappingMode() == FbxLayerElement::eByEdge;
|
||||
}
|
||||
|
||||
bool ProcessMesh(ImporterData& data, FbxMesh* fbxMesh, MeshData& mesh, String& errorMsg)
|
||||
{
|
||||
// Properties
|
||||
mesh.Name = fbxMesh->GetName();
|
||||
mesh.MaterialSlotIndex = -1;
|
||||
if (fbxMesh->GetElementMaterial())
|
||||
{
|
||||
const auto materialIndices = &(fbxMesh->GetElementMaterial()->GetIndexArray());
|
||||
if (materialIndices)
|
||||
{
|
||||
mesh.MaterialSlotIndex = materialIndices->GetAt(0);
|
||||
}
|
||||
}
|
||||
|
||||
int32 vertexCount = fbxMesh->GetControlPointsCount();
|
||||
int32 triangleCount = fbxMesh->GetPolygonCount();
|
||||
FbxVector4* controlPoints = fbxMesh->GetControlPoints();
|
||||
FbxGeometryElementNormal* normalElement = fbxMesh->GetElementNormal();
|
||||
FbxGeometryElementTangent* tangentElement = fbxMesh->GetElementTangent();
|
||||
|
||||
// Regenerate data if necessary
|
||||
if (normalElement == nullptr || data.Options.CalculateNormals)
|
||||
{
|
||||
fbxMesh->GenerateNormals(true, false, false);
|
||||
normalElement = fbxMesh->GetElementNormal();
|
||||
}
|
||||
if (tangentElement == nullptr || data.Options.CalculateTangents)
|
||||
{
|
||||
fbxMesh->GenerateTangentsData(0, true);
|
||||
tangentElement = fbxMesh->GetElementTangent();
|
||||
}
|
||||
|
||||
bool needEdgeIndexing = false;
|
||||
if (normalElement)
|
||||
needEdgeIndexing |= IsGroupMappingModeByEdge(normalElement);
|
||||
|
||||
// Vertex positions
|
||||
mesh.Positions.Resize(vertexCount, false);
|
||||
for (int32 i = 0; i < vertexCount; i++)
|
||||
{
|
||||
mesh.Positions[i] = ToFlaxType(controlPoints[i]);
|
||||
}
|
||||
|
||||
// Indices
|
||||
const int32 indexCount = triangleCount * 3;
|
||||
mesh.Indices.Resize(indexCount, false);
|
||||
int* fbxIndices = fbxMesh->GetPolygonVertices();
|
||||
for (int32 i = 0; i < indexCount; i++)
|
||||
{
|
||||
mesh.Indices[i] = fbxIndices[i];
|
||||
}
|
||||
|
||||
// Texture coordinates
|
||||
FbxGeometryElementUV* texcoords = fbxMesh->GetElementUV(0);
|
||||
if (texcoords)
|
||||
{
|
||||
ReadLayerData(fbxMesh, *texcoords, mesh.UVs);
|
||||
}
|
||||
|
||||
// Normals
|
||||
if (normalElement)
|
||||
{
|
||||
ReadLayerData(fbxMesh, *normalElement, mesh.Normals);
|
||||
}
|
||||
|
||||
// Tangents
|
||||
if (tangentElement)
|
||||
{
|
||||
ReadLayerData(fbxMesh, *tangentElement, mesh.Tangents);
|
||||
}
|
||||
|
||||
// Lightmap UVs
|
||||
if (data.Options.LightmapUVsSource == ModelLightmapUVsSource::Disable)
|
||||
{
|
||||
// No lightmap UVs
|
||||
}
|
||||
else if (data.Options.LightmapUVsSource == ModelLightmapUVsSource::Generate)
|
||||
{
|
||||
// Generate lightmap UVs
|
||||
if (mesh.GenerateLightmapUVs())
|
||||
{
|
||||
// TODO: we could propagate this message to Debug Console in editor? or create interface to gather some msgs from importing service
|
||||
LOG(Warning, "Failed to generate lightmap uvs");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// Select input channel index
|
||||
int32 inputChannelIndex;
|
||||
switch (data.Options.LightmapUVsSource)
|
||||
{
|
||||
case ModelLightmapUVsSource::Channel0:
|
||||
inputChannelIndex = 0;
|
||||
break;
|
||||
case ModelLightmapUVsSource::Channel1:
|
||||
inputChannelIndex = 1;
|
||||
break;
|
||||
case ModelLightmapUVsSource::Channel2:
|
||||
inputChannelIndex = 2;
|
||||
break;
|
||||
case ModelLightmapUVsSource::Channel3:
|
||||
inputChannelIndex = 3;
|
||||
break;
|
||||
default:
|
||||
inputChannelIndex = INVALID_INDEX;
|
||||
break;
|
||||
}
|
||||
|
||||
// Check if the mesh has texcoords in that channel
|
||||
if (inputChannelIndex >= 0 && inputChannelIndex < fbxMesh->GetElementUVCount() && fbxMesh->GetElementUV(inputChannelIndex))
|
||||
{
|
||||
ReadLayerData(fbxMesh, *fbxMesh->GetElementUV(inputChannelIndex), mesh.LightmapUVs);
|
||||
}
|
||||
else
|
||||
{
|
||||
// TODO: we could propagate this message to Debug Console in editor? or create interface to gather some msgs from importing service
|
||||
LOG(Warning, "Cannot import model lightmap uvs. Missing texcoords channel {0}.", inputChannelIndex);
|
||||
}
|
||||
}
|
||||
|
||||
// Vertex Colors
|
||||
if (data.Options.ImportVertexColors && fbxMesh->GetElementVertexColorCount() > 0)
|
||||
{
|
||||
auto vertexColorElement = fbxMesh->GetElementVertexColor(0);
|
||||
ReadLayerData(fbxMesh, *vertexColorElement, mesh.Colors);
|
||||
}
|
||||
|
||||
// Blend Indices and Blend Weights
|
||||
const int skinDeformerCount = fbxMesh->GetDeformerCount(FbxDeformer::eSkin);
|
||||
if (skinDeformerCount > 0)
|
||||
{
|
||||
const int32 vertexCount = mesh.Positions.Count();
|
||||
mesh.BlendIndices.Resize(vertexCount);
|
||||
mesh.BlendWeights.Resize(vertexCount);
|
||||
mesh.BlendIndices.SetAll(Int4::Zero);
|
||||
mesh.BlendWeights.SetAll(Vector4::Zero);
|
||||
|
||||
for (int deformerIndex = 0; deformerIndex < skinDeformerCount; deformerIndex++)
|
||||
{
|
||||
FbxSkin* skin = FbxCast<FbxSkin>(fbxMesh->GetDeformer(deformerIndex, FbxDeformer::eSkin));
|
||||
int totalClusterCount = skin->GetClusterCount();
|
||||
|
||||
for (int clusterIndex = 0; clusterIndex < totalClusterCount; ++clusterIndex)
|
||||
{
|
||||
FbxCluster* cluster = skin->GetCluster(clusterIndex);
|
||||
int indexCount = cluster->GetControlPointIndicesCount();
|
||||
if (indexCount == 0)
|
||||
continue;
|
||||
FbxNode* link = cluster->GetLink();
|
||||
const String boneName(link->GetName());
|
||||
|
||||
// Find the node where the bone is mapped - based on the name
|
||||
const int32 nodeIndex = data.FindNode(link);
|
||||
if (nodeIndex == -1)
|
||||
{
|
||||
LOG(Warning, "Invalid mesh bone linkage. Mesh: {0}, bone: {1}. Skipping...", mesh.Name, boneName);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Create bone if missing
|
||||
int32 boneIndex = data.FindBone(boneName);
|
||||
if (boneIndex == -1)
|
||||
{
|
||||
// Find the parent bone
|
||||
int32 parentBoneIndex = -1;
|
||||
for (int32 i = nodeIndex; i != -1; i = data.Nodes[i].ParentIndex)
|
||||
{
|
||||
parentBoneIndex = data.FindBone(i);
|
||||
if (parentBoneIndex != -1)
|
||||
break;
|
||||
}
|
||||
|
||||
// Add bone
|
||||
boneIndex = data.Bones.Count();
|
||||
data.Bones.EnsureCapacity(Math::Max(128, boneIndex + 16));
|
||||
data.Bones.Resize(boneIndex + 1);
|
||||
auto& bone = data.Bones[boneIndex];
|
||||
|
||||
FbxAMatrix transformMatrix;
|
||||
FbxAMatrix transformLinkMatrix;
|
||||
cluster->GetTransformMatrix(transformMatrix);
|
||||
cluster->GetTransformLinkMatrix(transformLinkMatrix);
|
||||
const auto globalBindposeInverseMatrix = transformLinkMatrix.Inverse() * transformMatrix;
|
||||
|
||||
// Setup bone
|
||||
bone.Name = boneName;
|
||||
bone.NodeIndex = nodeIndex;
|
||||
bone.ParentBoneIndex = parentBoneIndex;
|
||||
bone.OffsetMatrix = ToFlaxType(globalBindposeInverseMatrix);
|
||||
}
|
||||
|
||||
// Apply the bone influences
|
||||
int* clusterIndices = cluster->GetControlPointIndices();
|
||||
double* clusterWeights = cluster->GetControlPointWeights();
|
||||
for (int j = 0; j < indexCount; j++)
|
||||
{
|
||||
const int vtxWeightId = clusterIndices[j];
|
||||
|
||||
if (vtxWeightId >= vertexCount)
|
||||
continue;
|
||||
|
||||
const auto vtxWeight = (float)clusterWeights[j];
|
||||
|
||||
if (vtxWeight <= 0 || isnan(vtxWeight) || isinf(vtxWeight))
|
||||
continue;
|
||||
|
||||
auto& indices = mesh.BlendIndices[vtxWeightId];
|
||||
auto& weights = mesh.BlendWeights[vtxWeightId];
|
||||
|
||||
for (int32 k = 0; k < 4; k++)
|
||||
{
|
||||
if (vtxWeight >= weights.Raw[k])
|
||||
{
|
||||
for (int32 l = 2; l >= k; l--)
|
||||
{
|
||||
indices.Raw[l + 1] = indices.Raw[l];
|
||||
weights.Raw[l + 1] = weights.Raw[l];
|
||||
}
|
||||
|
||||
indices.Raw[k] = boneIndex;
|
||||
weights.Raw[k] = vtxWeight;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
mesh.NormalizeBlendWeights();
|
||||
}
|
||||
|
||||
// Blend Shapes
|
||||
const int blendShapeDeformerCount = fbxMesh->GetDeformerCount(FbxDeformer::eBlendShape);
|
||||
if (blendShapeDeformerCount > 0 && data.Model.Types & ImportDataTypes::Skeleton && data.Options.ImportBlendShapes)
|
||||
{
|
||||
mesh.BlendShapes.EnsureCapacity(blendShapeDeformerCount);
|
||||
for (int deformerIndex = 0; deformerIndex < blendShapeDeformerCount; deformerIndex++)
|
||||
{
|
||||
FbxBlendShape* blendShape = FbxCast<FbxBlendShape>(fbxMesh->GetDeformer(deformerIndex, FbxDeformer::eBlendShape));
|
||||
|
||||
const int blendShapeChannelCount = blendShape->GetBlendShapeChannelCount();
|
||||
for (int32 channelIndex = 0; channelIndex < blendShapeChannelCount; channelIndex++)
|
||||
{
|
||||
FbxBlendShapeChannel* blendShapeChannel = blendShape->GetBlendShapeChannel(channelIndex);
|
||||
|
||||
// Use last shape
|
||||
const int shapeCount = blendShapeChannel->GetTargetShapeCount();
|
||||
if (shapeCount == 0)
|
||||
continue;
|
||||
FbxShape* shape = blendShapeChannel->GetTargetShape(shapeCount - 1);
|
||||
|
||||
int shapeControlPointsCount = shape->GetControlPointsCount();
|
||||
if (shapeControlPointsCount != vertexCount)
|
||||
continue;
|
||||
|
||||
BlendShape& blendShapeData = mesh.BlendShapes.AddOne();
|
||||
blendShapeData.Name = blendShapeChannel->GetName();
|
||||
const auto dotPos = blendShapeData.Name.Find('.');
|
||||
if (dotPos != -1)
|
||||
blendShapeData.Name = blendShapeData.Name.Substring(dotPos + 1);
|
||||
blendShapeData.Weight = blendShapeChannel->GetTargetShapeCount() > 1 ? (float)(blendShapeChannel->DeformPercent.Get() / 100.0) : 1.0f;
|
||||
|
||||
FbxVector4* shapeControlPoints = shape->GetControlPoints();
|
||||
blendShapeData.Vertices.Resize(shapeControlPointsCount);
|
||||
for (int32 i = 0; i < blendShapeData.Vertices.Count(); i++)
|
||||
blendShapeData.Vertices[i].VertexIndex = i;
|
||||
for (int i = 0; i < blendShapeData.Vertices.Count(); i++)
|
||||
blendShapeData.Vertices[i].PositionDelta = ToFlaxType(shapeControlPoints[i] - controlPoints[i]);
|
||||
// TODO: support importing normals from blend shape
|
||||
for (int32 i = 0; i < blendShapeData.Vertices.Count(); i++)
|
||||
blendShapeData.Vertices[i].NormalDelta = Vector3::Zero;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Flip the Y in texcoords
|
||||
for (int32 i = 0; i < mesh.UVs.Count(); i++)
|
||||
mesh.UVs[i].Y = 1.0f - mesh.UVs[i].Y;
|
||||
for (int32 i = 0; i < mesh.LightmapUVs.Count(); i++)
|
||||
mesh.LightmapUVs[i].Y = 1.0f - mesh.LightmapUVs[i].Y;
|
||||
|
||||
// Handle the missing material case (should never happen, but better to be safe)
|
||||
if (mesh.MaterialSlotIndex == -1)
|
||||
{
|
||||
mesh.MaterialSlotIndex = 0;
|
||||
LOG(Warning, "Mesh \'{0}\' has missing material slot.", mesh.Name);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
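// Note: MeshData::NormalizeBlendWeights is implemented elsewhere in the engine (not in this
// diff). Conceptually it rescales each vertex's four weights so they sum to one; a hypothetical
// per-vertex sketch of that idea:
void NormalizeVertexWeights(Vector4& weights)
{
    const float sum = weights.Raw[0] + weights.Raw[1] + weights.Raw[2] + weights.Raw[3];
    if (sum > 1e-6f)
    {
        const float invSum = 1.0f / sum;
        weights.Raw[0] *= invSum;
        weights.Raw[1] *= invSum;
        weights.Raw[2] *= invSum;
        weights.Raw[3] *= invSum;
    }
}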
|
||||
|
||||
bool ImportMesh(ImporterData& data, int32 nodeIndex, FbxMesh* fbxMesh, String& errorMsg)
|
||||
{
|
||||
auto& model = data.Model;
|
||||
|
||||
// Skip invalid meshes
|
||||
if (!fbxMesh->IsTriangleMesh() || fbxMesh->GetControlPointsCount() == 0 || fbxMesh->GetPolygonCount() == 0)
|
||||
return false;
|
||||
|
||||
// Check if that mesh has been already imported (instanced geometry)
|
||||
MeshData* meshData = nullptr;
|
||||
if (data.Meshes.TryGet(fbxMesh, meshData) && meshData)
|
||||
{
|
||||
// Clone mesh
|
||||
meshData = New<MeshData>(*meshData);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Import mesh data
|
||||
meshData = New<MeshData>();
|
||||
if (ProcessMesh(data, fbxMesh, *meshData, errorMsg))
|
||||
return true;
|
||||
}
|
||||
|
||||
// Link mesh
|
||||
meshData->NodeIndex = nodeIndex;
|
||||
auto& node = data.Nodes[nodeIndex];
|
||||
const auto lodIndex = node.LodIndex;
|
||||
if (model.LODs.Count() <= lodIndex)
|
||||
model.LODs.Resize(lodIndex + 1);
|
||||
model.LODs[lodIndex].Meshes.Add(meshData);
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
bool ImportMesh(ImporterData& data, int32 nodeIndex, String& errorMsg)
|
||||
{
|
||||
auto fbxNode = data.Nodes[nodeIndex].FbxNode;
|
||||
|
||||
// Process the node's attributes
|
||||
for (int i = 0; i < fbxNode->GetNodeAttributeCount(); i++)
|
||||
{
|
||||
auto attribute = fbxNode->GetNodeAttributeByIndex(i);
|
||||
if (!attribute)
|
||||
continue;
|
||||
|
||||
switch (attribute->GetAttributeType())
|
||||
{
|
||||
case FbxNodeAttribute::eNurbs:
|
||||
case FbxNodeAttribute::eNurbsSurface:
|
||||
case FbxNodeAttribute::ePatch:
|
||||
{
|
||||
FbxGeometryConverter geomConverter(FbxSdkManager::Manager);
|
||||
attribute = geomConverter.Triangulate(attribute, true);
|
||||
|
||||
if (attribute->GetAttributeType() == FbxNodeAttribute::eMesh)
|
||||
{
|
||||
FbxMesh* mesh = static_cast<FbxMesh*>(attribute);
|
||||
mesh->RemoveBadPolygons();
|
||||
|
||||
if (ImportMesh(data, nodeIndex, mesh, errorMsg))
|
||||
return true;
|
||||
}
|
||||
}
|
||||
break;
|
||||
case FbxNodeAttribute::eMesh:
|
||||
{
|
||||
FbxMesh* mesh = static_cast<FbxMesh*>(attribute);
|
||||
mesh->RemoveBadPolygons();
|
||||
|
||||
if (!mesh->IsTriangleMesh())
|
||||
{
|
||||
FbxGeometryConverter geomConverter(FbxSdkManager::Manager);
|
||||
geomConverter.Triangulate(mesh, true);
|
||||
attribute = fbxNode->GetNodeAttribute();
|
||||
mesh = static_cast<FbxMesh*>(attribute);
|
||||
}
|
||||
|
||||
if (ImportMesh(data, nodeIndex, mesh, errorMsg))
|
||||
return true;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
bool ImportMeshes(ImporterData& data, String& errorMsg)
|
||||
{
|
||||
for (int32 i = 0; i < data.Nodes.Count(); i++)
|
||||
{
|
||||
if (ImportMesh(data, i, errorMsg))
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/*
|
||||
void ImportCurve(aiVectorKey* keys, uint32 keysCount, LinearCurve<Vector3>& curve)
|
||||
{
|
||||
if (keys == nullptr || keysCount == 0)
|
||||
return;
|
||||
|
||||
const auto keyframes = curve.Resize(keysCount);
|
||||
|
||||
for (uint32 i = 0; i < keysCount; i++)
|
||||
{
|
||||
auto& aKey = keys[i];
|
||||
auto& key = keyframes[i];
|
||||
|
||||
key.Time = (float)aKey.mTime;
|
||||
key.Value = ToVector3(aKey.mValue);
|
||||
}
|
||||
}
|
||||
|
||||
void ImportCurve(aiQuatKey* keys, uint32 keysCount, LinearCurve<Quaternion>& curve)
|
||||
{
|
||||
if (keys == nullptr || keysCount == 0)
|
||||
return;
|
||||
|
||||
const auto keyframes = curve.Resize(keysCount);
|
||||
|
||||
for (uint32 i = 0; i < keysCount; i++)
|
||||
{
|
||||
auto& aKey = keys[i];
|
||||
auto& key = keyframes[i];
|
||||
|
||||
key.Time = (float)aKey.mTime;
|
||||
key.Value = ToQuaternion(aKey.mValue);
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
/// <summary>
|
||||
/// Bakes the node transformations.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// FBX stores transforms in a more complex way than just translation-rotation-scale as used by Flax Engine.
|
||||
/// Instead they also support rotations offsets and pivots, scaling pivots and more. We wish to bake all this data
|
||||
/// into a standard transform so we can access it using node's local TRS properties (e.g. FbxNode::LclTranslation).
|
||||
/// </remarks>
|
||||
/// <param name="scene">The FBX scene.</param>
|
||||
void BakeTransforms(FbxScene* scene)
|
||||
{
|
||||
double frameRate = FbxTime::GetFrameRate(scene->GetGlobalSettings().GetTimeMode());
|
||||
|
||||
Array<FbxNode*> todo;
|
||||
todo.Push(scene->GetRootNode());
|
||||
|
||||
while (todo.HasItems())
|
||||
{
|
||||
FbxNode* node = todo.Pop();
|
||||
|
||||
FbxVector4 zero(0, 0, 0);
|
||||
FbxVector4 one(1, 1, 1);
|
||||
|
||||
// Activate pivot converting
|
||||
node->SetPivotState(FbxNode::eSourcePivot, FbxNode::ePivotActive);
|
||||
node->SetPivotState(FbxNode::eDestinationPivot, FbxNode::ePivotActive);
|
||||
|
||||
// We want to set all these to 0 (1 for scale) and bake them into the transforms
|
||||
node->SetPostRotation(FbxNode::eDestinationPivot, zero);
|
||||
node->SetPreRotation(FbxNode::eDestinationPivot, zero);
|
||||
node->SetRotationOffset(FbxNode::eDestinationPivot, zero);
|
||||
node->SetScalingOffset(FbxNode::eDestinationPivot, zero);
|
||||
node->SetRotationPivot(FbxNode::eDestinationPivot, zero);
|
||||
node->SetScalingPivot(FbxNode::eDestinationPivot, zero);
|
||||
|
||||
// We account for geometric properties separately during node traversal
|
||||
node->SetGeometricTranslation(FbxNode::eDestinationPivot, node->GetGeometricTranslation(FbxNode::eSourcePivot));
|
||||
node->SetGeometricRotation(FbxNode::eDestinationPivot, node->GetGeometricRotation(FbxNode::eSourcePivot));
|
||||
node->SetGeometricScaling(FbxNode::eDestinationPivot, node->GetGeometricScaling(FbxNode::eSourcePivot));
|
||||
|
||||
// Flax assumes euler angles are in YXZ order
|
||||
node->SetRotationOrder(FbxNode::eDestinationPivot, FbxEuler::eOrderYXZ);
|
||||
|
||||
// Keep interpolation as is
|
||||
node->SetQuaternionInterpolation(FbxNode::eDestinationPivot, node->GetQuaternionInterpolation(FbxNode::eSourcePivot));
|
||||
|
||||
for (int i = 0; i < node->GetChildCount(); i++)
|
||||
{
|
||||
FbxNode* childNode = node->GetChild(i);
|
||||
todo.Push(childNode);
|
||||
}
|
||||
}
|
||||
|
||||
scene->GetRootNode()->ConvertPivotAnimationRecursive(nullptr, FbxNode::eDestinationPivot, frameRate, false);
|
||||
}
|
||||
|
||||
bool ModelTool::ImportDataAutodeskFbxSdk(const char* path, ImportedModelData& data, const Options& options, String& errorMsg)
|
||||
{
|
||||
ScopeLock lock(FbxSdkManager::Locker);
|
||||
|
||||
// Initialize
|
||||
FbxSdkManager::Init();
|
||||
auto scene = FbxScene::Create(FbxSdkManager::Manager, "Scene");
|
||||
if (scene == nullptr)
|
||||
{
|
||||
errorMsg = TEXT("Failed to create FBX scene");
|
||||
return true;
|
||||
}
|
||||
|
||||
// Import file
|
||||
bool importMeshes = (data.Types & ImportDataTypes::Geometry) != 0;
|
||||
bool importAnimations = (data.Types & ImportDataTypes::Animations) != 0;
|
||||
FbxImporter* importer = FbxImporter::Create(FbxSdkManager::Manager, "");
|
||||
auto ios = FbxSdkManager::Manager->GetIOSettings();
|
||||
ios->SetBoolProp(IMP_FBX_MODEL, importMeshes);
|
||||
ios->SetBoolProp(IMP_FBX_ANIMATION, importAnimations);
|
||||
if (!importer->Initialize(path, -1, ios))
|
||||
{
|
||||
errorMsg = String::Format(TEXT("Failed to initialize FBX importer. {0}"), String(importer->GetStatus().GetErrorString()));
|
||||
return true;
|
||||
}
|
||||
if (!importer->Import(scene))
|
||||
{
|
||||
errorMsg = String::Format(TEXT("Failed to import FBX scene. {0}"), String(importer->GetStatus().GetErrorString()));
|
||||
importer->Destroy();
|
||||
return true;
|
||||
}
|
||||
{
|
||||
const FbxAxisSystem fileCoordSystem = scene->GetGlobalSettings().GetAxisSystem();
|
||||
FbxAxisSystem bsCoordSystem(FbxAxisSystem::eDirectX);
|
||||
if (fileCoordSystem != bsCoordSystem)
|
||||
bsCoordSystem.ConvertScene(scene);
|
||||
}
|
||||
importer->Destroy();
|
||||
importer = nullptr;
|
||||
|
||||
BakeTransforms(scene);
|
||||
|
||||
// TODO: optimizeMeshes
|
||||
|
||||
// Process imported scene nodes
|
||||
ImporterData importerData(data, options, scene);
|
||||
ProcessNodes(importerData, scene->GetRootNode(), -1);
|
||||
|
||||
// Add all materials
|
||||
for (int i = 0; i < scene->GetMaterialCount(); i++)
|
||||
{
|
||||
importerData.Materials.Add(scene->GetMaterial(i));
|
||||
}
|
||||
|
||||
// Import geometry (meshes and materials)
|
||||
if (data.Types & ImportDataTypes::Geometry)
|
||||
{
|
||||
if (ImportMeshes(importerData, errorMsg))
|
||||
{
|
||||
LOG(Warning, "Failed to import meshes.");
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: remove unused materials if meshes merging is disabled
|
||||
|
||||
// Import skeleton
|
||||
if (data.Types & ImportDataTypes::Skeleton)
|
||||
{
|
||||
data.Skeleton.Nodes.Resize(importerData.Nodes.Count(), false);
|
||||
for (int32 i = 0; i < importerData.Nodes.Count(); i++)
|
||||
{
|
||||
auto& node = data.Skeleton.Nodes[i];
|
||||
auto& fbxNode = importerData.Nodes[i];
|
||||
|
||||
node.Name = fbxNode.Name;
|
||||
node.ParentIndex = fbxNode.ParentIndex;
|
||||
node.LocalTransform = fbxNode.LocalTransform;
|
||||
}
|
||||
|
||||
data.Skeleton.Bones.Resize(importerData.Bones.Count(), false);
|
||||
for (int32 i = 0; i < importerData.Bones.Count(); i++)
|
||||
{
|
||||
auto& bone = data.Skeleton.Bones[i];
|
||||
auto& fbxBone = importerData.Bones[i];
|
||||
|
||||
const auto boneNodeIndex = fbxBone.NodeIndex;
|
||||
const auto parentBoneNodeIndex = fbxBone.ParentBoneIndex == -1 ? -1 : importerData.Bones[fbxBone.ParentBoneIndex].NodeIndex;
|
||||
|
||||
bone.ParentIndex = fbxBone.ParentBoneIndex;
|
||||
bone.NodeIndex = fbxBone.NodeIndex;
|
||||
bone.LocalTransform = CombineTransformsFromNodeIndices(importerData.Nodes, parentBoneNodeIndex, boneNodeIndex);
|
||||
bone.OffsetMatrix = fbxBone.OffsetMatrix;
|
||||
}
|
||||
}
|
||||
/*
|
||||
// Import animations
|
||||
if (data.Types & ImportDataTypes::Animations)
|
||||
{
|
||||
if (scene->HasAnimations())
|
||||
{
|
||||
const auto animations = scene->mAnimations[0];
|
||||
data.Animation.Channels.Resize(animations->mNumChannels, false);
|
||||
data.Animation.Duration = animations->mDuration;
|
||||
data.Animation.FramesPerSecond = animations->mTicksPerSecond != 0.0 ? animations->mTicksPerSecond : 25.0;
|
||||
|
||||
for (unsigned i = 0; i < animations->mNumChannels; i++)
|
||||
{
|
||||
const auto aAnim = animations->mChannels[i];
|
||||
auto& anim = data.Animation.Channels[i];
|
||||
|
||||
anim.NodeName = aAnim->mNodeName.C_Str();
|
||||
|
||||
ImportCurve(aAnim->mPositionKeys, aAnim->mNumPositionKeys, anim.Position);
|
||||
ImportCurve(aAnim->mRotationKeys, aAnim->mNumRotationKeys, anim.Rotation);
|
||||
ImportCurve(aAnim->mScalingKeys, aAnim->mNumScalingKeys, anim.Scale);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
LOG(Warning, "Loaded scene has no animations");
|
||||
}
|
||||
}
|
||||
*/
|
||||
// Import nodes
|
||||
if (data.Types & ImportDataTypes::Nodes)
|
||||
{
|
||||
data.Nodes.Resize(importerData.Nodes.Count());
|
||||
for (int32 i = 0; i < importerData.Nodes.Count(); i++)
|
||||
{
|
||||
auto& node = data.Nodes[i];
|
||||
auto& aNode = importerData.Nodes[i];
|
||||
|
||||
node.Name = aNode.Name;
|
||||
node.ParentIndex = aNode.ParentIndex;
|
||||
node.LocalTransform = aNode.LocalTransform;
|
||||
}
|
||||
}
|
||||
|
||||
// Export materials info
|
||||
const int32 materialsCount = importerData.Materials.Count();
|
||||
data.Materials.Resize(materialsCount, false);
|
||||
for (int32 i = 0; i < importerData.Materials.Count(); i++)
|
||||
{
|
||||
auto& material = data.Materials[i];
|
||||
const auto fbxMaterial = importerData.Materials[i];
|
||||
|
||||
material.Name = String(fbxMaterial->GetName()).TrimTrailing();
|
||||
material.MaterialID = Guid::Empty;
|
||||
}
|
||||
|
||||
scene->Clear();
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
#endif
|
||||
64
Source/Engine/Tools/ModelTool/ModelTool.Build.cs
Normal file
@@ -0,0 +1,64 @@
|
||||
// Copyright (c) 2012-2020 Wojciech Figat. All rights reserved.
|
||||
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using Flax.Build;
|
||||
using Flax.Build.NativeCpp;
|
||||
|
||||
/// <summary>
|
||||
/// Model data utilities module.
|
||||
/// </summary>
|
||||
public class ModelTool : EngineModule
|
||||
{
|
||||
/// <inheritdoc />
|
||||
public override void Setup(BuildOptions options)
|
||||
{
|
||||
base.Setup(options);
|
||||
|
||||
bool useAssimp = true;
|
||||
bool useAutodeskFbxSdk = false;
|
||||
bool useOpenFBX = true;
|
||||
|
||||
if (useAssimp)
|
||||
{
|
||||
options.PrivateDependencies.Add("assimp");
|
||||
options.PrivateDefinitions.Add("USE_ASSIMP");
|
||||
}
|
||||
|
||||
if (useAutodeskFbxSdk)
|
||||
{
|
||||
options.PrivateDefinitions.Add("USE_AUTODESK_FBX_SDK");
|
||||
|
||||
// FBX SDK 2020.0.1 VS2015
|
||||
// TODO: convert this into AutodeskFbxSdk and implement proper SDK lookup with multiple versions support
|
||||
// TODO: link against dll with delay loading
|
||||
var sdkRoot = @"C:\Program Files\Autodesk\FBX\FBX SDK\2020.0.1";
|
||||
var libSubDir = "lib\\vs2015\\x64\\release";
|
||||
options.PrivateIncludePaths.Add(Path.Combine(sdkRoot, "include"));
|
||||
options.OutputFiles.Add(Path.Combine(sdkRoot, libSubDir, "libfbxsdk-md.lib"));
|
||||
options.OutputFiles.Add(Path.Combine(sdkRoot, libSubDir, "zlib-md.lib"));
|
||||
options.OutputFiles.Add(Path.Combine(sdkRoot, libSubDir, "libxml2-md.lib"));
|
||||
}
|
||||
|
||||
if (useOpenFBX)
|
||||
{
|
||||
options.PrivateDependencies.Add("OpenFBX");
|
||||
options.PrivateDefinitions.Add("USE_OPEN_FBX");
|
||||
}
|
||||
|
||||
options.PrivateDependencies.Add("TextureTool");
|
||||
options.PrivateDefinitions.Add("COMPILE_WITH_ASSETS_IMPORTER");
|
||||
|
||||
options.PrivateDependencies.Add("DirectXMesh");
|
||||
options.PrivateDependencies.Add("UVAtlas");
|
||||
options.PrivateDependencies.Add("meshoptimizer");
|
||||
|
||||
options.PublicDefinitions.Add("COMPILE_WITH_MODEL_TOOL");
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public override void GetFilesToDeploy(List<string> files)
|
||||
{
|
||||
files.Add(Path.Combine(FolderPath, "ModelTool.h"));
|
||||
}
|
||||
}
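The USE_ASSIMP / USE_AUTODESK_FBX_SDK / USE_OPEN_FBX definitions registered above gate the per-backend importer functions declared in ModelTool.h. A minimal sketch of that compile-time dispatch is shown below; the real selection logic lives in ModelTool.cpp (suppressed in this diff), so the body here is an assumption based on the declared private entry points, and the path conversion is only a placeholder.

// Hypothetical sketch only - the actual backend selection in ModelTool.cpp may differ (e.g. per file extension).
bool ModelTool::ImportData(const String& path, ImportedModelData& data, Options options, String& errorMsg)
{
    const char* pathAnsi = nullptr; // placeholder: ANSI-encoded copy of 'path' expected by the backends (conversion omitted)
#if USE_ASSIMP
    return ImportDataAssimp(pathAnsi, data, options, errorMsg);
#elif USE_AUTODESK_FBX_SDK
    return ImportDataAutodeskFbxSdk(pathAnsi, data, options, errorMsg);
#elif USE_OPEN_FBX
    return ImportDataOpenFBX(pathAnsi, data, options, errorMsg);
#else
    errorMsg = TEXT("No model import backend is enabled.");
    return true; // by convention in this codebase, true signals failure
#endif
}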
1267
Source/Engine/Tools/ModelTool/ModelTool.OpenFBX.cpp
Normal file
File diff suppressed because it is too large
103
Source/Engine/Tools/ModelTool/ModelTool.Options.cpp
Normal file
@@ -0,0 +1,103 @@
|
||||
// Copyright (c) 2012-2020 Wojciech Figat. All rights reserved.
|
||||
|
||||
#if COMPILE_WITH_MODEL_TOOL
|
||||
|
||||
#include "ModelTool.h"
|
||||
#include "Engine/Core/Log.h"
|
||||
#include "Engine/Serialization/Serialization.h"
|
||||
|
||||
BoundingBox ImportedModelData::LOD::GetBox() const
|
||||
{
|
||||
if (Meshes.IsEmpty())
|
||||
return BoundingBox::Empty;
|
||||
|
||||
BoundingBox box;
|
||||
Meshes[0]->CalculateBox(box);
|
||||
for (int32 i = 1; i < Meshes.Count(); i++)
|
||||
{
|
||||
if (Meshes[i]->Positions.HasItems())
|
||||
{
|
||||
BoundingBox t;
|
||||
Meshes[i]->CalculateBox(t);
|
||||
BoundingBox::Merge(box, t, box);
|
||||
}
|
||||
}
|
||||
|
||||
return box;
|
||||
}
|
||||
|
||||
void ModelTool::Options::Serialize(SerializeStream& stream, const void* otherObj)
|
||||
{
|
||||
SERIALIZE_GET_OTHER_OBJ(ModelTool::Options);
|
||||
|
||||
SERIALIZE(Type);
|
||||
SERIALIZE(CalculateNormals);
|
||||
SERIALIZE(SmoothingNormalsAngle);
|
||||
SERIALIZE(FlipNormals);
|
||||
SERIALIZE(CalculateTangents);
|
||||
SERIALIZE(SmoothingTangentsAngle);
|
||||
SERIALIZE(OptimizeMeshes);
|
||||
SERIALIZE(MergeMeshes);
|
||||
SERIALIZE(ImportLODs);
|
||||
SERIALIZE(ImportVertexColors);
|
||||
SERIALIZE(ImportBlendShapes);
|
||||
SERIALIZE(LightmapUVsSource);
|
||||
SERIALIZE(Scale);
|
||||
SERIALIZE(Rotation);
|
||||
SERIALIZE(Translation);
|
||||
SERIALIZE(CenterGeometry);
|
||||
SERIALIZE(Duration);
|
||||
SERIALIZE(FramesRange);
|
||||
SERIALIZE(DefaultFrameRate);
|
||||
SERIALIZE(SamplingRate);
|
||||
SERIALIZE(SkipEmptyCurves);
|
||||
SERIALIZE(OptimizeKeyframes);
|
||||
SERIALIZE(EnableRootMotion);
|
||||
SERIALIZE(RootNodeName);
|
||||
SERIALIZE(AnimationIndex);
|
||||
SERIALIZE(GenerateLODs);
|
||||
SERIALIZE(BaseLOD);
|
||||
SERIALIZE(LODCount);
|
||||
SERIALIZE(TriangleReduction);
|
||||
SERIALIZE(ImportMaterials);
|
||||
SERIALIZE(ImportTextures);
|
||||
SERIALIZE(RestoreMaterialsOnReimport);
|
||||
}
|
||||
|
||||
void ModelTool::Options::Deserialize(DeserializeStream& stream, ISerializeModifier* modifier)
|
||||
{
|
||||
DESERIALIZE(Type);
|
||||
DESERIALIZE(CalculateNormals);
|
||||
DESERIALIZE(SmoothingNormalsAngle);
|
||||
DESERIALIZE(FlipNormals);
|
||||
DESERIALIZE(CalculateTangents);
|
||||
DESERIALIZE(SmoothingTangentsAngle);
|
||||
DESERIALIZE(OptimizeMeshes);
|
||||
DESERIALIZE(MergeMeshes);
|
||||
DESERIALIZE(ImportLODs);
|
||||
DESERIALIZE(ImportVertexColors);
|
||||
DESERIALIZE(ImportBlendShapes);
|
||||
DESERIALIZE(LightmapUVsSource);
|
||||
DESERIALIZE(Scale);
|
||||
DESERIALIZE(Rotation);
|
||||
DESERIALIZE(Translation);
|
||||
DESERIALIZE(CenterGeometry);
|
||||
DESERIALIZE(Duration);
|
||||
DESERIALIZE(FramesRange);
|
||||
DESERIALIZE(DefaultFrameRate);
|
||||
DESERIALIZE(SamplingRate);
|
||||
DESERIALIZE(SkipEmptyCurves);
|
||||
DESERIALIZE(OptimizeKeyframes);
|
||||
DESERIALIZE(EnableRootMotion);
|
||||
DESERIALIZE(RootNodeName);
|
||||
DESERIALIZE(AnimationIndex);
|
||||
DESERIALIZE(GenerateLODs);
|
||||
DESERIALIZE(BaseLOD);
|
||||
DESERIALIZE(LODCount);
|
||||
DESERIALIZE(TriangleReduction);
|
||||
DESERIALIZE(ImportMaterials);
|
||||
DESERIALIZE(ImportTextures);
|
||||
DESERIALIZE(RestoreMaterialsOnReimport);
|
||||
}
|
||||
|
||||
#endif
|
||||
1275
Source/Engine/Tools/ModelTool/ModelTool.cpp
Normal file
File diff suppressed because it is too large
280
Source/Engine/Tools/ModelTool/ModelTool.h
Normal file
@@ -0,0 +1,280 @@
|
||||
// Copyright (c) 2012-2020 Wojciech Figat. All rights reserved.
|
||||
|
||||
#pragma once
|
||||
|
||||
#if COMPILE_WITH_MODEL_TOOL
|
||||
|
||||
#include "Engine/Core/Config.h"
|
||||
#include "Engine/Serialization/ISerializable.h"
|
||||
#include "Engine/Graphics/Models/ModelData.h"
|
||||
#include "Engine/Graphics/Models/SkeletonData.h"
|
||||
#include "Engine/Animations/AnimationData.h"
|
||||
|
||||
class JsonWriter;
|
||||
|
||||
/// <summary>
|
||||
/// The model file import data types (used as flags).
|
||||
/// </summary>
|
||||
enum class ImportDataTypes : int32
|
||||
{
|
||||
/// <summary>
|
||||
/// Imports materials and meshes.
|
||||
/// </summary>
|
||||
Geometry = 1 << 0,
|
||||
|
||||
/// <summary>
|
||||
/// Imports the skeleton bones hierarchy.
|
||||
/// </summary>
|
||||
Skeleton = 1 << 1,
|
||||
|
||||
/// <summary>
|
||||
/// Imports the animations.
|
||||
/// </summary>
|
||||
Animations = 1 << 2,
|
||||
|
||||
/// <summary>
|
||||
/// Imports the scene nodes hierarchy.
|
||||
/// </summary>
|
||||
Nodes = 1 << 3,
|
||||
|
||||
/// <summary>
|
||||
/// Imports the materials.
|
||||
/// </summary>
|
||||
Materials = 1 << 4,
|
||||
|
||||
/// <summary>
|
||||
/// Imports the textures.
|
||||
/// </summary>
|
||||
Textures = 1 << 5,
|
||||
};
|
||||
|
||||
DECLARE_ENUM_OPERATORS(ImportDataTypes);
|
||||
|
||||
/// <summary>
|
||||
/// Imported model data container. Represents unified model source file data (meshes, animations, skeleton, materials).
|
||||
/// </summary>
|
||||
class ImportedModelData
|
||||
{
|
||||
public:
|
||||
|
||||
struct LOD
|
||||
{
|
||||
Array<MeshData*> Meshes;
|
||||
|
||||
BoundingBox GetBox() const;
|
||||
};
|
||||
|
||||
struct Node
|
||||
{
|
||||
/// <summary>
|
||||
/// The parent node index. The root node uses value -1.
|
||||
/// </summary>
|
||||
int32 ParentIndex;
|
||||
|
||||
/// <summary>
|
||||
/// The local transformation of the node, relative to the parent node.
|
||||
/// </summary>
|
||||
Transform LocalTransform;
|
||||
|
||||
/// <summary>
|
||||
/// The name of this node.
|
||||
/// </summary>
|
||||
String Name;
|
||||
};
|
||||
|
||||
public:
|
||||
|
||||
/// <summary>
|
||||
/// The import data types.
|
||||
/// </summary>
|
||||
ImportDataTypes Types;
|
||||
|
||||
/// <summary>
|
||||
/// The textures slots.
|
||||
/// </summary>
|
||||
Array<TextureEntry> Textures;
|
||||
|
||||
/// <summary>
|
||||
/// The material slots.
|
||||
/// </summary>
|
||||
Array<MaterialSlotEntry> Materials;
|
||||
|
||||
/// <summary>
|
||||
/// The level of details data.
|
||||
/// </summary>
|
||||
Array<LOD> LODs;
|
||||
|
||||
/// <summary>
|
||||
/// The skeleton data.
|
||||
/// </summary>
|
||||
SkeletonData Skeleton;
|
||||
|
||||
/// <summary>
|
||||
/// The scene nodes.
|
||||
/// </summary>
|
||||
Array<Node> Nodes;
|
||||
|
||||
/// <summary>
|
||||
/// The node animations.
|
||||
/// </summary>
|
||||
AnimationData Animation;
|
||||
|
||||
public:
|
||||
|
||||
/// <summary>
|
||||
/// Initializes a new instance of the <see cref="ImportedModelData"/> class.
|
||||
/// </summary>
|
||||
/// <param name="types">The types.</param>
|
||||
ImportedModelData(ImportDataTypes types)
|
||||
{
|
||||
Types = types;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Finalizes an instance of the <see cref="ImportedModelData"/> class.
|
||||
/// </summary>
|
||||
~ImportedModelData()
|
||||
{
|
||||
// Ensure to cleanup data
|
||||
for (int32 i = 0; i < LODs.Count(); i++)
|
||||
LODs[i].Meshes.ClearDelete();
|
||||
}
|
||||
};
|
||||
|
||||
/// <summary>
|
||||
/// Import models and animations helper.
|
||||
/// </summary>
|
||||
class FLAXENGINE_API ModelTool
|
||||
{
|
||||
public:
|
||||
|
||||
/// <summary>
|
||||
/// Declares the imported data type.
|
||||
/// </summary>
|
||||
DECLARE_ENUM_EX_3(ModelType, int32, 0, Model, SkinnedModel, Animation);
|
||||
|
||||
/// <summary>
|
||||
/// Declares the imported animation clip duration.
|
||||
/// </summary>
|
||||
DECLARE_ENUM_EX_2(AnimationDuration, int32, 0, Imported, Custom);
|
||||
|
||||
/// <summary>
|
||||
/// Importing model options
|
||||
/// </summary>
|
||||
struct Options : public ISerializable
|
||||
{
|
||||
ModelType Type = ModelType::Model;
|
||||
|
||||
// Geometry
|
||||
bool CalculateNormals = false;
|
||||
float SmoothingNormalsAngle = 175.0f;
|
||||
bool FlipNormals = false;
|
||||
float SmoothingTangentsAngle = 45.0f;
|
||||
bool CalculateTangents = true;
|
||||
bool OptimizeMeshes = true;
|
||||
bool MergeMeshes = true;
|
||||
bool ImportLODs = true;
|
||||
bool ImportVertexColors = true;
|
||||
bool ImportBlendShapes = false;
|
||||
ModelLightmapUVsSource LightmapUVsSource = ModelLightmapUVsSource::Disable;
|
||||
|
||||
// Transform
|
||||
float Scale = 1.0f;
|
||||
Quaternion Rotation = Quaternion::Identity;
|
||||
Vector3 Translation = Vector3::Zero;
|
||||
bool CenterGeometry = false;
|
||||
|
||||
// Animation
|
||||
AnimationDuration Duration = AnimationDuration::Imported;
|
||||
Vector2 FramesRange = Vector2::Zero;
|
||||
float DefaultFrameRate = 0.0f;
|
||||
float SamplingRate = 0.0f;
|
||||
bool SkipEmptyCurves = true;
|
||||
bool OptimizeKeyframes = true;
|
||||
bool EnableRootMotion = false;
|
||||
String RootNodeName;
|
||||
int32 AnimationIndex = -1;
|
||||
|
||||
// Level Of Detail
|
||||
bool GenerateLODs = false;
|
||||
int32 BaseLOD = 0;
|
||||
int32 LODCount = 4;
|
||||
float TriangleReduction = 0.5f;
|
||||
|
||||
// Materials
|
||||
bool ImportMaterials = true;
|
||||
bool ImportTextures = true;
|
||||
bool RestoreMaterialsOnReimport = true;
|
||||
|
||||
public:
|
||||
|
||||
// [ISerializable]
|
||||
void Serialize(SerializeStream& stream, const void* otherObj) override;
|
||||
void Deserialize(DeserializeStream& stream, ISerializeModifier* modifier) override;
|
||||
};
|
||||
|
||||
public:
|
||||
|
||||
/// <summary>
|
||||
/// Imports the model source file data.
|
||||
/// </summary>
|
||||
/// <param name="path">The file path.</param>
|
||||
/// <param name="data">The output data.</param>
|
||||
/// <param name="options">The import options.</param>
|
||||
/// <param name="errorMsg">The error message container.</param>
|
||||
/// <returns>True if fails, otherwise false.</returns>
|
||||
static bool ImportData(const String& path, ImportedModelData& data, Options options, String& errorMsg);
|
||||
|
||||
/// <summary>
|
||||
/// Imports the model.
|
||||
/// </summary>
|
||||
/// <param name="path">The file path.</param>
|
||||
/// <param name="meshData">The output data.</param>
|
||||
/// <param name="options">The import options.</param>
|
||||
/// <param name="errorMsg">The error message container.</param>
|
||||
/// <param name="autoImportOutput">The output folder for the additional imported data - optional. Used to auto-import textures and material assets.</param>
|
||||
/// <returns>True if fails, otherwise false.</returns>
|
||||
static bool ImportModel(const String& path, ModelData& meshData, Options options, String& errorMsg, const String& autoImportOutput = String::Empty);
|
||||
|
||||
public:
|
||||
|
||||
static int32 DetectLodIndex(const String& nodeName);
|
||||
static bool FindTexture(const String& sourcePath, const String& file, String& path);
|
||||
|
||||
/// <summary>
|
||||
/// Gets the local transformations to go from rootIndex to index.
|
||||
/// </summary>
|
||||
/// <param name="nodes">The nodes containing the local transformations.</param>
|
||||
/// <param name="rootIndex">The root index.</param>
|
||||
/// <param name="index">The current index.</param>
|
||||
/// <returns>The transformation at this index.</returns>
|
||||
template<typename Node>
|
||||
static Transform CombineTransformsFromNodeIndices(Array<Node>& nodes, int32 rootIndex, int32 index)
|
||||
{
|
||||
if (index == -1 || index == rootIndex)
|
||||
return Transform::Identity;
|
||||
|
||||
auto result = nodes[index].LocalTransform;
|
||||
if (index != rootIndex)
|
||||
{
|
||||
const auto parentTransform = CombineTransformsFromNodeIndices(nodes, rootIndex, nodes[index].ParentIndex);
|
||||
result = parentTransform.LocalToWorld(result);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
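// Illustrative example (added note, not part of the original sources): for a hierarchy
// Root -> A -> B, where Root has ParentIndex -1, calling
// CombineTransformsFromNodeIndices(nodes, rootIndex = indexOf(Root), index = indexOf(B))
// recursively composes A.LocalTransform.LocalToWorld(B.LocalTransform), i.e. the transform
// of B expressed in the space of Root. Passing rootIndex = -1 composes all the way up the
// parent chain, yielding the model-space transform of B.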
|
||||
private:
|
||||
|
||||
#if USE_ASSIMP
|
||||
static bool ImportDataAssimp(const char* path, ImportedModelData& data, const Options& options, String& errorMsg);
|
||||
#endif
|
||||
#if USE_AUTODESK_FBX_SDK
|
||||
static bool ImportDataAutodeskFbxSdk(const char* path, ImportedModelData& data, const Options& options, String& errorMsg);
|
||||
#endif
|
||||
#if USE_OPEN_FBX
|
||||
static bool ImportDataOpenFBX(const char* path, ImportedModelData& data, const Options& options, String& errorMsg);
|
||||
#endif
|
||||
};
|
||||
|
||||
#endif
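The header above is the whole public surface of the importer. The snippet below is an illustrative usage sketch (not part of the engine sources): the asset path is hypothetical, the scale value is an arbitrary example, and error handling follows the codebase convention where a true return value signals failure.

ImportedModelData data(ImportDataTypes::Geometry | ImportDataTypes::Nodes | ImportDataTypes::Materials);
ModelTool::Options options;
options.Scale = 0.01f; // example value only
String errorMsg;
if (ModelTool::ImportData(TEXT("C:/Assets/Barrel.fbx"), data, options, errorMsg))
{
    LOG(Warning, "Model import failed: {0}", errorMsg);
}
else
{
    LOG(Info, "Imported {0} LOD(s) and {1} material slot(s)", data.LODs.Count(), data.Materials.Count());
}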
346
Source/Engine/Tools/ModelTool/SpatialSort.cpp
Normal file
@@ -0,0 +1,346 @@
|
||||
/*
|
||||
---------------------------------------------------------------------------
|
||||
Open Asset Import Library (assimp)
|
||||
---------------------------------------------------------------------------
|
||||
|
||||
Copyright (c) 2006-2018, assimp team
|
||||
|
||||
|
||||
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use of this software in source and binary forms,
|
||||
with or without modification, are permitted provided that the following
|
||||
conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above
|
||||
copyright notice, this list of conditions and the
|
||||
following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the
|
||||
following disclaimer in the documentation and/or other
|
||||
materials provided with the distribution.
|
||||
|
||||
* Neither the name of the assimp team, nor the names of its
|
||||
contributors may be used to endorse or promote products
|
||||
derived from this software without specific prior
|
||||
written permission of the assimp team.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
---------------------------------------------------------------------------
|
||||
*/
|
||||
|
||||
/** @file Implementation of the helper class to quickly find vertices close to a given position */
|
||||
|
||||
#include "SpatialSort.h"
|
||||
#if COMPILE_WITH_MODEL_TOOL
|
||||
#include <assimp/ai_assert.h>
|
||||
|
||||
using namespace Assimp;
|
||||
|
||||
// CHAR_BIT seems to be defined under MSVC, but not under GCC. Pray that the correct value is 8.
|
||||
#ifndef CHAR_BIT
|
||||
# define CHAR_BIT 8
|
||||
#endif
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// Constructs a spatially sorted representation from the given position array.
|
||||
SpatialSort::SpatialSort(const aiVector3D* pPositions, unsigned int pNumPositions,
|
||||
unsigned int pElementOffset)
|
||||
|
||||
// define the reference plane. We choose some arbitrary vector away from all basic axes
|
||||
// in the hope that no model spreads all its vertices along this plane.
|
||||
: mPlaneNormal(0.8523f, 0.34321f, 0.5736f)
|
||||
{
|
||||
mPlaneNormal.Normalize();
|
||||
Fill(pPositions, pNumPositions, pElementOffset);
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
SpatialSort::SpatialSort()
|
||||
: mPlaneNormal(0.8523f, 0.34321f, 0.5736f)
|
||||
{
|
||||
mPlaneNormal.Normalize();
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// Destructor
|
||||
SpatialSort::~SpatialSort()
|
||||
{
|
||||
// nothing to do here, everything destructs automatically
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
void SpatialSort::Fill(const aiVector3D* pPositions, unsigned int pNumPositions,
|
||||
unsigned int pElementOffset,
|
||||
bool pFinalize /*= true */)
|
||||
{
|
||||
mPositions.clear();
|
||||
Append(pPositions, pNumPositions, pElementOffset, pFinalize);
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
void SpatialSort::Finalize()
|
||||
{
|
||||
std::sort(mPositions.begin(), mPositions.end());
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
void SpatialSort::Append(const aiVector3D* pPositions, unsigned int pNumPositions,
|
||||
unsigned int pElementOffset,
|
||||
bool pFinalize /*= true */)
|
||||
{
|
||||
// store references to all given positions along with their distance to the reference plane
|
||||
const size_t initial = mPositions.size();
|
||||
mPositions.reserve(initial + (pFinalize ? pNumPositions : pNumPositions * 2));
|
||||
for (unsigned int a = 0; a < pNumPositions; a++)
|
||||
{
|
||||
const char* tempPointer = reinterpret_cast<const char*>(pPositions);
|
||||
const aiVector3D* vec = reinterpret_cast<const aiVector3D*>(tempPointer + a * pElementOffset);
|
||||
|
||||
// store position by index and distance
|
||||
ai_real distance = *vec * mPlaneNormal;
|
||||
mPositions.push_back(Entry(static_cast<unsigned int>(a + initial), *vec, distance));
|
||||
}
|
||||
|
||||
if (pFinalize)
|
||||
{
|
||||
// now sort the array ascending by distance.
|
||||
Finalize();
|
||||
}
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// Returns an iterator for all positions close to the given position.
|
||||
void SpatialSort::FindPositions(const aiVector3D& pPosition,
|
||||
ai_real pRadius, std::vector<unsigned int>& poResults) const
|
||||
{
|
||||
const ai_real dist = pPosition * mPlaneNormal;
|
||||
const ai_real minDist = dist - pRadius, maxDist = dist + pRadius;
|
||||
|
||||
// clear the array
|
||||
poResults.clear();
|
||||
|
||||
// quick check for positions outside the range
|
||||
if (mPositions.size() == 0)
|
||||
return;
|
||||
if (maxDist < mPositions.front().mDistance)
|
||||
return;
|
||||
if (minDist > mPositions.back().mDistance)
|
||||
return;
|
||||
|
||||
// do a binary search for the minimal distance to start the iteration there
|
||||
unsigned int index = (unsigned int)mPositions.size() / 2;
|
||||
unsigned int binaryStepSize = (unsigned int)mPositions.size() / 4;
|
||||
while (binaryStepSize > 1)
|
||||
{
|
||||
if (mPositions[index].mDistance < minDist)
|
||||
index += binaryStepSize;
|
||||
else
|
||||
index -= binaryStepSize;
|
||||
|
||||
binaryStepSize /= 2;
|
||||
}
|
||||
|
||||
// depending on the direction of the last step we need to single step a bit back or forth
|
||||
// to find the actual beginning element of the range
|
||||
while (index > 0 && mPositions[index].mDistance > minDist)
|
||||
index--;
|
||||
while (index < (mPositions.size() - 1) && mPositions[index].mDistance < minDist)
|
||||
index++;
|
||||
|
||||
// Now start iterating from there until the first position lies outside of the distance range.
// Add all positions inside the distance range within the given radius to the result array
|
||||
std::vector<Entry>::const_iterator it = mPositions.begin() + index;
|
||||
const ai_real pSquared = pRadius * pRadius;
|
||||
while (it->mDistance < maxDist)
|
||||
{
|
||||
if ((it->mPosition - pPosition).SquareLength() < pSquared)
|
||||
poResults.push_back(it->mIndex);
|
||||
++it;
|
||||
if (it == mPositions.end())
|
||||
break;
|
||||
}
|
||||
|
||||
// that's it
|
||||
}
|
||||
|
||||
namespace
|
||||
{
|
||||
// Binary, signed-integer representation of a single-precision floating-point value.
|
||||
// IEEE 754 says: "If two floating-point numbers in the same format are ordered then they are
|
||||
// ordered the same way when their bits are reinterpreted as sign-magnitude integers."
|
||||
// This allows us to convert all floating-point numbers to signed integers of arbitrary size
|
||||
// and then use them to work with ULPs (Units in the Last Place, for high-precision
|
||||
// computations) or to compare them (integer comparisons are faster than floating-point
|
||||
// comparisons on many platforms).
|
||||
typedef ai_int BinFloat;
|
||||
|
||||
// --------------------------------------------------------------------------------------------
|
||||
// Converts the bit pattern of a floating-point number to its signed integer representation.
|
||||
BinFloat ToBinary(const ai_real& pValue)
|
||||
{
|
||||
// If this assertion fails, signed int is not big enough to store a float on your platform.
|
||||
// Please correct the declaration of BinFloat a few lines above - but do it in a portable,
|
||||
// #ifdef'd manner!
|
||||
static_assert( sizeof(BinFloat) >= sizeof(ai_real), "sizeof(BinFloat) >= sizeof(ai_real)");
|
||||
|
||||
#if defined( _MSC_VER)
|
||||
// If this assertion fails, Visual C++ has finally moved to ILP64. This means that this
|
||||
// code has just become legacy code! Find out the current value of _MSC_VER and modify
|
||||
// the #if above so it evaluates false on the current and all upcoming VC versions (or
|
||||
// on the current platform, if LP64 or LLP64 are still used on other platforms).
|
||||
static_assert( sizeof(BinFloat) == sizeof(ai_real), "sizeof(BinFloat) == sizeof(ai_real)");
|
||||
|
||||
// This works best on Visual C++, but other compilers have their problems with it.
|
||||
const BinFloat binValue = reinterpret_cast<BinFloat const &>(pValue);
|
||||
#else
|
||||
// On many compilers, reinterpreting a float address as an integer causes aliasing
|
||||
// problems. This is an ugly but more or less safe way of doing it.
|
||||
union {
|
||||
ai_real asFloat;
|
||||
BinFloat asBin;
|
||||
} conversion;
|
||||
conversion.asBin = 0; // zero empty space in case sizeof(BinFloat) > sizeof(float)
|
||||
conversion.asFloat = pValue;
|
||||
const BinFloat binValue = conversion.asBin;
|
||||
#endif
|
||||
|
||||
// floating-point numbers are of sign-magnitude format, so find out what signed number
|
||||
// representation we must convert negative values to.
|
||||
// See http://en.wikipedia.org/wiki/Signed_number_representations.
|
||||
|
||||
// Two's complement?
|
||||
if ((-42 == (~42 + 1)) && (binValue & 0x80000000))
|
||||
return BinFloat(1 << (CHAR_BIT * sizeof(BinFloat) - 1)) - binValue;
|
||||
// One's complement?
|
||||
else if ((-42 == ~42) && (binValue & 0x80000000))
|
||||
return BinFloat(-0) - binValue;
|
||||
// Sign-magnitude?
|
||||
else if ((-42 == (42 | (-0))) && (binValue & 0x80000000)) // -0 = 1000... binary
|
||||
return binValue;
|
||||
else
|
||||
return binValue;
|
||||
}
|
||||
} // namespace
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// Fills an array with indices of all positions identical to the given position. In contrast to
// FindPositions(), no epsilon is used; instead, a (very low) tolerance of four floating-point units is applied.
|
||||
void SpatialSort::FindIdenticalPositions(const aiVector3D& pPosition,
|
||||
std::vector<unsigned int>& poResults) const
|
||||
{
|
||||
// Epsilons have a huge disadvantage: they are of constant precision, while floating-point
|
||||
// values are of log2 precision. If you apply e=0.01 to 100, the epsilon is rather small, but
|
||||
// if you apply it to 0.001, it is enormous.
|
||||
|
||||
// The best way to overcome this is the unit in the last place (ULP). A precision of 2 ULPs
|
||||
// tells us that a float does not differ more than 2 bits from the "real" value. ULPs are of
|
||||
// logarithmic precision - around 1, they are about 1/(2^24) and around 10000, they are about 0.00125.
|
||||
|
||||
// For standard C math, we can assume a precision of 0.5 ULPs according to IEEE 754. The
|
||||
// incoming vertex positions might have already been transformed, probably using rather
|
||||
// inaccurate SSE instructions, so we assume a tolerance of 4 ULPs to safely identify
|
||||
// identical vertex positions.
|
||||
static const int toleranceInULPs = 4;
|
||||
// An interesting point is that the inaccuracy grows linear with the number of operations:
|
||||
// multiplying two numbers, each inaccurate to four ULPs, results in an inaccuracy of four ULPs
|
||||
// plus 0.5 ULPs for the multiplication.
|
||||
// To compute the distance to the plane, a dot product is needed - that is a multiplication and
|
||||
// an addition on each number.
|
||||
static const int distanceToleranceInULPs = toleranceInULPs + 1;
|
||||
// The squared distance between two 3D vectors is computed the same way, but with an additional
|
||||
// subtraction.
|
||||
static const int distance3DToleranceInULPs = distanceToleranceInULPs + 1;
|
||||
|
||||
// Convert the plane distance to its signed integer representation so the ULPs tolerance can be
|
||||
// applied. For some reason, VC won't optimize two calls of the bit pattern conversion.
|
||||
const BinFloat minDistBinary = ToBinary(pPosition * mPlaneNormal) - distanceToleranceInULPs;
|
||||
const BinFloat maxDistBinary = minDistBinary + 2 * distanceToleranceInULPs;
|
||||
|
||||
// clear the array in this strange fashion because a simple clear() would also deallocate
|
||||
// the array which we want to avoid
|
||||
poResults.resize(0);
|
||||
|
||||
// do a binary search for the minimal distance to start the iteration there
|
||||
unsigned int index = (unsigned int)mPositions.size() / 2;
|
||||
unsigned int binaryStepSize = (unsigned int)mPositions.size() / 4;
|
||||
while (binaryStepSize > 1)
|
||||
{
|
||||
// Ugly, but conditional jumps are faster with integers than with floats
|
||||
if (minDistBinary > ToBinary(mPositions[index].mDistance))
|
||||
index += binaryStepSize;
|
||||
else
|
||||
index -= binaryStepSize;
|
||||
|
||||
binaryStepSize /= 2;
|
||||
}
|
||||
|
||||
// depending on the direction of the last step we need to single step a bit back or forth
|
||||
// to find the actual beginning element of the range
|
||||
while (index > 0 && minDistBinary < ToBinary(mPositions[index].mDistance))
|
||||
index--;
|
||||
while (index < (mPositions.size() - 1) && minDistBinary > ToBinary(mPositions[index].mDistance))
|
||||
index++;
|
||||
|
||||
// Now start iterating from there until the first position lies outside of the distance range.
|
||||
// Add all positions inside the distance range within the tolerance to the result array
|
||||
std::vector<Entry>::const_iterator it = mPositions.begin() + index;
|
||||
while (ToBinary(it->mDistance) < maxDistBinary)
|
||||
{
|
||||
if (distance3DToleranceInULPs >= ToBinary((it->mPosition - pPosition).SquareLength()))
|
||||
poResults.push_back(it->mIndex);
|
||||
++it;
|
||||
if (it == mPositions.end())
|
||||
break;
|
||||
}
|
||||
|
||||
// that's it
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
unsigned int SpatialSort::GenerateMappingTable(std::vector<unsigned int>& fill, ai_real pRadius) const
|
||||
{
|
||||
fill.resize(mPositions.size(),UINT_MAX);
|
||||
ai_real dist, maxDist;
|
||||
|
||||
unsigned int t = 0;
|
||||
const ai_real pSquared = pRadius * pRadius;
|
||||
for (size_t i = 0; i < mPositions.size();)
|
||||
{
|
||||
dist = mPositions[i].mPosition * mPlaneNormal;
|
||||
maxDist = dist + pRadius;
|
||||
|
||||
fill[mPositions[i].mIndex] = t;
|
||||
const aiVector3D& oldPos = mPositions[i].mPosition;
|
||||
for (++i; i < fill.size() && mPositions[i].mDistance < maxDist
|
||||
&& (mPositions[i].mPosition - oldPos).SquareLength() < pSquared; ++i)
|
||||
{
|
||||
fill[mPositions[i].mIndex] = t;
|
||||
}
|
||||
++t;
|
||||
}
|
||||
|
||||
#ifdef ASSIMP_BUILD_DEBUG
|
||||
|
||||
// debug invariant: mPositions[i].mIndex values must range from 0 to mPositions.size()-1
|
||||
for (size_t i = 0; i < fill.size(); ++i) {
|
||||
ai_assert(fill[i]<mPositions.size());
|
||||
}
|
||||
|
||||
#endif
|
||||
return t;
|
||||
}
|
||||
|
||||
#endif
|
||||
184
Source/Engine/Tools/ModelTool/SpatialSort.h
Normal file
@@ -0,0 +1,184 @@
|
||||
/*
|
||||
Open Asset Import Library (assimp)
|
||||
----------------------------------------------------------------------
|
||||
|
||||
Copyright (c) 2006-2018, assimp team
|
||||
|
||||
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use of this software in source and binary forms,
|
||||
with or without modification, are permitted provided that the
|
||||
following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above
|
||||
copyright notice, this list of conditions and the
|
||||
following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the
|
||||
following disclaimer in the documentation and/or other
|
||||
materials provided with the distribution.
|
||||
|
||||
* Neither the name of the assimp team, nor the names of its
|
||||
contributors may be used to endorse or promote products
|
||||
derived from this software without specific prior
|
||||
written permission of the assimp team.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
----------------------------------------------------------------------
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
/** Small helper classes to optimise finding vertices close to a given location */
|
||||
#ifndef AI_SPATIALSORT_H_INC
|
||||
#define AI_SPATIALSORT_H_INC
|
||||
|
||||
#include <vector>
|
||||
#include <assimp/types.h>
|
||||
|
||||
namespace Assimp
|
||||
{
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
/** A little helper class to quickly find all vertices in the epsilon environment of a given
|
||||
* position. Construct an instance with an array of positions. The class stores the given positions
|
||||
* by their indices and sorts them by their distance to an arbitrary chosen plane.
|
||||
* You can then query the instance for all vertices close to a given position in an average O(log n)
|
||||
 * time, with O(n) worst case complexity when all vertices lie on the plane. The plane is chosen
|
||||
* so that it avoids common planes in usual data sets. */
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
class ASSIMP_API SpatialSort
|
||||
{
|
||||
public:
|
||||
|
||||
SpatialSort();
|
||||
|
||||
// ------------------------------------------------------------------------------------
|
||||
/** Constructs a spatially sorted representation from the given position array.
|
||||
* Supply the positions in its layout in memory, the class will only refer to them
|
||||
* by index.
|
||||
* @param pPositions Pointer to the first position vector of the array.
|
||||
* @param pNumPositions Number of vectors to expect in that array.
|
||||
* @param pElementOffset Offset in bytes from the beginning of one vector in memory
|
||||
* to the beginning of the next vector. */
|
||||
SpatialSort(const aiVector3D* pPositions, unsigned int pNumPositions,
|
||||
unsigned int pElementOffset);
|
||||
|
||||
/** Destructor */
|
||||
~SpatialSort();
|
||||
|
||||
public:
|
||||
|
||||
// ------------------------------------------------------------------------------------
|
||||
/** Sets the input data for the SpatialSort. This replaces existing data, if any.
|
||||
* The new data receives new indices in ascending order.
|
||||
*
|
||||
* @param pPositions Pointer to the first position vector of the array.
|
||||
* @param pNumPositions Number of vectors to expect in that array.
|
||||
* @param pElementOffset Offset in bytes from the beginning of one vector in memory
|
||||
* to the beginning of the next vector.
|
||||
* @param pFinalize Specifies whether the SpatialSort's internal representation
|
||||
* is finalized after the new data has been added. Finalization is
|
||||
* required in order to use #FindPosition() or #GenerateMappingTable().
|
||||
* If you don't finalize yet, you can use #Append() to add data from
|
||||
* other sources.*/
|
||||
void Fill(const aiVector3D* pPositions, unsigned int pNumPositions,
|
||||
unsigned int pElementOffset,
|
||||
bool pFinalize = true);
|
||||
|
||||
// ------------------------------------------------------------------------------------
|
||||
/** Same as #Fill(), except the method appends to existing data in the #SpatialSort. */
|
||||
void Append(const aiVector3D* pPositions, unsigned int pNumPositions,
|
||||
unsigned int pElementOffset,
|
||||
bool pFinalize = true);
|
||||
|
||||
// ------------------------------------------------------------------------------------
|
||||
/** Finalize the spatial hash data structure. This can be useful after
|
||||
* multiple calls to #Append() with the pFinalize parameter set to false.
|
||||
* This is finally required before one of #FindPositions() and #GenerateMappingTable()
|
||||
* can be called to query the spatial sort.*/
|
||||
void Finalize();
|
||||
|
||||
// ------------------------------------------------------------------------------------
|
||||
/** Returns an iterator for all positions close to the given position.
|
||||
* @param pPosition The position to look for vertices.
|
||||
* @param pRadius Maximal distance from the position a vertex may have to be counted in.
|
||||
* @param poResults The container to store the indices of the found positions.
|
||||
* Will be emptied by the call so it may contain anything.
|
||||
* @return An iterator to iterate over all vertices in the given area.*/
|
||||
void FindPositions(const aiVector3D& pPosition, ai_real pRadius,
|
||||
std::vector<unsigned int>& poResults) const;
|
||||
|
||||
// ------------------------------------------------------------------------------------
|
||||
/** Fills an array with indices of all positions identical to the given position. In
 * contrast to FindPositions(), no epsilon is used; instead, a (very low) tolerance of
 * four floating-point units is applied.
|
||||
* @param pPosition The position to look for vertices.
|
||||
* @param poResults The container to store the indices of the found positions.
|
||||
* Will be emptied by the call so it may contain anything.*/
|
||||
void FindIdenticalPositions(const aiVector3D& pPosition,
|
||||
std::vector<unsigned int>& poResults) const;
|
||||
|
||||
// ------------------------------------------------------------------------------------
|
||||
/** Compute a table that maps each vertex ID referring to a spatially close
|
||||
* enough position to the same output ID. Output IDs are assigned in ascending order
|
||||
* from 0...n.
|
||||
* @param fill Will be filled with numPositions entries.
|
||||
* @param pRadius Maximal distance from the position a vertex may have to
|
||||
* be counted in.
|
||||
* @return Number of unique vertices (n). */
|
||||
unsigned int GenerateMappingTable(std::vector<unsigned int>& fill,
|
||||
ai_real pRadius) const;
|
||||
|
||||
protected:
|
||||
/** Normal of the sorting plane, normalized. The center is always at (0, 0, 0) */
|
||||
aiVector3D mPlaneNormal;
|
||||
|
||||
/** An entry in a spatially sorted position array. Consists of a vertex index,
|
||||
* its position and its pre-calculated distance from the reference plane */
|
||||
struct Entry
|
||||
{
|
||||
unsigned int mIndex; ///< The vertex referred by this entry
|
||||
aiVector3D mPosition; ///< Position
|
||||
ai_real mDistance; ///< Distance of this vertex to the sorting plane
|
||||
|
||||
Entry()
|
||||
: mIndex(999999999)
|
||||
, mPosition()
|
||||
, mDistance(99999.)
|
||||
{
|
||||
// empty
|
||||
}
|
||||
|
||||
Entry(unsigned int pIndex, const aiVector3D& pPosition, ai_real pDistance)
|
||||
: mIndex(pIndex)
|
||||
, mPosition(pPosition)
|
||||
, mDistance(pDistance)
|
||||
{
|
||||
// empty
|
||||
}
|
||||
|
||||
bool operator <(const Entry& e) const
|
||||
{
|
||||
return mDistance < e.mDistance;
|
||||
}
|
||||
};
|
||||
|
||||
// all positions, sorted by distance to the sorting plane
|
||||
std::vector<Entry> mPositions;
|
||||
};
|
||||
} // end of namespace Assimp
|
||||
|
||||
#endif // AI_SPATIALSORT_H_INC
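A brief usage sketch of the class declared above (illustrative only): 'positions' and 'count' stand for a caller-provided aiVector3D array and its length, and the 1e-5 welding radius is an arbitrary example value.

Assimp::SpatialSort sort;
sort.Fill(positions, count, sizeof(aiVector3D));
// Query all vertices within a radius of a given point:
std::vector<unsigned int> nearby;
sort.FindPositions(positions[0], ai_real(1e-5), nearby);
// Or build a remapping table that welds spatially identical vertices:
std::vector<unsigned int> remap;
const unsigned int uniqueCount = sort.GenerateMappingTable(remap, ai_real(1e-5));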
91
Source/Engine/Tools/ModelTool/VertexTriangleAdjacency.cpp
Normal file
@@ -0,0 +1,91 @@
|
||||
// Copyright (c) 2012-2020 Wojciech Figat. All rights reserved.
|
||||
|
||||
#if COMPILE_WITH_MODEL_TOOL
|
||||
|
||||
#include "VertexTriangleAdjacency.h"
|
||||
#include "Engine/Core/Math/Math.h"
|
||||
|
||||
VertexTriangleAdjacency::VertexTriangleAdjacency(uint32* indices, int32 indicesCount, uint32 vertexCount, bool computeNumTriangles)
|
||||
{
|
||||
// Compute the number of referenced vertices if it wasn't specified by the caller
|
||||
const uint32* const indicesEnd = indices + indicesCount;
|
||||
if (vertexCount == 0)
|
||||
{
|
||||
for (uint32* triangle = indices; triangle != indicesEnd; triangle += 3)
|
||||
{
|
||||
ASSERT(nullptr != triangle);
|
||||
vertexCount = Math::Max(vertexCount, triangle[0]);
|
||||
vertexCount = Math::Max(vertexCount, triangle[1]);
|
||||
vertexCount = Math::Max(vertexCount, triangle[2]);
|
||||
}
|
||||
}
|
||||
|
||||
NumVertices = vertexCount;
|
||||
uint32* pi;
|
||||
|
||||
// Allocate storage
|
||||
if (computeNumTriangles)
|
||||
{
|
||||
pi = LiveTriangles = new uint32[vertexCount + 1];
|
||||
Platform::MemoryClear(LiveTriangles, sizeof(uint32) * (vertexCount + 1));
|
||||
OffsetTable = new uint32[vertexCount + 2] + 1;
|
||||
}
|
||||
else
|
||||
{
|
||||
pi = OffsetTable = new uint32[vertexCount + 2] + 1;
|
||||
Platform::MemoryClear(OffsetTable, sizeof(uint32) * (vertexCount + 1));
|
||||
LiveTriangles = nullptr; // Important, otherwise the d'tor would crash
|
||||
}
|
||||
|
||||
// Get a pointer to the end of the buffer
|
||||
uint32* piEnd = pi + vertexCount;
|
||||
*piEnd++ = 0u;
|
||||
|
||||
// First pass: compute the number of faces referencing each vertex
|
||||
for (uint32* triangle = indices; triangle != indicesEnd; triangle += 3)
|
||||
{
|
||||
pi[triangle[0]]++;
|
||||
pi[triangle[1]]++;
|
||||
pi[triangle[2]]++;
|
||||
}
|
||||
|
||||
// Second pass: compute the final offset table
|
||||
int32 iSum = 0;
|
||||
uint32* piCurOut = OffsetTable;
|
||||
for (uint32* piCur = pi; piCur != piEnd; ++piCur, piCurOut++)
|
||||
{
|
||||
const int32 iLastSum = iSum;
|
||||
iSum += *piCur;
|
||||
*piCurOut = iLastSum;
|
||||
}
|
||||
pi = this->OffsetTable;
|
||||
|
||||
// Third pass: compute the final table
|
||||
AdjacencyTable = new uint32[iSum];
|
||||
iSum = 0;
|
||||
for (uint32* triangle = indices; triangle != indicesEnd; triangle += 3, iSum++)
|
||||
{
|
||||
uint32 idx = triangle[0];
|
||||
AdjacencyTable[pi[idx]++] = iSum;
|
||||
|
||||
idx = triangle[1];
|
||||
AdjacencyTable[pi[idx]++] = iSum;
|
||||
|
||||
idx = triangle[2];
|
||||
AdjacencyTable[pi[idx]++] = iSum;
|
||||
}
|
||||
|
||||
// Fourth pass: undo the offset computations made during the third pass
|
||||
// We could do this in a separate buffer, but this would be many times slower.
|
||||
OffsetTable--;
|
||||
*OffsetTable = 0u;
|
||||
}
|
||||
|
||||
VertexTriangleAdjacency::~VertexTriangleAdjacency()
|
||||
{
|
||||
delete[] OffsetTable;
|
||||
delete[] AdjacencyTable;
|
||||
delete[] LiveTriangles;
|
||||
}
|
||||
|
||||
#endif
|
||||
80
Source/Engine/Tools/ModelTool/VertexTriangleAdjacency.h
Normal file
@@ -0,0 +1,80 @@
|
||||
// Copyright (c) 2012-2020 Wojciech Figat. All rights reserved.
|
||||
|
||||
#pragma once
|
||||
|
||||
#if COMPILE_WITH_MODEL_TOOL
|
||||
|
||||
#include "Engine/Core/Config.h"
|
||||
#include "Engine/Core/Types/BaseTypes.h"
|
||||
#include "Engine/Platform/Platform.h"
|
||||
|
||||
/// <summary>
|
||||
/// The VertexTriangleAdjacency class computes a vertex-triangle adjacency map from a given index buffer.
|
||||
/// </summary>
|
||||
class VertexTriangleAdjacency
|
||||
{
|
||||
public:
|
||||
|
||||
/// <summary>
|
||||
/// Construction from an existing index buffer
|
||||
/// </summary>
|
||||
/// <param name="indices">The index buffer.</param>
|
||||
/// <param name="indicesCount">The number of triangles in the buffer.</param>
|
||||
/// <param name="vertexCount">The number of referenced vertices. This value is computed automatically if 0 is specified.</param>
|
||||
/// <param name="computeNumTriangles">If you want the class to compute a list containing the number of referenced triangles per vertex per vertex - pass true.</param>
|
||||
VertexTriangleAdjacency(uint32* indices, int32 indicesCount, uint32 vertexCount = 0, bool computeNumTriangles = true);
|
||||
|
||||
/// <summary>
|
||||
/// Destructor
|
||||
/// </summary>
|
||||
~VertexTriangleAdjacency();
|
||||
|
||||
public:
|
||||
|
||||
/// <summary>
|
||||
/// The offset table
|
||||
/// </summary>
|
||||
uint32* OffsetTable;
|
||||
|
||||
/// <summary>
|
||||
/// The adjacency table.
|
||||
/// </summary>
|
||||
uint32* AdjacencyTable;
|
||||
|
||||
/// <summary>
|
||||
/// The table containing the number of referenced triangles per vertex.
|
||||
/// </summary>
|
||||
uint32* LiveTriangles;
|
||||
|
||||
/// <summary>
|
||||
/// The total number of referenced vertices.
|
||||
/// </summary>
|
||||
uint32 NumVertices;
|
||||
|
||||
public:
|
||||
|
||||
/// <summary>
|
||||
/// Gets all triangles adjacent to a vertex.
|
||||
/// </summary>
|
||||
/// <param name="vertexIndex">The index of the vertex.</param>
|
||||
/// <returns>A pointer to the adjacency list.</returns>
|
||||
uint32* GetAdjacentTriangles(uint32 vertexIndex) const
|
||||
{
|
||||
ASSERT(vertexIndex >= 0 && vertexIndex < NumVertices);
|
||||
return &AdjacencyTable[OffsetTable[vertexIndex]];
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets the number of triangles that are referenced by a vertex. This function returns a reference that can be modified.
|
||||
/// </summary>
|
||||
/// <param name="vertexIndex">The index of the vertex.</param>
|
||||
/// <returns>The number of referenced triangles</returns>
|
||||
uint32& GetNumTrianglesPtr(uint32 vertexIndex) const
|
||||
{
|
||||
ASSERT(vertexIndex >= 0 && vertexIndex < NumVertices);
|
||||
ASSERT(nullptr != LiveTriangles);
|
||||
return LiveTriangles[vertexIndex];
|
||||
}
|
||||
};
|
||||
|
||||
#endif
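A short usage sketch of the adjacency map declared above (illustrative only; the index buffer and counts are example values):

uint32 indices[] = { 0, 1, 2, 2, 1, 3 }; // two triangles sharing the edge 1-2
VertexTriangleAdjacency adjacency(indices, 6, 4, true);
// Triangles touching vertex 1 are {0, 1}:
const uint32 count = adjacency.GetNumTrianglesPtr(1);
const uint32* triangles = adjacency.GetAdjacentTriangles(1);
for (uint32 i = 0; i < count; i++)
{
    // triangles[i] is the index (in index-buffer order) of a triangle that references vertex 1
}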