From a874ea78fb53b92f2d339d7fabcab2e1b91a1382 Mon Sep 17 00:00:00 2001 From: mckay Date: Wed, 19 Feb 2025 23:56:31 +0800 Subject: [PATCH] feat: Enhance LossManager with advanced loss computation methods - Added `position_loss` method to calculate manifold loss using mean absolute value - Implemented `normals_loss` method to compute normal vector loss with gradient calculation - Updated `train.py` to log input and output tensor shapes during training - Modified network architecture in `train.py` by increasing hidden layer dimensions from [64, 64, 64] to [256, 256, 256] --- code/conversion/loss.py | 36 ++++++++++++++++++++++++++++++++++-- code/conversion/train.py | 14 +++----------- 2 files changed, 37 insertions(+), 13 deletions(-) diff --git a/code/conversion/loss.py b/code/conversion/loss.py index c70c433..c5bc997 100644 --- a/code/conversion/loss.py +++ b/code/conversion/loss.py @@ -1,11 +1,43 @@ - - +import torch class LossManager: def __init__(self): pass + def position_loss(self, outputs): + """ + 计算流型损失的逻辑 + + :param outputs: 模型的输出 + :return: 计算得到的流型损失值 + """ + + # 计算流型损失(这里使用平均绝对值作为示例) + manifold_loss = (outputs.abs()).mean() # 计算流型损失 + return manifold_loss + + def normals_loss(self, cur_data: torch.Tensor, mnfld_pnts: torch.Tensor, all_fi: torch.Tensor, patch_sup: bool) -> torch.Tensor: + """ + 计算法线损失 + + :param cur_data: 当前数据,包含法线信息 + :param mnfld_pnts: 流型点 + :param all_fi: 所有流型预测值 + :param patch_sup: 是否支持补丁 + :return: 计算得到的法线损失 + """ + # 提取法线 + normals = cur_data[:, -self.d_in:] + + # 计算分支梯度 + branch_grad = gradient(mnfld_pnts, all_fi[:, 0]) # 计算分支梯度 + + # 计算法线损失 + normals_loss = (((branch_grad - normals).abs()).norm(2, dim=1)).mean() # 计算法线损失 + + return self.normals_lambda * normals_loss # 返回加权后的法线损失 + def compute_loss(self, outputs): """ 计算流型损失的逻辑 diff --git a/code/conversion/train.py b/code/conversion/train.py index 51fb2b0..a46a4fd 100644 --- a/code/conversion/train.py +++ b/code/conversion/train.py @@ -23,7 +23,7 @@ class NHREPNet_Training: # 初始化模型 
d_in = 6 # 输入维度,例如 3D 坐标 - dims_sdf = [64, 64, 64] # 隐藏层维度 + dims_sdf = [256, 256, 256] # 隐藏层维度 csg_tree, _ = self.dataset.get_csg_tree() self.loss_manager = LossManager() self.model = NHRepNet(d_in, dims_sdf, csg_tree).to(self.device) # 实例化模型并移动到设备 @@ -51,9 +51,11 @@ class NHREPNet_Training: # 获取输入数据 input_data = self.dataset.get_data().to(self.device) # 获取数据并移动到设备 + logger.info(f"输入数据: {input_data.shape}") # 前向传播 outputs = self.model(input_data) # 使用模型进行前向传播 + logger.info(f"输出数据: {outputs.shape}") # 计算损失 loss = self.loss_manager.compute_loss(outputs) # 计算损失 @@ -68,17 +70,7 @@ class NHREPNet_Training: logger.info(f'Epoch [{epoch}/{self.nepochs}], Average Loss: {avg_loss:.4f}') self.writer.add_scalar('Loss/train', avg_loss, epoch) # 记录损失到 TensorBoard - def compute_loss(self, outputs): - """ - 计算流型损失的逻辑 - :param outputs: 模型的输出 - :return: 计算得到的流型损失值 - """ - - # 计算流型损失(这里使用均方误差作为示例) - manifold_loss = (outputs.abs()).mean() # 计算流型损失 - return manifold_loss if __name__ == "__main__": data_dir = '../data/input_data' # 数据目录