
feat: Add advanced loss computation methods to LossManager

- Implemented `eikonal_loss` to compute gradient regularization for non-manifold points
- Added `offsurface_loss` to penalize points far from the surface with near-zero predictions
- Introduced `consistency_loss` to enforce prediction consistency between manifold and non-manifold points
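
A minimal standalone sketch of what the three new terms compute, on toy data. The point count, dimensionality, branch count, and the toy predictions are assumptions for illustration only; the formulas mirror the diff below rather than the full NH-Rep training loop.

import torch

# Toy shapes: n points in d dimensions, k prediction branches (all assumed).
n, d, k = 128, 3, 4
nonmnfld_pnts = torch.randn(n, d, requires_grad=True)
# Toy predictions: distance to the origin, repeated across branches, so the
# Eikonal term is near zero by construction.
nonmnfld_pred_all = (nonmnfld_pnts ** 2).sum(-1, keepdim=True).sqrt().repeat(1, k)
mnfld_pred_all = torch.randn(n, k)
all_fi = torch.randn(n, k)

# Eikonal term: the gradient norm of the first output should be 1.
grad = torch.autograd.grad(nonmnfld_pred_all[:, 0].sum(), nonmnfld_pnts, create_graph=True)[0]
eikonal = ((grad.norm(2, dim=-1) - 1) ** 2).mean()

# Off-surface term: penalize off-surface points whose prediction is near zero.
offsurface = torch.exp(-100.0 * torch.abs(nonmnfld_pred_all[:, 0])).mean()

# Consistency term: the composed manifold prediction should match the branch value.
consistency = (mnfld_pred_all[:, 0] - all_fi[:, 0]).abs().mean()

print(eikonal.item(), offsurface.item(), consistency.item())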
NH-Rep
mckay 3 weeks ago
commit ee7bf687b2
1 changed file with 24 additions and 0 deletions:
  code/conversion/loss.py

@@ -38,6 +38,30 @@ class LossManager:
        return self.normals_lambda * normals_loss  # return the weighted normals loss

    def eikonal_loss(self, nonmnfld_pnts, nonmnfld_pred_all):
        """
        Compute the Eikonal loss.
        """
        single_nonmnfld_grad = gradient(nonmnfld_pnts, nonmnfld_pred_all[:, 0])  # gradient of the composed prediction w.r.t. the non-manifold points
        eikonal_loss = ((single_nonmnfld_grad.norm(2, dim=-1) - 1) ** 2).mean()  # penalize deviation of the gradient norm from 1
        return eikonal_loss

    def offsurface_loss(self, nonmnfld_pnts, nonmnfld_pred_all):
        """
        Eo: penalize points that are far from the surface but whose predicted value is close to zero.
        """
        offsurface_loss = torch.exp(-100.0 * torch.abs(nonmnfld_pred_all[:, 0])).mean()  # off-surface penalty
        return offsurface_loss

    def consistency_loss(self, mnfld_pnts, mnfld_pred_all, all_fi):
        """
        Penalize points where the manifold prediction and the non-manifold prediction are inconsistent.
        """
        mnfld_consistency_loss = (mnfld_pred_all[:, 0] - all_fi[:, 0]).abs().mean()  # manifold consistency penalty
        return mnfld_consistency_loss

    def compute_loss(self, outputs):
        """
        Logic for computing the manifold loss

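The `gradient` helper called by `eikonal_loss` is not part of this diff. A minimal sketch of the usual autograd-based implementation, assuming the input points tensor was created with `requires_grad=True`, could look like this:

import torch

def gradient(inputs, outputs):
    # Differentiate the scalar predictions w.r.t. the input points via autograd,
    # keeping the graph so the result can itself be backpropagated through.
    grad_outputs = torch.ones_like(outputs)
    points_grad = torch.autograd.grad(
        outputs=outputs,
        inputs=inputs,
        grad_outputs=grad_outputs,
        create_graph=True,
        retain_graph=True,
    )[0]
    return points_grad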