Point Cloud Filtering and Evaluation Metric Computation

This example adds Gaussian noise to the Stanford bunny point cloud (bun_zipper.ply), denoises it with two passes of a simple point-cloud guided filter, and evaluates the result against the clean cloud using unit-sphere-normalized Chamfer and Hausdorff distances.

Main program: main.py

import time

import numpy as np
import open3d as o3d
import torch

from utils import chamfer_distance_unit_sphere, hausdorff_distance_unit_sphere


def main():
    pcd = o3d.io.read_point_cloud('./data/bun_zipper.ply')
    pcd_noisy = add_noise(pcd, 0.004)

    # Extract point coordinates
    points_original = np.asarray(pcd.points)
    points_noisy = np.asarray(pcd_noisy.points)

    pcd_denoised_temp = guided_filter(pcd_noisy, 0.01, 0.1)     # first denoising pass
    pcd_denoised = guided_filter(pcd_denoised_temp, 0.01, 0.1)  # second denoising pass

    # Extract the coordinates of the denoised point cloud
    points_denoised = np.asarray(pcd_denoised.points)
    # Convert the NumPy arrays to PyTorch tensors and add a batch dimension
    points_denoised_tensor = torch.from_numpy(points_denoised[None, :, :]).float()  # shape (1, P, 3)
    points_original_tensor = torch.from_numpy(points_original[None, :, :]).float()  # shape (1, P, 3)

    # Unit-sphere-normalized average Chamfer distance.
    # pytorch3d's chamfer_distance returns a (loss, loss_normals) tuple, so unpack it.
    avg_chamfer, _ = chamfer_distance_unit_sphere(
        points_denoised_tensor, points_original_tensor,
        batch_reduction='mean', point_reduction='mean')
    print("avg_chamfer", avg_chamfer)

    # Unit-sphere-normalized Hausdorff distance
    avg_hausdorff = hausdorff_distance_unit_sphere(points_denoised_tensor, points_original_tensor)
    print("avg_hausdorff", avg_hausdorff)

    # avg_p2m = pointwise_p2m_distance_normalized(pcl, verts, faces)
    # print("avg_p2m", avg_p2m)


    # Timestamp for the output file names
    timestamp = time.strftime("%Y%m%d_%H%M%S")
    # Save the noisy and denoised point clouds
    o3d.io.write_point_cloud(f'./data/bun_zipper_noisy_{timestamp}.ply', pcd_noisy)
    o3d.io.write_point_cloud(f'./data/bun_zipper_denoised_{timestamp}.ply', pcd_denoised)
    o3d.visualization.draw_geometries([pcd_denoised])


def add_noise(pcd, noise_level):
    # Convert point cloud to numpy array
    points = np.asarray(pcd.points)
    # Add Gaussian noise to each point
    noisy_points = points + np.random.normal(0, noise_level, points.shape)
    # Create a new PointCloud object and set its points
    pcd_noisy = o3d.geometry.PointCloud()
    pcd_noisy.points = o3d.utility.Vector3dVector(noisy_points)

    # Optionally, copy other attributes such as normals or colors:
    # if pcd.has_normals():
    #     pcd_noisy.normals = pcd.normals
    # if pcd.has_colors():
    #     pcd_noisy.colors = pcd.colors
    return pcd_noisy


def guided_filter(pcd, radius, epsilon):
    # Build a KD-tree for neighborhood queries
    kdtree = o3d.geometry.KDTreeFlann(pcd)
    # Get the point coordinates
    points = np.asarray(pcd.points)
    num_points = len(points)
    # List collecting the filtered points
    new_points = []

    for i in range(num_points):
        # Search for neighbors within the given radius
        [_, idx, _] = kdtree.search_radius_vector_3d(points[i], radius)
        if len(idx) < 3:
            # Not enough neighbors: keep the original point
            new_points.append(points[i])
            continue

        # Neighbor coordinates
        neighbors = points[idx]
        # Local mean and covariance
        mean = np.mean(neighbors, axis=0)
        cov = np.cov(neighbors.T)
        # epsilon * I regularizes the covariance (the guided-filter smoothing parameter)
        # and keeps the inverse numerically stable
        cov_inv = np.linalg.inv(cov + epsilon * np.eye(3))
        # Local linear model: transform matrix A and offset vector b
        A = cov @ cov_inv
        b = mean - A @ mean
        # Apply the local linear model to the current point
        new_point = A @ points[i] + b
        new_points.append(new_point)

    # Convert the filtered points back to an Open3D PointCloud
    pcd_denoised = o3d.geometry.PointCloud()
    pcd_denoised.points = o3d.utility.Vector3dVector(np.asarray(new_points))
    return pcd_denoised


if __name__ == '__main__':
    main()
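
For reference, the update that guided_filter applies to each point is the local linear model of the guided image filter (He et al.), adapted to 3D radius neighborhoods with the cloud acting as its own guidance. With \mu_i and \Sigma_i the mean and covariance of the neighborhood of point p_i, and \epsilon the epsilon argument:

A_i = \Sigma_i (\Sigma_i + \epsilon I)^{-1}
b_i = \mu_i - A_i \mu_i
p_i' = A_i p_i + b_i

A larger epsilon (or a larger radius) shrinks A_i toward zero and pushes p_i' toward the neighborhood mean, i.e. stronger smoothing; a smaller epsilon keeps A_i close to the identity and preserves more detail.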

Evaluation metrics: utils.py

import math

import torch
import pytorch3d
import pytorch3d.loss
import pytorch3d.ops
import pytorch3d.structures
from pytorch3d.loss.point_mesh_distance import point_face_distance
from torch_cluster import fps


class FCLayer(torch.nn.Module):

    def __init__(self, in_features, out_features, bias=True, activation=None):
        super().__init__()

        self.linear = torch.nn.Linear(in_features, out_features, bias=bias)

        if activation is None:
            self.activation = torch.nn.Identity()
        elif activation == 'relu':
            self.activation = torch.nn.ReLU()
        elif activation == 'elu':
            self.activation = torch.nn.ELU(alpha=1.0)
        elif activation == 'lrelu':
            self.activation = torch.nn.LeakyReLU(0.1)
        else:
            raise ValueError()

    def forward(self, x):
        return self.activation(self.linear(x))


def standard_normal_logprob(z):
    logZ = -0.5 * math.log(2 * math.pi)
    return logZ - z.pow(2) / 2


def truncated_normal_(tensor, mean=0, std=1, trunc_std=2):
    """
    Taken from https://discuss.pytorch.org/t/implementing-truncated-normal-initializer/4778/15
    """
    size = tensor.shape
    tmp = tensor.new_empty(size + (4,)).normal_()
    valid = (tmp < trunc_std) & (tmp > -trunc_std)
    ind = valid.max(-1, keepdim=True)[1]
    tensor.data.copy_(tmp.gather(-1, ind).squeeze(-1))
    tensor.data.mul_(std).add_(mean)
    return tensor


def normalize_sphere(pc, radius=1.0):
    """
    Args:
        pc: A batch of point clouds, (B, N, 3).
    """
    ## Center
    p_max = pc.max(dim=-2, keepdim=True)[0]
    p_min = pc.min(dim=-2, keepdim=True)[0]
    center = (p_max + p_min) / 2    # (B, 1, 3)
    pc = pc - center
    ## Scale
    scale = (pc ** 2).sum(dim=-1, keepdim=True).sqrt().max(dim=-2, keepdim=True)[0] / radius  # (B, 1, 1)
    pc = pc / scale
    return pc, center, scale


def normalize_std(pc, std=1.0):
    """
    Args:
        pc: A batch of point clouds, (B, N, 3).
    """
    center = pc.mean(dim=-2, keepdim=True)   # (B, 1, 3)
    pc = pc - center
    scale = pc.view(pc.size(0), -1).std(dim=-1).view(pc.size(0), 1, 1) / std
    pc = pc / scale
    return pc, center, scale


def normalize_pcl(pc, center, scale):
    return (pc - center) / scale


def denormalize_pcl(pc, center, scale):
    return pc * scale + center


def chamfer_distance_unit_sphere(gen, ref, batch_reduction='mean', point_reduction='mean'):
    """
    Normalizes ref to the unit sphere, applies the same transform to gen,
    and returns pytorch3d's (loss, loss_normals) tuple.
    """
    ref, center, scale = normalize_sphere(ref)
    gen = normalize_pcl(gen, center, scale)
    return pytorch3d.loss.chamfer_distance(gen, ref, batch_reduction=batch_reduction, point_reduction=point_reduction)


def farthest_point_sampling(pcls, num_pnts):
    """
    Args:
        pcls:  A batch of point clouds, (B, N, 3).
        num_pnts:  Target number of points.
    """
    ratio = 0.01 + num_pnts / pcls.size(1)
    sampled = []
    indices = []
    for i in range(pcls.size(0)):
        idx = fps(pcls[i], ratio=ratio, random_start=False)[:num_pnts]
        sampled.append(pcls[i:i+1, idx, :])
        indices.append(idx)
    sampled = torch.cat(sampled, dim=0)
    return sampled, indices


def point_mesh_bidir_distance_single_unit_sphere(pcl, verts, faces):
    """
    Args:
        pcl:    (N, 3).
        verts:  (M, 3).
        faces:  LongTensor, (T, 3).
    Returns:
        Squared pointwise distances, (N, ).
    """
    assert pcl.dim() == 2 and verts.dim() == 2 and faces.dim() == 2, 'Batch is not supported.'
    
    # Normalize mesh
    verts, center, scale = normalize_sphere(verts.unsqueeze(0))
    verts = verts[0]
    # Normalize pcl
    pcl = normalize_pcl(pcl.unsqueeze(0), center=center, scale=scale)
    pcl = pcl[0]

    # print('%.6f %.6f' % (verts.abs().max().item(), pcl.abs().max().item()))

    # Convert them to pytorch3d structures
    pcls = pytorch3d.structures.Pointclouds([pcl])
    meshes = pytorch3d.structures.Meshes([verts], [faces])
    return pytorch3d.loss.point_mesh_face_distance(meshes, pcls)

def pointwise_p2m_distance_normalized(pcl, verts, faces):
    """
    Args:
        pcl:    (N, 3).
        verts:  (M, 3).
        faces:  LongTensor, (T, 3).
    Returns:
        Squared pointwise point-to-mesh distances, (N, ).
    """
    assert pcl.dim() == 2 and verts.dim() == 2 and faces.dim() == 2, 'Batch is not supported.'
    
    # Normalize mesh
    verts, center, scale = normalize_sphere(verts.unsqueeze(0))
    verts = verts[0]
    # Normalize pcl
    pcl = normalize_pcl(pcl.unsqueeze(0), center=center, scale=scale)
    pcl = pcl[0]

    # Convert them to pytorch3d structures
    pcls = pytorch3d.structures.Pointclouds([pcl])
    meshes = pytorch3d.structures.Meshes([verts], [faces])

    # packed representation for pointclouds
    points = pcls.points_packed()  # (P, 3)
    points_first_idx = pcls.cloud_to_packed_first_idx()
    max_points = pcls.num_points_per_cloud().max().item()

    # packed representation for faces
    verts_packed = meshes.verts_packed()
    faces_packed = meshes.faces_packed()
    tris = verts_packed[faces_packed]  # (T, 3, 3)
    tris_first_idx = meshes.mesh_to_faces_packed_first_idx()
    max_tris = meshes.num_faces_per_mesh().max().item()

    # point to face distance: shape (P,)
    point_to_face = point_face_distance(
        points, points_first_idx, tris, tris_first_idx, max_points
    )
    return point_to_face


def hausdorff_distance_unit_sphere(gen, ref):
    """
    Args:
        gen:    (B, N, 3)
        ref:    (B, N, 3)
    Returns:
        (B, ) symmetric Hausdorff distance (in squared-distance units, since
        knn_points returns squared nearest-neighbor distances).
    """
    ref, center, scale = normalize_sphere(ref)
    gen = normalize_pcl(gen, center, scale)

    # For each point in ref, squared distance to its nearest neighbor in gen
    dists_ab, _, _ = pytorch3d.ops.knn_points(ref, gen, K=1)
    dists_ab = dists_ab[:, :, 0].max(dim=1, keepdim=True)[0]  # (B, 1)

    # For each point in gen, squared distance to its nearest neighbor in ref
    dists_ba, _, _ = pytorch3d.ops.knn_points(gen, ref, K=1)
    dists_ba = dists_ba[:, :, 0].max(dim=1, keepdim=True)[0]  # (B, 1)

    # Symmetric Hausdorff distance: maximum of the two directed distances
    dists_hausdorff = torch.max(torch.cat([dists_ab, dists_ba], dim=1), dim=1)[0]

    return dists_hausdorff
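
For reference, the two metrics printed by main.py correspond, after the unit-sphere normalization of the reference cloud, to the definitions below. Note that pytorch3d's chamfer_distance and knn_points work with squared Euclidean distances, so both values come out in squared units:

\mathrm{CD}(X, Y) = \frac{1}{|X|} \sum_{x \in X} \min_{y \in Y} \lVert x - y \rVert_2^2 + \frac{1}{|Y|} \sum_{y \in Y} \min_{x \in X} \lVert x - y \rVert_2^2

\mathrm{HD}(X, Y) = \max\Big( \max_{x \in X} \min_{y \in Y} \lVert x - y \rVert_2^2,\ \max_{y \in Y} \min_{x \in X} \lVert x - y \rVert_2^2 \Big)

The conventional Hausdorff distance uses unsquared distances; taking the square root of the value returned by hausdorff_distance_unit_sphere recovers it.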

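All of the metrics above rely on normalize_sphere / normalize_pcl / denormalize_pcl, so a quick stand-alone round-trip check is easy to run. A minimal sketch, assuming utils.py is on the import path:

import torch
from utils import normalize_sphere, denormalize_pcl

# Two random clouds, deliberately off-center and scaled
pc = torch.randn(2, 1024, 3) * 5.0 + 3.0

pc_n, center, scale = normalize_sphere(pc)

# After normalization every point lies inside (or on) the unit sphere
assert pc_n.norm(dim=-1).max() <= 1.0 + 1e-5

# denormalize_pcl undoes the normalization
assert torch.allclose(denormalize_pcl(pc_n, center, scale), pc, atol=1e-4)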