Main program: main.py
import time

import numpy as np
import open3d as o3d
import torch

from utils import chamfer_distance_unit_sphere, hausdorff_distance_unit_sphere

def main():
    # Load the Stanford bunny, add Gaussian noise, then denoise with two
    # passes of the guided filter.
    pcd = o3d.io.read_point_cloud('./data/bun_zipper.ply')
    pcd_noisy = add_noise(pcd, 0.004)
    points_original = np.asarray(pcd.points)

    pcd_denoised_temp = guided_filter(pcd_noisy, 0.01, 0.1)
    pcd_denoised = guided_filter(pcd_denoised_temp, 0.01, 0.1)
    points_denoised = np.asarray(pcd_denoised.points)

    # The metrics expect batched float tensors of shape (B, N, 3).
    points_denoised_tensor = torch.from_numpy(points_denoised[None, :, :]).float()
    points_original_tensor = torch.from_numpy(points_original[None, :, :]).float()

    # chamfer_distance_unit_sphere returns pytorch3d's (loss, loss_normals) tuple.
    avg_chamfer, _ = chamfer_distance_unit_sphere(
        points_denoised_tensor, points_original_tensor,
        batch_reduction='mean', point_reduction='mean',
    )
    print("avg_chamfer", avg_chamfer)
    avg_hausdorff = hausdorff_distance_unit_sphere(points_denoised_tensor, points_original_tensor)
    print("avg_hausdorff", avg_hausdorff)

    timestamp = time.strftime("%Y%m%d_%H%M%S")
    o3d.io.write_point_cloud(f'./data/bun_zipper_noisy_{timestamp}.ply', pcd_noisy)
    o3d.io.write_point_cloud(f'./data/bun_zipper_denoised_{timestamp}.ply', pcd_denoised)
    o3d.visualization.draw_geometries([pcd_denoised])

def add_noise(pcd, noise_level):
    """Perturb every point with isotropic Gaussian noise of the given std."""
    points = np.asarray(pcd.points)
    noisy_points = points + np.random.normal(0, noise_level, points.shape)
    pcd_noisy = o3d.geometry.PointCloud()
    pcd_noisy.points = o3d.utility.Vector3dVector(noisy_points)
    return pcd_noisy

def guided_filter(pcd, radius, epsilon):
    """One pass of a guided filter adapted to point clouds, using the cloud
    itself as the guidance signal."""
    kdtree = o3d.geometry.KDTreeFlann(pcd)
    points = np.asarray(pcd.points)
    num_points = len(points)
    new_points = []
    for i in range(num_points):
        [_, idx, _] = kdtree.search_radius_vector_3d(points[i], radius)
        if len(idx) < 3:
            # Too few neighbors for a stable covariance; keep the point as-is.
            new_points.append(points[i])
            continue
        neighbors = points[idx]
        mean = np.mean(neighbors, axis=0)
        cov = np.cov(neighbors.T)
        # Per-neighborhood linear model: A = C (C + eps I)^-1, b = mu - A mu.
        cov_inv = np.linalg.inv(cov + epsilon * np.eye(3))
        A = cov @ cov_inv
        b = mean - A @ mean
        new_points.append(A @ points[i] + b)
    pcd_denoised = o3d.geometry.PointCloud()
    pcd_denoised.points = o3d.utility.Vector3dVector(np.asarray(new_points))
    return pcd_denoised

if __name__ == '__main__':
    main()

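For reference, each guided_filter pass fits a local linear model around every point p_i. With neighborhood mean \mu_i and covariance \Sigma_i, the update computed in the loop is

    A_i = \Sigma_i (\Sigma_i + \epsilon I)^{-1}, \qquad b_i = \mu_i - A_i \mu_i, \qquad p_i' = A_i p_i + b_i

Larger \epsilon shrinks A_i toward zero and pulls points toward their neighborhood mean (stronger smoothing); main() runs two passes with radius 0.01 and \epsilon = 0.1 to smooth more aggressively than a single pass would.
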
Evaluation metrics: utils.py
import math

import numpy as np
import torch

import pytorch3d.loss
import pytorch3d.ops  # needed by knn_points in hausdorff_distance_unit_sphere
import pytorch3d.structures
from pytorch3d.loss.point_mesh_distance import point_face_distance
from torch_cluster import fps

class FCLayer(torch.nn.Module):
    def __init__(self, in_features, out_features, bias=True, activation=None):
        super().__init__()
        self.linear = torch.nn.Linear(in_features, out_features, bias=bias)
        if activation is None:
            self.activation = torch.nn.Identity()
        elif activation == 'relu':
            self.activation = torch.nn.ReLU()
        elif activation == 'elu':
            self.activation = torch.nn.ELU(alpha=1.0)
        elif activation == 'lrelu':
            self.activation = torch.nn.LeakyReLU(0.1)
        else:
            raise ValueError('Unsupported activation: %s' % activation)

    def forward(self, x):
        return self.activation(self.linear(x))

def standard_normal_logprob(z):
    logZ = -0.5 * math.log(2 * math.pi)
    return logZ - z.pow(2) / 2

def truncated_normal_(tensor, mean=0, std=1, trunc_std=2):
    """
    In-place truncated-normal init: draws 4 candidates per element and keeps
    one inside [-trunc_std, trunc_std].
    Taken from https://discuss.pytorch.org/t/implementing-truncated-normal-initializer/4778/15
    """
    size = tensor.shape
    tmp = tensor.new_empty(size + (4,)).normal_()
    valid = (tmp < trunc_std) & (tmp > -trunc_std)
    ind = valid.max(-1, keepdim=True)[1]
    tensor.data.copy_(tmp.gather(-1, ind).squeeze(-1))
    tensor.data.mul_(std).add_(mean)
    return tensor

def normalize_sphere(pc, radius=1.0):
    """
    Center each cloud on its bounding-box center and scale it into a sphere
    of the given radius.
    Args:
        pc: A batch of point clouds, (B, N, 3).
    Returns:
        Normalized clouds, centers (B, 1, 3), and scales (B, 1, 1).
    """
    p_max = pc.max(dim=-2, keepdim=True)[0]
    p_min = pc.min(dim=-2, keepdim=True)[0]
    center = (p_max + p_min) / 2
    pc = pc - center
    scale = (pc ** 2).sum(dim=-1, keepdim=True).sqrt().max(dim=-2, keepdim=True)[0] / radius
    pc = pc / scale
    return pc, center, scale

def normalize_std(pc, std=1.0):
    """
    Center each cloud on its mean and scale it to the given standard deviation.
    Args:
        pc: A batch of point clouds, (B, N, 3).
    """
    center = pc.mean(dim=-2, keepdim=True)
    pc = pc - center
    scale = pc.view(pc.size(0), -1).std(dim=-1).view(pc.size(0), 1, 1) / std
    pc = pc / scale
    return pc, center, scale

def normalize_pcl(pc, center, scale):
    return (pc - center) / scale


def denormalize_pcl(pc, center, scale):
    return pc * scale + center

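# Sanity check: normalize_sphere and denormalize_pcl invert each other.
# A minimal sketch (shapes chosen arbitrarily for illustration):
#
#   pc = torch.rand(4, 1024, 3)
#   pc_n, center, scale = normalize_sphere(pc)      # each cloud fits the unit sphere
#   pc_back = denormalize_pcl(pc_n, center, scale)  # undo the normalization
#   assert torch.allclose(pc, pc_back, atol=1e-6)   # equal up to float error
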
def chamfer_distance_unit_sphere(gen, ref, batch_reduction='mean', point_reduction='mean'):
    # Normalize both clouds into the unit sphere of the reference before
    # measuring, so the metric is scale-invariant.
    ref, center, scale = normalize_sphere(ref)
    gen = normalize_pcl(gen, center, scale)
    return pytorch3d.loss.chamfer_distance(gen, ref, batch_reduction=batch_reduction, point_reduction=point_reduction)

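# Note: this returns pytorch3d's (loss, loss_normals) tuple, which is why
# main() unpacks it. A minimal sketch with random inputs (illustrative only):
#
#   gen = torch.rand(1, 2048, 3)
#   ref = torch.rand(1, 2048, 3)
#   cd, _ = chamfer_distance_unit_sphere(gen, ref)  # loss_normals is None here
#   print(cd)                                       # scalar tensor
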
def farthest_point_sampling(pcls, num_pnts):
    """
    Args:
        pcls: A batch of point clouds, (B, N, 3).
        num_pnts: Target number of points.
    """
    # Pad the ratio slightly so fps yields at least num_pnts indices,
    # then truncate to exactly num_pnts.
    ratio = 0.01 + num_pnts / pcls.size(1)
    sampled = []
    indices = []
    for i in range(pcls.size(0)):
        idx = fps(pcls[i], ratio=ratio, random_start=False)[:num_pnts]
        sampled.append(pcls[i:i+1, idx, :])
        indices.append(idx)
    sampled = torch.cat(sampled, dim=0)
    return sampled, indices

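# Usage sketch (assumes torch_cluster is installed; sizes are illustrative):
#
#   pcls = torch.rand(2, 2048, 3)
#   sampled, indices = farthest_point_sampling(pcls, 1024)
#   print(sampled.shape)  # torch.Size([2, 1024, 3])
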
def point_mesh_bidir_distance_single_unit_sphere(pcl, verts, faces):
    """
    Args:
        pcl: (N, 3).
        verts: (M, 3).
        faces: LongTensor, (T, 3).
    Returns:
        The symmetric point-to-mesh distance (a scalar tensor), computed
        after normalizing the mesh into the unit sphere.
    """
    assert pcl.dim() == 2 and verts.dim() == 2 and faces.dim() == 2, 'Batch is not supported.'

    # Normalize the mesh, then apply the same transform to the point cloud.
    verts, center, scale = normalize_sphere(verts.unsqueeze(0))
    verts = verts[0]
    pcl = normalize_pcl(pcl.unsqueeze(0), center=center, scale=scale)
    pcl = pcl[0]

    pcls = pytorch3d.structures.Pointclouds([pcl])
    meshes = pytorch3d.structures.Meshes([verts], [faces])
    return pytorch3d.loss.point_mesh_face_distance(meshes, pcls)

def pointwise_p2m_distance_normalized(pcl, verts, faces):
    """
    Like the function above, but returns the squared point-to-face distance
    for every point, (N, ), instead of a reduced scalar.
    """
    assert pcl.dim() == 2 and verts.dim() == 2 and faces.dim() == 2, 'Batch is not supported.'

    verts, center, scale = normalize_sphere(verts.unsqueeze(0))
    verts = verts[0]
    pcl = normalize_pcl(pcl.unsqueeze(0), center=center, scale=scale)
    pcl = pcl[0]

    pcls = pytorch3d.structures.Pointclouds([pcl])
    meshes = pytorch3d.structures.Meshes([verts], [faces])

    # Unpack the packed representations expected by the low-level kernel.
    points = pcls.points_packed()
    points_first_idx = pcls.cloud_to_packed_first_idx()
    max_points = pcls.num_points_per_cloud().max().item()
    verts_packed = meshes.verts_packed()
    faces_packed = meshes.faces_packed()
    tris = verts_packed[faces_packed]
    tris_first_idx = meshes.mesh_to_faces_packed_first_idx()

    point_to_face = point_face_distance(
        points, points_first_idx, tris, tris_first_idx, max_points
    )
    return point_to_face

def hausdorff_distance_unit_sphere(gen, ref):
    """
    Args:
        gen: (B, N, 3)
        ref: (B, N, 3)
    Returns:
        (B, ) Hausdorff distances in squared form (knn_points returns
        squared nearest-neighbor distances).
    """
    ref, center, scale = normalize_sphere(ref)
    gen = normalize_pcl(gen, center, scale)

    dists_ab, _, _ = pytorch3d.ops.knn_points(ref, gen, K=1)
    dists_ab = dists_ab[:, :, 0].max(dim=1, keepdim=True)[0]

    dists_ba, _, _ = pytorch3d.ops.knn_points(gen, ref, K=1)
    dists_ba = dists_ba[:, :, 0].max(dim=1, keepdim=True)[0]

    dists_hausdorff = torch.max(torch.cat([dists_ab, dists_ba], dim=1), dim=1)[0]
    return dists_hausdorff
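
# Usage sketch (random inputs, purely illustrative):
#
#   gen = torch.rand(1, 2048, 3)
#   ref = torch.rand(1, 2048, 3)
#   hd = hausdorff_distance_unit_sphere(gen, ref)
#   print(hd.shape)  # torch.Size([1]); one squared distance per batch element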