Skip to content

Commit 041e36f

Browse files
committed
Fixed Entropy regularization error
1 parent 6b85553 commit 041e36f

2 files changed

Lines changed: 78 additions & 4 deletions

File tree

camera_pose_visualizer.py

Lines changed: 73 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,73 @@
1+
import json
2+
import os
3+
import numpy as np
4+
import matplotlib as mpl
5+
import matplotlib.pyplot as plt
6+
from matplotlib.patches import Patch
7+
from mpl_toolkits.mplot3d.art3d import Poly3DCollection
8+
import pdb
9+
10+
class CameraPoseVisualizer:
    """Visualize camera extrinsics as wireframe pyramids (frusta) in a 3D matplotlib plot."""

    def __init__(self, xlim, ylim, zlim):
        """Create the 3D figure and axes.

        Args:
            xlim, ylim, zlim: (min, max) limits for the x, y and z axes.
        """
        self.fig = plt.figure(figsize=(18, 7))
        # BUGFIX: fig.gca(projection='3d') was deprecated in matplotlib 3.4 and
        # removed in 3.6 — add_subplot is the supported way to get a 3D axes.
        self.ax = self.fig.add_subplot(projection='3d')
        self.ax.set_aspect("auto")
        self.ax.set_xlim(xlim)
        self.ax.set_ylim(ylim)
        self.ax.set_zlim(zlim)
        self.ax.set_xlabel('x')
        self.ax.set_ylabel('y')
        self.ax.set_zlabel('z')
        print('initialize camera pose visualizer')

    def extrinsic2pyramid(self, extrinsic, color='r', focal_len_scaled=5, aspect_ratio=0.3):
        """Add one camera frustum pyramid for a 4x4 camera-to-world matrix.

        Args:
            extrinsic: 4x4 camera-to-world transform (homogeneous).
            color: matplotlib color for faces and edges.
            focal_len_scaled: z-extent of the pyramid (apex-to-base distance).
            aspect_ratio: half-width of the base relative to focal_len_scaled.
        """
        # Negate so the pyramid extends along -z in camera coordinates
        # (the camera here looks down -z, e.g. Blender/NeRF convention —
        # TODO confirm against the dataset's convention).
        focal_len_scaled = -1 * focal_len_scaled
        # Apex (camera centre) followed by the four base corners, in
        # homogeneous coordinates so the 4x4 extrinsic applies directly.
        vertex_std = np.array([
            [0, 0, 0, 1],
            [focal_len_scaled * aspect_ratio, -focal_len_scaled * aspect_ratio, focal_len_scaled, 1],
            [focal_len_scaled * aspect_ratio, focal_len_scaled * aspect_ratio, focal_len_scaled, 1],
            [-focal_len_scaled * aspect_ratio, focal_len_scaled * aspect_ratio, focal_len_scaled, 1],
            [-focal_len_scaled * aspect_ratio, -focal_len_scaled * aspect_ratio, focal_len_scaled, 1]])
        # Row-vector transform: (V @ E^T) equals (E @ V^T)^T.
        vertex_transformed = vertex_std @ extrinsic.T
        # Four side triangles (apex + two adjacent base corners) plus the base
        # quad; [:-1] drops the homogeneous coordinate.  (Normalized the one
        # inconsistent vertex_transformed[1][:-1] indexing to the [1, :-1] form.)
        meshes = [[vertex_transformed[0, :-1], vertex_transformed[1, :-1], vertex_transformed[2, :-1]],
                  [vertex_transformed[0, :-1], vertex_transformed[2, :-1], vertex_transformed[3, :-1]],
                  [vertex_transformed[0, :-1], vertex_transformed[3, :-1], vertex_transformed[4, :-1]],
                  [vertex_transformed[0, :-1], vertex_transformed[4, :-1], vertex_transformed[1, :-1]],
                  [vertex_transformed[1, :-1], vertex_transformed[2, :-1], vertex_transformed[3, :-1], vertex_transformed[4, :-1]]]
        self.ax.add_collection3d(
            Poly3DCollection(meshes, facecolors=color, linewidths=0.3, edgecolors=color, alpha=0.35))

    def customize_legend(self, list_label):
        """Attach a legend with one rainbow-colored patch per label."""
        list_handle = []
        for idx, label in enumerate(list_label):
            color = plt.cm.rainbow(idx / len(list_label))
            patch = Patch(color=color, label=label)
            list_handle.append(patch)
        plt.legend(loc='right', bbox_to_anchor=(1.8, 0.5), handles=list_handle)

    def colorbar(self, max_frame_length):
        """Add a vertical rainbow colorbar mapping [0, max_frame_length] to frame number."""
        cmap = mpl.cm.rainbow
        norm = mpl.colors.Normalize(vmin=0, vmax=max_frame_length)
        # BUGFIX: on matplotlib >= 3.6 a colorbar for a ScalarMappable that is
        # not attached to any axes requires an explicit ax= (or cax=) argument.
        self.fig.colorbar(mpl.cm.ScalarMappable(norm=norm, cmap=cmap),
                          ax=self.ax, orientation='vertical', label='Frame Number')

    def show(self):
        """Set the title and display the figure (blocks until the window closes)."""
        plt.title('Extrinsic Parameters')
        plt.show()
56+
if __name__ == '__main__':
    # Read every camera-to-world matrix from the Blender-format transforms file.
    transforms_file = os.path.join('data/nerf_synthetic/chair/', 'transforms_train.json')
    with open(transforms_file, 'r') as fp:
        meta = json.load(fp)
    poses = [np.array(frame['transform_matrix']) for frame in meta['frames']]

    # Bounding box of the camera centres: the translation part (last column,
    # first three rows) of each pose.
    t_arr = np.array([pose[:3, -1] for pose in poses])
    mins = t_arr.min(axis=0)
    maxes = t_arr.max(axis=0)

    # argument : the minimum/maximum value of x, y, z (padded by 1 on each side)
    visualizer = CameraPoseVisualizer([mins[0] - 1, maxes[0] + 1],
                                      [mins[1] - 1, maxes[1] + 1],
                                      [mins[2] - 1, maxes[2] + 1])

    # argument : extrinsic matrix, color, scaled focal length(z-axis length of frame body of camera
    for pose in poses:
        visualizer.extrinsic2pyramid(pose, 'c', 1)

    visualizer.show()

run_nerf.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -360,9 +360,11 @@ def raw2outputs(raw, z_vals, rays_d, raw_noise_std=0, white_bkgd=False, pytest=F
360360
rgb_map = rgb_map + (1.-acc_map[...,None])
361361

362362
# Calculate weights sparsity loss
363-
mask = weights.sum(-1) > 0.5
364-
entropy = Categorical(probs = weights+1e-5).entropy()
365-
sparsity_loss = entropy * mask
363+
try:
364+
entropy = Categorical(probs = torch.cat([weights, 1.0-weights.sum(-1, keepdim=True)+1e-6], dim=-1)).entropy()
365+
except:
366+
pdb.set_trace()
367+
sparsity_loss = entropy
366368

367369
return rgb_map, disp_map, acc_map, weights, depth_map, sparsity_loss
368370

@@ -645,7 +647,6 @@ def train():
645647
elif args.dataset_type == 'blender':
646648
images, poses, render_poses, hwf, i_split, bounding_box = load_blender_data(args.datadir, args.half_res, args.testskip)
647649
args.bounding_box = bounding_box
648-
pdb.set_trace()
649650
print('Loaded blender', images.shape, render_poses.shape, hwf, args.datadir)
650651
i_train, i_val, i_test = i_split
651652

0 commit comments

Comments
 (0)