-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathtest_shape_space.py
106 lines (66 loc) · 3.26 KB
/
test_shape_space.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
from shapemaker import *
def test_ellipse(index):
    """Export the learned ellipsoid shape-space surface for one dataset entry.

    Loads the latent code stored at ``dataset[index][1]``, feeds it to the
    trained shape-space network, and writes the implicit surface to a
    Paraview file via ``shape_space_toParaview``.

    Args:
        index: Row of the ellipsoid dataset to visualize.
    """
    # Pass the path directly: np.load opens and closes the file itself,
    # fixing the file-handle leak of the previous open(...) call.
    dataset = np.load(r"dataset/dataset_ellipsoid.npy", allow_pickle=True)
    # Network input is 3 spatial coordinates + 3 latent dimensions.
    network = ParkEtAl(3 + 3, [520] * 7, [4], FourierFeatures=True, num_features=8, sigma=3)
    network.load_state_dict(torch.load(r"models/shape_space_ellipse.pth", map_location=device))
    network.to(device)
    network.eval()
    # Latent code for this sample as a batch of one (detached to plain numpy first).
    x = np.array([dataset[index][1].detach().cpu().numpy()])
    latent = Tensor(x)
    shape_space_toParaview(network, 127, index, latent)
def test_8D(index):
    """Export the 16-D (no-encoder) shape-space surface for one dataset entry.

    Loads the stored latent code at ``dataset[index][1]``, runs the trained
    shape-space network, and writes the implicit surface to a Paraview file.

    Args:
        index: Row of the 16-D dataset to visualize.
    """
    # np.load manages the file itself; the previous open(...) leaked the handle.
    dataset = np.load(r"dataset/dataset_16D.npy", allow_pickle=True)
    # Network input is 3 spatial coordinates + 16 latent dimensions.
    network = ParkEtAl(3 + 16, [520] * 7, [4], FourierFeatures=True, num_features=8, sigma=3)
    network.load_state_dict(torch.load(r"models/shape_space_16D_NoEncoder_AFF.pth", map_location=device))
    network.to(device)
    network.eval()
    # Latent code for this sample as a batch of one.
    x = np.array([dataset[index][1].detach().cpu().numpy()])
    latent = Tensor(x)
    shape_space_toParaview(network, 160, index, latent)
def test_shape(index):
    """Export the encoder-based 16-D shape-space surface for one dataset entry.

    Encodes the point cloud at ``dataset[index][0]`` with a PointNet
    autoencoder to obtain a global feature vector, then evaluates the
    trained feature-space network and writes the implicit surface to a
    Paraview file via ``shape_space_toParaview2``.

    Args:
        index: Row of the 16-D dataset to visualize.
    """
    # Autoencoder: 3-D points, 2000 points per cloud, 16-D latent space.
    autoencoder = PointNetAutoEncoder(3, 2000, 16)
    autoencoder.load_state_dict(torch.load(r"models/autoencoder64_16D_AT2.pth", map_location=device))
    autoencoder.to(device)
    autoencoder.eval()
    # np.load manages the file itself; the previous open(...) leaked the handle.
    dataset = np.load(r"dataset/dataset_16D.npy", allow_pickle=True)
    network = FeatureSpaceNetwork2(3, [520] * 7, [4], FourierFeatures=True, num_features=8, sigma=3, feature_space=16)
    network.load_state_dict(torch.load(r"models/shape_space_16D_AT2.pth", map_location=device))
    network.to(device)
    network.eval()
    # Batch of one point cloud; gradients kept enabled as in the original flow.
    points = np.array([dataset[index][0]])
    points = Variable(Tensor(points), requires_grad=True).to(device)
    # The autoencoder expects (batch, channels, n_points) — transpose the last two axes.
    inputs = torch.transpose(points, 1, 2)
    # Only the global feature vector is needed; the reconstruction is discarded.
    _reconstructed, global_feat = autoencoder(inputs)
    shape_space_toParaview2(network, 256, index, global_feat)
def test_face(index):
    """Export the learned face shape-space surface for one dataset entry.

    Encodes the face point cloud at ``dataset[index][0]`` with a PointNet
    autoencoder to obtain a 12-D global feature vector, then evaluates the
    trained feature-space network and writes the implicit surface to a
    Paraview file via ``shape_space_toParaview2``.

    Args:
        index: Row of the face dataset to visualize.
    """
    # Autoencoder: 3-D points, 23725 points per cloud, 12-D latent space.
    autoencoder = PointNetAutoEncoder(3, 23725, 12)
    autoencoder.load_state_dict(torch.load(r"models/face_ae5.pth", map_location=device))
    autoencoder.to(device)
    autoencoder.eval()
    # np.load manages the file itself; the previous open(...) leaked the handle.
    dataset = np.load(r"dataset/dataset_faces100.npy", allow_pickle=True)
    network = FeatureSpaceNetwork2(3, [520] * 7, [4], FourierFeatures=True, num_features=8, sigma=3, feature_space=12)
    network.load_state_dict(torch.load(r"models/face_space5.pth", map_location=device))
    network.to(device)
    network.eval()
    # Batch of one point cloud; gradients kept enabled as in the original flow.
    points = np.array([dataset[index][0]])
    points = Variable(Tensor(points), requires_grad=True).to(device)
    # The autoencoder expects (batch, channels, n_points) — transpose the last two axes.
    inputs = torch.transpose(points, 1, 2)
    # Only the global feature vector is needed; the reconstruction is discarded.
    _reconstructed, global_feat = autoencoder(inputs)
    shape_space_toParaview2(network, 120, index, global_feat)
if __name__ == "__main__":
    # Guard the driver loop so importing this module does not trigger
    # model loading and Paraview export as a side effect.
    for i in range(15):
        print(i)
        test_face(i)