JORNet.py
import HALNet
from HALNet import HALNet as HALNet_class
import torch.nn as nn
from magic import cudafy


class JORNet(HALNet_class):
    # Flattened size of the main-branch feature map (256 channels at 16x16).
    innerprod1_size = 256 * 16 * 16
    # Spatial resolution of the input hand crop (and of the output heatmaps).
    crop_res = (128, 128)

    def map_out_to_loss(self, innerprod1_size):
        return cudafy(nn.Linear(in_features=innerprod1_size, out_features=200),
                      self.use_cuda)

    def map_out_conv(self, in_channels):
        return cudafy(HALNet.HALNetConvBlock(
            kernel_size=3, stride=1, filters=21, in_channels=in_channels, padding=1),
            self.use_cuda)
    def __init__(self, params_dict):
        super(JORNet, self).__init__(params_dict)
        self.num_joints = 20
        # Final conv mapping the 256-channel trunk output to 21 heatmap channels.
        self.main_loss_conv = cudafy(HALNet.HALNetConvBlock(
            kernel_size=3, stride=1, filters=21, in_channels=256, padding=1),
            self.use_cuda)
        # Upsample the heatmaps back to the crop resolution.
        self.main_loss_deconv1 = cudafy(nn.Upsample(size=self.crop_res, mode='bilinear'),
                                        self.use_cuda)
        if self.cross_entropy:
            self.softmax_final = cudafy(HALNet.SoftmaxLogProbability2D(), self.use_cuda)
        # Per-branch joint regressors: flatten an intermediate feature map,
        # project it to 200 features, then to num_joints * 3 coordinates.
        self.innerproduct1_joint1 = cudafy(
            nn.Linear(in_features=524288, out_features=200), self.use_cuda)
        self.innerproduct2_joint1 = cudafy(
            nn.Linear(in_features=200, out_features=self.num_joints * 3), self.use_cuda)
        self.innerproduct1_joint2 = cudafy(
            nn.Linear(in_features=262144, out_features=200), self.use_cuda)
        self.innerproduct2_joint2 = cudafy(
            nn.Linear(in_features=200, out_features=self.num_joints * 3), self.use_cuda)
        self.innerproduct1_joint3 = cudafy(
            nn.Linear(in_features=131072, out_features=200), self.use_cuda)
        self.innerproduct2_joint3 = cudafy(
            nn.Linear(in_features=200, out_features=self.num_joints * 3), self.use_cuda)
        self.innerproduct1_joint_main = cudafy(
            nn.Linear(in_features=65536, out_features=200), self.use_cuda)
        self.innerproduct2_joint_main = cudafy(
            nn.Linear(in_features=200, out_features=self.num_joints * 3), self.use_cuda)
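
    # Note: the hard-coded in_features above are the flattened sizes of the four
    # intermediate feature maps for a 128x128 crop. Only the main branch is
    # pinned down by innerprod1_size (65536 = 256 * 16 * 16); how 524288, 262144
    # and 131072 split into channels and spatial extent depends on the HALNet trunk.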
    def forward(self, x):
        # Shared HALNet trunk: three intermediate heatmap predictions plus the
        # feature maps they were computed from.
        out_intermed_hm1, out_intermed_hm2, out_intermed_hm3, conv4fout, \
            res3aout, res4aout, conv4eout = self.forward_subnet(x)
        out_intermed_hm_main = self.forward_main_loss(conv4fout)
        # Joint branch 1: flatten res3a features and regress num_joints * 3 values.
        innerprod1_size = res3aout.shape[1] * res3aout.shape[2] * res3aout.shape[3]
        out_intermed_j1 = res3aout.view(-1, innerprod1_size)
        out_intermed_j1 = self.innerproduct1_joint1(out_intermed_j1)
        out_intermed_j1 = self.innerproduct2_joint1(out_intermed_j1)
        # Joint branch 2: res4a features.
        innerprod1_size = res4aout.shape[1] * res4aout.shape[2] * res4aout.shape[3]
        out_intermed_j2 = res4aout.view(-1, innerprod1_size)
        out_intermed_j2 = self.innerproduct1_joint2(out_intermed_j2)
        out_intermed_j2 = self.innerproduct2_joint2(out_intermed_j2)
        # Joint branch 3: conv4e features.
        innerprod1_size = conv4eout.shape[1] * conv4eout.shape[2] * conv4eout.shape[3]
        out_intermed_j3 = conv4eout.view(-1, innerprod1_size)
        out_intermed_j3 = self.innerproduct1_joint3(out_intermed_j3)
        out_intermed_j3 = self.innerproduct2_joint3(out_intermed_j3)
        # Main joint branch: conv4f features.
        innerprod1_size = conv4fout.shape[1] * conv4fout.shape[2] * conv4fout.shape[3]
        out_intermed_j_main = conv4fout.view(-1, innerprod1_size)
        out_intermed_j_main = self.innerproduct1_joint_main(out_intermed_j_main)
        out_intermed_j_main = self.innerproduct2_joint_main(out_intermed_j_main)
        # Four heatmap outputs followed by four joint-position outputs.
        return out_intermed_hm1, out_intermed_hm2, out_intermed_hm3, out_intermed_hm_main, \
            out_intermed_j1, out_intermed_j2, out_intermed_j3, out_intermed_j_main
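

# A minimal smoke-test sketch, not part of the original module. It assumes
# params_dict only needs the 'use_cuda' and 'cross_entropy' keys read above
# (the HALNet base class may require more), and that the net takes 4-channel
# (e.g. RGB-D) 128x128 crops, consistent with the hard-coded Linear sizes;
# adjust the channel count if your HALNet variant expects RGB only.
if __name__ == "__main__":
    import torch

    net = JORNet({'use_cuda': False, 'cross_entropy': False})
    x = torch.randn(1, 4, 128, 128)  # assumed layout: (batch, channels, H, W)
    hm1, hm2, hm3, hm_main, j1, j2, j3, j_main = net(x)
    # Each joint output should have shape (batch, num_joints * 3).
    print(j_main.shape)  # expected: torch.Size([1, 60])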