-
Notifications
You must be signed in to change notification settings - Fork 10
/
VLAD.py
49 lines (40 loc) · 2.02 KB
/
VLAD.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
from keras import engine
from keras import backend as K
import tensorflow as tf
class VladPooling(engine.Layer):
    '''NetVLAD / GhostVLAD pooling layer.

    Aggregates per-location descriptors into a fixed-length vector by
    softly assigning each descriptor to learned cluster centers and
    summing the assignment-weighted residuals.

    Inputs (list of two tensors):
        feat:          bz x W x H x D -- local descriptors.
        cluster_score: bz x W x H x (k_centers + g_centers) -- assignment logits.

    Output:
        bz x (k_centers * D) flattened descriptor. In 'gvlad' mode the
        g_centers trailing "ghost" clusters absorb noisy assignments and
        are dropped from the output.

    References: NetVLAD (Arandjelovic et al.), GhostVLAD (Zhong et al.).
    '''
    def __init__(self, mode, k_centers, g_centers=0, **kwargs):
        # mode: 'vlad' or 'gvlad'. Any other value behaves like 'vlad'
        # (the ghost-cluster crop in call() only triggers on 'gvlad').
        self.k_centers = k_centers
        self.g_centers = g_centers
        self.mode = mode
        super(VladPooling, self).__init__(**kwargs)
    def build(self, input_shape):
        # One trainable center per (real + ghost) cluster; descriptor
        # dimension D is taken from the feature input's last axis.
        self.cluster = self.add_weight(shape=[self.k_centers+self.g_centers, input_shape[0][-1]],
                                       name='centers',
                                       initializer='orthogonal')
        self.built = True
    def compute_output_shape(self, input_shape):
        assert input_shape
        # Ghost clusters are cropped in call(), so only k_centers contribute.
        return (input_shape[0][0], self.k_centers*input_shape[0][-1])
    def call(self, x):
        # feat : bz x W x H x D, cluster_score: bz X W x H x clusters.
        feat, cluster_score = x
        num_features = feat.shape[-1]
        # Numerically stable softmax over the cluster axis: subtracting the
        # per-location max before exp prevents overflow.
        # A : bz x W x H x clusters (soft-assignment weights)
        max_cluster_score = K.max(cluster_score, -1, keepdims=True)
        exp_cluster_score = K.exp(cluster_score - max_cluster_score)
        A = exp_cluster_score / K.sum(exp_cluster_score, axis=-1, keepdims=True)
        # Residual of every descriptor against every center, weighted by A.
        # self.cluster: clusters x D, broadcast against the expanded feature map.
        A = K.expand_dims(A, -1)                  # A : bz x W x H x clusters x 1
        feat_broadcast = K.expand_dims(feat, -2)  # feat_broadcast : bz x W x H x 1 x D
        feat_res = feat_broadcast - self.cluster  # feat_res : bz x W x H x clusters x D
        weighted_res = tf.multiply(A, feat_res)   # weighted_res : bz x W x H x clusters x D
        # Sum residuals over the spatial axes -> bz x clusters x D.
        cluster_res = K.sum(weighted_res, [1, 2])
        if self.mode == 'gvlad':
            # Drop the trailing ghost clusters from the aggregate.
            cluster_res = cluster_res[:, :self.k_centers, :]
        # Intra-normalization: L2-normalize each cluster's residual vector,
        # then flatten to bz x (k_centers * D).
        cluster_l2 = K.l2_normalize(cluster_res, -1)
        outputs = K.reshape(cluster_l2, [-1, int(self.k_centers) * int(num_features)])
        return outputs
    def get_config(self):
        # Fix: without get_config the custom constructor arguments are lost
        # on save, so keras.models.load_model cannot rebuild this layer.
        config = super(VladPooling, self).get_config()
        config.update({'mode': self.mode,
                       'k_centers': self.k_centers,
                       'g_centers': self.g_centers})
        return config