This repository has been archived by the owner on Apr 9, 2024. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 2
/
TriangulationEmbedding.py
89 lines (78 loc) · 3.81 KB
/
TriangulationEmbedding.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
# Copyright 2018 Juhan Bae, Ruijian An. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tensorflow as tf
import tensorflow.contrib.slim as slim
import math
class TriangulationEmbeddingModule:
    """Trainable Triangulation Embedding (T-embedding) layer.

    Each input descriptor is compared against a learned set of anchor
    points; the per-anchor residuals are L2-normalized and concatenated
    to form the embedding.
    """
    def __init__(self,
                 feature_size,
                 num_descriptors,
                 num_anchors,
                 add_batch_norm,
                 is_training,
                 scope_id=None):
        """Store the module configuration.
        :param feature_size: int
            Dimensionality of a single descriptor.
        :param num_descriptors: int
            Number of descriptors per example.
        :param num_anchors: int
            Number of learned anchor points.
        :param add_batch_norm: bool
            True iff batch normalization is applied to the output.
        :param is_training: bool
            True iff the module is being trained (passed to batch norm).
        :param scope_id: String
            Optional suffix appended to variable/summary names.
        """
        self.feature_size = feature_size
        self.num_descriptors = num_descriptors
        self.num_anchors = num_anchors
        self.add_batch_norm = add_batch_norm
        self.is_training = is_training
        self.scope_id = scope_id
    def forward(self, inputs, **unused_params):
        """Compute triangulation embeddings for a batch of descriptors.
        :param inputs: 2D Tensor with dimension '(batch_size * num_descriptors) x feature_size'
        :return: 3D Tensor with dimension 'batch_size x num_descriptors x (feature_size * num_anchors)'
        """
        # Resolve the optional name suffix once; it is reused for the
        # variable name and its summary.
        suffix = "" if self.scope_id is None else str(self.scope_id)
        anchors = tf.get_variable(
            "anchor_weights{}".format(suffix),
            [self.feature_size, self.num_anchors],
            initializer=tf.random_normal_initializer(
                stddev=1 / math.sqrt(self.num_anchors)),
            dtype=tf.float32)
        tf.summary.histogram("anchor_weights{}".format(suffix), anchors)
        # Lay the anchors out as one anchor-major row vector so that a
        # tiled copy of the inputs can be subtracted in a single op.
        flat_anchors = tf.reshape(tf.transpose(anchors),
                                  [1, self.feature_size * self.num_anchors])
        # (batch_size * num_descriptors) x (feature_size * num_anchors)
        repeated_inputs = tf.tile(inputs, [1, self.num_anchors])
        residuals = repeated_inputs - flat_anchors
        # Split out the anchor axis, normalize each residual to unit
        # length, then flatten back.
        residuals = tf.reshape(residuals,
                               [-1, self.num_anchors, self.feature_size])
        residuals = tf.nn.l2_normalize(residuals, 2)
        residuals = tf.reshape(residuals,
                               [-1, self.feature_size * self.num_anchors])
        if self.add_batch_norm:
            residuals = slim.batch_norm(
                residuals,
                center=True,
                scale=True,
                is_training=self.is_training,
                scope="t_emb_bn")
        # Regroup the flat rows into per-example descriptor stacks.
        return tf.reshape(
            residuals,
            [-1, self.num_descriptors, self.feature_size * self.num_anchors])