CHG: Add Reshape + flatten from ethereon#147
cecabert committed Aug 15, 2019
1 parent 5893492 commit 8def90c
Showing 4 changed files with 75 additions and 7 deletions.
7 changes: 4 additions & 3 deletions kaffe/layers.py
@@ -22,7 +22,7 @@
     'EuclideanLoss': shape_scalar,
     'Eltwise': shape_identity,
     'Exp': shape_identity,
-    'Flatten': shape_not_implemented,
+    'Flatten': flatten_shape,
     'HDF5Data': shape_data,
     'HDF5Output': shape_identity,
     'HingeLoss': shape_scalar,
@@ -38,7 +38,9 @@
     'Normalize': shape_identity,
     'Pooling': shape_pool,
     'Power': shape_identity,
+    'PReLU': shape_identity,
     'ReLU': shape_identity,
+    'Reshape': reshape_shape,
     'Scale': shape_identity,
     'Sigmoid': shape_identity,
     'SigmoidCrossEntropyLoss': shape_scalar,
@@ -49,8 +51,7 @@
     'Slice': shape_not_implemented,
     'TanH': shape_identity,
     'WindowData': shape_not_implemented,
-    'Threshold': shape_identity,
-    'PReLU': shape_identity,
+    'Threshold': shape_identity
 }
 
 LAYER_TYPES = LAYER_DESCRIPTORS.keys()
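
For context, LAYER_DESCRIPTORS maps each Caffe layer type to the function that computes its output shape, so the substantive change above is that 'Flatten' and 'Reshape' now resolve to working shape functions instead of shape_not_implemented. A minimal sketch of how such a table is typically consumed (infer_output_shape is a hypothetical name for illustration; kaffe's actual lookup lives elsewhere):

def infer_output_shape(node):
    # Dispatch on the Caffe layer type, e.g. 'Flatten' -> flatten_shape.
    try:
        shape_fn = LAYER_DESCRIPTORS[node.kind]
    except KeyError:
        raise KaffeError('Unknown layer type: {}'.format(node.kind))
    return shape_fn(node)
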
30 changes: 30 additions & 0 deletions kaffe/shapes.py
@@ -70,6 +70,36 @@ def shape_concat(node):
     return tuple(output_shape)
 
 
+def reshape_shape(node):
+    input_shape = node.get_only_parent().output_shape
+    input_shape_pr = input_shape.channels * input_shape.height * input_shape.width
+    input_shape_arr = [input_shape.batch_size, input_shape.channels, input_shape.height, input_shape.width]
+    pr = 1
+    axes = node.parameters.shape.dim
+    new_shape = [input_shape.batch_size, 1, 1, 1]
+    for j in range(1, len(axes)):
+        if axes[j] == 0:
+            new_shape[j] = input_shape_arr[j]
+            pr *= new_shape[j]
+        elif axes[j] != -1:
+            new_shape[j] = int(axes[j])
+            pr *= new_shape[j]
+        else:
+            new_shape[j] = -1
+
+    for j in range(1, len(new_shape)):
+        if new_shape[j] == -1:
+            new_shape[j] = int(input_shape_pr / pr)
+
+    return TensorShape(new_shape[0], new_shape[1], new_shape[2], new_shape[3])
+
+
+def flatten_shape(node):
+    shape1 = node.get_only_parent().output_shape
+
+    return TensorShape(shape1.batch_size, shape1.channels * shape1.height * shape1.width, 1, 1)
+
+
 def shape_convolution(node):
     return get_strided_kernel_output_shape(node, math.floor)
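
To make the semantics of the two new functions concrete, here is a self-contained sketch. TensorShape is assumed to be the (batch_size, channels, height, width) namedtuple used elsewhere in kaffe; FakeNode is a purely illustrative stand-in for the node interface:

from collections import namedtuple

TensorShape = namedtuple('TensorShape', ['batch_size', 'channels', 'height', 'width'])
Parent = namedtuple('Parent', ['output_shape'])
Params = namedtuple('Params', ['shape'])
ShapeParam = namedtuple('ShapeParam', ['dim'])

class FakeNode(object):
    """Just enough of kaffe's node interface for the shape functions above."""
    def __init__(self, parent_shape, dims=None):
        self._parent = Parent(parent_shape)
        if dims is not None:
            # node.parameters.shape.dim carries Caffe's ReshapeParameter dims.
            self.parameters = Params(ShapeParam(dims))

    def get_only_parent(self):
        return self._parent

# Flatten collapses C*H*W into the channel slot: (1, 24, 2, 2) -> (1, 96, 1, 1).
assert flatten_shape(FakeNode(TensorShape(1, 24, 2, 2))) == (1, 96, 1, 1)

# Reshape follows Caffe's dim conventions: 0 copies the corresponding input
# dimension and -1 is inferred from whatever volume remains.
assert reshape_shape(FakeNode(TensorShape(1, 24, 2, 2), dims=[0, -1, 4, 2])) == (1, 12, 4, 2)

Note that reshape_shape only inspects dims[1:] and always keeps the input batch size, so a leading dim of 0 is effectively assumed.
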
20 changes: 18 additions & 2 deletions kaffe/tensorflow/network.py
@@ -255,11 +255,27 @@ def dropout(self, input, keep_prob, name):
         return tf.nn.dropout(input, keep, name=name)
 
     @layer
-    def l2_normalize(self, input):
+    def reshape(self, input, b, x, y, c, name, transpose=False):
+        if transpose:
+            input = tf.reshape(input, [-1, c, x, y])
+            return tf.transpose(input, (0, 2, 3, 1))
+
+        return tf.reshape(input, [-1, x, y, c], name=name)
+
+    @layer
+    def flatten(self, input, name):
+        input = tf.transpose(input, (0, 3, 1, 2))
+        dim = 1
+        for d in input.get_shape()[1:].as_list():
+            dim *= d
+        return tf.reshape(input, [-1, dim], name=name)
+
+    @layer
+    def l2_normalize(self, input, name):
         # NOTE: Currently, only inference is supported
         with tf.variable_scope(name) as scope:
             shp = input.get_shape().as_list()
             outputs = tf.nn.l2_normalize(x=input, axis=-1)
-            alpha = self.make_var('alpha', shape=[-1:])
+            alpha = self.make_var('alpha', shape=shp[-1:])
             outputs = tf.multiply(outputs, alpha)
             return outputs
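
The transposes above are about data layout: Caffe tensors are NCHW while this network code keeps activations NHWC, and a fully connected layer converted from Caffe expects its input flattened in (C, H, W) order. The transpose branch of reshape covers the opposite direction, rebuilding an NHWC feature map when an fc output is reshaped back to conv. A small NumPy check of the flattening argument (illustrative only):

import numpy as np

x_nhwc = np.arange(24).reshape(1, 2, 3, 4)  # (N, H, W, C)

# Caffe-compatible flattening: convert to NCHW first, then collapse.
caffe_order = np.transpose(x_nhwc, (0, 3, 1, 2)).reshape(1, -1)

# Flattening NHWC directly interleaves the channels, producing an order that
# would scramble the weights of a converted fully connected layer.
naive_order = x_nhwc.reshape(1, -1)

assert not np.array_equal(caffe_order, naive_order)
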
25 changes: 23 additions & 2 deletions kaffe/tensorflow/transformer.py
@@ -142,7 +142,10 @@ def map_lrn(self, node):
         return TensorFlowNode('lrn', int(params.local_size / 2), alpha, params.beta)
 
     def map_concat(self, node):
-        axis = (2, 3, 1, 0)[node.parameters.axis]
+        if node.parents[0].kind == 'Flatten':
+            axis = node.parameters.axis
+        else:
+            axis = (2, 3, 1, 0)[node.parameters.axis]
         return TensorFlowNode('concat', axis)
 
     def map_dropout(self, node):
@@ -161,9 +164,27 @@ def map_eltwise(self, node):
         except KeyError:
             raise KaffeError('Unknown elementwise operation: {}'.format(op_code))
 
+    def map_reshape(self, node):
+        shape = node.output_shape
+        new_shape = [0] * 4
+        new_shape[0] = shape[0]
+        new_shape[1] = shape[2]
+        new_shape[2] = shape[3]
+        new_shape[3] = shape[1]
+        parent_shape = node.get_only_parent().output_shape
+
+        # We need to transpose again if an fc layer is reshaped to conv.
+        kwargs = {'transpose': False}
+        if parent_shape.height == 1 and parent_shape.width == 1:
+            kwargs['transpose'] = True
+        return TensorFlowNode('reshape', new_shape[0], new_shape[1], new_shape[2], new_shape[3], **kwargs)
+
+    def map_flatten(self, node):
+        return TensorFlowNode('flatten')
+
     def map_normalize(self, node):
         return TensorFlowNode('l2_normalize')
 
     def commit(self, chains):
         return chains
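
Two layout details are worth spelling out. In map_concat, the (2, 3, 1, 0) table translates a Caffe NCHW axis into its NHWC position in the converted graph, and the new branch skips that translation after a Flatten because the tensor is then 2-D and the Caffe axis already applies. In map_reshape, the index shuffle performs the same NCHW to NHWC reordering on the target shape before it is handed to the reshape layer. A short illustration:

# Caffe indexes axes as NCHW (0=N, 1=C, 2=H, 3=W); the converted graph is NHWC.
# Concatenating along Caffe's channel axis therefore becomes TF axis 3:
caffe_axis = 1                      # channels in NCHW
tf_axis = (2, 3, 1, 0)[caffe_axis]  # channels in NHWC
assert tf_axis == 3

# map_reshape applies the same reordering to the target shape: an inferred
# output_shape of (N, C, H, W) is passed to reshape() as b=N, x=H, y=W, c=C.
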
