
Commit 4b7e3ed

abhigyan7 authored and Ram81 committed on Nov 5, 2018
Added linear activation support for Keras (Cloud-CV#426)
* Add PaneElement for linear activation
* Add linear activation layer data to data.js
* Add support for export of Linear activation layer to Keras
* Add support for import of Linear activation layer from Keras
* Add unit tests for linear activation layer
* Fix comment formatting
* Add Linear activation to layer map in export_json.py
1 parent b68ee94 commit 4b7e3ed

File tree: 9 files changed, +78 -1 lines changed
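Context note (not part of the commit): in Keras, the 'linear' activation is the identity function f(x) = x, so the layer added here simply passes its input through unchanged. A minimal sketch, assuming Keras and NumPy are installed:

    import numpy as np
    from keras.layers import Activation, Input
    from keras.models import Model

    # 'linear' is the identity activation: the output equals the input.
    inp = Input(shape=(3,))
    model = Model(inp, Activation('linear')(inp))
    print(model.predict(np.array([[-1.0, 0.0, 2.5]])))  # [[-1.   0.   2.5]]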

9 files changed

+78
-1
lines changed
 

ide/static/js/data.js

+30 lines changed

@@ -3065,6 +3065,36 @@ export default {
     },
     learn: true
   },
+  Linear: { // Only Keras
+    name: 'linear',
+    color: '#009688',
+    endpoint: {
+      src: ['Bottom'],
+      trg: ['Top']
+    },
+    params: {
+      inplace: {
+        name: 'Inplace operation',
+        value: true,
+        type: 'checkbox',
+        required: false
+      },
+      caffe: {
+        name: 'Available Caffe',
+        value: false,
+        type: 'checkbox',
+        required: false
+      }
+    },
+    props: {
+      name: {
+        name: 'Name',
+        value: '',
+        type: 'text'
+      }
+    },
+    learn: false
+  },
   /* ********** Utility Layers ********** */
   Flatten: {
     name: 'flatten',

ide/static/js/pane.js

+3 lines changed

@@ -322,6 +322,9 @@ class Pane extends React.Component {
           <PaneElement setDraggingLayer={this.props.setDraggingLayer}
             handleClick={this.props.handleClick}
             id="Scale_Button">Scale</PaneElement>
+          <PaneElement setDraggingLayer={this.props.setDraggingLayer}
+            handleClick={this.props.handleClick}
+            id="Linear_Button">Linear</PaneElement>
         </div>
       </div>
     </div>

ide/tasks.py

+1 line changed

@@ -78,6 +78,7 @@ def export_keras_json(net, net_name, is_tf, reply_channel):
         'TanH': activation,
         'Sigmoid': activation,
         'HardSigmoid': activation,
+        'Linear': activation,
         'Dropout': dropout,
         'Flatten': flatten,
         'Reshape': reshape,

keras_app/views/export_json.py

+1 line changed

@@ -49,6 +49,7 @@ def export_json(request, is_tf=False):
         'TanH': activation,
         'Sigmoid': activation,
         'HardSigmoid': activation,
+        'Linear': activation,
         'Dropout': dropout,
         'Flatten': flatten,
         'Reshape': reshape,

keras_app/views/import_json.py

+1 line changed

@@ -62,6 +62,7 @@ def import_json(request):
         'tanh': Activation,
         'sigmoid': Activation,
         'hard_sigmoid': Activation,
+        'linear': Activation,
         'Dropout': Dropout,
         'Flatten': Flatten,
         'Reshape': Reshape,
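Aside (not from the repo): ide/tasks.py, export_json.py, and import_json.py all share the same dispatch pattern, where the layer type string selects its handler, so registering 'Linear' (and lowercase 'linear' on import) against the existing activation handler is the only routing the new layer needs. A toy, self-contained sketch with a stand-in handler:

    def activation(layer):                      # stand-in for the real activation handler
        return 'routed to activation handler'

    layer_map = {
        'HardSigmoid': activation,
        'Linear': activation,                   # the entry this commit adds
    }
    print(layer_map['Linear']({}))              # -> 'routed to activation handler'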

keras_app/views/layers_export.py

+2 lines changed

@@ -123,6 +123,8 @@ def activation(layer, layer_in, layerId, tensor=True):
         out[layerId] = Activation('softsign')
     elif (layer['info']['type'] == 'HardSigmoid'):
         out[layerId] = Activation('hard_sigmoid')
+    elif (layer['info']['type'] == 'Linear'):
+        out[layerId] = Activation('linear')
     if tensor:
         out[layerId] = out[layerId](*layer_in)
     return out
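A minimal sketch of exercising the new export branch, assuming the Fabrik repo (and its Keras dependency) is importable; the layer-dict shape follows keras_export_test.json in this commit:

    from keras.layers import Input
    from keras_app.views.layers_export import activation

    inp = Input(shape=(15,))
    layer = {'info': {'type': 'Linear', 'phase': None}, 'params': {'inplace': True}}
    out = activation(layer, [inp], 'l1')
    print(out['l1'])  # the Keras tensor produced by Activation('linear')(inp)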

keras_app/views/layers_import.py

+2 -1 lines changed

@@ -41,7 +41,8 @@ def Activation(layer):
         'selu': 'SELU',
         'softplus': 'Softplus',
         'softsign': 'Softsign',
-        'hard_sigmoid': 'HardSigmoid'
+        'hard_sigmoid': 'HardSigmoid',
+        'linear': 'Linear'
     }
     if (layer.__class__.__name__ == 'Activation'):
         return jsonLayer(activationMap[layer.activation.func_name], {}, layer)
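A small sketch of the reverse lookup the import path performs: the Keras layer's activation function name resolves to Fabrik's layer type. Note that func_name is the Python 2 attribute used above; this sketch uses its Python 3 equivalent, __name__:

    from keras.layers import Activation

    activation_map = {'hard_sigmoid': 'HardSigmoid', 'linear': 'Linear'}  # excerpt of the map above
    layer = Activation('linear')
    print(activation_map[layer.activation.__name__])  # -> 'Linear'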

tests/unit/keras_app/keras_export_test.json

+15 lines changed

@@ -777,6 +777,21 @@
             "inplace": true
         }
     },
+    "Linear": {
+        "connection": {
+            "input": [
+                "l0"
+            ],
+            "ouput": []
+        },
+        "info": {
+            "phase": null,
+            "type": "Linear"
+        },
+        "params": {
+            "inplace": true
+        }
+    },
     "Upsample": {
         "info": {
             "phase": null,

tests/unit/keras_app/test_views.py

+23 lines changed

@@ -285,6 +285,11 @@ def test_keras_import(self):
         model.add(ThresholdedReLU(theta=1, input_shape=(15,)))
         model.build()
         self.keras_type_test(model, 0, 'ThresholdedReLU')
+        # Linear
+        model = Sequential()
+        model.add(Activation('linear', input_shape=(15,)))
+        model.build()
+        self.keras_type_test(model, 0, 'Linear')


 class DropoutImportTest(unittest.TestCase, HelperFunctions):

@@ -943,6 +948,24 @@ def test_keras_export(self):
         self.assertEqual(model.layers[1].__class__.__name__, 'Activation')


+class LinearActivationExportTest(unittest.TestCase):
+    def setUp(self):
+        self.client = Client()
+
+    def test_keras_export(self):
+        tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'keras_app',
+                                  'keras_export_test.json'), 'r')
+        response = json.load(tests)
+        tests.close()
+        net = yaml.safe_load(json.dumps(response['net']))
+        net = {'l0': net['Input'], 'l1': net['Linear']}
+        net['l0']['connection']['output'].append('l1')
+        inp = data(net['l0'], '', 'l0')['l0']
+        net = activation(net['l1'], [inp], 'l1')
+        model = Model(inp, net['l1'])
+        self.assertEqual(model.layers[1].__class__.__name__, 'Activation')
+
+
 class DropoutExportTest(unittest.TestCase):
     def setUp(self):
         self.client = Client()
