@@ -93,6 +93,15 @@ def add_layer_into_RModel(rmodel, layer_data):
9393 import numpy as np
9494 from ROOT .TMVA .Experimental import SOFIE
9595
def move_operator(op):
    """Wrap *op* in a std::unique_ptr so it can be passed to RModel::AddOperator().

    Python first relinquishes its ownership of the operator via
    ROOT.SetOwnership; the returned std::unique_ptr then manages the
    object's lifetime on the C++ side.
    """
    import ROOT

    # Hand lifetime management over to C++ before wrapping.
    ROOT.SetOwnership(op, False)
    unique_ptr_type = ROOT.std.unique_ptr[type(op)]
    return unique_ptr_type(op)
104+
96105 keras_version = get_keras_version ()
97106
98107 fLayerType = layer_data ["layerType" ]
@@ -153,12 +162,12 @@ def add_layer_into_RModel(rmodel, layer_data):
153162 if fLayerType == "GlobalAveragePooling2D" :
154163 if layer_data ["channels_last" ]:
155164 op = SOFIE .ROperator_Transpose ("float" )([0 , 3 , 1 , 2 ], inputs [0 ], LayerName + "PreTrans" )
156- rmodel .AddOperatorReference ( op )
165+ rmodel .AddOperator ( move_operator ( op ) )
157166 inputs [0 ] = LayerName + "PreTrans"
158167 outputs [0 ] = LayerName + "Squeeze"
159- rmodel .AddOperatorReference ( mapKerasLayer [fLayerType ](layer_data ))
168+ rmodel .AddOperator ( move_operator ( mapKerasLayer [fLayerType ](layer_data ) ))
160169 op = SOFIE .ROperator_Reshape (SOFIE .ReshapeOpMode .Squeeze , [2 , 3 ], LayerName + "Squeeze" , fLayerOutput )
161- rmodel .AddOperatorReference ( op )
170+ rmodel .AddOperator ( move_operator ( op ) )
162171
163172 # Similar case is with Batchnorm, ONNX assumes that the 'axis' is always 1, but Keras
164173 # gives the user the choice of specifying it. So, we have to transpose the input layer
@@ -178,26 +187,26 @@ def add_layer_into_RModel(rmodel, layer_data):
178187 fAttrPerm [1 ] = axis
179188 fAttrPerm [axis ] = 1
180189 op = SOFIE .ROperator_Transpose ("float" )(fAttrPerm , inputs [0 ], LayerName + "PreTrans" )
181- rmodel .AddOperatorReference ( op )
190+ rmodel .AddOperator ( move_operator ( op ) )
182191 inputs [0 ] = LayerName + "PreTrans"
183192 outputs [0 ] = LayerName + "PostTrans"
184- rmodel .AddOperatorReference ( mapKerasLayer [fLayerType ](layer_data ))
193+ rmodel .AddOperator ( move_operator ( mapKerasLayer [fLayerType ](layer_data ) ))
185194 op = SOFIE .ROperator_Transpose ("float" )(fAttrPerm , LayerName + "PostTrans" , fLayerOutput )
186- rmodel .AddOperatorReference ( op )
195+ rmodel .AddOperator ( move_operator ( op ) )
187196
188197 elif fLayerType == "MaxPooling2D" or fLayerType == "AveragePooling2D" :
189198 if layer_data ["channels_last" ]:
190199 op = SOFIE .ROperator_Transpose ("float" )([0 , 3 , 1 , 2 ], inputs [0 ], LayerName + "PreTrans" )
191- rmodel .AddOperatorReference ( op )
200+ rmodel .AddOperator ( move_operator ( op ) )
192201 inputs [0 ] = LayerName + "PreTrans"
193202 outputs [0 ] = LayerName + "PostTrans"
194- rmodel .AddOperatorReference ( mapKerasLayer [fLayerType ](layer_data ))
203+ rmodel .AddOperator ( move_operator ( mapKerasLayer [fLayerType ](layer_data ) ))
195204 if layer_data ["channels_last" ]:
196205 op = SOFIE .ROperator_Transpose ("float" )([0 , 2 , 3 , 1 ], LayerName + "PostTrans" , fLayerOutput )
197- rmodel .AddOperatorReference ( op )
206+ rmodel .AddOperator ( move_operator ( op ) )
198207
199208 else :
200- rmodel .AddOperatorReference ( mapKerasLayer [fLayerType ](layer_data ))
209+ rmodel .AddOperator ( move_operator ( mapKerasLayer [fLayerType ](layer_data ) ))
201210
202211 return rmodel
203212
@@ -229,20 +238,20 @@ def add_layer_into_RModel(rmodel, layer_data):
229238 if fLayerType == "Conv2D" :
230239 if layer_data ["channels_last" ]:
231240 op = SOFIE .ROperator_Transpose ("float" )([0 , 3 , 1 , 2 ], inputs [0 ], LayerName + "PreTrans" )
232- rmodel .AddOperatorReference ( op )
241+ rmodel .AddOperator ( move_operator ( op ) )
233242 inputs [0 ] = LayerName + "PreTrans"
234243 layer_data ["layerInput" ] = inputs
235244 outputs [0 ] = LayerName + fLayerType
236245 layer_data ["layerOutput" ] = outputs
237246 op = mapKerasLayerWithActivation [fLayerType ](layer_data )
238- rmodel .AddOperatorReference ( op )
247+ rmodel .AddOperator ( move_operator ( op ) )
239248 Activation_layer_input = LayerName + fLayerType
240249 if fLayerType == "Conv2D" :
241250 if layer_data ["channels_last" ]:
242251 op = SOFIE .ROperator_Transpose ("float" )(
243252 [0 , 2 , 3 , 1 ], LayerName + fLayerType , LayerName + "PostTrans"
244253 )
245- rmodel .AddOperatorReference ( op )
254+ rmodel .AddOperator ( move_operator ( op ) )
246255 Activation_layer_input = LayerName + "PostTrans"
247256
248257 # Adding the activation function
@@ -251,7 +260,7 @@ def add_layer_into_RModel(rmodel, layer_data):
251260 layer_data ["layerInput" ] = inputs
252261 layer_data ["layerOutput" ] = outputs
253262
254- rmodel .AddOperatorReference ( mapKerasLayer [LayerActivation ](layer_data ))
263+ rmodel .AddOperator ( move_operator ( mapKerasLayer [LayerActivation ](layer_data ) ))
255264
256265 else : # if layer is conv and the activation is linear, we need to add transpose before and after
257266 if fLayerType == "Conv2D" :
@@ -260,15 +269,15 @@ def add_layer_into_RModel(rmodel, layer_data):
260269 fLayerOutput = outputs [0 ]
261270 if layer_data ["channels_last" ]:
262271 op = SOFIE .ROperator_Transpose ("float" )([0 , 3 , 1 , 2 ], inputs [0 ], LayerName + "PreTrans" )
263- rmodel .AddOperatorReference ( op )
272+ rmodel .AddOperator ( move_operator ( op ) )
264273 inputs [0 ] = LayerName + "PreTrans"
265274 layer_data ["layerInput" ] = inputs
266275 outputs [0 ] = LayerName + "PostTrans"
267- rmodel .AddOperatorReference ( mapKerasLayerWithActivation [fLayerType ](layer_data ))
276+ rmodel .AddOperator ( move_operator ( mapKerasLayerWithActivation [fLayerType ](layer_data ) ))
268277 if fLayerType == "Conv2D" :
269278 if layer_data ["channels_last" ]:
270279 op = SOFIE .ROperator_Transpose ("float" )([0 , 2 , 3 , 1 ], LayerName + "PostTrans" , fLayerOutput )
271- rmodel .AddOperatorReference ( op )
280+ rmodel .AddOperator ( move_operator ( op ) )
272281 return rmodel
273282 else :
274283 raise Exception ("TMVA.SOFIE - parsing keras layer " + fLayerType + " is not yet supported" )
0 commit comments