@@ -152,15 +152,15 @@ def aten_ops_fmod(
     return impl.elementwise.fmod(network, target, SourceIR.ATEN, name, args[0], args[1])
 
 
-@dynamo_tensorrt_converter(torch.ops.aten.gelu.default)  # type: ignore[misc]
-def aten_ops_gelu(
+@dynamo_tensorrt_converter(torch.ops.aten.relu.default)
+def aten_ops_relu(
     network: TRTNetwork,
     target: Target,
     args: Tuple[Argument, ...],
     kwargs: Dict[str, Argument],
     name: str,
 ) -> Union[TRTTensor, Sequence[TRTTensor]]:
-    return impl.activation.gelu(
+    return impl.activation.relu(
         network,
         target,
         SourceIR.ATEN,
@@ -169,61 +169,171 @@ def aten_ops_gelu(
     )
 
 
-@dynamo_tensorrt_converter(torch.ops.aten.matmul)  # type: ignore[misc]
-@dynamo_tensorrt_converter(torch.ops.aten.mm.default)  # type: ignore[misc]
-@dynamo_tensorrt_converter(torch.ops.aten.mv.default)  # type: ignore[misc]
-def aten_ops_matmul(
+@dynamo_tensorrt_converter(torch.ops.aten.sigmoid.default)
+def aten_ops_sigmoid(
     network: TRTNetwork,
     target: Target,
     args: Tuple[Argument, ...],
     kwargs: Dict[str, Argument],
     name: str,
 ) -> Union[TRTTensor, Sequence[TRTTensor]]:
-    return impl.matmul.matrix_multiply(
+    return impl.activation.sigmoid(
         network,
         target,
         SourceIR.ATEN,
         name,
         args[0],
-        args[1],
     )
 
 
-@dynamo_tensorrt_converter(torch.ops.aten.layer_norm.default)  # type: ignore[misc]
-def aten_ops_layernorm(
+@dynamo_tensorrt_converter(torch.ops.aten.tanh.default)
+def aten_ops_tanh(
     network: TRTNetwork,
     target: Target,
     args: Tuple[Argument, ...],
     kwargs: Dict[str, Argument],
     name: str,
 ) -> Union[TRTTensor, Sequence[TRTTensor]]:
-    return impl.normalization.layer_norm(
+    return impl.activation.tanh(
+        network,
+        target,
+        SourceIR.ATEN,
+        name,
+        args[0],
+    )
+
+
+@dynamo_tensorrt_converter(torch.ops.aten.leaky_relu.default)
+def aten_ops_leaky_relu(
+    network: TRTNetwork,
+    target: Target,
+    args: Tuple[Argument, ...],
+    kwargs: Dict[str, Argument],
+    name: str,
+) -> Union[TRTTensor, Sequence[TRTTensor]]:
+    return impl.activation.leaky_relu(
+        network,
+        target,
+        SourceIR.ATEN,
+        name,
+        args[0],
+        args_bounds_check(args, 1, 0.01),
+    )
+
+
+@dynamo_tensorrt_converter(torch.ops.aten.elu.default)
+def aten_ops_elu(
+    network: TRTNetwork,
+    target: Target,
+    args: Tuple[Argument, ...],
+    kwargs: Dict[str, Argument],
+    name: str,
+) -> Union[TRTTensor, Sequence[TRTTensor]]:
+    return impl.activation.elu(
+        network,
+        target,
+        SourceIR.ATEN,
+        name,
+        args[0],
+        alpha=args_bounds_check(args, 1, 1.0),
+        beta=args_bounds_check(args, 2, None),
+    )
+
+
+@dynamo_tensorrt_converter(torch.ops.aten.softplus.default)
+def aten_ops_softplus(
+    network: TRTNetwork,
+    target: Target,
+    args: Tuple[Argument, ...],
+    kwargs: Dict[str, Argument],
+    name: str,
+) -> Union[TRTTensor, Sequence[TRTTensor]]:
+    return impl.activation.softplus(
+        network,
+        target,
+        SourceIR.ATEN,
+        name,
+        args[0],
+        beta=args_bounds_check(args, 1, 1),
+    )
+
+
+@dynamo_tensorrt_converter(torch.ops.aten.clip.default)
+def aten_ops_clip(
+    network: TRTNetwork,
+    target: Target,
+    args: Tuple[Argument, ...],
+    kwargs: Dict[str, Argument],
+    name: str,
+) -> Union[TRTTensor, Sequence[TRTTensor]]:
+    return impl.activation.clip(
+        network,
+        target,
+        SourceIR.ATEN,
+        name,
+        args[0],
+        alpha=args_bounds_check(args, 1),
+        beta=args_bounds_check(args, 2),
+    )
+
+
+@dynamo_tensorrt_converter(torch.ops.aten.hardsigmoid.default)
+def aten_ops_hard_sigmoid(
+    network: TRTNetwork,
+    target: Target,
+    args: Tuple[Argument, ...],
+    kwargs: Dict[str, Argument],
+    name: str,
+) -> Union[TRTTensor, Sequence[TRTTensor]]:
+    return impl.activation.hard_sigmoid(
+        network,
+        target,
+        SourceIR.ATEN,
+        name,
+        args[0],
+        alpha=args_bounds_check(args, 1, 1 / 6),
+        beta=args_bounds_check(args, 2, 1 / 2),
+    )
+
+
+@dynamo_tensorrt_converter(torch.ops.aten.matmul)  # type: ignore[misc]
+@dynamo_tensorrt_converter(torch.ops.aten.mm.default)  # type: ignore[misc]
+@dynamo_tensorrt_converter(torch.ops.aten.mv.default)  # type: ignore[misc]
+def aten_ops_matmul(
+    network: TRTNetwork,
+    target: Target,
+    args: Tuple[Argument, ...],
+    kwargs: Dict[str, Argument],
+    name: str,
+) -> Union[TRTTensor, Sequence[TRTTensor]]:
+    return impl.matmul.matrix_multiply(
         network,
         target,
         SourceIR.ATEN,
         name,
         args[0],
         args[1],
-        args[2],
-        args[3],
-        args[4],
     )
 
 
-@dynamo_tensorrt_converter(torch.ops.aten.relu.default)  # type: ignore[misc]
-def aten_ops_relu(
+@dynamo_tensorrt_converter(torch.ops.aten.layer_norm.default)  # type: ignore[misc]
+def aten_ops_layernorm(
     network: TRTNetwork,
     target: Target,
     args: Tuple[Argument, ...],
     kwargs: Dict[str, Argument],
     name: str,
 ) -> Union[TRTTensor, Sequence[TRTTensor]]:
-    return impl.activation.relu(
+    return impl.normalization.layer_norm(
         network,
         target,
         SourceIR.ATEN,
         name,
         args[0],
+        args[1],
+        args[2],
+        args[3],
+        args[4],
     )
 
 
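Note: each of the new activation converters reads optional operator arguments through `args_bounds_check(args, i, default)` so that schema defaults (e.g. leaky_relu's 0.01 slope, hardsigmoid's alpha=1/6 and beta=1/2) are supplied when the traced call omits them. A minimal sketch of what that helper is assumed to do (the actual implementation lives in the converter utilities and may differ):

```python
from typing import Any, Optional, Sequence


def args_bounds_check(
    args: Sequence[Any], i: int, replacement: Optional[Any] = None
) -> Any:
    # Return the i-th positional argument if it was provided,
    # otherwise fall back to the given replacement/default value.
    return args[i] if len(args) > i else replacement
```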