Skip to content

Commit 391b199

Browse files
committed
Expose kernels in kernels/__init__.py
1 parent 56971d6 commit 391b199

File tree

1 file changed

+79
-0
lines changed

1 file changed

+79
-0
lines changed

src/ntops/kernels/__init__.py

Lines changed: 79 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,79 @@
1+
"""Public namespace for the ntops kernel package.

Re-exports every kernel submodule so callers can write
``from ntops.kernels import add`` (or access ``ntops.kernels.add``)
without importing each submodule by its full path.  ``__all__``
mirrors the import list exactly, so ``from ntops.kernels import *``
exposes precisely these kernels and nothing else.

NOTE(review): several exported names (``abs``, ``pow``, ``div``,
``sum``-style ops, …) shadow Python builtins at this package's top
level.  That appears intentional — the submodules are named after the
operations they implement — but avoid star-importing this package in
modules that also rely on the shadowed builtins.
"""

from ntops.kernels import (
    abs,
    add,
    addmm,
    bitwise_and,
    bitwise_not,
    bitwise_or,
    bmm,
    clamp,
    cos,
    div,
    dropout,
    eq,
    exp,
    ge,
    gelu,
    gt,
    isinf,
    isnan,
    layer_norm,
    le,
    lt,
    mm,
    mul,
    ne,
    neg,
    pow,
    relu,
    rms_norm,
    rotary_position_embedding,
    rsqrt,
    scaled_dot_product_attention,
    sigmoid,
    silu,
    sin,
    softmax,
    sub,
    tanh,
)

# Keep this list in sync with the import block above: one string per
# re-exported kernel submodule, in the same (alphabetical) order.
__all__ = [
    "abs",
    "add",
    "addmm",
    "bitwise_and",
    "bitwise_not",
    "bitwise_or",
    "bmm",
    "clamp",
    "cos",
    "div",
    "dropout",
    "eq",
    "exp",
    "ge",
    "gelu",
    "gt",
    "isinf",
    "isnan",
    "layer_norm",
    "le",
    "lt",
    "mm",
    "mul",
    "ne",
    "neg",
    "pow",
    "relu",
    "rms_norm",
    "rotary_position_embedding",
    "rsqrt",
    "scaled_dot_product_attention",
    "sigmoid",
    "silu",
    "sin",
    "softmax",
    "sub",
    "tanh",
]

0 commit comments

Comments
 (0)