@@ -5,43 +5,7 @@
 import ninetoothed
 import torch

-import ntops.kernels.abs
-import ntops.kernels.add
-import ntops.kernels.addmm
-import ntops.kernels.bitwise_and
-import ntops.kernels.bitwise_not
-import ntops.kernels.bitwise_or
-import ntops.kernels.bmm
-import ntops.kernels.clamp
-import ntops.kernels.cos
-import ntops.kernels.div
-import ntops.kernels.dropout
-import ntops.kernels.eq
-import ntops.kernels.exp
-import ntops.kernels.ge
-import ntops.kernels.gelu
-import ntops.kernels.gt
-import ntops.kernels.isinf
-import ntops.kernels.isnan
-import ntops.kernels.layer_norm
-import ntops.kernels.le
-import ntops.kernels.lt
-import ntops.kernels.mm
-import ntops.kernels.mul
-import ntops.kernels.ne
-import ntops.kernels.neg
-import ntops.kernels.pow
-import ntops.kernels.relu
-import ntops.kernels.rms_norm
-import ntops.kernels.rotary_position_embedding
-import ntops.kernels.rsqrt
-import ntops.kernels.scaled_dot_product_attention
-import ntops.kernels.sigmoid
-import ntops.kernels.silu
-import ntops.kernels.sin
-import ntops.kernels.softmax
-import ntops.kernels.sub
-import ntops.kernels.tanh
+import ntops
 from ntops.kernels.scaled_dot_product_attention import CausalVariant

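The net effect of this change is that call sites need only the single top-level import, which presumably works because the package `__init__` now imports the kernel submodules itself. A minimal sketch of the resulting usage, assuming `import ntops` transitively pulls in `ntops.kernels.*` (inferred from the diff, not confirmed here):

```python
import ntops

# Assuming ntops/__init__.py imports the kernel submodules, the fully
# qualified module names remain reachable after one top-level import:
print(ntops.kernels.add)      # e.g. <module 'ntops.kernels.add' ...>
print(ntops.kernels.softmax)  # e.g. <module 'ntops.kernels.softmax' ...>

# The explicit import the diff retains still works as before:
from ntops.kernels.scaled_dot_product_attention import CausalVariant
```

One trade-off of centralizing the submodule imports in the package `__init__` is that every kernel module is imported eagerly on `import ntops`, in exchange for a one-line import list at each call site.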