from ..tensor.operators import TensorBackend
from ..tensor.functions import rand, zeros
from .module import Module, Parameter
from ..backends import fast_conv, fast_ops

BACKEND = TensorBackend(fast_ops.FastOps)


class Linear(Module):
    """Fully connected layer mapping (batch, in_size) -> (batch, out_size)."""

    def __init__(self, in_size, out_size):
        super().__init__()

        # He initialization: scale the random weights by sqrt(2 / in_size).
        scale = (2.0 / in_size) ** 0.5
        self.weights = Parameter(scale * rand((in_size, out_size), backend=BACKEND))
        self.bias = Parameter(zeros((out_size,), backend=BACKEND))
        self.out_size = out_size

    def forward(self, x):
        batch, in_size = x.shape
        return (
            x.view(batch, in_size) @ self.weights.value.view(in_size, self.out_size)
        ).view(batch, self.out_size) + self.bias.value
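
# Illustrative usage (not part of the original commit; assumes a minitorch-style
# `rand` that builds a tensor of the given shape on BACKEND): for a batch of 8
# inputs with 16 features,
#
#     layer = Linear(16, 4)
#     y = layer.forward(rand((8, 16), backend=BACKEND))
#
# produces y with shape (8, 4).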


class Conv1d(Module):
    """1D convolution over (batch, in_channels, width) inputs."""

    def __init__(self, in_channels, out_channels, kernel_width):
        super().__init__()

        # He initialization: scale the random weights by sqrt(2 / fan_in).
        fan_in = in_channels * kernel_width
        scale = (2.0 / fan_in) ** 0.5
        self.weights = Parameter(
            scale * rand((out_channels, in_channels, kernel_width), backend=BACKEND)
        )
        self.bias = Parameter(zeros((1, out_channels, 1), backend=BACKEND))

    def forward(self, input):
        out = fast_conv.conv1d(input, self.weights.value) + self.bias.value
        return out
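
# Note (added for clarity): fast_conv.conv1d is expected to return a tensor of
# shape (batch, out_channels, width'); the (1, out_channels, 1) bias then
# broadcasts across batch and width, adding one learned scalar per output channel.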


class Conv2d(Module):
    """2D convolution over (batch, in_channels, height, width) inputs."""

    def __init__(self, in_channels, out_channels, kh, kw):
        super().__init__()

        # He initialization: scale the random weights by sqrt(2 / fan_in).
        fan_in = in_channels * kh * kw
        scale = (2.0 / fan_in) ** 0.5
        self.weights = Parameter(
            scale * rand((out_channels, in_channels, kh, kw), backend=BACKEND)
        )
        self.bias = Parameter(zeros((out_channels, 1, 1), backend=BACKEND))

    def forward(self, input):
        out = fast_conv.conv2d(input, self.weights.value) + self.bias.value
        return out
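

# Illustrative smoke test for Conv2d (not part of the original commit). A minimal
# sketch assuming minitorch-style semantics: `rand` builds a tensor of the given
# shape on BACKEND and tensors expose a `.shape` attribute.
if __name__ == "__main__":
    conv2 = Conv2d(in_channels=3, out_channels=8, kh=3, kw=3)
    img = rand((4, 3, 28, 28), backend=BACKEND)  # batch, channels, height, width
    # Output is (4, 8, H', W'), with H'/W' set by the fast_conv.conv2d convention.
    print(conv2.forward(img).shape)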