
Commit 2bccb2a

Edge-Seven and namkhanh20xx authored
docs: fix typos in some files (#3119)
Fix typos in some files

Co-authored-by: khanhkhanhlele <[email protected]>
1 parent f006ab7 commit 2bccb2a

3 files changed: +3 −3 lines changed


src/anomalib/data/datamodules/base/image.py

Lines changed: 1 addition & 1 deletion
@@ -216,7 +216,7 @@ def _update_subset_augmentations(
         if model_resize.antialias != aug_resize.antialias:
             msg = f"Conflicting antialiasing setting found between augmentations and model transforms. You are \
                 using a Resize transform in your input data augmentations. Please be aware that the model also \
-                applies a Resize transform with a different antialising setting. Using conflicting \
+                applies a Resize transform with a different antialiasing setting. Using conflicting \
                 antialiasing settings can lead to unexpected behaviour, so it is recommended to use the same \
                 antialiasing setting between augmentations and model transforms. Augmentations: \
                 antialias={aug_resize.antialias}, Model transforms: antialias={model_resize.antialias}"
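
For context, a minimal sketch of the conflict this warning guards against, assuming torchvision's v2.Resize transform (the sizes and flag values below are illustrative, not taken from the commit):

    from torchvision.transforms import v2

    # Two Resize transforms that agree on output size but disagree on antialiasing.
    aug_resize = v2.Resize((256, 256), antialias=False)   # user-supplied augmentation
    model_resize = v2.Resize((256, 256), antialias=True)  # model's own pre-processing transform

    # The check shown in the diff compares the two flags and warns when they differ.
    if model_resize.antialias != aug_resize.antialias:
        print("Conflicting antialiasing settings between augmentations and model transforms")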

src/anomalib/engine/accelerator/xpu.py

Lines changed: 1 addition & 1 deletion
@@ -35,7 +35,7 @@ def parse_devices(devices: str | list | torch.device) -> list:

     @staticmethod
     def get_parallel_devices(devices: list) -> list[torch.device]:
-        """Generates a list of parrallel devices."""
+        """Generates a list of parallel devices."""
         return [torch.device("xpu", idx) for idx in devices]

     @staticmethod
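
As a usage sketch, assuming a PyTorch build that recognizes the "xpu" device type (the device indices here are illustrative), the corrected method simply maps integer indices to XPU device handles:

    import torch

    # Mirrors the list comprehension in the diff: each index becomes an XPU device.
    devices = [0, 1]
    parallel_devices = [torch.device("xpu", idx) for idx in devices]
    print(parallel_devices)  # [device(type='xpu', index=0), device(type='xpu', index=1)]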

src/anomalib/models/image/dsr/torch_model.py

Lines changed: 1 addition & 1 deletion
@@ -770,7 +770,7 @@ def __init__(self, base_width: int, out_channels: int = 1) -> None:
         self.fin_out = nn.Sequential(nn.Conv2d(base_width, out_channels, kernel_size=3, padding=1))

     def forward(self, b1: torch.Tensor, b2: torch.Tensor, b3: torch.Tensor, b4: torch.Tensor) -> torch.Tensor:
-        """Decodes latent represnetations into an image.
+        """Decodes latent representations into an image.

         Args:
             b1 (torch.Tensor): First (top level) quantized feature map.
