Skip to content

Tensor duck #40

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 3 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
67 changes: 67 additions & 0 deletions test/test_torchlike.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
# Test ability to type-check user defined classes which have a "torch-like" interface
# The required interface is defined as the protocol TensorLike in tensor_details.py

from __future__ import annotations
import pytest
import torch
from torch import rand

from torchtyping import TensorType, TensorTypeMixin
from typeguard import typechecked


# New class that supports the tensor-like interface
class MyTensor:
    """Toy tensor-like wrapper used to test duck-typed TensorType checking."""

    # BUG FIX: the previous default `tensor: torch.Tensor = torch.zeros(2, 3)`
    # was evaluated once at definition time, so every default-constructed
    # MyTensor shared the same tensor object (mutable-default pitfall).
    # Use a None sentinel and build a fresh zeros(2, 3) per instance instead;
    # callers that pass a tensor explicitly are unaffected.
    def __init__(self, tensor: torch.Tensor | None = None):
        self.tensor = torch.zeros(2, 3) if tensor is None else tensor
        # Mirror the attributes the TensorLike protocol expects.
        self.dtype = self.tensor.dtype
        self.layout = "something special"
        self.names = self.tensor.names
        self.shape = self.tensor.shape

    def is_floating_point(self) -> bool:
        # Only float32 is treated as floating point in this toy class.
        return self.dtype == torch.float32

    # Add tensors and take the mean over the last dimension
    # Output drops the last dimension
    def __add__(self, o: torch.Tensor) -> MyTensor:
        res = self.tensor + o
        res_reduced = torch.mean(res, -1)
        return MyTensor(res_reduced)


# Create a type corresponding to the new class
class MyTensorType(MyTensor, TensorTypeMixin):
    # base_cls presumably tells TensorTypeMixin which runtime class checked
    # values must be an instance of, so that subscripted forms like
    # MyTensorType["x", "y"] validate MyTensor objects — confirm against
    # TensorTypeMixin's implementation in tensor_type.py.
    base_cls = MyTensor


# Placeholder bindings so flake8 does not flag the dimension names used in
# the annotations below as undefined.
x, y = None, None


def test_my_tensor1():
    """Exercise runtime shape checking of a duck-typed tensor class."""

    # Correct spec: adding (x, y) tensors and reducing over the last
    # dimension yields shape (x,).
    @typechecked
    def func(x: MyTensorType["x", "y"], y: TensorType["x", "y"]) -> MyTensorType["x"]:
        return x + y

    # Deliberately wrong return spec: claims the last dimension survives.
    @typechecked
    def bad_func_spec(
        x: MyTensorType["x", "y"], y: TensorType["x", "y"]
    ) -> MyTensorType["x", "y"]:
        return x + y

    sample: MyTensor = MyTensor()

    # Well-typed call passes the checker.
    func(sample, rand((2, 3)))

    # Incorrect input dimensions for x
    with pytest.raises(TypeError):
        func(MyTensor(rand(1)), rand((2, 3)))

    # Incorrect input dimensions for y
    with pytest.raises(TypeError):
        func(sample, rand(1))

    # Incorrect spec for return dimensions
    with pytest.raises(TypeError):
        bad_func_spec(sample, rand((2, 3)))
2 changes: 1 addition & 1 deletion torchtyping/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
TensorDetail,
)

from .tensor_type import TensorType
from .tensor_type import TensorType, TensorTypeMixin
from .typechecker import patch_typeguard

__version__ = "0.1.4"
67 changes: 54 additions & 13 deletions torchtyping/tensor_details.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,24 +4,65 @@
import collections
import torch

from typing import Optional, Union
from typing import Optional, Union, runtime_checkable, Protocol, Tuple, Any


ellipsis = type(...)


# Define a Protocol (PEP 544) class to represent "tensor-like" objects
# These are objects which support the interface given below
@runtime_checkable
class TensorLike(Protocol):
    """Structural interface for objects that torchtyping can check.

    Any class exposing ``dtype``, ``layout``, ``names``, ``shape`` and
    ``is_floating_point()`` conforms, without inheriting from torch.Tensor.

    NOTE(review): runtime_checkable protocols officially support only method
    presence checks at isinstance() time; this protocol also declares
    property members and assumes a default constructor — confirm that
    isinstance()/construction-based checks behave as intended on all
    supported Python versions.
    """

    # We assume the class has a default constructor
    def __init__(self):
        pass

    # Element type of the wrapped data (e.g. torch.float32).
    @property
    def dtype(self) -> torch.dtype:
        pass

    # leave the layout definition open because tensor-like classes are likely
    # to extend it with new storage types
    @property
    def layout(self) -> Any:
        pass

    # Per-dimension names, None entries for unnamed dimensions.
    @property
    def names(self) -> Tuple[str, ...]:
        pass

    # Per-dimension sizes.
    @property
    def shape(self) -> Tuple[int, ...]:
        pass

    # True when the underlying data is floating point.
    def is_floating_point(self) -> bool:
        pass


class MyTensor:
    """Minimal concrete example of an object satisfying TensorLike."""

    def __init__(self):
        # A fixed 1x1, unnamed, float32 "tensor" with a custom layout tag.
        self.dtype = torch.float32
        self.layout = "very special"
        self.shape = (1, 1)
        self.names = (None, None)

    def is_floating_point(self):
        # Reduces to a dtype comparison for this fixed-dtype example.
        return torch.float32 == self.dtype


class TensorDetail(metaclass=abc.ABCMeta):
    """Abstract base for one checkable property of a tensor-like value."""

    @abc.abstractmethod
    def __repr__(self) -> str:
        # Human-readable form of the expected detail.
        raise NotImplementedError

    @abc.abstractmethod
    def check(self, tensor: TensorLike) -> bool:
        # Return True iff `tensor` satisfies this detail.
        raise NotImplementedError

    @classmethod
    @abc.abstractmethod
    def tensor_repr(cls, tensor: TensorLike) -> str:
        # Render the detail actually observed on `tensor`; used to build
        # the "expected X, got Y" mismatch messages in the typechecker.
        raise NotImplementedError


Expand Down Expand Up @@ -69,7 +110,7 @@ def __repr__(self) -> str:
out += ", is_named"
return out

def check(self, tensor: torch.Tensor) -> bool:
def check(self, tensor: TensorLike) -> bool:
self_names = [self_dim.name for self_dim in self.dims]
self_shape = [self_dim.size for self_dim in self.dims]

Expand Down Expand Up @@ -103,7 +144,7 @@ def check(self, tensor: torch.Tensor) -> bool:
return True

@classmethod
def tensor_repr(cls, tensor: torch.Tensor) -> str:
def tensor_repr(cls, tensor: TensorLike) -> str:
dims = []
check_names = any(name is not None for name in tensor.names)
for name, size in zip(tensor.names, tensor.shape):
Expand Down Expand Up @@ -133,11 +174,11 @@ def __init__(self, *, dtype, **kwargs) -> None:
    def __repr__(self) -> str:
        # The expected dtype is the whole description of this detail.
        return repr(self.dtype)

    def check(self, tensor: TensorLike) -> bool:
        # Satisfied only by an exact dtype match.
        return self.dtype == tensor.dtype

    @classmethod
    def tensor_repr(cls, tensor: TensorLike) -> str:
        # Build a detail from the observed dtype and reuse its repr.
        return repr(cls(dtype=tensor.dtype))


Expand All @@ -149,23 +190,23 @@ def __init__(self, *, layout, **kwargs) -> None:
    def __repr__(self) -> str:
        # The expected layout is the whole description of this detail.
        return repr(self.layout)

    def check(self, tensor: TensorLike) -> bool:
        # Layout may be any object for tensor-like classes (see TensorLike),
        # so compare by plain equality rather than identity.
        return self.layout == tensor.layout

    @classmethod
    def tensor_repr(cls, tensor: TensorLike) -> str:
        # Build a detail from the observed layout and reuse its repr.
        return repr(cls(layout=tensor.layout))


class _FloatDetail(TensorDetail):
    """Detail satisfied when the value reports floating-point data."""

    def __repr__(self) -> str:
        return "is_float"

    def check(self, tensor: TensorLike) -> bool:
        # Delegates entirely to the object's own is_floating_point().
        return tensor.is_floating_point()

    @classmethod
    def tensor_repr(cls, tensor: TensorLike) -> str:
        # Empty string for non-float values: contributes nothing to the
        # assembled representation.
        return "is_float" if tensor.is_floating_point() else ""


Expand All @@ -177,11 +218,11 @@ class _NamedTensorDetail(TensorDetail):
    def __repr__(self) -> str:
        # NOTE(review): all three methods raise RuntimeError — this detail is
        # apparently never invoked directly at runtime and is presumably
        # handled specially elsewhere in the package; confirm before relying
        # on it.
        raise RuntimeError

    def check(self, tensor: TensorLike) -> bool:
        raise RuntimeError

    @classmethod
    def tensor_repr(cls, tensor: TensorLike) -> str:
        raise RuntimeError


Expand Down
3 changes: 2 additions & 1 deletion torchtyping/tensor_type.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
LayoutDetail,
ShapeDetail,
TensorDetail,
TensorLike,
)
from .utils import frozendict

Expand All @@ -25,7 +26,7 @@
from typing_extensions import Annotated

# Not Type[Annotated...] as we want to use this in instance checks.
_AnnotatedType = type(Annotated[torch.Tensor, ...])
_AnnotatedType = type(Annotated[TensorLike, ...])


# For use when we have a plain TensorType, without any [].
Expand Down
17 changes: 8 additions & 9 deletions torchtyping/typechecker.py
Original file line number Diff line number Diff line change
@@ -1,19 +1,18 @@
import inspect
import sys
import torch
import typeguard

from .tensor_details import _Dim, _no_name, ShapeDetail
from .tensor_details import _Dim, _no_name, ShapeDetail, TensorLike
from .tensor_type import _AnnotatedType

from typing import Any, Dict, List, Tuple

# get_args is available in python version 3.8
# get_type_hints with include_extras parameter is available in 3.9 PEP 593.
if sys.version_info >= (3, 9):
from typing import get_type_hints, get_args, Type
from typing import get_type_hints, get_args
else:
from typing_extensions import get_type_hints, get_args, Type
from typing_extensions import get_type_hints, get_args


# TYPEGUARD PATCHER
Expand Down Expand Up @@ -60,7 +59,7 @@ def _to_string(name, detail_reprs: List[str]) -> str:


def _check_tensor(
argname: str, value: Any, origin: Type[torch.Tensor], metadata: Dict[str, Any]
argname: str, value: Any, origin: TensorLike, metadata: Dict[str, Any]
):
details = metadata["details"]
if not isinstance(value, origin) or any(
Expand All @@ -69,7 +68,7 @@ def _check_tensor(
expected_string = _to_string(
metadata["cls_name"], [repr(detail) for detail in details]
)
if isinstance(value, torch.Tensor):
if isinstance(value, TensorLike):
given_string = _to_string(
metadata["cls_name"], [detail.tensor_repr(value) for detail in details]
)
Expand Down Expand Up @@ -253,7 +252,7 @@ def _check_memo(memo):
dims.append(_Dim(name=dim.name, size=size))
detail = detail.update(dims=tuple(dims))
_check_tensor(
argname, value, torch.Tensor, {"cls_name": cls_name, "details": [detail]}
argname, value, TensorLike, {"cls_name": cls_name, "details": [detail]}
)


Expand All @@ -274,7 +273,7 @@ class _CallMemo(typeguard._CallMemo):
"name_to_size",
"name_to_shape",
)
value_info: List[Tuple[str, torch.Tensor, str, Dict[str, Any]]]
value_info: List[Tuple[str, TensorLike, str, Dict[str, Any]]]
name_to_size: Dict[str, int]
name_to_shape: Dict[str, Tuple[int]]

Expand All @@ -301,7 +300,7 @@ def check_type(*args, **kwargs):
# Now check if it's annotating a tensor
if is_torchtyping_annotation:
base_cls, *all_metadata = get_args(expected_type)
if not issubclass(base_cls, torch.Tensor):
if not isinstance(base_cls(), TensorLike):
Copy link
Author

@corwinjoy corwinjoy Nov 4, 2022

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm not sure about this last change. As mentioned, the protocol class only supports isinstance() because it has properties. This means I had to require default construction.
But, I think this test may be unnecessary - after all the other tests I think we know this is a TensorLike element?
I think it might be better to just get rid of this test. @patrick-kidger

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

In fact, it does seem I have a strong motivation to remove this. The case where I want to apply it is to check shape signatures on an abstract base class so default construction may not be an option.

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I have updated the PR accordingly.

is_torchtyping_annotation = False
# Now check if the annotation's metadata is our metadata
if is_torchtyping_annotation:
Expand Down