|
1 | 1 | # This file is a part of the `allegro` package. Please see LICENSE and README at the root for information on using it.
|
2 |
| -from math import pi |
3 |
| -import torch |
4 |
| -from e3nn.util.jit import compile_mode |
5 |
| -from nequip.utils.global_dtype import _GLOBAL_DTYPE |
6 |
| - |
7 |
| - |
8 |
| -@compile_mode("script") |
9 |
| -class PerClassSpline(torch.nn.Module): |
10 |
| - """Module implementing the spline required for a two-body scalar embedding. |
11 |
| -
|
12 |
| - Per-class splines with finite support for [0, 1], and will go to zero smoothly at 1. |
13 |
| -
|
14 |
| - Args: |
15 |
| - num_classes (int) : number of classes or categories (for ``index_select`` operation) |
16 |
| - num_channels (int) : number of output channels |
17 |
| - num_splines (int) : number of spline basis functions |
18 |
| - spline_span (int) : number of spline basis functions that overlap on spline grid points |
19 |
| - """ |
20 |
| - |
21 |
| - def __init__( |
22 |
| - self, |
23 |
| - num_classes: int, |
24 |
| - num_channels: int, |
25 |
| - num_splines: int, |
26 |
| - spline_span: int, |
27 |
| - dtype: torch.dtype = _GLOBAL_DTYPE, |
28 |
| - ): |
29 |
| - super().__init__() |
30 |
| - |
31 |
| - # === sanity check === |
32 |
| - assert 0 <= spline_span <= num_splines |
33 |
| - assert num_splines > 0 |
34 |
| - |
35 |
| - # === save inputs parameters === |
36 |
| - self.num_classes = num_classes |
37 |
| - self.num_channels = num_channels |
38 |
| - self.num_splines = num_splines |
39 |
| - self.spline_span = spline_span |
40 |
| - self.dtype = dtype |
41 |
| - |
42 |
| - # === spline grid parameters === |
43 |
| - lower = ( |
44 |
| - torch.arange( |
45 |
| - -self.spline_span, self.num_splines - spline_span, dtype=self.dtype |
46 |
| - ) |
47 |
| - / self.num_splines |
48 |
| - ) |
49 |
| - diff = (self.spline_span + 1) / self.num_splines |
50 |
| - |
51 |
| - self.register_buffer("lower", lower) |
52 |
| - self.register_buffer("upper", lower + diff) |
53 |
| - self._const = 2 * pi / diff |
54 |
| - |
55 |
| - # === use torch.nn.Embedding for spline weights === |
56 |
| - self.class_embed = torch.nn.Embedding( |
57 |
| - num_embeddings=self.num_classes, |
58 |
| - embedding_dim=self.num_channels * self.num_splines, |
59 |
| - dtype=dtype, |
60 |
| - ) |
61 |
| - |
62 |
| - def extra_repr(self) -> str: |
63 |
| - msg = f"num classes : {self.num_classes}\n" |
64 |
| - msg += f"num channels: {self.num_channels}\n" |
65 |
| - msg += f"num splines : {self.num_splines}\n" |
66 |
| - msg += f"spline span : {self.spline_span}" |
67 |
| - return msg |
68 |
| - |
69 |
| - def forward(self, x: torch.Tensor, classes: torch.Tensor) -> torch.Tensor: |
70 |
| - """ |
71 |
| - Args: |
72 |
| - x (torch.Tensor) : input tensor with shape (z, 1) |
73 |
| - classes (torch.Tensor): class tensor with shape (z,) whose values are integer indices from 0 to num_classes - 1 |
74 |
| - """ |
75 |
| - # index out weights based on classes: -> (z, num_channels, num_splines) |
76 |
| - spline_weights = self.class_embed(classes).view( |
77 |
| - classes.size(0), self.num_channels, self.num_splines |
78 |
| - ) |
79 |
| - spline_basis = self._get_basis(x) |
80 |
| - # (z, num_channels, num_splines), (z, num_splines) -> (z, num_channels) |
81 |
| - return torch.bmm(spline_weights, spline_basis.unsqueeze(-1)).squeeze(-1) |
82 |
| - |
83 |
| - def _get_basis(self, x: torch.Tensor) -> torch.Tensor: |
84 |
| - # construct spline basis |
85 |
| - # x: (z, 1) -> spline_basis: (z, num_splines) |
86 |
| - normalized_x = self._const * ( |
87 |
| - torch.clamp(x, min=self.lower, max=self.upper) - self.lower |
88 |
| - ) |
89 |
| - return 0.25 * (1 - torch.cos(normalized_x)).square() |
| 2 | +from math import pi |
| 3 | +import torch |
| 4 | +from e3nn.util.jit import compile_mode |
| 5 | +from nequip.utils.global_dtype import _GLOBAL_DTYPE |
| 6 | + |
| 7 | + |
@compile_mode("script")
class PerClassSpline(torch.nn.Module):
    """Module implementing the spline required for a two-body scalar embedding.

    Per-class splines with finite support for [0, 1], and will go to zero smoothly at 1.

    Args:
        num_classes (int) : number of classes or categories (for ``index_select`` operation)
        num_channels (int) : number of output channels
        num_splines (int) : number of spline basis functions
        spline_span (int) : number of spline basis functions that overlap on spline grid points
    """

    def __init__(
        self,
        num_classes: int,
        num_channels: int,
        num_splines: int,
        spline_span: int,
        dtype: torch.dtype = _GLOBAL_DTYPE,
    ):
        super().__init__()

        # === sanity check ===
        assert num_splines > 0
        assert 0 <= spline_span <= num_splines

        # === save input parameters ===
        self.num_classes = num_classes
        self.num_channels = num_channels
        self.num_splines = num_splines
        self.spline_span = spline_span
        self.dtype = dtype

        # === spline grid parameters ===
        # lower edge of each basis function's support, spaced 1/num_splines apart;
        # shifted left by spline_span so neighboring basis functions overlap
        grid_idx = torch.arange(
            -spline_span, num_splines - spline_span, dtype=dtype
        )
        lower = grid_idx / num_splines
        # width of each basis function's support window
        width = (spline_span + 1) / num_splines

        self.register_buffer("lower", lower)
        self.register_buffer("upper", lower + width)
        # frequency such that the cosine completes one full period across a window
        self._const = 2 * pi / width

        # === use torch.nn.Embedding for spline weights ===
        self.class_embed = torch.nn.Embedding(
            num_embeddings=num_classes,
            embedding_dim=num_channels * num_splines,
            dtype=dtype,
        )

    def extra_repr(self) -> str:
        return "\n".join(
            [
                f"num classes : {self.num_classes}",
                f"num channels: {self.num_channels}",
                f"num splines : {self.num_splines}",
                f"spline span : {self.spline_span}",
            ]
        )

    def forward(self, x: torch.Tensor, classes: torch.Tensor) -> torch.Tensor:
        """
        Args:
            x (torch.Tensor) : input tensor with shape (z, 1)
            classes (torch.Tensor): class tensor with shape (z,) whose values are integer indices from 0 to num_classes - 1
        """
        # evaluate basis first: (z, 1) -> (z, num_splines)
        basis = self._get_basis(x)
        # index out weights based on classes: -> (z, num_channels, num_splines)
        weights = self.class_embed(classes).reshape(
            classes.size(0), self.num_channels, self.num_splines
        )
        # batched matrix-vector product:
        # (z, num_channels, num_splines) x (z, num_splines, 1) -> (z, num_channels)
        return torch.bmm(weights, basis.unsqueeze(-1)).squeeze(-1)

    def _get_basis(self, x: torch.Tensor) -> torch.Tensor:
        # construct spline basis; x: (z, 1) -> (z, num_splines)
        # clamp x into each window [lower, upper], shift so the window starts at 0,
        # and scale so one window spans a full cosine period
        theta = self._const * (
            torch.clamp(x, min=self.lower, max=self.upper) - self.lower
        )
        # raised-cosine-squared bump: zero at the window edges, one at the center
        return 0.25 * torch.square(1 - torch.cos(theta))
0 commit comments