Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

added indexed categorical layer evaluation #313

Merged
merged 2 commits into from
Nov 8, 2024
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
29 changes: 26 additions & 3 deletions cirkit/backend/torch/layers/input.py
Original file line number Diff line number Diff line change
Expand Up @@ -296,6 +296,9 @@
raise ValueError(f"The number of folds and shape of 'probs' must match the layer's")
self.probs = probs
self.logits = logits
self.idx_mode = (
len(torch.unique(self.scope_idx)) > 4096 or self.num_categories > 256
)

def _valid_parameter_shape(self, p: TorchParameter) -> bool:
if p.num_folds != self.num_folds:
Expand Down Expand Up @@ -323,10 +326,30 @@
def log_unnormalized_likelihood(self, x: Tensor) -> Tensor:
    """Compute per-unit categorical log-likelihoods for a batch of inputs.

    Args:
        x: Discrete inputs of shape (F, C, B, 1) — folds, channels, batch,
            one variable per fold. Float inputs are cast to ``long``.

    Returns:
        Tensor of shape (F, B, K): for each fold and batch element, the sum
        over channels of the log-probability each of the K units assigns to
        the observed category.
    """
    if x.is_floating_point():
        x = x.long()  # The input to Categorical should be discrete
    # logits has shape (F, K, C, num_categories); derive it from probs
    # when the layer is parameterized by probabilities instead.
    logits = torch.log(self.probs()) if self.logits is None else self.logits()
    if self.idx_mode:
        # Index-based gather: avoids materializing a one-hot tensor, which
        # is wasteful when num_categories is large or there are many folds.
        if self.num_channels == 1:
            # (F, K, N) -> (F, N, K), then gather the observed category per
            # fold/batch: x[:, 0, :, 0].t() is (B, F), broadcasting with
            # range(F) yields (B, F, K); transpose to (F, B, K).
            x = (
                logits[:, :, 0, :]
                .transpose(1, 2)[range(self.num_folds), x[:, 0, :, 0].t()]
                .transpose(0, 1)
            )
        else:
            # (F, C, B, 1) -> (B, F, C) category indices.
            x = x[..., 0].permute(2, 0, 1)
            # Advanced indexing over (fold, category-per-channel) yields
            # (B, F, C, K); summing channel log-probs gives (B, F, K),
            # transposed to (F, B, K).
            x = (
                logits[
                    torch.arange(self.num_folds).unsqueeze(1),
                    :,
                    torch.arange(self.num_channels).unsqueeze(0),
                    x,
                ]
                .sum(2)
                .transpose(0, 1)
            )
    else:
        # One-hot + einsum path: fine for small category counts.
        x = F.one_hot(x, self.num_categories)  # (F, C, B, 1, num_categories)
        x = x.squeeze(dim=3)  # (F, C, B, num_categories)
        x = torch.einsum("fcbi,fkci->fbk", x.to(logits.dtype), logits)
    return x

def log_partition_function(self) -> Tensor:
Expand Down
Loading