Commit 1b19bcb

Fix grad params
1 parent 23ebbc8 commit 1b19bcb

1 file changed (+3, −2 lines)

tests/attr/layer/test_layer_gradient_x_activation.py

@@ -113,8 +113,9 @@ def test_gradient_activation_embedding_no_grad(self) -> None:
         input1 = torch.tensor([2, 5, 0, 1])
         input2 = torch.tensor([3, 0, 0, 2])
         model = BasicEmbeddingModel()
-        # for param in model.parameters():
-        #     param.requires_grad = False
+        for param in model.parameters():
+            param.requires_grad = False
+
         with torch.no_grad():
             layer_act = LayerGradientXActivation(model, model.embedding1)
             self.assertEqual(
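
For context, below is a minimal, hypothetical sketch of the pattern the updated test exercises: every parameter is frozen with requires_grad = False and attribution runs inside torch.no_grad(), yet LayerGradientXActivation still computes layer gradients because Captum re-enables gradient tracking on the layer output internally. ToyEmbeddingModel is an assumed stand-in for Captum's BasicEmbeddingModel test helper; its sizes and shapes are illustrative only, not the values used in the test.

    # Hypothetical sketch, not the Captum test itself.
    import torch
    import torch.nn as nn
    from captum.attr import LayerGradientXActivation


    class ToyEmbeddingModel(nn.Module):
        """Assumed stand-in for the BasicEmbeddingModel test helper."""

        def __init__(self, vocab_size: int = 10, emb_dim: int = 8) -> None:
            super().__init__()
            self.embedding1 = nn.Embedding(vocab_size, emb_dim)
            self.linear = nn.Linear(emb_dim, 1)

        def forward(self, ids: torch.Tensor) -> torch.Tensor:
            # One scalar output per token position, so no explicit target is needed.
            return self.linear(self.embedding1(ids)).squeeze(-1)


    model = ToyEmbeddingModel()
    for param in model.parameters():
        param.requires_grad = False  # freeze all weights, as in the patched test

    ids = torch.tensor([2, 5, 0, 1])
    with torch.no_grad():
        layer_act = LayerGradientXActivation(model, model.embedding1)
        attributions = layer_act.attribute(ids)

    # Attributions have the shape of the embedding layer's output: (4, 8) here.
    print(attributions.shape)

A plain model(ids).backward() would fail in this setting, since under torch.no_grad() with frozen parameters no tensor in the forward pass requires grad; re-enabling the previously commented-out freezing pins down that the layer-gradient path handles this case on its own.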
