Skip to content

Commit

Permalink
try fixes
Browse files Browse the repository at this point in the history
  • Loading branch information
younesbelkada committed May 13, 2022
1 parent 3f936df commit a8ad75e
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions tests/models/opt/test_modeling_opt.py
Original file line number Diff line number Diff line change
Expand Up @@ -281,7 +281,7 @@ def test_inference_no_head(self):
attention_mask = input_ids.ne(model.config.pad_token_id)
with torch.no_grad():
output = model(input_ids=input_ids, attention_mask=attention_mask).last_hidden_state
expected_shape = torch.Size((1, 11, 1024))
expected_shape = torch.Size((1, 11, 512))
self.assertEqual(output.shape, expected_shape)
expected_slice = torch.tensor(
[[0.7144, 0.8143, -1.2813], [0.7144, 0.8143, -1.2813], [-0.0467, 2.5911, -2.1845]], device=torch_device
Expand Down Expand Up @@ -348,8 +348,8 @@ def test_generation(self):
GEN_OUTPUT = []

tokenizer = GPT2Tokenizer.from_pretrained("patrickvonplaten/opt_gpt2_tokenizer")
for model in self.all_model_path:
model = OPTForCausalLM.from_pretrained(self.path_model)
for path_model in self.all_model_path:
model = OPTForCausalLM.from_pretrained(path_model)
model = model.eval()
model.config.eos_token_id = tokenizer.eos_token_id

Expand Down

0 comments on commit a8ad75e

Please sign in to comment.