Commit

ShashankMosaicML committed Jan 25, 2025
1 parent 753524e commit d1ba5f2
Showing 1 changed file with 7 additions and 7 deletions.
14 changes: 7 additions & 7 deletions llmfoundry/models/mpt/modeling_mpt.py
@@ -633,13 +633,13 @@ def _resolve_reuse_state_layer_idx(
         parent_config['attn_config'][reuse_type] = override_config['attn_config'
             ][reuse_type]

-        if override_config != parent_config and not (
-            'allow_mismatch' in override_config and
-            override_config['allow_mismatch']
-        ):
-            raise ValueError(
-                'For reusing the kv cache of a previous layer, the previous layer should match the block config as the current layer.',
-            )
+        # if override_config != parent_config and not (
+        #     'allow_mismatch' in override_config and
+        #     override_config['allow_mismatch']
+        # ):
+        #     raise ValueError(
+        #         'For reusing the kv cache of a previous layer, the previous layer should match the block config as the current layer.',
+        #     )

         return reuse_state_layer_idx
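For context, the check disabled here enforced that a layer reusing another layer's KV cache has an identical block config, unless the overriding config opts out via allow_mismatch. After this commit, the mismatch path no longer raises at all, so differing block configs are accepted silently. Below is a minimal, standalone sketch of the original validation, assuming plain dict block configs; the helper name check_reuse_kv_config and the example configs are hypothetical and only mirror the lines commented out above.

from typing import Any


def check_reuse_kv_config(
    override_config: dict[str, Any],
    parent_config: dict[str, Any],
) -> None:
    # Hypothetical helper mirroring the commented-out check: a config
    # mismatch raises unless the overriding layer sets 'allow_mismatch'.
    if override_config != parent_config and not (
        'allow_mismatch' in override_config and
        override_config['allow_mismatch']
    ):
        raise ValueError(
            'For reusing the kv cache of a previous layer, the previous layer '
            'should match the block config as the current layer.',
        )


# Hypothetical example configs illustrating the behavior.
parent = {'attn_config': {'attn_impl': 'flash'}}
matching = {'attn_config': {'attn_impl': 'flash'}}
mismatched = {'attn_config': {'attn_impl': 'torch'}, 'allow_mismatch': True}

check_reuse_kv_config(matching, parent)    # passes: configs are identical
check_reuse_kv_config(mismatched, parent)  # passes: mismatch explicitly allowed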
