sylwia-kuros committed on
Commit
5db407b
·
verified ·
1 Parent(s): 13d60ce

Update modeling_minicpm.py for compatibility with transformers 4.49

Browse files
Files changed (1) hide show
  1. modeling_llava_qwen2.py +1 -1
modeling_llava_qwen2.py CHANGED
@@ -2032,7 +2032,7 @@ class Qwen2ForCausalLM(Qwen2PreTrainedModel):
2032
  if isinstance(past_key_values, Cache):
2033
  cache_length = past_key_values.get_seq_length()
2034
  past_length = past_key_values.seen_tokens
2035
- max_cache_length = past_key_values.get_max_length()
2036
  else:
2037
  cache_length = past_length = past_key_values[0][0].shape[2]
2038
  max_cache_length = None
 
2032
  if isinstance(past_key_values, Cache):
2033
  cache_length = past_key_values.get_seq_length()
2034
  past_length = past_key_values.seen_tokens
2035
+ max_cache_length = past_key_values.get_max_length() if hasattr(past_key_values, "get_max_length") else past_key_values.get_max_cache_shape()
2036
  else:
2037
  cache_length = past_length = past_key_values[0][0].shape[2]
2038
  max_cache_length = None