PR #7: addressing the error: get_max_length
by rzgar — opened
File changed: modeling_llava_qwen2.py (+2 −1)
@@ -2032,7 +2032,8 @@ class Qwen2ForCausalLM(Qwen2PreTrainedModel):
Before (lines 2032–2038):

    2032      if isinstance(past_key_values, Cache):
    2033          cache_length = past_key_values.get_seq_length()
    2034          past_length = past_key_values.seen_tokens
    2035  -       max_cache_length = past_key_values.get_max_length()
    2036      else:
    2037          cache_length = past_length = past_key_values[0][0].shape[2]
    2038          max_cache_length = None
After (lines 2032–2039):

    2032      if isinstance(past_key_values, Cache):
    2033          cache_length = past_key_values.get_seq_length()
    2034          past_length = past_key_values.seen_tokens
    2035  +       #max_cache_length = past_key_values.get_max_length()
    2036  +       max_cache_length = past_key_values.get_seq_length() if hasattr(past_key_values, 'get_seq_length') else None
    2037      else:
    2038          cache_length = past_length = past_key_values[0][0].shape[2]
    2039          max_cache_length = None

NOTE(review): this patch substitutes `get_seq_length()` (current number of cached tokens) for the removed `get_max_length()` (maximum cache capacity). In recent `transformers` releases the capacity accessor is `get_max_cache_shape()`; using the sequence length as the capacity may change truncation behavior when the cache has a real maximum size — verify against the installed `transformers` version.