Pull request #21 — Fix ignore warning
Opened by callanwu
File changed: modeling_chatglm.py (+2 −2)
@@ -1115,9 +1115,9 @@ class ChatGLMForConditionalGeneration(ChatGLMPreTrainedModel):
                 " recommend using `max_new_tokens` to control the maximum length of the generation.",
                 UserWarning,
             )
-        elif generation_config.max_new_tokens is not None:
+        elif generation_config.max_new_tokens is not None :
             generation_config.max_length = generation_config.max_new_tokens + input_ids_seq_length
-            if not has_default_max_length:
+            if not has_default_max_length and generation_config.max_length is not None:
                 logger.warn(
                     f"Both `max_new_tokens` (={generation_config.max_new_tokens}) and `max_length`(="
                     f"{generation_config.max_length}) seem to have been set. `max_new_tokens` will take precedence. "