callanwu committed on
Commit
1859e4a
1 Parent(s): 7fabe56

Fix ignore warning

Browse files

See https://github.com/THUDM/ChatGLM2-6B/issues/639

Files changed (1) hide show
  1. modeling_chatglm.py +1 -1
modeling_chatglm.py CHANGED
@@ -1107,7 +1107,7 @@ class ChatGLMForConditionalGeneration(ChatGLMPreTrainedModel):
1107
  )
1108
  elif generation_config.max_new_tokens is not None:
1109
  generation_config.max_length = generation_config.max_new_tokens + input_ids_seq_length
1110
- if not has_default_max_length:
1111
  logger.warn(
1112
  f"Both `max_new_tokens` (={generation_config.max_new_tokens}) and `max_length`(="
1113
  f"{generation_config.max_length}) seem to have been set. `max_new_tokens` will take precedence. "
 
1107
  )
1108
  elif generation_config.max_new_tokens is not None:
1109
  generation_config.max_length = generation_config.max_new_tokens + input_ids_seq_length
1110
+ if not has_default_max_length and generation_config.max_length is not None:
1111
  logger.warn(
1112
  f"Both `max_new_tokens` (={generation_config.max_new_tokens}) and `max_length`(="
1113
  f"{generation_config.max_length}) seem to have been set. `max_new_tokens` will take precedence. "