Skip to content

Commit

Permalink
Change max length adaptation messages to debug (#465)
Browse files Browse the repository at this point in the history
  • Loading branch information
katalinic-gc authored Jul 24, 2023
1 parent e101001 commit 0de94b7
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions optimum/graphcore/generation/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -1739,7 +1739,7 @@ def _on_device_greedy_search(
raise ValueError("Context length (input_ids.shape[-1]) > 1 is not supported yet.")

if (max_length - context_length) % self.on_device_generation_steps != 0:
logger.info(
logger.debug(
"`max_length - context_length` does not evenly divide `on_device_generation_steps` "
f"({max_length - context_length} vs {self.on_device_generation_steps}). Generation will be done "
f"{self.on_device_generation_steps} tokens at a time and stop short of `max_length` so as not to exceed it."
Expand Down Expand Up @@ -1841,7 +1841,7 @@ def _on_device_beam_search(
raise ValueError("Context length (input_ids.shape[-1]) > 1 is not supported yet.")

if (max_length - context_length) % self.on_device_generation_steps != 0:
logger.info(
logger.debug(
"`max_length - context_length` does not evenly divide `on_device_generation_steps` "
f"({max_length - context_length} vs {self.on_device_generation_steps}). Generation will be done "
f"{self.on_device_generation_steps} tokens at a time and stop short of `max_length` so as not to exceed it."
Expand Down

0 comments on commit 0de94b7

Please sign in to comment.