Skip to content

Commit fd9f294

Browse files
Update llama.py: include the number of input tokens in the ValueError exception message
1 parent d938e59 commit fd9f294

File tree

1 file changed

+1
-1
lines changed

1 file changed

+1
-1
lines changed

llama_cpp/llama.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -814,7 +814,7 @@ def _create_completion(
814814
llama_cpp.llama_reset_timings(self.ctx)
815815

816816
if len(prompt_tokens) > self._n_ctx:
817-
raise ValueError(f"Requested tokens exceed context window of {self._n_ctx}")
817+
raise ValueError(f"Requested tokens ({len(prompt_tokens)}) exceed context window of {self._n_ctx}")
818818

819819
# Truncate max_tokens if requested tokens would exceed the context window
820820
max_tokens = (

0 commit comments

Comments (0)