Skip to content

Commit ff9faaa

Browse files
authored
Merge pull request #385 from nb-programmer/main
Update llama.py: Added how many input tokens in ValueError exception
2 parents 37d5192 + fd9f294 commit ff9faaa

File tree

1 file changed

+1
-1
lines changed

1 file changed

+1
-1
lines changed

llama_cpp/llama.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -814,7 +814,7 @@ def _create_completion(
814814
llama_cpp.llama_reset_timings(self.ctx)
815815

816816
if len(prompt_tokens) > self._n_ctx:
817-
raise ValueError(f"Requested tokens exceed context window of {self._n_ctx}")
817+
raise ValueError(f"Requested tokens ({len(prompt_tokens)}) exceed context window of {self._n_ctx}")
818818

819819
# Truncate max_tokens if requested tokens would exceed the context window
820820
max_tokens = (

0 commit comments

Comments (0)