diff --git a/src/llama_cpp_agent/messages_formatter.py b/src/llama_cpp_agent/messages_formatter.py
index 995ef3f..bb43e4a 100644
--- a/src/llama_cpp_agent/messages_formatter.py
+++ b/src/llama_cpp_agent/messages_formatter.py
@@ -160,7 +160,7 @@ def _format_response(
 llama_3_prompt_markers = {
     Roles.system: PromptMarkers("""<|start_header_id|>system<|end_header_id|>\n""", """<|eot_id|>"""),
     Roles.user: PromptMarkers("""<|start_header_id|>user<|end_header_id|>\n""", """<|eot_id|>"""),
-    Roles.assistant: PromptMarkers("""<|start_header_id|>assistant<|end_header_id|>\n""", """<|eot_id|>"""),
+    Roles.assistant: PromptMarkers("""<|start_header_id|>assistant<|end_header_id|>\n\n""", """<|eot_id|>"""),
     Roles.tool: PromptMarkers("""<|start_header_id|>function_calling_results<|end_header_id|>\n""", """<|eot_id|>"""),
 }
 
@@ -180,7 +180,7 @@ def _format_response(
 gemma_2_prompt_markers = {
     Roles.system: PromptMarkers("""""", """\n\n"""),
     Roles.user: PromptMarkers("""<start_of_turn>user\n""", """<end_of_turn>\n"""),
-    Roles.assistant: PromptMarkers("""<start_of_turn>model\n""", """<end_of_turn>\n"""),
+    Roles.assistant: PromptMarkers("""<start_of_turn>model\n\n""", """<end_of_turn>\n"""),
     Roles.tool: PromptMarkers("", ""),
 }
 code_ds_prompt_markers = {
@@ -337,7 +337,7 @@ def _format_response(
     "",
     phi_3_chat_prompt_markers,
     True,
-    ["<|end|>", "<|end_of_turn|>"],
+    ["<|end|>", "<|endoftext|>"],
     use_user_role_for_function_call_result=True,
 )
 