Commit ad7ca84

server types: Move 'model' parameter to clarify it is used
The 'model' parameter has been supported since abetlen#931, but its placement under the '# ignored or currently unsupported' comment was copied from an older version of the file and never corrected. Moving it makes it clearer which parameters llama-cpp-python supports.
1 parent: 7c4aead

1 file changed: llama_cpp/server/types.py (+2 −2)
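The diff below references a shared model_field definition. For context, a minimal sketch of that pattern; the exact description string is an assumption for illustration, not a quote from llama_cpp/server/types.py:

from typing import List, Optional, Union

from pydantic import BaseModel, Field

# Sketch: a single Field object reused by both request models, so the
# 'model' parameter is documented once. The description text is assumed.
model_field = Field(
    description="The model to use for generating completions.", default=None
)

class CreateCompletionRequest(BaseModel):
    model: Optional[str] = model_field  # moved here by this commit
    prompt: Union[str, List[str]] = Field(
        default="", description="The prompt to generate completions for."
    )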
@@ -107,6 +107,7 @@
 
 
 class CreateCompletionRequest(BaseModel):
+    model: Optional[str] = model_field
     prompt: Union[str, List[str]] = Field(
         default="", description="The prompt to generate completions for."
     )
@@ -138,7 +139,6 @@ class CreateCompletionRequest(BaseModel):
     seed: Optional[int] = Field(None)
 
     # ignored or currently unsupported
-    model: Optional[str] = model_field
     n: Optional[int] = 1
     best_of: Optional[int] = 1
     user: Optional[str] = Field(default=None)
@@ -190,6 +190,7 @@ class ChatCompletionRequestMessage(BaseModel):
 
 
 class CreateChatCompletionRequest(BaseModel):
+    model: Optional[str] = model_field
     messages: List[llama_cpp.ChatCompletionRequestMessage] = Field(
         default=[], description="A list of messages to generate completions for."
     )
@@ -237,7 +238,6 @@ class CreateChatCompletionRequest(BaseModel):
     )
 
     # ignored or currently unsupported
-    model: Optional[str] = model_field
     n: Optional[int] = 1
     user: Optional[str] = Field(None)
 
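With the field listed among the supported parameters, a client of the server's OpenAI-compatible completions endpoint can pass it to pick a loaded model. A minimal sketch follows; the base URL, model alias, and prompt are illustrative assumptions, not values from this commit:

# Sketch: POST to llama-cpp-python's OpenAI-compatible completions route.
# "llama-2-7b" is a hypothetical model alias configured on the server.
import requests

resp = requests.post(
    "http://localhost:8000/v1/completions",  # assumed default host/port
    json={
        "model": "llama-2-7b",
        "prompt": "The capital of France is",
        "max_tokens": 16,
    },
)
print(resp.json()["choices"][0]["text"])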