File tree Expand file tree Collapse file tree 3 files changed +18
-2
lines changed
langchain_oci/chat_models Expand file tree Collapse file tree 3 files changed +18
-2
lines changed Original file line number Diff line number Diff line change @@ -30,7 +30,14 @@ This repository includes two main integration categories:
3030``` python
3131from langchain_oci import ChatOCIGenAI
3232
33- llm = ChatOCIGenAI()
33+ llm = ChatOCIGenAI(
34+ model_id = " MY_MODEL_ID" ,
35+ service_endpoint = " MY_SERVICE_ENDPOINT" ,
36+ compartment_id = " MY_COMPARTMENT_ID" ,
37+ model_kwargs = {" max_tokens" : 1024 }, # Use max_completion_tokens instead of max_tokens for OpenAI models
38+ auth_profile = " MY_AUTH_PROFILE" ,
39+ is_stream = True ,
40+ auth_type = " SECURITY_TOKEN"
41+ )
3442llm.invoke(" Sing a ballad of LangChain." )
3542```
3643
Original file line number Diff line number Diff line change @@ -1146,6 +1146,15 @@ def _prepare_request(
11461146 if stop is not None :
11471147 _model_kwargs [self ._provider .stop_sequence_key ] = stop
11481148
1149+ # Warn if using max_tokens with OpenAI models
1150+ if self .model_id and self .model_id .startswith ("openai." ) and "max_tokens" in _model_kwargs :
1151+ import warnings
1152+ warnings .warn (
1153+ "OpenAI models require 'max_completion_tokens' instead of 'max_tokens'." ,
1154+ UserWarning ,
1155+ stacklevel = 2
1156+ )
1157+
11491158 chat_params = {** _model_kwargs , ** kwargs , ** oci_params }
11501159
11511160 if not self .model_id :
Original file line number Diff line number Diff line change @@ -14,7 +14,7 @@ license = "UPL"
1414python = " >=3.9,<4.0"
1515langchain-core = " >=0.3.20,<1.0.0"
1616langchain = " >=0.3.20,<1.0.0"
17- oci = " >=2.144.0"
17+ oci = " >=2.161.0"
1818pydantic = " >=2,<3"
1919aiohttp = " >=3.12.14"
2020
You can’t perform that action at this time.
0 commit comments