Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 26 additions & 0 deletions libs/oci/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,32 @@ structured_llm = llm.with_structured_output(Joke)
structured_llm.invoke("Tell me a joke about programming")
```

### 5. Use OpenAI Responses API
`ChatOCIOpenAI` supports the OpenAI Responses API.

```python
from langchain_oci import (
ChatOCIOpenAI,
OCISessionAuth,
)
client = ChatOCIOpenAI(
auth=OCISessionAuth(profile_name="MY_PROFILE_NAME"),
compartment_id="MY_COMPARTMENT_ID",
region="MY_REGION",
override_url="MY_OVERRIDE_URL",
model="MY_MODEL",
conversation_store_id="MY_CONVERSATION_STORE_ID"
)
messages = [
(
"system",
"You are a helpful translator. Translate the user sentence to French.",
),
("human", "I love programming."),
]
response = client.invoke(messages)
```


## OCI Data Science Model Deployment Examples

Expand Down
Empty file added libs/oci/examples/__init__.py
Empty file.
Empty file.
Original file line number Diff line number Diff line change
@@ -0,0 +1,114 @@
from langchain_core.prompts import ChatPromptTemplate
from pydantic import BaseModel, Field

from langchain_oci import ChatOCIOpenAI, OCISessionAuth

COMPARTMENT_ID = ""
CONVERSATION_STORE_ID = ""
OVERRIDE_URL = ""
REGION = ""
MODEL = ""
PROFILE_NAME = ""


def get_oci_openai_client():
    """Build a ``ChatOCIOpenAI`` client from the module-level configuration.

    Uses session-based OCI authentication (``PROFILE_NAME``) and the
    compartment/region/endpoint/model constants declared at the top of
    this module.
    """
    config = {
        "auth": OCISessionAuth(profile_name=PROFILE_NAME),
        "compartment_id": COMPARTMENT_ID,
        "region": REGION,
        "override_url": OVERRIDE_URL,
        "model": MODEL,
        "conversation_store_id": CONVERSATION_STORE_ID,
    }
    return ChatOCIOpenAI(**config)


def do_model_invoke():
    """Invoke the model once with a system + human message pair.

    Returns the model's response (a French translation of the user
    sentence, per the system instruction).
    """
    llm = get_oci_openai_client()
    conversation = [
        (
            "system",
            "You are a helpful translator. Translate the user sentence to French.",
        ),
        ("human", "I love programming."),
    ]
    return llm.invoke(conversation)


def do_prompt_chaining():
    """Chain a ``ChatPromptTemplate`` into the model and invoke it.

    The template parameterizes the source/target languages and the user
    input; the chain renders the prompt and forwards it to the model.
    """
    llm = get_oci_openai_client()
    template = ChatPromptTemplate.from_messages(
        [
            (
                "system",
                "You are a helpful assistant that translates {input_language} to {output_language}.",
            ),
            ("human", "{input}"),
        ]
    )
    pipeline = template | llm
    inputs = {
        "input_language": "English",
        "output_language": "German",
        "input": "I love programming.",
    }
    return pipeline.invoke(inputs)


def do_function_calling():
    """Demonstrate function/tool calling via a Pydantic tool schema.

    The ``GetWeather`` schema (its docstring and field description are
    sent to the model as the tool definition, so they are part of the
    example's behavior) is bound to the model before invoking.
    """

    class GetWeather(BaseModel):
        """Get the current weather in a given location"""

        location: str = Field(
            ..., description="The city and state, e.g. San Francisco, CA"
        )

    llm = get_oci_openai_client()
    tool_llm = llm.bind_tools([GetWeather])
    return tool_llm.invoke(
        "what is the weather like in San Francisco",
    )


def do_web_search():
    """Demonstrate the hosted web-search tool of the Responses API."""
    llm = get_oci_openai_client()
    search_llm = llm.bind_tools([{"type": "web_search_preview"}])
    return search_llm.invoke("What was a positive news story from today?")


def do_hosted_mcp_calling():
    """Demonstrate calling a hosted MCP server (deepwiki) as a tool."""
    llm = get_oci_openai_client()
    mcp_tool = {
        "type": "mcp",
        "server_label": "deepwiki",
        "server_url": "https://mcp.deepwiki.com/mcp",
        # "never" lets the model call the server without per-call approval
        "require_approval": "never",
    }
    mcp_llm = llm.bind_tools([mcp_tool])
    question = (
        "What transport protocols does the 2025-03-26 version of the MCP "
        "spec (modelcontextprotocol/modelcontextprotocol) support?"
    )
    return mcp_llm.invoke(question)


def main():
    """Run every Responses API example in sequence."""
    demos = (
        do_model_invoke,
        do_prompt_chaining,
        do_function_calling,
        do_web_search,
        do_hosted_mcp_calling,
    )
    for demo in demos:
        demo()


if __name__ == "__main__":
    main()
12 changes: 12 additions & 0 deletions libs/oci/langchain_oci/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,13 @@
ChatOCIModelDeploymentVLLM,
)
from langchain_oci.chat_models.oci_generative_ai import ChatOCIGenAI
from langchain_oci.chat_models.oci_generative_ai_responses_api import (
ChatOCIOpenAI,
OCIInstancePrincipleAuth,
OCIResourcePrincipleAuth,
OCISessionAuth,
OCIUserPrincipleAuth,
)
from langchain_oci.embeddings.oci_data_science_model_deployment_endpoint import (
OCIModelDeploymentEndpointEmbeddings,
)
Expand All @@ -32,4 +39,9 @@
"OCIModelDeploymentLLM",
"OCIModelDeploymentTGI",
"OCIModelDeploymentVLLM",
"ChatOCIOpenAI",
"OCISessionAuth",
"OCIResourcePrincipleAuth",
"OCIInstancePrincipleAuth",
"OCIUserPrincipleAuth",
]
Loading