Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 6 additions & 1 deletion .github/workflows/lint.yml
Original file line number Diff line number Diff line change
Expand Up @@ -34,4 +34,9 @@ jobs:
args: check .

- name: Run Mypy
run: uv run mypy stackone_ai --exclude stackone_ai/server.py
run: |
if [[ "${{ matrix.python-version }}" == "3.9" ]]; then
uv run mypy stackone_ai --exclude stackone_ai/server.py
else
uv run mypy stackone_ai
fi
52 changes: 52 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -110,6 +110,58 @@ for tool_call in response.tool_calls:

</details>

<details>
<summary>LangGraph Integration</summary>

StackOne tools convert to LangChain tools, which LangGraph consumes via its prebuilt nodes:

Prerequisites:

```bash
pip install langgraph langchain-openai
```

```python
from langchain_openai import ChatOpenAI
from typing import Annotated
from typing_extensions import TypedDict

from langgraph.graph import StateGraph, START, END
from langgraph.graph.message import add_messages
from langgraph.prebuilt import tools_condition

from stackone_ai import StackOneToolSet
from stackone_ai.integrations.langgraph import to_tool_node, bind_model_with_tools

# Prepare tools
toolset = StackOneToolSet()
tools = toolset.get_tools("hris_*", account_id="your-account-id")
langchain_tools = tools.to_langchain()

class State(TypedDict):
messages: Annotated[list, add_messages]

# Build a small agent loop: LLM -> maybe tools -> back to LLM
graph = StateGraph(State)
graph.add_node("tools", to_tool_node(langchain_tools))

def call_llm(state: dict):
llm = ChatOpenAI(model="gpt-4o-mini")
llm = bind_model_with_tools(llm, langchain_tools)
resp = llm.invoke(state["messages"]) # returns AIMessage with optional tool_calls
    return {"messages": [resp]}

graph.add_node("llm", call_llm)
graph.add_edge(START, "llm")
graph.add_conditional_edges("llm", tools_condition)
graph.add_edge("tools", "llm")
app = graph.compile()

_ = app.invoke({"messages": [("user", "Get employee with id emp123") ]})
```

</details>

<details>
<summary>CrewAI Integration (Python 3.10+)</summary>

Expand Down
39 changes: 0 additions & 39 deletions examples/langgraph_tool_node.py

This file was deleted.

15 changes: 14 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@ mcp = [
examples = [
"crewai>=0.102.0; python_version>='3.10'",
"langchain-openai>=0.3.6",
"langgraph>=0.2.0",
"openai>=1.63.2",
"python-dotenv>=1.0.1",
]
Expand Down Expand Up @@ -105,7 +106,7 @@ select = [
]

[tool.mypy]
python_version = "3.9"
python_version = "3.10"
disallow_untyped_defs = true
disallow_incomplete_defs = true
check_untyped_defs = true
Expand All @@ -115,7 +116,19 @@ warn_redundant_casts = true
warn_unused_ignores = true
warn_return_any = true
warn_unreachable = true
exclude = [
"^.venv/",
]

[[tool.mypy.overrides]]
module = "bm25s"
ignore_missing_imports = true

[[tool.mypy.overrides]]
module = "langgraph.*"
ignore_missing_imports = true

[[tool.mypy.overrides]]
module = "mcp.*"
ignore_missing_imports = true
ignore_errors = true
20 changes: 20 additions & 0 deletions stackone_ai/integrations/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
"""Integration helpers for external frameworks.

Currently includes:

- LangGraph helpers to turn StackOne tools into a `ToolNode` or `ToolExecutor`.
"""

from .langgraph import (
bind_model_with_tools,
create_react_agent,
to_tool_executor,
to_tool_node,
)

__all__ = [
"to_tool_node",
"to_tool_executor",
"bind_model_with_tools",
"create_react_agent",
]
94 changes: 94 additions & 0 deletions stackone_ai/integrations/langgraph.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,94 @@
"""LangGraph integration helpers.

These utilities convert StackOne tools into LangGraph prebuilt components.

Usage:
from stackone_ai import StackOneToolSet
from stackone_ai.integrations.langgraph import to_tool_node

toolset = StackOneToolSet()
tools = toolset.get_tools("hris_*", account_id="...")
node = to_tool_node(tools) # langgraph.prebuilt.ToolNode
"""

from __future__ import annotations

from collections.abc import Sequence
from typing import TYPE_CHECKING, Any

from langchain_core.tools import BaseTool

from stackone_ai.models import Tools

if TYPE_CHECKING:  # pragma: no cover - only for typing
    try:
        from langgraph.prebuilt import ToolNode
    except ImportError:  # pragma: no cover - langgraph is an optional dependency
        # Typing-only fallback so annotations that reference ToolNode still
        # resolve when langgraph is not installed.
        class ToolNode:  # type: ignore[no-redef]
            pass


def _ensure_langgraph() -> None:
try:
from langgraph import prebuilt as _ # noqa: F401
except Exception as e: # pragma: no cover
Copy link

Copilot AI Sep 2, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Using a bare Exception catch is too broad. Consider catching ImportError or ModuleNotFoundError specifically since this is checking for missing dependencies.

Suggested change
except Exception as e: # pragma: no cover
except ImportError as e: # pragma: no cover

Copilot uses AI. Check for mistakes.
raise ImportError(
"LangGraph is not installed. Install with `pip install langgraph` or "
"`pip install 'stackone-ai[examples]'`"
) from e


def _to_langchain_tools(tools: Tools | Sequence[BaseTool]) -> Sequence[BaseTool]:
    """Normalize *tools* to a sequence of LangChain ``BaseTool`` instances.

    A StackOne ``Tools`` collection is converted via ``to_langchain()``; any
    other sequence is assumed to already contain LangChain tools and is
    returned unchanged.
    """
    return tools.to_langchain() if isinstance(tools, Tools) else tools


def to_tool_node(tools: Tools | Sequence[BaseTool], **kwargs: Any) -> Any:
    """Create a LangGraph `ToolNode` from StackOne tools or LangChain tools.

    Accepts either a `Tools` collection from this SDK or an existing sequence of
    LangChain `BaseTool` instances and returns a LangGraph `ToolNode` suitable
    for inclusion in a graph.
    """
    _ensure_langgraph()
    # Imported lazily so a helpful error is raised when langgraph is absent.
    from langgraph.prebuilt import ToolNode

    return ToolNode(_to_langchain_tools(tools), **kwargs)


def to_tool_executor(tools: Tools | Sequence[BaseTool], **kwargs: Any) -> Any:
    """Deprecated alias for :func:`to_tool_node`.

    Note: ``ToolExecutor`` has been deprecated in LangGraph in favor of
    ``ToolNode``; this function is kept for backward compatibility and returns
    a ``ToolNode`` built from the given tools.
    """
    # Delegate so the two entry points cannot drift apart.
    return to_tool_node(tools, **kwargs)


def bind_model_with_tools(model: Any, tools: Tools | Sequence[BaseTool]) -> Any:
    """Bind tools to an LLM that supports LangChain's `.bind_tools()` API.

    Converts a `Tools` collection to LangChain tools when needed, then calls
    `model.bind_tools(...)` and returns the bound model.
    """
    return model.bind_tools(_to_langchain_tools(tools))


def create_react_agent(llm: Any, tools: Tools | Sequence[BaseTool], **kwargs: Any) -> Any:
    """Create a LangGraph ReAct agent using StackOne tools.

    Thin wrapper around `langgraph.prebuilt.create_react_agent` that accepts a
    `Tools` collection from this SDK.
    """
    _ensure_langgraph()
    # Aliased on import so it does not shadow this wrapper's own name.
    from langgraph.prebuilt import create_react_agent as _prebuilt_create

    return _prebuilt_create(llm, _to_langchain_tools(tools), **kwargs)
6 changes: 3 additions & 3 deletions stackone_ai/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
)
)

try: # type: ignore[unreachable]
try:
import mcp.types as types
from mcp.server import NotificationOptions, Server
from mcp.server.models import InitializationOptions
Expand Down Expand Up @@ -56,7 +56,7 @@ def tool_needs_account_id(tool_name: str) -> bool:
return True


@app.list_tools()
@app.list_tools() # type: ignore[misc]
async def list_tools() -> list[Tool]:
"""List all available StackOne tools as MCP tools."""
if not toolset:
Expand Down Expand Up @@ -114,7 +114,7 @@ async def list_tools() -> list[Tool]:
) from e


@app.call_tool()
@app.call_tool() # type: ignore[misc]
async def call_tool(
name: str, arguments: dict[str, Any]
) -> list[TextContent | ImageContent | EmbeddedResource]:
Expand Down
Loading