11 changes: 11 additions & 0 deletions Multi-Agent Deep Researcher/.gitignore
@@ -0,0 +1,11 @@
# Python-generated files
__pycache__/
*.py[oc]
build/
dist/
wheels/
*.egg-info

# Virtual environments
.venv
.env
1 change: 1 addition & 0 deletions Multi-Agent Deep Researcher/.python-version
@@ -0,0 +1 @@
3.11
38 changes: 38 additions & 0 deletions Multi-Agent Deep Researcher/README.md
@@ -0,0 +1,38 @@
# Agentic Deep Researcher

Agentic Deep Researcher automates deep web research: a crew of AI agents searches the web, analyzes and fact-checks the results, and writes a clear, well-sourced answer to your question.

We use:

- LinkUp (Search Tool)
- CrewAI (Agentic design)
- Deepseek V3 (LLM)
- Streamlit to wrap the logic in an interactive UI

### Setup

Run these commands in the project root:

```bash
uv venv                    # creates a virtual environment (if one does not exist yet)
source .venv/bin/activate  # or `.venv\Scripts\activate` on Windows
uv pip install -e .        # installs the project in editable mode
```
```
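
The code reads two API keys from the environment: `OPENROUTER_API_KEY` (used in `agents.py` for the DeepSeek model via OpenRouter) and `LINKUP_API_KEY` (used for LinkUp search; it can also be entered in the app sidebar). Since `agents.py` calls `load_dotenv()`, one convenient option is a `.env` file in the project root. A minimal sketch with placeholder values:

```
# .env  (ignored by git; see .gitignore)
OPENROUTER_API_KEY=your-openrouter-key
LINKUP_API_KEY=your-linkup-key
```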

### Run the Application

Run the application with:

```bash
streamlit run app.py
```

## 📬 Stay Updated with Our Newsletter!

**Get a FREE Data Science eBook** 📖 with 150+ essential lessons in Data Science when you subscribe to our newsletter! Stay in the loop with the latest tutorials, insights, and exclusive resources. [Subscribe now!](https://join.dailydoseofds.com)

[![Daily Dose of Data Science Newsletter](https://github.com/patchy631/ai-engineering/blob/main/resources/join_ddods.png)](https://join.dailydoseofds.com)

## Contribution

Contributions are welcome! Feel free to fork this repository and submit pull requests with your improvements.
137 changes: 137 additions & 0 deletions Multi-Agent Deep Researcher/agents.py
@@ -0,0 +1,137 @@
import os
from typing import Type
from dotenv import load_dotenv
from pydantic import BaseModel, Field
from linkup import LinkupClient
from crewai import Agent, Task, Crew, Process, LLM
from crewai.tools import BaseTool

# Load environment variables (for non-LinkUp settings)
load_dotenv()


def get_llm_client():
"""Initialize and return the LLM client"""
return LLM(
model="openrouter/deepseek/deepseek-chat:free",
base_url="https://openrouter.ai/api/v1",
api_key=os.getenv("OPENROUTER_API_KEY")
)
Comment on lines +13 to +19

🛠️ Refactor suggestion

Guard against missing OPENROUTER_API_KEY and cache the LLM instance
get_llm_client() will silently pass None to LLM if the environment variable is unset, leading to a late‑failing HTTP 401 that is hard to trace. In addition, a new client is created for every call, which is wasteful.

+from functools import lru_cache
+
-def get_llm_client():
+@lru_cache(maxsize=1)
+def get_llm_client() -> LLM:
     """Initialize and return the LLM client"""
-    return LLM(
+    api_key = os.getenv("OPENROUTER_API_KEY")
+    if not api_key:
+        raise EnvironmentError("OPENROUTER_API_KEY is not set")
+
+    return LLM(
         model="openrouter/deepseek/deepseek-chat:free",
         base_url="https://openrouter.ai/api/v1",
-        api_key=os.getenv("OPENROUTER_API_KEY")
+        api_key=api_key,
     )
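
As a usage sketch (hypothetical snippet, assuming the patched accessor above): with lru_cache the client is constructed once and reused, and the missing-key guard fails fast instead of surfacing a 401 later.

```python
# Hypothetical sanity check for the cached, guarded accessor.
import os
from agents import get_llm_client

os.environ["OPENROUTER_API_KEY"] = "dummy-key-for-demo"  # placeholder, not a real key
get_llm_client.cache_clear()  # lru_cache exposes cache_clear()

client_a = get_llm_client()
client_b = get_llm_client()
assert client_a is client_b  # same LLM instance, built only once

os.environ.pop("OPENROUTER_API_KEY")
get_llm_client.cache_clear()
try:
    get_llm_client()
except EnvironmentError as err:
    print(err)  # "OPENROUTER_API_KEY is not set"
```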


# Define LinkUp Search Tool


class LinkUpSearchInput(BaseModel):
"""Input schema for LinkUp Search Tool."""
query: str = Field(description="The search query to perform")
depth: str = Field(default="standard",
description="Depth of search: 'standard' or 'deep'")
output_type: str = Field(
default="searchResults", description="Output type: 'searchResults', 'sourcedAnswer', or 'structured'")


class LinkUpSearchTool(BaseTool):
name: str = "LinkUp Search"
description: str = "Search the web for information using LinkUp and return comprehensive results"
args_schema: Type[BaseModel] = LinkUpSearchInput

def __init__(self):
super().__init__()

def _run(self, query: str, depth: str = "standard", output_type: str = "searchResults") -> str:
"""Execute LinkUp search and return results."""
try:
# Initialize LinkUp client with API key from environment variables
linkup_client = LinkupClient(api_key=os.getenv("LINKUP_API_KEY"))

# Perform search
search_response = linkup_client.search(
query=query,
depth=depth,
output_type=output_type
)

return str(search_response)
except Exception as e:
return f"Error occurred while searching: {str(e)}"
Comment on lines +33 to +56

🛠️ Refactor suggestion

Improve error handling & signature of _run

  1. Returning a stringified exception hides the stack trace from CrewAI and makes automated retries impossible.
  2. If LINKUP_API_KEY is missing, the call should fail fast rather than creating a client that raises later.
  3. Consider returning the native search_response object; downstream agents can still cast to str if needed.
  4. CrewAI expects an _arun coroutine for async tools—worth adding for network I/O.
-        try:
-            # Initialize LinkUp client with API key from environment variables
-            linkup_client = LinkupClient(api_key=os.getenv("LINKUP_API_KEY"))
+        api_key = os.getenv("LINKUP_API_KEY")
+        if not api_key:
+            raise EnvironmentError("LINKUP_API_KEY is not set")
+
+        try:
+            linkup_client = LinkupClient(api_key=api_key)
             ...
-            return str(search_response)
-        except Exception as e:
-            return f"Error occurred while searching: {str(e)}"
+            return search_response
+        except Exception as e:
+            # Re‑raise so CrewAI can surface the root cause
+            raise RuntimeError("LinkUp search failed") from e
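
On point 4, a minimal sketch of the coroutine that could be added to the class above (hypothetical; it simply offloads the blocking HTTP call to a worker thread):

```python
import asyncio

# Inside LinkUpSearchTool, alongside _run:
async def _arun(self, query: str, depth: str = "standard",
                output_type: str = "searchResults"):
    # Run the synchronous search in a thread so the event loop
    # is not blocked while waiting on network I/O.
    return await asyncio.to_thread(self._run, query, depth, output_type)
```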



def create_research_crew(query: str):
"""Create and configure the research crew with all agents and tasks"""
# Initialize tools
linkup_search_tool = LinkUpSearchTool()

# Get LLM client
client = get_llm_client()

web_searcher = Agent(
role="Web Searcher",
goal="Find the most relevant information on the web, along with source links (urls).",
backstory="An expert at formulating search queries and retrieving relevant information. Passes the results to the 'Research Analyst' only.",
verbose=True,
allow_delegation=True,
tools=[linkup_search_tool],
llm=client,
)

# Define the research analyst
research_analyst = Agent(
role="Research Analyst",
goal="Analyze and synthesize raw information into structured insights, along with source links (urls).",
backstory="An expert at analyzing information, identifying patterns, and extracting key insights. If required, can delagate the task of fact checking/verification to 'Web Searcher' only. Passes the final results to the 'Technical Writer' only.",
        verbose=True,
        allow_delegation=True,
        llm=client,
    )

    # Define the technical writer
    technical_writer = Agent(
        role="Technical Writer",
        goal="Create well-structured, clear, and comprehensive responses, along with source links (urls).",
        backstory="An expert at communicating complex information in an accessible way.",
        verbose=True,
        allow_delegation=False,
        llm=client,
    )

    # Define tasks
    search_task = Task(
        description=f"Search for comprehensive information about: {query}.",
        agent=web_searcher,
        expected_output="Detailed raw search results including sources (urls).",
        tools=[linkup_search_tool]
    )

    analysis_task = Task(
        description="Analyze the raw search results, identify key information, verify facts and prepare a structured analysis.",
        agent=research_analyst,
        expected_output="A structured analysis of the information with verified facts and key insights, along with source links",
        context=[search_task]
    )

    writing_task = Task(
        description="Create a comprehensive, well-organized response based on the research analysis.",
        agent=technical_writer,
        expected_output="A clear, comprehensive response that directly answers the query with proper citations/source links (urls).",
        context=[analysis_task]
    )

    # Create the crew
    crew = Crew(
        agents=[web_searcher, research_analyst, technical_writer],
        tasks=[search_task, analysis_task, writing_task],
        verbose=True,
        process=Process.sequential
    )

    return crew


def run_research(query: str):
"""Run the research process and return results"""
try:
crew = create_research_crew(query)
result = crew.kickoff()
return result.raw
except Exception as e:
return f"Error: {str(e)}"
Comment on lines +130 to +137

⚠️ Potential issue

crew.kickoff() already returns the final answer – .raw may not exist
Depending on the CrewAI version, kickoff() can return a plain str rather than an object exposing .raw; in that case accessing result.raw raises AttributeError, masking the real result.

-        result = crew.kickoff()
-        return result.raw
+        return crew.kickoff()
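
A version-tolerant alternative sketch (assumes only the functions already defined in agents.py) that works whether kickoff() returns a string or an object exposing .raw:

```python
def run_research(query: str) -> str:
    """Run the research process and return results."""
    try:
        crew = create_research_crew(query)
        result = crew.kickoff()
        # Newer CrewAI releases return a CrewOutput-like object with .raw;
        # older ones may return the final answer as a plain string.
        return getattr(result, "raw", result)
    except Exception as e:
        return f"Error: {str(e)}"
```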

83 changes: 83 additions & 0 deletions Multi-Agent Deep Researcher/app.py
@@ -0,0 +1,83 @@
import streamlit as st
from agents import run_research
import os

# Set up page configuration
st.set_page_config(page_title="🔍 Agentic Deep Researcher", layout="wide")

# Initialize session state variables
if "linkup_api_key" not in st.session_state:
st.session_state.linkup_api_key = ""
if "messages" not in st.session_state:
st.session_state.messages = []

def reset_chat():
    st.session_state.messages = []

# Sidebar: Linkup Configuration with updated logo link
with st.sidebar:
    col1, col2 = st.columns([1, 3])
    with col1:
        st.write("")
        st.image(
            "https://avatars.githubusercontent.com/u/175112039?s=200&v=4", width=65)
    with col2:
        st.header("Linkup Configuration")
        st.write("Deep Web Search")

    st.markdown("[Get your API key](https://app.linkup.so/sign-up)",
                unsafe_allow_html=True)

    linkup_api_key = st.text_input(
        "Enter your Linkup API Key", type="password")
    if linkup_api_key:
        st.session_state.linkup_api_key = linkup_api_key
        # Update the environment variable
        os.environ["LINKUP_API_KEY"] = linkup_api_key
        st.success("API Key stored successfully!")
Comment on lines +31 to +37

🛠️ Refactor suggestion

Validate API key before indicating success

The code stores any non-empty input as the API key without validation. Consider verifying the key format or making a test request before showing a success message.

if linkup_api_key:
    st.session_state.linkup_api_key = linkup_api_key
    # Update the environment variable
    os.environ["LINKUP_API_KEY"] = linkup_api_key
-   st.success("API Key stored successfully!")
+   # Validate the API key format (basic check)
+   if linkup_api_key.startswith("lk-") and len(linkup_api_key) > 10:
+       st.success("API Key stored successfully!")
+   else:
+       st.warning("API key format might be incorrect. Please verify your key.")
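
If a stricter check is wanted, a hypothetical sketch that exercises the key with a minimal LinkUp call (this assumes a short standard-depth search is an acceptable way to validate, and reuses the search signature already used in agents.py):

```python
from linkup import LinkupClient

def linkup_key_works(api_key: str) -> bool:
    """Return True if LinkUp accepts the key; False on auth or network errors (sketch)."""
    try:
        LinkupClient(api_key=api_key).search(
            query="ping", depth="standard", output_type="searchResults")
        return True
    except Exception:
        return False
```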


# Main Chat Interface Header with powered by logos from original code links
col1, col2 = st.columns([6, 1])
with col1:
st.markdown("<h2 style='color: #0066cc;'>🔍 Agentic Deep Researcher</h2>",
unsafe_allow_html=True)
powered_by_html = """
<div style='display: flex; align-items: center; gap: 10px; margin-top: 5px;'>
<span style='font-size: 20px; color: #666;'>Powered by</span>
<img src="https://cdn.prod.website-files.com/66cf2bfc3ed15b02da0ca770/66d07240057721394308addd_Logo%20(1).svg" width="80">
<span style='font-size: 20px; color: #666;'>and</span>
<img src="https://framerusercontent.com/images/wLLGrlJoyqYr9WvgZwzlw91A8U.png?scale-down-to=512" width="100">
</div>
"""
st.markdown(powered_by_html, unsafe_allow_html=True)
with col2:
st.button("Clear ↺", on_click=reset_chat)

# Add spacing between header and chat history
st.markdown("<div style='height: 30px;'></div>", unsafe_allow_html=True)

# Display chat history
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

# Accept user input and process the research query
if prompt := st.chat_input("Ask a question about your documents..."):
st.session_state.messages.append({"role": "user", "content": prompt})
with st.chat_message("user"):
st.markdown(prompt)

if not st.session_state.linkup_api_key:
response = "Please enter your Linkup API Key in the sidebar."
else:
with st.spinner("Researching... This may take a moment..."):
try:
result = run_research(prompt)
response = result
except Exception as e:
response = f"An error occurred: {str(e)}"

with st.chat_message("assistant"):
st.markdown(response)
st.session_state.messages.append(
{"role": "assistant", "content": response})
Comment on lines +1 to +83

💡 Verification agent

🧩 Analysis chain

Consider implementing environment variable file loading

Since you're using python-dotenv as a dependency, consider implementing proper .env file loading for API keys rather than relying solely on session state.

import streamlit as st
from agents import run_research
import os
+from dotenv import load_dotenv

+# Load environment variables from .env file if present
+load_dotenv()

# Set up page configuration
st.set_page_config(page_title="🔍 Agentic Deep Researcher", layout="wide")

# Initialize session state variables
+# Try to get API key from environment first
+initial_api_key = os.environ.get("LINKUP_API_KEY", "")
if "linkup_api_key" not in st.session_state:
-    st.session_state.linkup_api_key = ""
+    st.session_state.linkup_api_key = initial_api_key

This change would allow users to store their API key in a .env file for development, providing a better experience for returning users and aligning with the dependencies declared in pyproject.toml.


🏁 Script executed:

#!/bin/bash
# Check if dotenv is imported anywhere in the codebase
grep -r "from dotenv import" --include="*.py" .
# Check if .env is mentioned in any files
grep -r "\.env" --include="*.py" .

Length of output: 3539


🏁 Script executed:

#!/bin/bash
# Check if python-dotenv is listed as a dependency in pyproject.toml
grep -R "dotenv" -n pyproject.toml || echo "No dotenv entry in pyproject.toml"

# Check if python-dotenv appears in requirements.txt
if [ -f requirements.txt ]; then
  grep -R "dotenv" -n requirements.txt || echo "No dotenv entry in requirements.txt"
else
  echo "requirements.txt not found"
fi

Length of output: 252


🏁 Script executed:

#!/bin/bash
# Search for project metadata files in repo (2 levels deep)
echo "Searching for pyproject.toml, requirements.txt, Pipfile..."
fd --type f --max-depth 2 pyproject.toml || echo "No pyproject.toml found under 2 levels"
fd --type f --max-depth 2 requirements.txt || echo "No requirements.txt found under 2 levels"
fd --type f --max-depth 2 Pipfile || echo "No Pipfile found under 2 levels"

# If a pyproject.toml exists in the Multi-Agent Deep Researcher directory, check for dotenv
if [ -f "Multi-Agent Deep Researcher/pyproject.toml" ]; then
  echo "Checking for dotenv in Multi-Agent Deep Researcher/pyproject.toml..."
  grep -R "dotenv" -n "Multi-Agent Deep Researcher/pyproject.toml" || echo "No dotenv entry in that file"
fi

Length of output: 856


Load environment variables from your .env file
Since python-dotenv (≥1.1.0) is already declared in Multi-Agent Deep Researcher/pyproject.toml, add a call to load_dotenv() at the top of app.py so users can store their LINKUP_API_KEY in a .env file.

• File: Multi-Agent Deep Researcher/app.py
• Suggested changes:

 import streamlit as st
 from agents import run_research
 import os
+from dotenv import load_dotenv

+# Load variables from .env into os.environ
+load_dotenv()

 # Set up page configuration
 st.set_page_config(page_title="🔍 Agentic Deep Researcher", layout="wide")

 # Initialize session state variables
- if "linkup_api_key" not in st.session_state:
-     st.session_state.linkup_api_key = ""
+ # Prefill from environment if available
+ initial_api_key = os.environ.get("LINKUP_API_KEY", "")
+ if "linkup_api_key" not in st.session_state:
+     st.session_state.linkup_api_key = initial_api_key

This ensures returning users can simply drop their API key into a .env file without retyping it each session.


14 changes: 14 additions & 0 deletions Multi-Agent Deep Researcher/pyproject.toml
@@ -0,0 +1,14 @@
[project]
name = "agentic-deep-researcher"
version = "0.1.0"
description = "Deep Research Agent using Crew AI and LinkUp API"
readme = "README.md"
requires-python = ">=3.11"
dependencies = [
"crewai>=0.114.0",
"linkup-sdk>=0.2.4",
"openai>=1.75.0",
"python-dotenv>=1.1.0",
"streamlit>=1.44.1",
"streamlit-crewai-process-output>=0.1.1",
]