Formatting
ashpreetbedi committed Jan 25, 2025
1 parent 357d6c6 commit a8116dd
Showing 4 changed files with 77 additions and 45 deletions.
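The diff itself is mechanical: long call expressions are wrapped one argument per line with trailing commas, and import blocks are sorted alphabetically. The commit does not say which formatter was used; the result is consistent with a black-plus-isort pass (or an equivalent ruff configuration). A hypothetical way to reproduce such a pass over the touched directories, assuming black and isort are installed:

    import subprocess

    # Hypothetical reproduction of the formatting pass in this commit; the actual
    # tool and configuration used by the repository are not stated in the diff.
    for cmd in (["isort", "cookbook", "libs"], ["black", "cookbook", "libs"]):
        subprocess.run(cmd, check=True)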
54 changes: 41 additions & 13 deletions cookbook/examples/streamlit/llm_os/app.py
@@ -1,4 +1,5 @@
from typing import List

import nest_asyncio
import streamlit as st
from agno.agent import Agent
@@ -15,7 +16,9 @@
page_icon=":orange_heart:",
)
st.title("LLM OS")
st.markdown("##### :orange_heart: built using [Agno](https://github.com/phidatahq/agno)")
st.markdown(
"##### :orange_heart: built using [Agno](https://github.com/phidatahq/agno)"
)


def main() -> None:
@@ -40,7 +43,9 @@ def main() -> None:
# Get calculator_enabled from session state if set
calculator_enabled = st.session_state["calculator_enabled"]
# Checkbox for enabling calculator
calculator = st.sidebar.checkbox("Calculator", value=calculator_enabled, help="Enable calculator.")
calculator = st.sidebar.checkbox(
"Calculator", value=calculator_enabled, help="Enable calculator."
)
if calculator_enabled != calculator:
st.session_state["calculator_enabled"] = calculator
calculator_enabled = calculator
@@ -52,7 +57,9 @@ def main() -> None:
# Get file_tools_enabled from session state if set
file_tools_enabled = st.session_state["file_tools_enabled"]
# Checkbox for enabling shell tools
file_tools = st.sidebar.checkbox("File Tools", value=file_tools_enabled, help="Enable file tools.")
file_tools = st.sidebar.checkbox(
"File Tools", value=file_tools_enabled, help="Enable file tools."
)
if file_tools_enabled != file_tools:
st.session_state["file_tools_enabled"] = file_tools
file_tools_enabled = file_tools
@@ -64,7 +71,11 @@ def main() -> None:
# Get ddg_search_enabled from session state if set
ddg_search_enabled = st.session_state["ddg_search_enabled"]
# Checkbox for enabling web search
ddg_search = st.sidebar.checkbox("Web Search", value=ddg_search_enabled, help="Enable web search using DuckDuckGo.")
ddg_search = st.sidebar.checkbox(
"Web Search",
value=ddg_search_enabled,
help="Enable web search using DuckDuckGo.",
)
if ddg_search_enabled != ddg_search:
st.session_state["ddg_search_enabled"] = ddg_search
ddg_search_enabled = ddg_search
@@ -76,7 +87,9 @@ def main() -> None:
# Get shell_tools_enabled from session state if set
shell_tools_enabled = st.session_state["shell_tools_enabled"]
# Checkbox for enabling shell tools
shell_tools = st.sidebar.checkbox("Shell Tools", value=shell_tools_enabled, help="Enable shell tools.")
shell_tools = st.sidebar.checkbox(
"Shell Tools", value=shell_tools_enabled, help="Enable shell tools."
)
if shell_tools_enabled != shell_tools:
st.session_state["shell_tools_enabled"] = shell_tools
shell_tools_enabled = shell_tools
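The four sidebar toggles above (Calculator, File Tools, Web Search, Shell Tools) all follow the same session-state sync pattern: read the stored flag, seed the checkbox with it, and write the user's choice back when it changes. A condensed sketch of that pattern using a hypothetical helper; the names and the default value are illustrative, not taken from the app:

    import streamlit as st

    def tool_toggle(label: str, state_key: str, help_text: str) -> bool:
        # Hypothetical helper condensing the repeated checkbox blocks above.
        if state_key not in st.session_state:
            st.session_state[state_key] = True  # default value is assumed, not from the app
        enabled = st.session_state[state_key]
        choice = st.sidebar.checkbox(label, value=enabled, help=help_text)
        if choice != enabled:
            st.session_state[state_key] = choice  # persist the change across reruns
        return choice

    calculator_enabled = tool_toggle("Calculator", "calculator_enabled", "Enable calculator.")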
@@ -159,7 +172,9 @@ def main() -> None:
st.session_state["llm_os"] = llm_os
except RuntimeError as e:
st.error(f"Database Error: {str(e)}")
st.info("Please make sure your PostgreSQL database is running at postgresql+psycopg://ai:ai@localhost:5532/ai")
st.info(
"Please make sure your PostgreSQL database is running at postgresql+psycopg://ai:ai@localhost:5532/ai"
)
return
else:
llm_os = st.session_state["llm_os"]
@@ -177,11 +192,14 @@ def main() -> None:
if llm_os.memory and not st.session_state["messages"]:
logger.debug("Loading chat history")
st.session_state["messages"] = [
{"role": message.role, "content": message.content} for message in llm_os.memory.messages
{"role": message.role, "content": message.content}
for message in llm_os.memory.messages
]
elif not st.session_state["messages"]:
logger.debug("No chat history found")
st.session_state["messages"] = [{"role": "agent", "content": "Ask me questions..."}]
st.session_state["messages"] = [
{"role": "agent", "content": "Ask me questions..."}
]

# Display chat history first (all previous messages)
for message in st.session_state["messages"]:
@@ -219,7 +237,9 @@ def main() -> None:
st.session_state["url_scrape_key"] = 0

input_url = st.sidebar.text_input(
"Add URL to Knowledge Base", type="default", key=st.session_state["url_scrape_key"]
"Add URL to Knowledge Base",
type="default",
key=st.session_state["url_scrape_key"],
)
add_url_button = st.sidebar.button("Add URL")
if add_url_button:
@@ -240,7 +260,9 @@ def main() -> None:
st.session_state["file_uploader_key"] = 100

uploaded_file = st.sidebar.file_uploader(
"Add a PDF :page_facing_up:", type="pdf", key=st.session_state["file_uploader_key"]
"Add a PDF :page_facing_up:",
type="pdf",
key=st.session_state["file_uploader_key"],
)
if uploaded_file is not None:
alert = st.sidebar.info("Processing PDF...", icon="🧠")
@@ -264,10 +286,14 @@ def main() -> None:
if llm_os.team and len(llm_os.team) > 0:
for team_member in llm_os.team:
if team_member.memory and len(team_member.memory.messages) > 0:
with st.status(f"{team_member.name} Memory", expanded=False, state="complete"):
with st.status(
f"{team_member.name} Memory", expanded=False, state="complete"
):
with st.container():
_team_member_memory_container = st.empty()
_team_member_memory_container.json(team_member.memory.get_messages())
_team_member_memory_container.json(
team_member.memory.get_messages()
)

# Remove the run history section entirely
if st.sidebar.button("New Run"):
@@ -280,7 +306,9 @@ def restart_agent():
for key in ["llm_os", "messages"]: # Removed "llm_os_run_id"
st.session_state.pop(key, None)
st.session_state["url_scrape_key"] = st.session_state.get("url_scrape_key", 0) + 1
st.session_state["file_uploader_key"] = st.session_state.get("file_uploader_key", 100) + 1
st.session_state["file_uploader_key"] = (
st.session_state.get("file_uploader_key", 100) + 1
)
st.rerun()
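The reset logic in restart_agent relies on a common Streamlit trick: incrementing a widget's key makes Streamlit treat it as a brand-new widget on the next rerun, so the URL input and the PDF uploader come back empty. A minimal standalone sketch of the idea (not part of this app):

    import streamlit as st

    # Bumping the key discards the widget's previous state on the next rerun.
    if "input_key" not in st.session_state:
        st.session_state["input_key"] = 0

    if st.button("Reset input"):
        st.session_state["input_key"] += 1
        st.rerun()

    st.text_input("Type something", key=st.session_state["input_key"])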


38 changes: 21 additions & 17 deletions cookbook/examples/streamlit/llm_os/os_agent.py
@@ -1,24 +1,24 @@
import os
from pathlib import Path
from typing import Optional, List
from textwrap import dedent
from typing import List, Optional

from agno.agent import Agent
from agno.embedder.openai import OpenAIEmbedder
from agno.knowledge import AgentKnowledge
from agno.models.openai import OpenAIChat
from agno.storage.agent.postgres import PostgresAgentStorage
from agno.tools import Toolkit
from agno.tools.calculator import CalculatorTools
from agno.tools.duckdb import DuckDbTools
from agno.tools.duckduckgo import DuckDuckGoTools
from agno.tools.exa import ExaTools
from agno.tools.file import FileTools
from agno.tools.python import PythonTools
from agno.tools.shell import ShellTools
from agno.tools.yfinance import YFinanceTools
from agno.tools.duckdb import DuckDbTools
from agno.tools.python import PythonTools
from agno.knowledge import AgentKnowledge
from agno.storage.agent.postgres import PostgresAgentStorage
from agno.vectordb.qdrant import Qdrant
from agno.embedder.openai import OpenAIEmbedder
from agno.utils.log import logger
import os
from agno.vectordb.qdrant import Qdrant
from dotenv import load_dotenv

load_dotenv()
@@ -75,21 +75,26 @@ def get_llm_os(

if data_analyst:
data_analyst_agent: Agent = Agent(
tools=[DuckDbTools()],
show_tool_calls=True,
instructions="Use this file for Movies data: https://phidata-public.s3.amazonaws.com/demo_data/IMDB-Movie-Data.csv",

)
tools=[DuckDbTools()],
show_tool_calls=True,
instructions="Use this file for Movies data: https://phidata-public.s3.amazonaws.com/demo_data/IMDB-Movie-Data.csv",
)
team.append(data_analyst_agent)
extra_instructions.append(
"To answer questions about my favorite movies, delegate the task to the `Data Analyst`."
)

if python_agent_enable:
python_agent: Agent = Agent(tools=[PythonTools(base_dir=Path("tmp/python"))], show_tool_calls=True , instructions="To write and run Python code, delegate the task to the `Python Agent`.")
python_agent: Agent = Agent(
tools=[PythonTools(base_dir=Path("tmp/python"))],
show_tool_calls=True,
instructions="To write and run Python code, delegate the task to the `Python Agent`.",
)

team.append(python_agent)
extra_instructions.append("To write and run Python code, delegate the task to the `Python Agent`.")
extra_instructions.append(
"To write and run Python code, delegate the task to the `Python Agent`."
)
if research_agent_enable:
research_agent = Agent(
name="Research Agent",
@@ -239,9 +244,8 @@ def get_llm_os(
"Carefully read the information you have gathered and provide a clear and concise answer to the user.",
"Do not use phrases like 'based on my knowledge' or 'depending on the information'.",
"You can delegate tasks to an AI Agent in your team depending of their role and the tools available to them.",
extra_instructions,
extra_instructions,
],

storage=PostgresAgentStorage(db_url=db_url, table_name="llm_os_runs"),
# Define the knowledge base
knowledge=AgentKnowledge(
2 changes: 1 addition & 1 deletion cookbook/models/google/gemini/storage_and_memory.py
@@ -1,10 +1,10 @@
"""Run `pip install duckduckgo-search pgvector google.generativeai` to install dependencies."""

from agno.agent import Agent
from agno.knowledge.pdf_url import PDFUrlKnowledgeBase
from agno.memory import AgentMemory
from agno.memory.db.postgres import PgMemoryDb
from agno.models.google import Gemini
from agno.knowledge.pdf_url import PDFUrlKnowledgeBase
from agno.storage.agent.postgres import PostgresAgentStorage
from agno.tools.duckduckgo import DuckDuckGoTools
from agno.vectordb.pgvector import PgVector
28 changes: 14 additions & 14 deletions libs/agno/agno/models/google/gemini.py
@@ -6,29 +6,34 @@
from pathlib import Path
from typing import Any, Callable, Dict, Iterator, List, Optional, Union

from agno.media import Image, Audio, Video
from agno.models.base import Model, Metrics
from agno.media import Audio, Image, Video
from agno.models.base import Metrics, Model
from agno.models.message import Message
from agno.models.response import ModelResponse
from agno.tools.function import Function
from agno.tools import Toolkit
from agno.tools.function import Function
from agno.utils.log import logger

try:
import google.generativeai as genai
from google.ai.generativelanguage_v1beta.types import (
Part,
FunctionCall as GeminiFunctionCall,
)
from google.ai.generativelanguage_v1beta.types import (
FunctionResponse as GeminiFunctionResponse,
)
from google.generativeai import GenerativeModel
from google.generativeai.types.generation_types import GenerateContentResponse
from google.generativeai.types.content_types import FunctionDeclaration, Tool as GeminiTool
from google.generativeai.types import file_types
from google.ai.generativelanguage_v1beta.types import (
Part,
)
from google.ai.generativelanguage_v1beta.types.generative_service import (
GenerateContentResponse as ResultGenerateContentResponse,
)
from google.api_core.exceptions import PermissionDenied
from google.generativeai import GenerativeModel
from google.generativeai.types import file_types
from google.generativeai.types.content_types import FunctionDeclaration
from google.generativeai.types.content_types import Tool as GeminiTool
from google.generativeai.types.generation_types import GenerateContentResponse
from google.protobuf.struct_pb2 import Struct
except (ModuleNotFoundError, ImportError):
raise ImportError("`google-generativeai` not installed. Please install it using `pip install google-generativeai`")
@@ -186,11 +191,7 @@ def _format_messages(messages: List[Message]) -> List[Dict[str, Any]]:

# Add role to the message for the model
role = (
"model"
if message.role in ["system", "developer"]
else "user"
if message.role == "tool"
else message.role
"model" if message.role in ["system", "developer"] else "user" if message.role == "tool" else message.role
)
message_for_model["role"] = role
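The collapsed conditional above keeps the same role mapping as before: system and developer messages are sent to Gemini with the "model" role, tool messages with the "user" role, and anything else passes through unchanged. An equivalent spelled-out version (illustrative only, not part of the diff):

    def map_role(role: str) -> str:
        # Same mapping as the nested conditional expression above.
        if role in ["system", "developer"]:
            return "model"
        if role == "tool":
            return "user"
        return role

    assert map_role("system") == "model"
    assert map_role("tool") == "user"
    assert map_role("assistant") == "assistant"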

@@ -425,7 +426,6 @@ def request_kwargs(self) -> Dict[str, Any]:
request_params["tools"] = [GeminiTool(function_declarations=self.function_declarations)]
return request_params


def add_tool(
self,
tool: Union[Toolkit, Callable, Dict, Function],
