Skip to content

Commit

Permalink
Merge branch 'main' of https://github.com/CogitoNTNU/jarvis
Browse files Browse the repository at this point in the history
  • Loading branch information
EldarAlvik committed Nov 7, 2024
2 parents 611ac15 + 6b47ba3 commit d1948ea
Show file tree
Hide file tree
Showing 11 changed files with 97 additions and 10 deletions.
Binary file added core/faiss_index.bin
Binary file not shown.
1 change: 1 addition & 0 deletions core/graphAgent.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ def __init__(self):
self.workflow.add_node("use_calendar_tool", calendar_tool_decider)
self.workflow.add_node("calendar_decider", calendar_decision_agent)
self.workflow.add_node("other_agent", other_agent)


self.workflow.add_edge(START, "jarvis_agent")
self.workflow.add_edge("perplexity_agent", "tools")
Expand Down
3 changes: 1 addition & 2 deletions core/graphstate.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,5 +10,4 @@ class GraphState(TypedDict):
data: dict
tool_decision: Literal["use_tool", "generate"]
agent_decision: Literal["perplexity", "calendar", "other"]
calendar_decision: Literal["use_calendar_tool", "return_to_jarvis"]

calendar_decision: Literal["use_calendar_tool", "return_to_jarvis"]
1 change: 1 addition & 0 deletions core/id_to_metadata.json
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{"210453397504": {"user_id": "1", "text": "Samtale fra 2024-11-07:\n{\"chat_summary\":\"The human greeted the AI with 'hi' twice, and the AI responded with 'success' both times.\"}"}, "210453397505": {"user_id": "1", "text": "Samtale fra 2024-11-07:\n{\"chat_summary\":\"The human bought a new red Audi A7 car yesterday.\"}"}}
52 changes: 50 additions & 2 deletions core/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
import logging
log = logging.getLogger('werkzeug')
log.setLevel(logging.ERROR)
from collections import defaultdict

#
# Setup
Expand All @@ -31,6 +32,9 @@
# Agent instantiation
jarvis = Graph() # API key is configured in agent.py

# Per-session conversation store, keyed by Socket.IO session id.
# Unknown sessions are lazily initialised to an empty chat history.
def _empty_chat_state():
    return {"chat_history": []}

active_chats = defaultdict(_empty_chat_state)

#
#
# HTTP(S) routes below
Expand Down Expand Up @@ -67,27 +71,71 @@ def summarize_store():
# Fired once per client when the websocket handshake completes.
@socketio.on('connect')
def connect(data):
    sid = request.sid
    # Let the freshly connected UI know the streaming server is live.
    emit("You're connected to Jarvis streaming server...")
    print('UI connected to backend')
    print(f'Session ID: {sid}')

# Fired when a client gracefully disconnects: persist its conversation,
# then drop the in-memory state for that session.
@socketio.on('disconnect')
def disconnect():
    sid = request.sid
    if sid in active_chats:
        # Snapshot the session's history before it is removed below.
        history = active_chats[sid]
        try:
            # Summarise the conversation and embed it for long-term recall.
            # NOTE(review): user id "1" is a hard-coded placeholder — confirm
            # before multi-user deployment.
            summary = summarize_chat(history)
            embed_and_store(summary, "1")
            print(f'Chat history summarized and stored')
        except Exception as e:
            # Best-effort persistence: a summarisation failure must not block
            # the disconnect cleanup.
            print(f'Error summarizing chat history: {e}')
        del active_chats[sid]
    print('UI disconnected')
    print(f'Session ID: {sid}')

# Custom event. Fired when the user sends a prompt.
@socketio.on('user_prompt')
def handle_prompt(data):
    """Forward a user prompt to Jarvis and record the exchange.

    Expects ``data`` to carry 'prompt' and 'conversation_id'. Emits
    'start_message', lets the Graph agent stream its answer over the
    socket, then appends the completed human/AI pair to this session's
    in-memory chat history.
    """
    try:
        session_id = request.sid
        conversation_id = data['conversation_id']  # currently unused; kept for future routing

        # New history entry; the AI half is filled in once Jarvis answers.
        chat_entry = {
            "human_message": data['prompt'],
            "ai_message": ""
        }

        socketio.emit("start_message")

        async def run_and_store():
            # jarvis.run streams tokens via socketio and returns the full reply.
            response = await jarvis.run(data['prompt'], socketio)
            chat_entry["ai_message"] = response
            active_chats[session_id]["chat_history"].append(chat_entry)

        # BUG FIX: the previous version also called
        # asyncio.run(jarvis.run(...)) directly alongside run_and_store,
        # prompting Jarvis twice per user message; run it exactly once.
        asyncio.run(run_and_store(), debug=True)

        return jsonify({"status": "success"})
    except Exception as e:
        print(f'Something very bad happened: {e}')
        return jsonify({"status": "error"})

# Returns the stored conversation for the calling session; unknown sessions
# get an empty history (the return value travels back as a Socket.IO ack).
@socketio.on('get_chat_history')
def get_chat_history():
    sid = request.sid
    # Membership test first so the defaultdict is not populated as a side effect.
    return active_chats[sid] if sid in active_chats else {"chat_history": []}

if __name__ == '__main__':
    # Development entry point: serve on all interfaces at the configured PORT.
    socketio.run(app, debug=True, host='0.0.0.0', port=PORT, allow_unsafe_werkzeug=True)

Expand Down
7 changes: 4 additions & 3 deletions core/noder.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ def jarvis_agent(state: GraphState):
template= """
Your job is to determine if you need tools to answer the
users question and answer with only the name of the option
chosen.
chosen. You have access to chat history using tools, thus also some personal data can be retrieved.
Here are previous messages:
Expand Down Expand Up @@ -53,7 +53,7 @@ def tool_agent_decider(state: GraphState):
Your options for agents are the following:
- "perplexity": This agent has access to tools that use the perplexity API and tools
for doing a RAG-search. These tools are the following: {perplexity_tools}
for doing a chat history search. These tools are the following: {perplexity_tools}
- "calendar": This agent has access to calendar tools
These tools are the following: {calendar_tools}
- "other": Other tools available: {other_tools}
Expand Down Expand Up @@ -113,7 +113,7 @@ def perplexity_agent(state: GraphState):
prompt = PromptTemplate(
template= """
Your job is to create tool_calls to tools using the perplexity API or
to tools that do a RAG-search. The tool or tools you decide
to tools that do a RAG-search on the chat history. The tool or tools you decide
to call should help answer the users question.
Here are previous messages:
Expand Down Expand Up @@ -202,6 +202,7 @@ def other_agent(state: GraphState):
Your job is to create tool_calls to tools.
The tool or tools you decide
to call should help answer the users question.
You can also use the chat history search tool to help answer the users question.
Here are previous messages:
Expand Down
12 changes: 10 additions & 2 deletions core/static/socketEvents.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,16 @@
// TODO: add this port to .env later
var socket = io("ws://localhost:3000"); // websocket querying port 3001, where Flask is running.

// Fired once the websocket connection to the Flask backend is established.
// BUG FIX: a merge artifact registered two 'connect' handlers with
// unbalanced braces; register exactly one.
socket.on('connect', () => {
    socket.emit('message', {data: 'I\'m connected!'});

    // Ask the server for any stored history for this session and replay it
    // into the UI. The callback receives the server handler's return value
    // (Socket.IO acknowledgement).
    socket.emit('get_chat_history', (history) => {
        // Restore chat history to UI
        history.chat_history.forEach(entry => {
            addUserMessage(entry.human_message);
            addMessage(entry.ai_message);
        });
    });
});

let tokenCounter = 0;
Expand Down
24 changes: 24 additions & 0 deletions core/tools/rag_search.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
from rag import similarity_search
from langchain_core.tools import tool
from langchain_core.tools.structured import StructuredTool
from typing import List, Tuple


@tool
def rag_search(query: str) -> List[Tuple[str, float]]:
    """
    Use this tool to get relevant conversation history
    Args:
        query (str): The query to search for
    Returns:
        List[Tuple[str, float]]: Relevant chat history
    """
    # NOTE(review): user id "1" is a hard-coded placeholder — confirm before
    # threading real per-user identity through the tool layer.
    return similarity_search(query, "1")


def get_tool() -> StructuredTool:
    """Expose the rag_search tool for registration in the tool registry."""
    return rag_search
4 changes: 4 additions & 0 deletions core/tools/tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
import tools.create_time_to_iso as create_time_to_iso_format
import tools.current_time_iso as current_time_iso_format
import tools.add_time as add_time
import tools.rag_search as rag_search


def get_tools() -> list[StructuredTool]:
Expand All @@ -24,13 +25,15 @@ def get_tools() -> list[StructuredTool]:
tools.append(read_calendar_event.get_tool())
tools.append(create_time_to_iso_format.get_tool())
tools.append(current_time_iso_format.get_tool())
tools.append(rag_search.get_tool())

return tools

def get_perplexity_based_tools() -> list[StructuredTool]:
    """Tools for the perplexity agent: external lookups plus chat-history search."""
    return [
        weather.get_tool(),
        web_search.get_tool(),
        rag_search.get_tool(),
    ]

Expand All @@ -49,5 +52,6 @@ def get_other_tools() -> list[StructuredTool]:
tools.append(find_files.get_tool())
tools.append(read_file.get_tool())
tools.append(read_pdf.get_tool())
tools.append(rag_search.get_tool())

return tools
2 changes: 1 addition & 1 deletion core/tools/weather.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ def weather_forecast(user_query: str) -> str:

# chat completion without streaming
response = client.chat.completions.create(
model="llama-3.1-sonar-large-128k-online",
model="llama-3.1-sonar-huge-128k-online",
messages=messages,
temperature=0.0,
)
Expand Down
1 change: 1 addition & 0 deletions core/user_to_vector_ids.json
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{"1": [210453397504, 210453397505]}

0 comments on commit d1948ea

Please sign in to comment.