Skip to content

Commit

Permalink
Merge branch 'main' of https://github.com/CogitoNTNU/jarvis
Browse files Browse the repository at this point in the history
  • Loading branch information
EldarAlvik committed Oct 22, 2024
2 parents e4919d9 + 837d5ad commit 5cfd534
Show file tree
Hide file tree
Showing 22 changed files with 374 additions and 174 deletions.
3 changes: 3 additions & 0 deletions .env.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
OPENAI_API_KEY="your_api_key"
LANGSMITH_API_KEY="your_langsmith_api_key" #Find it here: https://smith.langchain.com
PORT="3000"
43 changes: 43 additions & 0 deletions .github/workflows/cd.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
name: CD

on:
  workflow_run:
    workflows: ["CI"]
    types:
      - completed

# Pushing images to GHCR with the built-in GITHUB_TOKEN requires
# packages: write; declare it explicitly so the workflow also works
# when the repository/org default token permissions are read-only.
permissions:
  contents: read
  packages: write

jobs:
  build:
    runs-on: ubuntu-latest

    # Only runs if CI was successful
    if: ${{ github.event.workflow_run.conclusion == 'success' }}

    steps:
      # Checkout the repository
      - name: Checkout code
        uses: actions/checkout@v3

      # QEMU + Buildx enable (multi-platform) buildkit builds.
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2

      - name: Log in to GitHub Container Registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      # Tag images with the short commit SHA for traceability.
      # NOTE(review): with workflow_run, GITHUB_SHA is the head of the
      # default branch — confirm this matches the commit CI actually built.
      - name: Extract short SHA
        id: git_sha
        run: echo "GIT_SHA=$(git rev-parse --short $GITHUB_SHA)" >> $GITHUB_ENV

      - name: Build and Push Docker Backend Image
        run: |
          docker build -t ghcr.io/cogitontnu/jarvis-core:${{ env.GIT_SHA }} ./core
          docker push ghcr.io/cogitontnu/jarvis-core:${{ env.GIT_SHA }}
      ## Add Build and Push for Docker Frontend Image when it becomes relevant
20 changes: 20 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
name: CI

# Runs on every push and pull request targeting main.
on:
  push:
    branches: ["main"]
  pull_request:
    branches: ["main"]

jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      # NOTE(review): fail-fast only has an effect with a job matrix;
      # harmless here but currently a no-op.
      fail-fast: true
    steps:
      - uses: actions/checkout@v3
      # Builds the compose stack and verifies the containers start and
      # stop cleanly.
      # NOTE(review): `up -d` detaches immediately, so tests that fail
      # inside the containers will NOT fail this job — confirm whether a
      # foreground run with an exit code is intended here.
      - name: Build docker image and run tests
        run: |
          docker compose build
          docker compose up -d
          docker compose down
4 changes: 4 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
# Custom Ignores
user_data


# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
Expand Down
3 changes: 2 additions & 1 deletion benchmarking/readme.md
Original file line number Diff line number Diff line change
@@ -1,2 +1,3 @@
### Benchmarking
Standardized tasks and tests for Jarvis to evaluate performance.

19 changes: 12 additions & 7 deletions core/agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,15 @@



class Agent1:
    """Holds a single, class-level ChatOpenAI instance shared by all users of Agent1."""

    # Shared LLM client; configured once at class-definition time.
    llm = ChatOpenAI(
        model = Model.gpt_4o,
        temperature=0,    # deterministic sampling
        max_tokens=512,   # cap on response length
        #streaming=True, #Can't use because of metadata
    )


class Agent:
def __init__(self, model_type) -> None:
#Langsmith Tracing
Expand All @@ -33,6 +42,7 @@ def __init__(self, model_type) -> None:
# Defining edges between nodes
self.workflow.add_edge(START, "chatbot")
self.workflow.add_edge("tools", "chatbot")
self.workflow.add_edge("chatbot", END)

# Defining conditional edges
self.workflow.add_conditional_edges(
Expand Down Expand Up @@ -75,19 +85,14 @@ def run(self, user_prompt: str) -> tuple[str, int]:
response and the total amount of tokens used.
"""
first = True
for event in self.graph.stream({"messages": [("user", user_prompt)]}):
for event in self.graph.stream("tell me about orcas?"):
for value in event.values():
messages = value["messages"][-1]
gathered = ""
# if messages.content and not isinstance(messages, HumanMessage):
# print(messages.content, end="|", flush=True)

if isinstance(messages, AIMessageChunk):
if first:
gathered = messages
first = False
else:
gathered += messages
gathered += messages

if isinstance(messages, BaseMessage):
if hasattr(messages, 'usage_metadata'):
Expand Down
2 changes: 1 addition & 1 deletion core/ai_message.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
class Ai_message:
    """Lightweight record pairing an AI reply with its token usage."""

    def __init__(self, message: str, token_count: int) -> None:
        # Keep both fields exactly as supplied by the caller.
        self.message, self.token_count = message, token_count
6 changes: 1 addition & 5 deletions core/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,11 +7,7 @@
PERPLEXITY_API_KEY = os.getenv("PERPLEXITY_API_KEY")

#add langsmith api to env as LANGSMITH_API_KEY = "your_api_key" on EU server
LANGSMITH_API_KEY: str
if os.getenv(key="LANGSMITH_API_KEY"):
LANGSMITH_API_KEY: str = os.getenv(key="LANGSMITH_API_KEY")
else:
LANGSMITH_API_KEY: str = "lmao"
LANGSMITH_API_KEY = os.getenv("LANGSMITH_API_KEY", "no_key")

os.environ["LANGCHAIN_TRACING_V2"] = "true"
os.environ["LANGCHAIN_ENDPOINT"] = "https://eu.api.smith.langchain.com"
Expand Down
85 changes: 0 additions & 85 deletions core/graph.py

This file was deleted.

116 changes: 116 additions & 0 deletions core/graphAgent.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,116 @@
from langchain_openai import ChatOpenAI
from graphstate import GraphState
from tools.tools import get_tools
from langgraph.graph import StateGraph, START, END
from langgraph.prebuilt import ToolNode, tools_condition
from langchain_core.messages import BaseMessage, AIMessageChunk, HumanMessage, AIMessage
from models import Model
import json
from config import OPENAI_API_KEY
from Agents.simpleagent import SimpleAgent
#from agent import Agent, Agent1
import asyncio
from time import sleep


class Graph:
    """
    LangGraph-based agent: a chatbot node wired to a tool-executor node.

    The workflow routes from START to the chatbot; when the model emits a
    tool call, `tools_condition` routes to the "tools" node, whose output
    feeds back into the chatbot; otherwise the run ends.
    """

    def __init__(self):
        # SimpleAgent supplies the shared LLM client.
        self.llm = SimpleAgent.llm
        # Bind the available tools so the model can emit tool calls.
        self.llm_with_tools = self.llm.bind_tools(get_tools())

        self.workflow = StateGraph(GraphState)
        # Adding nodes to the workflow
        self.workflow.add_node("chatbot", self.chatbot)
        self.workflow.add_node("tools", ToolNode(get_tools()))
        # TODO: Visualize these tools

        # Defining edges between nodes
        self.workflow.add_edge(START, "chatbot")
        self.workflow.add_edge("tools", "chatbot")
        self.workflow.add_edge("chatbot", END)

        # Defining conditional edges: route to "tools" on a tool call,
        # otherwise fall through.
        self.workflow.add_conditional_edges(
            "chatbot",
            tools_condition
        )
        self.graph = self.workflow.compile()

        # Uncomment to render the graph topology to a PNG:
        # with open("core/graph_node_network.png", 'wb') as f:
        #     f.write(self.graph.get_graph().draw_mermaid_png())

    def chatbot(self, state: GraphState):
        """
        Simple bot that invokes the list of previous messages
        and returns the result which will be added to the list of messages.
        """
        return {"messages": [self.llm_with_tools.invoke(state["messages"])]}

    # UNFINISHED
    def run_stream_only(self, user_prompt: str):
        """
        Run the LLM directly (no graph, no tools), yielding a token stream.

        Yields the text content of each streamed chunk.
        """
        print('Running stream...')
        print(user_prompt)
        print(type(user_prompt))
        for chunk in self.llm.stream(user_prompt):
            yield chunk.content

    #for running the agent comment out for testing in terminal
    async def run(self, user_prompt: str, socketio):
        """
        Run the agent with a user prompt and emit the response and total
        tokens via socket.

        Emits "start_message" first, then "chunk" with each AI message's
        content, and "tokens" with the total token count when available.
        Returns "success" on completion, "error" if an exception occurred.
        """
        try:
            # Named graph_input to avoid shadowing the builtin `input`.
            graph_input = {"messages": [("human", user_prompt)]}
            socketio.emit("start_message", " ")
            async for event in self.graph.astream_events(graph_input, version='v2'):
                event_type = event.get('event')

                # Passes over events that are start events
                if event_type == 'on_chain_start':
                    continue

                # Returns the AI response
                # //TODO Fix that it streams chunks rather than AIMessage
                if event_type == 'on_chain_end':
                    for message in event['data']['output']['messages']:
                        if isinstance(message, AIMessage):
                            socketio.emit("chunk", message.content)

                            # Report token usage when the message carries it.
                            if hasattr(message, 'usage_metadata'):
                                usage_metadata = message.usage_metadata
                                if usage_metadata:
                                    total_tokens = usage_metadata.get('total_tokens')
                                    socketio.emit("tokens", total_tokens)

            return "success"
        except Exception as e:
            # NOTE(review): broad catch keeps the socket loop alive; the
            # caller only sees "error" — consider logging with traceback.
            print(e)
            return "error"
2 changes: 1 addition & 1 deletion core/graphtools.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ class AgentState(TypedDict):
messages: Annotated[Sequence[BaseMessage], operator.add]
sender: str

class gaphtool:
class graphtool:
def __init__(self, graph):
self.graph = graph
self.nodes = graph.nodes()
Expand Down
Loading

0 comments on commit 5cfd534

Please sign in to comment.