Merge branch 'main' into astream_events_experiment
JonBergland committed Oct 17, 2024
2 parents b995545 + 3e01f4a commit 8eeb862
Showing 21 changed files with 184 additions and 66 deletions.
3 changes: 3 additions & 0 deletions .env.example
@@ -0,0 +1,3 @@
OPENAI_API_KEY="your_api_key"
LANGSMITH_API_KEY="your_langsmith_api_key" #Find it here: https://smith.langchain.com
PORT="3000"
43 changes: 43 additions & 0 deletions .github/workflows/cd.yml
@@ -0,0 +1,43 @@
name: CD

on:
workflow_run:
workflows: ["CI"]
types:
- completed

jobs:
build:
runs-on: ubuntu-latest

# Only runs if CI was successful
if: ${{ github.event.workflow_run.conclusion == 'success' }}

steps:
# Checkout the repository
- name: Checkout code
uses: actions/checkout@v3

- name: Set up QEMU
uses: docker/setup-qemu-action@v2

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2

- name: Log in to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Extract short SHA
id: git_sha
run: echo "GIT_SHA=$(git rev-parse --short $GITHUB_SHA)" >> $GITHUB_ENV

- name: Build and Push Docker Backend Image
run: |
docker build -t ghcr.io/cogitontnu/jarvis-core:${{ env.GIT_SHA }} ./core
docker push ghcr.io/cogitontnu/jarvis-core:${{ env.GIT_SHA }}
## Add Build and Push for Docker Frontend Image when it becomes relevant
20 changes: 20 additions & 0 deletions .github/workflows/ci.yml
@@ -0,0 +1,20 @@
name: CI

on:
push:
branches: ["main"]
pull_request:
branches: ["main"]

jobs:
build:
runs-on: ubuntu-latest
strategy:
fail-fast: true
steps:
- uses: actions/checkout@v3
- name: Build docker image and run tests
run: |
docker compose build
docker compose up -d
docker compose down
4 changes: 4 additions & 0 deletions .gitignore
@@ -1,3 +1,7 @@
# Custom Ignores
user_data


# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
3 changes: 2 additions & 1 deletion benchmarking/readme.md
@@ -1,2 +1,3 @@
### Benchmarking
Standardized tasks and tests for Jarvis to evaluate performance.
Standardized tasks and tests for Jarvis to evaluate performance.

Empty file added core/Agents/proofreader.py
Empty file.
12 changes: 12 additions & 0 deletions core/Agents/simpleagent.py
@@ -0,0 +1,12 @@
from langchain_openai import ChatOpenAI
from models import Model
from config import OPENAI_API_KEY


class SimpleAgent:
llm = ChatOpenAI(
model = Model.gpt_4o,
temperature=0,
max_tokens=512,
streaming=True
)
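
SimpleAgent above exposes a single shared ChatOpenAI instance configured for streaming. A minimal usage sketch, not part of this commit, assuming the core package is on the import path and OPENAI_API_KEY is set in the environment:

# Minimal sketch: stream tokens from the shared ChatOpenAI instance.
# Assumes OPENAI_API_KEY is set and Agents.simpleagent is importable from core.
import asyncio
from Agents.simpleagent import SimpleAgent

async def demo():
    # astream yields AIMessageChunk objects as the model streams its reply
    async for chunk in SimpleAgent.llm.astream("Say hello in one sentence."):
        print(chunk.content, end="", flush=True)

asyncio.run(demo())
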
2 changes: 1 addition & 1 deletion core/ai_message.py
@@ -1,4 +1,4 @@
class Ai_message:
def __init__(self, message:str, token_count:int) -> None:
self.message = message
self.token_count = token_count
self.token_count = token_count
6 changes: 1 addition & 5 deletions core/config.py
@@ -7,11 +7,7 @@
PERPLEXITY_API_KEY = os.getenv("PERPLEXITY_API_KEY")

#add langsmith api to env as LANGSMITH_API_KEY = "your_api_key" on EU server
LANGSMITH_API_KEY: str
if os.getenv(key="LANGSMITH_API_KEY"):
LANGSMITH_API_KEY: str = os.getenv(key="LANGSMITH_API_KEY")
else:
LANGSMITH_API_KEY: str = "lmao"
LANGSMITH_API_KEY = os.getenv("LANGSMITH_API_KEY", "no_key")

os.environ["LANGCHAIN_TRACING_V2"] = "true"
os.environ["LANGCHAIN_ENDPOINT"] = "https://eu.api.smith.langchain.com"
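
The replacement above collapses the if/else into a single os.getenv call with a default. A quick illustrative check of that fallback behaviour, not part of this commit:

# Illustrative check of the os.getenv fallback pattern used above.
import os

os.environ.pop("LANGSMITH_API_KEY", None)          # variable unset
print(os.getenv("LANGSMITH_API_KEY", "no_key"))    # -> "no_key"

os.environ["LANGSMITH_API_KEY"] = "real_key"       # variable set
print(os.getenv("LANGSMITH_API_KEY", "no_key"))    # -> "real_key"
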
10 changes: 6 additions & 4 deletions core/graphAgent.py
@@ -7,14 +7,17 @@
from models import Model
import json
from config import OPENAI_API_KEY
from agent import Agent, Agent1
from Agents.simpleagent import SimpleAgent
#from agent import Agent, Agent1
import asyncio
from time import sleep


class Graph:
def __init__(self):
LANGCHAIN_TRACING_V2: str = "true"

self.llm = Agent1.llm
self.llm = SimpleAgent.llm

self.llm_with_tools = self.llm.bind_tools(get_tools())
self.workflow = StateGraph(GraphState)
@@ -62,9 +65,9 @@ async def run(self, user_prompt: str, socketio):
"""
Run the agent with a user prompt and emit the response and total tokens via socket
"""
total_tokens = 0
try:
input = {"messages": [("human", user_prompt)]}
socketio.emit("start_message", " ")
async for event in self.graph.astream_events(input, version='v2'):
event_type = event.get('event')

Expand All @@ -88,7 +91,6 @@ async def run(self, user_prompt: str, socketio):
total_tokens = usage_metadata.get('total_tokens')
socketio.emit("tokens", total_tokens)


return "success"
except Exception as e:
print(e)
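
Graph.run above consumes LangGraph's astream_events (v2) stream, forwarding chat-model chunks and the final token count over the socket. A standalone sketch of the same event-filtering pattern, illustrative only, assuming `graph` is any compiled LangGraph graph that contains a chat-model node:

# Sketch of the astream_events v2 pattern used in Graph.run (illustrative only).
# `graph` is assumed to be a compiled LangGraph StateGraph with a chat model node.
async def stream_answer(graph, user_prompt: str) -> None:
    inputs = {"messages": [("human", user_prompt)]}
    async for event in graph.astream_events(inputs, version="v2"):
        kind = event.get("event")
        if kind == "on_chat_model_stream":
            chunk = event["data"]["chunk"]          # an AIMessageChunk
            if chunk.content:
                print(chunk.content, end="", flush=True)
        elif kind == "on_chat_model_end":
            usage = getattr(event["data"]["output"], "usage_metadata", None) or {}
            print("\ntotal tokens:", usage.get("total_tokens"))
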
2 changes: 1 addition & 1 deletion core/graphtools.py
@@ -16,7 +16,7 @@ class AgentState(TypedDict):
messages: Annotated[Sequence[BaseMessage], operator.add]
sender: str

class gaphtool:
class graphtool:
def __init__(self, graph):
self.graph = graph
self.nodes = graph.nodes()
25 changes: 19 additions & 6 deletions core/main.py
@@ -6,7 +6,19 @@
from flask_socketio import SocketIO, send, emit
from flask_cors import CORS
from config import PORT
import asyncio
import asyncio
from modules.user_data_setup import check_folders
from modules.chat import read_chat
import logging
log = logging.getLogger('werkzeug')
log.setLevel(logging.ERROR)

#
# Setup
#
print("J is booting up....")
check_folders() # Check directories are made for user data
read_chat("1")

#
# Server config
@@ -62,15 +74,16 @@ def disconnect():
@socketio.on('user_prompt')
def handle_prompt(data):
try:
print("Hello!")
conversation_id = data['conversation_id'] # grabs the conversation ID
socketio.emit("start_message")
response = asyncio.run(jarvis.run(data['prompt'], socketio), debug=True) # prompts Jarvis and hands off emitting to the graphAgent.

return jsonify({"status": response})
asyncio.run(jarvis.run(data['prompt'], socketio), debug=True) # prompts Jarvis and hands off emitting to the graphAgent.
return jsonify({"status": "success"})
except Exception as e:
print(f'Something very bad happened: {e}')
return jsonify({"status": "error"})

if __name__ == '__main__':
socketio.run(app, debug=True, host='0.0.0.0', port=PORT, allow_unsafe_werkzeug=True)
socketio.run(app, debug=True, host='0.0.0.0', port=PORT, allow_unsafe_werkzeug=True)

# hello
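
For manually exercising the user_prompt handler above, a hedged client-side sketch using the python-socketio client package, assuming the server is reachable on the PORT from .env.example (3000); the event names mirror those emitted by the server:

# Hypothetical client for the socket API above (not part of this commit).
# Assumes `pip install "python-socketio[client]"` and the server on localhost:3000.
import socketio

sio = socketio.Client()

@sio.on("chunk")
def on_chunk(chunk):
    print(chunk, end="", flush=True)      # streamed message parts

@sio.on("tokens")
def on_tokens(total):
    print("\ntotal tokens:", total)

sio.connect("http://localhost:3000")
sio.emit("user_prompt", {"prompt": "Hello Jarvis", "conversation_id": "1"})
sio.wait()
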
27 changes: 27 additions & 0 deletions core/modules/chat.py
@@ -0,0 +1,27 @@
import os
import json

def read_chat(id: str) -> dict:
'''
Uses chat_id to get the chat JSON file and returns a python dict object.
'''
dirname = os.path.dirname(os.path.dirname(__file__)) # Resolves the core directory (parent of modules)
filepath = os.path.join(dirname, f'user_data/chats/{id}.json')
# Open and read the JSON file
with open(filepath, 'r') as file:
data = json.load(file)
return data

def upsert_chat(chat_object: dict):
'''
Upserts a chat dictionary object, saving it as json file in the user_data folder.
Upserting means updating the file, or creating it if it doesn't exist yet, overwriting any previous data.
'''
try:
print("hey")
except Exception as e:
return e


# json.dumps() - From python to json
# json.load() - From json to python
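
upsert_chat above is still a stub. One possible shape for it, not part of this commit, assuming the chat dict carries an "id" key and uses the same user_data/chats/<id>.json layout as read_chat:

# Hypothetical upsert_chat sketch; the "id" key and the file layout are assumptions.
import json
import os

def upsert_chat_sketch(chat_object: dict) -> None:
    dirname = os.path.dirname(os.path.dirname(__file__))            # core directory
    filepath = os.path.join(dirname, f"user_data/chats/{chat_object['id']}.json")
    os.makedirs(os.path.dirname(filepath), exist_ok=True)           # create folder if missing
    with open(filepath, "w") as file:
        json.dump(chat_object, file)                                 # overwrite previous data
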
18 changes: 18 additions & 0 deletions core/modules/user_data_setup.py
@@ -0,0 +1,18 @@
import os

def check_folders():
main_folder = "user_data/"
pathsToCheck = ["chats", "images", "pdfs", "fbx"]
for directory in pathsToCheck:
path = main_folder + directory # builds a path like user_data/chats; everything lives under user_data since it's gitignored
check_and_create_folder(path) # Does a relative folder check, and builds the directory if it doesn't exist

def check_and_create_folder(path):
dirname = os.path.dirname(os.path.dirname(__file__)) # Resolves the core directory (parent of modules)
relativedir = os.path.join(dirname, path)
if not os.path.exists(relativedir):
try:
print("Created user_data director under core folder. This is first-time setup.")
os.makedirs(path)
except Exception as e:
print(e)
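
The helpers above check for the directory and then create it. An equivalent sketch that folds both steps into one idempotent call with os.makedirs(..., exist_ok=True), for illustration only:

# Illustrative alternative: exist_ok=True makes create-if-missing a single call.
import os

def ensure_user_data_folders() -> None:
    core_dir = os.path.dirname(os.path.dirname(__file__))   # resolve the core directory
    for sub in ("chats", "images", "pdfs", "fbx"):
        os.makedirs(os.path.join(core_dir, "user_data", sub), exist_ok=True)
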
Binary file modified core/requirements.txt
Binary file not shown.
19 changes: 8 additions & 11 deletions core/static/chat.js
@@ -1,5 +1,5 @@
// When user sends a message (pressing send button) this function runs
sendMessage = async () => {
sendMessage = () => {
let userInput = ""
try{
let chat_text_field = document.getElementById('chat_input_text')
@@ -8,13 +8,17 @@ sendMessage = async () => {
chat_text_field.value = ""
chat_history = document.getElementById("chat_history")
chat_history.scrollTop = chat_history.scrollHeight;
}catch(e){
} catch(e){
console.log(e)
}

// Send the message via the open socket
try{
let res = await socket.emit('user_prompt', {prompt: userInput, conversation_id: state.activeConversationId})
console.log("User promt is: " + userInput);
const payload = {prompt: userInput, conversation_id: state.activeConversationId}
console.log("Payload is: ", payload);
let res = socket.emit('user_prompt', payload)
console.log("Prompt sent to backend");
// Stream to the current active AI chat box
}catch(e){
console.log("Something went wrong", e)
@@ -30,7 +34,7 @@ addStreamedChunk = (messagePart) => {
}
}

let endStreamedAIMessage = () => {
endStreamedAIMessage = () => {
if (state.activeAIMessage) {
console.log("Message end")
let output = state.activeAIMessage.innerHTML
@@ -40,14 +44,7 @@ let endStreamedAIMessage = () => {
} else {
console.log("No active AI message to end.")
}

}

let startStreamedAIMessage = (uuid) => {
console.log("Message start")
addMessage(uuid); // Create an AI message when it begins streaming.
let ai_message = document.getElementById(uuid)
state.activeAIMessage = ai_message // Active element gets added to the state.
}

// Generates unique id on socket.on("start_message")
6 changes: 3 additions & 3 deletions core/static/index.css
@@ -146,11 +146,11 @@ body {
}

.chat{
width: 65%;
width: 100%;
}

.chatHistory {
width: 250px;
width: 400px;
height: 80vh;
margin: 6px;
margin-top: 8px;
@@ -205,7 +205,7 @@ p{
}

.processesContainer{
width: 250px;
width: 400px;
height: 80vh;
margin-top: 8px;
border-radius: 10px;
12 changes: 8 additions & 4 deletions core/static/socketEvents.js
@@ -12,7 +12,13 @@ let uuid = 0

// prints chunks that are streamed to the console and adds them to the chat
socket.on("chunk", async (chunk)=>{
console.log(chunk);
if(!state.activeAIMessage){
console.log("STARTED MESSAGE")
uuid = generateUUID();
await addStreamedMessage(uuid, "");
ai_message = document.getElementById(uuid)
state.activeAIMessage = ai_message
}
await addStreamedMessage(uuid, chunk);
})

@@ -23,9 +29,7 @@ socket.on("tokens", async (tokens) => {
})

socket.on("start_message", async () => {
uuid = generateUUID();
console.log(uuid);
await addStreamedMessage(uuid, "");

})

// Remember to parse the streamed response
4 changes: 2 additions & 2 deletions core/tools/google_calender_create.py
@@ -61,8 +61,8 @@ def get_tool() -> StructuredTool:
summary = "Test Event"
location = "Online"
description = "This is a test event created by the Google Calendar tool"
start_time = "2024-10-09T12:00:00Z" # Format: YYYY-MM-DDTHH:MM:SSZ
end_time = "2024-10-09T15:00:00Z" # Format: YYYY-MM-DDTHH:MM:SSZ
start_time = "2024-10-16T12:00:00Z" # Format: YYYY-MM-DDTHH:MM:SSZ
end_time = "2024-10-16T15:00:00Z" # Format: YYYY-MM-DDTHH:MM:SSZ

result = create_calendar_event(summary, location, description, start_time, end_time)
print(result)
3 changes: 2 additions & 1 deletion core/tools/tools.py
@@ -5,6 +5,7 @@
import tools.read_file as read_file
import tools.read_pdf as read_pdf
import tools.weather as weather
import tools.google_calender_create as create_calender_event

def get_tools() -> list[StructuredTool]:
tools = []
@@ -14,6 +15,6 @@ def get_tools() -> list[StructuredTool]:
tools.append(read_file.get_tool())
tools.append(read_pdf.get_tool())
tools.append(weather.get_tool())

tools.append(create_calender_event.get_tool())

return tools
