diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..f735c81 --- /dev/null +++ b/.env.example @@ -0,0 +1,3 @@ +OPENAI_API_KEY="your_api_key" +LANGSMITH_API_KEY="your_langsmith_api_key" #Find it here: https://smith.langchain.com +PORT="3000" \ No newline at end of file diff --git a/.github/workflows/cd.yml b/.github/workflows/cd.yml new file mode 100644 index 0000000..0b9b3ec --- /dev/null +++ b/.github/workflows/cd.yml @@ -0,0 +1,43 @@ +name: CD + +on: + workflow_run: + workflows: ["CI"] + types: + - completed + +jobs: + build: + runs-on: ubuntu-latest + + # Only runs if CI was successful + if: ${{ github.event.workflow_run.conclusion == 'success' }} + + steps: + # Checkout the repository + - name: Checkout code + uses: actions/checkout@v3 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v2 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + + - name: Log in to GitHub Container Registry + uses: docker/login-action@v2 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract short SHA + id: git_sha + run: echo "GIT_SHA=$(git rev-parse --short $GITHUB_SHA)" >> $GITHUB_ENV + + - name: Build and Push Docker Backend Image + run: | + docker build -t ghcr.io/cogitontnu/jarvis-core:${{ env.GIT_SHA }} ./core + docker push ghcr.io/cogitontnu/jarvis-core:${{ env.GIT_SHA }} + + ## Add Build and Push for Docker Frontend Image when it becomes relevant \ No newline at end of file diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..9b037a9 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,20 @@ +name: CI + +on: + push: + branches: ["main"] + pull_request: + branches: ["main"] + +jobs: + build: + runs-on: ubuntu-latest + strategy: + fail-fast: true + steps: + - uses: actions/checkout@v3 + - name: Build docker image and run tests + run: | + docker compose build + docker compose up -d + docker compose down \ No 
newline at end of file diff --git a/.gitignore b/.gitignore index 75b8591..19a4e35 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,7 @@ +# Custom Ignores +user_data + + # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] diff --git a/benchmarking/readme.md b/benchmarking/readme.md index 1e59d9c..2f21e31 100644 --- a/benchmarking/readme.md +++ b/benchmarking/readme.md @@ -1,2 +1,3 @@ ### Benchmarking -Standardized tasks and tests for Jarvis to evaluate performance. \ No newline at end of file +Standardized tasks and tests for Jarvis to evaluate performance. + diff --git a/core/Agents/proofreader.py b/core/Agents/proofreader.py new file mode 100644 index 0000000..e69de29 diff --git a/core/Agents/simpleagent.py b/core/Agents/simpleagent.py new file mode 100644 index 0000000..04249bf --- /dev/null +++ b/core/Agents/simpleagent.py @@ -0,0 +1,12 @@ +from langchain_openai import ChatOpenAI +from models import Model +from config import OPENAI_API_KEY + + +class SimpleAgent: + llm = ChatOpenAI( + model = Model.gpt_4o, + temperature=0, + max_tokens=512, + streaming=True + ) diff --git a/core/ai_message.py b/core/ai_message.py index aabaf40..19e0934 100644 --- a/core/ai_message.py +++ b/core/ai_message.py @@ -1,4 +1,4 @@ class Ai_message: def __init__(self, message:str, token_count:int) -> None: self.message = message - self.token_count = token_count + self.token_count = token_count \ No newline at end of file diff --git a/core/config.py b/core/config.py index b562f0e..b9cd51a 100644 --- a/core/config.py +++ b/core/config.py @@ -7,11 +7,7 @@ PERPLEXITY_API_KEY = os.getenv("PERPLEXITY_API_KEY") #add langsmith api to env as LANGSMITH_API_KEY = "your_api_key" on EU server -LANGSMITH_API_KEY: str -if os.getenv(key="LANGSMITH_API_KEY"): - LANGSMITH_API_KEY: str = os.getenv(key="LANGSMITH_API_KEY") -else: - LANGSMITH_API_KEY: str = "lmao" +LANGSMITH_API_KEY = os.getenv("LANGSMITH_API_KEY", "no_key") os.environ["LANGCHAIN_TRACING_V2"] = "true" 
os.environ["LANGCHAIN_ENDPOINT"] = "https://eu.api.smith.langchain.com" diff --git a/core/graphAgent.py b/core/graphAgent.py index 61588a1..cb3e9ab 100644 --- a/core/graphAgent.py +++ b/core/graphAgent.py @@ -7,14 +7,17 @@ from models import Model import json from config import OPENAI_API_KEY -from agent import Agent, Agent1 +from Agents.simpleagent import SimpleAgent +#from agent import Agent, Agent1 +import asyncio +from time import sleep class Graph: def __init__(self): LANGCHAIN_TRACING_V2: str = "true" - self.llm = Agent1.llm + self.llm = SimpleAgent.llm self.llm_with_tools = self.llm.bind_tools(get_tools()) self.workflow = StateGraph(GraphState) @@ -62,9 +65,9 @@ async def run(self, user_prompt: str, socketio): """ Run the agent with a user prompt and emit the response and total tokens via socket """ - total_tokens = 0 try: input = {"messages": [("human", user_prompt)]} + socketio.emit("start_message", " ") async for event in self.graph.astream_events(input, version='v2'): event_type = event.get('event') @@ -88,7 +91,6 @@ async def run(self, user_prompt: str, socketio): total_tokens = usage_metadata.get('total_tokens') socketio.emit("tokens", total_tokens) - return "success" except Exception as e: print(e) diff --git a/core/graphtools.py b/core/graphtools.py index 446eb4e..1d3e4df 100644 --- a/core/graphtools.py +++ b/core/graphtools.py @@ -16,7 +16,7 @@ class AgentState(TypedDict): messages: Annotated[Sequence[BaseMessage], operator.add] sender: str -class gaphtool: +class graphtool: def __init__(self, graph): self.graph = graph self.nodes = graph.nodes() diff --git a/core/main.py b/core/main.py index b677559..7dbcd70 100644 --- a/core/main.py +++ b/core/main.py @@ -6,7 +6,19 @@ from flask_socketio import SocketIO, send, emit from flask_cors import CORS from config import PORT -import asyncio +import asyncio +from modules.user_data_setup import check_folders +from modules.chat import read_chat +import logging +log = logging.getLogger('werkzeug') 
+log.setLevel(logging.ERROR) + +# +# Setup +# +print("J is booting up....") +check_folders() # Check directories are made for user data +read_chat("1") # # Server config @@ -62,15 +74,16 @@ def disconnect(): @socketio.on('user_prompt') def handle_prompt(data): try: - print("Hello!") conversation_id = data['conversation_id'] # grabs the conversation ID socketio.emit("start_message") - response = asyncio.run(jarvis.run(data['prompt'], socketio), debug=True) # prompts Jarvis and hands off emitting to the graphAgent. - - return jsonify({"status": response}) + asyncio.run(jarvis.run(data['prompt'], socketio), debug=True) # prompts Jarvis and hands off emitting to the graphAgent. + + return jsonify({"status": "success"}) except Exception as e: print(f'Something very bad happened: {e}') return jsonify({"status": "error"}) if __name__ == '__main__': - socketio.run(app, debug=True, host='0.0.0.0', port=PORT, allow_unsafe_werkzeug=True) \ No newline at end of file + socketio.run(app, debug=True, host='0.0.0.0', port=PORT, allow_unsafe_werkzeug=True) + +# hello \ No newline at end of file diff --git a/core/modules/chat.py b/core/modules/chat.py new file mode 100644 index 0000000..c191301 --- /dev/null +++ b/core/modules/chat.py @@ -0,0 +1,27 @@ +import os +import json + +def read_chat(id: str) -> dict: + ''' + Uses chat_id to get the chat JSON file and returns a python dict object. + ''' + dirname = os.path.dirname(os.path.dirname(__file__)) # Creates folder in core named user_data + filepath = os.path.join(dirname, f'user_data/chats/{id}.json') + # Open and read the JSON file + with open(filepath, 'r') as file: + data = json.load(file) + return data + +def upsert_chat(chat_object: dict): + ''' + Upserts a chat dictionary object, saving it as json file in the user_data folder. + Upserting means to update or create if the file doesn't exist yet. Overwriting previous data. 
+ ''' + try: + print("hey") + except Exception as e: + return e + + +# json.dumps() - From python to json +# json.load() - From json to python \ No newline at end of file diff --git a/core/modules/user_data_setup.py b/core/modules/user_data_setup.py new file mode 100644 index 0000000..98773f7 --- /dev/null +++ b/core/modules/user_data_setup.py @@ -0,0 +1,18 @@ +import os + +def check_folders(): + main_folder = "user_data/" + pathsToCheck = ["chats", "images", "pdfs", "fbx"] + for directory in pathsToCheck: + path = main_folder + directory # creates path user_data/chats for example. Everything should be under user_data as its gitignored. + check_and_create_folder(path) # Does a relative folder check, and builds the directory if it doesn't exist + +def check_and_create_folder(path): + dirname = os.path.dirname(os.path.dirname(__file__)) # Creates folder in core named user_data + relativedir = os.path.join(dirname, path) + if not os.path.exists(relativedir): + try: + print("Created user_data directory under core folder. 
This is first-time setup.") + os.makedirs(path) + except Exception as e: + print(e) \ No newline at end of file diff --git a/core/requirements.txt b/core/requirements.txt index 0631011..6885a17 100644 Binary files a/core/requirements.txt and b/core/requirements.txt differ diff --git a/core/static/chat.js b/core/static/chat.js index a3deec0..edd9acd 100644 --- a/core/static/chat.js +++ b/core/static/chat.js @@ -1,5 +1,5 @@ // When user sends a message (pressing send button) this funciton runs -sendMessage = async () => { +sendMessage = () => { let userInput = "" try{ let chat_text_field = document.getElementById('chat_input_text') @@ -8,13 +8,17 @@ sendMessage = async () => { chat_text_field.value = "" chat_history = document.getElementById("chat_history") chat_history.scrollTop = chat_history.scrollHeight; - }catch(e){ + } catch(e){ console.log(e) } // Send the message via the open socket try{ - let res = await socket.emit('user_prompt', {prompt: userInput, conversation_id: state.activeConversationId}) + console.log("User prompt is: " + userInput); + const payload = {prompt: userInput, conversation_id: state.activeConversationId} + console.log("Payload is: ", payload); + let res = socket.emit('user_prompt', payload) + console.log("Prompt sent to backend"); // Stream to the current active AI chat box }catch(e){ console.log("Something went wrong", e) @@ -30,7 +34,7 @@ addStreamedChunk = (messagePart) => { } } -let endStreamedAIMessage = () => { +endStreamedAIMessage = () => { if (state.activeAIMessage) { console.log("Message end") let output = state.activeAIMessage.innerHTML @@ -40,14 +44,7 @@ let endStreamedAIMessage = () => { } else { console.log("No active AI message to end.") } - -} -let startStreamedAIMessage = (uuid) => { - console.log("Message start") - addMessage(uuid); // Create an AI message when it begins streaming. - let ai_message = document.getElementById(uuid) - state.activeAIMessage = ai_message // Active element gets added to the state. 
} // Generates unique id on socket.on("start_message") diff --git a/core/static/index.css b/core/static/index.css index e5e5324..00d7c34 100644 --- a/core/static/index.css +++ b/core/static/index.css @@ -146,11 +146,11 @@ body { } .chat{ - width: 65%; + width: 100%; } .chatHistory { - width: 250px; + width: 400px; height: 80vh; margin: 6px; margin-top: 8px; @@ -205,7 +205,7 @@ p{ } .processesContainer{ - width: 250px; + width: 400px; height: 80vh; margin-top: 8px; border-radius: 10px; diff --git a/core/static/socketEvents.js b/core/static/socketEvents.js index 5f26638..d5bf7b6 100644 --- a/core/static/socketEvents.js +++ b/core/static/socketEvents.js @@ -12,7 +12,13 @@ let uuid = 0 // prints chunks that are streamed to the console and adds them to the chat socket.on("chunk", async (chunk)=>{ - console.log(chunk); + if(!state.activeAIMessage){ + console.log("STARTED MESSAGE") + uuid = generateUUID(); + await addStreamedMessage(uuid, ""); + ai_message = document.getElementById(uuid) + state.activeAIMessage = ai_message + } await addStreamedMessage(uuid, chunk); }) @@ -23,9 +29,7 @@ socket.on("tokens", async (tokens) => { }) socket.on("start_message", async () => { - uuid = generateUUID(); - console.log(uuid); - await addStreamedMessage(uuid, ""); + }) // Remember to parse the streamed response diff --git a/core/tools/google_calender_create.py b/core/tools/google_calender_create.py index 0cac798..cf4540f 100644 --- a/core/tools/google_calender_create.py +++ b/core/tools/google_calender_create.py @@ -61,8 +61,8 @@ def get_tool() -> StructuredTool: summary = "Test Event" location = "Online" description = "This is a test event created by the Google Calendar tool" - start_time = "2024-10-09T12:00:00Z" # Format: YYYY-MM-DDTHH:MM:SSZ - end_time = "2024-10-09T15:00:00Z" # Format: YYYY-MM-DDTHH:MM:SSZ + start_time = "2024-10-16T12:00:00Z" # Format: YYYY-MM-DDTHH:MM:SSZ + end_time = "2024-10-16T15:00:00Z" # Format: YYYY-MM-DDTHH:MM:SSZ result = create_calendar_event(summary, 
location, description, start_time, end_time) print(result) \ No newline at end of file diff --git a/core/tools/tools.py b/core/tools/tools.py index a960fb2..fdfcfc7 100644 --- a/core/tools/tools.py +++ b/core/tools/tools.py @@ -5,6 +5,7 @@ import tools.read_file as read_file import tools.read_pdf as read_pdf import tools.weather as weather +import tools.google_calender_create as create_calender_event def get_tools() -> list[StructuredTool]: tools = [] @@ -14,6 +15,6 @@ def get_tools() -> list[StructuredTool]: tools.append(read_file.get_tool()) tools.append(read_pdf.get_tool()) tools.append(weather.get_tool()) - + tools.append(create_calender_event.get_tool()) return tools \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index f344eaa..d3a6956 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,23 +1,12 @@ services: - # ui-service: - # build: ./ui - # env_file: .env - # restart: unless-stopped - # volumes: - # - ./ui:/app # Mount the application code - # - /app/node_modules - # networks: - # - backend - # stop_signal: SIGINT - # ports: - # - "3000:3000" - llm-service: build: ./core - env_file: .env restart: unless-stopped environment: - - FLASK_ENV=development # Autorestarts flask when code changes are detected + FLASK_ENV: ${FLASK_ENV} # Autorestarts flask when code changes are detected + OPENAI_API_KEY: ${OPENAI_API_KEY} + LANGSMITH_API_KEY: ${LANGSMITH_API_KEY} + PORT: ${PORT} volumes: - ./core:/app # Mount the application code to detect live changes networks: @@ -26,18 +15,6 @@ services: ports: - "3000:3000" -# speech-service: -# build: ./speech -# env_file: .env -# restart: unless-stopped -# environment: -# volumes: -# - ./speech:/app -# networks: -# - backend -# stop_signal: SIGINT - # ports: - # - "3069:3069" #nice networks: backend: