bump to 0.2.0
dmulyalin committed Jan 19, 2025
1 parent 9f9bade commit 7f0b444
Showing 4 changed files with 49 additions and 12 deletions.
17 changes: 17 additions & 0 deletions docs/norfab_changelog.md
@@ -1,3 +1,20 @@
## 0.2.0

### CHANGES

1. Refactored `get_circuits` to use `ThreadPoolExecutor` to fetch circuit paths from Netbox concurrently (a minimal sketch of this pattern follows this list)
2. Added `job_data` JSON load to Nornir `cli`, `cfg`, and `test` tasks
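
The refactor in item 1 follows the standard `concurrent.futures` pattern. Below is a minimal, illustrative sketch only; `fetch_circuit_path` and the worker count are placeholders, not NorFab's actual API.

```python
# Illustrative sketch: fetch circuit paths concurrently with ThreadPoolExecutor.
# fetch_circuit_path is a hypothetical helper, not NorFab's real function.
from concurrent.futures import ThreadPoolExecutor

def fetch_circuit_path(circuit_id):
    """Query Netbox for the path of a single circuit (placeholder)."""
    ...

def get_circuit_paths(circuit_ids, max_workers=10):
    # submit one fetch per circuit; results come back in input order
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        return dict(zip(circuit_ids, executor.map(fetch_circuit_path, circuit_ids)))
```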

### BUGS

1. Fixed Netbox `get_devices` dry run test
2. Fixed Netbox `get_circuits` device site retrieval handling

### FEATURES

1. Added caching to Netbox `get_circuits` and `get_devices` tasks (see the caching sketch after this list)
2. Added new `agent` worker to start working on use cases that interface with LLMs
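
The caching in item 1 could look roughly like the following, assuming the `diskcache` package already listed under the Netbox service dependencies in `pyproject.toml`; the cache directory, key, and TTL are illustrative assumptions, not the worker's actual implementation.

```python
# Hypothetical sketch of caching Netbox task results with diskcache;
# directory, key construction, and expiry are assumptions for illustration.
from diskcache import Cache

cache = Cache("/tmp/norfab_netbox_cache")

def get_devices_cached(cache_key, fetch_devices, ttl=3600):
    devices = cache.get(cache_key)
    if devices is None:
        devices = fetch_devices()              # query Netbox only on a cache miss
        cache.set(cache_key, devices, expire=ttl)
    return devices
```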

## 0.1.1

### BUGS
2 changes: 1 addition & 1 deletion docs/workers/agent/services_agent_service_tasks_chat.md
@@ -30,4 +30,4 @@ nf#

## Python API Reference

::: norfab.workers.nornir_worker.NornirWorker.task
::: norfab.workers.agent_worker.AgentWorker.chat
21 changes: 12 additions & 9 deletions norfab/workers/agent_worker.py
@@ -92,13 +92,7 @@ def get_status(self):

def _chat_ollama(self, user_input, template=None) -> str:
"""
Handles the chat interaction with the user by processing the input through a language model.
:param user_input: The input provided by the user.
:param template: A template string for formatting the prompt. Defaults to
this string: 'Question: {user_input}; Answer: Let's think step by step.
Provide answer in markdown format.'
:returns: result of the language model's processing.
Handles the chat interaction with Ollama LLM.
"""
self.event(f"Received user input '{user_input[:50]}..'")
ret = Result(task=f"{self.name}:chat")
@@ -116,8 +110,17 @@ def _chat_ollama(self, user_input, template=None) -> str:

return ret

def chat(self, **kwargs):
def chat(self, user_input, template=None) -> str:
"""
Handles the chat interaction with the user by processing the input through a language model.
:param user_input: The input provided by the user.
:param template: A template string for formatting the prompt. Defaults to
this string: 'Question: {user_input}; Answer: Let's think step by step.
Provide answer in markdown format.'
:returns: language model's response
"""
if self.llm_flavour == "ollama":
return self._chat_ollama(**kwargs)
return self._chat_ollama(user_input, template)
else:
raise Exception(f"Unsupported llm flavour {self.llm_flavour}")
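
For context, the new `chat` entry point dispatches to `_chat_ollama` when `llm_flavour` is `ollama`. A rough sketch of how the default prompt template from the docstring could be wired up with `langchain-ollama` (added to the dependencies below) is shown here; the model name and chain structure are assumptions, not the worker's actual implementation.

```python
# Hedged sketch: drive the default prompt template through langchain-ollama.
# Model name and chain wiring are illustrative assumptions.
from langchain_core.prompts import PromptTemplate
from langchain_ollama import OllamaLLM

llm = OllamaLLM(model="llama3")  # any locally pulled Ollama model
prompt = PromptTemplate.from_template(
    "Question: {user_input}; Answer: Let's think step by step. "
    "Provide answer in markdown format."
)
chain = prompt | llm                          # LCEL runnable sequence
answer = chain.invoke({"user_input": "What does the agent worker do?"})
print(answer)
```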
21 changes: 19 additions & 2 deletions pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "norfab"
version = "0.1.2"
version = "0.2.0"
description = "Network Automations Fabric [NorFab] - communication networks automations"
authors = ["Denis Mulyalin <[email protected]>"]
maintainers = ["Denis Mulyalin <[email protected]>"]
@@ -27,7 +27,7 @@ psutil = ">=6.0.0,<=7.0.0"
tornado = ">=6.1.0,<=7.0.0"

# PICLE Client Dependencies
picle = { version = ">=0.8.0,<1.0.0", optional = true }
picle = { version = ">=0.8.4,<1.0.0", optional = true }
rich = { version = ">=13.0.0,<14.0.0", optional = true }
tabulate = { version = ">=0.9.0,<1.0.0", optional = true }
pydantic = { version = ">=2.3.0,<3.0.0", optional = true }
@@ -61,6 +61,11 @@ N2G = { version = "0.3.*", optional = true }
dnspython = { version = "2.4.2", optional = true }
pythonping = { version = "1.1.4", optional = true }
robotframework = { version = "7.0", optional = true }
langchain = { version = "0.3.14", optional = true }
langchain-community = { version = "0.3.14", optional = true }
langchain-core = { version = "0.3.30", optional = true }
langchain-ollama = { version = "0.2.2", optional = true }
ollama = { version = "0.4.6", optional = true }

# Netbox Service Dependencies
diskcache = { version = "5.6.3", optional = true }
@@ -134,6 +139,13 @@ netboxservice = [
robot = [
"robot"
]
agent = [
"langchain",
"langchain-community",
"langchain-core",
"langchain-ollama",
"ollama",
]
full = [
"picle",
"rich",
@@ -171,4 +183,9 @@ full = [
"requests",
"pynetbox",
"robotframework",
"langchain",
"langchain-community",
"langchain-core",
"langchain-ollama",
"ollama",
]
