Skip to content

Commit

Permalink
Merge pull request #242 from FullStackWithLawrence/next
Browse the repository at this point in the history
Next
  • Loading branch information
lpm0073 authored Feb 5, 2025
2 parents e751dd5 + 53254e0 commit 184d30b
Show / hide file tree
Showing 6 changed files with 12 additions and 11 deletions.
2 changes: 1 addition & 1 deletion .vscode/settings.json
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
{
"cornflakes.linter.executablePath": "/Users/mcdaniel/desktop/aws-openai/venv/bin/flake8",
"cornflakes.linter.executablePath": "./venv/bin/flake8",
"[python]": {
"editor.defaultFormatter": "ms-python.black-formatter"
}
Expand Down
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ ifeq ($(OS),Windows_NT)
PYTHON = python.exe
ACTIVATE_VENV = venv\Scripts\activate
else
PYTHON = python3.11
PYTHON = python3.12
ACTIVATE_VENV = source venv/bin/activate
endif
PIP = $(PYTHON) -m pip
Expand Down
2 changes: 1 addition & 1 deletion models/__version__.py
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
# Managed via automated CI/CD in .github/workflows/semanticVersionBump.yml.
__version__ = "1.3.2"
__version__ = "1.3.4"
9 changes: 5 additions & 4 deletions models/hybrid_search_retreiver.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,14 +21,14 @@
import textwrap
from typing import Union

# pinecone integration
from langchain.cache import InMemoryCache

# embedding
from langchain.globals import set_llm_cache
from langchain.prompts import PromptTemplate
from langchain.schema import BaseMessage, HumanMessage, SystemMessage

# pinecone integration
from langchain_community.cache import InMemoryCache

# hybrid search capability
from langchain_community.retrievers.pinecone_hybrid_search import (
PineconeHybridSearchRetriever,
Expand Down Expand Up @@ -110,7 +110,8 @@ def cached_chat_request(
human_message = HumanMessage(content=str(human_message))
messages = [system_message, human_message]
# pylint: disable=not-callable
retval = self.chat(messages)
# retval = self.chat(messages)
retval = self.chat.invoke(messages)
return retval

def prompt_with_template(
Expand Down
2 changes: 1 addition & 1 deletion models/pinecone.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
# pinecone integration
# import pinecone
from pinecone import Pinecone, ServerlessSpec
from pinecone.core.client.exceptions import PineconeApiException
from pinecone.core.openapi.shared.exceptions import PineconeApiException
from pinecone.models import IndexList

# this project
Expand Down
6 changes: 3 additions & 3 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ flake8-coding==1.3.2
pre-commit==4.0.1
isort==6.0.0
mypy==1.14.1
pylint==3.3.3
pylint==3.3.4
bandit==1.7.10
pydocstringformatter==0.7.3
tox==4.23.2
Expand All @@ -21,10 +21,10 @@ codespell==2.4.1
# ------------
python-decouple==3.8
langchainhub==0.1.21
langchain-openai==0.3.3
langchain-openai==0.1.25
langchain-experimental
openai>=1.40.0
langchain==0.3.17
langchain==0.2.11
langchain-pinecone==0.1.3
langchain-experimental
pinecone-client==5.0.1
Expand Down

0 comments on commit 184d30b

Please sign in to comment.