momenaca committed on
Commit
0e0d14f
·
1 Parent(s): 8ca00e0

fix bug related to OpenAI endpoint

Browse files
app.py CHANGED
@@ -5,8 +5,8 @@ from pinecone import Pinecone
5
  from huggingface_hub import whoami
6
  from langchain.prompts import ChatPromptTemplate
7
  from langchain_community.embeddings import HuggingFaceBgeEmbeddings
8
- from langchain_openai import AzureChatOpenAI
9
  from langchain.prompts.prompt import PromptTemplate
 
10
  from langchain.memory import ConversationBufferMemory
11
  from langchain_community.vectorstores import Pinecone as PineconeVectorstore
12
  from celsius_csrd_chatbot.utils import (
@@ -14,14 +14,12 @@ from celsius_csrd_chatbot.utils import (
14
  make_pairs,
15
  _format_chat_history,
16
  _combine_documents,
17
- get_llm,
18
  init_env,
19
  parse_output_llm_with_sources,
20
  )
21
  from celsius_csrd_chatbot.agent import make_graph_agent, display_graph
22
 
23
  init_env()
24
- chat_model_init = get_llm()
25
  demo_name = "ESRS_QA"
26
  hf_model = "BAAI/bge-base-en-v1.5"
27
 
@@ -33,7 +31,7 @@ embeddings = HuggingFaceBgeEmbeddings(
33
  pc = Pinecone(api_key=os.getenv("PINECONE_API_KEY"))
34
  index = pc.Index(os.getenv("PINECONE_API_INDEX"))
35
  vectorstore = PineconeVectorstore(index, embeddings, "page_content")
36
- llm = AzureChatOpenAI()
37
  agent = make_graph_agent(llm, vectorstore)
38
 
39
  memory = ConversationBufferMemory(
 
5
  from huggingface_hub import whoami
6
  from langchain.prompts import ChatPromptTemplate
7
  from langchain_community.embeddings import HuggingFaceBgeEmbeddings
 
8
  from langchain.prompts.prompt import PromptTemplate
9
+ from langchain_groq import ChatGroq
10
  from langchain.memory import ConversationBufferMemory
11
  from langchain_community.vectorstores import Pinecone as PineconeVectorstore
12
  from celsius_csrd_chatbot.utils import (
 
14
  make_pairs,
15
  _format_chat_history,
16
  _combine_documents,
 
17
  init_env,
18
  parse_output_llm_with_sources,
19
  )
20
  from celsius_csrd_chatbot.agent import make_graph_agent, display_graph
21
 
22
  init_env()
 
23
  demo_name = "ESRS_QA"
24
  hf_model = "BAAI/bge-base-en-v1.5"
25
 
 
31
  pc = Pinecone(api_key=os.getenv("PINECONE_API_KEY"))
32
  index = pc.Index(os.getenv("PINECONE_API_INDEX"))
33
  vectorstore = PineconeVectorstore(index, embeddings, "page_content")
34
+ llm = ChatGroq(temperature=0, model_name="llama-3.2-90b-text-preview")
35
  agent = make_graph_agent(llm, vectorstore)
36
 
37
  memory = ConversationBufferMemory(
celsius_csrd_chatbot/chains/esrs_categorization.py CHANGED
@@ -5,7 +5,7 @@ def make_esrs_categorization_node():
5
 
6
  def categorize_message(state):
7
  query = state["query"]
8
- pattern = r"ESRS \d+[A-Z0-9]*"
9
  esrs_truth = [
10
  "ESRS 1",
11
  "ESRS 2",
 
5
 
6
  def categorize_message(state):
7
  query = state["query"]
8
+ pattern = r"ESRS \d+[A-Z0-9]*|ESRS [A-Z]+\d+[A-Z0-9]*"
9
  esrs_truth = [
10
  "ESRS 1",
11
  "ESRS 2",
celsius_csrd_chatbot/utils.py CHANGED
@@ -3,7 +3,6 @@ import re
3
  from typing import Tuple, List
4
  from dotenv import load_dotenv
5
  from msal import ConfidentialClientApplication
6
- from langchain_openai import AzureChatOpenAI
7
  from langchain.schema import format_document
8
 
9
 
 
3
  from typing import Tuple, List
4
  from dotenv import load_dotenv
5
  from msal import ConfidentialClientApplication
 
6
  from langchain.schema import format_document
7
 
8
 
poetry.lock CHANGED
The diff for this file is too large to render. See raw diff
 
pyproject.toml CHANGED
@@ -8,19 +8,18 @@ package-mode = true
8
 
9
  [tool.poetry.dependencies]
10
  python = ">=3.10,<3.13"
11
- langchain = "^0.2.5"
12
  gradio = {extras = ["oauth"], version = "^4.36.1"}
13
  sentence-transformers = "^3.0.1"
14
- langchain-community = "^0.2.5"
15
  msal = "^1.28.1"
16
  loadenv = "^0.1.1"
17
  openai = "^1.34.0"
18
- langchain-openai = "^0.1.8"
19
  pinecone = "^4.0.0"
20
  pinecone-client = "^5.0.1"
21
  langgraph = "^0.2.0"
22
- langchain-core = "^0.2.29"
23
  ipython = "^8.26.0"
 
 
 
24
 
25
 
26
  [build-system]
 
8
 
9
  [tool.poetry.dependencies]
10
  python = ">=3.10,<3.13"
 
11
  gradio = {extras = ["oauth"], version = "^4.36.1"}
12
  sentence-transformers = "^3.0.1"
 
13
  msal = "^1.28.1"
14
  loadenv = "^0.1.1"
15
  openai = "^1.34.0"
 
16
  pinecone = "^4.0.0"
17
  pinecone-client = "^5.0.1"
18
  langgraph = "^0.2.0"
 
19
  ipython = "^8.26.0"
20
+ langchain-groq = "^0.2.0"
21
+ langchain-core = "^0.3.12"
22
+ langchain = "^0.3.3"
23
 
24
 
25
  [build-system]