aizanlabs committed on
Commit
b5fc613
·
verified ·
1 Parent(s): 2379bd3

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +12 -8
app.py CHANGED
@@ -71,7 +71,7 @@ class DocumentRetrievalAndGeneration:
71
  return generate_text
72
  def initialize_llm2(self,model_id):
73
  try:
74
- pipe = pipeline("text-generation", model="google/gemma-2-9b")
75
  except:
76
  try:
77
  pipe = pipeline("text-generation", model="microsoft/Phi-3-mini-4k-instruct", trust_remote_code=True)
@@ -123,7 +123,7 @@ class DocumentRetrievalAndGeneration:
123
  print("############################")
124
  else:
125
  print(f"Index {idx} is out of bounds. Skipping.")
126
-
127
  prompt = f"""<s>
128
  You are a knowledgeable assistant with access to a comprehensive database.
129
  I need you to answer my question and provide related information in a specific format.
@@ -132,7 +132,7 @@ class DocumentRetrievalAndGeneration:
132
  Include a final answer without additional comments, sign-offs, or extra phrases. Be direct and to the point.
133
  content
134
  Here's my question:
135
- Query:{query}
136
  Solution==>
137
  RETURN ONLY SOLUTION . IF THEIR IS NO ANSWER RELATABLE IN RETRIEVED CHUNKS , RETURN " NO SOLUTION AVAILABLE"
138
  IF THE QUERY AND THE RETRIEVED CHUNKS DO NOT CORRELATE MEANINGFULLY, OR IF THE QUERY IS NOT RELEVANT TO TDA2 OR RELATED TOPICS, THEN "NO SOLUTION AVAILABLE."
@@ -151,11 +151,15 @@ class DocumentRetrievalAndGeneration:
151
  Solution:"NO SOLUTION AVAILABLE"
152
  </s>
153
  """
154
- messages = [
155
- {"role": "user", "content": prompt},
156
- ]
157
-
158
- generated_response=pipe(messages)
 
 
 
 
159
  # messages = [{"role": "user", "content": prompt}]
160
  # encodeds = self.llm.tokenizer.apply_chat_template(messages, return_tensors="pt")
161
  # model_inputs = encodeds.to(self.llm.device)
 
71
  return generate_text
72
  def initialize_llm2(self,model_id):
73
  try:
74
+ client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
75
  except:
76
  try:
77
  pipe = pipeline("text-generation", model="microsoft/Phi-3-mini-4k-instruct", trust_remote_code=True)
 
123
  print("############################")
124
  else:
125
  print(f"Index {idx} is out of bounds. Skipping.")
126
+ # {query}
127
  prompt = f"""<s>
128
  You are a knowledgeable assistant with access to a comprehensive database.
129
  I need you to answer my question and provide related information in a specific format.
 
132
  Include a final answer without additional comments, sign-offs, or extra phrases. Be direct and to the point.
133
  content
134
  Here's my question:
135
+ Query:
136
  Solution==>
137
  RETURN ONLY SOLUTION . IF THEIR IS NO ANSWER RELATABLE IN RETRIEVED CHUNKS , RETURN " NO SOLUTION AVAILABLE"
138
  IF THE QUERY AND THE RETRIEVED CHUNKS DO NOT CORRELATE MEANINGFULLY, OR IF THE QUERY IS NOT RELEVANT TO TDA2 OR RELATED TOPICS, THEN "NO SOLUTION AVAILABLE."
 
151
  Solution:"NO SOLUTION AVAILABLE"
152
  </s>
153
  """
154
+ messages = [{"role": "system", "content": prompt}]
155
+ messages.append({"role": "user", "content": message})
156
+ response = ""
157
+
158
+ for message in client.chat_completion(messages,max_tokens=2048,stream=True,temperature=0.7):
159
+ token = message.choices[0].delta.content
160
+ response += token
161
+ # yield response
162
+ generated_response=response
163
  # messages = [{"role": "user", "content": prompt}]
164
  # encodeds = self.llm.tokenizer.apply_chat_template(messages, return_tensors="pt")
165
  # model_inputs = encodeds.to(self.llm.device)