mannadamay12 committed
Update app.py
app.py
CHANGED
@@ -72,14 +72,22 @@ def respond(message, history, system_message, max_tokens, temperature, top_p):
     # Generate prompt
     prompt = generate_prompt(context=context, question=message, system_prompt=system_message)
 
-    #
-
-
+    # Set up the pipeline
+    text_pipeline = pipeline(
+        "text-generation",
+        model=model,
+        tokenizer=tokenizer,
         max_new_tokens=max_tokens,
         temperature=temperature,
         top_p=top_p,
-        repetition_penalty=1.15
-
+        repetition_penalty=1.15
+    )
+
+    # Generate response
+    output = text_pipeline(
+        prompt,
+        return_full_text=False,
+        max_new_tokens=max_tokens
     )[0]['generated_text']
 
     yield output.strip()
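For reference, a minimal, self-contained sketch of the transformers text-generation pipeline pattern the new code relies on. The model name (distilgpt2) and the do_sample flag are arbitrary illustration choices, not values taken from app.py; the Space builds its pipeline from its own model and tokenizer objects.

from transformers import pipeline

# Small model chosen only to keep this sketch runnable; app.py uses its own model/tokenizer.
generator = pipeline("text-generation", model="distilgpt2")

result = generator(
    "Explain what a retrieval-augmented chatbot does.",
    max_new_tokens=64,
    do_sample=True,            # sampling must be enabled for temperature/top_p to take effect
    temperature=0.7,
    top_p=0.9,
    repetition_penalty=1.15,
    return_full_text=False,    # return only the newly generated text, not the prompt
)

# The pipeline returns a list with one dict per generated sequence.
print(result[0]["generated_text"].strip())

Setting return_full_text=False is what lets the caller yield the model's answer directly without first stripping the prompt back out of the generated string.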