feat: add
app.py CHANGED
@@ -7,14 +7,18 @@ def greet(name):
 
 
 @spaces.GPU
-def infer(
-
+def infer(input_text: str = "Who are you?"):
+
+    # messages = [
+    #     {"role": "user", "content": name},
+    # ]
+    model = transformers.AutoModelForCausalLM.from_pretrained("microsoft/Phi-3-mini-4k-instruct", trust_remote_code=True)
+    token = transformers.AutoTokenizer.from_pretrained("microsoft/Phi-3-mini-4k-instruct", trust_remote_code=True)
+    token.encode(input_text, return_tensors="pt" )
+    output = model(input_ids)
+    print(output)
+    return output
 
-    messages = [
-        {"role": "user", "content": name},
-    ]
-    pipe = pipeline("text-generation", model="microsoft/Phi-3-mini-4k-instruct", trust_remote_code=True)
-    return pipe(messages)
 
 text_input = gr.Textbox(label="Input Text", placeholder="test")
 
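Note that the infer() added in this commit will fail at runtime: the result of token.encode(...) is never assigned, so input_ids is undefined, and calling model(input_ids) directly would return raw logits rather than generated text. The following is a minimal sketch of what the function presumably intends; the use of model.generate(), the max_new_tokens value, and loading the model at module level are assumptions on my part, not part of this commit.

import spaces
import torch
import transformers

MODEL_ID = "microsoft/Phi-3-mini-4k-instruct"

# Assumption: load once at import time; the commit loads inside infer() on every call.
model = transformers.AutoModelForCausalLM.from_pretrained(MODEL_ID, trust_remote_code=True)
tokenizer = transformers.AutoTokenizer.from_pretrained(MODEL_ID, trust_remote_code=True)

@spaces.GPU
def infer(input_text: str = "Who are you?"):
    # Keep the encoded ids; the committed version discards them and then references an undefined input_ids.
    input_ids = tokenizer.encode(input_text, return_tensors="pt")
    with torch.no_grad():
        # generate() produces new token ids; model(input_ids) would only return logits.
        output_ids = model.generate(input_ids, max_new_tokens=64)
    output_text = tokenizer.decode(output_ids[0], skip_special_tokens=True)
    print(output_text)
    return output_text

Loading the model and tokenizer at module level, outside the @spaces.GPU-decorated function, also avoids re-initializing them on every request, which is how ZeroGPU Spaces are typically structured.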