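"""Gradio app that labels a message as "positive" or "negative" by prompting
meta-llama/Meta-Llama-3-8B-Instruct through the Hugging Face Inference API."""
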
import os
import requests
import gradio as gr

# Hugging Face API token, read from the TOKEN environment variable.
api_token = os.environ.get("TOKEN")

# Serverless Inference API endpoint for the Llama 3 8B Instruct model.
API_URL = "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-8B-Instruct"
headers = {"Authorization": f"Bearer {api_token}"}

def query(payload):
    # POST the payload to the Inference API and return the parsed JSON response.
    response = requests.post(API_URL, headers=headers, json=payload)
    return response.json()


def analyze_sentiment(text):
    # Build a Llama 3 chat-formatted prompt that asks the model for a one-word label.
    output = query({
        "inputs": f'''<|begin_of_text|><|start_header_id|>system<|end_header_id|>

You are a sentiment analyser. Answer only "positive" if the message expresses a positive feeling and "negative" if it expresses a negative one.<|eot_id|><|start_header_id|>user<|end_header_id|>

{text}<|eot_id|><|start_header_id|>assistant<|end_header_id|>

''',
        # Ask for the completion only (not the echoed prompt) so the label check below works.
        "parameters": {"return_full_text": False}
    })

    # The Inference API returns a list such as [{"generated_text": "..."}]:
    # extract the generated text and map it to a sentiment label.
    if isinstance(output, list) and len(output) > 0:
        response = output[0].get('generated_text', '').strip().lower()
        if 'positive' in response:
            return 'positive'
        elif 'negative' in response:
            return 'negative'
    return "Error: unexpected API response"


# Minimal Gradio UI: one text box in, one text box out.
demo = gr.Interface(
    fn=analyze_sentiment,
    inputs=["text"],
    outputs=["text"],
)

demo.launch()
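# By default, launch() serves the interface locally at http://127.0.0.1:7860.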