|
import os |
|
|
|
import gradio as gr |
|
import torch |
|
from transformers import pipeline |
|
|
|
from utils import clean_text |
|
|
|
|
|
# Text-classification pipeline for AI-generated-text detection.
# Named `classifier` rather than `pipeline` so it does not shadow the
# imported `transformers.pipeline` factory function.
classifier = pipeline(
    task="text-classification",
    model="fakespotailabs/roberta-base-ai-text-detection-v1",
    device="cuda" if torch.cuda.is_available() else "cpu",
    # Gated/private model access; None is fine for public models.
    token=os.environ.get("ACCESS_TOKEN"),
)


def predict(text):
    """Classify *text* and return a {label: score} mapping.

    The mapping shape is what `gr.Label` expects: one probability per
    class label.
    """
    cleaned_text = clean_text(text)
    # top_k=None returns scores for all labels; [0] unwraps the
    # single-input batch dimension of the pipeline output.
    predictions = classifier(cleaned_text, top_k=None)[0]
    return {p["label"]: p["score"] for p in predictions}
|
|
|
|
|
# Gradio UI: one textbox in, top-2 label probabilities out.
demo = gr.Interface(
    fn=predict,
    inputs=gr.Textbox(),
    outputs=gr.Label(num_top_classes=2),
    title="AI Text Detector",
)

# Guard the server start so importing this module (e.g. in tests or
# tooling) does not launch the app as a side effect.
if __name__ == "__main__":
    demo.launch()
|
|