import uvicorn
import streamlit as st
import streamlit.components.v1 as components
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from transformers import MBartForConditionalGeneration, MBartTokenizer, MBartConfig
# Load and display index.html
html_string = open('index.html', 'r').read()
components.html(html_string, height=1200)
# Load model and tokenizer at startup
MODEL_PATH = "GobLyne/Rumi-Jawi-Translater" # Path to your model folder
tokenizer = MBartTokenizer.from_pretrained(MODEL_PATH)
model = MBartForConditionalGeneration.from_pretrained(MODEL_PATH)
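# Note: mBART marks languages with special code tokens; the generate() call further
# below forces "ar_AR" (the Arabic-script code, used here for Jawi output) as the
# decoder start token, and the same code is stripped from the decoded text.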
app = FastAPI()
# List of allowed origins (the frontend URLs the HTML is served from)
origins = [
"http://127.0.0.1:5500", # Your frontend origin (adjust port if needed)
"http://localhost:5500", # Another common frontend origin
"https://goblyne.github.io/Jawi-Baru-Transliteration/",
"http://35.175.72.198:8501",
"http://10.27.69.196:8501",
"http://localhost:8501",
"https://huggingface.co/spaces/GobLyne/Jawi-Translation",
"https://goblyne-jawi-translation.hf.space",
]
# Add CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,    # Allow these origins
    allow_credentials=True,
    allow_methods=["*"],      # Allow all HTTP methods (GET, POST, etc.)
    allow_headers=["*"],      # Allow all headers
)
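# Browsers enforce CORS: the middleware only attaches the Access-Control-Allow-Origin
# header for the origins listed above, so pages served from other origins cannot read
# responses from this API.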
# Input model for translation
class TranslationRequest(BaseModel):
    text: str
@app.get('/')
def index():
    return {'message': 'Jawi Translater API'}
@app.post("/translate")
async def translate(data: TranslationRequest):
    sentence = data.text.strip()
    if not sentence:
        raise HTTPException(status_code=400, detail="No text provided for translation")
    try:
        # Translate the text
        inputs = tokenizer(sentence, return_tensors="pt")
        translated_tokens = model.generate(
            **inputs,
            decoder_start_token_id=tokenizer.lang_code_to_id["ar_AR"],
            early_stopping=True,
            max_length=120,
        )
        pred = tokenizer.batch_decode(translated_tokens, skip_special_tokens=True)[0]
        pred = pred.replace("ar_AR", "").strip()
        return {"translated_text": pred}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
# Run with this command inside terminal
# uvicorn main:app --reload
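
# Example client call (a minimal sketch; assumes the API is reachable at
# http://127.0.0.1:8000, uvicorn's default host and port, and uses an
# illustrative Rumi input):
#
#   curl -X POST http://127.0.0.1:8000/translate \
#        -H "Content-Type: application/json" \
#        -d '{"text": "selamat pagi"}'
#
# Expected response shape: {"translated_text": "..."}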