Spaces:
Sleeping
bushra1dajam
committed on
Upload 4 files
- app.py +218 -0
- logo.png +0 -0
- svm_model.pkl +3 -0
- tfidf_vectorizer.pkl +3 -0
app.py
ADDED
@@ -0,0 +1,218 @@
import torch
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
import pickle
import re
import string
import nltk
from nltk.tokenize import word_tokenize
from nltk.corpus import stopwords
from tashaphyne.stemming import ArabicLightStemmer
import pyarabic.araby as araby
from sklearn.feature_extraction.text import TfidfVectorizer  # kept so the scikit-learn dependency is explicit; unpickling the vectorizer requires it installed
import streamlit as st

nltk.download('punkt')

# Load the fitted TF-IDF vectorizer and the trained SVM classifier
with open('tfidf_vectorizer.pkl', 'rb') as f:
    vectorizer = pickle.load(f)

with open('svm_model.pkl', 'rb') as f:
    model_classify = pickle.load(f)
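
The training code is not part of this commit; the following is only a minimal sketch of how such a vectorizer/classifier pair is typically produced with scikit-learn. The toy corpus, labels, and the linear kernel are assumptions — the actual training data and SVM settings are unknown.

# Hypothetical training sketch, not part of this Space.
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.svm import SVC
import pickle

texts = ["حكم جنايه", "عقد زواج"]  # toy strings standing in for preprocessed case texts
labels = [0, 1]                    # class ids matching class_mapping below

vec = TfidfVectorizer()
X = vec.fit_transform(texts)
clf = SVC(kernel='linear')         # assumption; the pickled model's kernel is unknown
clf.fit(X, labels)

with open('tfidf_vectorizer.pkl', 'wb') as f:
    pickle.dump(vec, f)
with open('svm_model.pkl', 'wb') as f:
    pickle.dump(clf, f)
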
# Load the AraBART summarization model and tokenizer
model = AutoModelForSeq2SeqLM.from_pretrained("bushra1dajam/AraBART")
tokenizer = AutoTokenizer.from_pretrained('bushra1dajam/AraBART')

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model.to(device)

def summarize_text(text):
    inputs = tokenizer("summarize: " + text, return_tensors="pt", max_length=512, truncation=True)
    inputs = {k: v.to(device) for k, v in inputs.items()}

    summary_ids = model.generate(
        inputs["input_ids"],
        max_length=512,
        num_beams=8,
        # no_repeat_ngram_size=4,  # prevents larger n-gram repetitions
        early_stopping=True)
    summary = tokenizer.decode(summary_ids[0], skip_special_tokens=True)
    return summary
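
A quick way to sanity-check the summarizer outside Streamlit; the sample sentence is only illustrative:

# Illustrative call; any Arabic passage works as input.
sample = "نص قانوني طويل يحتاج إلى تلخيص"
print(summarize_text(sample))
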
def remove_numbers(text):
    cleaned_text = re.sub(r'\d+', '', text)
    return cleaned_text

def Removing_non_arabic(text):
    # Replace every run of characters outside the Arabic script blocks
    # (plus ASCII digits and the period) with a single space.
    text = re.sub(r'[^0-9.\u0600-\u06ff\u0750-\u077f\ufb50-\ufbc1\ufbd3-\ufd3f\ufd50-\ufd8f\ufe70-\ufefc\uFDF0-\uFDFD]+', ' ', text)
    return text

nltk.download('stopwords')
ara_punctuations = '''`÷×؛<>_()*&^%][ـ،/:"؟.,'{}~¦+|!”…“–ـ''' + string.punctuation

def remove_punctuations(text):
    translator = str.maketrans('', '', ara_punctuations)
    text = text.translate(translator)
    return text

def remove_tashkeel(text):
    # Normalize common letter variants first, then strip diacritics.
    text = text.strip()
    text = re.sub("[إأٱآا]", "ا", text)
    text = re.sub("ى", "ي", text)
    text = re.sub("ؤ", "ء", text)
    text = re.sub("ئ", "ء", text)
    text = re.sub("ة", "ه", text)
    noise = re.compile(""" ّ    | # Tashdid
                             َ    | # Fatha
                             ً    | # Tanwin Fath
                             ُ    | # Damma
                             ٌ    | # Tanwin Damm
                             ِ    | # Kasra
                             ٍ    | # Tanwin Kasr
                             ْ    | # Sukun
                             ـ     # Tatwil/Kashida
                         """, re.VERBOSE)
    text = re.sub(noise, '', text)
    text = re.sub(r'(.)\1+', r"\1\1", text)  # collapse runs of a repeated character to two
    return araby.strip_tashkeel(text)
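
Illustratively (hypothetical input), the normalization unifies hamza variants and taa marbuta before stripping diacritics:

# "القضيةُ" -> "القضيه": ة becomes ه and the damma is removed.
print(remove_tashkeel("القضيةُ"))
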
arabic_stopwords = stopwords.words("arabic")
def remove_stop_words(text):
    Text = [i for i in str(text).split() if i not in arabic_stopwords]
    return " ".join(Text)

def tokenize_text(text):
    tokens = word_tokenize(text)
    return tokens

def Arabic_Light_Stemmer(text):
    Arabic_Stemmer = ArabicLightStemmer()
    text = [Arabic_Stemmer.light_stem(y) for y in text]
    return " ".join(text)

def preprocess_text(text):
    text = remove_numbers(text)
    text = Removing_non_arabic(text)
    text = remove_punctuations(text)
    text = remove_stop_words(text)
    text = remove_tashkeel(text)
    text = tokenize_text(text)
    text = Arabic_Light_Stemmer(text)
    return text
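
preprocess_text is the same transformation applied to user input before TF-IDF at prediction time; an illustrative call on an arbitrary sample sentence:

# Returns a space-joined string of light stems, with digits, punctuation,
# stopwords and diacritics removed.
print(preprocess_text("المحكمة نظرت في القضية رقم 25 وأصدرت حكمها"))
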
class_mapping = {
    0: "جنائية",        # criminal
    1: "احوال شخصية",   # personal status
    2: "عامة"           # general
}
st.markdown("""
<style>
body {
    background-color: #f0f4f8;
    direction: rtl;
    font-family: 'Arial', sans-serif;
}

.logo-container {
    display: flex;
    justify-content: center;
    align-items: center;
    margin-bottom: 20px;
}

.stTextArea textarea, .stText {
    text-align: right;
}

.stButton>button {
    background-color: #3498db;
    color: white;
    font-family: 'Arial', sans-serif;
}

.stButton>button:hover {
    background-color: #2980b9;
}

h1, h2, h3, h4, h5, h6, .stSubheader {
    text-align: right;
}

.home-title {
    text-align: center;
    font-size: 40px;
    color: #3498db;
}

.home-description {
    text-align: center;
    font-size: 20px;
    color: #2c3e50;
}

.larger-text {
    font-size: 24px;
    color: #2c3e50;
}
</style>
""", unsafe_allow_html=True)

# Function for the Home Page
def home_page():
    # "Welcome to the Wajeez app"
    st.markdown('<h1 class="home-title">مرحبا بك في تطبيق وجيز</h1>', unsafe_allow_html=True)
    # "Wajeez offers classification and summarization of legal texts. Enter a text here to get an accurate classification and a comprehensive summary."
    st.markdown('<p class="home-description">تطبيق وجيز يقدم لك خدمة التصنيف والملخص للنصوص القانونية. يمكنك إدخال النصوص هنا للحصول على تصنيف دقيق وملخص شامل.</p>', unsafe_allow_html=True)


def main_page():
    st.title("صنف ولخص")  # "Classify and Summarize"

    # Input text area
    input_text = st.text_area("ادخل النص", "")  # "Enter the text"

    if st.button('صنف ولخص'):
        if input_text:
            prepro = preprocess_text(input_text)
            features = vectorizer.transform([prepro])
            prediction = model_classify.predict(features)
            classify = prediction[0]
            classify_class = class_mapping.get(classify, "لم يتم التعرف")  # "not recognized"

            # Generate the summarized text
            summarized_text = summarize_text(input_text)

            st.markdown('<p class="larger-text">تصنيف القضية :</p>', unsafe_allow_html=True)  # "Case classification:"
            st.write(classify_class)

            st.markdown('<p class="larger-text">ملخص للقضية :</p>', unsafe_allow_html=True)  # "Case summary:"
            st.write(summarized_text)
def app():
    # Sidebar navigation with the logo inside the sidebar
    with st.sidebar:
        st.markdown('<div class="logo-container">', unsafe_allow_html=True)
        st.image("logo.png", width=200)  # Make sure the logo file is in your app folder
        st.markdown('</div>', unsafe_allow_html=True)

        st.header("تطبيق وجيز")  # "Wajeez app"
        page_selection = st.selectbox("اختر صفحة", ["الرئيسية", " صنف ولخص !"])  # "Choose a page": Home / Classify & Summarize

    if page_selection == "الرئيسية":
        home_page()
    elif page_selection == " صنف ولخص !":
        main_page()

if __name__ == "__main__":
    app()
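
With app.py, logo.png and the two pickles in the Space root, the app runs locally the same way Spaces serves it:

streamlit run app.py
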
logo.png
ADDED
svm_model.pkl
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:56e1780885b58ab910fe9ac58d65ea5f0ddfb81e1527d6e2c0296b39b8a53351
size 1625610
tfidf_vectorizer.pkl
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7a69fa5f5c65c4043d928a2b1350315e12709b89b647340ba86b2c08cacefb0d
size 231319
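
Both .pkl files are committed as Git LFS pointers: the repository stores only the spec version, the object's SHA-256 and its size in bytes, while the binary blobs live in LFS storage.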