Spaces:
Runtime error
Runtime error
aliasgerovs
commited on
Commit
·
20dc449
1
Parent(s):
86676c5
Updated structure
Browse files- __pycache__/ai_generate.cpython-310.pyc +0 -0
- __pycache__/ai_generate.cpython-39.pyc +0 -0
- __pycache__/gptzero_free.cpython-310.pyc +0 -0
- __pycache__/gptzero_free.cpython-39.pyc +0 -0
- __pycache__/humanize.cpython-310.pyc +0 -0
- __pycache__/humanize.cpython-39.pyc +0 -0
- app.py +316 -180
- humanize.py +9 -9
- nohup.out +662 -0
- test.py +394 -8
__pycache__/ai_generate.cpython-310.pyc
CHANGED
Binary files a/__pycache__/ai_generate.cpython-310.pyc and b/__pycache__/ai_generate.cpython-310.pyc differ
|
|
__pycache__/ai_generate.cpython-39.pyc
ADDED
Binary file (1.87 kB). View file
|
|
__pycache__/gptzero_free.cpython-310.pyc
ADDED
Binary file (3.58 kB). View file
|
|
__pycache__/gptzero_free.cpython-39.pyc
ADDED
Binary file (3.58 kB). View file
|
|
__pycache__/humanize.cpython-310.pyc
CHANGED
Binary files a/__pycache__/humanize.cpython-310.pyc and b/__pycache__/humanize.cpython-310.pyc differ
|
|
__pycache__/humanize.cpython-39.pyc
ADDED
Binary file (2.71 kB). View file
|
|
app.py
CHANGED
@@ -1,48 +1,138 @@
|
|
|
|
1 |
import gradio as gr
|
|
|
|
|
2 |
from humanize import paraphrase_text
|
3 |
-
from gradio_client import Client
|
4 |
from ai_generate import generate
|
5 |
import requests
|
6 |
-
import http.client
|
7 |
-
import json
|
8 |
from gptzero_free import GPT2PPL
|
9 |
|
10 |
-
def on_first_button_click():
|
11 |
-
return gr.update(visible=True)
|
12 |
|
13 |
-
def
|
14 |
-
|
15 |
-
|
16 |
-
|
17 |
-
|
18 |
-
|
19 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
20 |
|
21 |
def humanize(
|
22 |
-
text,
|
23 |
-
model,
|
24 |
-
temperature=1.2,
|
25 |
-
repetition_penalty=1,
|
26 |
-
top_k=50,
|
27 |
-
length_penalty=1
|
|
|
28 |
result = paraphrase_text(
|
29 |
-
|
30 |
-
|
31 |
-
|
32 |
-
|
33 |
-
|
34 |
-
|
35 |
)
|
36 |
-
return result
|
37 |
|
38 |
ai_check_options = [
|
39 |
"Polygraf AI",
|
40 |
-
"Sapling AI",
|
41 |
-
|
42 |
-
"GPTZero Free"
|
43 |
]
|
44 |
|
45 |
-
def ai_generated_test_polygraf(text):
|
46 |
url = "http://34.66.10.188/ai-vs-human"
|
47 |
access_key = "6mcemwsFycVVgVjMFwKXki3zJka1r7N4u$Z0Y|x$gecC$hdNtpQf-SpL0+=k;u%BZ"
|
48 |
headers = {
|
@@ -54,7 +144,7 @@ def ai_generated_test_polygraf(text):
|
|
54 |
response = requests.post(url, headers=headers, json=data)
|
55 |
return response.json()
|
56 |
|
57 |
-
def ai_generated_test_sapling(text):
|
58 |
response = requests.post(
|
59 |
"https://api.sapling.ai/api/v1/aidetect",
|
60 |
json={
|
@@ -64,199 +154,245 @@ def ai_generated_test_sapling(text):
|
|
64 |
)
|
65 |
return { "AI" : response.json()['score'], "HUMAN" : 1 - response.json()['score']}
|
66 |
|
67 |
-
def ai_generated_test_copyleak(text):
|
68 |
-
|
69 |
-
conn = http.client.HTTPSConnection("api.copyleaks.com")
|
70 |
-
payload = payload = "{\n \"text\": \"" + text + "\"\n}"
|
71 |
-
|
72 |
-
headers = {
|
73 |
-
'Authorization': "9ea35187-f43a-4dc2-bfdf-25825b78eaf1",
|
74 |
-
'Content-Type': "application/json",
|
75 |
-
'Accept': "application/json"
|
76 |
-
}
|
77 |
-
scanID = "9ea35187-f43a-4dc2-bfdf-25825b78eaf1"
|
78 |
-
conn.request("POST", f"/v2/writer-detector/{scanID}/check", payload, headers)
|
79 |
-
|
80 |
-
res = conn.getresponse()
|
81 |
-
data = res.read()
|
82 |
-
print(data.decode("utf-8"))
|
83 |
-
print(data.decode("utf-8")["results"]["summary"])
|
84 |
-
return data.decode("utf-8")["results"]["summary"]
|
85 |
-
|
86 |
-
|
87 |
-
gptzero_model = GPT2PPL()
|
88 |
|
89 |
def ai_generated_test_gptzero(text):
|
|
|
90 |
result = gptzero_model(text)
|
91 |
print(result)
|
92 |
return result
|
93 |
|
94 |
-
|
95 |
-
def ai_check(text, option):
|
96 |
if option == 'Polygraf AI':
|
97 |
return ai_generated_test_polygraf(text)
|
98 |
elif option == 'Sapling AI':
|
99 |
return ai_generated_test_sapling(text)
|
100 |
-
|
101 |
-
# return ai_generated_test_copyleak(text)
|
102 |
-
elif option == "GPTZero Free":
|
103 |
return ai_generated_test_gptzero(text)
|
104 |
else:
|
105 |
return ai_generated_test_polygraf(text)
|
106 |
|
107 |
-
def update_visibility_api(model):
|
108 |
if model in ['OpenAI GPT 3.5', 'OpenAI GPT 4']:
|
109 |
return gr.update(visible=True)
|
110 |
else:
|
111 |
return gr.update(visible=False)
|
112 |
|
113 |
-
|
114 |
-
|
115 |
-
|
116 |
-
|
117 |
-
|
118 |
-
|
119 |
-
|
120 |
-
|
121 |
-
|
122 |
-
|
123 |
-
|
124 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
125 |
)
|
126 |
-
|
127 |
-
|
128 |
-
|
129 |
-
|
130 |
-
|
131 |
)
|
132 |
-
|
133 |
-
|
134 |
-
|
135 |
-
|
136 |
-
|
137 |
-
|
138 |
-
|
139 |
-
|
140 |
-
|
141 |
-
|
142 |
-
|
143 |
-
|
144 |
-
|
145 |
-
|
146 |
-
|
147 |
-
|
148 |
-
|
149 |
-
|
150 |
-
|
151 |
-
|
152 |
-
|
153 |
-
|
154 |
-
|
155 |
-
|
156 |
-
|
157 |
-
|
158 |
-
|
159 |
-
|
160 |
-
|
161 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
162 |
|
163 |
-
|
164 |
-
|
165 |
-
|
166 |
-
|
167 |
-
|
168 |
-
|
169 |
-
|
170 |
-
|
171 |
-
|
172 |
-
|
173 |
-
|
174 |
-
|
175 |
-
|
176 |
-
|
177 |
-
|
178 |
-
|
179 |
-
|
180 |
-
|
181 |
-
|
182 |
-
|
183 |
-
|
184 |
-
|
185 |
-
|
186 |
-
|
187 |
-
|
188 |
-
|
189 |
-
|
190 |
-
minimum=0.0,
|
191 |
-
maximum=2.0,
|
192 |
-
step=0.1,
|
193 |
-
value=1.0,
|
194 |
-
label="Length Penalty", visible= False
|
195 |
)
|
196 |
-
|
197 |
-
|
|
|
|
|
|
|
|
|
198 |
|
199 |
-
|
200 |
-
fn=
|
201 |
inputs=[
|
202 |
input_topic,
|
203 |
-
|
204 |
-
input_tonality,
|
205 |
input_length,
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
206 |
ai_generator,
|
207 |
input_api
|
208 |
],
|
209 |
-
outputs=[
|
210 |
)
|
211 |
|
212 |
-
|
213 |
-
|
214 |
-
|
215 |
-
|
216 |
-
|
217 |
-
only_ai_check_btn.click(on_first_button_click, inputs=None, outputs=model_dropdown)
|
218 |
-
only_ai_check_btn.click(on_first_button_click, inputs=None, outputs=humanizer_btn)
|
219 |
-
only_ai_check_btn.click(on_first_button_click, inputs=None, outputs=temperature_slider)
|
220 |
-
only_ai_check_btn.click(on_first_button_click, inputs=None, outputs=controls_markdown)
|
221 |
-
only_ai_check_btn.click(on_first_button_click, inputs=None, outputs=top_k_slider)
|
222 |
-
only_ai_check_btn.click(on_first_button_click, inputs=None, outputs=top_token_markdown)
|
223 |
-
only_ai_check_btn.click(on_first_button_click, inputs=None, outputs=repetition_penalty_slider)
|
224 |
-
only_ai_check_btn.click(on_first_button_click, inputs=None, outputs=penalize_repeated_markdown)
|
225 |
-
only_ai_check_btn.click(on_first_button_click, inputs=None, outputs=length_penalty_slider)
|
226 |
-
only_ai_check_btn.click(on_first_button_click, inputs=None, outputs=penalize_markdown)
|
227 |
|
228 |
-
|
229 |
fn=humanize,
|
230 |
inputs=[
|
231 |
-
|
232 |
model_dropdown,
|
233 |
temperature_slider,
|
234 |
repetition_penalty_slider,
|
235 |
top_k_slider,
|
236 |
length_penalty_slider,
|
237 |
],
|
238 |
-
outputs=[
|
239 |
)
|
240 |
|
|
|
|
|
|
|
|
|
|
|
241 |
|
242 |
-
|
243 |
-
fn=ai_check,
|
244 |
-
inputs=[ai_label, ai_detector_dropdown],
|
245 |
-
outputs=[bcLabel],
|
246 |
-
api_name="ai_check",
|
247 |
-
)
|
248 |
-
|
249 |
-
only_ai_check_btn2.click(
|
250 |
-
fn=ai_check,
|
251 |
-
inputs=[ai_label, ai_detector_dropdown2],
|
252 |
-
outputs=[bcLabel2]
|
253 |
-
)
|
254 |
-
|
255 |
-
humanizer_btn.click(on_first_button_click, inputs=None, outputs=humanized_markdown)
|
256 |
-
humanizer_btn.click(on_first_button_click, inputs=None, outputs=output_label)
|
257 |
-
humanizer_btn.click(on_first_button_click, inputs=None, outputs=ai_detector_dropdown2)
|
258 |
-
humanizer_btn.click(on_first_button_click, inputs=None, outputs=only_ai_check_btn2)
|
259 |
-
humanizer_btn.click(on_first_button_click, inputs=None, outputs=bcLabel2)
|
260 |
|
261 |
if __name__ == "__main__":
|
262 |
-
demo
|
|
|
|
1 |
+
import openai
|
2 |
import gradio as gr
|
3 |
+
from typing import Dict, List
|
4 |
+
import re
|
5 |
from humanize import paraphrase_text
|
|
|
6 |
from ai_generate import generate
|
7 |
import requests
|
|
|
|
|
8 |
from gptzero_free import GPT2PPL
|
9 |
|
|
|
|
|
10 |
|
11 |
+
def clean_text(text: str) -> str:
|
12 |
+
paragraphs = text.split('\n\n')
|
13 |
+
cleaned_paragraphs = []
|
14 |
+
for paragraph in paragraphs:
|
15 |
+
cleaned = re.sub(r'\s+', ' ', paragraph).strip()
|
16 |
+
cleaned = re.sub(r'(?<=\.) ([a-z])', lambda x: x.group(1).upper(), cleaned)
|
17 |
+
cleaned_paragraphs.append(cleaned)
|
18 |
+
return '\n'.join(cleaned_paragraphs)
|
19 |
+
|
20 |
+
def format_and_correct(text: str) -> str:
|
21 |
+
"""Correct formatting and grammar without changing content significantly."""
|
22 |
+
prompt = f"""
|
23 |
+
Please correct the formatting, grammar, and spelling errors in the following text without changing its content significantly. Ensure proper paragraph breaks and maintain the original content:
|
24 |
+
{text}
|
25 |
+
"""
|
26 |
+
corrected_text = generate(prompt, "Groq", None)
|
27 |
+
return clean_text(corrected_text)
|
28 |
+
|
29 |
+
|
30 |
+
def generate_prompt(settings: Dict[str, str]) -> str:
|
31 |
+
"""Generate a detailed prompt based on user settings."""
|
32 |
+
prompt = f"""
|
33 |
+
Write a {settings['article_length']} {settings['format']} on {settings['topic']}.
|
34 |
+
|
35 |
+
Style and Tone:
|
36 |
+
- Writing style: {settings['writing_style']}
|
37 |
+
- Tone: {settings['tone']}
|
38 |
+
- Target audience: {settings['user_category']}
|
39 |
+
|
40 |
+
Content:
|
41 |
+
- Depth: {settings['depth_of_content']}
|
42 |
+
- Structure: {', '.join(settings['structure'])}
|
43 |
+
|
44 |
+
Keywords to incorporate:
|
45 |
+
{', '.join(settings['keywords'])}
|
46 |
+
|
47 |
+
Additional requirements:
|
48 |
+
- Include {settings['num_examples']} relevant examples or case studies
|
49 |
+
- Incorporate data or statistics from {', '.join(settings['references'])}
|
50 |
+
- End with a {settings['conclusion_type']} conclusion
|
51 |
+
- Add a "References" section at the end with at least 3 credible sources, formatted as [1], [2], etc.
|
52 |
+
- Do not make any headline, title bold.
|
53 |
+
|
54 |
+
Ensure proper paragraph breaks for better readability.
|
55 |
+
Avoid any references to artificial intelligence, language models, or the fact that this is generated by an AI, and do not mention something like here is the article etc.
|
56 |
+
"""
|
57 |
+
return prompt
|
58 |
+
|
59 |
+
def generate_article(
|
60 |
+
topic: str,
|
61 |
+
keywords: str,
|
62 |
+
article_length: str,
|
63 |
+
format: str,
|
64 |
+
writing_style: str,
|
65 |
+
tone: str,
|
66 |
+
user_category: str,
|
67 |
+
depth_of_content: str,
|
68 |
+
structure: str,
|
69 |
+
references: str,
|
70 |
+
num_examples: str,
|
71 |
+
conclusion_type: str,
|
72 |
+
ai_model: str,
|
73 |
+
api_key: str = None
|
74 |
+
) -> str:
|
75 |
+
"""Generate an article based on user-defined settings."""
|
76 |
+
settings = {
|
77 |
+
"topic": topic,
|
78 |
+
"keywords": [k.strip() for k in keywords.split(',')],
|
79 |
+
"article_length": article_length,
|
80 |
+
"format": format,
|
81 |
+
"writing_style": writing_style,
|
82 |
+
"tone": tone,
|
83 |
+
"user_category": user_category,
|
84 |
+
"depth_of_content": depth_of_content,
|
85 |
+
"structure": [s.strip() for s in structure.split(',')],
|
86 |
+
"references": [r.strip() for r in references.split(',')],
|
87 |
+
"num_examples": num_examples,
|
88 |
+
"conclusion_type": conclusion_type
|
89 |
+
}
|
90 |
+
|
91 |
+
prompt = generate_prompt(settings)
|
92 |
+
|
93 |
+
if ai_model in ['OpenAI GPT 3.5', 'OpenAI GPT 4']:
|
94 |
+
response = openai.ChatCompletion.create(
|
95 |
+
model="gpt-4" if ai_model == 'OpenAI GPT 4' else "gpt-3.5-turbo",
|
96 |
+
messages=[
|
97 |
+
{"role": "system", "content": "You are a professional content writer with expertise in various fields."},
|
98 |
+
{"role": "user", "content": prompt}
|
99 |
+
],
|
100 |
+
max_tokens=3000,
|
101 |
+
n=1,
|
102 |
+
stop=None,
|
103 |
+
temperature=0.7,
|
104 |
+
)
|
105 |
+
article = response.choices[0].message.content.strip()
|
106 |
+
else:
|
107 |
+
article = generate(prompt, ai_model, api_key)
|
108 |
+
|
109 |
+
return clean_text(article)
|
110 |
|
111 |
def humanize(
|
112 |
+
text: str,
|
113 |
+
model: str,
|
114 |
+
temperature: float = 1.2,
|
115 |
+
repetition_penalty: float = 1,
|
116 |
+
top_k: int = 50,
|
117 |
+
length_penalty: float = 1
|
118 |
+
) -> str:
|
119 |
result = paraphrase_text(
|
120 |
+
text=text,
|
121 |
+
model_name=model,
|
122 |
+
temperature=temperature,
|
123 |
+
repetition_penalty=repetition_penalty,
|
124 |
+
top_k=top_k,
|
125 |
+
length_penalty=length_penalty,
|
126 |
)
|
127 |
+
return format_and_correct(result)
|
128 |
|
129 |
ai_check_options = [
|
130 |
"Polygraf AI",
|
131 |
+
# "Sapling AI",
|
132 |
+
"GPTZero"
|
|
|
133 |
]
|
134 |
|
135 |
+
def ai_generated_test_polygraf(text: str) -> Dict:
|
136 |
url = "http://34.66.10.188/ai-vs-human"
|
137 |
access_key = "6mcemwsFycVVgVjMFwKXki3zJka1r7N4u$Z0Y|x$gecC$hdNtpQf-SpL0+=k;u%BZ"
|
138 |
headers = {
|
|
|
144 |
response = requests.post(url, headers=headers, json=data)
|
145 |
return response.json()
|
146 |
|
147 |
+
def ai_generated_test_sapling(text: str) -> Dict:
|
148 |
response = requests.post(
|
149 |
"https://api.sapling.ai/api/v1/aidetect",
|
150 |
json={
|
|
|
154 |
)
|
155 |
return { "AI" : response.json()['score'], "HUMAN" : 1 - response.json()['score']}
|
156 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
157 |
|
158 |
def ai_generated_test_gptzero(text):
|
159 |
+
gptzero_model = GPT2PPL()
|
160 |
result = gptzero_model(text)
|
161 |
print(result)
|
162 |
return result
|
163 |
|
164 |
+
def ai_check(text: str, option: str) -> Dict:
|
|
|
165 |
if option == 'Polygraf AI':
|
166 |
return ai_generated_test_polygraf(text)
|
167 |
elif option == 'Sapling AI':
|
168 |
return ai_generated_test_sapling(text)
|
169 |
+
elif option == "GPTZero":
|
|
|
|
|
170 |
return ai_generated_test_gptzero(text)
|
171 |
else:
|
172 |
return ai_generated_test_polygraf(text)
|
173 |
|
174 |
+
def update_visibility_api(model: str):
|
175 |
if model in ['OpenAI GPT 3.5', 'OpenAI GPT 4']:
|
176 |
return gr.update(visible=True)
|
177 |
else:
|
178 |
return gr.update(visible=False)
|
179 |
|
180 |
+
def format_references(text: str) -> str:
|
181 |
+
"""Extract and format references from the generated text."""
|
182 |
+
lines = text.split('\n')
|
183 |
+
references = []
|
184 |
+
article_text = []
|
185 |
+
in_references = False
|
186 |
+
|
187 |
+
for line in lines:
|
188 |
+
if line.strip().lower() == "references":
|
189 |
+
in_references = True
|
190 |
+
continue
|
191 |
+
if in_references:
|
192 |
+
references.append(line.strip())
|
193 |
+
else:
|
194 |
+
article_text.append(line)
|
195 |
+
|
196 |
+
formatted_refs = []
|
197 |
+
for i, ref in enumerate(references, 1):
|
198 |
+
formatted_refs.append(f"[{i}] {ref}\n")
|
199 |
+
|
200 |
+
return "\n\n".join(article_text) + "\n\nReferences:\n" + "\n".join(formatted_refs)
|
201 |
+
|
202 |
+
def generate_and_format(
|
203 |
+
topic, keywords, article_length, format, writing_style, tone, user_category,
|
204 |
+
depth_of_content, structure, references, num_examples, conclusion_type, ai_model, api_key
|
205 |
+
):
|
206 |
+
article = generate_article(
|
207 |
+
topic, keywords, article_length, format, writing_style, tone, user_category,
|
208 |
+
depth_of_content, structure, references, num_examples, conclusion_type, ai_model, api_key
|
209 |
+
)
|
210 |
+
return format_references(article)
|
211 |
+
|
212 |
+
def copy_to_input(text):
|
213 |
+
return text
|
214 |
+
|
215 |
+
def create_interface():
|
216 |
+
with gr.Blocks(theme=gr.themes.Default(
|
217 |
+
primary_hue=gr.themes.colors.pink,
|
218 |
+
secondary_hue=gr.themes.colors.yellow,
|
219 |
+
neutral_hue=gr.themes.colors.gray
|
220 |
+
)) as demo:
|
221 |
+
gr.Markdown("# Polygraf AI Content Writer", elem_classes="text-center text-3xl mb-6")
|
222 |
+
|
223 |
+
with gr.Row():
|
224 |
+
with gr.Column(scale=2):
|
225 |
+
with gr.Group():
|
226 |
+
gr.Markdown("## Article Configuration", elem_classes="text-xl mb-4")
|
227 |
+
input_topic = gr.Textbox(label="Topic", placeholder="Enter the main topic of your article", elem_classes="input-highlight-pink")
|
228 |
+
input_keywords = gr.Textbox(label="Keywords", placeholder="Enter comma-separated keywords", elem_classes="input-highlight-yellow")
|
229 |
+
|
230 |
+
with gr.Row():
|
231 |
+
input_format = gr.Dropdown(
|
232 |
+
choices=['Article', 'Essay', 'Blog post', 'Report', 'Research paper', 'News article', 'White paper'],
|
233 |
+
value='Article',
|
234 |
+
label="Format",
|
235 |
+
elem_classes="input-highlight-turquoise"
|
236 |
+
)
|
237 |
+
input_length = gr.Dropdown(
|
238 |
+
choices=["Short (500 words)", "Medium (1000 words)", "Long (2000+ words)", "Very Long (3000+ words)"],
|
239 |
+
value="Medium (1000 words)",
|
240 |
+
label="Article Length",
|
241 |
+
elem_classes="input-highlight-pink"
|
242 |
+
)
|
243 |
+
|
244 |
+
with gr.Row():
|
245 |
+
input_writing_style = gr.Dropdown(
|
246 |
+
choices=["Formal", "Informal", "Technical", "Conversational", "Journalistic", "Academic", "Creative"],
|
247 |
+
value="Formal",
|
248 |
+
label="Writing Style",
|
249 |
+
elem_classes="input-highlight-yellow"
|
250 |
+
)
|
251 |
+
input_tone = gr.Dropdown(
|
252 |
+
choices=["Friendly", "Professional", "Neutral", "Enthusiastic", "Skeptical", "Humorous"],
|
253 |
+
value="Professional",
|
254 |
+
label="Tone",
|
255 |
+
elem_classes="input-highlight-turquoise"
|
256 |
+
)
|
257 |
+
|
258 |
+
input_user_category = gr.Dropdown(
|
259 |
+
choices=["Students", "Professionals", "Researchers", "General Public", "Policymakers", "Entrepreneurs"],
|
260 |
+
value="General Public",
|
261 |
+
label="Target Audience",
|
262 |
+
elem_classes="input-highlight-pink"
|
263 |
)
|
264 |
+
input_depth = gr.Dropdown(
|
265 |
+
choices=["Surface-level overview", "Moderate analysis", "In-depth research", "Comprehensive study"],
|
266 |
+
value="Moderate analysis",
|
267 |
+
label="Depth of Content",
|
268 |
+
elem_classes="input-highlight-yellow"
|
269 |
)
|
270 |
+
input_structure = gr.Dropdown(
|
271 |
+
choices=[
|
272 |
+
"Introduction, Body, Conclusion",
|
273 |
+
"Abstract, Introduction, Methods, Results, Discussion, Conclusion",
|
274 |
+
"Executive Summary, Problem Statement, Analysis, Recommendations, Conclusion",
|
275 |
+
"Introduction, Literature Review, Methodology, Findings, Analysis, Conclusion"
|
276 |
+
],
|
277 |
+
value="Introduction, Body, Conclusion",
|
278 |
+
label="Structure",
|
279 |
+
elem_classes="input-highlight-turquoise"
|
280 |
+
)
|
281 |
+
input_references = gr.Dropdown(
|
282 |
+
choices=["Academic journals", "Industry reports", "Government publications", "News outlets", "Expert interviews", "Case studies"],
|
283 |
+
value="News outlets",
|
284 |
+
label="References",
|
285 |
+
elem_classes="input-highlight-pink"
|
286 |
+
)
|
287 |
+
input_num_examples = gr.Dropdown(
|
288 |
+
choices=["1-2", "3-4", "5+"],
|
289 |
+
value="1-2",
|
290 |
+
label="Number of Examples/Case Studies",
|
291 |
+
elem_classes="input-highlight-yellow"
|
292 |
+
)
|
293 |
+
input_conclusion = gr.Dropdown(
|
294 |
+
choices=["Summary", "Call to Action", "Future Outlook", "Thought-provoking Question"],
|
295 |
+
value="Summary",
|
296 |
+
label="Conclusion Type",
|
297 |
+
elem_classes="input-highlight-turquoise"
|
298 |
+
)
|
299 |
+
|
300 |
+
with gr.Group():
|
301 |
+
gr.Markdown("## AI Model Configuration", elem_classes="text-xl mb-4")
|
302 |
+
ai_generator = gr.Dropdown(
|
303 |
+
choices=['Llama 3', 'Groq', 'Mistral', 'Gemma', 'OpenAI GPT 3.5', 'OpenAI GPT 4'],
|
304 |
+
value='Llama 3',
|
305 |
+
label="AI Model",
|
306 |
+
elem_classes="input-highlight-pink"
|
307 |
+
)
|
308 |
+
input_api = gr.Textbox(label="API Key", visible=False)
|
309 |
+
ai_generator.change(update_visibility_api, ai_generator, input_api)
|
310 |
+
|
311 |
+
generate_btn = gr.Button("Generate Article", variant="primary")
|
312 |
|
313 |
+
with gr.Column(scale=3):
|
314 |
+
output_article = gr.Textbox(label="Generated Article", lines=20)
|
315 |
+
|
316 |
+
with gr.Row():
|
317 |
+
with gr.Column():
|
318 |
+
ai_detector_dropdown = gr.Radio(
|
319 |
+
choices=ai_check_options, label="Select AI Detector", value="Polygraf AI")
|
320 |
+
ai_check_btn = gr.Button("AI Check")
|
321 |
+
ai_check_result = gr.Label(label="AI Check Result")
|
322 |
+
|
323 |
+
humanize_btn = gr.Button("Humanize")
|
324 |
+
humanized_output = gr.Textbox(label="Humanized Article", lines=20)
|
325 |
+
copy_to_input_btn = gr.Button("Copy to Input for AI Check")
|
326 |
+
|
327 |
+
with gr.Accordion("Advanced Humanizer Settings", open=False):
|
328 |
+
with gr.Row():
|
329 |
+
model_dropdown = gr.Radio(
|
330 |
+
choices=[
|
331 |
+
"Base Model",
|
332 |
+
"Large Model",
|
333 |
+
"XL Model",
|
334 |
+
# "XL Law Model",
|
335 |
+
# "XL Marketing Model",
|
336 |
+
# "XL Child Style Model",
|
337 |
+
],
|
338 |
+
value="Large Model",
|
339 |
+
label="Humanizer Model Version"
|
|
|
|
|
|
|
|
|
|
|
340 |
)
|
341 |
+
with gr.Row():
|
342 |
+
temperature_slider = gr.Slider(minimum=0.5, maximum=2.0, step=0.1, value=1.2, label="Temperature")
|
343 |
+
top_k_slider = gr.Slider(minimum=0, maximum=300, step=25, value=50, label="Top k")
|
344 |
+
with gr.Row():
|
345 |
+
repetition_penalty_slider = gr.Slider(minimum=1.0, maximum=2.0, step=0.1, value=1, label="Repetition Penalty")
|
346 |
+
length_penalty_slider = gr.Slider(minimum=0.0, maximum=2.0, step=0.1, value=1.0, label="Length Penalty")
|
347 |
|
348 |
+
generate_btn.click(
|
349 |
+
fn=generate_and_format,
|
350 |
inputs=[
|
351 |
input_topic,
|
352 |
+
input_keywords,
|
|
|
353 |
input_length,
|
354 |
+
input_format,
|
355 |
+
input_writing_style,
|
356 |
+
input_tone,
|
357 |
+
input_user_category,
|
358 |
+
input_depth,
|
359 |
+
input_structure,
|
360 |
+
input_references,
|
361 |
+
input_num_examples,
|
362 |
+
input_conclusion,
|
363 |
ai_generator,
|
364 |
input_api
|
365 |
],
|
366 |
+
outputs=[output_article],
|
367 |
)
|
368 |
|
369 |
+
ai_check_btn.click(
|
370 |
+
fn=ai_check,
|
371 |
+
inputs=[output_article, ai_detector_dropdown],
|
372 |
+
outputs=[ai_check_result],
|
373 |
+
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
374 |
|
375 |
+
humanize_btn.click(
|
376 |
fn=humanize,
|
377 |
inputs=[
|
378 |
+
output_article,
|
379 |
model_dropdown,
|
380 |
temperature_slider,
|
381 |
repetition_penalty_slider,
|
382 |
top_k_slider,
|
383 |
length_penalty_slider,
|
384 |
],
|
385 |
+
outputs=[humanized_output],
|
386 |
)
|
387 |
|
388 |
+
copy_to_input_btn.click(
|
389 |
+
fn=copy_to_input,
|
390 |
+
inputs=[humanized_output],
|
391 |
+
outputs=[output_article],
|
392 |
+
)
|
393 |
|
394 |
+
return demo
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
395 |
|
396 |
if __name__ == "__main__":
|
397 |
+
demo = create_interface()
|
398 |
+
demo.launch(server_name="0.0.0.0", share=True)
|
humanize.py
CHANGED
@@ -22,15 +22,15 @@ else:
|
|
22 |
model_config = {
|
23 |
"Base Model": "polygraf-ai/poly-humanizer-base",
|
24 |
"Large Model": "polygraf-ai/poly-humanizer-large",
|
25 |
-
|
26 |
-
|
27 |
-
|
28 |
-
|
29 |
-
|
30 |
-
|
31 |
-
|
32 |
-
|
33 |
-
|
34 |
}
|
35 |
|
36 |
# cache the base models, tokenizers, and adapters
|
|
|
22 |
model_config = {
|
23 |
"Base Model": "polygraf-ai/poly-humanizer-base",
|
24 |
"Large Model": "polygraf-ai/poly-humanizer-large",
|
25 |
+
"XL Model": {
|
26 |
+
"path": "google/flan-t5-xl",
|
27 |
+
"adapters": {
|
28 |
+
"XL Model Adapter": "polygraf-ai/poly-humanizer-XL-adapter",
|
29 |
+
# "XL Law Model Adapter": "polygraf-ai/poly-humanizer-XL-law-adapter",
|
30 |
+
# "XL Marketing Model Adapter": "polygraf-ai/marketing-cleaned-13K-grad-acum-4-full",
|
31 |
+
# "XL Child Style Model Adapter": "polygraf-ai/poly-humanizer-XL-children-adapter-checkpoint-4000",
|
32 |
+
},
|
33 |
+
},
|
34 |
}
|
35 |
|
36 |
# cache the base models, tokenizers, and adapters
|
nohup.out
ADDED
@@ -0,0 +1,662 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
[nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
|
2 |
+
[nltk_data] Package punkt is already up-to-date!
|
3 |
+
Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
|
4 |
+
Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
|
5 |
+
Number of available GPUs: 2
|
6 |
+
Using GPU: 1
|
7 |
+
|
8 |
+
You are using the default legacy behaviour of the <class 'transformers.models.t5.tokenization_t5.T5Tokenizer'>. This is expected, and simply means that the `legacy` (previous) behavior will be used so nothing changes for you. If you want to use the new behaviour, set `legacy=False`. This should only be set if you understand what it means, and thoroughly read the reason why this was added as explained in https://github.com/huggingface/transformers/pull/24565
|
9 |
+
Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
|
10 |
+
2024-07-19 15:06:17.829140: E external/local_xla/xla/stream_executor/cuda/cuda_dnn.cc:9261] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered
|
11 |
+
2024-07-19 15:06:17.829280: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:607] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered
|
12 |
+
2024-07-19 15:06:17.971816: E external/local_xla/xla/stream_executor/cuda/cuda_blas.cc:1515] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered
|
13 |
+
2024-07-19 15:06:18.251726: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.
|
14 |
+
To enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.
|
15 |
+
Loaded adapter: XL Model Adapter, Num. params: 3000752128
|
16 |
+
Loaded adapter: XL Law Model Adapter, Num. params: 3151747072
|
17 |
+
Loaded adapter: XL Marketing Model Adapter, Num. params: 3302742016
|
18 |
+
Loaded adapter: XL Child Style Model Adapter, Num. params: 3453736960
|
19 |
+
Traceback (most recent call last):
|
20 |
+
File "/home/aliasgarov/article_writer/app.py", line 4, in <module>
|
21 |
+
from ai_generate import generate
|
22 |
+
File "/home/aliasgarov/article_writer/ai_generate.py", line 5, in <module>
|
23 |
+
from groq import Groq
|
24 |
+
ModuleNotFoundError: No module named 'groq'
|
25 |
+
[nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
|
26 |
+
[nltk_data] Package punkt is already up-to-date!
|
27 |
+
Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
|
28 |
+
Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
|
29 |
+
Number of available GPUs: 2
|
30 |
+
Using GPU: 1
|
31 |
+
|
32 |
+
You are using the default legacy behaviour of the <class 'transformers.models.t5.tokenization_t5.T5Tokenizer'>. This is expected, and simply means that the `legacy` (previous) behavior will be used so nothing changes for you. If you want to use the new behaviour, set `legacy=False`. This should only be set if you understand what it means, and thoroughly read the reason why this was added as explained in https://github.com/huggingface/transformers/pull/24565
|
33 |
+
Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
|
34 |
+
2024-07-19 15:08:22.269594: E external/local_xla/xla/stream_executor/cuda/cuda_dnn.cc:9261] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered
|
35 |
+
2024-07-19 15:08:22.269757: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:607] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered
|
36 |
+
2024-07-19 15:08:22.403768: E external/local_xla/xla/stream_executor/cuda/cuda_blas.cc:1515] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered
|
37 |
+
2024-07-19 15:08:22.680206: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.
|
38 |
+
To enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.
|
39 |
+
Loaded adapter: XL Model Adapter, Num. params: 3000752128
|
40 |
+
Loaded adapter: XL Law Model Adapter, Num. params: 3151747072
|
41 |
+
Loaded adapter: XL Marketing Model Adapter, Num. params: 3302742016
|
42 |
+
Loaded adapter: XL Child Style Model Adapter, Num. params: 3453736960
|
43 |
+
Traceback (most recent call last):
|
44 |
+
File "/home/aliasgarov/article_writer/app.py", line 4, in <module>
|
45 |
+
from ai_generate import generate
|
46 |
+
File "/home/aliasgarov/article_writer/ai_generate.py", line 7, in <module>
|
47 |
+
groq_client = Groq(
|
48 |
+
File "/opt/conda/lib/python3.10/site-packages/groq/_client.py", line 89, in __init__
|
49 |
+
raise GroqError(
|
50 |
+
groq.GroqError: The api_key client option must be set either by passing api_key to the client or by setting the GROQ_API_KEY environment variable
|
51 |
+
[nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
|
52 |
+
[nltk_data] Package punkt is already up-to-date!
|
53 |
+
Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
|
54 |
+
Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
|
55 |
+
Number of available GPUs: 2
|
56 |
+
Using GPU: 1
|
57 |
+
|
58 |
+
You are using the default legacy behaviour of the <class 'transformers.models.t5.tokenization_t5.T5Tokenizer'>. This is expected, and simply means that the `legacy` (previous) behavior will be used so nothing changes for you. If you want to use the new behaviour, set `legacy=False`. This should only be set if you understand what it means, and thoroughly read the reason why this was added as explained in https://github.com/huggingface/transformers/pull/24565
|
59 |
+
Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
|
60 |
+
2024-07-19 15:19:13.824003: E external/local_xla/xla/stream_executor/cuda/cuda_dnn.cc:9261] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered
|
61 |
+
2024-07-19 15:19:13.824148: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:607] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered
|
62 |
+
2024-07-19 15:19:13.959281: E external/local_xla/xla/stream_executor/cuda/cuda_blas.cc:1515] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered
|
63 |
+
2024-07-19 15:19:14.228951: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.
|
64 |
+
To enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.
|
65 |
+
/opt/conda/lib/python3.10/site-packages/gradio/layouts/column.py:53: UserWarning: 'scale' value should be an integer. Using 0.7 will cause issues.
|
66 |
+
warnings.warn(
|
67 |
+
/opt/conda/lib/python3.10/site-packages/gradio/layouts/column.py:53: UserWarning: 'scale' value should be an integer. Using 0.3 will cause issues.
|
68 |
+
warnings.warn(
|
69 |
+
Matplotlib created a temporary cache directory at /var/tmp/matplotlib-ge4ywtr4 because the default path (/home/aliasgarov/.config/matplotlib) is not a writable directory; it is highly recommended to set the MPLCONFIGDIR environment variable to a writable directory, in particular to speed up the import of Matplotlib and to better support multiprocessing.
|
70 |
+
Loaded adapter: XL Model Adapter, Num. params: 3000752128
|
71 |
+
Loaded adapter: XL Law Model Adapter, Num. params: 3151747072
|
72 |
+
Loaded adapter: XL Marketing Model Adapter, Num. params: 3302742016
|
73 |
+
Loaded adapter: XL Child Style Model Adapter, Num. params: 3453736960
|
74 |
+
IMPORTANT: You are using gradio version 4.26.0, however version 4.29.0 is available, please upgrade.
|
75 |
+
--------
|
76 |
+
Running on local URL: http://0.0.0.0:7890
|
77 |
+
|
78 |
+
To create a public link, set `share=True` in `launch()`.
|
79 |
+
Write a article about the topic in about 200 words using a casual tone: anar bayramov
|
80 |
+
Write a article about the topic in about 200 words using a casual tone: anar bayramov
|
81 |
+
Write a article about the topic in about 200 words using a professional tone: anar bayramov
|
82 |
+
Write a article about the topic in about 200 words using a formal tone: anar bayramov
|
83 |
+
Write a article about the topic in about 200 words using a friendly tone: anar bayramov
|
84 |
+
Write a article about the topic in about 200 words using a casual tone: anar bayramov
|
85 |
+
Write a essay about the topic in about 200 words using a casual tone: anar bayramov
|
86 |
+
Write a essay about the topic in about 200 words using a casual tone: cop29
|
87 |
+
Write a essay about the topic in about 200 words using a professional tone: United States Navy
|
88 |
+
Write a article about the topic in about 200 words using a casual tone: List of historic places in the Chatham Islands
|
89 |
+
Write a article about the topic in about 200 words using a professional tone: List of historic places in the Chatham Islands
|
90 |
+
Write a article about the topic in about 100 words using a professional tone: List of historic places in the Chatham Islands
|
91 |
+
Write a article about the topic in about 100 words using a professional tone: List of historic places in the Chatham Islands
|
92 |
+
|
93 |
+
Original: The Chatham Islands, an archipelago located around 800 kilometers east of New Zealand's South Island, are home to a rich historical and cultural heritage.
|
94 |
+
Paraphrased: About 800 kilometres east of New Zealand's South Island, the chathams a group of volcanic archipelagos, are cultural and historic centers.
|
95 |
+
|
96 |
+
Original: Visitors to these remote islands can explore a variety of historic sites that highlight their unique history and the resilience of the Moriori and European settlers who have called the Chathams home.
|
97 |
+
Paraphrased: For tourists to these vast, isolated islands, diverse historic sites are available that offer an insight into the unique history of the islands, as well as the strength and perseverance of the Moriori and European settlers that dwell on the Chathams.
|
98 |
+
|
99 |
+
Original: Notable historic sites include the Kopinga marae, a traditional Moriori meeting place and one of the few remaining examples of Moriori culture.
|
100 |
+
Paraphrased: Examples include the Kopinga marae, a Moriori funeral place, and one of the last vestiges of Moriori culture.
|
101 |
+
|
102 |
+
Original: The remnants of stone buildings and whaling stations at Waitangi West and Port Hutt offer evidence of the European whalers and sealers who once occupied the islands.
|
103 |
+
Paraphrased: The remains of stone buildings and whaling stations lie close to Waitangi West and Port Hutt that show the presence of the early European whalers and sealers at the islands.
|
104 |
+
|
105 |
+
Original: The Chatham Islands Museum provides a comprehensive overview of the archipelago's history, with collections that include Moriori artifacts, European settler relics, and natural history exhibits.
|
106 |
+
Paraphrased: The Chatham Islands Museum provides an understanding of the history of the archipelago through a collection of history and Moriori artefacts, relics of European settler's history, and natural history displays.
|
107 |
+
|
108 |
+
Original: Other notable sites include historic homesteads, telegraph stations, and graveyards scattered across the islands.
|
109 |
+
Paraphrased: Another is the surviving cottage settlement, telegraph stations and graveyards throughout the islands.
|
110 |
+
|
111 |
+
Original: Travelers interested in history and culture will find plenty to explore in the Chatham Islands.
|
112 |
+
Paraphrased: The Chatham Islands have plenty for those interested in history and culture to explore.
|
113 |
+
Using adapter: XL Model Adapter
|
114 |
+
|
115 |
+
Original: The Chatham Islands, an archipelago located around 800 kilometers east of New Zealand's South Island, are home to a rich historical and cultural heritage.
|
116 |
+
Paraphrased: Chatham Islands () is a sub-arctic archipelago lying approximately 800 kilometres east of the eastern half of New Zealand's South Island, with a rich local cultural and historical legacy.
|
117 |
+
|
118 |
+
Original: Visitors to these remote islands can explore a variety of historic sites that highlight their unique history and the resilience of the Moriori and European settlers who have called the Chathams home.
|
119 |
+
Paraphrased: Throughout these remote islands you may discover a host of places of historical interest that highlight the Chatham's particular features and in some cases, the resilience of their residents, the Moriori, and Europeans settlers who lived there.
|
120 |
+
|
121 |
+
Original: Notable historic sites include the Kopinga marae, a traditional Moriori meeting place and one of the few remaining examples of Moriori culture.
|
122 |
+
Paraphrased: Historic sites include the Kopinga marae, a traditional meeting point or mohelo, which is part of the few existing reminders of Moriori culture.
|
123 |
+
|
124 |
+
Original: The remnants of stone buildings and whaling stations at Waitangi West and Port Hutt offer evidence of the European whalers and sealers who once occupied the islands.
|
125 |
+
Paraphrased: The stone buildings and whaling stations at Waitangi West and at Port Hutt provide evidence of the European and sealer settlers who first inhabited the islands.
|
126 |
+
|
127 |
+
Original: The Chatham Islands Museum provides a comprehensive overview of the archipelago's history, with collections that include Moriori artifacts, European settler relics, and natural history exhibits.
|
128 |
+
Paraphrased: Chatham Islands Museum gives an overview of the archipelago's history and includes examples of Moriori artifacts like that of Tuwhareto when the island was part of the Cook islands, European settler ephemera and natural history expositions.
|
129 |
+
|
130 |
+
Original: Other notable sites include historic homesteads, telegraph stations, and graveyards scattered across the islands.
|
131 |
+
Paraphrased: Other historic sites include historical homesteads, telegraph stations and graveyards dispersed across the islands.
|
132 |
+
|
133 |
+
Original: Travelers interested in history and culture will find plenty to explore in the Chatham Islands.
|
134 |
+
Paraphrased: The Chatham Islands offer significant attractions to history-focused voyagers from around the world.
|
135 |
+
Write a letter about the topic in about 200 words using a humorous tone: Elon Musk
|
136 |
+
|
137 |
+
Original: **A Humorous Open Letter to Elon Musk**
|
138 |
+
|
139 |
+
Dear Elon,
|
140 |
+
|
141 |
+
Hi, it's me, a normal human who still can't believe they have to charge their phone every day.
|
142 |
+
Paraphrased: **a Hilarious open letter to Elon Musk** hello Elon: I am a human and I can't fathom that you charge your phone every single day (like an average human person).
|
143 |
+
|
144 |
+
Original: Meanwhile, you're over there making electric cars, space rockets, and tunnels because, why not?
|
145 |
+
Paraphrased: As if we won't, you here are building electric cars, rockets, tunnels, because you feel like it).
|
146 |
+
|
147 |
+
Original: Listen, I get it.
|
148 |
+
Paraphrased: Come on, I know that.
|
149 |
+
|
150 |
+
Original: You're a genius, billionaire, playboy philanthropist (just like Tony Stark, minus the suit... or do you have one in your garage too?).
|
151 |
+
Paraphrased: [194] You are a genius, playboy, billionaire philanthropist (for instance the same things as Tony Stark, for instance Tony Stark's wardrobe of a suit, did you have a suit in your garage?
|
152 |
+
|
153 |
+
Original: But seriously, can you slow down a bit?
|
154 |
+
Paraphrased: But in all seriousness, let's go slow.
|
155 |
+
|
156 |
+
Original: My brain hurts just thinking about all the things you're doing.
|
157 |
+
Paraphrased: My head hurts from thinking things that you’re doing."
|
158 |
+
|
159 |
+
Original: I mean, have you ever heard of the phrase "too many irons in the fire"?
|
160 |
+
Paraphrased: I mean, do you listen to the catchphrase; too many irons in the fire?
|
161 |
+
|
162 |
+
Original: I think you've got all the irons, and all the fires, and you're just stirring them all up like a mad scientist (not that I'm calling you mad, Elon, you're clearly a genius... have I mentioned that already?).
|
163 |
+
Paraphrased: I personally think you’re wielding all the irons, and making all the fires, and running around a mad scientist for the likes of Elon (not that I’m making you crazy, Elon you idiot... Did I say idiot?
|
164 |
+
|
165 |
+
Original: Anyway, I just wanted to say, thanks for making humanity's collective jaw drop every other week with your innovations.
|
166 |
+
Paraphrased: So basically I just want to say thanks for bringing everyone's jaw dropped every other week with your invention.
|
167 |
+
|
168 |
+
Original: Can you please just take a day off and play some video games or something?
|
169 |
+
Paraphrased: Just go off and play video games or something instead....
|
170 |
+
|
171 |
+
Original: I'm sure the Mars colonization project can wait... for a day.
|
172 |
+
Paraphrased: And the Mars settlement project can wait, for a day and for...
|
173 |
+
|
174 |
+
Original: Cheers,
|
175 |
+
[Your Name]
|
176 |
+
Paraphrased: [Your name] Cheers.
|
177 |
+
Using adapter: XL Law Model Adapter
|
178 |
+
|
179 |
+
Original: **A Humorous Open Letter to Elon Musk**
|
180 |
+
|
181 |
+
Dear Elon,
|
182 |
+
|
183 |
+
Hi, it's me, a normal human who still can't believe they have to charge their phone every day.
|
184 |
+
Paraphrased: **External Oral Communication TO ELOON MUSK**Dear Elon, Hello, I am A Normal human that can’t quite believe that I must now charge my phone every single day.
|
185 |
+
|
186 |
+
Original: Meanwhile, you're over there making electric cars, space rockets, and tunnels because, why not?
|
187 |
+
Paraphrased: All the while you’re out there creating all those electric autos and rocketships and tunnels so why not.
|
188 |
+
|
189 |
+
Original: Listen, I get it.
|
190 |
+
Paraphrased: Listen, I understand.
|
191 |
+
|
192 |
+
Original: You're a genius, billionaire, playboy philanthropist (just like Tony Stark, minus the suit... or do you have one in your garage too?).
|
193 |
+
Paraphrased: and you’re not, like I say, a brainwashed billionaire playboy philanthropist (the kind of genius Tony Stark is... don’t tell me your suit Is in your garage.
|
194 |
+
|
195 |
+
Original: But seriously, can you slow down a bit?
|
196 |
+
Paraphrased: But seriously, please, take a moment?
|
197 |
+
|
198 |
+
Original: My brain hurts just thinking about all the things you're doing.
|
199 |
+
Paraphrased: I'm getting an acrophobia from simply imagining what you're undertaking.
|
200 |
+
|
201 |
+
Original: I mean, have you ever heard of the phrase "too many irons in the fire"?
|
202 |
+
Paraphrased: I mean, there is a catchall phrase, "too many irons in the fire or not enough."
|
203 |
+
|
204 |
+
Original: I think you've got all the irons, and all the fires, and you're just stirring them all up like a mad scientist (not that I'm calling you mad, Elon, you're clearly a genius... have I mentioned that already?).
|
205 |
+
Paraphrased: I think you have all of the irons and all of the fires and you are tossing them around like crazy ass scientist (I don't consider myself crazy; you're absolutely genius... I 'd already said it.)
|
206 |
+
|
207 |
+
Original: Anyway, I just wanted to say, thanks for making humanity's collective jaw drop every other week with your innovations.
|
208 |
+
Paraphrased: So that's what I wanted to say in any case. Thanks for turning the collective mouth of humanity into a waterfall once every two weeks with what you create.
|
209 |
+
|
210 |
+
Original: Can you please just take a day off and play some video games or something?
|
211 |
+
Paraphrased: Can you maybe have an off day like just relax and play video games.
|
212 |
+
|
213 |
+
Original: I'm sure the Mars colonization project can wait... for a day.
|
214 |
+
Paraphrased: Perhaps the Mars colonization program can wait... a day.
|
215 |
+
|
216 |
+
Original: Cheers,
|
217 |
+
[Your Name]
|
218 |
+
Paraphrased: Fine, [Your Name] (sic).
|
219 |
+
Write a letter about the topic in about 200 words using a humorous tone: Elon Musk
|
220 |
+
Write a letter about the topic in about 200 words using a humorous tone: Elon Musk
|
221 |
+
Write a letter about the topic in about 200 words using a humorous tone: Elon Musk
|
222 |
+
|
223 |
+
Original: Dear Esteemed Reader,
|
224 |
+
|
225 |
+
Have you ever heard of the guy who's planning to colonize Mars and sell flamethrowers as a side hustle?
|
226 |
+
Paraphrased: As a side project can you shed the image of the egomaniacal soul set to colonize Mars and buy the right to sell flamethrowers? Dear Esteemed Reader, you are a man after my own heart. Have you heard of the guy who sells these.
|
227 |
+
|
228 |
+
Original: No, it's not a villain from a comic book, it's Elon Musk, the real-life Tony Stark!
|
229 |
+
Paraphrased: No, my friend, it is not some comic book villain; this is Elon Musk, the Tony Stark of the 21st Century.
|
230 |
+
|
231 |
+
Original: Musk, the visionary behind Tesla, SpaceX, and Neuralink, is the man who's making science fiction a reality.
|
232 |
+
Paraphrased: Musk, the man behind Tesla, SpaceX, and Neuralink is the man turning the page from science fiction to reality.
|
233 |
+
|
234 |
+
Original: From electric cars to reusable rockets, Musk's innovations are changing the world as we know it.
|
235 |
+
Paraphrased: With products from e-cars to rockets that can re-use rocket fuel, Musk is revolutionizing modern life.
|
236 |
+
|
237 |
+
Original: And let's not forget about his infamous tweets that keep us all entertained.
|
238 |
+
Paraphrased: And let's not forget his Twitter deeds, which have been keeping us amused.
|
239 |
+
|
240 |
+
Original: Just when we thought Musk couldn't get any more eccentric, he went ahead and sold flamethrowers to the public.
|
241 |
+
Paraphrased: And just when we thought Musk couldn’t get any goofier, he was selling flamethrowers to the public.
|
242 |
+
|
243 |
+
Original: I mean, who needs a flamethrower, right?
|
244 |
+
Paraphrased: I mean... flamethrower -- who needs it, I mean, right?
|
245 |
+
|
246 |
+
Original: But leave it to Musk to make something as absurd as that cool and desirable.
|
247 |
+
Paraphrased: But it’s Musk, it’s his idea to make an over-the-top concept like that not only cool, but actually appealing.
|
248 |
+
|
249 |
+
Original: And let's not forget about his plans to colonize Mars.
|
250 |
+
Paraphrased: Nor should we ignore his commitment to colonizing Mars.
|
251 |
+
|
252 |
+
Original: Musk's ultimate goal is to make humans a multi-planetary species, and he's not afraid to put his money where his mouth is.
|
253 |
+
Paraphrased: Musk remains keen on making humanity a multi-planetary species and is prepared to put his money where his mouth is.
|
254 |
+
|
255 |
+
Original: He's even announced that he's planning to die on Mars - now that's commitment!
|
256 |
+
Paraphrased: He's even committed to dying on Mars - what dedication there!
|
257 |
+
|
258 |
+
Original: But what makes Musk so fascinating isn't just his out-of-this-world ideas; it's his ability to make us believe that anything is possible.
|
259 |
+
Paraphrased: But it’s not just the crazy ideas that make Musk more than a fascinating thought experimenter: It’s how he manages to make us believe anything is possible.
|
260 |
+
|
261 |
+
Original: With each new innovation, Musk pushes the boundaries of what we thought was achievable, and inspires us to dream bigger.
|
262 |
+
Paraphrased: As time goes on and Musk innovates further, he pushes the limits of what people think is possible – and we have to think bigger than that.
|
263 |
+
|
264 |
+
Original: So, here's to Elon Musk, the man who's making boring old Earth seem a little less exciting and a whole lot more ordinary.
|
265 |
+
Paraphrased: So, a toast to Elon Musk, the man making boring old Earth a little less attractive and a lot more mundane for that matter.
|
266 |
+
|
267 |
+
Original: Keep on dreaming, Musk, and maybe one day we'll all be living on Mars!
|
268 |
+
Paraphrased: Keep dreaming Musk! One day we shall all be on Mars.
|
269 |
+
|
270 |
+
Original: Sincerely,
|
271 |
+
[Your Name]
|
272 |
+
|
273 |
+
(P.S.
|
274 |
+
Paraphrased: Dear [Your Name] (PS.
|
275 |
+
|
276 |
+
Original: If you need a ride to Mars, let me know.
|
277 |
+
Paraphrased: If you have any need for a ride somewhere else, just let me know.
|
278 |
+
|
279 |
+
Original: I'll hit up Musk for a discount.)
|
280 |
+
Paraphrased: I’m going to appeal to Musk for an exception.)
|
281 |
+
Using adapter: XL Model Adapter
|
282 |
+
|
283 |
+
Original: Dear Esteemed Reader,
|
284 |
+
|
285 |
+
Have you ever heard of the guy who's planning to colonize Mars and sell flamethrowers as a side hustle?
|
286 |
+
Paraphrased: Dear esteemed reader, do you know who told you that guy wants to build colonies on Mars and sell flamethrowers as a sideline?
|
287 |
+
|
288 |
+
Original: No, it's not a villain from a comic book, it's Elon Musk, the real-life Tony Stark!
|
289 |
+
Paraphrased: Is it Batman, Spider-Man or even the real Toyes as we meet them later in the series?
|
290 |
+
|
291 |
+
Original: Musk, the visionary behind Tesla, SpaceX, and Neuralink, is the man who's making science fiction a reality.
|
292 |
+
[nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
|
293 |
+
[nltk_data] Package punkt is already up-to-date!
|
294 |
+
Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
|
295 |
+
Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
|
296 |
+
Number of available GPUs: 2
|
297 |
+
Using GPU: 1
|
298 |
+
|
299 |
+
You are using the default legacy behaviour of the <class 'transformers.models.t5.tokenization_t5.T5Tokenizer'>. This is expected, and simply means that the `legacy` (previous) behavior will be used so nothing changes for you. If you want to use the new behaviour, set `legacy=False`. This should only be set if you understand what it means, and thoroughly read the reason why this was added as explained in https://github.com/huggingface/transformers/pull/24565
|
300 |
+
Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
|
301 |
+
2024-07-20 06:31:32.665627: E external/local_xla/xla/stream_executor/cuda/cuda_dnn.cc:9261] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered
|
302 |
+
2024-07-20 06:31:32.665764: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:607] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered
|
303 |
+
2024-07-20 06:31:32.797367: E external/local_xla/xla/stream_executor/cuda/cuda_blas.cc:1515] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered
|
304 |
+
2024-07-20 06:31:33.055063: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.
|
305 |
+
To enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.
|
306 |
+
Loaded adapter: XL Model Adapter, Num. params: 3000752128
|
307 |
+
Loaded adapter: XL Law Model Adapter, Num. params: 3151747072
|
308 |
+
Loaded adapter: XL Marketing Model Adapter, Num. params: 3302742016
|
309 |
+
Loaded adapter: XL Child Style Model Adapter, Num. params: 3453736960
|
310 |
+
Traceback (most recent call last):
|
311 |
+
File "/home/aliasgarov/article_writer/app.py", line 4, in <module>
|
312 |
+
from ai_generate import generate
|
313 |
+
File "/home/aliasgarov/article_writer/ai_generate.py", line 7, in <module>
|
314 |
+
groq_client = Groq(
|
315 |
+
File "/opt/conda/lib/python3.10/site-packages/groq/_client.py", line 89, in __init__
|
316 |
+
raise GroqError(
|
317 |
+
groq.GroqError: The api_key client option must be set either by passing api_key to the client or by setting the GROQ_API_KEY environment variable
|
318 |
+
[nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
|
319 |
+
[nltk_data] Package punkt is already up-to-date!
|
320 |
+
Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
|
321 |
+
Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
|
322 |
+
Number of available GPUs: 2
|
323 |
+
Using GPU: 1
|
324 |
+
|
325 |
+
You are using the default legacy behaviour of the <class 'transformers.models.t5.tokenization_t5.T5Tokenizer'>. This is expected, and simply means that the `legacy` (previous) behavior will be used so nothing changes for you. If you want to use the new behaviour, set `legacy=False`. This should only be set if you understand what it means, and thoroughly read the reason why this was added as explained in https://github.com/huggingface/transformers/pull/24565
|
326 |
+
Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
|
327 |
+
2024-07-20 06:34:55.446249: E external/local_xla/xla/stream_executor/cuda/cuda_dnn.cc:9261] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered
|
328 |
+
2024-07-20 06:34:55.446397: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:607] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered
|
329 |
+
2024-07-20 06:34:55.579241: E external/local_xla/xla/stream_executor/cuda/cuda_blas.cc:1515] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered
|
330 |
+
2024-07-20 06:34:55.842812: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.
|
331 |
+
To enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.
|
332 |
+
/opt/conda/lib/python3.10/site-packages/gradio/layouts/column.py:53: UserWarning: 'scale' value should be an integer. Using 0.7 will cause issues.
|
333 |
+
warnings.warn(
|
334 |
+
/opt/conda/lib/python3.10/site-packages/gradio/layouts/column.py:53: UserWarning: 'scale' value should be an integer. Using 0.3 will cause issues.
|
335 |
+
warnings.warn(
|
336 |
+
WARNING: Invalid HTTP request received.
|
337 |
+
WARNING: Invalid HTTP request received.
|
338 |
+
WARNING: Unsupported upgrade request.
|
339 |
+
WARNING: No supported WebSocket library detected. Please use "pip install 'uvicorn[standard]'", or install 'websockets' or 'wsproto' manually.
|
340 |
+
WARNING: Invalid HTTP request received.
|
341 |
+
WARNING: Invalid HTTP request received.
|
342 |
+
WARNING: Invalid HTTP request received.
|
343 |
+
WARNING: Unsupported upgrade request.
|
344 |
+
WARNING: No supported WebSocket library detected. Please use "pip install 'uvicorn[standard]'", or install 'websockets' or 'wsproto' manually.
|
345 |
+
WARNING: Invalid HTTP request received.
|
346 |
+
WARNING: Invalid HTTP request received.
|
347 |
+
WARNING: Invalid HTTP request received.
|
348 |
+
Matplotlib created a temporary cache directory at /var/tmp/matplotlib-1ownn046 because the default path (/home/aliasgarov/.config/matplotlib) is not a writable directory; it is highly recommended to set the MPLCONFIGDIR environment variable to a writable directory, in particular to speed up the import of Matplotlib and to better support multiprocessing.
|
349 |
+
Loaded adapter: XL Model Adapter, Num. params: 3000752128
|
350 |
+
Loaded adapter: XL Law Model Adapter, Num. params: 3151747072
|
351 |
+
Loaded adapter: XL Marketing Model Adapter, Num. params: 3302742016
|
352 |
+
Loaded adapter: XL Child Style Model Adapter, Num. params: 3453736960
|
353 |
+
IMPORTANT: You are using gradio version 4.26.0, however version 4.29.0 is available, please upgrade.
|
354 |
+
--------
|
355 |
+
Running on local URL: http://0.0.0.0:7890
|
356 |
+
|
357 |
+
To create a public link, set `share=True` in `launch()`.
|
358 |
+
Write a article about the topic in about 700 words using a professional tone: AI Impact on SEO rating
|
359 |
+
|
360 |
+
Original: Artificial Intelligence (AI) has become a transformative force in many industries, and digital marketing is no exception.
|
361 |
+
Paraphrased: AI is becoming an innovative powerhouse across a host of industries - including the practice of digital marketing.
|
362 |
+
|
363 |
+
Original: One area that has seen a significant impact from AI is Search Engine Optimization (SEO).
|
364 |
+
Paraphrased: An area that has been particularly affected by AI has turned out to be Search Engine Optimization (SEO).
|
365 |
+
|
366 |
+
Original: SEO is the process of improving a website’s visibility on search engine results pages (SERPs) through optimizing its content, structure, and external links.
|
367 |
+
Paraphrased: SEO is the study of what a website achieves by improving its presence on the SERPs by making it more useful; improving its content, structure, and external links.
|
368 |
+
|
369 |
+
Original: The primary goal of SEO is to attract more organic traffic to a website by improving its search engine ranking.
|
370 |
+
Paraphrased: SEO aims to boost or increase the quantity and quality of organic traffic to a website by improving its placement in search engines.
|
371 |
+
|
372 |
+
Original: In recent years, AI algorithms have become increasingly sophisticated, and search engines have adopted them to improve the accuracy and relevance of search results.
|
373 |
+
Paraphrased: AI algorithms have grown even more sophisticated over the past few years, and increasingly search engines use them to provide more reliable and applicable results.
|
374 |
+
|
375 |
+
Original: As a result, AI is now a crucial factor in SEO strategy.
|
376 |
+
Paraphrased: AI as a result has become a key marketing factor now.
|
377 |
+
|
378 |
+
Original: Here are some ways AI is impacting SEO ratings:
|
379 |
+
|
380 |
+
1.
|
381 |
+
Paraphrased: This is how AI is playing out in SEO rankings: 1.
|
382 |
+
|
383 |
+
Original: Keyword Research
|
384 |
+
|
385 |
+
Keyword research is a fundamental aspect of SEO.
|
386 |
+
Paraphrased: Keyword Research Keyword research is also a major component of Search Engine Optimization (SEO).
|
387 |
+
|
388 |
+
Original: It involves identifying the most relevant keywords for a website and optimizing its content around those keywords.
|
389 |
+
Paraphrased: This is a process of determining search related keywords to a website and bringing content up to these keywords.
|
390 |
+
|
391 |
+
Original: In the past, keyword research was manual and time-consuming, but with the help of AI, it has become much more efficient.
|
392 |
+
Paraphrased: Keyword Research used to be hand-held and time-consuming - and now using AI to provide a new way to extract relevant keywords makes it far faster than ever.
|
393 |
+
|
394 |
+
Original: AI tools like Google Keyword Planner and Moz Keyword Explorer use machine learning algorithms to analyze user search behavior and predict the most relevant keywords for a website.
|
395 |
+
Paraphrased: Artificial intelligence tools such as Google Keyword Planner and Moz Keyword Explorer use machine learning algorithms to analyze the search behavior of people searching on websites and predict search terms most related to one underlying website.
|
396 |
+
|
397 |
+
Original: These tools help SEO professionals make informed decisions about which keywords to target and how to optimize content for those keywords.
|
398 |
+
Paraphrased: These tools also help SEO professionals know what terms to target and how to optimize for those keywords.
|
399 |
+
|
400 |
+
Original: 2.
|
401 |
+
Paraphrased: 2.
|
402 |
+
|
403 |
+
Original: Content Optimization
|
404 |
+
|
405 |
+
AI algorithms can analyze the content of a website and determine its relevance to specific keywords.
|
406 |
+
Paraphrased: All Content Optimization AI algorithms make available the ability to read articles and videos contained on a website, determining their relevance to various keywords.
|
407 |
+
|
408 |
+
Original: These algorithms consider various factors, such as the use of keywords, sentence structure, and the overall context of the content.
|
409 |
+
Paraphrased: These algorithms incorporate variables such as keywords, sentence structurality, and the overall tone of the written material.
|
410 |
+
|
411 |
+
Original: By optimizing content for AI algorithms, businesses can improve their SEO ranking and attract more organic traffic.
|
412 |
+
Paraphrased: By building a proper content for AI algorithms, businesses can use this algorithm to improve their search engine ranking and more organic traffic.
|
413 |
+
|
414 |
+
Original: One way to optimize content for AI algorithms is through natural language processing (NLP).
|
415 |
+
Paraphrased: Natural language processing (NLP) is one key method to optimize content for neural computing algorithms.
|
416 |
+
|
417 |
+
Original: NLP involves using AI to analyze and understand human language.
|
418 |
+
Paraphrased: NLP is the application of AI to read and translate human languages.
|
419 |
+
|
420 |
+
Original: By using NLP techniques, businesses can create content that is easy for AI algorithms to understand and rank higher on SERPs.
|
421 |
+
Paraphrased: By exploiting NLP techniques, organizations can create content that aural computing algorithms can learn and get better rankings on the SERPs.
|
422 |
+
|
423 |
+
Original: 3.
|
424 |
+
Paraphrased: 3.
|
425 |
+
|
426 |
+
Original: Link Building
|
427 |
+
|
428 |
+
Link building is the process of acquiring external links from other websites that point to a business’s website.
|
429 |
+
Paraphrased: Link Building Link building in marketing is the acquisition of external links on other websites that lead to a company’s website.
|
430 |
+
|
431 |
+
Original: Link building is a crucial factor in SEO because search engines view backlinks as a sign of a website’s authority and relevance.
|
432 |
+
Paraphrased: However, building links is also an important part of a SEO strategy and they are looked at by search engines as a indicator of site authority and relevance.
|
433 |
+
|
434 |
+
Original: AI algorithms can analyze the quality and relevance of backlinks and determine their impact on a website’s SEO ranking.
|
435 |
+
Paraphrased: AI algorithms can analyse the strength and relevance of backlinks and see how these influenced a site’s ranking and where they would rank.
|
436 |
+
|
437 |
+
Original: By using AI tools, businesses can identify high-quality websites that are relevant to their industry and acquire backlinks from those websites.
|
438 |
+
Paraphrased: With AI methods, the company can identify highly efficient websites related to their activities and get backlinks from such websites.
|
439 |
+
|
440 |
+
Original: 4.
|
441 |
+
Paraphrased: 4.
|
442 |
+
|
443 |
+
Original: Site Speed and User Experience
|
444 |
+
|
445 |
+
AI algorithms consider site speed and user experience when determining a website’s SEO ranking.
|
446 |
+
Paraphrased: Site Speed and User Experience SEO algorithms consider site speed and user experience in obtaining a website’s SEO ranking.
|
447 |
+
|
448 |
+
Original: Slow site speed and a poor user experience can negatively impact a website’s ranking, while a fast site speed and a positive user experience can improve it.
|
449 |
+
Paraphrased: Fast site speed and user satisfaction can be negative to a rankings, while a fast site speed is positive.
|
450 |
+
|
451 |
+
Original: AI tools like Google Lighthouse can analyze a website’s performance and provide recommendations for improving its site speed and user experience.
|
452 |
+
Paraphrased: Machine intelligence (AI) tools, such as Google Lighthouse, analyze a website and determine suggestions for speed and site user experience improvement.
|
453 |
+
|
454 |
+
Original: By implementing these recommendations, businesses can improve their SEO ranking and attract more organic traffic.
|
455 |
+
Paraphrased: Businesses gain greater visibility by following these recommendations and getting more organic traffic.
|
456 |
+
|
457 |
+
Original: 5.
|
458 |
+
Paraphrased: 5.
|
459 |
+
|
460 |
+
Original: Personalization
|
461 |
+
|
462 |
+
AI algorithms can personalize search results based on a user’s search history, location, and other factors.
|
463 |
+
Paraphrased: Personalization AI algorithms can create different search results reflecting a user's search history, location, etc.
|
464 |
+
|
465 |
+
Original: By optimizing content for personalization, businesses can improve their SEO ranking and attract more targeted traffic.
|
466 |
+
WARNING: Invalid HTTP request received.
|
467 |
+
WARNING: Invalid HTTP request received.
|
468 |
+
WARNING: Invalid HTTP request received.
|
469 |
+
Paraphrased: People can be more specific when they have personally tailored content and generating improved online search ranking.
|
470 |
+
|
471 |
+
Original: One way to optimize content for personalization is through the use of long-tail keywords.
|
472 |
+
Paraphrased: Personalization: One way to personalize content involves long tail keywords.
|
473 |
+
|
474 |
+
Original: Long-tail keywords are specific phrases that are more likely to be used by users who are further along in the buying process.
|
475 |
+
Paraphrased: Long term keywords are precise words that users who were far down the buying funnel tend to use.
|
476 |
+
|
477 |
+
Original: By targeting long-tail keywords, businesses can create content that is more relevant to their target audience and improve their SEO ranking.
|
478 |
+
Paraphrased: With long-tail keywords business content must be created more suited towards your target market as well as ranking higher in SEO.
|
479 |
+
|
480 |
+
Original: In conclusion, AI is having a significant impact on SEO ratings.
|
481 |
+
Paraphrased: Finally, ie, AI is making a big impact on ranks and results in SEO.
|
482 |
+
|
483 |
+
Original: By using AI tools and techniques, businesses can improve their keyword research, content optimization, link building, site speed, and user experience.
|
484 |
+
Paraphrased: Business can use AI tools and techniques to analyze search keywords, find out more about the content on the web, improve the content development, speed up websites and much more to increase customers experiences.
|
485 |
+
|
486 |
+
Original: Additionally, AI algorithms allow for personalized search results, which can improve a website’s SEO ranking and attract more targeted traffic.
|
487 |
+
Paraphrased: Furthermore, AI algorithms enable personalized search results that could improve the SERP ranking of a website and increase targeted traffic.
|
488 |
+
|
489 |
+
Original: By embracing AI, businesses can stay ahead of the curve and maintain a competitive edge in the ever-evolving digital marketing landscape.
|
490 |
+
Paraphrased: AI is improving on how, but the business can also gain competitive advantage in the dynamic digital marketing industry by adopting it as many firms are doing.
|
491 |
+
Write a article about the topic in about 200 words using a casual tone: ai
|
492 |
+
Write a article about the topic in about 200 words using a casual tone: ai impact on seo ratings
|
493 |
+
Write a article about the topic in about 200 words using a casual tone: machine learning
|
494 |
+
|
495 |
+
Original: Sure thing!
|
496 |
+
Paraphrased: Sure!
|
497 |
+
|
498 |
+
Original: Here's a casual article about machine learning:
|
499 |
+
|
500 |
+
Have you ever heard of machine learning before?
|
501 |
+
Paraphrased: Here a casual article of machine learning: have you heard of machine learning?
|
502 |
+
|
503 |
+
Original: It's a type of artificial intelligence (AI) that allows computers to "learn" from data and make predictions or decisions without being explicitly programmed to do so.
|
504 |
+
Paraphrased: It is an alternative type of artificial intelligence (AI), in an artificial computer system that allows computers to "learn" from observations of things and make projections or decisions without being explicitly training its hardware.
|
505 |
+
|
506 |
+
Original: Machine learning is used in a variety of applications, from recommending products to you on Amazon or Netflix, to helping self-driving cars navigate the roads, to detecting fraud and spam.
|
507 |
+
Paraphrased: The different types of applications in which machine learning is used include suggestions for products on Amazon or Netflix, navigating roads by using autonomous automobiles, detecting fraud and spam.
|
508 |
+
|
509 |
+
Original: It's a powerful tool that can help businesses and organizations make better decisions and improve their products and services.
|
510 |
+
Paraphrased: It’s an incredible tool can help companies and organisations make good choices and improve products and services.
|
511 |
+
|
512 |
+
Original: There are different types of machine learning, including supervised learning, unsupervised learning, and reinforcement learning.
|
513 |
+
Paraphrased: Machine learning can be supervised, unsupervised, reinforcement, etc.
|
514 |
+
|
515 |
+
Original: In supervised learning, the computer is given labeled data and the correct answer, and it tries to learn the relationship between the input and the output.
|
516 |
+
Paraphrased: In supervised learning, the computer is given labeled data and the correct solution and seeks to know the relationship between the input and the solution.
|
517 |
+
|
518 |
+
Original: In unsupervised learning, the computer is given unlabeled data and it tries to find patterns and structure on its own.
|
519 |
+
Paraphrased: In unsupervised learning, there is unlabeled data and the computer searches for structure and patterns by itself.
|
520 |
+
|
521 |
+
Original: In reinforcement learning, the computer learns by interacting with its environment and receiving feedback in the form of rewards or penalties.
|
522 |
+
Paraphrased: Reinforcement Learning is when a computer learns by interacting with its environment as well as receiving feedback in some form (rewarded or punishing) when the computer moves.
|
523 |
+
|
524 |
+
Original: To get started with machine learning, you'll need some basic knowledge of programming and statistics.
|
525 |
+
Paraphrased: You must have some practical knowledge of operating and statistics before you can start machine learning.
|
526 |
+
|
527 |
+
Original: There are also many tools and libraries available that make it easier to build and train machine learning models, such as scikit-learn for Python.
|
528 |
+
Paraphrased: There are also many ML tools and libraries that allow machine learning models to be built and trained easier, such as scikit-learn for Python.
|
529 |
+
|
530 |
+
Original: Machine learning is a rapidly growing field, and there are many exciting opportunities for those who are interested in it.
|
531 |
+
Paraphrased: machine learning is an accelerating field, there are many exciting opportunities for those interested in the field.
|
532 |
+
|
533 |
+
Original: Whether you're a student, a professional, or just a curious learner, machine learning is definitely worth exploring.
|
534 |
+
Paraphrased: Machine learning is the ultimate resource for education, for professionals, or any curious learner and any student alike.
|
535 |
+
|
536 |
+
Original: Who knows, you might be the one to make the next big breakthrough in this fascinating field!
|
537 |
+
Paraphrased: What can I say, you may have a little to play with in this enthralling field!|<; |-:; |-:- |-:- @-dannya
|
538 |
+
Write a article about the topic in about 350 words using a professional tone: Importance of privacy and DLP solutions in the age of AI. Why we need to consider DLP and data privacy and how important it is for organizations to consider zero-trust environments. With references provided.
|
539 |
+
Using adapter: XL Model Adapter
|
540 |
+
|
541 |
+
Original: Title: The Imperative of Privacy and DLP Solutions in the Age of AI
|
542 |
+
|
543 |
+
In the era of artificial intelligence (AI), data privacy has become an increasingly critical concern for organizations globally.
|
544 |
+
Paraphrased: Title: An Empirical View of Privacy and DLP Solutions Within AI As data privacy has become a increasingly priority for all firms in the 21st century.
|
545 |
+
|
546 |
+
Original: With the exponential growth in data volumes and complexity, the importance of implementing effective Data Leakage Prevention (DLP) solutions cannot be overstated.
|
547 |
+
Paraphrased: Thanks to the staggering number of data volumes and types, the necessity of adopting effective Data Leakage Prevention (DLP) solutions cannot be ignored anytime soon.
|
548 |
+
|
549 |
+
Original: Moreover, adopting zero-trust environments is essential to safeguard against breaches that threaten data confidentiality, integrity, and availability.
|
550 |
+
Paraphrased: Additionally, the consideration of zero trust environments are vital to protect against data confidentiality, integrity and availability.
|
551 |
+
|
552 |
+
Original: Data leakage can have disastrous consequences for both individuals and organizations, resulting in reputational damage, financial liabilities, and legal implications.
|
553 |
+
Paraphrased: Individuals or organizations could suffer serious consequences if someone or something leaks data, including irreducible collateral damage to reputations (including financial liabilities and even legal claims), wrongful prosecution, or legal action (including an arrest warrant being issued to an affected person).
|
554 |
+
|
555 |
+
Original: A study by IBM Security identified that the average cost of a data breach in 2020 was approximately $3.86 million [1].
|
556 |
+
Paraphrased: IBM Security estimated average cost of breach when it examined 2020 cost to be about $3.86 million [1] in.
|
557 |
+
|
558 |
+
Original: Therefore, organizations must prioritize DLP solutions as part of their comprehensive cybersecurity strategies.
|
559 |
+
Paraphrased: As such, organizations should consider DLP solutions as part of extended cybersecurity strategies.
|
560 |
+
|
561 |
+
Original: The increasing reliance on AI technologies amplifies these risks by creating new vectors for data exposure.
|
562 |
+
Paraphrased: Thus, AI technology’s ubiquity reinforces these risks by increasing the number of vectors for their exposure.
|
563 |
+
|
564 |
+
Original: For instance, AI-driven analytics and predictive modeling require access to large datasets, which if not properly secured, can lead to unauthorized access and data exfiltration.
|
565 |
+
Paraphrased: For instance, AI-based analytics and predictive modeling take heavy use of high- volume data, and unless this data is properly secured there is the risk of unauthorized access and data exfiltration.
|
566 |
+
|
567 |
+
Original: Moreover, AI systems themselves are vulnerable to attacks, such as data poisoning attacks, where attackers manipulate training data to compromise model accuracy or confidentiality [2].
|
568 |
+
Paraphrased: On top of this, AI systems themselves can be targeted with attacks.[n] Typical data poisoning attacks, in which an attacker alters training data in such a way as to mislead the model or cause model confidentiality to be violated, are well known. [2]
|
569 |
+
|
570 |
+
Original: To mitigate these risks, organizations must adopt a zero-trust approach towards data access and handling.
|
571 |
+
Paraphrased: To mitigate these risks, organisations must take a zero trust approach to data accessibility and management practices.
|
572 |
+
|
573 |
+
Original: Zero-trust environments assume that no user or device, whether within or outside an organization, can be fully trusted.
|
574 |
+
Paraphrased: Zero-trust environments assume that no user or device be fully trusted both within and outside an organization.
|
575 |
+
|
576 |
+
Original: This approach involves implementing strict access controls, multi-factor authentication, regular security audits, and continuous monitoring for potential breaches [3].
|
577 |
+
Paraphrased: This strategy requires strict access control policies, multi factor authentication, biweekly security assessments and continuous monitoring of vulnerability [3].
|
578 |
+
|
579 |
+
Original: DLP solutions play a vital role in zero-trust environments by identifying, classifying, and protecting sensitive data.
|
580 |
+
Paraphrased: Zero-trust environments are dependent on DLP solutions that detect, classify and secure sensitive data.
|
581 |
+
|
582 |
+
Original: These solutions employ various techniques, including anomaly detection algorithms, data loss prevention tools, and content inspection systems, to prevent data leakage or unauthorized access [4].
|
583 |
+
Paraphrased: On a common path, these solutions use anomaly detection algorithms, tools such as data loss prevention and content inspection systems, to prevent the leakage or unauthorized access of sensitive data [4].
|
584 |
+
|
585 |
+
Original: In essence, DLP solutions act as an additional layer of defense against potential breaches, ensuring that data remains private and secure.
|
586 |
+
Paraphrased: Ultimately, it can be said that DLP solutions offer a second layer of protection over a targeted breach and guard a security-sensitive data.
|
587 |
+
|
588 |
+
Original: In conclusion, as we advance into the age of AI, the importance of privacy and DLP solutions in safeguarding data privacy cannot be overstated.
|
589 |
+
Paraphrased: In summary, privacy and DLP solutions on the protection of data privacy are vital for our current era of AI.
|
590 |
+
|
591 |
+
Original: Adopting a zero-trust environment requires organizations to rethink their security strategies and prioritize DLP solutions to protect against data breaches.
|
592 |
+
Paraphrased: In a zero trust environment it is more important for enterprises to rethink their approaches from security to DLP while aiming to safeguard their data from data breaches.
|
593 |
+
|
594 |
+
Original: In the face of escalating cyber threats, it is imperative to ensure that data remains private, confidential, and secure.
|
595 |
+
Paraphrased: In an age when hackers are going out of their way to get to your private and private data there is much more of a need to ensure this kind of data remains private, confidential, and secured."
|
596 |
+
|
597 |
+
Original: References:
|
598 |
+
|
599 |
+
[1] IBM Security.
|
600 |
+
Paraphrased: [1] IBM Security.
|
601 |
+
|
602 |
+
Original: (2020).
|
603 |
+
Paraphrased: (2020).
|
604 |
+
|
605 |
+
Original: Cost of a Data Breach Report.
|
606 |
+
Paraphrased: Insight, The Pricing of Data Breach Report:.
|
607 |
+
|
608 |
+
Original: [2] Chen, L., Zhang, D., & Wang, Y.
|
609 |
+
Paraphrased: [2] Chen, L., Zhang, D. & Wang, Y.
|
610 |
+
|
611 |
+
Original: (2018).
|
612 |
+
Paraphrased: (2018).
|
613 |
+
|
614 |
+
Original: Machine Learning for Security: Machine Learning for Data Poisoning Detection.
|
615 |
+
Paraphrased: Security ML: ML Detection of Data Poisoning.
|
616 |
+
|
617 |
+
Original: [3] NIST Cybersecurity Framework.
|
618 |
+
Paraphrased: [3] NIST Cybersecurity Framework.
|
619 |
+
|
620 |
+
Original: (2020).
|
621 |
+
Paraphrased: (2020).
|
622 |
+
|
623 |
+
Original: Framework for Improving Critical Infrastructure Cybersecurity.
|
624 |
+
Paraphrased: Framework for Critical Infrastructure Cybersecurity Improvement (CIS).
|
625 |
+
|
626 |
+
Original: [4] Kaspersky Lab.
|
627 |
+
Paraphrased: [4] Kaspersky Lab.
|
628 |
+
|
629 |
+
Original: (2020).
|
630 |
+
Paraphrased: (2020).
|
631 |
+
|
632 |
+
Original: Data Protection Strategy for the Digital Age.
|
633 |
+
Paraphrased: Data Protection For the Digital Age.
|
634 |
+
Write a article about the topic in about 200 words using a professional tone: talk about national security and AI and how to avoid cyber threats
|
635 |
+
Write a article about the topic in about 200 words using a professional tone: talk about national security and AI and how to avoid cyber threats
|
636 |
+
Write a article about the topic in about 200 words using a professional tone: talk about national security and AI and how to avoid cyber threats
|
637 |
+
Write a article about the topic in about 200 words using a professional tone: talk about national security and AI and how to avoid cyber threats
|
638 |
+
Write a article about the topic in about 200 words using a professional tone: talk about national security and AI and how to avoid cyber threats
|
639 |
+
Write a article about the topic in about 200 words using a professional tone: An LCL filter
|
640 |
+
|
641 |
+
Original: **Understanding the LCL Filter: A Power Quality Solution**
|
642 |
+
|
643 |
+
In industrial power systems, maintaining power quality is essential to ensure the reliable operation of equipment and minimize downtime.
|
644 |
+
Paraphrased: **Limited Computation Knowledge: An LCL Filter **In industrial power systems, quality of power must be maintained to provide a dependable operation of equipment and reduce down time.
|
645 |
+
|
646 |
+
Original: One essential component in achieving this goal is the LCL (Inductor-Capacitor-Inductor) filter.
|
647 |
+
Paraphrased: One of the important elements in accomplishing this end is the LCL (Inductor-Capacitor-Inductor) filter.
|
648 |
+
|
649 |
+
Original: This article provides an overview of the LCL filter, its working principle, and its benefits in improving power quality.
|
650 |
+
Paraphrased: An introduction is described to the LCL filter, its principle and application.
|
651 |
+
|
652 |
+
Original: The LCL filter is a passive harmonic filter designed to mitigate harmonics in power systems.
|
653 |
+
Paraphrased: The LCL filter is a passive harmonic filter that suppresses power harmonics.
|
654 |
+
|
655 |
+
Original: It consists of a combination of inductors and capacitors that work together to filter out unwanted harmonics and improve the overall power quality.
|
656 |
+
Paraphrased: It employs a multiple circuit configuration of inductors and capacitors that decouple undesirable harmonics from the supply and provide increased power quality.
|
657 |
+
|
658 |
+
Original: The filter is typically installed between the nonlinear load and the power source, where it absorbs harmonics and prevents them from propagating back into the grid.
|
659 |
+
Paraphrased: Generally the filter is designed to be between the nonlinear load and the power source, where the harmonics are collected through the filter and stopped from returning to the grid back through.
|
660 |
+
|
661 |
+
Original: The LCL filter operates on the principle of resonance, where the inductors and capacitors are tuned to resonate at specific frequencies, allowing the filter to effectively absorb harmonics.
|
662 |
+
WARNING: Invalid HTTP request received.
|
test.py
CHANGED
@@ -1,12 +1,398 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
import requests
|
2 |
-
from
|
3 |
|
4 |
-
|
5 |
-
|
6 |
-
|
7 |
-
|
8 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
9 |
}
|
10 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
11 |
|
12 |
-
|
|
|
|
|
|
1 |
+
import openai
|
2 |
+
import gradio as gr
|
3 |
+
from typing import Dict, List
|
4 |
+
import re
|
5 |
+
from humanize import paraphrase_text
|
6 |
+
from ai_generate import generate
|
7 |
import requests
|
8 |
+
from gptzero_free import GPT2PPL
|
9 |
|
10 |
+
|
11 |
+
def clean_text(text: str) -> str:
|
12 |
+
paragraphs = text.split('\n\n')
|
13 |
+
cleaned_paragraphs = []
|
14 |
+
for paragraph in paragraphs:
|
15 |
+
cleaned = re.sub(r'\s+', ' ', paragraph).strip()
|
16 |
+
cleaned = re.sub(r'(?<=\.) ([a-z])', lambda x: x.group(1).upper(), cleaned)
|
17 |
+
cleaned_paragraphs.append(cleaned)
|
18 |
+
return '\n'.join(cleaned_paragraphs)
|
19 |
+
|
20 |
+
def format_and_correct(text: str) -> str:
    """Correct formatting, grammar and spelling via the Groq backend without
    significantly changing the content, then normalize whitespace."""
    # NOTE: the f-string's exact whitespace is part of the prompt sent to the model.
    prompt = f"""
    Please correct the formatting, grammar, and spelling errors in the following text without changing its content significantly. Ensure proper paragraph breaks and maintain the original content:
    {text}
    """
    return clean_text(generate(prompt, "Groq", None))
|
28 |
+
|
29 |
+
|
30 |
+
def generate_prompt(settings: Dict[str, str]) -> str:
    """Generate a detailed prompt based on user settings.

    *settings* carries the UI selections (topic, keywords list, length,
    format, style, tone, audience, depth, structure list, references list,
    example count, conclusion type) assembled by ``generate_article``.
    """
    # NOTE: the triple-quoted f-string's exact indentation/newlines are part
    # of the prompt text sent to the model — do not re-flow it.
    prompt = f"""
    Write a {settings['article_length']} {settings['format']} on {settings['topic']}.

    Style and Tone:
    - Writing style: {settings['writing_style']}
    - Tone: {settings['tone']}
    - Target audience: {settings['user_category']}

    Content:
    - Depth: {settings['depth_of_content']}
    - Structure: {', '.join(settings['structure'])}

    Keywords to incorporate:
    {', '.join(settings['keywords'])}

    Additional requirements:
    - Include {settings['num_examples']} relevant examples or case studies
    - Incorporate data or statistics from {', '.join(settings['references'])}
    - End with a {settings['conclusion_type']} conclusion
    - Add a "References" section at the end with at least 3 credible sources, formatted as [1], [2], etc.
    - Do not make any headline, title bold.

    Ensure proper paragraph breaks for better readability.
    Avoid any references to artificial intelligence, language models, or the fact that this is generated by an AI, and do not mention something like here is the article etc.
    """
    return prompt
|
58 |
+
|
59 |
+
def generate_article(
    topic: str,
    keywords: str,
    article_length: str,
    format: str,
    writing_style: str,
    tone: str,
    user_category: str,
    depth_of_content: str,
    structure: str,
    references: str,
    num_examples: str,
    conclusion_type: str,
    ai_model: str,
    api_key: str = None
) -> str:
    """Generate an article based on user-defined settings.

    Comma-separated string inputs (keywords, structure, references) are split
    into lists; the assembled settings feed ``generate_prompt``. OpenAI models
    are called directly, everything else is delegated to ``generate``.
    Returns the article text normalized by ``clean_text``.
    """
    settings = {
        "topic": topic,
        "keywords": [k.strip() for k in keywords.split(',')],
        "article_length": article_length,
        "format": format,
        "writing_style": writing_style,
        "tone": tone,
        "user_category": user_category,
        "depth_of_content": depth_of_content,
        "structure": [s.strip() for s in structure.split(',')],
        "references": [r.strip() for r in references.split(',')],
        "num_examples": num_examples,
        "conclusion_type": conclusion_type
    }

    prompt = generate_prompt(settings)

    if ai_model in ['OpenAI GPT 3.5', 'OpenAI GPT 4']:
        # Bug fix: the UI collects an API key for OpenAI models but it was
        # never handed to the client — configure it here when provided.
        if api_key:
            openai.api_key = api_key
        response = openai.ChatCompletion.create(
            model="gpt-4" if ai_model == 'OpenAI GPT 4' else "gpt-3.5-turbo",
            messages=[
                {"role": "system", "content": "You are a professional content writer with expertise in various fields."},
                {"role": "user", "content": prompt}
            ],
            max_tokens=3000,
            n=1,
            stop=None,
            temperature=0.7,
        )
        article = response.choices[0].message.content.strip()
    else:
        article = generate(prompt, ai_model, api_key)

    return clean_text(article)
|
110 |
+
|
111 |
+
def humanize(
    text: str,
    model: str,
    temperature: float = 1.2,
    repetition_penalty: float = 1,
    top_k: int = 50,
    length_penalty: float = 1
) -> str:
    """Paraphrase *text* with the selected humanizer model, then run the
    result through grammar/formatting correction.

    *model* is the humanizer model label from the UI (e.g. "Large Model");
    the sampling knobs are forwarded unchanged to ``paraphrase_text``.
    """
    result = paraphrase_text(
        text=text,
        model_name=model,
        temperature=temperature,
        repetition_penalty=repetition_penalty,
        top_k=top_k,
        length_penalty=length_penalty,
    )
    # Second pass fixes grammar/formatting artifacts the paraphraser introduces.
    return format_and_correct(result)
|
128 |
+
|
129 |
+
# AI-detector backends selectable in the UI ("Sapling AI" is currently disabled;
# ai_check() still supports it if re-enabled).
ai_check_options = [
    "Polygraf AI",
    # "Sapling AI",
    "GPTZero"
]
|
134 |
+
|
135 |
+
def ai_generated_test_polygraf(text: str) -> Dict:
    """POST *text* to the Polygraf AI-vs-human endpoint and return its JSON verdict."""
    url = "http://34.66.10.188/ai-vs-human"
    # SECURITY NOTE(review): access key is hard-coded in source over plain
    # HTTP — move it to an environment variable / secret store.
    access_key = "6mcemwsFycVVgVjMFwKXki3zJka1r7N4u$Z0Y|x$gecC$hdNtpQf-SpL0+=k;u%BZ"
    headers = {
        "ACCESS_KEY": access_key
    }
    data = {
        "text" : f"{text}"
    }
    # NOTE(review): no timeout or status-code handling; a failed request
    # raises here or in .json().
    response = requests.post(url, headers=headers, json=data)
    return response.json()
|
146 |
+
|
147 |
+
def ai_generated_test_sapling(text: str) -> Dict:
    """Score *text* with the Sapling AI-detector API.

    Returns {"AI": p, "HUMAN": 1 - p} where p is Sapling's AI-likelihood score.
    """
    # SECURITY NOTE(review): API key is hard-coded in source; move to configuration.
    response = requests.post(
        "https://api.sapling.ai/api/v1/aidetect",
        json={
            "key": "60L9BPSVPIIOEZM0CD1DQWRBPJIUR7SB",
            "text": f"{text}"
        }
    )
    # Decode the response body once (the original called .json() twice,
    # re-parsing the payload for each dict entry).
    score = response.json()['score']
    return {"AI": score, "HUMAN": 1 - score}
|
156 |
+
|
157 |
+
|
158 |
+
def ai_generated_test_gptzero(text):
    """Run the local GPTZero (GPT-2 perplexity) detector on *text* and return its verdict."""
    detector = GPT2PPL()
    verdict = detector(text)
    # Kept for parity with the original: echoes the raw verdict to stdout.
    print(verdict)
    return verdict
|
163 |
+
|
164 |
+
def ai_check(text: str, option: str) -> Dict:
    """Route *text* to the AI detector named by *option*.

    Unknown options fall back to the Polygraf detector, matching the
    original if/elif chain's else branch.
    """
    detectors = {
        'Polygraf AI': ai_generated_test_polygraf,
        'Sapling AI': ai_generated_test_sapling,
        'GPTZero': ai_generated_test_gptzero,
    }
    checker = detectors.get(option, ai_generated_test_polygraf)
    return checker(text)
|
173 |
+
|
174 |
+
def update_visibility_api(model: str):
    """Show the API-key textbox only when an OpenAI model is selected."""
    is_openai = model in ('OpenAI GPT 3.5', 'OpenAI GPT 4')
    return gr.update(visible=is_openai)
|
179 |
+
|
180 |
+
def format_references(text: str) -> str:
    """Extract and renumber the references section of a generated article.

    Everything after a line reading "References" (case-insensitive, optional
    trailing colon) is treated as reference entries; entries are renumbered
    as [1], [2], ... and reattached under a "References:" heading.
    """
    lines = text.split('\n')
    references = []
    article_text = []
    in_references = False

    for line in lines:
        # Accept "References" or "References:" as the section heading.
        if line.strip().rstrip(':').lower() == "references":
            in_references = True
            continue
        if in_references:
            references.append(line.strip())
        else:
            article_text.append(line)

    formatted_refs = []
    counter = 1
    for ref in references:
        # Bug fix: blank lines used to be numbered as empty entries.
        if not ref:
            continue
        # Bug fix: strip any pre-existing "[n]" marker (the generation prompt
        # asks for them) so entries are not double-numbered.
        ref = re.sub(r'^\[\d+\]\s*', '', ref)
        formatted_refs.append(f"[{counter}] {ref}\n")
        counter += 1

    return "\n\n".join(article_text) + "\n\nReferences:\n" + "\n".join(formatted_refs)
|
201 |
+
|
202 |
+
def generate_and_format(
    topic, keywords, article_length, format, writing_style, tone, user_category,
    depth_of_content, structure, references, num_examples, conclusion_type, ai_model, api_key
):
    """Generate an article from the UI settings, then renumber its references section."""
    raw_article = generate_article(
        topic, keywords, article_length, format, writing_style, tone, user_category,
        depth_of_content, structure, references, num_examples, conclusion_type, ai_model, api_key
    )
    return format_references(raw_article)
|
211 |
+
|
212 |
+
def copy_to_input(text):
    """Identity passthrough used to copy the humanized output back into the
    generated-article box for another AI check."""
    return text
|
214 |
+
|
215 |
+
def create_interface():
    """Build the Gradio Blocks UI: article configuration, generation,
    AI-detection and humanizer panels, plus their event wiring.

    Returns the assembled ``gr.Blocks`` app (launched by the caller).
    """
    with gr.Blocks(theme=gr.themes.Default(
        primary_hue=gr.themes.colors.pink,
        secondary_hue=gr.themes.colors.yellow,
        neutral_hue=gr.themes.colors.gray
    )) as demo:
        gr.Markdown("# Polygraf AI Content Writer", elem_classes="text-center text-3xl mb-6")

        with gr.Row():
            with gr.Column(scale=2):
                # --- Left column: article configuration inputs ---
                with gr.Group():
                    gr.Markdown("## Article Configuration", elem_classes="text-xl mb-4")
                    input_topic = gr.Textbox(label="Topic", placeholder="Enter the main topic of your article", elem_classes="input-highlight-pink")
                    input_keywords = gr.Textbox(label="Keywords", placeholder="Enter comma-separated keywords", elem_classes="input-highlight-yellow")

                    with gr.Row():
                        input_format = gr.Dropdown(
                            choices=['Article', 'Essay', 'Blog post', 'Report', 'Research paper', 'News article', 'White paper'],
                            value='Article',
                            label="Format",
                            elem_classes="input-highlight-turquoise"
                        )
                        input_length = gr.Dropdown(
                            choices=["Short (500 words)", "Medium (1000 words)", "Long (2000+ words)", "Very Long (3000+ words)"],
                            value="Medium (1000 words)",
                            label="Article Length",
                            elem_classes="input-highlight-pink"
                        )

                    with gr.Row():
                        input_writing_style = gr.Dropdown(
                            choices=["Formal", "Informal", "Technical", "Conversational", "Journalistic", "Academic", "Creative"],
                            value="Formal",
                            label="Writing Style",
                            elem_classes="input-highlight-yellow"
                        )
                        input_tone = gr.Dropdown(
                            choices=["Friendly", "Professional", "Neutral", "Enthusiastic", "Skeptical", "Humorous"],
                            value="Professional",
                            label="Tone",
                            elem_classes="input-highlight-turquoise"
                        )

                    input_user_category = gr.Dropdown(
                        choices=["Students", "Professionals", "Researchers", "General Public", "Policymakers", "Entrepreneurs"],
                        value="General Public",
                        label="Target Audience",
                        elem_classes="input-highlight-pink"
                    )
                    input_depth = gr.Dropdown(
                        choices=["Surface-level overview", "Moderate analysis", "In-depth research", "Comprehensive study"],
                        value="Moderate analysis",
                        label="Depth of Content",
                        elem_classes="input-highlight-yellow"
                    )
                    input_structure = gr.Dropdown(
                        choices=[
                            "Introduction, Body, Conclusion",
                            "Abstract, Introduction, Methods, Results, Discussion, Conclusion",
                            "Executive Summary, Problem Statement, Analysis, Recommendations, Conclusion",
                            "Introduction, Literature Review, Methodology, Findings, Analysis, Conclusion"
                        ],
                        value="Introduction, Body, Conclusion",
                        label="Structure",
                        elem_classes="input-highlight-turquoise"
                    )
                    input_references = gr.Dropdown(
                        choices=["Academic journals", "Industry reports", "Government publications", "News outlets", "Expert interviews", "Case studies"],
                        value="News outlets",
                        label="References",
                        elem_classes="input-highlight-pink"
                    )
                    input_num_examples = gr.Dropdown(
                        choices=["1-2", "3-4", "5+"],
                        value="1-2",
                        label="Number of Examples/Case Studies",
                        elem_classes="input-highlight-yellow"
                    )
                    input_conclusion = gr.Dropdown(
                        choices=["Summary", "Call to Action", "Future Outlook", "Thought-provoking Question"],
                        value="Summary",
                        label="Conclusion Type",
                        elem_classes="input-highlight-turquoise"
                    )

                # --- Generator model selection; API-key box appears only for OpenAI ---
                with gr.Group():
                    gr.Markdown("## AI Model Configuration", elem_classes="text-xl mb-4")
                    ai_generator = gr.Dropdown(
                        choices=['Llama 3', 'Groq', 'Mistral', 'Gemma', 'OpenAI GPT 3.5', 'OpenAI GPT 4'],
                        value='Llama 3',
                        label="AI Model",
                        elem_classes="input-highlight-pink"
                    )
                    input_api = gr.Textbox(label="API Key", visible=False)
                    ai_generator.change(update_visibility_api, ai_generator, input_api)

                generate_btn = gr.Button("Generate Article", variant="primary")

            with gr.Column(scale=3):
                # --- Right column: output, detection, and humanizer controls ---
                output_article = gr.Textbox(label="Generated Article", lines=20)

                with gr.Row():
                    with gr.Column():
                        ai_detector_dropdown = gr.Radio(
                            choices=ai_check_options, label="Select AI Detector", value="Polygraf AI")
                        ai_check_btn = gr.Button("AI Check")
                        ai_check_result = gr.Label(label="AI Check Result")

                humanize_btn = gr.Button("Humanize")
                humanized_output = gr.Textbox(label="Humanized Article", lines=20)
                copy_to_input_btn = gr.Button("Copy to Input for AI Check")

                with gr.Accordion("Advanced Humanizer Settings", open=False):
                    with gr.Row():
                        model_dropdown = gr.Radio(
                            choices=[
                                "Base Model",
                                "Large Model",
                                "XL Model",
                                # "XL Law Model",
                                # "XL Marketing Model",
                                # "XL Child Style Model",
                            ],
                            value="Large Model",
                            label="Humanizer Model Version"
                        )
                    with gr.Row():
                        temperature_slider = gr.Slider(minimum=0.5, maximum=2.0, step=0.1, value=1.2, label="Temperature")
                        top_k_slider = gr.Slider(minimum=0, maximum=300, step=25, value=50, label="Top k")
                    with gr.Row():
                        repetition_penalty_slider = gr.Slider(minimum=1.0, maximum=2.0, step=0.1, value=1, label="Repetition Penalty")
                        length_penalty_slider = gr.Slider(minimum=0.0, maximum=2.0, step=0.1, value=1.0, label="Length Penalty")

        # --- Event wiring ---
        generate_btn.click(
            fn=generate_and_format,
            inputs=[
                input_topic,
                input_keywords,
                input_length,
                input_format,
                input_writing_style,
                input_tone,
                input_user_category,
                input_depth,
                input_structure,
                input_references,
                input_num_examples,
                input_conclusion,
                ai_generator,
                input_api
            ],
            outputs=[output_article],
        )

        ai_check_btn.click(
            fn=ai_check,
            inputs=[output_article, ai_detector_dropdown],
            outputs=[ai_check_result],
        )

        humanize_btn.click(
            fn=humanize,
            inputs=[
                output_article,
                model_dropdown,
                temperature_slider,
                repetition_penalty_slider,
                top_k_slider,
                length_penalty_slider,
            ],
            outputs=[humanized_output],
        )

        copy_to_input_btn.click(
            fn=copy_to_input,
            inputs=[humanized_output],
            outputs=[output_article],
        )

    return demo
|
395 |
|
396 |
+
# Script entry point: build the UI and serve it on all interfaces with a
# public Gradio share link.
if __name__ == "__main__":
    demo = create_interface()
    demo.launch(server_name="0.0.0.0", share=True)
|