akoksal committed
Commit b7eb50d
1 Parent(s): ce08085
Files changed (1)
  1. app.py +7 -6
app.py CHANGED
@@ -2,10 +2,10 @@ import gradio as gr
 from transformers import AutoTokenizer, pipeline
 import torch
 
-tokenizer1 = AutoTokenizer.from_pretrained("notexist/ttt")
-tdk1 = pipeline('text-generation', model='notexist/ttt', tokenizer=tokenizer1)
-tokenizer2 = AutoTokenizer.from_pretrained("notexist/ttt2")
-tdk2 = pipeline('text-generation', model='notexist/ttt2', tokenizer=tokenizer2)
+tokenizer1 = AutoTokenizer.from_pretrained("notexist/ttt2")
+tdk1 = pipeline('text-generation', model='notexist/ttt2', tokenizer=tokenizer1)
+tokenizer2 = AutoTokenizer.from_pretrained("notexist/ttte")
+tdk2 = pipeline('text-generation', model='notexist/ttte', tokenizer=tokenizer2)
 
 def predict(name, sl, topk, topp):
     if name == "":
@@ -17,7 +17,8 @@ def predict(name, sl, topk, topp):
             num_return_sequences=1,
             repetition_penalty=sl
         )[0]["generated_text"]
-        x2 = tdk1(f"<|endoftext|>",
+        new_name = x1[len(f"<|endoftext|>"):x1.index("\n\n")]
+        x2 = tdk2(f"<|endoftext|>{new_name}\n\n",
             do_sample=True,
             max_length=64,
             top_k=topk,
@@ -26,7 +27,7 @@ def predict(name, sl, topk, topp):
             repetition_penalty=sl
         )[0]["generated_text"]
 
-        return x1[len(f"<|endoftext|>"):]+"\n\n"+x2[len(f"<|endoftext|>"):]
+        return x1[len(f"<|endoftext|>"):]+"\n\n"+x2[len(f"<|endoftext|>{new_name}\n\n"):]+"\n\n"+new_name
     else:
         x1 = tdk1(f"<|endoftext|>{name}\n\n",
             do_sample=True,
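
For reference, a minimal self-contained sketch of the chained generation this commit sets up: tdk1 (now notexist/ttt2) is sampled from a bare <|endoftext|> prompt, the generated name is cut out up to the first blank line, and that name seeds tdk2 (notexist/ttte). Model IDs and generation arguments are taken from the diff; the helper name generate_pair and the slider defaults are assumptions, and the sampling arguments of the first call are assumed to match the x2 call shown above.

from transformers import AutoTokenizer, pipeline

# Hypothetical standalone sketch of the two-stage generation; not the exact app.py code.
tokenizer1 = AutoTokenizer.from_pretrained("notexist/ttt2")
tdk1 = pipeline("text-generation", model="notexist/ttt2", tokenizer=tokenizer1)
tokenizer2 = AutoTokenizer.from_pretrained("notexist/ttte")
tdk2 = pipeline("text-generation", model="notexist/ttte", tokenizer=tokenizer2)

def generate_pair(sl=1.3, topk=50, topp=0.95):  # hypothetical defaults for the UI sliders
    # Stage 1: the first model invents an entry from a bare <|endoftext|> prompt.
    x1 = tdk1(
        "<|endoftext|>",
        do_sample=True,
        max_length=64,
        top_k=topk,
        top_p=topp,
        num_return_sequences=1,
        repetition_penalty=sl,
    )[0]["generated_text"]

    # The generated name is the text between the prompt token and the first blank line.
    new_name = x1[len("<|endoftext|>"):x1.index("\n\n")]

    # Stage 2: the second model continues from that name, as in the new x2 call.
    x2 = tdk2(
        f"<|endoftext|>{new_name}\n\n",
        do_sample=True,
        max_length=64,
        top_k=topk,
        top_p=topp,
        num_return_sequences=1,
        repetition_penalty=sl,
    )[0]["generated_text"]

    # Strip the prompts so only generated text is returned, mirroring the new return line.
    return x1[len("<|endoftext|>"):], x2[len(f"<|endoftext|>{new_name}\n\n"):], new_name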