akoksal committed on
Commit 2c3dc94
1 Parent(s): e08a33a

Update app.py

Files changed (1)
  1. app.py +3 -4
app.py CHANGED
@@ -8,15 +8,14 @@ tokenizer = AutoTokenizer.from_pretrained("akoksal/LongForm-OPT-2.7B")
 generate = pipeline('text-generation', model='akoksal/LongForm-OPT-2.7B', tokenizer=tokenizer)


-def predict(instruction, topp, max_new_tokens, temperature):
+def predict(instruction, topp, max_length, temperature):
     if "[EOI]" not in instruction:
         instruction = instruction + " [EOI]"
     x = generate(instruction,
                  do_sample=True,
                  top_p=topp,
                  num_return_sequences=1,
-                 max_new_tokens=max_new_tokens,
-                 max_length=None,
+                 max_length=max_length,
                  temperature=temperature
                  )[0]["generated_text"]

@@ -24,7 +23,7 @@ def predict(instruction, topp, max_new_tokens, temperature):

 iface = gr.Interface(fn=predict, inputs=["text",
                                          gr.inputs.Slider(0, 2, default=0.90, label="top_p"),
-                                         gr.inputs.Slider(0, 512, default=64, label="max_new_tokens"),
+                                         gr.inputs.Slider(0, 512, default=64, label="max_length"),
                                          gr.inputs.Slider(0, 1, default=1, label="temperature")
                                          ],
                        outputs="text")
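
For context, the functional effect of this commit is that the Gradio slider value is now forwarded to the pipeline as max_length instead of max_new_tokens: in Hugging Face transformers, max_length caps the prompt and the generated continuation together, while max_new_tokens caps only the newly generated tokens. Below is a minimal sketch of the updated call path, assuming the same model and tokenizer IDs that appear in the diff; the instruction string and parameter values are illustrative only, not part of the commit.

# Minimal sketch of the updated generation call (requires downloading the model).
from transformers import AutoTokenizer, pipeline

tokenizer = AutoTokenizer.from_pretrained("akoksal/LongForm-OPT-2.7B")
generate = pipeline("text-generation",
                    model="akoksal/LongForm-OPT-2.7B",
                    tokenizer=tokenizer)

# The [EOI] marker separates the instruction from the expected output,
# mirroring what predict() appends in app.py.
instruction = "Write a short paragraph about meditation. [EOI]"  # hypothetical prompt

# After this commit the slider value is passed as max_length, which limits
# prompt + generated tokens combined (max_new_tokens would limit only the
# continuation).
result = generate(instruction,
                  do_sample=True,
                  top_p=0.9,
                  num_return_sequences=1,
                  max_length=64,
                  temperature=1.0)[0]["generated_text"]
print(result)

One practical consequence of the switch: with max_length=64, a long instruction leaves correspondingly fewer tokens for the generated answer, whereas the previous max_new_tokens setting allowed up to 64 new tokens regardless of prompt length.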