nisten committed (verified)
Commit 673bbef · Parent(s): 720352d

Update app.py

Files changed (1): app.py (+6, -6)
app.py CHANGED
@@ -5,11 +5,11 @@ import subprocess
 import sys
 
 # Force install the specific transformers version from the GitHub PR
-subprocess.check_call([sys.executable, "-m", "pip", "install", "--force-reinstall", "--no-deps", "git+https://github.com/Muennighoff/transformers.git@olmoe"])
+subprocess.check_call([sys.executable, "-m", "pip", "install", "-U", "--force-reinstall", "accelerate", "git+https://github.com/Muennighoff/transformers.git@olmoe"])
 
 from transformers import OlmoeForCausalLM, AutoTokenizer
 
-model_name = "allenai/OLMoE-1B-7B-0924-Instruct"
+model_name = "allenai/OLMoE-1B-7B-0924"
 
 # Wrap model loading in a try-except block to handle potential errors
 try:
@@ -17,7 +17,7 @@ try:
     model = OlmoeForCausalLM.from_pretrained(
         model_name,
         trust_remote_code=True,
-        torch_dtype=torch.float16 if DEVICE == "cuda" else torch.float32,
+        torch_dtype=torch.bfloat16 if DEVICE == "cuda" else torch.float32,
         low_cpu_mem_usage=True,
         device_map="auto"
     )
@@ -55,7 +55,7 @@ def generate_response(message, history, temperature, max_new_tokens):
 
 css = """
 #output {
-    height: 500px;
+    height: 900px;
     overflow: auto;
     border: 1px solid #ccc;
 }
@@ -85,5 +85,5 @@ with gr.Blocks(css=css) as demo:
     clear.click(lambda: None, None, chatbot, queue=False)
 
 if __name__ == "__main__":
-    demo.queue(api_open=False)
-    demo.launch(debug=True, show_api=False)
+    demo.queue(api_open=True)
+    demo.launch(debug=True, show_api=True, share=True)
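
For orientation, here is a minimal sketch of how the updated loading path plausibly reads once this commit is applied. Only model_name and the from_pretrained call are taken from the diff; the DEVICE constant, the tokenizer load, and the except handler fall outside the shown hunks, so their form below is an assumption.

import torch
from transformers import OlmoeForCausalLM, AutoTokenizer

# Assumption: DEVICE is derived from CUDA availability (its definition is outside the shown hunks)
DEVICE = "cuda" if torch.cuda.is_available() else "cpu"

model_name = "allenai/OLMoE-1B-7B-0924"

# Wrap model loading in a try-except block to handle potential errors (as in app.py)
try:
    model = OlmoeForCausalLM.from_pretrained(
        model_name,
        trust_remote_code=True,
        # this commit switches the GPU dtype from float16 to bfloat16
        torch_dtype=torch.bfloat16 if DEVICE == "cuda" else torch.float32,
        low_cpu_mem_usage=True,
        device_map="auto",  # needs the accelerate package
    )
    tokenizer = AutoTokenizer.from_pretrained(model_name)  # assumed; not shown in the hunks
except Exception as e:
    # assumed handler; the real except body is outside the diff
    print(f"Failed to load {model_name}: {e}")
    raise

device_map="auto" relies on accelerate, which is presumably why the install command now force-reinstalls accelerate alongside the OLMoE branch of transformers and drops --no-deps.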
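
The last hunk also opens up the Space's API surface. A short annotated version of those two lines, describing standard Gradio option semantics rather than anything specific to this repo:

if __name__ == "__main__":
    # api_open=True lets requests hit the queue's REST endpoints directly
    demo.queue(api_open=True)
    # show_api=True surfaces the API docs link in the app footer;
    # share=True asks Gradio for a temporary public gradio.live URL in addition to the local server
    demo.launch(debug=True, show_api=True, share=True)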