han1997 committed
Commit 5cc230a · verified
1 Parent(s): 4d71493

Update app.py

Files changed (1)
  1. app.py +8 -6
app.py CHANGED
@@ -16,21 +16,23 @@ from PIL import Image
 
 import time
 
-subprocess.check_call([sys.executable, '-m', 'pip', 'install', 'packaging'])
-subprocess.check_call([sys.executable, '-m', 'pip', 'install', 'ninja'])
-subprocess.check_call([sys.executable, '-m', 'pip', 'install', 'mamba-ssm'])
-subprocess.check_call([sys.executable, '-m', 'pip', 'install', 'causal-conv1d'])
+# install packages for mamba
+def install():
+    print("Install personal packages", flush=True)
+    subprocess.run(shlex.split("pip install causal_conv1d-1.2.0.post1-cp310-cp310-linux_x86_64.whl"))
+    subprocess.run(shlex.split("pip install mamba_ssm-1.2.0.post1-cp310-cp310-linux_x86_64.whl"))
+
+install()
 
 from cobra import load
 vlm = load("cobra+3b")
 
 if torch.cuda.is_available():
     DEVICE = "cuda"
-    DTYPE = torch.float32
+    DTYPE = torch.bfloat16
 else:
     DEVICE = "cpu"
     DTYPE = torch.float32
-vlm.enable_mixed_precision_training = False
 vlm.to(DEVICE, dtype=DTYPE)
 
 prompt_builder = vlm.get_prompt_builder()
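
In effect, the commit replaces the from-source pip installs of mamba-ssm and causal-conv1d with prebuilt cp310 Linux wheels shipped alongside app.py, and switches the GPU dtype from float32 to bfloat16. Below is a minimal sketch of that startup pattern, not the committed code itself: it assumes the two wheel files named in the diff sit next to app.py, and it calls pip through sys.executable with check=True, which the commit does not do.

import subprocess
import sys

import torch

# Assumption: these wheel filenames (taken from the diff) are present in the working directory.
WHEELS = [
    "causal_conv1d-1.2.0.post1-cp310-cp310-linux_x86_64.whl",
    "mamba_ssm-1.2.0.post1-cp310-cp310-linux_x86_64.whl",
]

def install_local_wheels():
    # Installing prebuilt wheels avoids compiling the CUDA extensions at startup.
    for wheel in WHEELS:
        subprocess.run([sys.executable, "-m", "pip", "install", wheel], check=True)

install_local_wheels()

# bfloat16 on GPU reduces memory use; CPU falls back to float32.
if torch.cuda.is_available():
    DEVICE, DTYPE = "cuda", torch.bfloat16
else:
    DEVICE, DTYPE = "cpu", torch.float32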