Daniel Fried committed · Commit 460cb94 · 1 Parent(s): 0b73ae7

change model

modules/app.py +4 -4

modules/app.py CHANGED
@@ -6,9 +6,11 @@ import os
 if os.path.exists('use_normal_tokenizers'):
     import tokenizers
     BIG_MODEL = False
+    CUDA = False
 else:
     import tokenizers_patch
-    BIG_MODEL = True
+    BIG_MODEL = False
+    CUDA = True
 from transformers import AutoModelForCausalLM, AutoTokenizer
 import json
 
@@ -29,10 +31,8 @@ MAX_LENGTH = 256+64
 TRUNCATION_MESSAGE = f'warning: This demo is limited to {MAX_LENGTH} tokens in the document for efficiency.'
 
 if BIG_MODEL:
-    CUDA = True
-    model_name = "./incoder-6B"
+    model_name = "facebook/incoder-6B"
 else:
-    CUDA = False
     model_name = "facebook/incoder-1B"
 
 from fastapi import FastAPI, Request
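
The hunks above only touch configuration flags; the code that actually loads the model sits further down in modules/app.py and is not part of this diff. After this commit the two concerns are decoupled: the tokenizers_patch branch keeps BIG_MODEL = False but sets CUDA = True, and the 6B path now points at the facebook/incoder-6B hub checkpoint instead of a local ./incoder-6B directory. As a rough sketch of how BIG_MODEL, CUDA, and model_name are typically consumed with the transformers imports shown in the diff (the exact calls below are an assumption for illustration, not taken from app.py):

# Illustrative sketch only: how the flags set in the diff might feed model loading.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

BIG_MODEL = False                 # as set in the new else-branch of the diff
CUDA = torch.cuda.is_available()  # assumption; the diff hard-codes True/False per branch

model_name = "facebook/incoder-6B" if BIG_MODEL else "facebook/incoder-1B"

tokenizer = AutoTokenizer.from_pretrained(model_name)
if CUDA:
    # fp16 roughly halves memory use, which matters for the 6B checkpoint on a single GPU.
    model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.float16).cuda()
else:
    model = AutoModelForCausalLM.from_pretrained(model_name)
model.eval()  # inference only; the demo performs no gradient updates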