Spaces:
Running
Running
hanhainebula
committed on
Commit
·
f36d14b
1
Parent(s):
85255fd
update backend code
Browse files
- app.py +13 -1
- src/backend.py +2 -5
- src/envs.py +2 -0
app.py
CHANGED
@@ -6,7 +6,7 @@ import multiprocessing
|
|
6 |
from src.backend import pull_search_results
|
7 |
from src.envs import (
|
8 |
API, REPO_ID, START_COMMIT_ID,
|
9 |
-
LOG_DIR, HF_CACHE_DIR,
|
10 |
HF_SEARCH_RESULTS_REPO_DIR, HF_EVAL_RESULTS_REPO_DIR,
|
11 |
UNZIP_TARGET_DIR,
|
12 |
TIME_DURATION,
|
@@ -14,6 +14,13 @@ from src.envs import (
|
|
14 |
)
|
15 |
|
16 |
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
17 |
|
18 |
|
19 |
def restart_space():
|
@@ -67,6 +74,11 @@ if __name__ == "__main__":
|
|
67 |
lines=20,
|
68 |
interactive=False,
|
69 |
)
|
|
|
|
|
|
|
|
|
|
|
70 |
refresh_button = gr.Button("Refresh log files")
|
71 |
|
72 |
log_file_dropdown.change(
|
|
|
6 |
from src.backend import pull_search_results
|
7 |
from src.envs import (
|
8 |
API, REPO_ID, START_COMMIT_ID,
|
9 |
+
LOG_DIR, LOG_FILE_PATH, HF_CACHE_DIR,
|
10 |
HF_SEARCH_RESULTS_REPO_DIR, HF_EVAL_RESULTS_REPO_DIR,
|
11 |
UNZIP_TARGET_DIR,
|
12 |
TIME_DURATION,
|
|
|
14 |
)
|
15 |
|
16 |
logger = logging.getLogger(__name__)
|
17 |
+
logging.basicConfig(
|
18 |
+
filename=LOG_FILE_PATH,
|
19 |
+
filemode='w',
|
20 |
+
level=logging.WARNING,
|
21 |
+
datefmt='%Y-%m-%d %H:%M:%S',
|
22 |
+
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
|
23 |
+
)
|
24 |
|
25 |
|
26 |
def restart_space():
|
|
|
74 |
lines=20,
|
75 |
interactive=False,
|
76 |
)
|
77 |
+
log_file_list_box = gr.Textbox(
|
78 |
+
label="\n".join(get_log_files()),
|
79 |
+
lines=20,
|
80 |
+
interactive=False,
|
81 |
+
)
|
82 |
refresh_button = gr.Button("Refresh log files")
|
83 |
|
84 |
log_file_dropdown.change(
|
src/backend.py
CHANGED
@@ -13,15 +13,13 @@ from air_benchmark.evaluation_utils.evaluator import Evaluator
|
|
13 |
|
14 |
from src.envs import (
|
15 |
API,
|
16 |
-
|
17 |
SEARCH_RESULTS_REPO, RESULTS_REPO
|
18 |
)
|
19 |
|
20 |
-
log_file = os.path.join(LOG_DIR, f"backend_{time.strftime('%Y-%m-%d_%H-%M-%S')}.log")
|
21 |
-
|
22 |
logger = logging.getLogger(__name__)
|
23 |
logging.basicConfig(
|
24 |
-
filename=
|
25 |
filemode='w',
|
26 |
level=logging.WARNING,
|
27 |
datefmt='%Y-%m-%d %H:%M:%S',
|
@@ -105,7 +103,6 @@ def get_file_list(dir_path: str, allowed_suffixes: List[str] = None) -> List[str
|
|
105 |
|
106 |
def get_zip_file_path(zip_file_name: str):
|
107 |
zip_file_path = None
|
108 |
-
# logger.warning(f"File list: {os.listdir(ZIP_CACHE_DIR)}")
|
109 |
for root, _, files in os.walk(ZIP_CACHE_DIR):
|
110 |
for file in files:
|
111 |
if file == zip_file_name:
|
|
|
13 |
|
14 |
from src.envs import (
|
15 |
API,
|
16 |
+
LOG_FILE_PATH, ZIP_CACHE_DIR,
|
17 |
SEARCH_RESULTS_REPO, RESULTS_REPO
|
18 |
)
|
19 |
|
|
|
|
|
20 |
logger = logging.getLogger(__name__)
|
21 |
logging.basicConfig(
|
22 |
+
filename=LOG_FILE_PATH,
|
23 |
filemode='w',
|
24 |
level=logging.WARNING,
|
25 |
datefmt='%Y-%m-%d %H:%M:%S',
|
|
|
103 |
|
104 |
def get_zip_file_path(zip_file_name: str):
|
105 |
zip_file_path = None
|
|
|
106 |
for root, _, files in os.walk(ZIP_CACHE_DIR):
|
107 |
for file in files:
|
108 |
if file == zip_file_name:
|
src/envs.py
CHANGED
@@ -1,4 +1,5 @@
|
|
1 |
import os
|
|
|
2 |
from huggingface_hub import HfApi
|
3 |
|
4 |
|
@@ -22,6 +23,7 @@ HF_CACHE_DIR = os.path.join(CACHE_PATH, ".cache")
|
|
22 |
ZIP_CACHE_DIR = os.path.join(CACHE_PATH, ".zip_cache")
|
23 |
|
24 |
LOG_DIR = os.path.join(CACHE_PATH, "logs")
|
|
|
25 |
|
26 |
API = HfApi(token=HF_TOKEN)
|
27 |
|
|
|
1 |
import os
|
2 |
+
import time
|
3 |
from huggingface_hub import HfApi
|
4 |
|
5 |
|
|
|
23 |
ZIP_CACHE_DIR = os.path.join(CACHE_PATH, ".zip_cache")
|
24 |
|
25 |
LOG_DIR = os.path.join(CACHE_PATH, "logs")
|
26 |
+
LOG_FILE_PATH = os.path.join(LOG_DIR, f"backend_{time.strftime('%Y-%m-%d_%H-%M-%S')}.log")
|
27 |
|
28 |
API = HfApi(token=HF_TOKEN)
|
29 |
|