John6666 committed on
Commit
c960e8b
•
1 Parent(s): f29fb3d

Upload 4 files

Files changed (4)
  1. README.md +13 -12
  2. app.py +60 -0
  3. pre-requirements.txt +1 -0
  4. requirements.txt +1 -0
README.md CHANGED
@@ -1,12 +1,13 @@
- ---
- title: Testwarm
- emoji: 🔥
- colorFrom: purple
- colorTo: blue
- sdk: gradio
- sdk_version: 5.5.0
- app_file: app.py
- pinned: false
- ---
-
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+ ---
+ title: test warm models
+ emoji: 🙄
+ colorFrom: indigo
+ colorTo: purple
+ sdk: gradio
+ sdk_version: 4.44.0
+ app_file: app.py
+ pinned: false
+ license: mit
+ ---
+
+ Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
app.py ADDED
@@ -0,0 +1,60 @@
+ import os
+ if os.environ.get("SPACES_ZERO_GPU") is not None:
+     import spaces
+ else:
+     class spaces:
+         @staticmethod
+         def GPU(func):
+             def wrapper(*args, **kwargs):
+                 return func(*args, **kwargs)
+             return wrapper
+ import gradio as gr
+ import subprocess
+ from huggingface_hub import HfApi
+
+ @spaces.GPU
+ def infer(filter: str, sort: str, sort_dir: bool, infer: str, gated: str, appr: list[str]):
+     try:
+         api = HfApi()
+         kwargs = {}
+         if filter: kwargs["filter"] = filter
+         if gated == "gated": kwargs["gated"] = True
+         elif gated == "non-gated": kwargs["gated"] = False
+         if sort_dir: kwargs["direction"] = -1
+         models = api.list_models(inference=infer, sort=sort, cardData=True, **kwargs)
+         md = "### Results:\n"
+         for model in models:
+             if model.gated and model.gated not in appr: continue
+             md += "1. "
+             md += f"[{model.id}](https://hf.co/{model.id})"
+             md += f" Inference: '{infer}'"
+             #gated_str = model.gated if model.gated else "false"
+             #md += f" Gated: '{gated_str}'"
+             md += f" Gated: '{gated}'"
+             if model.library_name: md += f" Lib:'{model.library_name}'"
+             if model.pipeline_tag: md += f" Pipeline:'{model.pipeline_tag}'"
+             if model.last_modified: md += f" LastMod:'{model.last_modified}'"
+             if model.likes: md += f" Likes:'{model.likes}'"
+             if model.downloads: md += f" DLs:'{model.downloads}'"
+             if model.downloads_all_time: md += f" AllDLs:'{model.downloads_all_time}'"
+             md += "\n"
+         return md
+     except Exception as e:
+         raise gr.Error(str(e))
+
+ with gr.Blocks() as demo:
+     filter = gr.Textbox(label="Query", value="")
+     with gr.Row(equal_height=True):
+         infer_status = gr.Radio(label="Inference status", choices=["warm", "cold", "frozen"], value="warm")
+         gated_status = gr.Radio(label="Gated status", choices=["gated", "non-gated", "all"], value="non-gated")
+         sort = gr.Radio(label="Sort", choices=["last_modified", "likes", "downloads"], value="likes")
+         sort_dir = gr.Checkbox(label="Sort by descending order", value=False)
+         appr_status = gr.CheckboxGroup(label="Approval method", choices=["auto", "manual"], value=["auto", "manual"], visible=False)
+
+     run_button = gr.Button("Search", variant="primary")
+
+     output_md = gr.Markdown("<br><br>")
+
+     run_button.click(infer, [filter, sort, sort_dir, infer_status, gated_status, appr_status], [output_md])
+
+ demo.launch()
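
The core of the added `app.py` is the `HfApi.list_models()` call: the `inference` argument restricts results to models whose serverless Inference API status is "warm", "cold", or "frozen", while `gated`, `sort`, and `direction` are forwarded from the UI controls. A minimal sketch of that same query outside the Gradio wrapper, assuming a recent `huggingface_hub` release in which `list_models()` accepts the `inference` and `gated` filters used above (the `limit` parameter is added here just to keep the example fast):

```python
from huggingface_hub import HfApi

api = HfApi()
# Non-gated models with a "warm" Inference API status, most-liked first.
models = api.list_models(inference="warm", gated=False, sort="likes", limit=5)
for model in models:
    print(model.id, model.pipeline_tag, model.likes)
```

The app renders the same iterator as a Markdown list instead of printing it, skipping gated models whose approval method ("auto" or "manual") is not in the selected `appr` list.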
pre-requirements.txt ADDED
@@ -0,0 +1 @@
+ pip>=24.1
requirements.txt ADDED
@@ -0,0 +1 @@
+ huggingface_hub