import spaces
import gradio as gr
from hfsearch import (HFSearchResult, search, update_filter, update_df, get_labels, get_valid_labels,
                      get_tags, get_subtag_categories, update_subtag_items, update_tags, update_subtags,
                      DS_SIZE_CATEGORIES, SPACE_HARDWARES, SPACE_STAGES)

CSS = """

.title { align-items: center; text-align: center; }

.info { align-items: center; text-align: center; }

"""

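# UI layout: two search tabs (a general search and a Serverless Inference API model search),
# a shared filter/results area, and the event wiring at the bottom of the Blocks context.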
with gr.Blocks(theme="NoCrypt/miku", fill_width=True, css=CSS) as demo:
    gr.Markdown("# Search Hugging Face🤗", elem_classes="title")
    with gr.Column():
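        # Per-session state holding the latest HFSearchResult; the filter and hide callbacks below read and update it.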
        search_result = gr.State(value=HFSearchResult())
        with gr.Tab("Normal Search"):
            with gr.Group():
                with gr.Row(equal_height=True):
                    repo_types = gr.CheckboxGroup(label="Repo type", choices=["model", "dataset", "space"], value=["model", "dataset", "space"])
                with gr.Accordion("Advanced", open=False):
                    with gr.Row(equal_height=True):
                        filter_str = gr.Textbox(label="Filter", info="String(s) to filter repos", value="")
                        search_str = gr.Textbox(label="Search", info="A string that will be contained in the returned repo ids", placeholder="bert", value="", lines=1)
                        author = gr.Textbox(label="Author", info="The author (user or organization)", value="", lines=1)
                    with gr.Column():
                        tags = gr.Textbox(label="Tags", info="Tag(s) to filter repos", value="")
                        with gr.Accordion("Tag input assistance", open=False):
                            with gr.Row(equal_height=True):
                                tag_item = gr.Dropdown(label="Item", choices=get_tags(), value=get_tags()[0], allow_custom_value=True, scale=4)
                                tag_btn = gr.Button("Add", scale=1)
                            with gr.Row(equal_height=True):
                                subtag_cat = gr.Dropdown(label="Category", choices=get_subtag_categories(), value=get_subtag_categories()[0], scale=2)
                                subtag_item = gr.Dropdown(label="Item", choices=[""], value="", allow_custom_value=True, scale=2)
                                subtag_btn = gr.Button("Add", scale=1)
                    with gr.Column():
                        gated_status = gr.Radio(label="Gated status", choices=["gated", "non-gated", "all"], value="all")
                        appr_status = gr.CheckboxGroup(label="Approval method", choices=["auto", "manual"], value=["auto", "manual"])
                    with gr.Tab("for Models"):
                        with gr.Column():
                            infer_status = gr.Radio(label="Inference status", choices=["warm", "cold", "frozen", "all"], value="all")
                            gr.Markdown("[About the Inference API status (Warm, Cold, Frozen)](https://huggingface.co/docs/api-inference/supported-models)", elem_classes="info")
                    #    with gr.Row(equal_height=True):
                    #        model_task = gr.Textbox(label="Task", info="String(s) of tasks models were designed for", placeholder="fill-mask", value="")
                    #        trained_dataset = gr.Textbox(label="Trained dataset", info="Trained dataset for a model", value="")
                    with gr.Tab("for Datasets"):
                        size_categories = gr.CheckboxGroup(label="Size categories", info="The size of the dataset", choices=DS_SIZE_CATEGORIES, value=[])
                    #    task_categories = gr.Textbox(label="Task categories", info="Identify datasets by the designed task", value="")
                    #    task_ids = gr.Textbox(label="Task IDs", info="Identify datasets by the specific task", value="")
                    #    language_creators = gr.Textbox(label="Language creators", info="Identify datasets with how the data was curated", value="")
                    #    language = gr.Textbox(label="Language", info="String(s) representing two-character language to filter datasets by", value="")
                    #    multilinguality = gr.Textbox(label="Multilinguality", info="String(s) representing a filter for datasets that contain multiple languages", value="")
                    with gr.Tab("for Spaces"):
                        with gr.Row(equal_height=True):
                            hardware = gr.CheckboxGroup(label="Specify hardware", choices=SPACE_HARDWARES, value=[])
                            stage = gr.CheckboxGroup(label="Specify stage", choices=SPACE_STAGES, value=[])
                    with gr.Row(equal_height=True):
                        sort = gr.Radio(label="Sort", choices=["last_modified", "likes", "downloads", "trending_score"], value="likes")
                        sort_method = gr.Radio(label="Sort method", choices=["ascending order", "descending order"], value="ascending order")
                        limit = gr.Number(label="Limit", info="If 0, fetches all matching repos", value=1000, step=1, minimum=0, maximum=10000000)
                        fetch_detail = gr.CheckboxGroup(label="Fetch detail", choices=["Space Runtime"], value=["Space Runtime"])
                    with gr.Row(equal_height=True):
                        show_labels = gr.CheckboxGroup(label="Show items", choices=get_labels(), value=get_valid_labels())
                run_button = gr.Button("Search", variant="primary")
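        # Second tab: a preset model-only search keyed on Serverless Inference API status (defaults to "warm").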
        with gr.Tab("Find Serverless Inference API enabled models"):
            with gr.Group():
                with gr.Row(equal_height=True):
                    infer_repo_types = gr.CheckboxGroup(label="Repo type", choices=["model", "dataset", "space"], value=["model"], visible=False)
                    with gr.Column():
                        infer_infer_status = gr.Radio(label="Inference status", choices=["warm", "cold", "frozen", "all"], value="warm")
                        gr.Markdown("[About the Inference API status (Warm, Cold, Frozen)](https://huggingface.co/docs/api-inference/supported-models)", elem_classes="info")
                    with gr.Column():
                        infer_gated_status = gr.Radio(label="Gated status", choices=["gated", "non-gated", "all"], value="all")
                        infer_appr_status = gr.CheckboxGroup(label="Approval method", choices=["auto", "manual"], value=["auto", "manual"])
                infer_run_button = gr.Button("Search", variant="primary")
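        # Shared output area used by both tabs: an optional column filter and the results DataFrame.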
        with gr.Group():
            with gr.Accordion("Filter", open=False):
                hide_labels = gr.CheckboxGroup(label="Hide items", choices=[], value=[], visible=False)
                with gr.Row(equal_height=True):
                    filter_item1 = gr.Dropdown(label="Filter item", choices=[""], value="", visible=False)
                    filter1 = gr.Dropdown(label="Filter", choices=[""], value="", allow_custom_value=True, visible=False)
                    filter_btn = gr.Button("Apply filter", variant="secondary", visible=False)
            result_df = gr.DataFrame(label="Results", type="pandas", value=None, interactive=False)

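    # Event wiring: each search button runs `search`, then refreshes the filter widgets via `update_filter` on success.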
    run_button.click(search, [repo_types, sort, sort_method, filter_str, search_str, author, tags, infer_status, gated_status, appr_status,
                              size_categories, limit, hardware, stage, fetch_detail, show_labels, search_result],
                     [result_df, hide_labels, search_result])\
    .success(update_filter, [filter_item1, search_result], [filter_item1, filter1, filter_btn, search_result], queue=False)
    infer_run_button.click(search, [infer_repo_types, sort, sort_method, filter_str, search_str, author, tags, infer_infer_status, infer_gated_status, infer_appr_status,
                                    size_categories, limit, hardware, stage, fetch_detail, show_labels, search_result],
                           [result_df, hide_labels, search_result])\
    .success(update_filter, [filter_item1, search_result], [filter_item1, filter1, filter_btn, search_result], queue=False)
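    # Re-render the DataFrame when columns are hidden or a filter is applied, and keep the tag/filter helper inputs in sync.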
    gr.on(triggers=[hide_labels.change, filter_btn.click], fn=update_df, inputs=[hide_labels, filter_item1, filter1, search_result],
          outputs=[result_df, search_result], trigger_mode="once", queue=False, show_api=False)
    filter_item1.change(update_filter, [filter_item1, search_result], [filter_item1, filter1, filter_btn, search_result], queue=False, show_api=False)
    subtag_cat.change(update_subtag_items, [subtag_cat], [subtag_item], queue=False, show_api=False)
    subtag_btn.click(update_subtags, [tags, subtag_cat, subtag_item], [tags], queue=False, show_api=False)
    tag_btn.click(update_tags, [tags, tag_item], [tags], queue=False, show_api=False)

demo.queue().launch()