ajitrajasekharan committed
Commit · 8a3b8f4
1 Parent(s): 3eb018f
Update app.py
app.py
CHANGED
@@ -35,9 +35,6 @@ def decode(tokenizer, pred_idx, top_clean):
 def encode(tokenizer, text_sentence, add_special_tokens=True):

     text_sentence = text_sentence.replace('<mask>', tokenizer.mask_token)
-    # if <mask> is the last token, append a "." so that models dont predict punctuation.
-    #if tokenizer.mask_token == text_sentence.split()[-1]:
-    #    text_sentence += ' .'

     tokenized_text = tokenizer.tokenize(text_sentence)
     input_ids = torch.tensor([tokenizer.encode(text_sentence, add_special_tokens=add_special_tokens)])
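
For context, encode() feeds a standard Hugging Face fill-mask flow: tokenize the masked sentence, run the model, and take the top-k vocabulary entries at the mask position. The following is a minimal sketch of that flow, not code from app.py; the example sentence, the choice of bert-base-cased, and the helper-free structure are assumptions.

import torch
from transformers import BertForMaskedLM, BertTokenizer

tokenizer = BertTokenizer.from_pretrained('bert-base-cased')   # model choice is an assumption
model = BertForMaskedLM.from_pretrained('bert-base-cased')
model.eval()

# replace the app's <mask> placeholder with the tokenizer's own mask token
text = "Parkinson's is a <mask> disorder".replace('<mask>', tokenizer.mask_token)
input_ids = torch.tensor([tokenizer.encode(text, add_special_tokens=True)])
mask_pos = (input_ids[0] == tokenizer.mask_token_id).nonzero(as_tuple=True)[0].item()

with torch.no_grad():
    logits = model(input_ids).logits               # shape: (1, seq_len, vocab_size)

top_k = 20                                         # mirrors the app's default slider value
top_ids = logits[0, mask_pos].topk(top_k).indices.tolist()
print(tokenizer.convert_ids_to_tokens(top_ids))    # candidate fillers for the masked slot
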
@@ -142,32 +139,19 @@ def main():
     st.markdown("""
     <small style="font-size:18px; color: #7f7f7f">Pretrained BERT models can be used as is, <a href="https://ajitrajasekharan.github.io/2021/01/02/my-first-post.html"><b>with no fine tuning to perform tasks like NER</b></a> <i>ideally if both fill-mask and CLS predictions are good, or minimally if fill-mask predictions are adequate</i></small>
     """, unsafe_allow_html=True)
-
+
     st.write("This app can be used to examine both model prediction for a masked position as well as the neighborhood of CLS vector")
     st.write(" - To examine model prediction for a position, enter the token [MASK] or <mask>")
     st.write(" - To examine just the [CLS] vector, enter a word/phrase or sentence. Example: eGFR or EGFR or non small cell lung cancer")
     st.sidebar.slider("Select how many predictions do you need", 1 , 50, 20,key='my_slider',on_change=on_results_count_change) #some times it is possible to have less words


-
-
-    #if st.button("Submit"):
-
-    #    with st.spinner("Computing"):
     try:
         st.sidebar.selectbox(label='Select Model to Apply', options=['ajitrajasekharan/biomedical', 'bert-base-cased','bert-large-cased','microsoft/BiomedNLP-PubMedBERT-base-uncased-abstract-fulltext','allenai/scibert_scivocab_cased','dmis-lab/biobert-v1.1'], index=0, key = "my_model1",on_change=on_model_change1)
         init_selectbox()
         st.text_input("Enter text below", "",on_change=on_text_change,key='my_text')
         st.text_input("Model not listed on left? Type the model name (fill-mask BERT models only)", "",key="my_model2",on_change=on_model_change2)
-
-        # model_name = custom_model_name
-        # st.info("Custom model selected: " + model_name)
-        # bert_tokenizer, bert_model = load_bert_model(model_name)
-        #if len(input_text) > 0:
-        #    run_test(input_text,top_k,model_name)
-        #else:
-        #    if len(option) > 0:
-        #        run_test(option,top_k,model_name)
+

         st.info("Currently selected results count = " + str(st.session_state['top_k']))
         st.info("Currently selected Model name = " + st.session_state['model_name'])
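
The widgets in this hunk all rely on Streamlit's key/on_change pattern: each widget stores its value under its key in st.session_state, an on_change callback copies it into the keys main() reads ('top_k', 'model_name'), and the st.info lines display those keys. Below is a minimal, self-contained sketch of that pattern; the callback bodies and the seeding of defaults are assumptions, not code taken from app.py.

import streamlit as st

def on_results_count_change():                     # assumed callback body
    st.session_state['top_k'] = st.session_state['my_slider']

def on_model_change1():                            # assumed callback body
    st.session_state['model_name'] = st.session_state['my_model1']

# seed the keys the st.info lines read so the first render has values
if 'top_k' not in st.session_state:
    st.session_state['top_k'] = 20
if 'model_name' not in st.session_state:
    st.session_state['model_name'] = 'ajitrajasekharan/biomedical'

st.sidebar.slider("Select how many predictions do you need", 1, 50, 20,
                  key='my_slider', on_change=on_results_count_change)
st.sidebar.selectbox(label='Select Model to Apply',
                     options=['ajitrajasekharan/biomedical', 'bert-base-cased'],
                     index=0, key='my_model1', on_change=on_model_change1)

st.info("Currently selected results count = " + str(st.session_state['top_k']))
st.info("Currently selected Model name = " + st.session_state['model_name'])
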