Safwanahmad619 committed
Commit 746ff42 · verified · 1 Parent(s): 4b32c36

Update app.py

Files changed (1)
  1. app.py +5 -21
app.py CHANGED
@@ -1,10 +1,5 @@
 import gradio as gr
-from transformers import pipeline, AutoTokenizer, AutoModelForMaskedLM, AutoModelForSeq2SeqLM
-
-# Load DNA Analysis Model
-dna_tokenizer = AutoTokenizer.from_pretrained("facebook/esm2_t6_8M_UR50D")
-dna_model = AutoModelForMaskedLM.from_pretrained("facebook/esm2_t6_8M_UR50D")
-dna_pipeline = pipeline("fill-mask", model=dna_model, tokenizer=dna_tokenizer)
+from transformers import pipeline, AutoTokenizer, AutoModelForSeq2SeqLM
 
 # Load Ethical Inquiry and Learning Support Model
 ethics_tokenizer = AutoTokenizer.from_pretrained("google/flan-t5-base")
@@ -13,10 +8,8 @@ ethics_pipeline = pipeline("text2text-generation", model=ethics_model, tokenizer
 
 # Query Classification
 def classify_query(query):
-    """Classify the query into DNA Analysis, Ethical Inquiry, or Learning Support."""
-    if "DNA" in query or "sequence" in query:
-        return "dna_analysis"
-    elif "ethics" in query or "privacy" in query:
+    """Classify the query into Ethical Inquiry or Learning Support."""
+    if "ethics" in query or "privacy" in query:
         return "ethical_inquiry"
     else:
         return "learning_support"
@@ -26,16 +19,7 @@ def handle_query(query):
     """Route the query to the appropriate model and generate a response."""
     task = classify_query(query)
 
-    if task == "dna_analysis":
-        try:
-            # Example DNA sequence processing: Replace X or any part of the sequence with [MASK]
-            masked_sequence = query.replace("X", "[MASK]")
-            output = dna_pipeline(masked_sequence)
-            return f"DNA Analysis Result: {output}"
-        except Exception as e:
-            return f"Error in DNA Analysis: {e}"
-
-    elif task == "ethical_inquiry":
+    if task == "ethical_inquiry":
         try:
             # Ethical guidance response
             response = ethics_pipeline(query)
@@ -61,7 +45,7 @@ interface = gr.Interface(
     inputs="text",
     outputs="text",
     title="BioSphere AI Chatbot",
-    description="A chatbot for DNA Analysis, Ethical Guidance, and Learning Support in Biotech.",
+    description="A chatbot for Ethical Guidance and Learning Support in Biotech.",
 )
 
 # Add Gemmini API Key Integration
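For reference, below is a minimal sketch of app.py as it stands after this commit, assembled from the hunks above. The imports, model loading, classify_query, the ethical_inquiry branch, and the gr.Interface arguments come straight from the diff; the ethics_model checkpoint, the response formatting, the learning_support branch, the fn= wiring, and the launch() call are not visible in the diff and are assumptions added for illustration only.

import gradio as gr
from transformers import pipeline, AutoTokenizer, AutoModelForSeq2SeqLM

# Load Ethical Inquiry and Learning Support Model (from the diff; the
# ethics_model checkpoint is assumed to match the tokenizer)
ethics_tokenizer = AutoTokenizer.from_pretrained("google/flan-t5-base")
ethics_model = AutoModelForSeq2SeqLM.from_pretrained("google/flan-t5-base")
ethics_pipeline = pipeline("text2text-generation", model=ethics_model, tokenizer=ethics_tokenizer)


# Query Classification (from the diff)
def classify_query(query):
    """Classify the query into Ethical Inquiry or Learning Support."""
    if "ethics" in query or "privacy" in query:
        return "ethical_inquiry"
    else:
        return "learning_support"


def handle_query(query):
    """Route the query to the appropriate model and generate a response."""
    task = classify_query(query)

    if task == "ethical_inquiry":
        try:
            # Ethical guidance response (return formatting assumed; the diff
            # is truncated after the pipeline call)
            response = ethics_pipeline(query)
            return f"Ethical Guidance: {response[0]['generated_text']}"
        except Exception as e:
            return f"Error in Ethical Inquiry: {e}"

    # learning_support branch: not shown in the diff; assumed here to reuse
    # the same flan-t5 pipeline
    try:
        response = ethics_pipeline(query)
        return f"Learning Support: {response[0]['generated_text']}"
    except Exception as e:
        return f"Error in Learning Support: {e}"


# Gradio interface (title/description from the diff; fn wiring assumed)
interface = gr.Interface(
    fn=handle_query,
    inputs="text",
    outputs="text",
    title="BioSphere AI Chatbot",
    description="A chatbot for Ethical Guidance and Learning Support in Biotech.",
)

if __name__ == "__main__":
    interface.launch()

One detail worth noting about the code kept by this commit: classify_query does case-sensitive substring matching, so a query containing "Ethics" or "Privacy" (capitalized) falls through to learning_support; lower-casing the query before matching would make the routing more robust.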