Spaces:
Running
Running
geekyrakshit
committed on
Commit
·
c32f628
1
Parent(s):
64eb74c
refactor: SurveyGuardrail to PromptInjectionSurveyGuardrail
Browse files
app.py
CHANGED
@@ -2,8 +2,7 @@ import streamlit as st
|
|
2 |
import weave
|
3 |
from dotenv import load_dotenv
|
4 |
|
5 |
-
from guardrails_genie.guardrails import GuardrailManager
|
6 |
-
from guardrails_genie.guardrails.injection import SurveyGuardrail
|
7 |
from guardrails_genie.llm import OpenAIModel
|
8 |
|
9 |
load_dotenv()
|
@@ -14,11 +13,19 @@ chat_condition = openai_model != ""
|
|
14 |
|
15 |
guardrails = []
|
16 |
|
17 |
-
with st.sidebar.expander("Switch on Guardrails"):
|
18 |
-
is_survey_guardrail_enabled = st.toggle("Survey Guardrail")
|
19 |
|
20 |
if is_survey_guardrail_enabled:
|
21 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
22 |
|
23 |
guardrails_manager = GuardrailManager(guardrails=guardrails)
|
24 |
|
|
|
2 |
import weave
|
3 |
from dotenv import load_dotenv
|
4 |
|
5 |
+
from guardrails_genie.guardrails import GuardrailManager, PromptInjectionSurveyGuardrail
|
|
|
6 |
from guardrails_genie.llm import OpenAIModel
|
7 |
|
8 |
load_dotenv()
|
|
|
13 |
|
14 |
guardrails = []
|
15 |
|
16 |
+
with st.sidebar.expander("Switch on Prompt Injection Guardrails"):
|
17 |
+
is_survey_guardrail_enabled = st.toggle("Survey Guardrail")
|
18 |
|
19 |
if is_survey_guardrail_enabled:
|
20 |
+
survey_guardrail_model = st.selectbox(
|
21 |
+
"Survey Guardrail Model", ["", "gpt-4o-mini", "gpt-4o"]
|
22 |
+
)
|
23 |
+
if survey_guardrail_model:
|
24 |
+
guardrails.append(
|
25 |
+
PromptInjectionSurveyGuardrail(
|
26 |
+
llm_model=OpenAIModel(model_name=survey_guardrail_model)
|
27 |
+
)
|
28 |
+
)
|
29 |
|
30 |
guardrails_manager = GuardrailManager(guardrails=guardrails)
|
31 |
|
guardrails_genie/guardrails/__init__.py
CHANGED
@@ -1,4 +1,4 @@
|
|
1 |
-
from .injection import SurveyGuardrail
|
2 |
from .manager import GuardrailManager
|
3 |
|
4 |
-
__all__ = ["SurveyGuardrail", "GuardrailManager"]
|
|
|
1 |
+
from .injection import PromptInjectionSurveyGuardrail
|
2 |
from .manager import GuardrailManager
|
3 |
|
4 |
+
__all__ = ["PromptInjectionSurveyGuardrail", "GuardrailManager"]
|
guardrails_genie/guardrails/injection/__init__.py
CHANGED
@@ -1,3 +1,3 @@
|
|
1 |
-
from .survey_guardrail import SurveyGuardrail
|
2 |
|
3 |
-
__all__ = ["SurveyGuardrail"]
|
|
|
1 |
+
from .survey_guardrail import PromptInjectionSurveyGuardrail
|
2 |
|
3 |
+
__all__ = ["PromptInjectionSurveyGuardrail"]
|
guardrails_genie/guardrails/injection/survey_guardrail.py
CHANGED
@@ -15,7 +15,7 @@ class SurveyGuardrailResponse(BaseModel):
|
|
15 |
explanation: Optional[str]
|
16 |
|
17 |
|
18 |
-
class SurveyGuardrail(Guardrail):
|
19 |
llm_model: OpenAIModel
|
20 |
|
21 |
@weave.op()
|
|
|
15 |
explanation: Optional[str]
|
16 |
|
17 |
|
18 |
+
class PromptInjectionSurveyGuardrail(Guardrail):
|
19 |
llm_model: OpenAIModel
|
20 |
|
21 |
@weave.op()
|
pyproject.toml
CHANGED
@@ -12,7 +12,7 @@ dependencies = [
|
|
12 |
"ruff>=0.6.9",
|
13 |
"pip>=24.2",
|
14 |
"uv>=0.4.20",
|
15 |
-
"weave>=0.51.
|
16 |
"streamlit>=1.40.1",
|
17 |
"python-dotenv>=1.0.1",
|
18 |
"watchdog>=6.0.0",
|
|
|
12 |
"ruff>=0.6.9",
|
13 |
"pip>=24.2",
|
14 |
"uv>=0.4.20",
|
15 |
+
"weave>=0.51.22",
|
16 |
"streamlit>=1.40.1",
|
17 |
"python-dotenv>=1.0.1",
|
18 |
"watchdog>=6.0.0",
|