Update app.py
app.py CHANGED
@@ -29,7 +29,7 @@ models = [
     "mistralai/Mixtral-8x7B-Instruct-v0.1",
 ]
 LOG_FILE = "api_logs.json"
-CHECK_INTERVAL =
+CHECK_INTERVAL = 30 # 1 minute
 
 
 client = AsyncInferenceClient(token=os.environ["HF_INFERENCE_API_TOKEN"])
@@ -43,7 +43,7 @@ class LogEntry(BaseModel):
     model: str
     success: bool
     timestamp: str
-
+    failure_message: str
 
 async def check_apis():
     results = []
@@ -53,15 +53,16 @@ async def check_apis():
                 messages=[{"role": "user", "content": "What is the capital of France?"}],
                 max_tokens=10,
             )
-            success =
-        except
+            success = True
+        except Exception as e:
+            print(e)
             success = False
 
         results.append(LogEntry(
             model=model,
             success=success,
             timestamp=datetime.now().isoformat(),
-
+            failure_message=str(e)
         ))
 
     with open(LOG_FILE, "r+") as f:
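
Note on the committed change: failure_message=str(e) is evaluated on every iteration, but e is only bound inside the except block, so a successful check raises a NameError (and the new required failure_message field would have no value to fill it). Below is a minimal sketch of one way to keep the field defined on both paths. It is not the committed code: it assumes the awaited call is client.chat_completion (the call itself sits above the hunk and is not shown), abbreviates the models list, gives failure_message an empty-string default, and returns the results instead of reproducing the LOG_FILE write that follows in app.py.

import os
from datetime import datetime

from huggingface_hub import AsyncInferenceClient
from pydantic import BaseModel

# Abbreviated stand-ins for the module-level objects defined earlier in app.py.
models = ["mistralai/Mixtral-8x7B-Instruct-v0.1"]
CHECK_INTERVAL = 30  # 1 minute (comment and value disagree in the commit)
client = AsyncInferenceClient(token=os.environ["HF_INFERENCE_API_TOKEN"])

class LogEntry(BaseModel):
    model: str
    success: bool
    timestamp: str
    failure_message: str = ""  # default keeps successful checks valid

async def check_apis():
    results = []
    for model in models:
        failure_message = ""  # defined on both the success and failure paths
        try:
            # Assumed call; the actual awaited call sits above the diff hunk.
            await client.chat_completion(
                model=model,
                messages=[{"role": "user", "content": "What is the capital of France?"}],
                max_tokens=10,
            )
            success = True
        except Exception as e:
            success = False
            failure_message = str(e)  # e is only in scope inside this block
        results.append(LogEntry(
            model=model,
            success=success,
            timestamp=datetime.now().isoformat(),
            failure_message=failure_message,
        ))
    return results  # app.py instead appends these entries to LOG_FILE

Presumably CHECK_INTERVAL controls how often this coroutine is re-run, for example via await asyncio.sleep(CHECK_INTERVAL) in a background loop; note that the committed value is 30 while the accompanying comment says "1 minute".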