
Commit

some minor fixes; better error handling
Robert Szefler committed Jun 12, 2024
1 parent 50499fe commit 7c7c8c3
Showing 3 changed files with 34 additions and 15 deletions.
32 changes: 21 additions & 11 deletions holmes/core/robusta_ai.py
@@ -1,14 +1,16 @@
 # TODO finish refactor
 import logging
 
 import requests
-from fastapi import HTTPException
-
 from holmes.core.runbooks import RunbookManager
-from holmes.core.tool_calling_llm import BaseIssueInvestigator, BaseToolCallingLLM, LLMResult
+from holmes.core.tool_calling_llm import BaseIssueInvestigator, BaseToolCallingLLM, LLMError, LLMResult
 from holmes.utils.auth import SessionManager
 
 
+class RobustaAICallError(LLMError):
+    pass
+
+
 class RobustaAIToolCallingLLM(BaseToolCallingLLM):
     def __init__(self):
         raise NotImplementedError("Robusta AI tool calling LLM is not supported yet")
@@ -27,12 +29,18 @@ def call(self, system_prompt: str, user_prompt: str) -> LLMResult:

         payload = {
             "auth": {"account_id": auth_token.account_id, "token": auth_token.token},
-            "system_message": system_prompt,
-            "user_message": user_prompt,
-            # TODO
-            # "model": request.model,
+            "body": {
+                "system_message": system_prompt,
+                "user_message": user_prompt,
+                # TODO?
+                # "model": request.model,
+            },
         }
-        resp = requests.post(self.base_url + "/api/ai", json=payload)
+        try:
+            resp = requests.post(f"{self.base_url}/api/ai", json=payload)
+        except:
+            logging.exception("Robusta AI API call failed")
+            raise RobustaAICallError("Robusta AI API call failed")
         if resp.status_code == 401:
             self.session_manager.invalidate_token(auth_token)
             # Attempt auth again using a fresh token
@@ -44,6 +52,8 @@ def call(self, system_prompt: str, user_prompt: str) -> LLMResult:
             logging.error(
                 f"Failed to reauth with Robusta AI. Response status {resp.status_code}, content: {resp.text}"
             )
-            raise HTTPException(status_code=400, detail="Unable to auth with Robusta AI")
-        # TODO LLMResult
-        return resp.json()
+            raise RobustaAICallError("Unable to auth with Robusta AI")
+        resp_data = resp.json()
+        if not resp_data["success"]:
+            raise RobustaAICallError("Robusta AI API call failed")
+        return LLMResult(result=resp_data["msg"], prompt=user_prompt)
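
With these changes, every failure mode of the Robusta AI call (a network error, a failed re-auth, or a response with success=False) surfaces as a single RobustaAICallError instead of a transport exception or a FastAPI HTTPException. A minimal caller-side sketch of the resulting contract; the ask_robusta helper and the llm wrapper object are hypothetical illustrations, not the repository's API:

# Hedged sketch: how a caller sees the revised error contract.
# ask_robusta and the llm object are hypothetical stand-ins for the
# class that owns call() in this diff.
from holmes.core.robusta_ai import RobustaAICallError

def ask_robusta(llm, system_prompt: str, user_prompt: str) -> str:
    try:
        result = llm.call(system_prompt, user_prompt)  # returns LLMResult
    except RobustaAICallError as exc:
        # Network failures, re-auth failures, and success=False payloads
        # all arrive here as one exception type.
        return f"Robusta AI unavailable: {exc}"
    return result.result
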
4 changes: 4 additions & 0 deletions holmes/core/tool_calling_llm.py
@@ -15,6 +15,10 @@
 from holmes.core.tools import YAMLToolExecutor
 
 
+class LLMError(Exception):
+    pass
+
+
 class ToolCallResult(BaseModel):
     tool_name: str
     description: str
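
The new LLMError base class gives every provider a common failure type to subclass, so callers can handle provider errors without knowing which backend is configured. A short sketch of why that matters; OpenAICallError is a hypothetical second subclass, not something this commit adds:

# Hedged sketch: one except-clause covers current and future providers.
class LLMError(Exception):
    pass

class RobustaAICallError(LLMError):  # added in this commit
    pass

class OpenAICallError(LLMError):     # hypothetical future subclass
    pass

def run_safely(call):
    try:
        return call()
    except LLMError as exc:
        # Catches RobustaAICallError, OpenAICallError, and any later subclass.
        return f"LLM provider failed: {exc}"
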
13 changes: 9 additions & 4 deletions server.py
@@ -15,7 +15,7 @@

 import colorlog
 import uvicorn
-from fastapi import FastAPI
+from fastapi import FastAPI, HTTPException
 from rich.console import Console
 
 from holmes.common.env_vars import ALLOWED_TOOLSETS, HOLMES_HOST, HOLMES_PORT
@@ -24,6 +24,7 @@
 from holmes.core.provider import LLMProviderFactory
 from holmes.core.server_models import InvestigateContext, InvestigateRequest
 from holmes.core.supabase_dal import SupabaseDal
+from holmes.core.tool_calling_llm import LLMError
 from holmes.plugins.prompts import load_prompt
 from holmes.utils.auth import SessionManager

@@ -54,7 +55,7 @@ def init_logging():
 if not dal.initialized and config.llm_provider == LLMProviderType.ROBUSTA:
     logging.error("Holmes cannot run without store configuration when the LLM provider is Robusta AI")
     sys.exit(1)
-session_manager = SessionManager(dal, "RelayHolmes")
+session_manager = SessionManager(dal, "AIRelay")
 provider_factory = LLMProviderFactory(config, session_manager)
 app = FastAPI()

@@ -88,12 +89,16 @@ def investigate_issue(request: InvestigateRequest):
     investigator = provider_factory.create_issue_investigator(
         console, allowed_toolsets=ALLOWED_TOOLSETS
     )
-    return {
-        "analysis": investigator.investigate(
+    try:
+        result = investigator.investigate(
             issue,
             prompt=load_prompt(request.system_prompt),
             console=console,
         ).result
+    except LLMError as exc:
+        raise HTTPException(status_code=500, detail=f"Error calling the LLM provider: {str(exc)}")
+    return {
+        "analysis": result
     }
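
Taken together, the three files route provider failures through one path: the provider raises RobustaAICallError (an LLMError), and the endpoint converts it into an HTTP 500. A self-contained sketch of that round trip; the /investigate route name and the always-failing stand-in provider are assumptions for illustration:

# Hedged, self-contained sketch of the error path this commit wires up.
# The route name and the always-failing provider are illustrative only.
from fastapi import FastAPI, HTTPException

class LLMError(Exception):
    pass

class RobustaAICallError(LLMError):
    pass

app = FastAPI()

def investigate():
    # Stand-in for investigator.investigate(...); always fails here.
    raise RobustaAICallError("Robusta AI API call failed")

@app.post("/investigate")
def investigate_issue():
    try:
        result = investigate()
    except LLMError as exc:
        # FastAPI renders this as status 500 with body {"detail": "..."}.
        raise HTTPException(status_code=500, detail=f"Error calling the LLM provider: {str(exc)}")
    return {"analysis": result}

A client posting to this sketch's /investigate would receive a 500 response whose JSON detail field carries the provider error, rather than an unhandled stack trace.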

