Commit
Removed response_format with ollama models due to an issue in litellm
kevinmessiaen committed Nov 19, 2024
1 parent 5f51327 commit 39c4fa9
Showing 2 changed files with 11 additions and 0 deletions.
7 changes: 7 additions & 0 deletions giskard/llm/client/__init__.py
@@ -7,6 +7,8 @@
from .base import ChatMessage, LLMClient
from .logger import LLMLogger

logger = logging.getLogger(__name__)

_default_client = None

_default_llm_model = os.getenv("GSK_LLM_MODEL", "gpt-4o")
@@ -66,6 +68,11 @@ def set_llm_model(llm_model: str, **kwargs):
    global _default_llm_model
    global _default_completion_params

    if llm_model.startswith("ollama/"):
        logger.warning(
            "Giskard might not work properly with ollama. Please consider switching to another model provider."
        )

    _default_llm_model = llm_model
    _default_completion_params = kwargs

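A minimal usage sketch of the new behavior (assuming giskard is installed; the model name and api_base value are illustrative): selecting an ollama-served model still works, but now logs the warning before the model is registered.

import logging

import giskard

logging.basicConfig(level=logging.WARNING)

# Logs: "Giskard might not work properly with ollama. Please consider
# switching to another model provider."
giskard.llm.set_llm_model("ollama/llama3", api_base="http://localhost:11434")
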
4 changes: 4 additions & 0 deletions giskard/llm/client/litellm.py
@@ -39,6 +39,10 @@ def __init__(self, model: str = "gpt-4o", completion_params: Optional[Dict[str,
    def _build_supported_completion_params(self, **kwargs):
        supported_params = litellm.get_supported_openai_params(model=self.model)

        # response_format causes issues with ollama: https://github.com/BerriAI/litellm/issues/6359
        if self.model.startswith("ollama/"):
            supported_params.remove("response_format")

        return {
            param_name: param_value
            for param_name, param_value in kwargs.items()
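A standalone sketch of the filtering idea (not the library code verbatim; the final filter condition and the printed result are assumptions, since the hunk above is truncated): keep only the completion params that litellm reports as supported for the model, and drop response_format for ollama-served models.

import litellm


def build_supported_completion_params(model: str, **kwargs):
    # Ask litellm which OpenAI-style params this model supports.
    supported_params = litellm.get_supported_openai_params(model=model)

    # response_format causes issues with ollama: https://github.com/BerriAI/litellm/issues/6359
    if model.startswith("ollama/") and "response_format" in supported_params:
        supported_params.remove("response_format")

    # Keep only the kwargs that are actually supported (assumed filter condition).
    return {name: value for name, value in kwargs.items() if name in supported_params}


# response_format is silently dropped for an ollama-served model.
params = build_supported_completion_params(
    "ollama/llama3", temperature=0.2, response_format={"type": "json_object"}
)
print(params)  # expected: {'temperature': 0.2}, assuming litellm lists temperature as supported
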
