From 3d5d053486b99a191112d619b415278876b018bf Mon Sep 17 00:00:00 2001
From: Meowww
Date: Fri, 13 Dec 2024 16:39:47 +0300
Subject: [PATCH] Update json_adapter.py

Ollama now supports constrained outputs
(https://ollama.com/blog/structured-outputs)
---
 dspy/adapters/json_adapter.py | 14 ++++++++++++--
 1 file changed, 12 insertions(+), 2 deletions(-)

diff --git a/dspy/adapters/json_adapter.py b/dspy/adapters/json_adapter.py
index 281df5cb4..71e10bba7 100644
--- a/dspy/adapters/json_adapter.py
+++ b/dspy/adapters/json_adapter.py
@@ -38,10 +38,20 @@ def __call__(self, lm, lm_kwargs, signature, demos, inputs, _parse_values=True):
         try:
             provider = lm.model.split("/", 1)[0] or "openai"
-            if "response_format" in litellm.get_supported_openai_params(model=lm.model, custom_llm_provider=provider):
+
+            format_param = None
+
+            if provider == "ollama":
+                format_param = "format"
+            else:
+                if "response_format" in litellm.get_supported_openai_params(model=lm.model, custom_llm_provider=provider):
+                    format_param = "response_format"
+
+            if format_param is not None:
                 try:
                     response_format = _get_structured_outputs_response_format(signature)
-                    outputs = lm(**inputs, **lm_kwargs, response_format=response_format)
+                    lm_kwargs[format_param] = response_format.model_json_schema() if provider == "ollama" else response_format
+                    outputs = lm(**inputs, **lm_kwargs)
                 except Exception:
                     _logger.debug(
                         "Failed to obtain response using signature-based structured outputs"