Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
73 changes: 50 additions & 23 deletions lib/req_llm/provider/defaults.ex
Original file line number Diff line number Diff line change
Expand Up @@ -1176,18 +1176,7 @@ defmodule ReqLLM.Provider.Defaults do
"type" => "function",
"function" => %{"name" => name, "arguments" => args_json}
}) do
case Jason.decode(args_json || "{}") do
{:ok, args} ->
ReqLLM.StreamChunk.tool_call(name, args, %{id: id})

{:error, _} ->
%ReqLLM.StreamChunk{
type: :tool_call,
name: name,
arguments: %{},
metadata: %{id: id, raw_arguments: args_json}
}
end
build_openai_tool_call_chunk(name, args_json, %{id: id})
end

# Mistral API omits "type" field - add it and delegate
Expand Down Expand Up @@ -1235,10 +1224,7 @@ defmodule ReqLLM.Provider.Defaults do
"function" => %{"name" => name, "arguments" => args_json}
})
when is_binary(name) do
case Jason.decode(args_json || "{}") do
{:ok, args} -> ReqLLM.StreamChunk.tool_call(name, args, %{id: id, index: index})
{:error, _} -> ReqLLM.StreamChunk.tool_call(name, %{}, %{id: id, index: index})
end
build_openai_tool_call_chunk(name, args_json, %{id: id, index: index})
end

# Handle tool call delta with only name (arguments may come in later chunks)
Expand Down Expand Up @@ -1281,10 +1267,7 @@ defmodule ReqLLM.Provider.Defaults do
"function" => %{"name" => name, "arguments" => args_json}
})
when is_binary(name) do
case Jason.decode(args_json || "{}") do
{:ok, args} -> ReqLLM.StreamChunk.tool_call(name, args, %{id: id})
{:error, _} -> ReqLLM.StreamChunk.tool_call(name, %{}, %{id: id})
end
build_openai_tool_call_chunk(name, args_json, %{id: id})
end

# Handle malformed tool call deltas (some APIs send incomplete structures)
Expand All @@ -1298,6 +1281,38 @@ defmodule ReqLLM.Provider.Defaults do

defp decode_openai_tool_call_delta(_), do: nil

# Decodes an OpenAI-style tool-call `arguments` JSON string and wraps it in a
# tool_call StreamChunk. A `nil` payload is treated as "{}". When the JSON is
# unparseable, the chunk carries empty arguments and the metadata is tagged
# with `:invalid_arguments`, `:unparseable_arguments`, and the raw string so
# callers can recover or report the original payload.
defp build_openai_tool_call_chunk(name, args_json, metadata) do
  case Jason.decode(args_json || "{}") do
    {:ok, decoded} ->
      {arguments, extra} = normalize_tool_call_arguments(decoded, args_json)
      ReqLLM.StreamChunk.tool_call(name, arguments, Map.merge(metadata, extra))

    {:error, _} ->
      error_meta = %{
        invalid_arguments: true,
        raw_arguments: args_json,
        unparseable_arguments: true
      }

      ReqLLM.StreamChunk.tool_call(name, %{}, Map.merge(metadata, error_meta))
  end
end

# Returns `{arguments, extra_metadata}` for a decoded tool-call payload.
# A decoded map is passed through untouched with no extra metadata; any
# other JSON value (array, scalar, etc.) is replaced by an empty map and
# flagged via metadata, preserving both the raw string and the decoded
# value for inspection downstream.
defp normalize_tool_call_arguments(decoded, _raw) when is_map(decoded), do: {decoded, %{}}

defp normalize_tool_call_arguments(decoded, raw) do
  extra = %{
    invalid_arguments: true,
    raw_arguments: raw,
    decoded_arguments: decoded
  }

  {%{}, extra}
end

defp build_openai_message_from_chunks(chunks) when is_list(chunks) do
content_parts =
chunks
Expand Down Expand Up @@ -1336,9 +1351,21 @@ defmodule ReqLLM.Provider.Defaults do
metadata: meta
}) do
args_json =
case Map.get(meta, :raw_arguments) do
raw when is_binary(raw) -> raw
_ -> if(is_binary(args), do: args, else: Jason.encode!(args))
cond do
Map.get(meta, :unparseable_arguments) && is_binary(Map.get(meta, :raw_arguments)) ->
Map.get(meta, :raw_arguments)

Map.get(meta, :invalid_arguments) ->
Jason.encode!(args)

is_binary(Map.get(meta, :raw_arguments)) ->
Map.get(meta, :raw_arguments)

is_binary(args) ->
args

true ->
Jason.encode!(args)
end

id = Map.get(meta, :id)
Expand Down
8 changes: 6 additions & 2 deletions lib/req_llm/providers/amazon_bedrock/openai.ex
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ defmodule ReqLLM.Providers.AmazonBedrock.OpenAI do

alias ReqLLM.Provider.Defaults
alias ReqLLM.Providers.AmazonBedrock
alias ReqLLM.Providers.OpenAI.AdapterHelpers

@doc """
Returns whether this model family supports toolChoice in Bedrock Converse API.
Expand Down Expand Up @@ -37,7 +38,7 @@ defmodule ReqLLM.Providers.AmazonBedrock.OpenAI do
end

# Get tools from context if available
tools = Map.get(context, :tools, [])
tools = opts[:tools] || Map.get(context, :tools, [])

# Create a minimal request struct to use default OpenAI encoding
temp_request =
Expand All @@ -55,7 +56,10 @@ defmodule ReqLLM.Providers.AmazonBedrock.OpenAI do
)
)

body = Defaults.default_build_body(temp_request)
body =
temp_request
|> Defaults.default_build_body()
|> AdapterHelpers.translate_tool_choice_format()

messages = body[:messages] || body["messages"]

Expand Down
60 changes: 24 additions & 36 deletions lib/req_llm/providers/cerebras.ex
Original file line number Diff line number Diff line change
Expand Up @@ -12,16 +12,15 @@ defmodule ReqLLM.Providers.Cerebras do
- Streaming not supported with reasoning models in JSON mode or tool calling
- `strict: true` is automatically added to tool schemas when the model supports it
- Models that don't support `strict: true` (e.g., Qwen, ZAI GLM models) have it automatically excluded
- Only supports `tool_choice: "auto"` or `"none"`, not function-specific choices
- Supports OpenAI-style tool calling, including `tool_choice: "auto"`, `"none"`, `"required"`, and function-specific choice objects
- Supports `parallel_tool_calls` to control whether tools can be requested concurrently

## Unsupported OpenAI Features

The following fields will result in a 400 error if supplied:
- `frequency_penalty`
- `logit_bias`
- `presence_penalty`
- `parallel_tool_calls`
- `service_tier`

## Configuration

Expand All @@ -34,6 +33,8 @@ defmodule ReqLLM.Providers.Cerebras do
default_base_url: "https://api.cerebras.ai/v1",
default_env_key: "CEREBRAS_API_KEY"

import ReqLLM.Provider.Utils, only: [maybe_put: 3]

@provider_schema []

@impl ReqLLM.Provider
Expand All @@ -49,7 +50,7 @@ defmodule ReqLLM.Providers.Cerebras do
ReqLLM.Provider.Defaults.default_build_body(request)
|> translate_tool_choice_format()
|> add_strict_to_tools(model)
|> normalize_tool_choice()
|> maybe_put(:parallel_tool_calls, request.options[:parallel_tool_calls])
|> normalize_assistant_content()
end

Expand All @@ -61,20 +62,29 @@ defmodule ReqLLM.Providers.Cerebras do
true -> {nil, nil}
end

type = tool_choice && (Map.get(tool_choice, :type) || Map.get(tool_choice, "type"))
name = tool_choice && (Map.get(tool_choice, :name) || Map.get(tool_choice, "name"))
case tool_choice do
map when is_map(map) ->
type = Map.get(tool_choice, :type) || Map.get(tool_choice, "type")
name = Map.get(tool_choice, :name) || Map.get(tool_choice, "name")

if type == "tool" && name do
replacement =
if is_map_key(tool_choice, :type) do
%{type: "function", function: %{name: name}}
else
%{"type" => "function", "function" => %{"name" => name}}
end

if type == "tool" && name do
replacement =
if is_map_key(tool_choice, :type) do
%{type: "function", function: %{name: name}}
Map.put(body, body_key, replacement)
else
%{"type" => "function", "function" => %{"name" => name}}
body
end

Map.put(body, body_key, replacement)
else
body
atom when not is_nil(atom) and is_atom(atom) ->
Map.put(body, body_key, to_string(atom))

_ ->
body
end
end

Expand Down Expand Up @@ -157,28 +167,6 @@ defmodule ReqLLM.Providers.Cerebras do

defp strip_constraints_recursive(value), do: value

defp normalize_tool_choice(%{"tool_choice" => %{} = tool_choice} = body) do
if tool_choice_type(tool_choice) == "function" do
Map.put(body, "tool_choice", "auto")
else
body
end
end

defp normalize_tool_choice(%{tool_choice: %{} = tool_choice} = body) do
if tool_choice_type(tool_choice) == "function" do
Map.put(body, :tool_choice, "auto")
else
body
end
end

defp normalize_tool_choice(body), do: body

defp tool_choice_type(tool_choice) do
Map.get(tool_choice, :type) || Map.get(tool_choice, "type")
end

defp normalize_assistant_content(%{"messages" => messages} = body) when is_list(messages) do
normalized_messages = Enum.map(messages, &normalize_assistant_message/1)
Map.put(body, "messages", normalized_messages)
Expand Down
7 changes: 5 additions & 2 deletions lib/req_llm/providers/google_vertex/openai_compat.ex
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ defmodule ReqLLM.Providers.GoogleVertex.OpenAICompat do
"""

alias ReqLLM.Provider.Defaults
alias ReqLLM.Providers.OpenAI.AdapterHelpers

@doc """
Formats a ReqLLM context into OpenAI Chat Completions request format.
Expand All @@ -34,7 +35,7 @@ defmodule ReqLLM.Providers.GoogleVertex.OpenAICompat do
end

# Get tools from context if available
tools = Map.get(context, :tools, [])
tools = opts[:tools] || Map.get(context, :tools, [])

# Build OpenAI-compatible request body using Defaults helper
temp_request =
Expand All @@ -52,7 +53,9 @@ defmodule ReqLLM.Providers.GoogleVertex.OpenAICompat do
)
)

Defaults.default_build_body(temp_request)
temp_request
|> Defaults.default_build_body()
|> AdapterHelpers.translate_tool_choice_format()
end

@doc """
Expand Down
31 changes: 20 additions & 11 deletions lib/req_llm/providers/groq.ex
Original file line number Diff line number Diff line change
Expand Up @@ -162,20 +162,29 @@ defmodule ReqLLM.Providers.Groq do
true -> {nil, nil}
end

type = tool_choice && (Map.get(tool_choice, :type) || Map.get(tool_choice, "type"))
name = tool_choice && (Map.get(tool_choice, :name) || Map.get(tool_choice, "name"))

if type == "tool" && name do
replacement =
if is_map_key(tool_choice, :type) do
%{type: "function", function: %{name: name}}
case tool_choice do
map when is_map(map) ->
type = Map.get(tool_choice, :type) || Map.get(tool_choice, "type")
name = Map.get(tool_choice, :name) || Map.get(tool_choice, "name")

if type == "tool" && name do
replacement =
if is_map_key(tool_choice, :type) do
%{type: "function", function: %{name: name}}
else
%{"type" => "function", "function" => %{"name" => name}}
end

Map.put(body, body_key, replacement)
else
%{"type" => "function", "function" => %{"name" => name}}
body
end

Map.put(body, body_key, replacement)
else
body
atom when not is_nil(atom) and is_atom(atom) ->
Map.put(body, body_key, to_string(atom))

_ ->
body
end
end

Expand Down
31 changes: 20 additions & 11 deletions lib/req_llm/providers/zenmux.ex
Original file line number Diff line number Diff line change
Expand Up @@ -226,20 +226,29 @@ defmodule ReqLLM.Providers.Zenmux do
true -> {nil, nil}
end

type = tool_choice && (Map.get(tool_choice, :type) || Map.get(tool_choice, "type"))
name = tool_choice && (Map.get(tool_choice, :name) || Map.get(tool_choice, "name"))

if type == "tool" && name do
replacement =
if is_map_key(tool_choice, :type) do
%{type: "function", function: %{name: name}}
case tool_choice do
map when is_map(map) ->
type = Map.get(tool_choice, :type) || Map.get(tool_choice, "type")
name = Map.get(tool_choice, :name) || Map.get(tool_choice, "name")

if type == "tool" && name do
replacement =
if is_map_key(tool_choice, :type) do
%{type: "function", function: %{name: name}}
else
%{"type" => "function", "function" => %{"name" => name}}
end

Map.put(body, body_key, replacement)
else
%{"type" => "function", "function" => %{"name" => name}}
body
end

Map.put(body, body_key, replacement)
else
body
atom when not is_nil(atom) and is_atom(atom) ->
Map.put(body, body_key, to_string(atom))

_ ->
body
end
end

Expand Down
Loading
Loading