Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
30 changes: 28 additions & 2 deletions src/google/adk/models/lite_llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -138,6 +138,24 @@
# Providers that require file_id instead of inline file_data
_FILE_ID_REQUIRED_PROVIDERS = frozenset({"openai", "azure"})

# Default filenames for file uploads when display_name is missing.
# OpenAI derives the MIME type from the filename extension during upload,
# so passing a proper name ensures the stored file gets the right content-type.
_MIME_TO_FILENAME = {
"application/pdf": "document.pdf",
"application/json": "document.json",
"application/msword": "document.doc",
"application/vnd.openxmlformats-officedocument.wordprocessingml.document": "document.docx",
"application/vnd.openxmlformats-officedocument.presentationml.presentation": "document.pptx",
"application/x-sh": "script.sh",
}


def _filename_for_mime(mime_type: str) -> str:
"""Return a default filename for a MIME type so uploads get the right content-type."""
return _MIME_TO_FILENAME.get(mime_type, "document.bin")


_MISSING_TOOL_RESULT_MESSAGE = (
"Error: Missing tool result (tool execution may have been interrupted "
"before a response was recorded)."
Expand Down Expand Up @@ -840,10 +858,18 @@ async def _get_content(
url_content_type: {"url": data_uri},
})
elif mime_type in _SUPPORTED_FILE_CONTENT_MIME_TYPES:
# OpenAI/Azure require file_id from uploaded file, not inline data
# OpenAI/Azure require file_id from uploaded file, not inline data.
# Pass (filename, content, content_type) so the upload gets the right MIME type.
if provider in _FILE_ID_REQUIRED_PROVIDERS:
display_name = getattr(
part.inline_data, "display_name", None
) or _filename_for_mime(part.inline_data.mime_type)
file_response = await litellm.acreate_file(
file=part.inline_data.data,
file=(
display_name,
part.inline_data.data,
part.inline_data.mime_type,
),
purpose="assistants",
custom_llm_provider=provider,
)
Expand Down
30 changes: 27 additions & 3 deletions tests/unittests/models/test_litellm.py
Original file line number Diff line number Diff line change
Expand Up @@ -3958,7 +3958,27 @@ async def test_get_content_pdf_openai_uses_file_id(mocker):
assert "file_data" not in content[0]["file"]

mock_acreate_file.assert_called_once_with(
file=b"test_pdf_data",
file=("document.pdf", b"test_pdf_data", "application/pdf"),
purpose="assistants",
custom_llm_provider="openai",
)


@pytest.mark.asyncio
async def test_get_content_pdf_openai_uses_display_name_as_filename(mocker):
  """When inline_data carries a display_name, it becomes the upload filename."""
  uploaded = mocker.create_autospec(litellm.FileObject)
  uploaded.id = "file-abc123"
  acreate_file_mock = AsyncMock(return_value=uploaded)
  mocker.patch.object(litellm, "acreate_file", new=acreate_file_mock)

  pdf_part = types.Part.from_bytes(
      data=b"test_pdf_data", mime_type="application/pdf"
  )
  pdf_part.inline_data.display_name = "my_report.pdf"

  result = await _get_content([pdf_part], provider="openai")

  # The uploaded file's id must be forwarded, and the upload must have been
  # made with the user-supplied display_name as the filename tuple element.
  assert result[0]["file"]["file_id"] == "file-abc123"
  acreate_file_mock.assert_called_once_with(
      file=("my_report.pdf", b"test_pdf_data", "application/pdf"),
      purpose="assistants",
      custom_llm_provider="openai",
  )
Expand Down Expand Up @@ -3997,7 +4017,7 @@ async def test_get_content_pdf_azure_uses_file_id(mocker):
assert content[0]["file"]["file_id"] == "file-xyz789"

mock_acreate_file.assert_called_once_with(
file=b"test_pdf_data",
file=("document.pdf", b"test_pdf_data", "application/pdf"),
purpose="assistants",
custom_llm_provider="azure",
)
Expand Down Expand Up @@ -4041,7 +4061,11 @@ async def test_get_completion_inputs_openai_file_upload(mocker):
assert content[1]["type"] == "file"
assert content[1]["file"]["file_id"] == "file-uploaded123"

mock_acreate_file.assert_called_once()
mock_acreate_file.assert_called_once_with(
file=("document.pdf", b"test_pdf_content", "application/pdf"),
purpose="assistants",
custom_llm_provider="openai",
)


@pytest.mark.asyncio
Expand Down