
Commit c6a017d

Merge pull request #29 from CodeHex16/hotfix-bazzan-sprint15
fix: change context on messages
2 parents aa7b853 + f19390a commit c6a017d

2 files changed

Lines changed: 14 additions & 9 deletions

app/services/llm_response_service.py

Lines changed: 12 additions & 7 deletions
@@ -50,7 +50,7 @@ def _get_context(self, question: str) -> Union[str, list[str]]:
             return output
         except ValueError as ve:
             raise HTTPException(
-                status_code=500, detail=f"Error getting context: {str(ve)}"
+                status_code=404, detail=f"Error getting context: {str(ve)}"
             )
         except Exception as e:
             logger.error(f"Unexpected error in _get_context for question '{question}': {str(e)}", exc_info=True)
@@ -59,26 +59,31 @@ def _get_context(self, question: str) -> Union[str, list[str]]:
             )
 
     def generate_llm_response(self, question: schemas.Question) -> StreamingResponse:
-        context = self._get_context(question.question)
+        try:
+            context = self._get_context(question.question)
+        except HTTPException as e:
+            logger.error(f"No context found", exc_info=True)
+            context = ""
+
         formatted_messages = ""
-        context_messages = ""
+        # context_messages = ""
 
         if question.messages:
             formatted_messages = "\n".join(
                 [f"{msg.sender}: {msg.content}" for msg in question.messages]
             )
-            context_messages = self._get_context(formatted_messages)
+            # context_messages = self._get_context(formatted_messages)
 
         messages = [
             SystemMessage(self._CHATBOT_INSTRUCTIONS),
+            SystemMessage(f"Conversazione precedente: {formatted_messages}"),
             SystemMessage(
-                f"Contesto: {context}\n{context_messages}",
+                f"Contesto: {context}",
             ),
-            SystemMessage(f"Conversazione precedente: {formatted_messages}"),
             HumanMessage(f"Domanda a cui devi rispondere: {question}"),
         ]
         print()
-        print(f"PROMPT: {context} {context_messages}")
+        print(f"PROMPT: {context}")
         print()
         try:
             stream_response = self._LLM._model.astream(messages)
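
For readers skimming the diff: _get_context now signals a missing context with a 404 instead of a 500, and generate_llm_response catches that HTTPException, falls back to an empty context, and passes the prior conversation to the model as its own system message instead of running a second context lookup on it. The snippet below is a minimal, hypothetical sketch of that flow, not repository code; the stand-in HTTPException class, the get_context stub, and the build_prompt helper are all illustrative assumptions.

# Hypothetical sketch of the fallback flow introduced by this commit (not repository code).
# Stand-in for fastapi.HTTPException so the example runs without dependencies.
class HTTPException(Exception):
    def __init__(self, status_code: int, detail: str):
        super().__init__(detail)
        self.status_code = status_code
        self.detail = detail


def get_context(question: str) -> str:
    # Stub for a context lookup that finds nothing; after this commit the
    # failure is reported as 404 rather than 500.
    raise HTTPException(status_code=404, detail=f"Error getting context: no match for {question!r}")


def build_prompt(question: str, history: list[tuple[str, str]]) -> list[str]:
    try:
        context = get_context(question)
    except HTTPException:
        # Same fallback as the patched generate_llm_response: continue with an
        # empty context instead of aborting the request.
        context = ""

    formatted_messages = "\n".join(f"{sender}: {content}" for sender, content in history)

    # Prompt order after the fix: instructions, previous conversation,
    # retrieved context, then the user's question.
    return [
        "SYSTEM: <chatbot instructions>",
        f"SYSTEM: Conversazione precedente: {formatted_messages}",
        f"SYSTEM: Contesto: {context}",
        f"HUMAN: Domanda a cui devi rispondere: {question}",
    ]


if __name__ == "__main__":
    history = [("user", "Ciao"), ("assistant", "Ciao! Come posso aiutarti?")]
    for line in build_prompt("Quali sono gli orari di apertura?", history):
        print(line)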

tests/services/test_llm_response_services.py

Lines changed: 2 additions & 2 deletions
@@ -68,8 +68,8 @@ def test_llm_response_service_get_context_false(monkeypatch):
     with pytest.raises(HTTPException) as excinfo:
         llm_response_service._get_context(question)
     assert (
-        excinfo.value.status_code == 500
-    ), "Should raise HTTPException with status code 500"
+        excinfo.value.status_code == 404
+    ), "Should raise HTTPException with status code 404"
     assert str(excinfo.value.detail).startswith(
         "Error getting context:"
     ), "Should raise HTTPException with the correct error message"
