Skip to content

Commit f6a27ca

Browse files
committed
Address linter issues
Signed-off-by: Maysun J Faisal <[email protected]>
1 parent ef0b934 commit f6a27ca

File tree

12 files changed

+97
-60
lines changed

12 files changed (+97 −60 lines)

src/app/endpoints/conversations_v2.py

Lines changed: 3 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -314,19 +314,13 @@ def check_conversation_existence(user_id: str, conversation_id: str) -> None:
314314

315315
def transform_chat_message(entry: CacheEntry) -> dict[str, Any]:
316316
"""Transform the message read from cache into format used by response payload."""
317-
user_message = {
318-
"content": entry.query,
319-
"type": "user"
320-
}
321-
assistant_message: dict[str, Any] = {
322-
"content": entry.response,
323-
"type": "assistant"
324-
}
317+
user_message = {"content": entry.query, "type": "user"}
318+
assistant_message: dict[str, Any] = {"content": entry.response, "type": "assistant"}
325319

326320
# If referenced_documents exist on the entry, add them to the assistant message
327321
if entry.referenced_documents is not None:
328322
assistant_message["referenced_documents"] = [
329-
doc.model_dump(mode='json') for doc in entry.referenced_documents
323+
doc.model_dump(mode="json") for doc in entry.referenced_documents
330324
]
331325

332326
return {

src/app/endpoints/query.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,6 @@
2222
from llama_stack_client.types.model_list_response import ModelListResponse
2323
from llama_stack_client.types.shared.interleaved_content_item import TextContentItem
2424
from llama_stack_client.types.tool_execution_step import ToolExecutionStep
25-
from pydantic import AnyUrl
2625

2726
import constants
2827
import metrics
@@ -341,9 +340,9 @@ async def query_endpoint_handler( # pylint: disable=R0914
341340
model=model_id,
342341
started_at=started_at,
343342
completed_at=completed_at,
344-
referenced_documents=referenced_documents if referenced_documents else None
343+
referenced_documents=referenced_documents if referenced_documents else None,
345344
)
346-
345+
347346
store_conversation_into_cache(
348347
configuration,
349348
user_id,

src/app/endpoints/streaming_query.py

Lines changed: 8 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,6 @@
2121
)
2222
from llama_stack_client.types.shared import ToolCall
2323
from llama_stack_client.types.shared.interleaved_content_item import TextContentItem
24-
from pydantic import AnyUrl
2524

2625
from app.database import get_session
2726
from app.endpoints.query import (
@@ -48,7 +47,7 @@
4847
from models.config import Action
4948
from models.database.conversations import UserConversation
5049
from models.requests import QueryRequest
51-
from models.responses import ForbiddenResponse, UnauthorizedResponse, ReferencedDocument
50+
from models.responses import ForbiddenResponse, UnauthorizedResponse
5251
from utils.endpoints import (
5352
check_configuration_loaded,
5453
create_referenced_documents_with_metadata,
@@ -867,7 +866,9 @@ async def response_generator(
867866

868867
completed_at = datetime.now(UTC).strftime("%Y-%m-%dT%H:%M:%SZ")
869868

870-
referenced_documents = create_referenced_documents_with_metadata(summary, metadata_map)
869+
referenced_documents = create_referenced_documents_with_metadata(
870+
summary, metadata_map
871+
)
871872

872873
cache_entry = CacheEntry(
873874
query=query_request.query,
@@ -876,9 +877,11 @@ async def response_generator(
876877
model=model_id,
877878
started_at=started_at,
878879
completed_at=completed_at,
879-
referenced_documents=referenced_documents if referenced_documents else None
880+
referenced_documents=(
881+
referenced_documents if referenced_documents else None
882+
),
880883
)
881-
884+
882885
store_conversation_into_cache(
883886
configuration,
884887
user_id,

src/cache/postgres_cache.py

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -219,7 +219,9 @@ def get(
219219
docs_data = conversation_entry[6]
220220
docs_obj = None
221221
if docs_data:
222-
docs_obj = [ReferencedDocument.model_validate(doc) for doc in docs_data]
222+
docs_obj = [
223+
ReferencedDocument.model_validate(doc) for doc in docs_data
224+
]
223225
cache_entry = CacheEntry(
224226
query=conversation_entry[0],
225227
response=conversation_entry[1],
@@ -257,7 +259,10 @@ def insert_or_append(
257259
try:
258260
referenced_documents_json = None
259261
if cache_entry.referenced_documents:
260-
docs_as_dicts = [doc.model_dump(mode='json') for doc in cache_entry.referenced_documents]
262+
docs_as_dicts = [
263+
doc.model_dump(mode="json")
264+
for doc in cache_entry.referenced_documents
265+
]
261266
referenced_documents_json = json.dumps(docs_as_dicts)
262267

263268
# the whole operation is run in one transaction

src/cache/sqlite_cache.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -257,7 +257,9 @@ def insert_or_append(
257257

258258
referenced_documents_json = None
259259
if cache_entry.referenced_documents:
260-
docs_as_dicts = [doc.model_dump(mode='json') for doc in cache_entry.referenced_documents]
260+
docs_as_dicts = [
261+
doc.model_dump(mode="json") for doc in cache_entry.referenced_documents
262+
]
261263
referenced_documents_json = json.dumps(docs_as_dicts)
262264

263265
cursor.execute(

src/models/cache_entry.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
"""Model for conversation history cache entry."""
22

3-
from pydantic import BaseModel, Field
43
from typing import List
4+
from pydantic import BaseModel
55
from models.responses import ReferencedDocument
66

77

src/models/responses.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -161,6 +161,7 @@ class ToolCall(BaseModel):
161161
arguments: dict[str, Any] = Field(description="Arguments passed to the tool")
162162
result: Optional[dict[str, Any]] = Field(None, description="Result from the tool")
163163

164+
164165
class ConversationData(BaseModel):
165166
"""Model representing conversation data returned by cache list operations.
166167
@@ -174,6 +175,7 @@ class ConversationData(BaseModel):
174175
topic_summary: str | None
175176
last_message_timestamp: float
176177

178+
177179
class ReferencedDocument(BaseModel):
178180
"""Model representing a document referenced in generating a response.
179181
@@ -186,9 +188,7 @@ class ReferencedDocument(BaseModel):
186188
None, description="URL of the referenced document"
187189
)
188190

189-
doc_title: str | None = Field(
190-
None, description="Title of the referenced document"
191-
)
191+
doc_title: str | None = Field(None, description="Title of the referenced document")
192192

193193

194194
class QueryResponse(BaseModel):

tests/unit/app/endpoints/test_conversations_v2.py

Lines changed: 9 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -75,31 +75,33 @@ def test_transform_message_without_documents(self) -> None:
7575
# referenced_documents is None by default
7676
)
7777
transformed = transform_chat_message(entry)
78-
78+
7979
assistant_message = transformed["messages"][1]
8080

8181
# Assert that the key is NOT present when the list is None
8282
assert "referenced_documents" not in assistant_message
8383

8484
def test_transform_message_with_referenced_documents(self) -> None:
8585
"""Test the transformation when referenced_documents are present."""
86-
docs = [ReferencedDocument(doc_title="Test Doc", doc_url=AnyUrl("http://example.com"))]
87-
86+
docs = [
87+
ReferencedDocument(
88+
doc_title="Test Doc", doc_url=AnyUrl("http://example.com")
89+
)
90+
]
8891
entry = CacheEntry(
8992
query="query",
9093
response="response",
9194
provider="provider",
9295
model="model",
9396
started_at="2024-01-01T00:00:00Z",
9497
completed_at="2024-01-01T00:00:05Z",
95-
referenced_documents=docs
98+
referenced_documents=docs,
9699
)
97100

98101
transformed = transform_chat_message(entry)
99102
assistant_message = transformed["messages"][1]
100-
103+
101104
assert "referenced_documents" in assistant_message
102-
103105
ref_docs = assistant_message["referenced_documents"]
104106
assert len(ref_docs) == 1
105107
assert ref_docs[0]["doc_title"] == "Test Doc"
@@ -114,7 +116,7 @@ def test_transform_message_with_empty_referenced_documents(self) -> None:
114116
model="model",
115117
started_at="2024-01-01T00:00:00Z",
116118
completed_at="2024-01-01T00:00:05Z",
117-
referenced_documents=[] # Explicitly empty
119+
referenced_documents=[], # Explicitly empty
118120
)
119121

120122
transformed = transform_chat_message(entry)

tests/unit/app/endpoints/test_query.py

Lines changed: 17 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -167,6 +167,7 @@ def test_is_transcripts_disabled(setup_configuration, mocker) -> None:
167167
assert is_transcripts_enabled() is False, "Transcripts should be disabled"
168168

169169

170+
# pylint: disable=too-many-locals
170171
async def _test_query_endpoint_handler(
171172
mocker, dummy_request: Request, store_transcript_to_file=False
172173
) -> None:
@@ -184,12 +185,16 @@ async def _test_query_endpoint_handler(
184185
store_transcript_to_file
185186
)
186187
mocker.patch("app.endpoints.query.configuration", mock_config)
187-
188-
mock_store_in_cache = mocker.patch("app.endpoints.query.store_conversation_into_cache")
188+
189+
mock_store_in_cache = mocker.patch(
190+
"app.endpoints.query.store_conversation_into_cache"
191+
)
189192

190193
# Create mock referenced documents to simulate a successful RAG response
191194
mock_referenced_documents = [
192-
ReferencedDocument(doc_title="Test Doc 1", doc_url=AnyUrl("http://example.com/1"))
195+
ReferencedDocument(
196+
doc_title="Test Doc 1", doc_url=AnyUrl("http://example.com/1")
197+
)
193198
]
194199

195200
summary = TurnSummary(
@@ -208,7 +213,12 @@ async def _test_query_endpoint_handler(
208213

209214
mocker.patch(
210215
"app.endpoints.query.retrieve_response",
211-
return_value=(summary, conversation_id, mock_referenced_documents, TokenCounter()),
216+
return_value=(
217+
summary,
218+
conversation_id,
219+
mock_referenced_documents,
220+
TokenCounter(),
221+
),
212222
)
213223
mocker.patch(
214224
"app.endpoints.query.select_model_and_provider_id",
@@ -237,11 +247,12 @@ async def _test_query_endpoint_handler(
237247
# Assert the response is as expected
238248
assert response.response == summary.llm_response
239249
assert response.conversation_id == conversation_id
240-
250+
241251
# Assert that mock was called and get the arguments
242252
mock_store_in_cache.assert_called_once()
243253
call_args = mock_store_in_cache.call_args[0]
244-
# Extract CacheEntry object from the call arguments, it's the 4th argument from the func signature
254+
# Extract CacheEntry object from the call arguments,
255+
# it's the 4th argument from the func signature
245256
cached_entry = call_args[3]
246257

247258
assert isinstance(cached_entry, CacheEntry)

tests/unit/app/endpoints/test_streaming_query.py

Lines changed: 11 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -209,6 +209,7 @@ async def test_streaming_query_endpoint_on_connection_error(mocker):
209209
assert response.media_type == "text/event-stream"
210210

211211

212+
# pylint: disable=too-many-locals
212213
async def _test_streaming_query_endpoint_handler(mocker, store_transcript=False):
213214
"""Test the streaming query endpoint handler."""
214215
mock_client = mocker.AsyncMock()
@@ -297,8 +298,9 @@ async def _test_streaming_query_endpoint_handler(mocker, store_transcript=False)
297298
),
298299
]
299300

300-
mock_store_in_cache = mocker.patch("app.endpoints.streaming_query.store_conversation_into_cache")
301-
301+
mock_store_in_cache = mocker.patch(
302+
"app.endpoints.streaming_query.store_conversation_into_cache"
303+
)
302304
query = "What is OpenStack?"
303305
mocker.patch(
304306
"app.endpoints.streaming_query.retrieve_response",
@@ -359,20 +361,23 @@ async def _test_streaming_query_endpoint_handler(mocker, store_transcript=False)
359361
referenced_documents = d["data"]["referenced_documents"]
360362
assert len(referenced_documents) == 2
361363
assert referenced_documents[1]["doc_title"] == "Doc2"
362-
364+
363365
# Assert that mock was called and get the arguments
364366
mock_store_in_cache.assert_called_once()
365367
call_args = mock_store_in_cache.call_args[0]
366-
# Extract CacheEntry object from the call arguments, it's the 4th argument from the func signature
368+
# Extract CacheEntry object from the call arguments,
369+
# it's the 4th argument from the func signature
367370
cached_entry = call_args[3]
368-
371+
369372
# Assert that the CacheEntry was constructed correctly
370373
assert isinstance(cached_entry, CacheEntry)
371374
assert cached_entry.response == "LLM answer"
372375
assert cached_entry.referenced_documents is not None
373376
assert len(cached_entry.referenced_documents) == 2
374377
assert cached_entry.referenced_documents[0].doc_title == "Doc1"
375-
assert str(cached_entry.referenced_documents[1].doc_url) == "https://example.com/doc2"
378+
assert (
379+
str(cached_entry.referenced_documents[1].doc_url) == "https://example.com/doc2"
380+
)
376381

377382
# Assert the store_transcript function is called if transcripts are enabled
378383
if store_transcript:

Comments (0)