From c81ef38005cf4e7c63bb97b10b5ef426469d37fa Mon Sep 17 00:00:00 2001
From: lebaudantoine
Date: Thu, 11 Dec 2025 16:32:20 +0100
Subject: [PATCH] =?UTF-8?q?=E2=99=BB=EF=B8=8F(summary)=20extract=20LLMServ?=
 =?UTF-8?q?ice=20class=20into=20dedicated=20module?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Move LLMService class from existing file into separate dedicated module
to improve code organization.
---
 src/summary/summary/core/celery_worker.py | 48 +------------------
 src/summary/summary/core/llm_service.py   | 57 +++++++++++++++++++++++
 2 files changed, 59 insertions(+), 46 deletions(-)
 create mode 100644 src/summary/summary/core/llm_service.py

diff --git a/src/summary/summary/core/celery_worker.py b/src/summary/summary/core/celery_worker.py
index b86063c6..cd7a0619 100644
--- a/src/summary/summary/core/celery_worker.py
+++ b/src/summary/summary/core/celery_worker.py
@@ -7,7 +7,7 @@ import os
 import tempfile
 import time
 from pathlib import Path
-from typing import Any, Mapping, Optional
+from typing import Optional
 
 import openai
 import sentry_sdk
@@ -21,6 +21,7 @@ from urllib3.util import Retry
 
 from summary.core.analytics import MetadataManager, get_analytics
 from summary.core.config import get_settings
+from summary.core.llm_service import LLMException, LLMService
 from summary.core.prompt import (
     FORMAT_NEXT_STEPS,
     FORMAT_PLAN,
@@ -83,51 +84,6 @@ def create_retry_session():
     return session
 
 
-class LLMException(Exception):
-    """LLM call failed."""
-
-
-class LLMService:
-    """Service for performing calls to the LLM configured in the settings."""
-
-    def __init__(self):
-        """Init the LLMService once."""
-        self._client = openai.OpenAI(
-            base_url=settings.llm_base_url,
-            api_key=settings.llm_api_key.get_secret_value(),
-        )
-
-    def call(
-        self,
-        system_prompt: str,
-        user_prompt: str,
-        response_format: Optional[Mapping[str, Any]] = None,
-    ):
-        """Call the LLM service. 
-
-        Takes a system prompt and a user prompt, and returns the LLM's response
-        Returns None if the call fails.
-        """
-        try:
-            params: dict[str, Any] = {
-                "model": settings.llm_model,
-                "messages": [
-                    {"role": "system", "content": system_prompt},
-                    {"role": "user", "content": user_prompt},
-                ],
-            }
-            if response_format is not None:
-                params["response_format"] = response_format
-
-            response = self._client.chat.completions.create(**params)
-
-            return response.choices[0].message.content
-
-        except Exception as e:
-            logger.exception("LLM call failed: %s", e)
-            raise LLMException("LLM call failed: {e}") from e
-
-
 def format_actions(llm_output: dict) -> str:
     """Format the actions from the LLM output into a markdown list.
diff --git a/src/summary/summary/core/llm_service.py b/src/summary/summary/core/llm_service.py
new file mode 100644
index 00000000..872ecf2e
--- /dev/null
+++ b/src/summary/summary/core/llm_service.py
@@ -0,0 +1,57 @@
+"""LLM service to encapsulate LLM's calls."""
+
+import logging
+from typing import Any, Mapping, Optional
+
+import openai
+
+from summary.core.config import get_settings
+
+settings = get_settings()
+
+logger = logging.getLogger(__name__)
+
+
+class LLMException(Exception):
+    """LLM call failed."""
+
+
+class LLMService:
+    """Service for performing calls to the LLM configured in the settings."""
+
+    def __init__(self):
+        """Init the LLMService once."""
+        self._client = openai.OpenAI(
+            base_url=settings.llm_base_url,
+            api_key=settings.llm_api_key.get_secret_value(),
+        )
+
+    def call(
+        self,
+        system_prompt: str,
+        user_prompt: str,
+        response_format: Optional[Mapping[str, Any]] = None,
+    ):
+        """Call the LLM service.
+
+        Takes a system prompt and a user prompt, and returns the LLM's response.
+        Raises LLMException if the call fails.
+        """
+        try:
+            params: dict[str, Any] = {
+                "model": settings.llm_model,
+                "messages": [
+                    {"role": "system", "content": system_prompt},
+                    {"role": "user", "content": user_prompt},
+                ],
+            }
+            if response_format is not None:
+                params["response_format"] = response_format
+
+            response = self._client.chat.completions.create(**params)
+
+            return response.choices[0].message.content
+
+        except Exception as e:
+            logger.exception("LLM call failed: %s", e)
+            raise LLMException(f"LLM call failed: {e}") from e