LLM #

dbally.llms.base.LLM #

LLM(model_name: str, default_options: Optional[LLMOptions] = None)

Bases: Generic[LLMClientOptions], ABC

Abstract class for interaction with a Large Language Model.

Constructs a new LLM instance.

PARAMETER DESCRIPTION
model_name

Name of the model to be used.

TYPE: str

default_options

Default options to be used.

TYPE: Optional[LLMOptions] DEFAULT: None

RAISES DESCRIPTION
TypeError

If the subclass is missing the '_options_cls' attribute.

Source code in src/dbally/llms/base.py
def __init__(self, model_name: str, default_options: Optional[LLMOptions] = None) -> None:
    """
    Constructs a new LLM instance.

    Args:
        model_name: Name of the model to be used.
        default_options: Default options to be used.

    Raises:
        TypeError: If the subclass is missing the '_options_cls' attribute.
    """
    self.model_name = model_name
    self.default_options = default_options or self._options_cls()
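
The following is a minimal sketch of a concrete subclass; every name in it (EchoOptions, EchoLLM, EchoClient) is hypothetical, not part of db-ally. A subclass must set the _options_cls class attribute (otherwise TypeError is raised, as documented above) and provide a client property; EchoClient itself is sketched in the LLMClient section below.

from dataclasses import dataclass
from functools import cached_property
from typing import Any, ClassVar, Optional

from dbally.llms.base import LLM
from dbally.llms.clients.base import LLMClient, LLMOptions


@dataclass
class EchoOptions(LLMOptions):
    """Hypothetical options; _not_given stands in for a provider sentinel."""

    _not_given: ClassVar[Optional[Any]] = None
    temperature: Optional[float] = None
    max_tokens: Optional[int] = None


class EchoLLM(LLM[EchoOptions]):
    # Required class attribute; omitting it raises TypeError.
    _options_cls = EchoOptions

    @cached_property
    def client(self) -> LLMClient:
        # Concrete client sketched under LLMClient below.
        return EchoClient(model_name=self.model_name)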

model_name instance-attribute #

model_name = model_name

default_options instance-attribute #

default_options = default_options or _options_cls()

client abstractmethod cached property #

client: LLMClient

Client for the LLM.

count_tokens #

count_tokens(prompt: PromptTemplate) -> int

Counts tokens in the prompt.

PARAMETER DESCRIPTION
prompt

Formatted prompt template with conversation and response parsing configuration.

TYPE: PromptTemplate

RETURNS DESCRIPTION
int

Number of tokens in the prompt.

Source code in src/dbally/llms/base.py
def count_tokens(self, prompt: PromptTemplate) -> int:
    """
    Counts tokens in the prompt.

    Args:
        prompt: Formatted prompt template with conversation and response parsing configuration.

    Returns:
        Number of tokens in the prompt.
    """
    return sum(len(message["content"]) for message in prompt.chat)
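
Note that this default approximates the count by summing the character lengths of message contents rather than running a tokenizer. A subclass can override it with model-aware tokenization; below is a sketch building on the hypothetical EchoLLM above, assuming tiktoken is installed and recognizes the model name.

import tiktoken


class TokenCountingEchoLLM(EchoLLM):
    def count_tokens(self, prompt) -> int:
        # Real tokenization instead of the character-count default.
        encoding = tiktoken.encoding_for_model(self.model_name)
        return sum(len(encoding.encode(message["content"])) for message in prompt.chat)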

generate_text async #

generate_text(prompt: PromptTemplate, *, event_tracker: Optional[EventTracker] = None, options: Optional[LLMOptions] = None) -> str

Prepares and sends a prompt to the LLM and returns the response.

PARAMETER DESCRIPTION
prompt

Formatted prompt template with conversation and response parsing configuration.

TYPE: PromptTemplate

event_tracker

Event store used to audit the generation process.

TYPE: Optional[EventTracker] DEFAULT: None

options

Options to use for the LLM client.

TYPE: Optional[LLMOptions] DEFAULT: None

RETURNS DESCRIPTION
str

Text response from LLM.

RAISES DESCRIPTION
LLMError

If LLM text generation fails.

Source code in src/dbally/llms/base.py
async def generate_text(
    self,
    prompt: PromptTemplate,
    *,
    event_tracker: Optional[EventTracker] = None,
    options: Optional[LLMOptions] = None,
) -> str:
    """
    Prepares and sends a prompt to the LLM and returns the response.

    Args:
        prompt: Formatted prompt template with conversation and response parsing configuration.
        event_tracker: Event store used to audit the generation process.
        options: Options to use for the LLM client.

    Returns:
        Text response from LLM.

    Raises:
        LLMError: If LLM text generation fails.
    """
    options = (self.default_options | options) if options else self.default_options
    event = LLMEvent(prompt=prompt.chat, type=type(prompt).__name__)
    event_tracker = event_tracker or EventTracker()

    async with event_tracker.track_event(event) as span:
        event.response = await self.client.call(
            conversation=prompt.chat,
            options=options,
            event=event,
            json_mode=prompt.json_mode,
        )
        span(event)

    return event.response
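
Typical usage, with the hypothetical EchoLLM from above: per-call options are merged over default_options, with the per-call side taking precedence, and a fresh EventTracker is created when none is passed.

import asyncio


async def run(llm: EchoLLM, prompt) -> str:
    # These options override the matching fields of llm.default_options.
    return await llm.generate_text(prompt, options=EchoOptions(temperature=0.0))


# Given an already-formatted PromptTemplate `prompt`:
# asyncio.run(run(EchoLLM("echo-model"), prompt))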

dbally.llms.clients.base.LLMClient #

LLMClient(model_name: str)

Bases: Generic[LLMClientOptions], ABC

Abstract client for direct communication with an LLM.

Constructs a new LLMClient instance.

PARAMETER DESCRIPTION
model_name

Name of the model to be used.

TYPE: str

Source code in src/dbally/llms/clients/base.py
def __init__(self, model_name: str) -> None:
    """
    Constructs a new LLMClient instance.

    Args:
        model_name: Name of the model to be used.
    """
    self.model_name = model_name

model_name instance-attribute #

model_name = model_name

call abstractmethod async #

call(conversation: ChatFormat, options: LLMClientOptions, event: LLMEvent, json_mode: bool = False) -> str

Calls LLM inference API.

PARAMETER DESCRIPTION
conversation

List of dicts with "role" and "content" keys, representing the chat history so far.

TYPE: ChatFormat

options

Additional settings used by LLM.

TYPE: LLMClientOptions

event

LLMEvent instance whose fields should be filled during method execution.

TYPE: LLMEvent

json_mode

Force the response to be in JSON format.

TYPE: bool DEFAULT: False

RETURNS DESCRIPTION
str

Response string from LLM.

Source code in src/dbally/llms/clients/base.py
@abstractmethod
async def call(
    self,
    conversation: ChatFormat,
    options: LLMClientOptions,
    event: LLMEvent,
    json_mode: bool = False,
) -> str:
    """
    Calls LLM inference API.

    Args:
        conversation: List of dicts with "role" and "content" keys, representing the chat history so far.
        options: Additional settings used by LLM.
        event: LLMEvent instance whose fields should be filled during method execution.
        json_mode: Force the response to be in JSON format.

    Returns:
        Response string from LLM.
    """

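A concrete client wraps a provider SDK. Below is a sketch against the OpenAI async client, reusing the hypothetical EchoOptions from the LLM section; the ChatFormat and LLMEvent import paths are assumptions, and retries and usage accounting are elided.

from openai import AsyncOpenAI

from dbally.llms.clients.base import ChatFormat, LLMClient, LLMEvent


class EchoClient(LLMClient[EchoOptions]):
    def __init__(self, model_name: str) -> None:
        super().__init__(model_name)
        self._client = AsyncOpenAI()

    async def call(
        self,
        conversation: ChatFormat,
        options: EchoOptions,
        event: LLMEvent,
        json_mode: bool = False,
    ) -> str:
        # Drop unset fields (this sketch's sentinel is None); a real client would
        # set _not_given to the SDK's own sentinel, e.g. openai.NOT_GIVEN.
        kwargs = {key: value for key, value in options.dict().items() if value is not None}
        if json_mode:
            kwargs["response_format"] = {"type": "json_object"}
        response = await self._client.chat.completions.create(
            model=self.model_name,
            messages=conversation,
            **kwargs,
        )
        # generate_text assigns the returned string to event.response.
        return response.choices[0].message.content
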
dbally.llms.clients.base.LLMOptions dataclass #

LLMOptions()

Bases: ABC

Abstract dataclass that represents all available LLM call options.

dict #

dict() -> Dict[str, Any]

Creates a dictionary representation of the LLMOptions instance. If a value is None, it will be replaced with a provider-specific not-given sentinel.

RETURNS DESCRIPTION
Dict[str, Any]

A dictionary representation of the LLMOptions instance.

Source code in src/dbally/llms/clients/base.py
def dict(self) -> Dict[str, Any]:
    """
    Creates a dictionary representation of the LLMOptions instance.
    If a value is None, it will be replaced with a provider-specific not-given sentinel.

    Returns:
        A dictionary representation of the LLMOptions instance.
    """
    options = asdict(self)
    return {
        key: self._not_given if value is None or isinstance(value, NotGiven) else value
        for key, value in options.items()
    }