Skip to content

Prompt#

dbally.prompt.template.PromptTemplate #

PromptTemplate(chat: ChatFormat, *, json_mode: bool = False, response_parser: Callable = lambda x: x)

Bases: Generic[PromptFormatT]

Class for prompt templates.

Constructs a new PromptTemplate instance.

PARAMETER DESCRIPTION
chat

Chat-formatted conversation template.

TYPE: ChatFormat

json_mode

Whether to enforce JSON response from LLM.

TYPE: bool DEFAULT: False

response_parser

Function parsing the LLM response into the desired format.

TYPE: Callable DEFAULT: lambda x: x

Source code in src/dbally/prompt/template.py
def __init__(
    self,
    chat: ChatFormat,
    *,
    json_mode: bool = False,
    response_parser: Callable = lambda x: x,
) -> None:
    """
    Initializes the prompt template.

    Args:
        chat: Chat-formatted conversation template.
        json_mode: Whether to enforce JSON response from LLM.
        response_parser: Function parsing the LLM response into the desired format.
    """
    self.json_mode = json_mode
    self.response_parser = response_parser
    # Validate/normalize the message ordering before storing the conversation.
    self.chat: ChatFormat = _check_chat_order(chat)

chat instance-attribute #

chat: ChatFormat = _check_chat_order(chat)

json_mode instance-attribute #

json_mode = json_mode

response_parser instance-attribute #

response_parser = response_parser

format_prompt #

format_prompt(prompt_format: PromptFormatT) -> Self

Applies formatting to the prompt template chat contents.

PARAMETER DESCRIPTION
prompt_format

Format to be applied to the prompt.

TYPE: PromptFormatT

RETURNS DESCRIPTION
Self

PromptTemplate with formatted chat contents.

Source code in src/dbally/prompt/template.py
def format_prompt(self, prompt_format: PromptFormatT) -> Self:
    """
    Applies formatting to the prompt template chat contents.

    Operates on a deep copy, so the original template instance is not
    modified.

    Args:
        prompt_format: Format to be applied to the prompt.

    Returns:
        PromptTemplate with formatted chat contents.
    """
    formatted_prompt = copy.deepcopy(self)
    # Every public attribute of the format object becomes a substitution
    # variable for str.format below.
    formatting = dict(prompt_format.__dict__)

    if self._has_variable("examples"):
        # The template references an {examples} placeholder: render all
        # examples into that single variable.
        # NOTE(review): join requires each entry to be a string — confirm
        # prompt_format.examples holds strings (not FewShotExample objects)
        # when this branch is taken.
        formatting["examples"] = "\n".join(prompt_format.examples)
    else:
        # No {examples} placeholder: inject each example as its own pair of
        # few-shot chat messages, replacing any previously present examples.
        formatted_prompt = formatted_prompt.clear_few_shot_messages()
        for example in prompt_format.examples:
            formatted_prompt = formatted_prompt.add_few_shot_message(example)

    # Substitute the variables into every message body. Roles are carried
    # over and messages default to not being examples.
    formatted_prompt.chat = [
        {
            "role": message.get("role"),
            "content": message.get("content").format(**formatting),
            "is_example": message.get("is_example", False),
        }
        for message in formatted_prompt.chat
    ]
    return formatted_prompt

set_system_message #

set_system_message(content: str) -> Self

Adds a system message at the beginning of the template prompt.

PARAMETER DESCRIPTION
content

Message to be added.

TYPE: str

RETURNS DESCRIPTION
Self

PromptTemplate with the system message prepended.

Source code in src/dbally/prompt/template.py
def set_system_message(self, content: str) -> Self:
    """
    Sets a system message to the template prompt.

    Args:
        content: Message to be added.

    Returns:
        PromptTemplate with appended system message.
    """
    return self.__class__(
        chat=[{"role": "system", "content": content}, *self.chat],
        json_mode=self.json_mode,
        response_parser=self.response_parser,
    )

add_user_message #

add_user_message(content: str) -> Self

Add a user message to the template prompt.

PARAMETER DESCRIPTION
content

Message to be added.

TYPE: str

RETURNS DESCRIPTION
Self

PromptTemplate with appended user message.

Source code in src/dbally/prompt/template.py
def add_user_message(self, content: str) -> Self:
    """
    Add a user message to the template prompt.

    Args:
        content: Message to be added.

    Returns:
        PromptTemplate with appended user message.
    """
    return self.__class__(
        chat=[*self.chat, {"role": "user", "content": content}],
        json_mode=self.json_mode,
        response_parser=self.response_parser,
    )

add_assistant_message #

add_assistant_message(content: str) -> Self

Add an assistant message to the template prompt.

PARAMETER DESCRIPTION
content

Message to be added.

TYPE: str

RETURNS DESCRIPTION
Self

PromptTemplate with appended assistant message.

Source code in src/dbally/prompt/template.py
def add_assistant_message(self, content: str) -> Self:
    """
    Add an assistant message to the template prompt.

    Args:
        content: Message to be added.

    Returns:
        PromptTemplate with appended assistant message.
    """
    return self.__class__(
        chat=[*self.chat, {"role": "assistant", "content": content}],
        json_mode=self.json_mode,
        response_parser=self.response_parser,
    )

add_few_shot_message #

add_few_shot_message(example: FewShotExample) -> Self

Add a few-shot message to the template prompt.

PARAMETER DESCRIPTION
example

Few-shot example to be added.

TYPE: FewShotExample

RETURNS DESCRIPTION
Self

PromptTemplate with appended few-shot message.

RAISES DESCRIPTION
PromptTemplateError

if the template is empty.

Source code in src/dbally/prompt/template.py
def add_few_shot_message(self, example: FewShotExample) -> Self:
    """
    Add a few-shot message to the template prompt.

    Args:
        example: Few-shot example to be added.

    Returns:
        PromptTemplate with appended few-shot message.

    Raises:
        PromptTemplateError: if the template is empty.
    """
    if len(self.chat) == 0:
        raise PromptTemplateError("Cannot add few-shot messages to an empty template.")

    few_shot = [
        {"role": "user", "content": example.question, "is_example": True},
        {"role": "assistant", "content": example.answer, "is_example": True},
    ]
    few_shot_index = max(
        (i for i, entry in enumerate(self.chat) if entry.get("is_example") or entry.get("role") == "system"),
        default=0,
    )
    chat = self.chat[: few_shot_index + 1] + few_shot + self.chat[few_shot_index + 1 :]

    return self.__class__(
        chat=chat,
        json_mode=self.json_mode,
        response_parser=self.response_parser,
    )

clear_few_shot_messages #

clear_few_shot_messages() -> Self

Removes all few-shot messages from the template prompt.

RETURNS DESCRIPTION
Self

PromptTemplate with few-shot messages removed.

Source code in src/dbally/prompt/template.py
def clear_few_shot_messages(self) -> Self:
    """
    Removes all few-shot messages from the template prompt.

    Returns:
        PromptTemplate with few-shot messages removed.
    """
    return self.__class__(
        chat=[message for message in self.chat if not message.get("is_example")],
        json_mode=self.json_mode,
        response_parser=self.response_parser,
    )

dbally.prompt.template.PromptFormat #

PromptFormat(examples: Optional[List[FewShotExample]] = None)

Generic format for prompts allowing to inject few shot examples into the conversation.

Constructs a new PromptFormat instance.

PARAMETER DESCRIPTION
examples

List of examples to be injected into the conversation.

TYPE: Optional[List[FewShotExample]] DEFAULT: None

Source code in src/dbally/prompt/template.py
def __init__(self, examples: Optional[List[FewShotExample]] = None) -> None:
    """
    Initializes the prompt format.

    Args:
        examples: List of examples to be injected into the conversation.
            When omitted or empty, a fresh empty list is used.
    """
    # Mirror the `examples or []` idiom: any falsy value (None, []) yields
    # a new empty list rather than aliasing the caller's object.
    self.examples = examples if examples else []

examples instance-attribute #

examples = examples or []

dbally.prompt.elements.FewShotExample #

FewShotExample(question: str, answer_expr: Union[str, Callable])

A question:answer representation for few-shot prompting.

PARAMETER DESCRIPTION
question

sample question

TYPE: str

answer_expr

it can be either a stringified expression or a lambda for greater safety and code completions.

TYPE: Union[str, Callable]

RAISES DESCRIPTION
ValueError

If answer_expr is not a correct type.

Source code in src/dbally/prompt/elements.py
def __init__(self, question: str, answer_expr: Union[str, Callable]) -> None:
    """
    Args:
        question: sample question
        answer_expr: either a stringified expression or a lambda for greater
            safety and code completions.

    Raises:
        ValueError: If answer_expr is not a correct type.
    """
    self.question = question
    self.answer_expr = answer_expr

    if isinstance(answer_expr, str):
        # Already textual — use verbatim as the answer.
        self.answer = answer_expr
    elif callable(answer_expr):
        # Callable answers are turned into text by the parsing helper.
        self.answer = self._parse_lambda(answer_expr)
    else:
        raise ValueError("Answer expression should be either a string or a lambda")

question instance-attribute #

question = question

answer_expr instance-attribute #

answer_expr = answer_expr

answer instance-attribute #

answer = answer_expr