src.prompt_builders.mistral_ai.mistral_ai_prompt_builder.MistralAIPromptBuilder

Bases: BasePromptBuilder

A prompt builder specialized for Mistral AI chat completion models.

This class handles the construction of prompts for Mistral models, with special handling for tool definitions and system messages. It loads configuration from a YAML file and supports embedding tool information into the conversation history.

Attributes:

    config (Dict): Configuration dictionary loaded from prompt_builders.yaml.
        Expected to contain 'system_prompt' with 'header' and 'tool_instructions'.
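
The exact wording lives in the YAML file; below is a minimal sketch of the structure this dictionary is expected to have. Only the key names and the '{tools}' / '{date}' placeholders are taken from the code; the strings themselves are hypothetical.

# Hypothetical illustration of self.config (the 'mistral' section of
# src/configs/prompt_builders.yaml after loading); the wording is invented.
example_config = {
    "system_prompt": {
        # '{tools}' is filled with a comma-separated list of tool names,
        # '{date}' with today's date in YYYY-MM-DD format.
        "header": "You may call the following tools: {tools}. Today is {date}.",
        "tool_instructions": "When a tool is needed, reply with a JSON tool call.",
    }
}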

Example
builder = MistralAIPromptBuilder()
payload = PromptPayload(
    conversation_history=history,
    tool_definitions=tools
)
output = await builder.build_chat(payload)
# Use output.chat_messages with Mistral API
Source code in src/prompt_builders/mistral_ai/mistral_ai_prompt_builder.py
class MistralAIPromptBuilder(BasePromptBuilder):
    """A prompt builder specialized for Mistral AI chat completion models.

    This class handles the construction of prompts for Mistral models, with special
    handling for tool definitions and system messages. It loads configuration from
    a YAML file and supports embedding tool information into the conversation history.

    Attributes:
        config (Dict): Configuration dictionary loaded from prompt_builders.yaml.
            Expected to contain 'system_prompt' with 'header' and 'tool_instructions'.

    Example:
        ```python
        builder = MistralAIPromptBuilder()
        payload = PromptPayload(
            conversation_history=history,
            tool_definitions=tools
        )
        output = await builder.build_chat(payload)
        # Use output.chat_messages with Mistral API
        ```
    """

    def __init__(self):
        """Initialize the Mistral prompt builder.

        Loads configuration from the prompt_builders.yaml file and sets up logging.
        Raises FileNotFoundError if the config file is not found.
        """
        self.logger = logging.getLogger(self.__class__.__name__)
        self.logger.debug("Initializing MistralAIPromptBuilder")
        super().__init__()
        self.config = self._load_config()

    async def build_chat(self, payload: PromptPayload) -> PromptBuilderOutput:
        """Build a chat completion prompt with optional tool definitions.

        Constructs a prompt by potentially modifying the conversation history to
        include tool information. If tools are defined, they are added to or merged
        with the system message.

        Args:
            payload (PromptPayload): Contains conversation history and optional tool
                definitions. History should be a list of message objects, and tool
                definitions should be a list of tool specification objects.

        Returns:
            PromptBuilderOutput: Contains the modified chat messages ready for use
                with Mistral's chat completion API.

        Note:
            If the first message in history is a system message, tool information
            will be appended to it. Otherwise, a new system message will be created.
        """

        conversation_history = payload.conversation_history
        tool_definitions = payload.tool_definitions or []

        if not tool_definitions:
            self.logger.debug("No tool definitions provided, returning original history")
            return PromptBuilderOutput(chat_messages=conversation_history)

        tool_names = [tool.function.name for tool in tool_definitions]

        tool_section_header = self.config['system_prompt']['header'].format(
            tools=", ".join(tool_names),
            date=datetime.now().strftime('%Y-%m-%d')
        )
        tool_instructions = self.config['system_prompt']['tool_instructions']
        tool_info = await self._build_system_content(tool_definitions, tool_section_header, tool_instructions)
        modified_history = conversation_history.copy()

        # Mistral's formatting for tools in system messages might differ slightly
        # Format according to Mistral's requirements
        formatted_tool_info = f"[AVAILABLE_TOOLS]\n{tool_info}\n[/AVAILABLE_TOOLS]"

        if conversation_history and isinstance(conversation_history[0], SystemMessage):
            existing_content = conversation_history[0].content
            modified_history[0] = SystemMessage(
                content=f"{existing_content}\n\n{formatted_tool_info}"
            )
        else:
            system_msg = SystemMessage(content=formatted_tool_info)
            modified_history.insert(0, system_msg)

        self.logger.debug("Returning modified history with %d messages", len(modified_history))
        return PromptBuilderOutput(chat_messages=modified_history)

    async def build_text(self, payload: PromptPayload) -> PromptBuilderOutput:
        """Text completion is not fully implemented for Mistral models.

        This method is included to satisfy the interface but raises NotImplementedError.

        Args:
            payload (PromptPayload): Unused payload.

        Raises:
            NotImplementedError: Always raised as this method is not supported.
        """
        raise NotImplementedError(
            "Mistral models primarily use chat completions. Use build_chat() instead."
        )

    @staticmethod
    def _load_config() -> Dict:
        """Load the Mistral-specific configuration from the prompt builders YAML file.

        Returns:
            Dict: Configuration dictionary containing Mistral-specific settings.

        Raises:
            FileNotFoundError: If the config file doesn't exist.
            yaml.YAMLError: If the config file is malformed.
        """
        config_path = Path("src/configs/prompt_builders.yaml")
        with config_path.open() as f:
            config = yaml.safe_load(f)
            return config.get('mistral', {})

__init__()

Initialize the Mistral prompt builder.

Loads configuration from the prompt_builders.yaml file and sets up logging. Raises FileNotFoundError if the config file is not found.
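
A minimal usage sketch, assuming the builder is constructed from the project root; the error handling is illustrative, not part of the class.

try:
    builder = MistralAIPromptBuilder()
except FileNotFoundError as exc:
    # _load_config() opens src/configs/prompt_builders.yaml relative to the
    # working directory, so this usually means we are in the wrong directory.
    raise RuntimeError("prompt_builders.yaml not found; run from the project root") from exc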

Source code in src/prompt_builders/mistral_ai/mistral_ai_prompt_builder.py
def __init__(self):
    """Initialize the Mistral prompt builder.

    Loads configuration from the prompt_builders.yaml file and sets up logging.
    Raises FileNotFoundError if the config file is not found.
    """
    self.logger = logging.getLogger(self.__class__.__name__)
    self.logger.debug("Initializing MistralAIPromptBuilder")
    super().__init__()
    self.config = self._load_config()

build_chat(payload) async

Build a chat completion prompt with optional tool definitions.

Constructs a prompt by potentially modifying the conversation history to include tool information. If tools are defined, they are added to or merged with the system message.

Parameters:

    payload (PromptPayload, required): Contains conversation history and optional
        tool definitions. History should be a list of message objects, and tool
        definitions should be a list of tool specification objects.

Returns:

    PromptBuilderOutput: Contains the modified chat messages ready for use with
        Mistral's chat completion API.

Note

If the first message in history is a system message, the tool information will be appended to its content. Otherwise, a new system message will be created.
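
A sketch of the merge behaviour described in the note above, assuming history and tools are already populated; only the [AVAILABLE_TOOLS] wrapper and the message handling come from the code.

output = await builder.build_chat(
    PromptPayload(conversation_history=history, tool_definitions=tools)
)
first = output.chat_messages[0]
# If history started with a SystemMessage, its content now ends with the tool block:
#
#   <original system content>
#
#   [AVAILABLE_TOOLS]
#   <rendered tool definitions and instructions>
#   [/AVAILABLE_TOOLS]
print(first.content)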

Source code in src/prompt_builders/mistral_ai/mistral_ai_prompt_builder.py
async def build_chat(self, payload: PromptPayload) -> PromptBuilderOutput:
    """Build a chat completion prompt with optional tool definitions.

    Constructs a prompt by potentially modifying the conversation history to
    include tool information. If tools are defined, they are added to or merged
    with the system message.

    Args:
        payload (PromptPayload): Contains conversation history and optional tool
            definitions. History should be a list of message objects, and tool
            definitions should be a list of tool specification objects.

    Returns:
        PromptBuilderOutput: Contains the modified chat messages ready for use
            with Mistral's chat completion API.

    Note:
        If the first message in history is a system message, tool information
        will be appended to it. Otherwise, a new system message will be created.
    """

    conversation_history = payload.conversation_history
    tool_definitions = payload.tool_definitions or []

    if not tool_definitions:
        self.logger.debug("No tool definitions provided, returning original history")
        return PromptBuilderOutput(chat_messages=conversation_history)

    tool_names = [tool.function.name for tool in tool_definitions]

    tool_section_header = self.config['system_prompt']['header'].format(
        tools=", ".join(tool_names),
        date=datetime.now().strftime('%Y-%m-%d')
    )
    tool_instructions = self.config['system_prompt']['tool_instructions']
    tool_info = await self._build_system_content(tool_definitions, tool_section_header, tool_instructions)
    modified_history = conversation_history.copy()

    # Mistral's formatting for tools in system messages might differ slightly
    # Format according to Mistral's requirements
    formatted_tool_info = f"[AVAILABLE_TOOLS]\n{tool_info}\n[/AVAILABLE_TOOLS]"

    if conversation_history and isinstance(conversation_history[0], SystemMessage):
        existing_content = conversation_history[0].content
        modified_history[0] = SystemMessage(
            content=f"{existing_content}\n\n{formatted_tool_info}"
        )
    else:
        system_msg = SystemMessage(content=formatted_tool_info)
        modified_history.insert(0, system_msg)

    self.logger.debug("Returning modified history with %d messages", len(modified_history))
    return PromptBuilderOutput(chat_messages=modified_history)

build_text(payload) async

Text completion is not supported for Mistral models.

This method is included to satisfy the interface but raises NotImplementedError.

Parameters:

    payload (PromptPayload, required): Unused payload.

Raises:

    NotImplementedError: Always raised, as this method is not supported.
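
Because build_text() always raises, a caller can fall back to build_chat(); an illustrative sketch, assuming a payload is already available.

try:
    output = await builder.build_text(payload)
except NotImplementedError:
    # Mistral prompting in this project is chat-based, so fall back to chat messages.
    output = await builder.build_chat(payload)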

Source code in src/prompt_builders/mistral_ai/mistral_ai_prompt_builder.py
async def build_text(self, payload: PromptPayload) -> PromptBuilderOutput:
    """Text completion is not fully implemented for Mistral models.

    This method is included to satisfy the interface but raises NotImplementedError.

    Args:
        payload (PromptPayload): Unused payload.

    Raises:
        NotImplementedError: Always raised as this method is not supported.
    """
    raise NotImplementedError(
        "Mistral models primarily use chat completions. Use build_chat() instead."
    )