
mistral_common.tokens.instruct.request

FIMRequest(**data)

Bases: MistralBase

A valid Fill-in-the-Middle (FIM) completion request to be tokenized.

Attributes:

prompt (str)
    The prompt to be completed.

suffix (Optional[str])
    The suffix of the prompt. If provided, the model will generate text between the prompt and the suffix.

Examples:

>>> request = FIMRequest(prompt="Hello, my name is", suffix=" and I live in New York.")
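
A fuller usage sketch, tokenizing the request: it assumes your installed mistral_common exposes MistralTokenizer.v3() and its encode_fim method, so pick the tokenizer version that matches your model.

from mistral_common.tokens.instruct.request import FIMRequest
from mistral_common.tokens.tokenizers.mistral import MistralTokenizer

# Assumption: a v3 tokenizer with FIM support; choose the version your model expects.
tokenizer = MistralTokenizer.v3()

request = FIMRequest(
    prompt="def fibonacci(n: int) -> int:\n    ",
    suffix="\n    return fibonacci(n - 1) + fibonacci(n - 2)",
)

# encode_fim wraps the prompt and suffix with the tokenizer's FIM control tokens,
# so the model generates the span between them.
tokenized = tokenizer.encode_fim(request)
print(tokenized.text)          # debug string including control tokens
print(len(tokenized.tokens))   # token ids ready to send to the model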
Source code in .venv/lib/python3.13/site-packages/pydantic/main.py
def __init__(self, /, **data: Any) -> None:
    """Create a new model by parsing and validating input data from keyword arguments.

    Raises [`ValidationError`][pydantic_core.ValidationError] if the input data cannot be
    validated to form a valid model.

    `self` is explicitly positional-only to allow `self` as a field name.
    """
    # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks
    __tracebackhide__ = True
    validated_self = self.__pydantic_validator__.validate_python(data, self_instance=self)
    if self is not validated_self:
        warnings.warn(
            'A custom validator is returning a value other than `self`.\n'
            "Returning anything other than `self` from a top level model validator isn't supported when validating via `__init__`.\n"
            'See the `model_validator` docs (https://docs.pydantic.dev/latest/concepts/validators/#model-validators) for more details.',
            stacklevel=2,
        )
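
Because both request models inherit this constructor, invalid input raises pydantic's ValidationError rather than producing a partially built object. A minimal sketch of catching it:

from pydantic import ValidationError

from mistral_common.tokens.instruct.request import FIMRequest

try:
    FIMRequest(suffix=" and I live in New York.")  # required field `prompt` is missing
except ValidationError as exc:
    print(exc.error_count(), "validation error(s)")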

InstructRequest(**data)

Bases: MistralBase, Generic[ChatMessageType, ToolType]

A valid Instruct request to be tokenized.

Attributes:

messages (List[ChatMessageType])
    The history of the conversation.

system_prompt (Optional[str])
    The system prompt to be used for the conversation.

available_tools (Optional[List[ToolType]])
    The tools available to the assistant.

truncate_at_max_tokens (Optional[int])
    The maximum number of tokens to truncate the conversation at.

Examples:

>>> from mistral_common.protocol.instruct.messages import UserMessage, SystemMessage
>>> request = InstructRequest(
...     messages=[UserMessage(content="Hello, how are you?")], system_prompt="You are a helpful assistant."
... )
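
A slightly fuller sketch that declares a tool and encodes the request. The Tool and Function models come from mistral_common.protocol.instruct.tool_calls; reaching the instruct tokenizer through MistralTokenizer.v3().instruct_tokenizer and calling encode_instruct on it is an assumption about recent mistral_common versions, so adapt it to the API you actually have installed.

from mistral_common.protocol.instruct.messages import UserMessage
from mistral_common.protocol.instruct.tool_calls import Function, Tool
from mistral_common.tokens.instruct.request import InstructRequest
from mistral_common.tokens.tokenizers.mistral import MistralTokenizer

request = InstructRequest(
    messages=[UserMessage(content="What is the weather in Paris?")],
    system_prompt="You are a helpful assistant.",
    available_tools=[
        Tool(
            function=Function(
                name="get_weather",
                description="Get the current weather for a city.",
                parameters={
                    "type": "object",
                    "properties": {"city": {"type": "string"}},
                    "required": ["city"],
                },
            )
        )
    ],
)

# InstructRequest is the normalized form consumed by the instruct tokenizer.
# Assumption: instruct_tokenizer.encode_instruct is available in your version.
tokenizer = MistralTokenizer.v3()
tokenized = tokenizer.instruct_tokenizer.encode_instruct(request)
print(len(tokenized.tokens))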
Source code in .venv/lib/python3.13/site-packages/pydantic/main.py
def __init__(self, /, **data: Any) -> None:
    """Create a new model by parsing and validating input data from keyword arguments.

    Raises [`ValidationError`][pydantic_core.ValidationError] if the input data cannot be
    validated to form a valid model.

    `self` is explicitly positional-only to allow `self` as a field name.
    """
    # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks
    __tracebackhide__ = True
    validated_self = self.__pydantic_validator__.validate_python(data, self_instance=self)
    if self is not validated_self:
        warnings.warn(
            'A custom validator is returning a value other than `self`.\n'
            "Returning anything other than `self` from a top level model validator isn't supported when validating via `__init__`.\n"
            'See the `model_validator` docs (https://docs.pydantic.dev/latest/concepts/validators/#model-validators) for more details.',
            stacklevel=2,
        )