Skip to content

mistral_api.py

Define Mistral adapter class.

MistralAdapter

Bases: LlmAdapter

Define Mistral adapter class.

Source code in taglyatelle/llm_providers/mistral_api.py
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
class MistralAdapter(LlmAdapter):
    """Define Mistral adapter class."""

    def __init__(self, model: str, temperature: float | int = 0):
        """
        Set up the Mistral client and generation settings.

        Parameters
        ----------
        model
            LLM model name

        temperature
            LLM temperature
        """
        # Generation settings are stored as-is; the client reads the API key
        # from the module-level MISTRAL_API_KEY constant.
        self.model = model
        self.temperature = temperature
        self.client = Mistral(api_key=MISTRAL_API_KEY)

    def invoke_llm(self, prompt: str) -> str | None:
        """
        Send a single-turn request to the configured Mistral model.

        Parameters
        ----------
        prompt
            The prompt to send to the LLM

        Returns
        -------
        Answer of the LLM or None
        """
        # Single user message, non-streaming completion.
        chat_messages = [{"role": "user", "content": prompt}]
        completion = self.client.chat.complete(
            model=self.model,
            temperature=self.temperature,
            messages=chat_messages,  # type: ignore
            stream=False,
        )
        first_choice = completion.choices[0]  # type: ignore
        return first_choice.message.content

__init__(model, temperature=0)

Initialize Mistral's LLMs.

Parameters

model LLM model name

temperature LLM temperature

Source code in taglyatelle/llm_providers/mistral_api.py
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
def __init__(self, model: str, temperature: float | int = 0):
    """
    Set up the Mistral client and generation settings.

    Parameters
    ----------
    model
        LLM model name

    temperature
        LLM temperature
    """
    # Generation settings are stored as-is; the client reads the API key
    # from the module-level MISTRAL_API_KEY constant.
    self.model = model
    self.temperature = temperature
    self.client = Mistral(api_key=MISTRAL_API_KEY)

invoke_llm(prompt)

Send a request to an LLM.

Parameters

prompt The prompt to send to the LLM

Returns

Answer of the LLM or None

Source code in taglyatelle/llm_providers/mistral_api.py
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
def invoke_llm(self, prompt: str) -> str | None:
    """
    Send a request to a LLM.

    Parameters
    ----------
    prompt
        The prompt to send to the LLM

    Returns
    -------
    Answer of the LLM or None
    """
    response = self.client.chat.complete(
        model=self.model,
        temperature=self.temperature,
        messages=[  # type: ignore
            {
                "content": prompt,
                "role": "user",
            },
        ],
        stream=False,
    )

    return response.choices[0].message.content  # type: ignore