Skip to content

openai_api.py

Define OpenAI adapter class.

OpenAIAdapter

Bases: LlmAdapter

Define OpenAI adapter class.

Source code in taglyatelle/llm_providers/openai_api.py
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
class OpenAIAdapter(LlmAdapter):
    """Expose OpenAI chat models through the LlmAdapter interface."""

    def __init__(self, model: str, temperature: float | int = 0):
        """
        Set up the OpenAI client and per-request defaults.

        Parameters
        ----------
        model
            Name of the OpenAI model to query.

        temperature
            Sampling temperature sent with every request.
        """
        # Independent assignments; client creation last so the cheap
        # attribute writes happen even while the SDK initializes.
        self.temperature = temperature
        self.model = model
        self.client = OpenAI()

    def invoke_llm(self, prompt: str) -> str | None:
        """
        Submit a single-turn prompt and return the model's reply.

        Parameters
        ----------
        prompt
            Text sent as the sole user message.

        Returns
        -------
        The assistant's answer, or None when the API returns no content.
        """
        # Single user turn; the content is a one-element parts list,
        # matching the multimodal message schema.
        conversation = [
            {
                "role": "user",
                "content": [
                    {"type": "text", "text": prompt},
                ],
            },
        ]
        completion = self.client.chat.completions.create(
            model=self.model,
            temperature=self.temperature,
            messages=conversation,
        )
        return completion.choices[0].message.content

__init__(model, temperature=0)

Initialize an OpenAI LLM client.

Parameters

model LLM model name

temperature LLM temperature

Source code in taglyatelle/llm_providers/openai_api.py
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
def __init__(self, model: str, temperature: float | int = 0):
    """
    Set up the OpenAI client and per-request defaults.

    Parameters
    ----------
    model
        Name of the OpenAI model to query.

    temperature
        Sampling temperature sent with every request.
    """
    # Independent assignments; client creation last so the cheap
    # attribute writes happen even while the SDK initializes.
    self.temperature = temperature
    self.model = model
    self.client = OpenAI()

invoke_llm(prompt)

Send a request to an LLM.

Parameters

prompt The prompt to send to the LLM

Returns

The LLM's answer, or None if no content was returned

Source code in taglyatelle/llm_providers/openai_api.py
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
def invoke_llm(self, prompt: str) -> str | None:
    """
    Submit a single-turn prompt and return the model's reply.

    Parameters
    ----------
    prompt
        Text sent as the sole user message.

    Returns
    -------
    The assistant's answer, or None when the API returns no content.
    """
    # Single user turn; the content is a one-element parts list,
    # matching the multimodal message schema.
    conversation = [
        {
            "role": "user",
            "content": [
                {"type": "text", "text": prompt},
            ],
        },
    ]
    completion = self.client.chat.completions.create(
        model=self.model,
        temperature=self.temperature,
        messages=conversation,
    )
    return completion.choices[0].message.content