LLMClient.predict:v3
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
import weave
from typing import Union
from typing import Optional
from typing import Any
from enum import Enum
class ClientType(str, Enum):
    """Closed set of supported LLM provider backends.

    Inherits from ``str`` so members compare equal to their plain string
    values (e.g. ``ClientType.OPENAI == "openai"``), which lets callers pass
    either the enum member or the raw provider name.  Used by ``predict`` to
    select which provider SDK to dispatch to (per its docstring below).
    """

    GEMINI = "gemini"    # Google Gemini
    MISTRAL = "mistral"  # Mistral AI
    OPENAI = "openai"    # OpenAI
@weave.op()
def predict(
self,
user_prompt: Union[str, list[str]],
system_prompt: Optional[Union[str, list[str]]] = None,
schema: Optional[Any] = None,
) -> Union[str, Any]:
"""
Predicts the response from a language model based on the provided prompts and schema.
This function determines the client type and calls the appropriate SDK execution function
to get the response from the language model. It supports multiple client types including
GEMINI, MISTRAL, and OPENAI. Depending on the client type, it calls the corresponding
execution function with the provided user and system prompts, and an optional schema.